Implemented a tokenizer function

Lev
2021-05-22 19:12:20 -05:00
parent c1521602d0
commit cd40b60b38
2 changed files with 29 additions and 0 deletions

Binary file not shown.


@@ -0,0 +1,29 @@
#include <string>
#include <vector>
#include <iostream>

// Split a CSV line into tokens separated by the given separator character.
std::vector<std::string> Tokenise(const std::string& csvLine, char separator)
{
    std::vector<std::string> tokens;
    std::string::size_type start, end;
    std::string token;
    // Skip any leading separators to find the start of the first token.
    start = csvLine.find_first_not_of(separator, 0);
    do
    {
        // Find the separator that ends the current token.
        end = csvLine.find_first_of(separator, start);
        // Stop if we ran off the end of the line or found no token.
        if (start == csvLine.length() || start == end) break;
        if (end != std::string::npos)
            token = csvLine.substr(start, end - start);
        else
            // No further separator: the token runs to the end of the line.
            token = csvLine.substr(start, csvLine.length() - start);
        tokens.push_back(token);
        // Continue scanning just past the separator.
        start = end + 1;
    } while (end != std::string::npos);
    return tokens;
}
int main()
{
return 0;
}
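
For reference, a minimal sketch of how the Tokenise function above could be exercised. The sample input line and the printing loop are illustrative only and are not part of this commit:

#include <iostream>
#include <string>
#include <vector>

// Declaration matching the Tokenise function added in this commit.
std::vector<std::string> Tokenise(const std::string& csvLine, char separator);

int main()
{
    // Illustrative CSV line; the field values are made up for this example.
    std::string line = "2020/03/17,product,bid,0.02";

    std::vector<std::string> tokens = Tokenise(line, ',');

    // Print each extracted field on its own line.
    for (const std::string& t : tokens)
        std::cout << t << std::endl;

    return 0;
}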