Mirror of
https://github.com/GothenburgBitFactory/timewarrior.git
Last synced: 2025-07-07 20:06:39 +02:00
Lexer: Added static ::tokenize method to reduce a string to tokens and types
This commit is contained in:
parent
be80313366
commit
248add6cea
2 changed files with 15 additions and 0 deletions
|
@ -64,6 +64,20 @@ bool Lexer::token (std::string& token, Lexer::Type& type)
|
|||
return false;
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
std::vector <std::tuple <std::string, Lexer::Type>> Lexer::tokenize (const std::string& input)
{
  // Reduce 'input' to an ordered list of (token, type) pairs by draining a
  // temporary Lexer. Returns an empty vector when 'input' yields no tokens.
  std::vector <std::tuple <std::string, Lexer::Type>> tokens;

  std::string token;
  Lexer::Type type;
  Lexer lexer (input);
  while (lexer.token (token, type))
    // Construct the tuple in place instead of building a temporary and
    // copying it in via push_back.
    tokens.emplace_back (token, type);

  return tokens;
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// No L10N - these are for internal purposes.
|
||||
const std::string Lexer::typeName (const Lexer::Type& type)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue