Lexer: Added static ::tokenize method to reduce a string to tokens and types

This commit is contained in:
Paul Beckingham 2016-03-19 15:45:50 -04:00
parent be80313366
commit 248add6cea
2 changed files with 15 additions and 0 deletions

View file

@ -64,6 +64,20 @@ bool Lexer::token (std::string& token, Lexer::Type& type)
return false;
}
////////////////////////////////////////////////////////////////////////////////
// Convenience wrapper: lexes the entire input string in one pass and returns
// every (token, type) pair in the order encountered.
//
// input  - the raw string to be tokenized.
// return - vector of (token text, token type) tuples; empty when the input
//          yields no tokens.
std::vector <std::tuple <std::string, Lexer::Type>> Lexer::tokenize (const std::string& input)
{
  std::vector <std::tuple <std::string, Lexer::Type>> tokens;

  std::string token;
  Lexer::Type type;
  Lexer lexer (input);

  // Drain the lexer until it reports no further tokens.
  while (lexer.token (token, type))
    // emplace_back constructs the tuple in place, avoiding the temporary
    // std::tuple that push_back (std::tuple (...)) would create.
    tokens.emplace_back (token, type);

  return tokens;
}
////////////////////////////////////////////////////////////////////////////////
// No L10N - these are for internal purposes.
const std::string Lexer::typeName (const Lexer::Type& type)