- Implemented ::token_split, which performs a full lex, and doesn't require
  white space like ::word_split does.
- Added unit tests.
Paul Beckingham 2014-05-31 13:51:10 -04:00
parent 0af9bbdc03
commit 7598997e70
3 changed files with 32 additions and 1 deletion

@@ -630,6 +630,19 @@ void Lexer::word_split (std::vector <std::string>& words, const std::string& input)
     words.push_back (word);
 }
 
+////////////////////////////////////////////////////////////////////////////////
+// Split 'input' into 'tokens'.
+void Lexer::token_split (std::vector <std::string>& words, const std::string& input)
+{
+  words.clear ();
+
+  std::string word;
+  Lexer::Type type;
+  Lexer lex (input);
+  while (lex.token (word, type))
+    words.push_back (word);
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 bool Lexer::is_punct (int c) const
 {
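
For context, a minimal sketch of how the two helpers differ in use. It assumes both are static members, as the construction of a local Lexer inside ::token_split suggests, and that "Lexer.h" is the header name; the sample input and the comments about its splits are illustrative, not taken from the commit's unit tests.

#include <string>
#include <vector>
#include "Lexer.h"   // header name assumed

int main ()
{
  std::vector <std::string> words;
  std::vector <std::string> tokens;

  // ::word_split breaks only on white space, so an input with no
  // white space comes back as a single element.
  Lexer::word_split (words, "due:tomorrow+next");

  // ::token_split performs a full lex, so the same input can come
  // back as several tokens even though it contains no white space
  // (the exact splits depend on the lexer's token rules).
  Lexer::token_split (tokens, "due:tomorrow+next");

  return 0;
}

The design difference is visible in the diff itself: rather than scanning for white space, ::token_split drives Lexer::token in a loop and keeps each token's text, discarding the reported Lexer::Type.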