diff --git a/src/Lexer.cpp b/src/Lexer.cpp
index 1711fb547..022a5ea30 100644
--- a/src/Lexer.cpp
+++ b/src/Lexer.cpp
@@ -662,6 +662,19 @@ void Lexer::token_split (std::vector <std::string>& words, const std::string& input)
     words.push_back (word);
 }
 
+////////////////////////////////////////////////////////////////////////////////
+// Split 'input' into 'tokens', preserving type.
+void Lexer::token_split (std::vector <std::pair <std::string, Lexer::Type>>& lexemes, const std::string& input)
+{
+  lexemes.clear ();
+
+  std::string word;
+  Lexer::Type type;
+  Lexer lex (input);
+  while (lex.token (word, type))
+    lexemes.push_back (std::pair <std::string, Lexer::Type> (word, type));
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 bool Lexer::is_punct (int c) const
 {
diff --git a/src/Lexer.h b/src/Lexer.h
index 0d6dbf2f6..1d21161db 100644
--- a/src/Lexer.h
+++ b/src/Lexer.h
@@ -67,6 +67,7 @@ public:
   static bool is_ws (int);
   static void word_split (std::vector <std::string>&, const std::string&);
   static void token_split (std::vector <std::string>&, const std::string&);
+  static void token_split (std::vector <std::pair <std::string, Lexer::Type>>&, const std::string&);
 
 private:
   bool is_punct (int) const;