diff --git a/lib/parser/tokenizer.cc b/lib/parser/tokenizer.cc
index 75e0dfcf..8c616d9b 100644
--- a/lib/parser/tokenizer.cc
+++ b/lib/parser/tokenizer.cc
@@ -113,11 +113,32 @@ eat_token (std::string& food, size_t& pos)
 }
 
+// Collapse control characters to ' ' and trim leading/trailing spaces.
+// The unsigned-char cast matters: with a signed plain char, non-ASCII
+// (high-bit) bytes compare < ' ' and would be clobbered to spaces.
+static std::string
+cleanup (const std::string& dirty)
+{
+	auto clean = dirty;
+
+	// only accept spc as whitespace
+	for (auto f = clean.begin(); f != clean.end(); ++f)
+		if (static_cast<unsigned char>(*f) < ' ')
+			*f = ' ';
+
+	clean.erase (0, clean.find_first_not_of(" "));
+	clean.erase (clean.find_last_not_of(" ") + 1); // remove trailing space
+
+	return clean;
+}
+
+
 Mux::Tokens
 Mux::tokenize (const std::string& s)
 {
 	Tokens tokens{};
-	std::string food{s};
+
+	std::string food = cleanup(s);
 	size_t pos{0};
 
-	if (s.empty())
+	if (food.empty())	// whitespace-only input cleans to empty