diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index b8d7a81f8..db43014b6 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -85,11 +85,31 @@ const std::vector<std::string> *Tokenizer::getFiles() const
 // add a token. Used by 'Tokenizer'
 //---------------------------------------------------------------------------
 
-void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsigned int fileno)
+void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsigned int fileno, bool split)
 {
     if (str[0] == 0)
         return;
 
+    // If token contains # characters, split it up
+    if (split && strstr(str, "##"))
+    {
+        std::string temp;
+        for (unsigned int i = 0; str[i]; ++i)
+        {
+            if (strncmp(&str[i], "##", 2) == 0)
+            {
+                addtoken(temp.c_str(), lineno, fileno, false);
+                temp.clear();
+                addtoken("##", lineno, fileno, false);
+                ++i;
+            }
+            else
+                temp += str[i];
+        }
+        addtoken(temp.c_str(), lineno, fileno, false);
+        return;
+    }
+
     // Replace hexadecimal value with decimal
     std::ostringstream str2;
     if (strncmp(str, "0x", 2) == 0)
@@ -338,26 +358,7 @@ void Tokenizer::createTokens(std::istream &code)
                 continue;
             }
 
-            // If token contains # characters, split it up
-            if (CurrentToken.find("##") == std::string::npos)
-                addtoken(CurrentToken.c_str(), lineno, FileIndex);
-            else
-            {
-                std::string temp;
-                for (std::string::size_type i = 0; i < CurrentToken.length(); ++i)
-                {
-                    if (CurrentToken[i] == '#' && CurrentToken.length() + 1 > i && CurrentToken[i+1] == '#')
-                    {
-                        addtoken(temp.c_str(), lineno, FileIndex);
-                        temp.clear();
-                        addtoken("##", lineno, FileIndex);
-                        ++i;
-                    }
-                    else
-                        temp += CurrentToken[i];
-                }
-                addtoken(temp.c_str(), lineno, FileIndex);
-            }
+            addtoken(CurrentToken.c_str(), lineno, FileIndex, true);
 
             CurrentToken.clear();
 
@@ -383,25 +384,7 @@ void Tokenizer::createTokens(std::istream &code)
         CurrentToken += ch;
     }
 
-    if (CurrentToken.find("##") == std::string::npos)
-        addtoken(CurrentToken.c_str(), lineno, FileIndex);
-    else
-    {
-        std::string temp;
-        for (std::string::size_type i = 0; i < CurrentToken.length(); ++i)
-        {
-            if (CurrentToken[i] == '#' && CurrentToken.length() + 1 > i && CurrentToken[i+1] == '#')
-            {
-                addtoken(temp.c_str(), lineno, FileIndex);
-                temp.clear();
-                addtoken("##", lineno, FileIndex);
-                ++i;
-            }
-            else
-                temp += CurrentToken[i];
-        }
-        addtoken(temp.c_str(), lineno, FileIndex);
-    }
+    addtoken(CurrentToken.c_str(), lineno, FileIndex, true);
 }
 
 void Tokenizer::duplicateTypedefError(const Token *tok1, const Token *tok2, const std::string &type)
diff --git a/lib/tokenize.h b/lib/tokenize.h
index c38587464..f0cc7bdfb 100644
--- a/lib/tokenize.h
+++ b/lib/tokenize.h
@@ -262,7 +262,7 @@ public:
     std::vector<const Token *> _functionList;
 
-    void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno);
+    void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno, bool split=false);
     void addtoken(const Token *tok, const unsigned int lineno, const unsigned int fileno);
 
     /**
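To illustrate the behaviour this patch centralizes: with `split` set to true, `addtoken()` now breaks a token such as `a##b` into the three tokens `a`, `##`, `b` before the usual hex-to-decimal handling runs, and the two duplicated splitting loops in `createTokens()` collapse into single calls. Below is a minimal standalone sketch of that splitting loop, not part of the patch; `splitOnConcat` and the `emit` callback are hypothetical stand-ins for the recursive `addtoken(..., false)` calls, and the empty-string guards mirror `addtoken()`'s `str[0] == 0` early return.

```cpp
#include <cstring>
#include <functional>
#include <iostream>
#include <string>

// Standalone sketch of the "##" splitting now done inside addtoken().
// emit() stands in for the recursive addtoken(temp.c_str(), lineno, fileno, false)
// calls; the !temp.empty() checks mirror addtoken()'s str[0] == 0 early return.
static void splitOnConcat(const char str[], const std::function<void(const std::string &)> &emit)
{
    std::string temp;
    for (unsigned int i = 0; str[i]; ++i)
    {
        if (std::strncmp(&str[i], "##", 2) == 0)
        {
            if (!temp.empty())
                emit(temp);   // the piece accumulated before the "##"
            temp.clear();
            emit("##");       // "##" becomes a token of its own
            ++i;              // advance past the first '#'; the loop increment skips the second
        }
        else
            temp += str[i];
    }
    if (!temp.empty())
        emit(temp);           // trailing piece, if any
}

int main()
{
    // Prints the three tokens "a", "##", "b" on separate lines.
    splitOnConcat("a##b", [](const std::string &tok) {
        std::cout << tok << '\n';
    });
}
```

Defaulting the new parameter to `false` in tokenize.h keeps every existing `addtoken()` call site compiling unchanged; only the two `createTokens()` call sites opt in by passing `true`.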