diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index df10b8e50..a694bc021 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -341,6 +341,13 @@ void Tokenizer::createTokens(std::istream &code)
         {
             // Don't separate doubles "4.2e+10"
         }
+        else if (ch=='&' && CurrentToken.empty() && code.peek() == '&')
+        {
+            // &&
+            ch = code.get();
+            addtoken("&&", lineno, FileIndex, true);
+            continue;
+        }
         else
         {
             if (CurrentToken == "#file")
@@ -1672,7 +1679,6 @@ bool Tokenizer::tokenize(std::istream &code, const char FileName[], const std::s
 
     {
         { "<", "<", "<<" },
-        { "&", "&", "&&" },
         { "|", "|", "||" },
 
         { "+", "=", "+=" },
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index aa00a35c3..e1ee27e44 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -520,6 +520,7 @@ private:
 
         ASSERT_EQUALS("( p1 || ! p2 )", tokenizeAndStringify("( p1 != NULL || p2 == NULL )", true));
         ASSERT_EQUALS("( p1 && ! p2 )", tokenizeAndStringify("( p1 != NULL && p2 == NULL )", true));
+        ASSERT_EQUALS("a & & b", tokenizeAndStringify("a & &b", true));
         ASSERT_EQUALS("( ! p )", tokenizeAndStringify("( p == false )", true));
         ASSERT_EQUALS("( ! p )", tokenizeAndStringify("( p == 0 )", true));
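
For context: before this change, "&&" was produced by the token-combining table in Tokenizer::tokenize, which glued any two adjacent "&" tokens together, so the source "a & &b" (bitwise AND applied to an address-of expression) was wrongly stringified as "a && b". The patch instead recognizes "&&" in createTokens via code.peek(), so the two ampersands must be adjacent characters in the input stream. The following standalone sketch is not cppcheck's real code (the lex() helper and its one-character tokens are simplifications for illustration, and it omits the patch's CurrentToken.empty() guard), but it shows the same peek-based technique:

#include <cctype>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical toy lexer illustrating the peek-based approach from the
// patch above: "&&" becomes a single token only when the second '&' is
// the very next character in the stream.
static std::vector<std::string> lex(std::istream &code)
{
    std::vector<std::string> tokens;
    char ch;
    while (code.get(ch)) {
        if (std::isspace(static_cast<unsigned char>(ch)))
            continue;
        if (ch == '&' && code.peek() == '&') {
            code.get();                 // consume the adjacent second '&'
            tokens.push_back("&&");
        } else if (ch == '&') {
            tokens.push_back("&");      // lone '&': bitwise AND or address-of
        } else {
            tokens.push_back(std::string(1, ch));   // crude one-char tokens
        }
    }
    return tokens;
}

int main()
{
    std::istringstream adjacent("a && b"), separated("a & &b");
    for (const std::string &t : lex(adjacent))
        std::cout << t << ' ';          // prints: a && b
    std::cout << '\n';
    for (const std::string &t : lex(separated))
        std::cout << t << ' ';          // prints: a & & b
    std::cout << '\n';
}

Because the merge now happens at character level rather than on already-separated "&" tokens, "a & &b" keeps its three distinct tokens, which is exactly what the new ASSERT_EQUALS test case checks.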