diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 14d32c644..473f9038b 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -344,6 +344,12 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
             tok->next()->str((std::string("-") + tok->strAt(2)).c_str());
             tok->next()->deleteNext();
         }
+
+        if (Token::Match(tok, "return - %num%") && tok->strAt(2)[0] != '-')
+        {
+            tok->next()->str((std::string("-") + tok->strAt(2)).c_str());
+            tok->next()->deleteNext();
+        }
     }

     // Combine tokens..
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index d55697ea7..36cff9252 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -178,6 +178,7 @@ private:
         ASSERT_EQUALS("i = -12", tokenizeAndStringify("i = -12"));
         ASSERT_EQUALS("1 - 2", tokenizeAndStringify("1-2"));
         ASSERT_EQUALS("foo ( -1 ) - 2", tokenizeAndStringify("foo(-1)-2"));
+        ASSERT_EQUALS("int f ( ) { return -2 ; }", tokenizeAndStringify("int f(){return -2;}"));
     }
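
For context, the transformation this patch performs can be sketched outside the cppcheck codebase. The following is a minimal stand-alone illustration, not cppcheck's actual Token API: it represents the token list as a plain vector of strings and merges a unary minus into the following numeric literal when the preceding token is "return", mirroring the "return - %num%" pattern above. The function name and token representation are invented for the example; the guard against an already-signed literal corresponds to the patch's tok->strAt(2)[0] != '-' check.

#include <cctype>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for cppcheck's token list: a flat vector of strings.
// Folds "return" "-" "<num>" into "return" "-<num>", as the patch does,
// and leaves literals that already carry a sign untouched.
static void combineReturnNegativeNumber(std::vector<std::string> &tokens)
{
    for (std::size_t i = 0; i + 2 < tokens.size(); ++i)
    {
        if (tokens[i] == "return" && tokens[i + 1] == "-" &&
            !tokens[i + 2].empty() &&
            std::isdigit(static_cast<unsigned char>(tokens[i + 2][0])))
        {
            tokens[i + 1] = "-" + tokens[i + 2];   // fold the sign into the literal
            tokens.erase(tokens.begin() + i + 2);  // drop the now-merged number token
        }
    }
}

int main()
{
    std::vector<std::string> tokens =
        {"int", "f", "(", ")", "{", "return", "-", "2", ";", "}"};
    combineReturnNegativeNumber(tokens);
    for (const std::string &t : tokens)
        std::cout << t << ' ';                     // prints: int f ( ) { return -2 ; }
    std::cout << '\n';
}

Run on the token sequence from the new test case, the sketch produces the same stringification the test asserts: "int f ( ) { return -2 ; }".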