diff --git a/src/tokenize.cpp b/src/tokenize.cpp
index 61e78155d..92126b56d 100644
--- a/src/tokenize.cpp
+++ b/src/tokenize.cpp
@@ -322,7 +322,7 @@ void Tokenizer::tokenize(std::istream &code, const char FileName[])
 
         if (strchr("#+-*/%&|^?!=<>[](){};:,.~", ch))
         {
-            if (ch == '.' && std::isdigit(CurrentToken[0]))
+            if (strchr(".+-", ch) && std::isdigit(CurrentToken[0]))
             {
                 // Don't separate doubles
             }
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index b39a29b05..f5e417aab 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -997,6 +997,8 @@ private:
                            "{\n"
                            "    double a = 4.2;\n"
                            "    float b = 4.2f;\n"
+                           "    double c = 4.2e+10;\n"
+                           "    double d = 4.2e-10;\n"
                            "}\n";
 
         // tokenize..
@@ -1007,7 +1009,7 @@ private:
         std::ostringstream ostr;
         for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next())
             ostr << " " << tok->str();
-        ASSERT_EQUALS(std::string(" void f ( ) { double a = 4.2 ; float b = 4.2f ; }"), ostr.str());
+        ASSERT_EQUALS(std::string(" void f ( ) { double a = 4.2 ; float b = 4.2f ; double c = 4.2e+10 ; double d = 4.2e-10 ; }"), ostr.str());
     }
 
     void tokenize_strings()