Tokenizer: tokenize 1E-2 into a single token

Commit: 849be383e6
Parent: be598757a1
Author: Daniel Marjamäki
Date: 2010-04-04 08:01:05 +02:00
3 changed files with 18 additions and 9 deletions

View File

@ -331,7 +331,8 @@ void Tokenizer::createTokens(std::istream &code)
else if (strchr("+-", ch) &&
CurrentToken.length() > 0 &&
std::isdigit(CurrentToken[0]) &&
-                 CurrentToken[CurrentToken.length()-1] == 'e')
+                 (CurrentToken[CurrentToken.length()-1] == 'e' ||
+                  CurrentToken[CurrentToken.length()-1] == 'E'))
{
// Don't separate doubles "4.2e+10"
}

View File

@ -2222,23 +2222,23 @@ private:
"{\n"
" std::cout << log(1.0E-3) << std::endl;\n"
"}");
-        TODO_ASSERT_EQUALS("", errout.str());
+        ASSERT_EQUALS("", errout.str());
check("void foo()\n"
"{\n"
" std::cout << log(1E-3) << std::endl;\n"
"}");
-        TODO_ASSERT_EQUALS("", errout.str());
+        ASSERT_EQUALS("", errout.str());
// acos
check("void foo()\n"
"{\n"
" std::cout << acos(1) << std::endl;\n"
" std::cout << acos(-1) << std::endl;\n"
" std::cout << acos(0.1) << std::endl;\n"
" std::cout << acos(0.0001) << std::endl;\n"
" std::cout << acos(0.01) << std::endl;\n"
" std::cout << acos(1.0E-1) << std::endl;\n"
" std::cout << acos(1) << std::endl;\n"
" std::cout << acos(-1) << std::endl;\n"
" std::cout << acos(0.1) << std::endl;\n"
" std::cout << acos(0.0001) << std::endl;\n"
" std::cout << acos(0.01) << std::endl;\n"
" std::cout << acos(1.0E-1) << std::endl;\n"
" std::cout << acos(-1.0E-1) << std::endl;\n"
" std::cout << acos(+1.0E-1) << std::endl;\n"
" std::cout << acos(0.1E-1) << std::endl;\n"

View File

@ -41,6 +41,7 @@ private:
TEST_CASE(tokenize2);
TEST_CASE(tokenize3);
TEST_CASE(tokenize4);
TEST_CASE(tokenize5);
// don't freak out when the syntax is wrong
TEST_CASE(wrong_syntax);
@ -306,6 +307,13 @@ private:
ASSERT_EQUALS("", errout.str());
}
// New test added by this commit: verify that numeric literals with a
// sign in the exponent are kept as a single token by the tokenizer.
void tokenize5()
{
// Tokenize values
// A leading '+' before a number stays its own token; "1E3" is one token.
ASSERT_EQUALS("; + 1E3 ;", tokenizeAndStringify("; +1E3 ;"));
// "1E-2" must not be split at the '-': the uppercase 'E' exponent is now
// recognized by createTokens (see the tokenizer hunk above in this commit).
ASSERT_EQUALS("; 1E-2 ;", tokenizeAndStringify("; 1E-2 ;"));
}
void wrong_syntax()
{
errout.str("");