diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 48f746b73..3df4630a2 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -1565,21 +1565,21 @@ bool Tokenizer::createLinks()
     if (links.size() > 0)
     {
         // Error, { and } don't match.
-        syntaxError(_tokens, '{');
+        syntaxError(links.back(), '{');
         return false;
     }
 
     if (links2.size() > 0)
     {
         // Error, ( and ) don't match.
-        syntaxError(_tokens, '(');
+        syntaxError(links2.back(), '(');
         return false;
     }
 
     if (links3.size() > 0)
     {
         // Error, [ and ] don't match.
-        syntaxError(_tokens, '[');
+        syntaxError(links3.back(), '[');
         return false;
     }
 
diff --git a/test/teststl.cpp b/test/teststl.cpp
index 092abff4f..9372cba3b 100644
--- a/test/teststl.cpp
+++ b/test/teststl.cpp
@@ -524,7 +524,7 @@ private:
         Tokenizer tokenizer(0, this);
         std::istringstream istr(src);
         ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
-        ASSERT_EQUALS("[test.cpp:1]: (error) Invalid number of character ((). Can't process file.\n", errout.str());
+        ASSERT_EQUALS("[test.cpp:3]: (error) Invalid number of character ((). Can't process file.\n", errout.str());
     }
 
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 85d520149..6a1108aac 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -2356,6 +2356,42 @@ private:
             tokenizer.simplifyTokenList();
             ASSERT_EQUALS("", errout.str());
         }
+
+        {
+            errout.str("");
+            const char code[] = "void f()\n"
+                                "{\n"
+                                "    foo(;\n"
+                                "}\n";
+            Tokenizer tokenizer(0, this);
+            std::istringstream istr(code);
+            ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
+            ASSERT_EQUALS("[test.cpp:3]: (error) Invalid number of character ((). Can't process file.\n", errout.str());
+        }
+
+        {
+            errout.str("");
+            const char code[] = "void f()\n"
+                                "{\n"
+                                "    for(;;){ foo();\n"
+                                "}\n";
+            Tokenizer tokenizer(0, this);
+            std::istringstream istr(code);
+            ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
+            ASSERT_EQUALS("[test.cpp:2]: (error) Invalid number of character ({). Can't process file.\n", errout.str());
+        }
+
+        {
+            errout.str("");
+            const char code[] = "void f()\n"
+                                "{\n"
+                                "    a[10;\n"
+                                "}\n";
+            Tokenizer tokenizer(0, this);
+            std::istringstream istr(code);
+            ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
+            ASSERT_EQUALS("[test.cpp:3]: (error) Invalid number of character ([). Can't process file.\n", errout.str());
+        }
     }
 
     void removeKeywords()