diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 16f4bb17d..c5489fd50 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -1508,6 +1508,7 @@ void Tokenizer::simplifyNamespaces()
 
 bool Tokenizer::createLinks()
 {
+    std::list<Token *> type;
     std::list<Token *> links;
     std::list<Token *> links2;
     std::list<Token *> links3;
@@ -1521,6 +1522,7 @@ bool Tokenizer::createLinks()
         if (token->str() == "{")
         {
             links.push_back(token);
+            type.push_back(token);
         }
         else if (token->str() == "}")
         {
@@ -1530,6 +1532,12 @@
                 syntaxError(token, '{');
                 return false;
             }
+            if (type.back()->str() != "{")
+            {
+                syntaxError(type.back(), type.back()->str()[0]);
+                return false;
+            }
+            type.pop_back();
 
             Token::createMutualLinks(links.back(), token);
             links.pop_back();
@@ -1537,6 +1545,7 @@
         else if (token->str() == "(")
         {
             links2.push_back(token);
+            type.push_back(token);
         }
         else if (token->str() == ")")
         {
@@ -1546,6 +1555,12 @@
                 syntaxError(token, '(');
                 return false;
             }
+            if (type.back()->str() != "(")
+            {
+                syntaxError(type.back(), type.back()->str()[0]);
+                return false;
+            }
+            type.pop_back();
 
             Token::createMutualLinks(links2.back(), token);
             links2.pop_back();
@@ -1553,6 +1568,7 @@
         else if (token->str() == "[")
        {
             links3.push_back(token);
+            type.push_back(token);
         }
         else if (token->str() == "]")
         {
@@ -1562,6 +1578,12 @@
                 syntaxError(token, '[');
                 return false;
             }
+            if (type.back()->str() != "[")
+            {
+                syntaxError(type.back(), type.back()->str()[0]);
+                return false;
+            }
+            type.pop_back();
 
             Token::createMutualLinks(links3.back(), token);
             links3.pop_back();
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index c2c67f759..b9c444282 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -2439,6 +2439,20 @@ private:
             ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
             ASSERT_EQUALS("[test.cpp:3]: (error) Invalid number of character ([). Can't process file.\n", errout.str());
         }
+
+        {
+            errout.str("");
+            const char code[] = "{\n"
+                                " a(\n"
+                                "}\n"
+                                "{\n"
+                                " b());\n"
+                                "}\n";
+            Tokenizer tokenizer(0, this);
+            std::istringstream istr(code);
+            ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
+            ASSERT_EQUALS("[test.cpp:2]: (error) Invalid number of character ((). Can't process file.\n", errout.str());
+        }
     }
 
     void removeKeywords()