diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index db870bf8c..4c44d07f7 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -2299,6 +2299,8 @@ bool Tokenizer::tokenize(std::istream &code,
     if (!preprocessorCondition) {
         setVarId();
 
+        createLinks2();
+
         // Change initialisation of variable to assignment
         simplifyInitVar();
     }
@@ -3296,6 +3298,62 @@ bool Tokenizer::createLinks()
     return true;
 }
 
+void Tokenizer::createLinks2()
+{
+    std::stack<const Token*> type;
+    std::stack<Token*> links;
+    for (Token *token = _tokens; token; token = token->next()) {
+        if (token->link()) {
+            if (Token::Match(token, "{|[|("))
+                type.push(token);
+            else if (Token::Match(token, "}|]|)")) {
+                while (type.top()->str() == "<")
+                    type.pop();
+                type.pop();
+            } else
+                token->link(0);
+        }
+
+        else if (token->str() == ";")
+            while (!links.empty())
+                links.pop();
+        else if (token->str() == "<" && token->previous() && token->previous()->isName() && !token->previous()->varId()) {
+            type.push(token);
+            links.push(token);
+        } else if (token->str() == ">" || token->str() == ">>") {
+            if (links.empty()) // < and > don't match.
+                continue;
+            if (token->next() && !token->next()->isName() && !Token::Match(token->next(), ">|&|*|::|,"))
+                continue;
+
+            // Check type of open link
+            if (type.empty() || type.top()->str() != "<" || (token->str() == ">>" && type.size() < 2)) {
+                if (!links.empty())
+                    links.pop();
+                continue;
+            }
+            const Token* top = type.top();
+            type.pop();
+            if (token->str() == ">>" && type.top()->str() != "<") {
+                type.push(top);
+                if (!links.empty())
+                    links.pop();
+                continue;
+            }
+
+            if (token->str() == ">>") { // C++11 right angle bracket
+                if (links.size() < 2)
+                    continue;
+                token->str(">");
+                token->insertToken(">");
+            }
+
+            Token::createMutualLinks(links.top(), token);
+            links.pop();
+        }
+    }
+}
+
 void Tokenizer::simplifySizeof()
 {
     for (Token *tok = _tokens; tok; tok = tok->next()) {
@@ -8069,17 +8127,16 @@ bool Tokenizer::validate() const
     const Token *lastTok = 0;
     for (const Token *tok = tokens(); tok; tok = tok->next()) {
         lastTok = tok;
-        if (Token::Match(tok, "[{([]")) {
+        if (Token::Match(tok, "[{([]") || (tok->str() == "<" && tok->link())) {
             if (tok->link() == 0) {
                 cppcheckError(tok);
                 return false;
             }
 
             linktok.push(tok);
-            continue;
         }
 
-        else if (Token::Match(tok, "[})]]")) {
+        else if (Token::Match(tok, "[})]]") || (tok->str() == ">" && tok->link())) {
             if (tok->link() == 0) {
                 cppcheckError(tok);
                 return false;
@@ -8101,10 +8158,9 @@ bool Tokenizer::validate() const
             }
 
             linktok.pop();
-            continue;
         }
 
-        if (tok->link() != 0) {
+        else if (tok->link() != 0) {
             cppcheckError(tok);
             return false;
         }
diff --git a/lib/tokenize.h b/lib/tokenize.h
index 8727f8a66..ceeb1355a 100644
--- a/lib/tokenize.h
+++ b/lib/tokenize.h
@@ -583,6 +583,11 @@ public:
      */
     bool createLinks();
 
+    /**
+     * Setup links between < and >.
+     */
+    void createLinks2();
+
     /** Syntax error */
     void syntaxError(const Token *tok);
 
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 0c3f8d86e..ef70f6a08 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -5052,6 +5052,33 @@ private:
             ASSERT_EQUALS(true, tok->linkAt(8) == tok->tokAt(9));
             ASSERT_EQUALS(true, tok->linkAt(9) == tok->tokAt(8));
         }
+
+        {
+            const char code[] = "bool foo(C<z> a, bar<int, x<y>>& f, int b) {\n"
+                                "    return(a<b && b>f);\n"
+                                "}";
+            errout.str("");
+            Settings settings;
+            Tokenizer tokenizer(&settings, this);
+            std::istringstream istr(code);
+            tokenizer.tokenize(istr, "test.cpp");
+            const Token *tok = tokenizer.tokens();
+            // template<
+            ASSERT_EQUALS((long long)tok->tokAt(6), (long long)tok->linkAt(4));
+            ASSERT_EQUALS((long long)tok->tokAt(4), (long long)tok->linkAt(6));
+
+            // bar<
+            ASSERT_EQUALS((long long)tok->tokAt(17), (long long)tok->linkAt(10));
+            ASSERT_EQUALS((long long)tok->tokAt(10), (long long)tok->linkAt(17));
+
+            // x<
+            ASSERT_EQUALS((long long)tok->tokAt(16), (long long)tok->linkAt(14));
+            ASSERT_EQUALS((long long)tok->tokAt(14), (long long)tok->linkAt(16));
+
+            // a<b && b>f
+            ASSERT_EQUALS(0, (long long)tok->linkAt(28));
+            ASSERT_EQUALS(0, (long long)tok->linkAt(32));
+        }
     }
 
     void removeExceptionSpecification1() {
@@ -5317,8 +5344,7 @@ private:
     void cpp0xtemplate2() {
         // tokenize ">>" into "> >"
        const char *code = "list<list<int>> ints;\n";
-        TODO_ASSERT_EQUALS("list < list < int > > ints ;",
-                           "list < list < int >> ints ;", tokenizeAndStringify(code));
+        ASSERT_EQUALS("list < list < int > > ints ;", tokenizeAndStringify(code));
     }
 
     void cpp0xtemplate3() {