diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 1b7652999..bf4dc3ccc 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -2885,9 +2885,8 @@ void Tokenizer::createLinks2()
             if (token->str() == ">>" && type.size() < 2)
                 continue;

-            // if > is followed by ; .. "new a;" is expected
             // if > is followed by [ .. "new a[" is expected
-            if (Token::Match(token->next(), ";|[")) {
+            if (token->strAt(1) == "[") {
                 Token *prev = type.top()->previous();
                 while (prev && Token::Match(prev->previous(), ":: %var%"))
                     prev = prev->tokAt(-2);
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 85cc3b7c8..be7e33f24 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -4887,6 +4887,21 @@ private:
             ASSERT_EQUALS("", errout.str());
         }

+        {
+            // #6242
+            const char code[] = "func = integral_<uchar,int,double>;";
+            errout.str("");
+            Settings settings;
+            Tokenizer tokenizer(&settings, this);
+            std::istringstream istr(code);
+            tokenizer.tokenize(istr, "test.cpp");
+            const Token *tok = tokenizer.tokens();
+
+            ASSERT_EQUALS(true, tok->tokAt(3) == tok->linkAt(9));
+            ASSERT_EQUALS(true, tok->linkAt(3) == tok->tokAt(9));
+
+            ASSERT_EQUALS("", errout.str());
+        }
         {
             // if (a < b || c > d) { }
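
Note (not part of the patch): the deleted ";" branch existed for "new a;"-style expressions, but it also made createLinks2() skip linking a ">" that is directly followed by ";" whenever the expression was not preceded by "new", so valid code assigning a function-template instantiation to a function pointer was flagged as a syntax error. A minimal sketch of the pattern the new test covers, with illustrative names that are assumptions rather than code from ticket #6242:

    // Sketch only: the kind of source that hits the "> ;" case (names are
    // assumed for illustration, not taken from ticket #6242).
    template <typename S, typename D, typename Q>
    void integral_(const S *src, D *sum, Q *sqsum) {}

    void (*func)(const unsigned char *, int *, double *);

    void assign() {
        // '>' is immediately followed by ';' here; the '<'/'>' pair must be
        // linked as template brackets, not left as comparison operators.
        func = integral_<unsigned char, int, double>;
    }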