diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index b1a5e7b40..21efa72c1 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5249,7 +5249,7 @@ void Tokenizer::createLinks2()
         } else if (token->str() == "<" && ((token->previous() && (token->previous()->isTemplate() ||
                                                                   (token->previous()->isName() && !token->previous()->varId()) ||
-                                                                  (token->strAt(-1) == "]" && !Token::Match(token->linkAt(-1)->previous(), "%name%|)")))) ||
+                                                                  (token->strAt(-1) == "]" && (!Token::Match(token->linkAt(-1)->previous(), "%name%|)") || token->linkAt(-1)->previous()->isKeyword())))) ||
                                            Token::Match(token->next(), ">|>>"))) {
             type.push(token);
             if (token->previous()->str() == "template")
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index ec0976528..3688748c3 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -3515,6 +3515,22 @@ private:
             ASSERT_EQUALS(true, tok1->link() == tok2);
             ASSERT_EQUALS(true, tok2->link() == tok1);
         }
+
+        {
+            const char code[] = "void f() {\n"
+                                "  auto g = [] () {\n"
+                                "    return []<typename T>() {};\n"
+                                "  };\n"
+                                "}\n";
+            errout.str("");
+            Tokenizer tokenizer(&settings0, this);
+            std::istringstream istr(code);
+            ASSERT(tokenizer.tokenize(istr, "test.cpp"));
+            const Token* tok1 = Token::findsimplematch(tokenizer.tokens(), "< typename");
+            const Token* tok2 = Token::findsimplematch(tok1, "> (");
+            ASSERT_EQUALS(true, tok1->link() == tok2);
+            ASSERT_EQUALS(true, tok2->link() == tok1);
+        }
     }
 
     void simplifyString() {