diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp
index 957672a35..aaa26e281 100644
--- a/lib/tokenlist.cpp
+++ b/lib/tokenlist.cpp
@@ -640,8 +640,17 @@ static bool iscpp11init_impl(const Token * const tok)
             return true;
     }
 
+    auto isCaseStmt = [](const Token* colonTok) {
+        if (!Token::Match(colonTok->tokAt(-1), "%name%|%num% :"))
+            return false;
+        const Token* caseTok = colonTok->tokAt(-2);
+        while (Token::Match(caseTok->tokAt(-1), "::|%name%"))
+            caseTok = caseTok->tokAt(-1);
+        return Token::simpleMatch(caseTok, "case");
+    };
+
     const Token *endtok = nullptr;
-    if (Token::Match(nameToken, "%name%|return|: {") &&
+    if (Token::Match(nameToken, "%name%|return|: {") && !isCaseStmt(nameToken) &&
         (!Token::simpleMatch(nameToken->tokAt(2), "[") || findLambdaEndScope(nameToken->tokAt(2))))
         endtok = nameToken->linkAt(1);
     else if (Token::Match(nameToken,"%name% <") && Token::simpleMatch(nameToken->linkAt(1),"> {"))
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index a20a6b667..af9056556 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -6497,6 +6497,32 @@ private:
 
         // #10831
        ASSERT_EQUALS("f{([= x{([=", testAst("void foo() { F f = [](t x = []() {}) {}; }"));
+
+        // #11357
+        ASSERT_NO_THROW(tokenizeAndStringify("void f(std::vector& v, bool c) {\n"
+                                             "    std::sort(v.begin(), v.end(), [&c](const auto a, const auto b) {\n"
+                                             "        switch (c) {\n"
+                                             "        case false: {\n"
+                                             "            if (a < b) {}\n"
+                                             "        }\n"
+                                             "        }\n"
+                                             "        return a < b;\n"
+                                             "    });\n"
+                                             "}\n"));
+
+        ASSERT_NO_THROW(tokenizeAndStringify("namespace N {\n"
+                                             "    enum E : bool { F };\n"
+                                             "}\n"
+                                             "void f(std::vector& v, bool c) {\n"
+                                             "    std::sort(v.begin(), v.end(), [&c](const auto a, const auto b) {\n"
+                                             "        switch (c) {\n"
+                                             "        case N::E::F: {\n"
+                                             "            if (a < b) {}\n"
+                                             "        }\n"
+                                             "        }\n"
+                                             "        return a < b;\n"
+                                             "    });\n"
+                                             "}\n"));
     }
 
     void astcase() {
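
For context (not part of the patch): the construct exercised by the new tests is a `case` label whose statement begins with `{` inside a lambda; before the `isCaseStmt` check, `iscpp11init_impl` could take the brace after the `:` for a C++11 brace initializer. Below is a minimal, standalone sketch of that pattern, mirroring the test code; the names `f`, `v`, `c` come from the tests, while the `int` element type and the `break` are illustrative additions.

```cpp
// Illustrative sketch only: the '{' after "case false:" opens a compound
// statement, not a brace-init list, which is what isCaseStmt() now detects.
#include <algorithm>
#include <vector>

void f(std::vector<int>& v, bool c) {
    std::sort(v.begin(), v.end(), [&c](const auto a, const auto b) {
        switch (c) {
        case false: {          // label followed by '{' - the problematic pattern
            if (a < b) {}
            break;
        }
        }
        return a < b;
    });
}
```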