diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 63316d81f..51a1679b1 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -1878,8 +1878,10 @@ void Tokenizer::simplifyExternC()
 void Tokenizer::simplifyRoundCurlyParentheses()
 {
     for (Token *tok = list.front(); tok; tok = tok->next()) {
-        while (Token::Match(tok, "[;{}] ( {") &&
+        while (Token::Match(tok, "[;{}:] ( {") &&
                Token::simpleMatch(tok->linkAt(2), "} ) ;")) {
+            if (tok->str() == ":" && !Token::Match(tok->tokAt(-2),"[;{}] %type% :"))
+                break;
             Token *end = tok->linkAt(2)->tokAt(-3);
             if (Token::Match(end, "[;{}] %num%|%str% ;"))
                 end->deleteNext(2);
diff --git a/test/testincompletestatement.cpp b/test/testincompletestatement.cpp
index 4d69ba015..5ebf15199 100644
--- a/test/testincompletestatement.cpp
+++ b/test/testincompletestatement.cpp
@@ -245,6 +245,12 @@ private:
         check("void f() {\n"
              "    ({ do_something(); 0; });\n"
              "}");
         ASSERT_EQUALS("", errout.str());
+
+        check("void f() {\n"
+             "out:\n"
+             "    ({ do_something(); 0; });\n"
+             "}");
+        ASSERT_EQUALS("", errout.str());
     }
 };