diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index adcad6ace..c20a91a75 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -4906,6 +4906,10 @@ void Tokenizer::simplifyVarDecl(bool only_k_r_fpar)
             continue;
         } else if (tok->str() == "(") {
             tok = tok->link();
+
+            // TestTokenizer::vardecl24 - lambda functions..
+            if (isCPP() && tok->previous()->str() == "}")
+                tok = tok->previous()->link();
         }
 
         if (tok->previous() && !Token::Match(tok->previous(), "{|}|;|)|public:|protected:|private:"))
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 13413a4aa..eb3bf6365 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -338,6 +338,7 @@ private:
         TEST_CASE(vardecl21); // #4042 - a::b const *p = 0;
         TEST_CASE(vardecl22); // #4211 - segmentation fault
         TEST_CASE(vardecl23); // #4276 - segmentation fault
+        TEST_CASE(vardecl24); // #4187 - variable declaration within lambda function
         TEST_CASE(vardecl_stl_1);
         TEST_CASE(vardecl_stl_2);
         TEST_CASE(vardecl_template_1);
@@ -5362,6 +5363,24 @@ private:
         tokenizeAndStringify("class a { protected : template < class int x = 1 ; public : int f ( ) ; }");
     }
 
+    void vardecl24() { // #4187 - variable declaration within lambda function
+        const char code[] = "void f() {\n"
+                            "    std::for_each(ints.begin(), ints.end(), [](int val)\n"
+                            "    {\n"
+                            "        int temp = 0;\n"
+                            "    });\n"
+                            "}";
+
+        const char expected[] = "void f ( ) {\n"
+                                "std :: for_each ( ints . begin ( ) , ints . end ( ) , [ ] ( int val )\n"
+                                "{\n"
+                                "int temp ; temp = 0 ;\n"
+                                "} ) ;\n"
+                                "}";
+
+        ASSERT_EQUALS(expected, tokenizeAndStringify(code));
+    }
+
     void volatile_variables() {
         const char code[] = "volatile int a=0;\n"
                             "volatile int b=0;\n"