From 83625d8055e9bbb2d45a64f8cca177c8f54147c8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?=
Date: Fri, 28 Jan 2011 09:19:30 +0100
Subject: [PATCH] Fixed #2505 (Check processing of a preprocessor macro 'FREE')

---
 lib/tokenize.cpp      | 2 +-
 test/testtokenize.cpp | 7 +++++++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index bdbb6e44b..9cbdcd6ea 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -6846,7 +6846,7 @@ bool Tokenizer::simplifyRedundantParanthesis()
             ret = true;
         }
 
-        while (Token::Match(tok->previous(), "[;{(] ( %var% (") &&
+        while (Token::Match(tok->previous(), "[,;{}(] ( %var% (") &&
                tok->link()->previous() == tok->tokAt(2)->link())
         {
             // We have "( func ( *something* ))", remove the outer
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index d252696c3..f6d07042f 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -205,6 +205,7 @@ private:
         TEST_CASE(removeParantheses8);  // Ticket #1865
         TEST_CASE(removeParantheses9);  // Ticket #1962
         TEST_CASE(removeParantheses10); // Ticket #2320
+        TEST_CASE(removeParantheses11); // Ticket #2505
 
         TEST_CASE(tokenize_double);
         TEST_CASE(tokenize_strings);
@@ -3688,6 +3689,12 @@ private:
         ASSERT_EQUALS("p = buf + 8 ;", tokenizeAndStringify("p = (buf + 8);", false));
     }
 
+    void removeParantheses11()
+    {
+        // Ticket #2505
+        ASSERT_EQUALS("{ } x ( ) ;", tokenizeAndStringify("{}(x());", false));
+    }
+
     void tokenize_double()
     {
         const char code[] = "void f()\n"