diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 57b54cc0f..969113ad0 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5267,6 +5267,8 @@ void Tokenizer::simplifyFunctionPointers()
             syntaxError(nullptr);
         }
         Token *endTok = tok->link()->next()->link();
+        if (Token::simpleMatch(endTok, ") throw ("))
+            endTok = endTok->linkAt(2);
         if (!Token::Match(endTok, ") const| ;|,|)|=|[|{"))
             continue;

diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 513f27d44..9b09eb130 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -314,6 +314,7 @@ private:
         TEST_CASE(functionpointer5);
         TEST_CASE(functionpointer6);
         TEST_CASE(functionpointer7);
+        TEST_CASE(functionpointer8); // #7410 - throw

         TEST_CASE(removeRedundantAssignment);

@@ -4718,6 +4719,13 @@ private:
         ASSERT_EQUALS(expected1, tokenizeDebugListing(code1, false));
     }

+    void functionpointer8() {
+        const char code1[] = "int (*f)() throw(int);";
+        const char expected1[] = "\n\n##file 0\n"
+                                 "1: int * f@1 ;\n";
+        ASSERT_EQUALS(expected1, tokenizeDebugListing(code1, false));
+    }
+
     void removeRedundantAssignment() {
         ASSERT_EQUALS("void f ( ) { }", tokenizeAndStringify("void f() { int *p, *q; p = q; }", true));
         ASSERT_EQUALS("void f ( ) { }", tokenizeAndStringify("void f() { int *p = 0, *q; p = q; }", true));
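
For context, a minimal sketch of the construct this patch targets. The declaration and the expected token listing in the comments are taken from the new functionpointer8 test above; the surrounding explanation is an informal reading of the change, not part of the patch itself.

    // Function pointer with a C++03 dynamic exception specification.
    // Before this change, the trailing "throw (int)" prevented
    // Tokenizer::simplifyFunctionPointers() from matching the end of the
    // declaration. With the added Token::simpleMatch / linkAt(2) step,
    // endTok is advanced to the ")" that closes the throw clause, so the
    // declaration is simplified to the token listing "1: int * f@1 ;".
    int (*f)() throw(int);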