From 3add466035611c4702d0511d9750a9d494dc306a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?=
Date: Fri, 27 Feb 2015 18:29:34 +0100
Subject: [PATCH] Add isFunctionHead() in Tokenizer

---
 lib/tokenize.cpp   | 41 ++++++++++++++++++++++++++++++++++-------
 test/testvarid.cpp | 19 +++++++++++++------
 2 files changed, 47 insertions(+), 13 deletions(-)

diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index db609018a..33a9f121f 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -36,6 +36,33 @@
 
 //---------------------------------------------------------------------------
 
+/**
+ * is token pointing at function head?
+ * @param tok A '(' or ')' token in a possible function head
+ * @param endsWith string after function head
+ * @return true if syntax seems to be a function head
+ */
+static bool isFunctionHead(const Token *tok, const std::string &endsWith)
+{
+    if (tok->str() == "(")
+        tok = tok->link();
+    if (Token::Match(tok, ") const| [;{]")) {
+        tok = tok->next();
+        if (tok->isName())
+            tok = tok->next();
+        return endsWith.find(tok->str()) != std::string::npos;
+    }
+    if (Token::Match(tok, ") const| throw (")) {
+        tok = tok->next();
+        while (tok->isName())
+            tok = tok->next();
+        tok = tok->link()->next();
+        return endsWith.find(tok->str()) != std::string::npos;
+    }
+    return false;
+}
+//---------------------------------------------------------------------------
+
 Tokenizer::Tokenizer() :
     list(0),
     _settings(0),
@@ -775,7 +802,7 @@ void Tokenizer::simplifyTypedef()
             }
 
             // function
-            else if (Token::Match(tokOffset->link(), ") const| ;|,")) {
+            else if (isFunctionHead(tokOffset->link(), ";,")) {
                 function = true;
                 if (tokOffset->link()->next()->str() == "const") {
                     specStart = tokOffset->link()->next();
@@ -2562,11 +2589,11 @@ void Tokenizer::setVarId()
         // scope info to handle shadow variables..
         bool newScope = false;
         if (!initListEndToken && tok->str() == "(") {
-            if (Token::simpleMatch(tok->tokAt(-2), ") throw ( ) {")) {
-                tok = tok->next();
+            if (Token::Match(tok->tokAt(-2), ")|const throw (") && Token::simpleMatch(tok->link(), ") {")) {
+                tok = tok->link();
                 continue;
             }
-            if (Token::Match(tok->link(), ") %name%| {") || Token::simpleMatch(tok->link(), ") throw ( ) {"))
+            if (isFunctionHead(tok, "{"))
                 newScope = true;
             else {
                 initListEndToken = findInitListEndToken(tok->link());
@@ -2578,9 +2605,9 @@ void Tokenizer::setVarId()
             scopeInfo.push(variableId);
 
             // function declarations
-        } else if (!executableScope.top() && tok->str() == "(" && Token::Match(tok->link(), ") const| ;")) {
+        } else if (!executableScope.top() && tok->str() == "(" && isFunctionHead(tok, ";")) {
             scopeInfo.push(variableId);
-        } else if (!executableScope.top() && Token::Match(tok, ") const| ;")) {
+        } else if (!executableScope.top() && tok->str() == ")" && isFunctionHead(tok, ";")) {
             variableId.swap(scopeInfo.top());
             scopeInfo.pop();
 
@@ -4029,7 +4056,7 @@ void Tokenizer::removeMacrosInGlobalScope()
             }
 
             // replace unknown macros before foo(
-            if (Token::Match(tok2, "%type% (") && Token::Match(tok2->next()->link(), ") const| {")) {
+            if (Token::Match(tok2, "%type% (") && isFunctionHead(tok2->next(), "{")) {
                 std::string typeName;
                 for (const Token* tok3 = tok; tok3 != tok2; tok3 = tok3->next())
                     typeName += tok3->str();
diff --git a/test/testvarid.cpp b/test/testvarid.cpp
index be21fbe9d..87aadb8eb 100644
--- a/test/testvarid.cpp
+++ b/test/testvarid.cpp
@@ -972,12 +972,19 @@ private:
     }
 
     void varid56() { // Ticket #6548 - function with a throw()
-        const char code[] = "void fred(int x) throw() {}"
-                            "void wilma() { x++; }";
-        const char expected[] = "\n\n##file 0\n1: "
-                                "void fred ( int x@1 ) throw ( ) { } "
-                                "void wilma ( ) { x ++ ; }\n";
-        ASSERT_EQUALS(expected, tokenize(code, false, "test.cpp"));
+        const char code1[] = "void fred(int x) throw() {}"
+                             "void wilma() { x++; }";
+        const char expected1[] = "\n\n##file 0\n1: "
+                                 "void fred ( int x@1 ) throw ( ) { } "
+                                 "void wilma ( ) { x ++ ; }\n";
+        ASSERT_EQUALS(expected1, tokenize(code1, false, "test.cpp"));
+
+        const char code2[] = "void fred(int x) const throw(EXCEPT) {}"
+                             "void wilma() { x++; }";
+        const char expected2[] = "\n\n##file 0\n1: "
+                                 "void fred ( int x@1 ) const throw ( EXCEPT ) { } "
+                                 "void wilma ( ) { x ++ ; }\n";
+        ASSERT_EQUALS(expected2, tokenize(code2, false, "test.cpp"));
     }
 
     void varid_cpp_keywords_in_c_code() {
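
Note (reviewer illustration, not part of the patch): a minimal sketch of the
declaration shapes the new isFunctionHead() helper is intended to accept, based
on the two Token::Match patterns above (") const| [;{]" and ") const| throw (")
and on the endsWith strings used at the call sites (";,", "{" and ";"). The
struct, the function names and the exception type below are hypothetical
examples, not code from cppcheck.

    // Hypothetical examples only; the ')' closing each parameter list is the
    // kind of token isFunctionHead() would be asked about.
    struct Demo {
        void decl(int x);                             // ") ;"  -> head followed by ';'
        void constDecl(int x) const;                  // ") const ;"
        void def(int x) {}                            // ") {"  -> head followed by '{'
        void throwDecl(int x) throw();                // ") throw ( ) ;"
        void constThrowDef(int x) const throw(int) {} // ") const throw ( int ) {"
    };

In each case the helper skips an optional "const" (and any throw(...) list) and
then checks whether the next token's string occurs in endsWith; for example,
setVarId() passes "{" to detect function definitions and ";" to detect
declarations.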