diff --git a/lib/token.cpp b/lib/token.cpp index 4476b5005..9518634bf 100644 --- a/lib/token.cpp +++ b/lib/token.cpp @@ -81,6 +81,8 @@ void Token::update_property_info() else if (std::isalpha((unsigned char)mStr[0]) || mStr[0] == '_' || mStr[0] == '$') { // Name if (mImpl->mVarId) tokType(eVariable); + else if (mTokensFrontBack && mTokensFrontBack->list && mTokensFrontBack->list->isKeyword(mStr)) + tokType(eKeyword); else if (mTokType != eVariable && mTokType != eFunction && mTokType != eType && mTokType != eKeyword) tokType(eName); } else if (std::isdigit((unsigned char)mStr[0]) || (mStr.length() > 1 && mStr[0] == '-' && std::isdigit((unsigned char)mStr[1]))) @@ -401,7 +403,7 @@ static int multiComparePercent(const Token *tok, const char*& haystack, nonneg i // Type (%type%) { haystack += 5; - if (tok->isName() && tok->varId() == 0 && !tok->isKeyword()) + if (tok->isName() && tok->varId() == 0 && (tok->str() != "delete" || !tok->isKeyword())) // HACK: this is legacy behaviour, it should return false for all keywords, except types return 1; } break; @@ -1022,7 +1024,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original if (mImpl->mScopeInfo) { // If the brace is immediately closed there is no point opening a new scope for it - if (tokenStr == "{") { + if (newToken->str() == "{") { std::string nextScopeNameAddition; // This might be the opening of a member function Token *tok1 = newToken; @@ -1079,7 +1081,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original nextScopeNameAddition = ""; newToken->scopeInfo(newScopeInfo); - } else if (tokenStr == "}") { + } else if (newToken->str() == "}") { Token* matchingTok = newToken->previous(); int depth = 0; while (matchingTok && (depth != 0 || !Token::simpleMatch(matchingTok, "{"))) { @@ -1096,7 +1098,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original } else { newToken->mImpl->mScopeInfo = mImpl->mScopeInfo; } - if (tokenStr == 
";") { + if (newToken->str() == ";") { const Token* statementStart; for (statementStart = newToken; statementStart->previous() && !Token::Match(statementStart->previous(), ";|{"); statementStart = statementStart->previous()); if (Token::Match(statementStart, "using namespace %name% ::|;")) { diff --git a/lib/token.h b/lib/token.h index c44b6f65b..f6c864273 100644 --- a/lib/token.h +++ b/lib/token.h @@ -42,6 +42,7 @@ class Settings; class Type; class ValueType; class Variable; +class TokenList; /** * @brief This struct stores pointers to the front and back tokens of the list this token is in. @@ -49,6 +50,7 @@ class Variable; struct TokensFrontBack { Token *front; Token *back; + const TokenList* list; }; struct ScopeInfo2 { diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index daa38ad66..c6937d1ab 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -9319,7 +9319,7 @@ void Tokenizer::reportUnknownMacros() for (const Token *tok = tokens(); tok; tok = tok->next()) { if (Token::Match(tok, "%name% %num%")) { // A keyword is not an unknown macro - if (list.isKeyword(tok->str())) + if (tok->isKeyword()) continue; if (Token::Match(tok->previous(), "%op%|(")) @@ -9372,7 +9372,7 @@ void Tokenizer::reportUnknownMacros() if (Token::Match(tok, "%name% (") && tok->isUpperCaseName() && Token::simpleMatch(tok->linkAt(1), ") (") && Token::simpleMatch(tok->linkAt(1)->linkAt(1), ") {")) { // A keyword is not an unknown macro - if (list.isKeyword(tok->str())) + if (tok->isKeyword()) continue; const Token *bodyStart = tok->linkAt(1)->linkAt(1)->tokAt(2); @@ -9392,7 +9392,7 @@ void Tokenizer::reportUnknownMacros() // String concatenation with unknown macros for (const Token *tok = tokens(); tok; tok = tok->next()) { if (Token::Match(tok, "%str% %name% (") && Token::Match(tok->linkAt(2), ") %str%")) { - if (list.isKeyword(tok->next()->str())) + if (tok->next()->isKeyword()) continue; unknownMacroError(tok->next()); } @@ -9585,8 +9585,6 @@ void Tokenizer::findGarbageCode() const if 
(tok->str() != ">" && !Token::simpleMatch(tok->previous(), "operator")) syntaxError(tok, tok->str() + " " + tok->next()->str()); } - if (Token::Match(tok, "( %any% )") && tok->next()->isKeyword() && !Token::simpleMatch(tok->next(), "void")) - syntaxError(tok); if (Token::Match(tok, "%num%|%bool%|%char%|%str% %num%|%bool%|%char%|%str%") && !Token::Match(tok, "%str% %str%")) syntaxError(tok); if (Token::Match(tok, "%assign% typename|class %assign%")) diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp index 1ba044f7b..99b7d2189 100644 --- a/lib/tokenlist.cpp +++ b/lib/tokenlist.cpp @@ -44,29 +44,30 @@ TokenList::TokenList(const Settings* settings) : mIsC(false), mIsCpp(false) { + mTokensFrontBack.list = this; mKeywords.insert("auto"); mKeywords.insert("break"); mKeywords.insert("case"); - mKeywords.insert("char"); + //mKeywords.insert("char"); // type mKeywords.insert("const"); mKeywords.insert("continue"); mKeywords.insert("default"); mKeywords.insert("do"); - mKeywords.insert("double"); + //mKeywords.insert("double"); // type mKeywords.insert("else"); mKeywords.insert("enum"); mKeywords.insert("extern"); - mKeywords.insert("float"); + //mKeywords.insert("float"); // type mKeywords.insert("for"); mKeywords.insert("goto"); mKeywords.insert("if"); mKeywords.insert("inline"); - mKeywords.insert("int"); - mKeywords.insert("long"); + //mKeywords.insert("int"); // type + //mKeywords.insert("long"); // type mKeywords.insert("register"); mKeywords.insert("restrict"); mKeywords.insert("return"); - mKeywords.insert("short"); + //mKeywords.insert("short"); // type mKeywords.insert("signed"); mKeywords.insert("sizeof"); mKeywords.insert("static"); @@ -106,6 +107,50 @@ void TokenList::deallocateTokens() mFiles.clear(); } +void TokenList::determineCppC() +{ + if (!mSettings) { + mIsC = Path::isC(getSourceFilePath()); + mIsCpp = Path::isCPP(getSourceFilePath()); + } else { + mIsC = mSettings->enforcedLang == Settings::C || (mSettings->enforcedLang == Settings::None && 
Path::isC(getSourceFilePath())); + mIsCpp = mSettings->enforcedLang == Settings::CPP || (mSettings->enforcedLang == Settings::None && Path::isCPP(getSourceFilePath())); + } + + if (mIsCpp) { + //mKeywords.insert("bool"); // type + mKeywords.insert("catch"); + mKeywords.insert("delete"); + mKeywords.insert("class"); + mKeywords.insert("const_cast"); + mKeywords.insert("delete"); + mKeywords.insert("dynamic_cast"); + mKeywords.insert("explicit"); + mKeywords.insert("export"); + //mKeywords.insert("false"); // literal + mKeywords.insert("friend"); + mKeywords.insert("mutable"); + mKeywords.insert("namespace"); + mKeywords.insert("new"); + mKeywords.insert("operator"); + mKeywords.insert("private"); + mKeywords.insert("protected"); + mKeywords.insert("public"); + mKeywords.insert("reinterpret_cast"); + mKeywords.insert("static_cast"); + mKeywords.insert("template"); + mKeywords.insert("this"); + mKeywords.insert("throw"); + //mKeywords.insert("true"); // literal + mKeywords.insert("try"); + mKeywords.insert("typeid"); + mKeywords.insert("typename"); + mKeywords.insert("using"); + mKeywords.insert("virtual"); + //mKeywords.insert("wchar_t"); // type + } +} + int TokenList::appendFileIfNew(const std::string &fileName) { // Has this file been tokenized already? 
@@ -118,45 +163,7 @@ int TokenList::appendFileIfNew(const std::string &fileName) // Update mIsC and mIsCpp properties if (mFiles.size() == 1) { // Update only useful if first file added to _files - if (!mSettings) { - mIsC = Path::isC(getSourceFilePath()); - mIsCpp = Path::isCPP(getSourceFilePath()); - } else { - mIsC = mSettings->enforcedLang == Settings::C || (mSettings->enforcedLang == Settings::None && Path::isC(getSourceFilePath())); - mIsCpp = mSettings->enforcedLang == Settings::CPP || (mSettings->enforcedLang == Settings::None && Path::isCPP(getSourceFilePath())); - } - - if (mIsCpp) { - mKeywords.insert("catch"); - mKeywords.insert("delete"); - mKeywords.insert("class"); - mKeywords.insert("const_cast"); - mKeywords.insert("delete"); - mKeywords.insert("dynamic_cast"); - mKeywords.insert("explicit"); - mKeywords.insert("export"); - mKeywords.insert("false"); - mKeywords.insert("friend"); - mKeywords.insert("mutable"); - mKeywords.insert("namespace"); - mKeywords.insert("new"); - mKeywords.insert("operator"); - mKeywords.insert("private"); - mKeywords.insert("protected"); - mKeywords.insert("public"); - mKeywords.insert("reinterpret_cast"); - mKeywords.insert("static_cast"); - mKeywords.insert("template"); - mKeywords.insert("this"); - mKeywords.insert("throw"); - mKeywords.insert("true"); - mKeywords.insert("try"); - mKeywords.insert("typeid"); - mKeywords.insert("typename"); - mKeywords.insert("using"); - mKeywords.insert("virtual"); - mKeywords.insert("wchar_t"); - } + determineCppC(); } return mFiles.size() - 1; } @@ -207,8 +214,6 @@ void TokenList::addtoken(std::string str, const nonneg int lineno, const nonneg mTokensFrontBack.back->str(str); } - if (isKeyword(str)) - mTokensFrontBack.back->isKeyword(true); mTokensFrontBack.back->linenr(lineno); mTokensFrontBack.back->fileIndex(fileno); } @@ -226,8 +231,6 @@ void TokenList::addtoken(std::string str, const Token *locationTok) mTokensFrontBack.back->str(str); } - if (isCPP() && str == "delete") - 
mTokensFrontBack.back->isKeyword(true); mTokensFrontBack.back->linenr(locationTok->linenr()); mTokensFrontBack.back->column(locationTok->column()); mTokensFrontBack.back->fileIndex(locationTok->fileIndex()); @@ -391,15 +394,7 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList) else mFiles.clear(); - mIsC = mIsCpp = false; - if (!mFiles.empty()) { - mIsC = Path::isC(getSourceFilePath()); - mIsCpp = Path::isCPP(getSourceFilePath()); - } - if (mSettings && mSettings->enforcedLang != Settings::None) { - mIsC = (mSettings->enforcedLang == Settings::C); - mIsCpp = (mSettings->enforcedLang == Settings::CPP); - } + determineCppC(); for (const simplecpp::Token *tok = tokenList.cfront(); tok;) { @@ -417,8 +412,6 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList) mTokensFrontBack.back->str(str); } - if (isCPP() && mTokensFrontBack.back->str() == "delete") - mTokensFrontBack.back->isKeyword(true); mTokensFrontBack.back->fileIndex(tok->location.fileIndex); mTokensFrontBack.back->linenr(tok->location.line); mTokensFrontBack.back->column(tok->location.col); diff --git a/lib/tokenlist.h b/lib/tokenlist.h index ca1882924..8a92a2d2e 100644 --- a/lib/tokenlist.h +++ b/lib/tokenlist.h @@ -197,6 +197,8 @@ private: /** Disable assignment operator, no implementation */ TokenList &operator=(const TokenList &); + void determineCppC(); + /** Token list */ TokensFrontBack mTokensFrontBack; diff --git a/test/testtokenlist.cpp b/test/testtokenlist.cpp index 68adf5db0..aad5ffa6e 100644 --- a/test/testtokenlist.cpp +++ b/test/testtokenlist.cpp @@ -35,6 +35,7 @@ private: TEST_CASE(testaddtoken1); TEST_CASE(testaddtoken2); TEST_CASE(inc); + TEST_CASE(isKeyword); } // inspired by #5895 @@ -65,6 +66,48 @@ private: ASSERT(Token::simpleMatch(tokenlist.front(), "a + + 1 ; 1 + + b ;")); } + + void isKeyword() { + + const char code[] = "for a int delete true"; + + { + TokenList tokenlist(&settings); + std::istringstream istr(code); + tokenlist.createTokens(istr, "a.c"); + + 
ASSERT_EQUALS(true, tokenlist.front()->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->next()->isKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->next()->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->tokAt(2)->tokType() == Token::eType); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->tokAt(4)->isLiteral()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isControlFlowKeyword()); + } + { + TokenList tokenlist(&settings); + std::istringstream istr(code); + tokenlist.createTokens(istr, "a.cpp"); + + ASSERT_EQUALS(true, tokenlist.front()->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->next()->isKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->next()->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->tokAt(2)->tokType() == Token::eType); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isControlFlowKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->tokAt(3)->isKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isControlFlowKeyword()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isKeyword()); + ASSERT_EQUALS(true, tokenlist.front()->tokAt(4)->isLiteral()); + ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isControlFlowKeyword()); + } + } }; REGISTER_TEST(TestTokenList)