Fixed Token::isKeyword: Actually set this flag to a reasonable value in all code paths

PKEuS 2020-05-17 17:25:33 +02:00
parent c9d8f607df
commit f6788c0472
6 changed files with 109 additions and 69 deletions
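
The substance of the change: TokensFrontBack now carries a pointer to the owning TokenList, so Token::update_property_info() can look the string up in the list's keyword set and set the isKeyword flag in one place, instead of relying on the scattered isKeyword(true) calls that addtoken()/createTokens() used to make. Below is a minimal, self-contained sketch of that back-pointer pattern, using hypothetical MiniToken/MiniList types rather than cppcheck's real classes.

// Sketch only: the list owns the keyword set, each token can reach its list,
// and the keyword flag is derived in a single code path.
#include <iostream>
#include <set>
#include <string>

struct MiniList;

struct MiniToken {
    std::string str;
    const MiniList* list = nullptr; // analogous to the new TokensFrontBack::list
    bool keyword = false;
    void updateProperties();        // sets the flag from the owning list
};

struct MiniList {
    std::set<std::string> keywords{"for", "if", "return", "sizeof"};
    bool isKeyword(const std::string& s) const { return keywords.count(s) != 0; }
};

void MiniToken::updateProperties()
{
    keyword = list && list->isKeyword(str); // one decision point for all tokens
}

int main()
{
    MiniList list;
    MiniToken a{"for", &list};
    MiniToken b{"foo", &list};
    a.updateProperties();
    b.updateProperties();
    std::cout << std::boolalpha << a.keyword << ' ' << b.keyword << '\n'; // true false
}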

View File

@@ -81,6 +81,8 @@ void Token::update_property_info()
else if (std::isalpha((unsigned char)mStr[0]) || mStr[0] == '_' || mStr[0] == '$') { // Name
if (mImpl->mVarId)
tokType(eVariable);
else if (mTokensFrontBack && mTokensFrontBack->list && mTokensFrontBack->list->isKeyword(mStr))
tokType(eKeyword);
else if (mTokType != eVariable && mTokType != eFunction && mTokType != eType && mTokType != eKeyword)
tokType(eName);
} else if (std::isdigit((unsigned char)mStr[0]) || (mStr.length() > 1 && mStr[0] == '-' && std::isdigit((unsigned char)mStr[1])))
@@ -401,7 +403,7 @@ static int multiComparePercent(const Token *tok, const char*& haystack, nonneg i
// Type (%type%)
{
haystack += 5;
if (tok->isName() && tok->varId() == 0 && !tok->isKeyword())
if (tok->isName() && tok->varId() == 0 && (tok->str() != "delete" || !tok->isKeyword())) // HACK: this is legacy behaviour, it should return false for all keywords, except types
return 1;
}
break;
@@ -1022,7 +1024,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original
if (mImpl->mScopeInfo) {
// If the brace is immediately closed there is no point opening a new scope for it
if (tokenStr == "{") {
if (newToken->str() == "{") {
std::string nextScopeNameAddition;
// This might be the opening of a member function
Token *tok1 = newToken;
@@ -1079,7 +1081,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original
nextScopeNameAddition = "";
newToken->scopeInfo(newScopeInfo);
} else if (tokenStr == "}") {
} else if (newToken->str() == "}") {
Token* matchingTok = newToken->previous();
int depth = 0;
while (matchingTok && (depth != 0 || !Token::simpleMatch(matchingTok, "{"))) {
@@ -1096,7 +1098,7 @@ void Token::insertToken(const std::string &tokenStr, const std::string &original
} else {
newToken->mImpl->mScopeInfo = mImpl->mScopeInfo;
}
if (tokenStr == ";") {
if (newToken->str() == ";") {
const Token* statementStart;
for (statementStart = newToken; statementStart->previous() && !Token::Match(statementStart->previous(), ";|{"); statementStart = statementStart->previous());
if (Token::Match(statementStart, "using namespace %name% ::|;")) {
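
One subtlety in the multiComparePercent hunk above: now that the flag is set reliably, rejecting every keyword for %type% would change existing matching behaviour, so only the "delete" keyword is excluded and the rest keep matching (the HACK comment marks this as legacy). A rough stand-alone predicate illustrating that condition follows; the helper name is hypothetical and not part of cppcheck.

// Hypothetical helper mirroring the %type% condition above: a name without a
// variable id matches, unless it is the keyword "delete" (legacy behaviour;
// ideally all non-type keywords would be rejected).
#include <cassert>
#include <string>

static bool matchesTypePattern(const std::string& str, bool isName, int varId, bool isKeyword)
{
    if (!isName || varId != 0)
        return false;
    return str != "delete" || !isKeyword;
}

int main()
{
    assert(matchesTypePattern("MyClass", true, 0, false)); // ordinary type name
    assert(matchesTypePattern("while", true, 0, true));    // keyword still matches (legacy)
    assert(!matchesTypePattern("delete", true, 0, true));  // the one excluded keyword
    assert(!matchesTypePattern("x", true, 3, false));      // has a variable id
}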

View File

@@ -42,6 +42,7 @@ class Settings;
class Type;
class ValueType;
class Variable;
class TokenList;
/**
* @brief This struct stores pointers to the front and back tokens of the list this token is in.
@@ -49,6 +50,7 @@ class Variable;
struct TokensFrontBack {
Token *front;
Token *back;
const TokenList* list;
};
struct ScopeInfo2 {

View File

@@ -9319,7 +9319,7 @@ void Tokenizer::reportUnknownMacros()
for (const Token *tok = tokens(); tok; tok = tok->next()) {
if (Token::Match(tok, "%name% %num%")) {
// A keyword is not an unknown macro
if (list.isKeyword(tok->str()))
if (tok->isKeyword())
continue;
if (Token::Match(tok->previous(), "%op%|("))
@@ -9372,7 +9372,7 @@ void Tokenizer::reportUnknownMacros()
if (Token::Match(tok, "%name% (") && tok->isUpperCaseName() && Token::simpleMatch(tok->linkAt(1), ") (") && Token::simpleMatch(tok->linkAt(1)->linkAt(1), ") {")) {
// A keyword is not an unknown macro
if (list.isKeyword(tok->str()))
if (tok->isKeyword())
continue;
const Token *bodyStart = tok->linkAt(1)->linkAt(1)->tokAt(2);
@@ -9392,7 +9392,7 @@ void Tokenizer::reportUnknownMacros()
// String concatenation with unknown macros
for (const Token *tok = tokens(); tok; tok = tok->next()) {
if (Token::Match(tok, "%str% %name% (") && Token::Match(tok->linkAt(2), ") %str%")) {
if (list.isKeyword(tok->next()->str()))
if (tok->next()->isKeyword())
continue;
unknownMacroError(tok->next());
}
@@ -9585,8 +9585,6 @@ void Tokenizer::findGarbageCode() const
if (tok->str() != ">" && !Token::simpleMatch(tok->previous(), "operator"))
syntaxError(tok, tok->str() + " " + tok->next()->str());
}
if (Token::Match(tok, "( %any% )") && tok->next()->isKeyword() && !Token::simpleMatch(tok->next(), "void"))
syntaxError(tok);
if (Token::Match(tok, "%num%|%bool%|%char%|%str% %num%|%bool%|%char%|%str%") && !Token::Match(tok, "%str% %str%"))
syntaxError(tok);
if (Token::Match(tok, "%assign% typename|class %assign%"))

View File

@@ -44,29 +44,30 @@ TokenList::TokenList(const Settings* settings) :
mIsC(false),
mIsCpp(false)
{
mTokensFrontBack.list = this;
mKeywords.insert("auto");
mKeywords.insert("break");
mKeywords.insert("case");
mKeywords.insert("char");
//mKeywords.insert("char"); // type
mKeywords.insert("const");
mKeywords.insert("continue");
mKeywords.insert("default");
mKeywords.insert("do");
mKeywords.insert("double");
//mKeywords.insert("double"); // type
mKeywords.insert("else");
mKeywords.insert("enum");
mKeywords.insert("extern");
mKeywords.insert("float");
//mKeywords.insert("float"); // type
mKeywords.insert("for");
mKeywords.insert("goto");
mKeywords.insert("if");
mKeywords.insert("inline");
mKeywords.insert("int");
mKeywords.insert("long");
//mKeywords.insert("int"); // type
//mKeywords.insert("long"); // type
mKeywords.insert("register");
mKeywords.insert("restrict");
mKeywords.insert("return");
mKeywords.insert("short");
//mKeywords.insert("short"); // type
mKeywords.insert("signed");
mKeywords.insert("sizeof");
mKeywords.insert("static");
@@ -106,18 +107,8 @@ void TokenList::deallocateTokens()
mFiles.clear();
}
int TokenList::appendFileIfNew(const std::string &fileName)
void TokenList::determineCppC()
{
// Has this file been tokenized already?
for (int i = 0; i < mFiles.size(); ++i)
if (Path::sameFileName(mFiles[i], fileName))
return i;
// The "mFiles" vector remembers what files have been tokenized..
mFiles.push_back(fileName);
// Update mIsC and mIsCpp properties
if (mFiles.size() == 1) { // Update only useful if first file added to _files
if (!mSettings) {
mIsC = Path::isC(getSourceFilePath());
mIsCpp = Path::isCPP(getSourceFilePath());
@@ -127,6 +118,7 @@ int TokenList::appendFileIfNew(const std::string &fileName)
}
if (mIsCpp) {
//mKeywords.insert("bool"); // type
mKeywords.insert("catch");
mKeywords.insert("delete");
mKeywords.insert("class");
@@ -135,7 +127,7 @@ int TokenList::appendFileIfNew(const std::string &fileName)
mKeywords.insert("dynamic_cast");
mKeywords.insert("explicit");
mKeywords.insert("export");
mKeywords.insert("false");
//mKeywords.insert("false"); // literal
mKeywords.insert("friend");
mKeywords.insert("mutable");
mKeywords.insert("namespace");
@@ -149,14 +141,29 @@ int TokenList::appendFileIfNew(const std::string &fileName)
mKeywords.insert("template");
mKeywords.insert("this");
mKeywords.insert("throw");
mKeywords.insert("true");
//mKeywords.insert("true"); // literal
mKeywords.insert("try");
mKeywords.insert("typeid");
mKeywords.insert("typename");
mKeywords.insert("using");
mKeywords.insert("virtual");
mKeywords.insert("wchar_t");
//mKeywords.insert("wchar_t"); // type
}
}
int TokenList::appendFileIfNew(const std::string &fileName)
{
// Has this file been tokenized already?
for (int i = 0; i < mFiles.size(); ++i)
if (Path::sameFileName(mFiles[i], fileName))
return i;
// The "mFiles" vector remembers what files have been tokenized..
mFiles.push_back(fileName);
// Update mIsC and mIsCpp properties
if (mFiles.size() == 1) { // Update only useful if first file added to _files
determineCppC();
}
return mFiles.size() - 1;
}
@@ -207,8 +214,6 @@ void TokenList::addtoken(std::string str, const nonneg int lineno, const nonneg
mTokensFrontBack.back->str(str);
}
if (isKeyword(str))
mTokensFrontBack.back->isKeyword(true);
mTokensFrontBack.back->linenr(lineno);
mTokensFrontBack.back->fileIndex(fileno);
}
@@ -226,8 +231,6 @@ void TokenList::addtoken(std::string str, const Token *locationTok)
mTokensFrontBack.back->str(str);
}
if (isCPP() && str == "delete")
mTokensFrontBack.back->isKeyword(true);
mTokensFrontBack.back->linenr(locationTok->linenr());
mTokensFrontBack.back->column(locationTok->column());
mTokensFrontBack.back->fileIndex(locationTok->fileIndex());
@@ -391,15 +394,7 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList)
else
mFiles.clear();
mIsC = mIsCpp = false;
if (!mFiles.empty()) {
mIsC = Path::isC(getSourceFilePath());
mIsCpp = Path::isCPP(getSourceFilePath());
}
if (mSettings && mSettings->enforcedLang != Settings::None) {
mIsC = (mSettings->enforcedLang == Settings::C);
mIsCpp = (mSettings->enforcedLang == Settings::CPP);
}
determineCppC();
for (const simplecpp::Token *tok = tokenList.cfront(); tok;) {
@@ -417,8 +412,6 @@ void TokenList::createTokens(simplecpp::TokenList&& tokenList)
mTokensFrontBack.back->str(str);
}
if (isCPP() && mTokensFrontBack.back->str() == "delete")
mTokensFrontBack.back->isKeyword(true);
mTokensFrontBack.back->fileIndex(tok->location.fileIndex);
mTokensFrontBack.back->linenr(tok->location.line);
mTokensFrontBack.back->column(tok->location.col);
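
Taken together, the tokenlist.cpp changes leave only the language-independent keywords in the constructor and move the C/C++ detection plus the C++-only keywords into the new determineCppC(), which is now called from both appendFileIfNew() and createTokens(). A hedged stand-alone sketch of that split follows, using an invented MiniTokenList rather than the real class.

// Sketch only (invented MiniTokenList, not the real TokenList): base keywords
// are set up unconditionally, and the language-dependent ones are added once
// the language of the source file is known.
#include <cassert>
#include <set>
#include <string>

class MiniTokenList {
public:
    void addSourceFile(const std::string& fileName)
    {
        // Crude extension check standing in for Path::isCPP()/isC().
        mIsCpp = fileName.size() >= 4 && fileName.compare(fileName.size() - 4, 4, ".cpp") == 0;
        determineLanguageKeywords();
    }

    bool isKeyword(const std::string& s) const { return mKeywords.count(s) != 0; }

private:
    // Mirrors the idea of determineCppC(): add C++-only keywords once the
    // language is known.
    void determineLanguageKeywords()
    {
        if (mIsCpp)
            mKeywords.insert({"delete", "namespace", "template", "throw"});
    }

    bool mIsCpp = false;
    // Language-independent keywords; types and literals stay out, as in the diff.
    std::set<std::string> mKeywords{"for", "if", "return", "sizeof"};
};

int main()
{
    MiniTokenList cList, cppList;
    cList.addSourceFile("a.c");
    cppList.addSourceFile("a.cpp");
    assert(!cList.isKeyword("delete"));  // keyword only in C++
    assert(cppList.isKeyword("delete"));
    assert(cList.isKeyword("for") && cppList.isKeyword("for"));
}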

View File

@@ -197,6 +197,8 @@ private:
/** Disable assignment operator, no implementation */
TokenList &operator=(const TokenList &);
void determineCppC();
/** Token list */
TokensFrontBack mTokensFrontBack;

View File

@@ -35,6 +35,7 @@ private:
TEST_CASE(testaddtoken1);
TEST_CASE(testaddtoken2);
TEST_CASE(inc);
TEST_CASE(isKeyword);
}
// inspired by #5895
@@ -65,6 +66,48 @@ private:
ASSERT(Token::simpleMatch(tokenlist.front(), "a + + 1 ; 1 + + b ;"));
}
void isKeyword() {
const char code[] = "for a int delete true";
{
TokenList tokenlist(&settings);
std::istringstream istr(code);
tokenlist.createTokens(istr, "a.c");
ASSERT_EQUALS(true, tokenlist.front()->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->next()->isKeyword());
ASSERT_EQUALS(false, tokenlist.front()->next()->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->tokAt(2)->tokType() == Token::eType);
ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->tokAt(4)->isLiteral());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isControlFlowKeyword());
}
{
TokenList tokenlist(&settings);
std::istringstream istr(code);
tokenlist.createTokens(istr, "a.cpp");
ASSERT_EQUALS(true, tokenlist.front()->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->next()->isKeyword());
ASSERT_EQUALS(false, tokenlist.front()->next()->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->tokAt(2)->tokType() == Token::eType);
ASSERT_EQUALS(false, tokenlist.front()->tokAt(2)->isControlFlowKeyword());
ASSERT_EQUALS(true, tokenlist.front()->tokAt(3)->isKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(3)->isControlFlowKeyword());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isKeyword());
ASSERT_EQUALS(true, tokenlist.front()->tokAt(4)->isLiteral());
ASSERT_EQUALS(false, tokenlist.front()->tokAt(4)->isControlFlowKeyword());
}
}
};
REGISTER_TEST(TestTokenList)