Optimization: Reduced peak memory usage (by 30% in my test case) by deleting the simplecpp::TokenList tokens immediately as the cppcheck TokenList is created.
parent d78ff975a0
commit c9d8f607df
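The change applies one pattern throughout: the conversion routine now takes the simplecpp list by rvalue reference and deletes each simplecpp token as soon as it has been turned into a cppcheck token, so the two token streams never exist in full at the same time. A minimal sketch of the idea, using std::list of strings as a hypothetical stand-in for the real token types (not the actual cppcheck/simplecpp classes):

    #include <list>
    #include <string>

    // Build 'dst' from 'src', erasing each source element right after it has
    // been converted, so peak memory stays near one list's worth instead of two.
    std::list<std::string> convertAndConsume(std::list<std::string>&& src)
    {
        std::list<std::string> dst;
        for (auto it = src.begin(); it != src.end();) {
            dst.push_back(*it);     // "convert" the token
            it = src.erase(it);     // release the source token immediately
        }
        return dst;
    }

The hunks below apply this pattern to the Tokenizer/TokenList interfaces and update the callers to hand their lists over with std::move.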
@@ -713,7 +713,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
                 {
                     Timer timer("Tokenizer::createTokens", mSettings.showtime, &s_timerResults);
                     simplecpp::TokenList tokensP = preprocessor.preprocess(tokens1, mCurrentConfig, files, true);
-                    mTokenizer.createTokens(&tokensP);
+                    mTokenizer.createTokens(std::move(tokensP));
                 }
                 hasValidConfig = true;

@@ -2302,11 +2302,11 @@ bool Tokenizer::createTokens(std::istream &code,
     return list.createTokens(code, FileName);
 }

-void Tokenizer::createTokens(const simplecpp::TokenList *tokenList)
+void Tokenizer::createTokens(simplecpp::TokenList&& tokenList)
 {
     // make sure settings specified
     assert(mSettings);
-    list.createTokens(tokenList);
+    list.createTokens(std::move(tokenList));
 }

 bool Tokenizer::simplifyTokens1(const std::string &configuration)

@@ -110,7 +110,7 @@ public:
     bool isScopeNoReturn(const Token *endScopeToken, bool *unknown = nullptr) const;

     bool createTokens(std::istream &code, const std::string& FileName);
-    void createTokens(const simplecpp::TokenList *tokenList);
+    void createTokens(simplecpp::TokenList&& tokenList);

     bool simplifyTokens1(const std::string &configuration);
     /**

@@ -377,17 +377,17 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)
     simplecpp::OutputList outputList;
     simplecpp::TokenList tokens(code, mFiles, file0, &outputList);

-    createTokens(&tokens);
+    createTokens(std::move(tokens));

     return outputList.empty();
 }

 //---------------------------------------------------------------------------

-void TokenList::createTokens(const simplecpp::TokenList *tokenList)
+void TokenList::createTokens(simplecpp::TokenList&& tokenList)
 {
-    if (tokenList->cfront())
-        mOrigFiles = mFiles = tokenList->cfront()->location.files;
+    if (tokenList.cfront())
+        mOrigFiles = mFiles = tokenList.cfront()->location.files;
     else
         mFiles.clear();

@@ -401,7 +401,7 @@ void TokenList::createTokens(const simplecpp::TokenList *tokenList)
             mIsCpp = (mSettings->enforcedLang == Settings::CPP);
     }

-    for (const simplecpp::Token *tok = tokenList->cfront(); tok; tok = tok->next) {
+    for (const simplecpp::Token *tok = tokenList.cfront(); tok;) {

         std::string str = tok->str();

@@ -423,6 +423,10 @@ void TokenList::createTokens(const simplecpp::TokenList *tokenList)
         mTokensFrontBack.back->linenr(tok->location.line);
         mTokensFrontBack.back->column(tok->location.col);
         mTokensFrontBack.back->isExpandedMacro(!tok->macro.empty());
+
+        tok = tok->next;
+        if (tok)
+            tokenList.deleteToken(tok->previous);
     }

     if (mSettings && mSettings->relativePaths) {

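The hunk above is where memory is actually released incrementally: the loop advances tok before deleting the token it has just converted (reachable as tok->previous), because deleting the node tok still points to would leave the loop with a dangling pointer; the last token stays in the caller's list and is freed when that list is destroyed. A self-contained sketch of this advance-then-delete shape, with hypothetical Node/List types standing in for simplecpp's:

    #include <iostream>
    #include <string>
    #include <utility>

    struct Node {
        std::string str;
        Node* previous = nullptr;
        Node* next = nullptr;
    };

    struct List {
        Node* front = nullptr;
        ~List() { while (front) { Node* n = front->next; delete front; front = n; } }
        void deleteToken(Node* n) {                // unlink and free one node
            if (!n) return;
            if (n->previous) n->previous->next = n->next; else front = n->next;
            if (n->next) n->next->previous = n->previous;
            delete n;
        }
    };

    void convert(List&& src)
    {
        for (Node* tok = src.front; tok;) {
            std::cout << tok->str << '\n';         // "convert" the current token
            tok = tok->next;                       // advance first...
            if (tok)
                src.deleteToken(tok->previous);    // ...then free the consumed node
            // the last node stays in 'src' and is freed by ~List()
        }
    }

    int main()
    {
        List src;
        Node* a = new Node{"int"};
        Node* b = new Node{"x", a};
        a->next = b;
        src.front = a;
        convert(std::move(src));
    }
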
@@ -98,7 +98,7 @@ public:
     */
     bool createTokens(std::istream &code, const std::string& file0 = emptyString);

-    void createTokens(const simplecpp::TokenList *tokenList);
+    void createTokens(simplecpp::TokenList&& tokenList);

     /** Deallocate list */
     void deallocateTokens();

@@ -138,7 +138,7 @@ private:

         // Tokenizer..
         Tokenizer tokenizer(&settings0, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Run checks..

@@ -51,7 +51,7 @@ private:

         // Tokenize..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check for incomplete statements..

@@ -218,7 +218,7 @@ private:

         // Tokenizer..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check for leaks..

@@ -163,7 +163,7 @@ private:

         // Tokenizer..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check for null pointer dereferences..

@@ -300,7 +300,7 @@ private:

         // Tokenizer..
         Tokenizer tokenizer(settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check..

@@ -81,7 +81,7 @@ private:

         // Tokenize..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check...

@@ -103,7 +103,7 @@ private:

         // Tokenize..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         // Check for unused private functions..

@@ -307,7 +307,7 @@ private:

         // Tokenize..
         Tokenizer tokenizer(&settings, this);
-        tokenizer.createTokens(&tokens2);
+        tokenizer.createTokens(std::move(tokens2));
         tokenizer.simplifyTokens1("");

         settings.debugwarnings = false;