Some small refactorings:

- Removed redundant calls to the std::string constructor
- Used default parameters in some places
Author: PKEuS, 2014-10-03 10:02:46 +02:00
parent bb8c8d53cc
commit a02712cb66
4 changed files with 7 additions and 9 deletions
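The two changes described in the commit message reduce to the patterns sketched below. This is a minimal, hypothetical example: the tokenize() helper and its signature are invented for illustration and are not the actual Cppcheck API; it only shows why the std::string(...) wrappers are redundant and how a default argument lets call sites drop a trailing "".

// A minimal sketch of the two refactoring patterns, assuming a hypothetical
// tokenize() helper; this is not the real Cppcheck API.
#include <iostream>
#include <istream>
#include <sstream>
#include <string>

// "Used default parameters": with cfg defaulting to an empty string, call
// sites that used to pass "" explicitly can simply omit the argument.
static bool tokenize(std::istream &in, const char *filename, const std::string &cfg = "")
{
    std::string line;
    while (std::getline(in, line)) {
        // a real tokenizer would build a token list here
    }
    std::cout << "tokenized " << filename << " [" << cfg << "]\n";
    return true;
}

int main()
{
    const std::string fixedpath = "lib/cppcheck.cpp";
    const std::string cfg = "A";

    // "Removed redundant calls to the std::string constructor": fixedpath is
    // already a std::string, so the adjacent string literals take part in
    // std::string concatenation without being wrapped in std::string(...).
    const std::string message = "Checking " + fixedpath + ": " + cfg + "...";
    std::cout << message << '\n';

    std::istringstream istr("int x ;");
    tokenize(istr, "test.cpp");          // relies on the default cfg = ""
    tokenize(istr, "test.cpp", "FOO");   // explicit configuration still works
    return 0;
}

In the diff below, the same two patterns appear in CppCheck::processFile (the reportOut message) and in the tokenize()/createTokens() call sites, whose trailing "" argument is presumably now supplied by a default parameter in the corresponding declarations.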


@@ -215,7 +215,7 @@ unsigned int CppCheck::processFile(const std::string& filename, const std::strin
     if (_settings._errorsOnly == false && it != configurations.begin()) {
         std::string fixedpath = Path::simplifyPath(filename);
         fixedpath = Path::toNativeSeparators(fixedpath);
-        _errorLogger.reportOut(std::string("Checking ") + fixedpath + ": " + cfg + std::string("..."));
+        _errorLogger.reportOut("Checking " + fixedpath + ": " + cfg + "...");
     }
     if (!_settings.userDefines.empty()) {
@@ -311,7 +311,7 @@ void CppCheck::analyseFile(std::istream &fin, const std::string &filename)
     // Tokenize..
     Tokenizer tokenizer(&_settings, this);
     std::istringstream istr(code);
-    tokenizer.tokenize(istr, filename.c_str(), "");
+    tokenizer.tokenize(istr, filename.c_str());
     tokenizer.simplifyTokenList2();
     // Analyse the tokens..
@@ -340,8 +340,6 @@ bool CppCheck::checkFile(const std::string &code, const char FileName[], std::se
     if (_settings._showtime != SHOWTIME_NONE)
         _tokenizer.setTimerResults(&S_timerResults);
     try {
-        bool result;
-
         // Execute rules for "raw" code
         for (std::list<Settings::Rule>::const_iterator it = _settings.rules.begin(); it != _settings.rules.end(); ++it) {
             if (it->tokenlist == "raw") {
@@ -357,7 +355,7 @@ bool CppCheck::checkFile(const std::string &code, const char FileName[], std::se
         std::istringstream istr(code);
         Timer timer("Tokenizer::tokenize", _settings._showtime, &S_timerResults);
-        result = _tokenizer.tokenize(istr, FileName, cfg);
+        bool result = _tokenizer.tokenize(istr, FileName, cfg);
         timer.Stop();
         if (_settings._force || _settings._maxConfigs > 1) {


@@ -396,7 +396,7 @@ bool Library::isargvalid(const std::string &functionName, int argnr, const MathL
         return true;
     TokenList tokenList(0);
     std::istringstream istr(ac->valid + ',');
-    tokenList.createTokens(istr,"");
+    tokenList.createTokens(istr);
    for (Token *tok = tokenList.front(); tok; tok = tok->next()) {
        if (Token::Match(tok,"- %num%")) {
            tok->str("-" + tok->strAt(1));


@@ -1622,7 +1622,7 @@ bool Tokenizer::tokenizeCondition(const std::string &code)
 {
     std::istringstream istr(code);
-    if (!list.createTokens(istr, "")) {
+    if (!list.createTokens(istr)) {
         cppcheckError(0);
         return false;
     }


@@ -3757,7 +3757,7 @@ private:
         Settings settings;
         Tokenizer tokenizer(&settings, this);
         std::istringstream istr(code);
-        tokenizer.tokenize(istr, "test.cpp", "");
+        tokenizer.tokenize(istr, "test.cpp");
         tokenizer.validate();
     }
@@ -3768,7 +3768,7 @@ private:
         Settings settings;
         Tokenizer tokenizer(&settings, this);
         std::istringstream istr(code);
-        tokenizer.tokenize(istr, "test.cpp", "");
+        tokenizer.tokenize(istr, "test.cpp");
         tokenizer.validate();
     }