From 6230919976435ccbae8661396182835323517a76 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Daniel=20Marjam=C3=A4ki?=
Date: Wed, 17 May 2017 14:57:54 +0200
Subject: [PATCH] Skip 'Stringification => Tokenize' step

---
 lib/cppcheck.cpp  | 17 +++++++----------
 lib/token.cpp     |  1 +
 lib/token.h       | 18 +++++++++++++-----
 lib/tokenize.cpp  |  7 +++++++
 lib/tokenize.h    |  5 +++--
 lib/tokenlist.cpp | 44 ++++++++++++++++++++++++++++++++++++++++++++
 lib/tokenlist.h   |  3 +++
 7 files changed, 78 insertions(+), 17 deletions(-)

diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp
index bbf4128f4..de4eedd6b 100644
--- a/lib/cppcheck.cpp
+++ b/lib/cppcheck.cpp
@@ -283,13 +283,11 @@ unsigned int CppCheck::processFile(const std::string& filename, const std::strin
                 cfg = _settings.userDefines + cfg;
             }
 
-            std::string codeWithoutCfg;
-            {
-                Timer t("Preprocessor::getcode", _settings.showtime, &S_timerResults);
-                codeWithoutCfg = preprocessor.getcode(tokens1, cfg, files, true);
-            }
-
             if (_settings.preprocessOnly) {
+                Timer t("Preprocessor::getcode", _settings.showtime, &S_timerResults);
+                std::string codeWithoutCfg = preprocessor.getcode(tokens1, cfg, files, true);
+                t.Stop();
+
                 if (codeWithoutCfg.compare(0,5,"#file") == 0)
                     codeWithoutCfg.insert(0U, "//");
                 std::string::size_type pos = 0;
@@ -310,13 +308,12 @@ unsigned int CppCheck::processFile(const std::string& filename, const std::strin
                 _tokenizer.setTimerResults(&S_timerResults);
 
             try {
+                bool result;
+
                 // Create tokens, skip rest of iteration if failed
-                std::istringstream istr(codeWithoutCfg);
                 Timer timer("Tokenizer::createTokens", _settings.showtime, &S_timerResults);
-                bool result = _tokenizer.createTokens(istr, filename);
+                _tokenizer.createTokens(preprocessor.preprocess(tokens1, cfg, files));
                 timer.Stop();
-                if (!result)
-                    continue;
 
                 // skip rest of iteration if just checking configuration
                 if (_settings.checkConfiguration)
diff --git a/lib/token.cpp b/lib/token.cpp
index 5f5f00ab0..39cfeb2b8 100644
--- a/lib/token.cpp
+++ b/lib/token.cpp
@@ -43,6 +43,7 @@ Token::Token(Token **tokens) :
     _varId(0),
     _fileIndex(0),
     _linenr(0),
+    _col(0),
     _progressValue(0),
     _tokType(eNone),
     _flags(0),
diff --git a/lib/token.h b/lib/token.h
index 21fb230b9..f481894d6 100644
--- a/lib/token.h
+++ b/lib/token.h
@@ -45,7 +45,7 @@ class Enumerator;
  *
  * Tokens are stored as strings. The "if", "while", etc are stored in plain text.
  * The reason the Token class is needed (instead of using the string class) is that some extra functionality is also needed for tokens:
- * - location of the token is stored (linenr, fileIndex)
+ * - location of the token is stored (fileIndex, linenr, column)
  * - functions for classifying the token (isName, isNumber, isBoolean, isStandardType)
  *
  * The Token class also has other functions for management of token list, matching tokens, etc.
@@ -444,6 +444,13 @@ public:
      */
     static int multiCompare(const Token *tok, const char *haystack, unsigned int varid);
 
+    unsigned int fileIndex() const {
+        return _fileIndex;
+    }
+    void fileIndex(unsigned int indexOfFile) {
+        _fileIndex = indexOfFile;
+    }
+
     unsigned int linenr() const {
         return _linenr;
     }
@@ -451,11 +458,11 @@ public:
         _linenr = lineNumber;
     }
 
-    unsigned int fileIndex() const {
-        return _fileIndex;
+    unsigned int col() const {
+        return _col;
     }
-    void fileIndex(unsigned int indexOfFile) {
-        _fileIndex = indexOfFile;
+    void col(unsigned int c) {
+        _col = c;
     }
 
     Token *next() const {
@@ -844,6 +851,7 @@ private:
     unsigned int _varId;
     unsigned int _fileIndex;
    unsigned int _linenr;
+    unsigned int _col;
 
     /**
      * A value from 0-100 that provides a rough idea about where in the token
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index d4db60b95..efce842e6 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -1709,6 +1709,13 @@ bool Tokenizer::createTokens(std::istream &code,
     return list.createTokens(code, Path::getRelativePath(Path::simplifyPath(FileName), _settings->basePaths));
 }
 
+void Tokenizer::createTokens(const simplecpp::TokenList &tokenList)
+{
+    // make sure settings specified
+    assert(_settings);
+    list.createTokens(tokenList);
+}
+
 bool Tokenizer::simplifyTokens1(const std::string &configuration)
 {
     // Fill the map _typeSize..
diff --git a/lib/tokenize.h b/lib/tokenize.h
index f980af42e..0d03325a1 100644
--- a/lib/tokenize.h
+++ b/lib/tokenize.h
@@ -24,6 +24,7 @@
 #include "errorlogger.h"
 #include "tokenlist.h"
 #include "config.h"
+#include "simplecpp.h"
 
 #include
 #include
@@ -72,8 +73,8 @@ public:
      */
     bool IsScopeNoReturn(const Token *endScopeToken, bool *unknown = nullptr) const;
 
-    bool createTokens(std::istream &code,
-                      const std::string& FileName);
+    bool createTokens(std::istream &code, const std::string& FileName);
+    void createTokens(const simplecpp::TokenList &tokenList);
 
     bool simplifyTokens1(const std::string &configuration);
     /**
diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp
index 0edfc30cf..70807cbcd 100644
--- a/lib/tokenlist.cpp
+++ b/lib/tokenlist.cpp
@@ -409,6 +409,50 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)
 
 //---------------------------------------------------------------------------
 
+void TokenList::createTokens(const simplecpp::TokenList &tokenList)
+{
+    if (tokenList.cfront())
+        _files = tokenList.cfront()->location.files;
+    else
+        _files.clear();
+
+    _isC = _isCPP = false;
+    if (!_files.empty()) {
+        _isC = Path::isC(getSourceFilePath());
+        _isCPP = Path::isCPP(getSourceFilePath());
+    }
+    if (_settings && _settings->enforcedLang != Settings::None) {
+        _isC = (_settings->enforcedLang == Settings::C);
+        _isCPP = (_settings->enforcedLang == Settings::CPP);
+    }
+
+    for (const simplecpp::Token *tok = tokenList.cfront(); tok; tok = tok->next) {
+        if (_back) {
+            _back->insertToken(tok->str);
+        } else {
+            _front = new Token(&_back);
+            _back = _front;
+            _back->str(tok->str);
+        }
+
+        if (isCPP() && _back->str() == "delete")
+            _back->isKeyword(true);
+        _back->fileIndex(tok->location.fileIndex);
+        _back->linenr(tok->location.line);
+        _back->col(tok->location.col);
+        _back->isExpandedMacro(!tok->macro.empty());
+    }
+
+    if (_settings && _settings->relativePaths) {
+        for (std::size_t i = 0; i < _files.size(); i++)
+            _files[i] = Path::getRelativePath(_files[i], _settings->basePaths);
+    }
+
+    Token::assignProgressValues(_front);
+}
+
+//---------------------------------------------------------------------------
+
 unsigned long long TokenList::calculateChecksum() const
 {
     unsigned long long checksum = 0;
diff --git a/lib/tokenlist.h b/lib/tokenlist.h
index fa6dadd86..87e6f4f46 100644
--- a/lib/tokenlist.h
+++ b/lib/tokenlist.h
@@ -24,6 +24,7 @@
 #include
 #include
 #include "config.h"
+#include "simplecpp.h"
 
 class Token;
 class Settings;
@@ -79,6 +80,8 @@ public:
      */
     bool createTokens(std::istream &code, const std::string& file0 = emptyString);
 
+    void createTokens(const simplecpp::TokenList &tokenList);
+
     /** Deallocate list */
     void deallocateTokens();
 
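
Note on the new call path: after this patch the tokenizer consumes the preprocessor's simplecpp::TokenList directly, so the "stringify with Preprocessor::getcode(), then re-parse from a std::istringstream" round trip is skipped. Below is a minimal sketch of the resulting flow inside the configuration loop of CppCheck::processFile; the names (preprocessor, tokens1, cfg, files, _tokenizer) are taken from the hunks above, while error handling, timers and the later simplification step are only assumed from the surrounding code and are not part of this patch.

    // Old path (removed): stringify the selected configuration, then tokenize the string again.
    //   std::istringstream istr(preprocessor.getcode(tokens1, cfg, files, true));
    //   bool result = _tokenizer.createTokens(istr, filename);

    // New path: hand the preprocessed simplecpp::TokenList straight to the tokenizer.
    // TokenList::createTokens(const simplecpp::TokenList &) copies file index, line,
    // column and macro-expansion information onto each Token (see lib/tokenlist.cpp above).
    _tokenizer.createTokens(preprocessor.preprocess(tokens1, cfg, files));
    // ...presumably followed, as in the existing code, by simplification using the
    // 'result' variable that the patch now declares at the top of the try block:
    // result = _tokenizer.simplifyTokens1(cfg);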