Skip 'Stringification => Tokenize' step

Daniel Marjamäki 2017-05-17 14:57:54 +02:00
parent 67e3187653
commit 6230919976
7 changed files with 78 additions and 17 deletions

lib/cppcheck.cpp

@@ -283,13 +283,11 @@ unsigned int CppCheck::processFile(const std::string& filename, const std::strin
                 cfg = _settings.userDefines + cfg;
             }

-            std::string codeWithoutCfg;
-            {
-                Timer t("Preprocessor::getcode", _settings.showtime, &S_timerResults);
-                codeWithoutCfg = preprocessor.getcode(tokens1, cfg, files, true);
-            }
-
             if (_settings.preprocessOnly) {
+                Timer t("Preprocessor::getcode", _settings.showtime, &S_timerResults);
+                std::string codeWithoutCfg = preprocessor.getcode(tokens1, cfg, files, true);
+                t.Stop();
+
                 if (codeWithoutCfg.compare(0,5,"#file") == 0)
                     codeWithoutCfg.insert(0U, "//");
                 std::string::size_type pos = 0;
@@ -310,13 +308,12 @@ unsigned int CppCheck::processFile(const std::string& filename, const std::strin
             _tokenizer.setTimerResults(&S_timerResults);

             try {
+                bool result;
+
                 // Create tokens, skip rest of iteration if failed
-                std::istringstream istr(codeWithoutCfg);
                 Timer timer("Tokenizer::createTokens", _settings.showtime, &S_timerResults);
-                bool result = _tokenizer.createTokens(istr, filename);
+                _tokenizer.createTokens(preprocessor.preprocess(tokens1, cfg, files));
                 timer.Stop();
-                if (!result)
-                    continue;

                 // skip rest of iteration if just checking configuration
                 if (_settings.checkConfiguration)
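
Taken together, the two hunks above change CppCheck::processFile() so that the selected configuration is no longer stringified with Preprocessor::getcode() and re-read through a std::istringstream; the simplecpp token list coming out of the preprocessor is handed to the tokenizer directly, and getcode() survives only on the preprocessOnly path. A condensed before/after sketch of the call path (timers and error handling omitted; preprocessor, tokens1, cfg, files and filename are as in the surrounding configuration loop):

    // Before this commit: stringify the preprocessor output, then tokenize the string again.
    std::string codeWithoutCfg = preprocessor.getcode(tokens1, cfg, files, true);
    std::istringstream istr(codeWithoutCfg);
    if (!_tokenizer.createTokens(istr, filename))
        continue;   // creating tokens from the string could fail

    // After this commit: feed the simplecpp token list straight to the tokenizer.
    _tokenizer.createTokens(preprocessor.preprocess(tokens1, cfg, files));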

lib/token.cpp

@@ -43,6 +43,7 @@ Token::Token(Token **tokens) :
     _varId(0),
     _fileIndex(0),
     _linenr(0),
+    _col(0),
     _progressValue(0),
     _tokType(eNone),
     _flags(0),

lib/token.h

@@ -45,7 +45,7 @@ class Enumerator;
  *
  * Tokens are stored as strings. The "if", "while", etc are stored in plain text.
  * The reason the Token class is needed (instead of using the string class) is that some extra functionality is also needed for tokens:
- * - location of the token is stored (linenr, fileIndex)
+ * - location of the token is stored (fileIndex, linenr, column)
  * - functions for classifying the token (isName, isNumber, isBoolean, isStandardType)
  *
  * The Token class also has other functions for management of token list, matching tokens, etc.
@@ -444,6 +444,13 @@
      */
     static int multiCompare(const Token *tok, const char *haystack, unsigned int varid);

+    unsigned int fileIndex() const {
+        return _fileIndex;
+    }
+    void fileIndex(unsigned int indexOfFile) {
+        _fileIndex = indexOfFile;
+    }
+
     unsigned int linenr() const {
         return _linenr;
     }
@@ -451,11 +458,11 @@
         _linenr = lineNumber;
     }

-    unsigned int fileIndex() const {
-        return _fileIndex;
+    unsigned int col() const {
+        return _col;
     }
-    void fileIndex(unsigned int indexOfFile) {
-        _fileIndex = indexOfFile;
+    void col(unsigned int c) {
+        _col = c;
     }

     Token *next() const {
@@ -844,6 +851,7 @@
     unsigned int _varId;
     unsigned int _fileIndex;
     unsigned int _linenr;
+    unsigned int _col;

     /**
      * A value from 0-100 that provides a rough idea about where in the token
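
With the new _col member and the accessors above, a cppcheck Token now carries a complete fileIndex/linenr/col location. A minimal sketch of reading it back while walking a token list; the dumpLocations() helper is hypothetical and not part of this commit:

    #include "token.h"
    #include <cstdio>

    // Hypothetical helper: print the location stored on every token in a list.
    static void dumpLocations(const Token *tok)
    {
        for (; tok; tok = tok->next())
            std::printf("%u:%u:%u: %s\n",
                        tok->fileIndex(), tok->linenr(), tok->col(),
                        tok->str().c_str());
    }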

lib/tokenize.cpp

@@ -1709,6 +1709,13 @@ bool Tokenizer::createTokens(std::istream &code,
     return list.createTokens(code, Path::getRelativePath(Path::simplifyPath(FileName), _settings->basePaths));
 }

+void Tokenizer::createTokens(const simplecpp::TokenList &tokenList)
+{
+    // make sure settings specified
+    assert(_settings);
+    list.createTokens(tokenList);
+}
+
 bool Tokenizer::simplifyTokens1(const std::string &configuration)
 {
     // Fill the map _typeSize..
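
The new Tokenizer overload does no scanning of its own: it only asserts that Settings were supplied and forwards the simplecpp list to TokenList::createTokens() (added in lib/tokenlist.cpp below). A hypothetical caller-side sketch, assuming the Tokenizer(const Settings*, ErrorLogger*) constructor of this period; errorLogger and preprocessedTokens (a simplecpp::TokenList from the preprocessor) are placeholders:

    Settings settings;
    Tokenizer tokenizer(&settings, &errorLogger);   // settings must be set: createTokens() asserts it
    tokenizer.createTokens(preprocessedTokens);     // returns void now - token creation cannot fail here
    if (!tokenizer.simplifyTokens1(""))             // configuration string, "" = default cfg
        return;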

lib/tokenize.h

@@ -24,6 +24,7 @@
 #include "errorlogger.h"
 #include "tokenlist.h"
 #include "config.h"
+#include "simplecpp.h"

 #include <string>
 #include <map>
@@ -72,8 +73,8 @@
      */
     bool IsScopeNoReturn(const Token *endScopeToken, bool *unknown = nullptr) const;

-    bool createTokens(std::istream &code,
-                      const std::string& FileName);
+    bool createTokens(std::istream &code, const std::string& FileName);
+    void createTokens(const simplecpp::TokenList &tokenList);
     bool simplifyTokens1(const std::string &configuration);

     /**

lib/tokenlist.cpp

@@ -409,6 +409,50 @@ bool TokenList::createTokens(std::istream &code, const std::string& file0)

 //---------------------------------------------------------------------------

+void TokenList::createTokens(const simplecpp::TokenList &tokenList)
+{
+    if (tokenList.cfront())
+        _files = tokenList.cfront()->location.files;
+    else
+        _files.clear();
+
+    _isC = _isCPP = false;
+    if (!_files.empty()) {
+        _isC = Path::isC(getSourceFilePath());
+        _isCPP = Path::isCPP(getSourceFilePath());
+    }
+
+    if (_settings && _settings->enforcedLang != Settings::None) {
+        _isC = (_settings->enforcedLang == Settings::C);
+        _isCPP = (_settings->enforcedLang == Settings::CPP);
+    }
+
+    for (const simplecpp::Token *tok = tokenList.cfront(); tok; tok = tok->next) {
+        if (_back) {
+            _back->insertToken(tok->str);
+        } else {
+            _front = new Token(&_back);
+            _back = _front;
+            _back->str(tok->str);
+        }
+
+        if (isCPP() && _back->str() == "delete")
+            _back->isKeyword(true);
+        _back->fileIndex(tok->location.fileIndex);
+        _back->linenr(tok->location.line);
+        _back->col(tok->location.col);
+        _back->isExpandedMacro(!tok->macro.empty());
+    }
+
+    if (_settings && _settings->relativePaths) {
+        for (std::size_t i = 0; i < _files.size(); i++)
+            _files[i] = Path::getRelativePath(_files[i], _settings->basePaths);
+    }
+
+    Token::assignProgressValues(_front);
+}
+
+//---------------------------------------------------------------------------
+
 unsigned long long TokenList::calculateChecksum() const
 {
     unsigned long long checksum = 0;
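
TokenList::createTokens() above copies, for every simplecpp token, exactly the fields shown: str, location.fileIndex, location.line, location.col and whether the token came from a macro expansion. A small standalone sketch of producing such a list with simplecpp itself, assuming its stream-based TokenList constructor; this is illustration only, not code from this commit:

    #include "simplecpp.h"
    #include <iostream>
    #include <sstream>
    #include <vector>

    int main()
    {
        std::istringstream code("int x = 1 ;\n");
        std::vector<std::string> files;   // filled with the file names seen while tokenizing
        simplecpp::TokenList rawtokens(code, files, "test.cpp");

        // The same fields the new TokenList::createTokens() transfers onto cppcheck Tokens.
        for (const simplecpp::Token *tok = rawtokens.cfront(); tok; tok = tok->next)
            std::cout << files[tok->location.fileIndex] << ':'
                      << tok->location.line << ':' << tok->location.col
                      << ' ' << tok->str
                      << (tok->macro.empty() ? "" : " [macro]") << '\n';
        return 0;
    }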

lib/tokenlist.h

@@ -24,6 +24,7 @@
 #include <string>
 #include <vector>
 #include "config.h"
+#include "simplecpp.h"

 class Token;
 class Settings;
@@ -79,6 +80,8 @@
      */
     bool createTokens(std::istream &code, const std::string& file0 = emptyString);

+    void createTokens(const simplecpp::TokenList &tokenList);
+
     /** Deallocate list */
     void deallocateTokens();