minor refactoring: header cleanup. use forward declarations instead of includes
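A minimal sketch of the pattern this commit applies, condensed from the tokenize.h hunks below (the class and member names are the ones that appear in the diff; this is an illustration, not the complete header):

// Before: tokenize.h pulled full definitions into every file that included it.
#include "settings.h"
#include "errorlogger.h"
#include "token.h"

// After: the header only stores pointers to these types, so forward
// declarations suffice; the .cpp files that need the complete types now
// include token.h / settings.h / errorlogger.h themselves.
class Token;
class ErrorLogger;
class Settings;

class Tokenizer
{
public:
    Tokenizer();                                                    // _settings and _errorLogger default to 0
    Tokenizer(const Settings *settings, ErrorLogger *errorLogger);  // pointer instead of reference
private:
    Token *_tokens, *_tokensBack;
    const Settings * const _settings;   // may be 0, e.g. in the tests
    ErrorLogger * const _errorLogger;
};

Making _settings a pointer is what lets the tests below pass 0 instead of constructing a Settings object, and is why the debug checks become if (_settings && _settings->_debug).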
commit 17008879ac
parent 5b33071ec0
@@ -19,6 +19,7 @@
 #ifndef checkH
 #define checkH
 
+#include "token.h"
 #include "tokenize.h"
 #include "settings.h"
 #include "errorlogger.h"

@@ -21,6 +21,7 @@
 #include "checkheaders.h"
 #include "tokenize.h"
 #include "filelister.h"
+#include "token.h"
 
 #include <algorithm>
 #include <list>

@@ -20,6 +20,7 @@
 //---------------------------------------------------------------------------
 #include "checkunusedfunctions.h"
 #include "tokenize.h"
+#include "token.h"
 //---------------------------------------------------------------------------
 
 

@@ -411,7 +411,7 @@ unsigned int CppCheck::check()
 
 void CppCheck::checkFile(const std::string &code, const char FileName[])
 {
-    Tokenizer _tokenizer(_settings, this);
+    Tokenizer _tokenizer(&_settings, this);
 
     // Tokenize the file
     {

@@ -19,8 +19,11 @@
 
 //---------------------------------------------------------------------------
 #include "tokenize.h"
+#include "token.h"
 #include "filelister.h"
 #include "mathlib.h"
+#include "settings.h"
+#include "errorlogger.h"
 
 #include <locale>
 #include <fstream>
@@ -35,18 +38,17 @@
 //---------------------------------------------------------------------------
 
 Tokenizer::Tokenizer()
+    : _settings(0), _errorLogger(0)
 {
     _tokens = 0;
     _tokensBack = 0;
-    _errorLogger = 0;
 }
 
-Tokenizer::Tokenizer(const Settings &settings, ErrorLogger *errorLogger)
+Tokenizer::Tokenizer(const Settings *settings, ErrorLogger *errorLogger)
+    : _settings(settings), _errorLogger(errorLogger)
 {
     _tokens = 0;
     _tokensBack = 0;
-    _settings = settings;
-    _errorLogger = errorLogger;
 }
 
 Tokenizer::~Tokenizer()
@@ -1496,7 +1498,7 @@ void Tokenizer::simplifyTokenList()
 
     simplifyComma();
     createLinks();
-    if (_settings._debug)
+    if (_settings && _settings->_debug)
     {
         _tokens->printOut();
     }
@@ -2988,7 +2990,7 @@ const Token * Tokenizer::findClassFunction(const Token *tok, const char classnam
 
 void Tokenizer::syntaxError(const Token *tok, char c)
 {
-    if (_settings._debug)
+    if (_settings && _settings->_debug)
     {
         _tokens->printOut();
     }

@@ -25,9 +25,10 @@
 #include <string>
 #include <map>
 #include <vector>
-#include "settings.h"
-#include "errorlogger.h"
-#include "token.h"
 
+class Token;
+class ErrorLogger;
+class Settings;
+
 class Tokenizer
 {
@@ -37,7 +38,7 @@ private:
 
 public:
     Tokenizer();
-    Tokenizer(const Settings &settings, ErrorLogger *errorLogger);
+    Tokenizer(const Settings * settings, ErrorLogger *errorLogger);
     ~Tokenizer();
 
     /**
@@ -265,12 +266,11 @@ private:
 
     void syntaxError(const Token *tok, char c);
 
-    Token *_tokensBack;
+    Token *_tokens, *_tokensBack;
     std::map<std::string, unsigned int> _typeSize;
     std::vector<std::string> _files;
-    Token *_tokens;
-    Settings _settings;
-    ErrorLogger *_errorLogger;
+    const Settings * const _settings;
+    ErrorLogger * const _errorLogger;
 };
 
 //---------------------------------------------------------------------------

@@ -24,6 +24,7 @@
 #include "testsuite.h"
 #include "../src/preprocessor.h"
 #include "../src/tokenize.h"
+#include "../src/token.h"
 #include <map>
 #include <string>
 #include <sstream>

@@ -20,6 +20,7 @@
 
 #include "testsuite.h"
 #include "../src/tokenize.h"
+#include "../src/token.h"
 #include <sstream>
 
 extern std::ostringstream errout;
@@ -363,8 +363,7 @@ private:
             " for ( \n"
             "}\n";
 
-        Settings s;
-        Tokenizer tokenizer(s, this);
+        Tokenizer tokenizer(0, this);
         std::istringstream istr(src);
        ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
        ASSERT_EQUALS("[test.cpp:1]: (error) Invalid number of character ((). Can't process file.\n", errout.str());

@@ -24,6 +24,7 @@
 #include <cstring>
 #include "testsuite.h"
 #include "../src/tokenize.h"
+#include "../src/token.h"
 
 extern std::ostringstream errout;
 class TestTokenizer : public TestFixture
@@ -2134,12 +2135,10 @@ private:
 
     void syntax_error()
     {
-
-        Settings s;
         {
             errout.str("");
             const char code[] = "void f() {}";
-            Tokenizer tokenizer(s, this);
+            Tokenizer tokenizer(0, this);
             std::istringstream istr(code);
             ASSERT_EQUALS(true, tokenizer.tokenize(istr, "test.cpp"));
             ASSERT_EQUALS("", errout.str());
@@ -2148,7 +2147,7 @@ private:
         {
             errout.str("");
             const char code[] = "void f() {{}";
-            Tokenizer tokenizer(s, this);
+            Tokenizer tokenizer(0, this);
             std::istringstream istr(code);
             ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
             ASSERT_EQUALS("[test.cpp:1]: (error) Invalid number of character ({). Can't process file.\n", errout.str());
@@ -2157,7 +2156,7 @@ private:
         {
             errout.str("");
             const char code[] = "void f()) {}";
-            Tokenizer tokenizer(s, this);
+            Tokenizer tokenizer(0, this);
             std::istringstream istr(code);
             ASSERT_EQUALS(false, tokenizer.tokenize(istr, "test.cpp"));
             ASSERT_EQUALS("[test.cpp:1]: (error) Invalid number of character ((). Can't process file.\n", errout.str());
@@ -2166,7 +2165,7 @@ private:
         {
             errout.str("");
             const char code[] = "namespace extract{\nB(weighted_moment)\n}\nusing extract::weighted_moment;\n";
-            Tokenizer tokenizer(s, this);
+            Tokenizer tokenizer(0, this);
             std::istringstream istr(code);
             ASSERT_EQUALS(true, tokenizer.tokenize(istr, "test.cpp"));
             tokenizer.simplifyTokenList();