Refactoring: Tokenizer - renaming functions, moved the 'tokenizeCode' to the private section

Daniel Marjamäki 2008-11-25 18:34:51 +00:00
parent 0d6bd9b7fd
commit 8845e8bc89
14 changed files with 46 additions and 52 deletions
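
The rename is mechanical: the public entry points move from PascalCase to camelCase (Tokenize -> tokenize, SimplifyTokenList -> simplifyTokenList), the TokenizeCode helper becomes the private tokenizeCode, and the test files stop seeding tokenizer._files by hand because tokenize() now takes the file name directly. A minimal sketch of the new call pattern, as used throughout the updated tests below (the "tokenize.h" include and the wrapper function name are assumptions for the sketch; the Tokenizer class itself is the one declared in this tree):

    #include <sstream>
    #include "tokenize.h"   // assumed header name for this tree's Tokenizer class

    static void tokenizeSample(const char code[])
    {
        Tokenizer tokenizer;
        std::istringstream istr(code);
        // old pattern: tokenizer._files.push_back("test.cpp"); tokenizer.TokenizeCode(istr);
        tokenizer.tokenize(istr, "test.cpp");   // was Tokenizer::Tokenize
        tokenizer.simplifyTokenList();          // was Tokenizer::SimplifyTokenList
    }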

View File

@@ -186,7 +186,7 @@ void CppCheck::checkFile(const std::string &code, const char FileName[])
     // Tokenize the file
     {
         std::istringstream istr(code);
-        _tokenizer.Tokenize(istr, FileName);
+        _tokenizer.tokenize(istr, FileName);
     }
     _tokenizer.fillFunctionList();
@@ -218,7 +218,7 @@ void CppCheck::checkFile(const std::string &code, const char FileName[])
     // }
-    _tokenizer.SimplifyTokenList();
+    _tokenizer.simplifyTokenList();
     if ( _settings._checkFunctionUsage )

View File

@@ -40,10 +40,9 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         // Fill function list
         Settings settings;
@@ -54,7 +53,7 @@ private:
         // Clear the error buffer..
         errout.str("");
-        // Check for memory leaks..
+        // Check for buffer overruns..
         CheckBufferOverrunClass checkBufferOverrun( &tokenizer, this );
         checkBufferOverrun.CheckBufferOverrun();
     }

View File

@@ -45,9 +45,8 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
+        tokenizer.tokenize( istr, "test.cpp" );
         // Fill function list
         Settings settings;
@@ -58,7 +57,7 @@ private:
         // Clear the error buffer..
         errout.str("");
-        // Check for memory leaks..
+        // Check char variable usage..
         CheckOther checkOther( &tokenizer, this );
         checkOther.CheckCharVariable();
     }

View File

@@ -39,15 +39,14 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         // Clear the error buffer..
         errout.str("");
-        // Check for memory leaks..
+        // Check class constructors..
         Settings settings;
         settings._checkCodingStyle = true;
         CheckClass checkClass( &tokenizer, settings, this );

View File

@@ -41,15 +41,13 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        //SimplifyTokenList(); <- this can't be used as it removes 'unsigned'
+        tokenizer.tokenize( istr, "test.cpp" );
         // Clear the error buffer..
         errout.str("");
-        // Check for memory leaks..
+        // Check for unsigned divisions..
         CheckOther checkOther( &tokenizer, this );
         checkOther.CheckUnsignedDivision();
     }

View File

@@ -46,14 +46,13 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
+        tokenizer.tokenize( istr, "test.cpp" );
         // Clear the error buffer..
         errout.str("");
-        // Check for memory leaks..
+        // Check for unused functions..
         CheckFunctionUsage checkFunctionUsage(this);
         checkFunctionUsage.parseTokens( tokenizer );
         checkFunctionUsage.check();

View File

@@ -40,10 +40,9 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         // Clear the error buffer..
         errout.str("");

View File

@@ -39,10 +39,9 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         // Clear the error buffer..
         errout.str("");

View File

@@ -43,8 +43,8 @@ private:
     {
         std::istringstream istr(code);
         Tokenizer tokenizer;
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         std::string ret;
         for ( const TOKEN *tok = tokenizer.tokens(); tok; tok = tok->next )

View File

@@ -65,9 +65,8 @@ private:
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
-        tokenizer.TokenizeCode(istr, 0);
+        tokenizer.tokenize(istr, "test.cpp");
         // Expected result..
         const char *expected[] =
@@ -89,9 +88,8 @@ private:
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
-        tokenizer.TokenizeCode(istr, 0);
+        tokenizer.tokenize(istr, "test.cpp");
         // Expected result..
         ASSERT_EQUALS( std::string(10000,'a'), std::string(tokenizer.tokens()->aaaa()) );
@@ -111,9 +109,8 @@ private:
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
-        tokenizer.TokenizeCode(istr, 0);
+        tokenizer.tokenize(istr, "test.cpp");
         // Expected result..
         const char *expected[] =
@@ -142,9 +139,8 @@ private:
             "{ }\n";
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode(istr, 0);
+        tokenizer.tokenize(istr, "test.cpp");
         tokenizer.fillFunctionList();

View File

@@ -46,9 +46,8 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
+        tokenizer.tokenize( istr, "test.cpp" );
         // Clear the error buffer..
         errout.str("");

View File

@@ -39,10 +39,9 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
-        tokenizer.TokenizeCode( istr );
-        tokenizer.SimplifyTokenList();
+        tokenizer.tokenize( istr, "test.cpp" );
+        tokenizer.simplifyTokenList();
         // Clear the error buffer..
         errout.str("");

View File

@@ -231,7 +231,7 @@ void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
 // Tokenize - tokenizes a given file.
 //---------------------------------------------------------------------------
-void Tokenizer::Tokenize(std::istream &code, const char FileName[])
+void Tokenizer::tokenize(std::istream &code, const char FileName[])
 {
     // Has this file been tokenized already?
     for (unsigned int i = 0; i < _files.size(); i++)
@@ -244,7 +244,7 @@ void Tokenizer::Tokenize(std::istream &code, const char FileName[])
     _files.push_back(FileName);
     // Tokenize the file..
-    TokenizeCode( code, (unsigned int)(_files.size() - 1) );
+    tokenizeCode( code, (unsigned int)(_files.size() - 1) );
 }
 //---------------------------------------------------------------------------
@@ -252,7 +252,7 @@ void Tokenizer::Tokenize(std::istream &code, const char FileName[])
 // Tokenize - tokenizes input stream
 //---------------------------------------------------------------------------
-void Tokenizer::TokenizeCode(std::istream &code, const unsigned int FileIndex)
+void Tokenizer::tokenizeCode(std::istream &code, const unsigned int FileIndex)
 {
     // Tokenize the file.
     unsigned int lineno = 1;
@@ -301,7 +301,7 @@ void Tokenizer::TokenizeCode(std::istream &code, const unsigned int FileIndex)
         addtoken(line.c_str(), lineno, FileIndex);
         std::ifstream fin( line.c_str() );
-        Tokenize(fin, line.c_str());
+        tokenize(fin, line.c_str());
     }
     else if (strncmp(line.c_str(), "#define", 7) == 0)
@@ -605,7 +605,7 @@ void Tokenizer::TokenizeCode(std::istream &code, const unsigned int FileIndex)
 // Simplify token list
 //---------------------------------------------------------------------------
-void Tokenizer::SimplifyTokenList()
+void Tokenizer::simplifyTokenList()
 {
     // Remove the keyword 'unsigned'
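
The split shown above keeps one public entry point: tokenize() refuses files it has already seen, records the file name in _files, and hands the stream to tokenizeCode(), which in turn calls tokenize() again whenever it meets an #include line, so headers are pulled in recursively. The stand-in below is a compilable illustration of that control flow only; it is deliberately simplified and is not the real cppcheck Tokenizer (the class name, the "#include" handling and the output are all invented for the sketch):

    #include <iostream>
    #include <istream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Stand-in class mirroring the public/private split from the diff above.
    class MiniTokenizer
    {
    private:
        std::vector<std::string> _files;

        // Private helper: parses one stream, recursing into "included" files.
        void tokenizeCode(std::istream &code, unsigned int fileIndex)
        {
            std::string line;
            while (std::getline(code, line))
            {
                if (line.compare(0, 9, "#include ") == 0)
                {
                    // Re-enter through the public entry point, as the real
                    // tokenizeCode does with an std::ifstream on the header.
                    std::istringstream header("int fromHeader ;");
                    tokenize(header, line.substr(9).c_str());
                }
                else
                {
                    std::cout << _files[fileIndex] << ": " << line << '\n';
                }
            }
        }

    public:
        void tokenize(std::istream &code, const char FileName[])
        {
            // Has this file been tokenized already?
            for (unsigned int i = 0; i < _files.size(); i++)
            {
                if (_files[i] == FileName)
                    return;
            }
            _files.push_back(FileName);
            tokenizeCode(code, (unsigned int)(_files.size() - 1));
        }
    };

    int main()
    {
        std::istringstream istr("#include test.h\nint x ;\n");
        MiniTokenizer tokenizer;
        tokenizer.tokenize(istr, "test.cpp");
        return 0;
    }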

View File

@@ -35,17 +35,25 @@ private:
     // Deallocate lists..
     void DeallocateTokens();
+    /**
+     * Helper function for "tokenize". This recursively parses into included header files.
+     */
+    void tokenizeCode(std::istream &code, const unsigned int FileIndex=0);
 public:
     Tokenizer();
     ~Tokenizer();
-    void Tokenize(std::istream &code, const char FileName[]);
-    // Simplify tokenlist
-    // -----------------------------
-    void SimplifyTokenList();
-    void TokenizeCode(std::istream &code, const unsigned int FileIndex=0);
+    /**
+     * Tokenize code
+     * @param code input stream for code
+     * @param FileName The filename
+     */
+    void tokenize(std::istream &code, const char FileName[]);
+    /** Simplify tokenlist */
+    void simplifyTokenList();
     // Helper functions for handling the tokens list..
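
Because tokenizeCode now sits in the private section, the only supported way in is tokenize(), which registers the file name itself; the tests above accordingly drop their manual tokenizer._files.push_back( "test.cpp" ) calls. A short fragment illustrating the visibility change (it assumes the Tokenizer declaration above is in scope and is not a standalone program):

    Tokenizer tokenizer;
    std::istringstream istr("int x ;");
    tokenizer.tokenize(istr, "test.cpp");   // OK: the public entry point
    // tokenizer.tokenizeCode(istr);        // no longer compiles: private helper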