Refactoring: tokens_back and TypeSize are no longer global variables

Reijo Tomperi 2008-11-12 22:50:40 +00:00
parent 134985e410
commit 0b1ee10353
13 changed files with 45 additions and 41 deletions
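
In short, two pieces of shared tokenizer state stop being globals: the tokens_back tail pointer and the TypeSize size-lookup map both move into the Tokenizer class, the constructor takes over the reset that callers used to do by hand, and SizeOfType() plus several token helpers lose their static qualifier. A minimal sketch of the before/after shape, with simplified bodies that are assumptions rather than the actual cppcheck code:

    #include <map>
    #include <string>

    struct TOKEN;                                      // token node, defined elsewhere in tokenize.h

    // Before: file-scope state shared by every caller and every test.
    //   TOKEN *tokens, *tokens_back;
    //   std::map<std::string, unsigned int> TypeSize;

    // After (sketch): the state lives in each Tokenizer instance and the
    // constructor zeroes it, so "tokens = tokens_back = NULL;" disappears.
    class Tokenizer
    {
    public:
        Tokenizer() : tokens_back(0) { }
        int SizeOfType(const char type[]);             // no longer static

    private:
        TOKEN *tokens_back;                            // was the global tokens_back
        std::map<std::string, unsigned int> TypeSize;  // was the global TypeSize
    };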

View File

@@ -341,7 +341,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_LocalVariable()
                 continue;
             }
-            int total_size = size * Tokenizer::SizeOfType(type);
+            int total_size = size * _tokenizer->SizeOfType(type);
             if (total_size == 0)
                 continue;
@@ -397,7 +397,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_StructVariable()
             const char *varname[3] = {0,0,0};
             varname[1] = Tokenizer::getstr(tok2, ivar);
             int arrsize = atoi(Tokenizer::getstr(tok2, ivar+2));
-            int total_size = arrsize * Tokenizer::SizeOfType(tok2->next->str);
+            int total_size = arrsize * _tokenizer->SizeOfType(tok2->next->str);
             if (total_size == 0)
                 continue;
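
Because SizeOfType() is now an instance method, the buffer overrun checker reaches it through the Tokenizer it was given instead of through the class name, which is why these two call sites change from Tokenizer::SizeOfType(...) to _tokenizer->SizeOfType(...). A hedged sketch of that calling pattern; the constructor parameter and the stripped-down body are assumptions, only the _tokenizer member and the call itself come from the diff:

    class Tokenizer;   // full definition comes from tokenize.h

    class CheckBufferOverrunClass
    {
    public:
        // Assumption: the checker is handed the Tokenizer that built the token
        // list and keeps a pointer to it for size queries.
        explicit CheckBufferOverrunClass(Tokenizer *tokenizer)
            : _tokenizer(tokenizer)
        { }

        void CheckBufferOverrun_LocalVariable()
        {
            // ...
            // int total_size = size * _tokenizer->SizeOfType(type);
        }

    private:
        Tokenizer *_tokenizer;   // replaces the former static Tokenizer::SizeOfType() calls
    };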

View File

@@ -164,8 +164,7 @@ static void CppCheck(const std::string &code, const char FileName[], unsigned in
     OnlyReportUniqueErrors = true;
     // Tokenize the file
-    tokens = tokens_back = NULL;
     Files.clear();
     {
         std::istringstream istr(code);

View File

@@ -37,10 +37,9 @@ public:
 private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
-        std::istringstream istr(code);
         Tokenizer tokenizer;
+        std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
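
The same mechanical edit repeats in the test fixtures that follow: the manual reset of the old globals is dropped, and the Tokenizer is simply constructed before (or alongside) the input stream, because its constructor now performs the initialization that "tokens = tokens_back = NULL;" used to. A sketch of the resulting fixture helper, assuming the include path and surrounding test class used by these files:

    #include <sstream>
    #include "tokenize.h"   // assumed include; brings in the Tokenizer class

    static void check(const char code[])
    {
        // Tokenize.. (the constructor now does the reset the tests used to do by hand)
        Tokenizer tokenizer;
        std::istringstream istr(code);
        tokenizer.TokenizeCode(istr);
        tokenizer.SimplifyTokenList();

        // ...each test file then runs its particular check and compares the output...
    }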

View File

@@ -44,9 +44,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         // Fill function list

View File

@@ -36,9 +36,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

View File

@@ -40,9 +40,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         //SimplifyTokenList(); <- this can't be used as it removes 'unsigned'

View File

@@ -38,9 +38,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

View File

@@ -39,9 +39,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

View File

@@ -59,9 +59,8 @@ private:
                        " \"def\"\n";
         // tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(filedata);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode(istr, 0);
         // Expected result..
@@ -85,9 +84,8 @@ private:
         std::string filedata(10000,'a');
         // tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(filedata);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode(istr, 0);
         // Expected result..
@@ -109,9 +107,8 @@ private:
                        "}\n";
         // tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(filedata);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode(istr, 0);
         // Expected result..

View File

@@ -45,9 +45,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
+        Tokenizer tokenizer;
         std::istringstream istr(code);
-        Tokenizer tokenizer;
         tokenizer.TokenizeCode( istr );
         // Clear the error buffer..

View File

@@ -38,9 +38,8 @@ private:
     void check( const char code[] )
     {
         // Tokenize..
-        tokens = tokens_back = NULL;
-        std::istringstream istr(code);
         Tokenizer tokenizer;
+        std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

View File

@@ -25,7 +25,7 @@
 #include <locale>
 #include <fstream>
-#include <map>
 #include <string>
 #include <cstring>
@@ -60,12 +60,20 @@ TOKEN *Tokenizer::_gettok(TOKEN *tok, int index)
 //---------------------------------------------------------------------------
 std::vector<std::string> Files;
-TOKEN *tokens, *tokens_back;
+TOKEN *tokens;
 //---------------------------------------------------------------------------
+Tokenizer::Tokenizer()
+{
+    tokens = 0;
+    tokens_back = 0;
+}
+Tokenizer::~Tokenizer()
+{
+}
 //---------------------------------------------------------------------------
 // Defined symbols.
@@ -211,7 +219,7 @@ void Tokenizer::combine_2tokens(TOKEN *tok, const char str1[], const char str2[]
 // SizeOfType - gives the size of a type
 //---------------------------------------------------------------------------
-std::map<std::string, unsigned int> TypeSize;
 int Tokenizer::SizeOfType(const char type[])
 {
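
With TypeSize demoted from a file-scope map to a member, SizeOfType() reads it as ordinary per-instance state, so two Tokenizer objects can no longer overwrite each other's size table. The body below is only a sketch of that lookup under this commit's data layout, not the verbatim cppcheck implementation:

    // Sketch: assumes the Tokenizer class declaration from tokenize.h, where
    // TypeSize is now the member std::map<std::string, unsigned int>.
    int Tokenizer::SizeOfType(const char type[])
    {
        if (!type)
            return 0;

        std::map<std::string, unsigned int>::const_iterator it = TypeSize.find(type);
        if (it == TypeSize.end())
            return 0;                          // unknown type name
        return static_cast<int>(it->second);   // size registered during tokenization
    }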

View File

@@ -22,7 +22,8 @@
 #define tokenizeH
 //---------------------------------------------------------------------------
 #include <string>
+#include <map>
 #include <vector>
 #include <cstdlib>
 #include <cstring>
@@ -65,6 +66,8 @@ extern TOKEN *tokens, *tokens_back;
 class Tokenizer
 {
 public:
+    Tokenizer();
+    ~Tokenizer();
     void Tokenize(std::istream &code, const char FileName[]);
@@ -83,27 +86,32 @@ public:
     static const char *getstr(const TOKEN *tok, int index);
     // Return size.
-    static int SizeOfType(const char type[]);
+    int SizeOfType(const char type[]);
+    void initTokens();
     std::vector<std::string> _files;
     TOKEN *_tokens;
+    TOKEN *_tokens_back;
 private:
-    static void Define(const char Name[], const char Value[]);
+    void Define(const char Name[], const char Value[]);
-    static void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno);
+    void addtoken(const char str[], const unsigned int lineno, const unsigned int fileno);
-    static void combine_2tokens(TOKEN *tok, const char str1[], const char str2[]);
+    void combine_2tokens(TOKEN *tok, const char str1[], const char str2[]);
-    static void DeleteNextToken(TOKEN *tok);
+    void DeleteNextToken(TOKEN *tok);
-    static TOKEN *_gettok(TOKEN *tok, int index);
+    TOKEN *_gettok(TOKEN *tok, int index);
-    static void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n);
+    void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n);
+    TOKEN *tokens_back;
+    std::map<std::string, unsigned int> TypeSize;
 };
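
Taken together, the header now exposes a purely per-instance API, which is what the updated test fixtures above rely on. A minimal usage sketch, assuming tokenize.h is on the include path and using only calls that appear in this commit:

    #include <sstream>
    #include "tokenize.h"

    int main()
    {
        const char code[] = "int abc[10];";

        Tokenizer tokenizer;                  // constructor nulls the token list state
        std::istringstream istr(code);
        tokenizer.Tokenize(istr, "test.cpp");
        tokenizer.SimplifyTokenList();

        // Size queries go through the instance rather than Tokenizer::SizeOfType().
        int size = tokenizer.SizeOfType("int");
        (void)size;
        return 0;
    }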