Refactoring: Renamed some member variables: variable -> _variable
This commit is contained in:
parent 0b2e7a0ef3
commit 29a1468523
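The rename applies a leading-underscore convention to private member variables across the checker classes, the tokenizer, and the test fixtures (CallStack -> _callStack, functions -> _functions, listallocfunc -> _listAllocFunc, errout -> _errout, Files -> _files, tokens_back -> _tokensBack, dsymlist -> _dsymlist, TypeSize -> _typeSize, FunctionList -> _functionList), and also renames FillFunctionList() to fillFunctionList(). A minimal sketch of the convention, using a hypothetical Example class rather than any file touched by this commit:

// Hypothetical sketch (not from this commit) of the naming convention being applied:
// private data members get a leading underscore so they read distinctly from
// locals and parameters at the point of use.
#include <cstddef>
#include <string>
#include <vector>

class Example
{
public:
    void addFile(const std::string &name)
    {
        _files.push_back(name);          // was: Files.push_back(name);
    }

    std::size_t fileCount() const
    {
        return _files.size();            // the underscore marks this as a member
    }

private:
    std::vector<std::string> _files;     // was: std::vector<std::string> Files;
};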
@@ -32,7 +32,7 @@
 //---------------------------------------------------------------------------

-// CallStack used when parsing into subfunctions.
+// _callStack used when parsing into subfunctions.


 CheckBufferOverrunClass::CheckBufferOverrunClass( const Tokenizer *tokenizer, ErrorLogger *errorLogger )

@@ -51,7 +51,7 @@ void CheckBufferOverrunClass::ReportError(const TOKEN *tok, const char errmsg[])
 {
     std::ostringstream ostr;
     std::list<const TOKEN *>::const_iterator it;
-    for ( it = CallStack.begin(); it != CallStack.end(); it++ )
+    for ( it = _callStack.begin(); it != _callStack.end(); it++ )
         ostr << _tokenizer->fileLine(*it ) << " -> ";
     ostr << _tokenizer->fileLine(tok) << ": " << errmsg;
     _errorLogger->reportErr(ostr.str());

@@ -219,7 +219,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_CheckScope( const TOKEN *tok, c
     if ( TOKEN::Match( tok, "%var% (" ) )
     {
         // Don't make recursive checking..
-        if (std::find(CallStack.begin(), CallStack.end(), tok) != CallStack.end())
+        if (std::find(_callStack.begin(), _callStack.end(), tok) != _callStack.end())
             continue;

         unsigned int parlevel = 0, par = 0;

@@ -287,9 +287,9 @@ void CheckBufferOverrunClass::CheckBufferOverrun_CheckScope( const TOKEN *tok, c
         ftok = ftok ? ftok->next : 0;

         // Check variable usage in the function..
-        CallStack.push_back( tok );
+        _callStack.push_back( tok );
         CheckBufferOverrun_CheckScope( ftok, parname, size, total_size );
-        CallStack.pop_back();
+        _callStack.pop_back();

         // break out..
         break;

@@ -345,7 +345,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_LocalVariable()
            continue;

        // The callstack is empty
-       CallStack.clear();
+       _callStack.clear();
        CheckBufferOverrun_CheckScope( tok->tokAt(5), varname, size, total_size );
    }
 }

@@ -45,7 +45,7 @@ private:

    const Tokenizer *_tokenizer;
    ErrorLogger *_errorLogger;
-   std::list<const TOKEN *> CallStack;
+   std::list<const TOKEN *> _callStack;
 };

 //---------------------------------------------------------------------------

@@ -33,7 +33,6 @@
 CheckFunctionUsage::CheckFunctionUsage( ErrorLogger *errorLogger )
 {
     _errorLogger = errorLogger;
-    functions.clear();
 }

 CheckFunctionUsage::~CheckFunctionUsage()

@@ -72,7 +71,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )

         if ( funcname )
         {
-            FunctionUsage &func = functions[ funcname->str ];
+            FunctionUsage &func = _functions[ funcname->str ];

             // No filename set yet..
             if (func.filename.empty())

@@ -122,7 +121,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )

         if ( funcname )
         {
-            FunctionUsage &func = functions[ funcname->str ];
+            FunctionUsage &func = _functions[ funcname->str ];

             if ( func.filename.empty() || func.filename == "+" )
                 func.usedOtherFile = true;

@@ -138,7 +137,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )

 void CheckFunctionUsage::check()
 {
-    for ( std::map<std::string, FunctionUsage>::const_iterator it = functions.begin(); it != functions.end(); ++it )
+    for ( std::map<std::string, FunctionUsage>::const_iterator it = _functions.begin(); it != _functions.end(); ++it )
     {
         const FunctionUsage &func = it->second;
         if ( func.usedOtherFile || func.filename.empty() )

@@ -58,7 +58,7 @@ private:
        bool usedOtherFile;
    };

-   std::map<std::string, FunctionUsage> functions;
+   std::map<std::string, FunctionUsage> _functions;
 };

 //---------------------------------------------------------------------------

@@ -132,8 +132,8 @@ CheckMemoryLeakClass::AllocType CheckMemoryLeakClass::GetAllocationType( const T
         return POPEN;

     // Userdefined allocation function..
-    std::list<AllocFunc>::const_iterator it = listallocfunc.begin();
-    while ( it != listallocfunc.end() )
+    std::list<AllocFunc>::const_iterator it = _listAllocFunc.begin();
+    while ( it != _listAllocFunc.end() )
     {
         if ( strcmp(tok2->str, it->funcname) == 0 )
             return it->alloctype;

@@ -1090,7 +1090,7 @@ void CheckMemoryLeakClass::CheckMemoryLeak_ClassMembers_Variable( const std::vec

 void CheckMemoryLeakClass::CheckMemoryLeak()
 {
-    listallocfunc.clear();
+    _listAllocFunc.clear();

     // Check for memory leaks inside functions..
     CheckMemoryLeak_InFunction();

@@ -77,7 +77,7 @@ private:
    const Tokenizer *_tokenizer;
    ErrorLogger *_errorLogger;
    Settings _settings;
-   std::list<AllocFunc> listallocfunc;
+   std::list<AllocFunc> _listAllocFunc;
 };

 //---------------------------------------------------------------------------

FileLister.h
@@ -37,19 +37,20 @@

 class FileLister
 {
-private:
-    static bool AcceptFile( const std::string &filename );
-
-#ifdef __BORLANDC__
-    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
-#endif
-
-#ifdef _MSC_VER
-    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
-#endif
-
 public:
-    static void RecursiveAddFiles( std::vector<std::string> &filenames, const std::string &path, bool recursive );
+    static void RecursiveAddFiles( std::vector<std::string> &filenames, const std::string &path, bool recursive );
+
+private:
+    static bool AcceptFile( const std::string &filename );
+
+#ifdef __BORLANDC__
+    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
+#endif
+
+#ifdef _MSC_VER
+    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
+#endif
+
 };

 #endif // #ifndef FILELISTER_H

cppcheck.cpp
@@ -132,7 +132,7 @@ void CppCheck::check(int argc, char* argv[])

     for (unsigned int c = 0; c < filenames.size(); c++)
     {
-        errout.str("");
+        _errout.str("");
         std::string fname = filenames[c];

         // If only errors are printed, print filename after the check

@@ -148,31 +148,31 @@ void CppCheck::check(int argc, char* argv[])

         if (_settings._errorsOnly)
         {
-            if ( !errout.str().empty() )
+            if ( !_errout.str().empty() )
             {
                 std::cout << "Errors found in " << fname << ":\n";
-                std::cerr << errout.str();
+                std::cerr << _errout.str();
             }
         }
         else
         {
-            if ( errout.str().empty() )
+            if ( _errout.str().empty() )
                 std::cout << "No errors found\n";
             else
-                std::cerr << errout.str();
+                std::cerr << _errout.str();
         }
     }

     // This generates false positives - especially for libraries
     if ( checkFunctionUsage )
     {
-        errout.str("");
+        _errout.str("");
         std::cout << "Checking usage of global functions (this may take several minutes)..\n";
         checkFunctionUsage->check();
-        if ( ! errout.str().empty() )
+        if ( ! _errout.str().empty() )
         {
             std::cerr << "\n";
-            std::cerr << errout.str();
+            std::cerr << _errout.str();
         }
     }

@@ -195,7 +195,7 @@ void CppCheck::checkFile(const std::string &code, const char FileName[], Setting
         _tokenizer.Tokenize(istr, FileName);
     }

-    _tokenizer.FillFunctionList();
+    _tokenizer.fillFunctionList();

     // Check that the memsets are valid.
     // The 'memset' function can do dangerous things if used wrong.

@@ -311,7 +311,7 @@ void CppCheck::reportErr( const std::string &errmsg)
             return;
         _errorList.push_back( errmsg );
     }
-    errout << errmsg << std::endl;
+    _errout << errmsg << std::endl;
 }

 void CppCheck::reportErr( const TOKEN *token, const std::string &errmsg)

@@ -41,11 +41,11 @@ class CppCheck : public ErrorLogger

 private:
     void checkFile(const std::string &code, const char FileName[], Settings &_settings, CheckFunctionUsage *checkFunctionUsage);
-    std::list<std::string> _errorList;
-    std::ostringstream errout;
-
     void reportErr( const std::string &errmsg);
     void reportErr( const TOKEN *token, const std::string &errmsg);
+
+    std::list<std::string> _errorList;
+    std::ostringstream _errout;
 };

 #endif // CPPCHECK_H

@@ -40,7 +40,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

@@ -49,7 +49,7 @@ private:
         Settings settings;
         settings._checkCodingStyle = true;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();

         // Clear the error buffer..
         errout.str("");

@@ -45,7 +45,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );

@@ -53,7 +53,7 @@ private:
         Settings settings;
         settings._checkCodingStyle = true;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();

         // Clear the error buffer..
         errout.str("");

@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

@@ -41,7 +41,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         //SimplifyTokenList(); <- this can't be used as it removes 'unsigned'

@@ -40,7 +40,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

@@ -52,7 +52,7 @@ private:
         settings._checkCodingStyle = true;
         settings._showAll = false;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();
         CheckMemoryLeakClass checkMemoryLeak( &tokenizer, settings, this );
         checkMemoryLeak.CheckMemoryLeak();
     }

@@ -65,7 +65,7 @@ private:

         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);

@@ -89,7 +89,7 @@ private:

         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);

@@ -111,7 +111,7 @@ private:

         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);

@@ -142,14 +142,14 @@ private:
             "{ }\n";
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode(istr, 0);

-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();

-        ASSERT_EQUALS( 1, tokenizer.FunctionList.size() );
-        ASSERT_EQUALS( std::string("b"), tokenizer.FunctionList[0]->str );
+        ASSERT_EQUALS( 1, tokenizer._functionList.size() );
+        ASSERT_EQUALS( std::string("b"), tokenizer._functionList[0]->str );
     }
 };

@@ -46,7 +46,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );

@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();

tokenize.cpp
@@ -51,8 +51,8 @@
 Tokenizer::Tokenizer()
 {
     _tokens = 0;
-    tokens_back = 0;
-    dsymlist = 0;
+    _tokensBack = 0;
+    _dsymlist = 0;
 }

 Tokenizer::~Tokenizer()

@@ -90,7 +90,7 @@ const TOKEN *Tokenizer::tokens() const

 const std::vector<std::string> *Tokenizer::getFiles() const
 {
-    return &Files;
+    return &_files;
 }

 void Tokenizer::Define(const char Name[], const char Value[])

@@ -133,8 +133,8 @@ void Tokenizer::Define(const char Name[], const char Value[])
     memset(NewSym, 0, sizeof(DefineSymbol));
     NewSym->name = _strdup(Name);
     NewSym->value = strValue;
-    NewSym->next = dsymlist;
-    dsymlist = NewSym;
+    NewSym->next = _dsymlist;
+    _dsymlist = NewSym;
 }
 //---------------------------------------------------------------------------

@@ -163,18 +163,18 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
     newtoken->setstr(str2.str().c_str());
     newtoken->linenr = lineno;
     newtoken->FileIndex = fileno;
-    if (tokens_back)
+    if (_tokensBack)
     {
-        tokens_back->next = newtoken;
-        tokens_back = newtoken;
+        _tokensBack->next = newtoken;
+        _tokensBack = newtoken;
     }
     else
     {
-        _tokens = tokens_back = newtoken;
+        _tokens = _tokensBack = newtoken;
     }

     // Check if str is defined..
-    for (DefineSymbol *sym = dsymlist; sym; sym = sym->next)
+    for (DefineSymbol *sym = _dsymlist; sym; sym = sym->next)
     {
         if (strcmp(str,sym->name)==0)
         {

@@ -196,8 +196,8 @@ int Tokenizer::SizeOfType(const char type[]) const
     if (!type)
         return 0;

-    std::map<std::string, unsigned int>::const_iterator it = TypeSize.find(type);
-    if ( it == TypeSize.end() )
+    std::map<std::string, unsigned int>::const_iterator it = _typeSize.find(type);
+    if ( it == _typeSize.end() )
         return 0;

     return it->second;

@@ -234,17 +234,17 @@ void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)

 void Tokenizer::Tokenize(std::istream &code, const char FileName[])
 {
     // Has this file been tokenized already?
-    for (unsigned int i = 0; i < Files.size(); i++)
+    for (unsigned int i = 0; i < _files.size(); i++)
     {
-        if ( SameFileName( Files[i].c_str(), FileName ) )
+        if ( SameFileName( _files[i].c_str(), FileName ) )
             return;
     }

-    // The "Files" vector remembers what files have been tokenized..
-    Files.push_back(FileName);
+    // The "_files" vector remembers what files have been tokenized..
+    _files.push_back(FileName);

     // Tokenize the file..
-    TokenizeCode( code, (unsigned int)(Files.size() - 1) );
+    TokenizeCode( code, (unsigned int)(_files.size() - 1) );
 }
 //---------------------------------------------------------------------------

@@ -290,9 +290,9 @@ void Tokenizer::TokenizeCode(std::istream &code, const unsigned int FileIndex)
             line.erase(line.find("\""));

             // Relative path..
-            if (Files.back().find_first_of("\\/") != std::string::npos)
+            if (_files.back().find_first_of("\\/") != std::string::npos)
             {
-                std::string path = Files.back();
+                std::string path = _files.back();
                 path.erase( 1 + path.find_last_of("\\/") );
                 line = path + line;
             }

@@ -636,24 +636,24 @@ void Tokenizer::SimplifyTokenList()
     }


-    // Fill the map TypeSize..
-    TypeSize.clear();
-    TypeSize["char"] = sizeof(char);
-    TypeSize["short"] = sizeof(short);
-    TypeSize["int"] = sizeof(int);
-    TypeSize["long"] = sizeof(long);
-    TypeSize["float"] = sizeof(float);
-    TypeSize["double"] = sizeof(double);
+    // Fill the map _typeSize..
+    _typeSize.clear();
+    _typeSize["char"] = sizeof(char);
+    _typeSize["short"] = sizeof(short);
+    _typeSize["int"] = sizeof(int);
+    _typeSize["long"] = sizeof(long);
+    _typeSize["float"] = sizeof(float);
+    _typeSize["double"] = sizeof(double);
     for (TOKEN *tok = _tokens; tok; tok = tok->next)
     {
         if (TOKEN::Match(tok,"class %var%"))
         {
-            TypeSize[tok->strAt(1)] = 11;
+            _typeSize[tok->strAt(1)] = 11;
         }

         else if (TOKEN::Match(tok, "struct %var%"))
         {
-            TypeSize[tok->strAt(1)] = 13;
+            _typeSize[tok->strAt(1)] = 13;
         }
     }

@@ -1021,20 +1021,20 @@ bool Tokenizer::simplifyConditions()

 const TOKEN *Tokenizer::GetFunctionTokenByName( const char funcname[] ) const
 {
-    for ( unsigned int i = 0; i < FunctionList.size(); ++i )
+    for ( unsigned int i = 0; i < _functionList.size(); ++i )
     {
-        if ( strcmp( FunctionList[i]->str, funcname ) == 0 )
+        if ( strcmp( _functionList[i]->str, funcname ) == 0 )
         {
-            return FunctionList[i];
+            return _functionList[i];
         }
     }
     return NULL;
 }


-void Tokenizer::FillFunctionList()
+void Tokenizer::fillFunctionList()
 {
-    FunctionList.clear();
+    _functionList.clear();

     bool staticfunc = false;
     bool classfunc = false;

@@ -1082,7 +1082,7 @@ void Tokenizer::FillFunctionList()
                 {
                     if ( TOKEN::Match(tok2, ") {") )
                     {
-                        FunctionList.push_back( tok );
+                        _functionList.push_back( tok );
                         tok = tok2;
                     }
                     else

@@ -1097,17 +1097,17 @@ void Tokenizer::FillFunctionList()
         }
     }

-    // If the FunctionList functions with duplicate names, remove them
+    // If the _functionList functions with duplicate names, remove them
     // TODO this will need some better handling
-    for ( unsigned int func1 = 0; func1 < FunctionList.size(); )
+    for ( unsigned int func1 = 0; func1 < _functionList.size(); )
     {
         bool hasDuplicates = false;
-        for ( unsigned int func2 = func1 + 1; func2 < FunctionList.size(); )
+        for ( unsigned int func2 = func1 + 1; func2 < _functionList.size(); )
         {
-            if ( strcmp(FunctionList[func1]->str, FunctionList[func2]->str) == 0 )
+            if ( strcmp(_functionList[func1]->str, _functionList[func2]->str) == 0 )
             {
                 hasDuplicates = true;
-                FunctionList.erase( FunctionList.begin() + func2 );
+                _functionList.erase( _functionList.begin() + func2 );
             }
             else
             {

@@ -1121,7 +1121,7 @@ void Tokenizer::FillFunctionList()
         }
         else
         {
-            FunctionList.erase( FunctionList.begin() + func1 );
+            _functionList.erase( _functionList.begin() + func1 );
         }
     }
 }

@@ -1138,18 +1138,18 @@ void Tokenizer::DeallocateTokens()
 {
     deleteTokens( _tokens );
     _tokens = 0;
-    tokens_back = 0;
+    _tokensBack = 0;

-    while (dsymlist)
+    while (_dsymlist)
     {
-        struct DefineSymbol *next = dsymlist->next;
-        free(dsymlist->name);
-        free(dsymlist->value);
-        delete dsymlist;
-        dsymlist = next;
+        struct DefineSymbol *next = _dsymlist->next;
+        free(_dsymlist->name);
+        free(_dsymlist->value);
+        delete _dsymlist;
+        _dsymlist = next;
     }

-    Files.clear();
+    _files.clear();
 }

 void Tokenizer::deleteTokens(TOKEN *tok)

@@ -1182,7 +1182,7 @@ const char *Tokenizer::getParameterName( const TOKEN *ftok, int par )
 std::string Tokenizer::fileLine( const TOKEN *tok ) const
 {
     std::ostringstream ostr;
-    ostr << "[" << Files.at(tok->FileIndex) << ":" << tok->linenr << "]";
+    ostr << "[" << _files.at(tok->FileIndex) << ":" << tok->linenr << "]";
     return ostr.str();
 }

tokenize.h
@@ -64,7 +64,7 @@ public:

     const std::vector<std::string> *getFiles() const;

-    void FillFunctionList();
+    void fillFunctionList();
     const TOKEN *GetFunctionTokenByName( const char funcname[] ) const;
     void settings( const Settings &settings );
     const TOKEN *tokens() const;

@@ -91,14 +91,12 @@ private:

     void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n);

-    TOKEN *tokens_back;
-    std::map<std::string, unsigned int> TypeSize;
-    std::vector<const TOKEN *> FunctionList;
-    std::vector<std::string> Files;
+    TOKEN *_tokensBack;
+    std::map<std::string, unsigned int> _typeSize;
+    std::vector<const TOKEN *> _functionList;
+    std::vector<std::string> _files;
     Settings _settings;

-
-
-    struct DefineSymbol * dsymlist;
+    struct DefineSymbol * _dsymlist;
     TOKEN *_tokens;
 };
