diff --git a/CheckBufferOverrun.cpp b/CheckBufferOverrun.cpp
index 769d83f8a..178375552 100644
--- a/CheckBufferOverrun.cpp
+++ b/CheckBufferOverrun.cpp
@@ -32,7 +32,7 @@
 
 //---------------------------------------------------------------------------
 
-// CallStack used when parsing into subfunctions.
+// _callStack used when parsing into subfunctions.
 
 
 CheckBufferOverrunClass::CheckBufferOverrunClass( const Tokenizer *tokenizer, ErrorLogger *errorLogger )
@@ -51,7 +51,7 @@ void CheckBufferOverrunClass::ReportError(const TOKEN *tok, const char errmsg[])
 {
     std::ostringstream ostr;
     std::list<const TOKEN *>::const_iterator it;
-    for ( it = CallStack.begin(); it != CallStack.end(); it++ )
+    for ( it = _callStack.begin(); it != _callStack.end(); it++ )
         ostr << _tokenizer->fileLine(*it ) << " -> ";
     ostr << _tokenizer->fileLine(tok) << ": " << errmsg;
     _errorLogger->reportErr(ostr.str());
@@ -219,7 +219,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_CheckScope( const TOKEN *tok, c
         if ( TOKEN::Match( tok, "%var% (" ) )
         {
             // Don't make recursive checking..
-            if (std::find(CallStack.begin(), CallStack.end(), tok) != CallStack.end())
+            if (std::find(_callStack.begin(), _callStack.end(), tok) != _callStack.end())
                 continue;
 
             unsigned int parlevel = 0, par = 0;
@@ -287,9 +287,9 @@ void CheckBufferOverrunClass::CheckBufferOverrun_CheckScope( const TOKEN *tok, c
                 ftok = ftok ? ftok->next : 0;
 
                 // Check variable usage in the function..
-                CallStack.push_back( tok );
+                _callStack.push_back( tok );
                 CheckBufferOverrun_CheckScope( ftok, parname, size, total_size );
-                CallStack.pop_back();
+                _callStack.pop_back();
 
                 // break out..
                 break;
@@ -345,7 +345,7 @@ void CheckBufferOverrunClass::CheckBufferOverrun_LocalVariable()
             continue;
 
         // The callstack is empty
-        CallStack.clear();
+        _callStack.clear();
         CheckBufferOverrun_CheckScope( tok->tokAt(5), varname, size, total_size );
     }
 }
diff --git a/CheckBufferOverrun.h b/CheckBufferOverrun.h
index 1c8196100..a891f9e9f 100644
--- a/CheckBufferOverrun.h
+++ b/CheckBufferOverrun.h
@@ -45,7 +45,7 @@ private:
     const Tokenizer *_tokenizer;
     ErrorLogger *_errorLogger;
 
-    std::list<const TOKEN *> CallStack;
+    std::list<const TOKEN *> _callStack;
 };
 
 //---------------------------------------------------------------------------
diff --git a/CheckFunctionUsage.cpp b/CheckFunctionUsage.cpp
index 6fed913f0..a25213475 100644
--- a/CheckFunctionUsage.cpp
+++ b/CheckFunctionUsage.cpp
@@ -33,7 +33,6 @@ CheckFunctionUsage::CheckFunctionUsage( ErrorLogger *errorLogger )
 {
     _errorLogger = errorLogger;
-    functions.clear();
 }
 
 CheckFunctionUsage::~CheckFunctionUsage()
@@ -72,7 +71,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )
 
         if ( funcname )
         {
-            FunctionUsage &func = functions[ funcname->str ];
+            FunctionUsage &func = _functions[ funcname->str ];
 
             // No filename set yet..
             if (func.filename.empty())
@@ -122,7 +121,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )
 
         if ( funcname )
         {
-            FunctionUsage &func = functions[ funcname->str ];
+            FunctionUsage &func = _functions[ funcname->str ];
 
             if ( func.filename.empty() || func.filename == "+" )
                 func.usedOtherFile = true;
@@ -138,7 +137,7 @@ void CheckFunctionUsage::parseTokens( const Tokenizer &tokenizer )
 
 void CheckFunctionUsage::check()
 {
-    for ( std::map<std::string, FunctionUsage>::const_iterator it = functions.begin(); it != functions.end(); ++it )
+    for ( std::map<std::string, FunctionUsage>::const_iterator it = _functions.begin(); it != _functions.end(); ++it )
     {
         const FunctionUsage &func = it->second;
         if ( func.usedOtherFile || func.filename.empty() )
diff --git a/CheckFunctionUsage.h b/CheckFunctionUsage.h
index 07295f8e9..34976f70d 100644
--- a/CheckFunctionUsage.h
+++ b/CheckFunctionUsage.h
@@ -58,7 +58,7 @@ private:
         bool usedOtherFile;
     };
 
-    std::map<std::string, FunctionUsage> functions;
+    std::map<std::string, FunctionUsage> _functions;
 };
 
 //---------------------------------------------------------------------------
diff --git a/CheckMemoryLeak.cpp b/CheckMemoryLeak.cpp
index aa61ba9c9..f5fb3d0ba 100644
--- a/CheckMemoryLeak.cpp
+++ b/CheckMemoryLeak.cpp
@@ -132,8 +132,8 @@ CheckMemoryLeakClass::AllocType CheckMemoryLeakClass::GetAllocationType( const T
         return POPEN;
 
     // Userdefined allocation function..
-    std::list<AllocFunc>::const_iterator it = listallocfunc.begin();
-    while ( it != listallocfunc.end() )
+    std::list<AllocFunc>::const_iterator it = _listAllocFunc.begin();
+    while ( it != _listAllocFunc.end() )
    {
         if ( strcmp(tok2->str, it->funcname) == 0 )
             return it->alloctype;
@@ -1090,7 +1090,7 @@ void CheckMemoryLeakClass::CheckMemoryLeak_ClassMembers_Variable( const std::vec
 
 void CheckMemoryLeakClass::CheckMemoryLeak()
 {
-    listallocfunc.clear();
+    _listAllocFunc.clear();
 
     // Check for memory leaks inside functions..
    CheckMemoryLeak_InFunction();
diff --git a/CheckMemoryLeak.h b/CheckMemoryLeak.h
index 26ac52cc8..52b8ec5c9 100644
--- a/CheckMemoryLeak.h
+++ b/CheckMemoryLeak.h
@@ -77,7 +77,7 @@ private:
     const Tokenizer *_tokenizer;
     ErrorLogger *_errorLogger;
     Settings _settings;
-    std::list<AllocFunc> listallocfunc;
+    std::list<AllocFunc> _listAllocFunc;
 };
 
 //---------------------------------------------------------------------------
diff --git a/FileLister.h b/FileLister.h
index 41e85c89f..c545df7ec 100644
--- a/FileLister.h
+++ b/FileLister.h
@@ -37,19 +37,20 @@ class FileLister
 {
-private:
-    static bool AcceptFile( const std::string &filename );
-
-#ifdef __BORLANDC__
-    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
-#endif
-
-#ifdef _MSC_VER
-    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
-#endif
-
 public:
-    static void RecursiveAddFiles( std::vector<std::string> &filenames, const std::string &path, bool recursive );
+    static void RecursiveAddFiles( std::vector<std::string> &filenames, const std::string &path, bool recursive );
+
+private:
+    static bool AcceptFile( const std::string &filename );
+
+#ifdef __BORLANDC__
+    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
+#endif
+
+#ifdef _MSC_VER
+    static void AddFiles( std::vector<std::string> &filenames, const std::string &path, const std::string &pattern );
+#endif
+
 };
 
 #endif // #ifndef FILELISTER_H
diff --git a/cppcheck.cpp b/cppcheck.cpp
index 97a02e8c7..a3b420da4 100644
--- a/cppcheck.cpp
+++ b/cppcheck.cpp
@@ -132,7 +132,7 @@ void CppCheck::check(int argc, char* argv[])
 
     for (unsigned int c = 0; c < filenames.size(); c++)
     {
-        errout.str("");
+        _errout.str("");
         std::string fname = filenames[c];
 
         // If only errors are printed, print filename after the check
@@ -148,31 +148,31 @@ void CppCheck::check(int argc, char* argv[])
 
         if (_settings._errorsOnly)
         {
-            if ( !errout.str().empty() )
+            if ( !_errout.str().empty() )
             {
                 std::cout << "Errors found in " << fname << ":\n";
-                std::cerr << errout.str();
+                std::cerr << _errout.str();
             }
         }
         else
         {
-            if ( errout.str().empty() )
+            if ( _errout.str().empty() )
                 std::cout << "No errors found\n";
             else
-                std::cerr << errout.str();
+                std::cerr << _errout.str();
         }
     }
 
     // This generates false positives - especially for libraries
     if ( checkFunctionUsage )
     {
-        errout.str("");
+        _errout.str("");
         std::cout << "Checking usage of global functions (this may take several minutes)..\n";
         checkFunctionUsage->check();
-        if ( ! errout.str().empty() )
+        if ( ! _errout.str().empty() )
         {
             std::cerr << "\n";
-            std::cerr << errout.str();
+            std::cerr << _errout.str();
         }
     }
 
@@ -195,7 +195,7 @@ void CppCheck::checkFile(const std::string &code, const char FileName[], Setting
         _tokenizer.Tokenize(istr, FileName);
     }
 
-    _tokenizer.FillFunctionList();
+    _tokenizer.fillFunctionList();
 
     // Check that the memsets are valid.
     // The 'memset' function can do dangerous things if used wrong.
@@ -311,7 +311,7 @@ void CppCheck::reportErr( const std::string &errmsg)
             return;
         _errorList.push_back( errmsg );
     }
-    errout << errmsg << std::endl;
+    _errout << errmsg << std::endl;
 }
 
 void CppCheck::reportErr( const TOKEN *token, const std::string &errmsg)
diff --git a/cppcheck.h b/cppcheck.h
index 52538c59b..d61b74fcb 100644
--- a/cppcheck.h
+++ b/cppcheck.h
@@ -41,11 +41,11 @@ class CppCheck : public ErrorLogger
 private:
     void checkFile(const std::string &code, const char FileName[], Settings &_settings, CheckFunctionUsage *checkFunctionUsage);
 
-    std::list<std::string> _errorList;
-    std::ostringstream errout;
-
     void reportErr( const std::string &errmsg);
     void reportErr( const TOKEN *token, const std::string &errmsg);
+
+    std::list<std::string> _errorList;
+    std::ostringstream _errout;
 };
 
 #endif // CPPCHECK_H
diff --git a/testbufferoverrun.cpp b/testbufferoverrun.cpp
index bd1c0f169..2850cd392 100644
--- a/testbufferoverrun.cpp
+++ b/testbufferoverrun.cpp
@@ -40,7 +40,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
@@ -49,7 +49,7 @@ private:
         Settings settings;
         settings._checkCodingStyle = true;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();
 
         // Clear the error buffer..
         errout.str("");
diff --git a/testcharvar.cpp b/testcharvar.cpp
index be1436b87..ddf0a2d87 100644
--- a/testcharvar.cpp
+++ b/testcharvar.cpp
@@ -45,7 +45,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
 
@@ -53,7 +53,7 @@ private:
         Settings settings;
         settings._checkCodingStyle = true;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();
 
         // Clear the error buffer..
         errout.str("");
diff --git a/testconstructors.cpp b/testconstructors.cpp
index 08bb597f8..351fd733d 100644
--- a/testconstructors.cpp
+++ b/testconstructors.cpp
@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
diff --git a/testdivision.cpp b/testdivision.cpp
index 9e55675a9..5fd81ec46 100644
--- a/testdivision.cpp
+++ b/testdivision.cpp
@@ -41,7 +41,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         //SimplifyTokenList(); <- this can't be used as it removes 'unsigned'
diff --git a/testincompletestatement.cpp b/testincompletestatement.cpp
index e7ab6cdcd..59303e898 100644
--- a/testincompletestatement.cpp
+++ b/testincompletestatement.cpp
@@ -40,7 +40,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
diff --git a/testmemleak.cpp b/testmemleak.cpp
index 06c05db92..92022bec4 100644
--- a/testmemleak.cpp
+++ b/testmemleak.cpp
@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
@@ -52,7 +52,7 @@ private:
         settings._checkCodingStyle = true;
         settings._showAll = false;
         tokenizer.settings( settings );
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();
         CheckMemoryLeakClass checkMemoryLeak( &tokenizer, settings, this );
         checkMemoryLeak.CheckMemoryLeak();
     }
diff --git a/testtokenize.cpp b/testtokenize.cpp
index 659b86e7a..1f28b6798 100644
--- a/testtokenize.cpp
+++ b/testtokenize.cpp
@@ -65,7 +65,7 @@ private:
 
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);
 
@@ -89,7 +89,7 @@ private:
 
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);
 
@@ -111,7 +111,7 @@ private:
 
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(filedata);
         tokenizer.TokenizeCode(istr, 0);
 
@@ -142,14 +142,14 @@ private:
                        "{ }\n";
 
         // tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode(istr, 0);
 
-        tokenizer.FillFunctionList();
+        tokenizer.fillFunctionList();
 
-        ASSERT_EQUALS( 1, tokenizer.FunctionList.size() );
-        ASSERT_EQUALS( std::string("b"), tokenizer.FunctionList[0]->str );
+        ASSERT_EQUALS( 1, tokenizer._functionList.size() );
+        ASSERT_EQUALS( std::string("b"), tokenizer._functionList[0]->str );
     }
 };
diff --git a/testunusedprivfunc.cpp b/testunusedprivfunc.cpp
index f351a75c3..472df62c2 100644
--- a/testunusedprivfunc.cpp
+++ b/testunusedprivfunc.cpp
@@ -46,7 +46,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
 
diff --git a/testunusedvar.cpp b/testunusedvar.cpp
index c5d26094f..4fc604452 100644
--- a/testunusedvar.cpp
+++ b/testunusedvar.cpp
@@ -39,7 +39,7 @@ private:
     {
         // Tokenize..
         Tokenizer tokenizer;
-        tokenizer.Files.push_back( "test.cpp" );
+        tokenizer._files.push_back( "test.cpp" );
         std::istringstream istr(code);
         tokenizer.TokenizeCode( istr );
         tokenizer.SimplifyTokenList();
diff --git a/tokenize.cpp b/tokenize.cpp
index 32d31c4da..0401cdba1 100644
--- a/tokenize.cpp
+++ b/tokenize.cpp
@@ -51,8 +51,8 @@ Tokenizer::Tokenizer()
 {
     _tokens = 0;
-    tokens_back = 0;
-    dsymlist = 0;
+    _tokensBack = 0;
+    _dsymlist = 0;
 }
 
 Tokenizer::~Tokenizer()
@@ -90,7 +90,7 @@ const TOKEN *Tokenizer::tokens() const
 
 const std::vector<std::string> *Tokenizer::getFiles() const
 {
-    return &Files;
+    return &_files;
 }
 
 void Tokenizer::Define(const char Name[], const char Value[])
@@ -133,8 +133,8 @@ void Tokenizer::Define(const char Name[], const char Value[])
     memset(NewSym, 0, sizeof(DefineSymbol));
     NewSym->name = _strdup(Name);
     NewSym->value = strValue;
-    NewSym->next = dsymlist;
-    dsymlist = NewSym;
+    NewSym->next = _dsymlist;
+    _dsymlist = NewSym;
 }
 //---------------------------------------------------------------------------
@@ -163,18 +163,18 @@ void Tokenizer::addtoken(const char str[], const unsigned int lineno, const unsi
     newtoken->setstr(str2.str().c_str());
     newtoken->linenr = lineno;
     newtoken->FileIndex = fileno;
-    if (tokens_back)
+    if (_tokensBack)
     {
-        tokens_back->next = newtoken;
-        tokens_back = newtoken;
+        _tokensBack->next = newtoken;
+        _tokensBack = newtoken;
     }
     else
     {
-        _tokens = tokens_back = newtoken;
+        _tokens = _tokensBack = newtoken;
     }
 
     // Check if str is defined..
-    for (DefineSymbol *sym = dsymlist; sym; sym = sym->next)
+    for (DefineSymbol *sym = _dsymlist; sym; sym = sym->next)
     {
         if (strcmp(str,sym->name)==0)
         {
@@ -196,8 +196,8 @@ int Tokenizer::SizeOfType(const char type[]) const
     if (!type)
         return 0;
 
-    std::map<std::string, unsigned int>::const_iterator it = TypeSize.find(type);
-    if ( it == TypeSize.end() )
+    std::map<std::string, unsigned int>::const_iterator it = _typeSize.find(type);
+    if ( it == _typeSize.end() )
         return 0;
 
     return it->second;
@@ -234,17 +234,17 @@ void Tokenizer::InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n)
 
 void Tokenizer::Tokenize(std::istream &code, const char FileName[])
 {
     // Has this file been tokenized already?
-    for (unsigned int i = 0; i < Files.size(); i++)
+    for (unsigned int i = 0; i < _files.size(); i++)
     {
-        if ( SameFileName( Files[i].c_str(), FileName ) )
+        if ( SameFileName( _files[i].c_str(), FileName ) )
             return;
     }
 
-    // The "Files" vector remembers what files have been tokenized..
-    Files.push_back(FileName);
+    // The "_files" vector remembers what files have been tokenized..
+    _files.push_back(FileName);
 
     // Tokenize the file..
-    TokenizeCode( code, (unsigned int)(Files.size() - 1) );
+    TokenizeCode( code, (unsigned int)(_files.size() - 1) );
 }
 //---------------------------------------------------------------------------
@@ -290,9 +290,9 @@ void Tokenizer::TokenizeCode(std::istream &code, const unsigned int FileIndex)
                     line.erase(line.find("\""));
 
                     // Relative path..
-                    if (Files.back().find_first_of("\\/") != std::string::npos)
+                    if (_files.back().find_first_of("\\/") != std::string::npos)
                     {
-                        std::string path = Files.back();
+                        std::string path = _files.back();
                         path.erase( 1 + path.find_last_of("\\/") );
                         line = path + line;
                     }
@@ -636,24 +636,24 @@ void Tokenizer::SimplifyTokenList()
     }
 
-    // Fill the map TypeSize..
-    TypeSize.clear();
-    TypeSize["char"] = sizeof(char);
-    TypeSize["short"] = sizeof(short);
-    TypeSize["int"] = sizeof(int);
-    TypeSize["long"] = sizeof(long);
-    TypeSize["float"] = sizeof(float);
-    TypeSize["double"] = sizeof(double);
+    // Fill the map _typeSize..
+    _typeSize.clear();
+    _typeSize["char"] = sizeof(char);
+    _typeSize["short"] = sizeof(short);
+    _typeSize["int"] = sizeof(int);
+    _typeSize["long"] = sizeof(long);
+    _typeSize["float"] = sizeof(float);
+    _typeSize["double"] = sizeof(double);
 
     for (TOKEN *tok = _tokens; tok; tok = tok->next)
     {
         if (TOKEN::Match(tok,"class %var%"))
         {
-            TypeSize[tok->strAt(1)] = 11;
+            _typeSize[tok->strAt(1)] = 11;
         }
         else if (TOKEN::Match(tok, "struct %var%"))
         {
-            TypeSize[tok->strAt(1)] = 13;
+            _typeSize[tok->strAt(1)] = 13;
         }
     }
 
@@ -1021,20 +1021,20 @@ bool Tokenizer::simplifyConditions()
 
 const TOKEN *Tokenizer::GetFunctionTokenByName( const char funcname[] ) const
 {
-    for ( unsigned int i = 0; i < FunctionList.size(); ++i )
+    for ( unsigned int i = 0; i < _functionList.size(); ++i )
     {
-        if ( strcmp( FunctionList[i]->str, funcname ) == 0 )
+        if ( strcmp( _functionList[i]->str, funcname ) == 0 )
         {
-            return FunctionList[i];
+            return _functionList[i];
         }
     }
     return NULL;
 }
 
-void Tokenizer::FillFunctionList()
+void Tokenizer::fillFunctionList()
 {
-    FunctionList.clear();
+    _functionList.clear();
 
     bool staticfunc = false;
     bool classfunc = false;
@@ -1082,7 +1082,7 @@ void Tokenizer::FillFunctionList()
                 {
                     if ( TOKEN::Match(tok2, ") {") )
                     {
-                        FunctionList.push_back( tok );
+                        _functionList.push_back( tok );
                         tok = tok2;
                     }
                     else
@@ -1097,17 +1097,17 @@ void Tokenizer::FillFunctionList()
         }
     }
 
-    // If the FunctionList functions with duplicate names, remove them
+    // If the _functionList functions with duplicate names, remove them
    // TODO this will need some better handling
-    for ( unsigned int func1 = 0; func1 < FunctionList.size(); )
+    for ( unsigned int func1 = 0; func1 < _functionList.size(); )
     {
         bool hasDuplicates = false;
-        for ( unsigned int func2 = func1 + 1; func2 < FunctionList.size(); )
+        for ( unsigned int func2 = func1 + 1; func2 < _functionList.size(); )
         {
-            if ( strcmp(FunctionList[func1]->str, FunctionList[func2]->str) == 0 )
+            if ( strcmp(_functionList[func1]->str, _functionList[func2]->str) == 0 )
             {
                 hasDuplicates = true;
-                FunctionList.erase( FunctionList.begin() + func2 );
+                _functionList.erase( _functionList.begin() + func2 );
             }
             else
             {
@@ -1121,7 +1121,7 @@ void Tokenizer::FillFunctionList()
         }
         else
         {
-            FunctionList.erase( FunctionList.begin() + func1 );
+            _functionList.erase( _functionList.begin() + func1 );
         }
     }
 }
@@ -1138,18 +1138,18 @@ void Tokenizer::DeallocateTokens()
 {
     deleteTokens( _tokens );
     _tokens = 0;
-    tokens_back = 0;
+    _tokensBack = 0;
 
-    while (dsymlist)
+    while (_dsymlist)
     {
-        struct DefineSymbol *next = dsymlist->next;
-        free(dsymlist->name);
-        free(dsymlist->value);
-        delete dsymlist;
-        dsymlist = next;
+        struct DefineSymbol *next = _dsymlist->next;
+        free(_dsymlist->name);
+        free(_dsymlist->value);
+        delete _dsymlist;
+        _dsymlist = next;
     }
 
-    Files.clear();
+    _files.clear();
 }
 
 void Tokenizer::deleteTokens(TOKEN *tok)
@@ -1182,7 +1182,7 @@ const char *Tokenizer::getParameterName( const TOKEN *ftok, int par )
 
 std::string Tokenizer::fileLine( const TOKEN *tok ) const
 {
     std::ostringstream ostr;
-    ostr << "[" << Files.at(tok->FileIndex) << ":" << tok->linenr << "]";
+    ostr << "[" << _files.at(tok->FileIndex) << ":" << tok->linenr << "]";
     return ostr.str();
 }
diff --git a/tokenize.h b/tokenize.h
index 8d74ea087..6c4def3c5 100644
--- a/tokenize.h
+++ b/tokenize.h
@@ -64,7 +64,7 @@ public:
 
     const std::vector<std::string> *getFiles() const;
 
-    void FillFunctionList();
+    void fillFunctionList();
     const TOKEN *GetFunctionTokenByName( const char funcname[] ) const;
     void settings( const Settings &settings );
     const TOKEN *tokens() const;
@@ -91,14 +91,12 @@ private:
 
     void InsertTokens(TOKEN *dest, TOKEN *src, unsigned int n);
 
-    TOKEN *tokens_back;
-    std::map<std::string, unsigned int> TypeSize;
-    std::vector<const TOKEN *> FunctionList;
-    std::vector<std::string> Files;
+    TOKEN *_tokensBack;
+    std::map<std::string, unsigned int> _typeSize;
+    std::vector<const TOKEN *> _functionList;
+    std::vector<std::string> _files;
     Settings _settings;
-
-
-    struct DefineSymbol * dsymlist;
+    struct DefineSymbol * _dsymlist;
 
     TOKEN *_tokens;
 };
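
Note: the whole patch applies one mechanical renaming rule: private data members get a leading underscore and lowerCamelCase (CallStack -> _callStack, tokens_back -> _tokensBack, Files -> _files), FillFunctionList() becomes fillFunctionList() to match the other method names, and cppcheck.h and FileLister.h additionally regroup their declarations (public interface first, private details and data members after). Behaviour is unchanged. Below is a minimal sketch of that convention on a hypothetical class; the Widget name and its members are illustrative only and are not part of the cppcheck sources.

//---------------------------------------------------------------------------
// Illustrative example only -- not taken from cppcheck. It shows the naming
// rule the patch applies throughout: private data members carry a '_' prefix
// and use lowerCamelCase, so member accesses read differently from locals and
// parameters at the point of use.

#include <cstddef>
#include <list>
#include <string>

class Widget
{
public:
    void addName(const std::string &name)
    {
        _names.push_back(name);    // member access: '_' prefix
    }

    std::size_t nameCount() const
    {
        return _names.size();      // clearly a member, not a local
    }

private:
    std::list<std::string> _names; // was e.g. "Names" under the old style
};
//---------------------------------------------------------------------------

With the prefix in place, member accesses such as _callStack.push_back( tok ) in CheckBufferOverrun_CheckScope stand out from locals like tok and parlevel without any Hungarian notation or behavioural change.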