From bac8ed71272a6f1bb66b6eb0d8c6b5a21aac0bd8 Mon Sep 17 00:00:00 2001
From: PKEuS
Date: Mon, 16 Apr 2012 19:51:07 +0200
Subject: [PATCH] Refactored stringification of tokens:
- Function that stringifies one token: Token::stringify()
- Functions that stringify a list of tokens: Token::stringifyList()
-- Single and powerful "base" function, used by several "light" functions

Also refactored:
- testtokenize.cpp and testsimplifytokens.cpp: use the improved
  stringification functions instead of several local implementations
- Avoided redundant creation of std::string when using
  TestTokenizer::tokenizeAndStringify and in cmdlineparser.cpp
---
 cli/cmdlineparser.cpp       |   6 +-
 lib/checkother.cpp          |   8 +-
 lib/token.cpp               | 105 +++++++++----------
 lib/token.h                 |  28 ++++-
 lib/tokenize.cpp            |   8 +-
 test/testsimplifytokens.cpp |  67 ++----------
 test/testtokenize.cpp       | 197 ++++++++++--------------------
 7 files changed, 149 insertions(+), 270 deletions(-)

diff --git a/cli/cmdlineparser.cpp b/cli/cmdlineparser.cpp
index 7c9163073..a6571a001 100644
--- a/cli/cmdlineparser.cpp
+++ b/cli/cmdlineparser.cpp
@@ -148,7 +148,7 @@ bool CmdLineParser::ParseFromArgs(int argc, const char* const argv[])

             std::ifstream f(filename.c_str());
             if (!f.is_open()) {
-                PrintMessage("cppcheck: Couldn't open the file: \"" + std::string(filename) + "\".");
+                PrintMessage("cppcheck: Couldn't open the file: \"" + filename + "\".");
                 return false;
             }
             const std::string errmsg(_settings->nofail.parseFile(f));
@@ -164,7 +164,7 @@ bool CmdLineParser::ParseFromArgs(int argc, const char* const argv[])
             std::ifstream f(filename.c_str());
             if (!f.is_open()) {
                 std::string message("cppcheck: Couldn't open the file: \"");
-                message += std::string(filename);
+                message += filename;
                 message += "\".";
                 if (count(filename.begin(), filename.end(), ',') > 0 ||
                     count(filename.begin(), filename.end(), '.') > 1) {
@@ -604,7 +604,7 @@ bool CmdLineParser::ParseFromArgs(int argc, const char* const argv[])
                 _settings->platform(Settings::Unix64);
             else {
                 std::string message("cppcheck: error: unrecognized platform: \"");
-                message += argv[i];
+                message += platform;
                 message += "\".";
                 PrintMessage(message);
                 return false;
diff --git a/lib/checkother.cpp b/lib/checkother.cpp
index 01babac1d..a41a20422 100644
--- a/lib/checkother.cpp
+++ b/lib/checkother.cpp
@@ -2558,7 +2558,7 @@ void CheckOther::checkDuplicateIf()
         std::map<std::string, const Token*> expressionMap;

         // get the expression from the token stream
-        std::string expression = tok->tokAt(2)->stringify(tok->next()->link());
+        std::string expression = tok->tokAt(2)->stringifyList(tok->next()->link());

         // save the expression and its location
         expressionMap.insert(std::make_pair(expression, tok));
@@ -2570,7 +2570,7 @@ void CheckOther::checkDuplicateIf()
         while (Token::simpleMatch(tok1, "} else if (") &&
                Token::simpleMatch(tok1->linkAt(3), ") {")) {
             // get the expression from the token stream
-            expression = tok1->tokAt(4)->stringify(tok1->linkAt(3));
+            expression = tok1->tokAt(4)->stringifyList(tok1->linkAt(3));

             // try to look up the expression to check for duplicates
             std::map<std::string, const Token*>::iterator it = expressionMap.find(expression);
@@ -2630,13 +2630,13 @@ void CheckOther::checkDuplicateBranch()
         if (tok && tok->next() && Token::simpleMatch(tok->next()->link(), ") {") &&
             Token::simpleMatch(tok->next()->link()->next()->link(), "} else {")) {
             // save if branch code
-            std::string branch1 =
tok->next()->link()->tokAt(2)->stringifyList(tok->next()->link()->next()->link()); // find else branch const Token *tok1 = tok->next()->link()->next()->link(); // save else branch code - std::string branch2 = tok1->tokAt(3)->stringify(tok1->linkAt(2)); + std::string branch2 = tok1->tokAt(3)->stringifyList(tok1->linkAt(2)); // check for duplicates if (branch1 == branch2) diff --git a/lib/token.cpp b/lib/token.cpp index 832c59444..c6a54979b 100644 --- a/lib/token.cpp +++ b/lib/token.cpp @@ -894,60 +894,44 @@ void Token::createMutualLinks(Token *begin, Token *end) void Token::printOut(const char *title) const { - const std::vector fileNames; - std::cout << stringifyList(true, title, fileNames) << std::endl; + if (title) + std::cout << "\n### " << title << " ###\n"; + std::cout << stringifyList(true, true, true, true, true, 0, 0) << std::endl; } void Token::printOut(const char *title, const std::vector &fileNames) const { - std::cout << stringifyList(true, title, fileNames) << std::endl; + if (title) + std::cout << "\n### " << title << " ###\n"; + std::cout << stringifyList(true, true, true, true, true, &fileNames, 0) << std::endl; } -std::string Token::stringify(const Token* end) const +void Token::stringify(std::ostream& os, bool varid, bool attributes) const +{ + if (attributes) { + if (isUnsigned()) + os << "unsigned "; + else if (isSigned()) + os << "signed "; + if (isLong()) + os << "long "; + } + os << _str; + if (varid && _varId != 0) + os << '@' << _varId; +} + +std::string Token::stringifyList(bool varid, bool attributes, bool linenumbers, bool linebreaks, bool files, const std::vector* fileNames, const Token* end) const { if (this == end) return ""; std::ostringstream ret; - if (isUnsigned()) - ret << "unsigned "; - else if (isSigned()) - ret << "signed "; - if (isLong()) - ret << "long "; - ret << str(); - - for (const Token *tok = this->next(); tok && tok != end; tok = tok->next()) { - if (tok->str().empty()) - continue; - if (tok->isUnsigned()) - ret << " unsigned"; - else if (tok->isSigned()) - ret << " signed"; - if (tok->isLong()) - ret << " long"; - ret << ' ' << tok->str(); - } - return ret.str(); -} - -std::string Token::stringifyList(bool varid, const char *title) const -{ - const std::vector fileNames; - return stringifyList(varid, title, fileNames); -} - -std::string Token::stringifyList(bool varid, const char *title, const std::vector &fileNames) const -{ - std::ostringstream ret; - if (title) - ret << "\n### " << title << " ###\n"; - - unsigned int lineNumber = 0; - int fileInd = -1; + unsigned int lineNumber = _linenr; + int fileInd = files?-1:_fileIndex; std::map lineNumbers; - for (const Token *tok = this; tok; tok = tok->next()) { + for (const Token *tok = this; tok != end; tok = tok->next()) { bool fileChange = false; if (static_cast(tok->_fileIndex) != fileInd) { if (fileInd != -1) { @@ -955,29 +939,46 @@ std::string Token::stringifyList(bool varid, const char *title, const std::vecto } fileInd = static_cast(tok->_fileIndex); - ret << "\n\n##file "; - if (fileNames.size() > tok->_fileIndex) - ret << fileNames.at(tok->_fileIndex); - else - ret << fileInd; + if (files) { + ret << "\n\n##file "; + if (fileNames && fileNames->size() > tok->_fileIndex) + ret << fileNames->at(tok->_fileIndex); + else + ret << fileInd; + } lineNumber = lineNumbers[fileInd]; fileChange = true; } - if (lineNumber != tok->linenr() || fileChange) { + if (linebreaks && (lineNumber != tok->linenr() || fileChange)) { while (lineNumber < tok->linenr()) { ++lineNumber; - ret << '\n' << 
lineNumber << ':'; + ret << '\n'; + if (linenumbers) { + ret << lineNumber << ':'; + if (lineNumber == tok->linenr()) + ret << ' '; + } } lineNumber = tok->linenr(); } - ret << ' ' << tok->str(); - if (varid && tok->varId() > 0) - ret << '@' << tok->varId(); + tok->stringify(ret, varid, attributes); // print token + if (tok->next() != end && (!linebreaks || (tok->next()->linenr() <= tok->linenr() && tok->next()->fileIndex() == tok->fileIndex()))) + ret << ' '; } - ret << '\n'; + if (linebreaks && files) + ret << '\n'; return ret.str(); } +std::string Token::stringifyList(const Token* end, bool attributes) const +{ + return stringifyList(false, attributes, false, false, false, 0, end); +} + +std::string Token::stringifyList(bool varid) const +{ + return stringifyList(varid, false, true, true, true, 0, 0); +} diff --git a/lib/token.h b/lib/token.h index d756181bc..010cf2f11 100644 --- a/lib/token.h +++ b/lib/token.h @@ -21,6 +21,7 @@ #include #include +#include /// @addtogroup Core /// @{ @@ -333,10 +334,29 @@ public: */ static void replace(Token *replaceThis, Token *start, Token *end); - /** Stringify a token list (with or without varId) */ - std::string stringify(const Token* end) const; - std::string stringifyList(bool varid = false, const char *title = 0) const; - std::string stringifyList(bool varid, const char *title, const std::vector &fileNames) const; + /** + * Stringify a token + * @param os The result is shifted into that output stream + * @param varid Print varids. (Style: "varname@id") + * @param attributes Print attributes of tokens like "unsigned" in front of it. + */ + void stringify(std::ostream& os, bool varid, bool attributes) const; + + /** + * Stringify a list of token, from current instance on. + * @param varid Print varids. (Style: "varname@id") + * @param attributes Print attributes of tokens like "unsigned" in front of it. + * @param linenumbers Print line number in front of each line + * @param linebreaks Insert \n into string when line number changes + * @param files print Files as numbers or as names (if fileNames is given) + * @param fileNames Vector of filenames. Used (if given) to print filenames as strings instead of numbers. + * @param title Prints a title on top of output + * @param end Stringification ends before this token is reached. 0 to stringify until end of list. + * @return Stringified token list as a string + */ + std::string stringifyList(bool varid, bool attributes, bool linenumbers, bool linebreaks, bool files, const std::vector* fileNames = 0, const Token* end = 0) const; + std::string stringifyList(const Token* end, bool attributes = true) const; + std::string stringifyList(bool varid = false) const; /** * Remove the contents for this token from the token list. 
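
For illustration only (not part of the patch): a minimal sketch of how the three declarations added to lib/token.h above might be called. It assumes cppcheck's lib/token.h is on the include path; the helper name dumpTokens and the tok parameter are hypothetical.

    #include "token.h"
    #include <iostream>
    #include <sstream>

    // Hypothetical helper: print one token, then the rest of its list in two styles.
    static void dumpTokens(const Token *tok)
    {
        std::ostringstream os;
        tok->stringify(os, /*varid=*/true, /*attributes=*/true); // single token, e.g. "unsigned x@3"
        std::cout << os.str() << '\n';

        // Whole remaining list as plain space-separated code, without attributes
        // (the overload the refactored tests use):
        std::cout << tok->stringifyList(0, false) << '\n';

        // Debug-style dump: varids, line numbers, line breaks and file headers:
        std::cout << tok->stringifyList(true) << '\n';
    }
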
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index 7e56bf2a2..47b90ccfe 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -2488,7 +2488,7 @@ void Tokenizer::simplifySQL() while (end && end->str() != ";") end = end->next(); - std::string instruction = tok->stringify(end); + std::string instruction = tok->stringifyList(end); // delete all tokens until ';' Token::eraseTokens(tok, end); @@ -8977,7 +8977,7 @@ void Tokenizer::simplifyAsm() for (Token *tok = _tokens; tok; tok = tok->next()) { if (Token::Match(tok, "__asm|_asm|asm {") && tok->next()->link()->next()) { - instruction = tok->tokAt(2)->stringify(tok->next()->link()); + instruction = tok->tokAt(2)->stringifyList(tok->next()->link()); Token::eraseTokens(tok, tok->next()->link()->next()); } @@ -8986,7 +8986,7 @@ void Tokenizer::simplifyAsm() Token *partok = tok->next(); if (partok->str() != "(") partok = partok->next(); - instruction = partok->next()->stringify(partok->link()); + instruction = partok->next()->stringifyList(partok->link()); Token::eraseTokens(tok, partok->link()->next()); } @@ -8995,7 +8995,7 @@ void Tokenizer::simplifyAsm() while (tok2 && tok2->linenr() == tok->linenr() && (tok2->isNumber() || tok2->isName() || tok2->str() == ",")) tok2 = tok2->next(); if (!tok2 || tok2->str() == ";" || tok2->linenr() != tok->linenr()) { - instruction = tok->next()->stringify(tok2); + instruction = tok->next()->stringifyList(tok2); Token::eraseTokens(tok, tok2); if (!tok2 || tok2->str() != ";") tok->insertToken(";"); diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp index 63740de92..568b1300e 100644 --- a/test/testsimplifytokens.cpp +++ b/test/testsimplifytokens.cpp @@ -426,22 +426,8 @@ private: tokenizer.simplifyTokenList(); tokenizer.validate(); - std::string ret; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) { - if (tok1 != tokenizer.tokens()) - ret += " "; - if (!simplify) { - if (tok1->isUnsigned()) - ret += "unsigned "; - else if (tok1->isSigned()) - ret += "signed "; - } - if (tok1->isLong()) - ret += "long "; - ret += tok1->str(); - } - return ret; + return tokenizer.tokens()->stringifyList(0, !simplify); } @@ -937,15 +923,7 @@ private: if (simplify) tokenizer.simplifyTokenList(); - std::ostringstream ostr; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) { - if (tok1->previous()) { - ostr << " "; - } - ostr << tok1->str(); - } - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); } unsigned int sizeofFromTokenizer(const char type[]) { @@ -2297,18 +2275,8 @@ private: " x(sizeof typename);\n" " type = 0;\n" "}"; - errout.str(""); - Settings settings; - Tokenizer tokenizer(&settings, this); - std::istringstream istr(code); - tokenizer.tokenize(istr, "test.c", "", false); - std::ostringstream ostr; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) { - ostr << tok1->str(); - if (Token::Match(tok1, "%var% %var%")) - ostr << " "; - } - ASSERT_EQUALS("void f(){x(sizeof typename);type=0;}", ostr.str()); + + ASSERT_EQUALS("void f ( ) { x ( sizeof ( typename ) ) ; type = 0 ; }", sizeof_(code)); } } @@ -2372,11 +2340,7 @@ private: tokenizer.simplifyIfAssign(); - std::ostringstream ostr; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) - ostr << (tok1->previous() ? 
" " : "") << tok1->str(); - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); } void ifassign1() { @@ -2449,11 +2413,7 @@ private: tokenizer.simplifyIfNot(); - std::ostringstream ostr; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) - ostr << (tok1->previous() ? " " : "") << tok1->str(); - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); } void ifnot() { @@ -2480,11 +2440,7 @@ private: std::istringstream istr(code); tokenizer.tokenize(istr, "test.cpp"); - std::ostringstream ostr; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) - ostr << (tok1->previous() ? " " : "") << tok1->str(); - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); } void not1() { @@ -3545,14 +3501,7 @@ private: tokenizer.createLinks(); tokenizer.simplifyTypedef(); - std::string ret; - for (const Token *tok1 = tokenizer.tokens(); tok1; tok1 = tok1->next()) { - if (tok1 != tokenizer.tokens()) - ret += " "; - ret += tok1->str(); - } - - return ret; + return tokenizer.tokens()->stringifyList(0, false); } diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index f8a82c31a..245868908 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -412,18 +412,7 @@ private: TEST_CASE(platformUnix64); } - - bool cmptok(const char *expected[], const Token *actual) { - unsigned int i = 0; - for (; expected[i] && actual; ++i, actual = actual->next()) { - if (strcmp(expected[i], actual->str().c_str()) != 0) - return false; - } - return (expected[i] == NULL && actual == NULL); - } - - - std::string tokenizeAndStringify(const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Unspecified, const std::string &filename="test.cpp") { + std::string tokenizeAndStringify(const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Unspecified, const char* filename = "test.cpp") { errout.str(""); Settings settings; @@ -433,36 +422,11 @@ private: // tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename.c_str()); + tokenizer.tokenize(istr, filename); if (simplify) tokenizer.simplifyTokenList(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { - if (expand) { - if (tok->isUnsigned()) - ostr << "unsigned "; - else if (tok->isSigned()) - ostr << "signed "; - - if (tok->isLong()) - ostr << "long "; - } - - ostr << tok->str(); - - // Append newlines - if (tok->next()) { - if (tok->linenr() != tok->next()->linenr()) { - for (unsigned int i = tok->linenr(); i < tok->next()->linenr(); ++i) - ostr << "\n"; - } else { - ostr << " "; - } - } - } - - return ostr.str(); + return tokenizer.tokens()->stringifyList(false, expand, false, true, false, 0, 0); } @@ -729,10 +693,7 @@ private: tokenizer.simplifyCasts(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - ASSERT_EQUALS(" int * f ( int * ) ;", ostr.str()); + ASSERT_EQUALS("int * f ( int * ) ;", tokenizer.tokens()->stringifyList(0, false)); } // remove static_cast.. 
@@ -750,10 +711,7 @@ private: tokenizer.simplifyCasts(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - ASSERT_EQUALS(" t = & p ;", ostr.str()); + ASSERT_EQUALS("t = & p ;", tokenizer.tokens()->stringifyList(0, false)); } void removeCast3() { @@ -1150,14 +1108,7 @@ private: tokenizer.simplifyKnownVariables(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { - if (tok->previous()) - ostr << " "; - ostr << tok->str(); - } - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); } void simplifyKnownVariables1() { @@ -4124,12 +4075,7 @@ private: std::istringstream istr(code); tokenizer.tokenize(istr, ""); - // Stringify the tokens.. - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << tok->str() << " "; - - ASSERT_EQUALS("a_b TEST ( var , val ) var_val = val ", ostr.str()); + ASSERT_EQUALS("a_b TEST ( var , val ) var_val = val", tokenizer.tokens()->stringifyList(0, false)); } void macrodoublesharp() { @@ -4144,12 +4090,7 @@ private: std::istringstream istr(code); tokenizer.tokenize(istr, ""); - // Stringify the tokens.. - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << tok->str() << " "; - - ASSERT_EQUALS("DBG ( fmt , args . . . ) printf ( fmt , ## args ) ", ostr.str()); + ASSERT_EQUALS("DBG ( fmt , args . . . ) printf ( fmt , ## args )", tokenizer.tokens()->stringifyList(0, false)); } void simplifyFunctionParameters() { @@ -4353,10 +4294,7 @@ private: std::istringstream istr(code); tokenizer.tokenize(istr, "test.cpp"); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - ASSERT_EQUALS(" void f ( ) { double a ; a = 4.2 ; float b ; b = 4.2f ; double c ; c = 4.2e+10 ; double d ; d = 4.2e-10 ; int e ; e = 4 + 2 ; }", ostr.str()); + ASSERT_EQUALS("void f ( ) { double a ; a = 4.2 ; float b ; b = 4.2f ; double c ; c = 4.2e+10 ; double d ; d = 4.2e-10 ; int e ; e = 4 + 2 ; }", tokenizer.tokens()->stringifyList(0, false)); } void tokenize_strings() { @@ -4380,10 +4318,7 @@ private: tokenizer.tokenize(istr, "test.cpp"); tokenizer.simplifyTokenList(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - ASSERT_EQUALS(" void f ( ) { const char * a ; a = { \"hello more world\" } ; }", ostr.str()); + ASSERT_EQUALS("void f ( ) { const char * a ; a = { \"hello more world\" } ; }", tokenizer.tokens()->stringifyList(0, false)); } void simplify_constants() { @@ -4410,10 +4345,7 @@ private: tokenizer.simplifyTokenList(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - ASSERT_EQUALS(" void f ( ) { } void g ( ) { }", ostr.str()); + ASSERT_EQUALS("void f ( ) { } void g ( ) { }", tokenizer.tokens()->stringifyList(0, false)); } void simplify_constants2() { @@ -4436,13 +4368,7 @@ private: tokenizer.simplifyTokenList(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) - ostr << " " << tok->str(); - - std::ostringstream oss; - oss << " void f ( Foo & foo , Foo * foo2 ) { foo . a = 90 ; foo2 . a = 45 ; }"; - ASSERT_EQUALS(oss.str(), ostr.str()); + ASSERT_EQUALS("void f ( Foo & foo , Foo * foo2 ) { foo . a = 90 ; foo2 . 
a = 45 ; }", tokenizer.tokens()->stringifyList(0, false)); } void simplify_constants3() { @@ -4751,9 +4677,9 @@ private: "}\n"; ASSERT_EQUALS("void func (\n" - "int in,\n" - "int r,\n" - "int m)\n" + "int in ,\n" + "int r ,\n" + "int m )\n" "{\n" "}", tokenizeAndStringify(code)); } @@ -4764,7 +4690,7 @@ private: "}\n"; ASSERT_EQUALS("void f (\n" - "char * r)\n" + "char * r )\n" "\n" "{\n" "}", tokenizeAndStringify(code)); @@ -4785,7 +4711,7 @@ private: "}\n"; ASSERT_EQUALS("void f (\n" - "char * r)\n" + "char * r )\n" "\n" "{\n" "}", tokenizeAndStringify(code)); @@ -4798,9 +4724,9 @@ private: "}\n"; ASSERT_EQUALS("void f (\n" - "char * r,\n" + "char * r ,\n" "\n" - "char * s)\n" + "char * s )\n" "\n" "\n" "{\n" @@ -4813,9 +4739,9 @@ private: "}\n"; ASSERT_EQUALS("void f (\n" - "char * r,\n" - "char * s,\n" - "char * t)\n" + "char * r ,\n" + "char * s ,\n" + "char * t )\n" "\n" "{\n" "}", tokenizeAndStringify(code)); @@ -5416,24 +5342,14 @@ private: std::istringstream istr(code); tokenizer.tokenize(istr, "test.cpp"); tokenizer.simplifyFunctionPointers(); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { - if (tok->isUnsigned()) - ostr << " unsigned"; - else if (tok->isSigned()) - ostr << " signed"; - if (tok->isLong()) - ostr << " long"; - ostr << (tok->isName() ? " " : "") << tok->str(); - } - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, true); } void functionpointer1() { - ASSERT_EQUALS(" void* f;", simplifyFunctionPointers("void (*f)();")); - ASSERT_EQUALS(" void** f;", simplifyFunctionPointers("void *(*f)();")); - ASSERT_EQUALS(" unsigned int* f;", simplifyFunctionPointers("unsigned int (*f)();")); - ASSERT_EQUALS(" unsigned int** f;", simplifyFunctionPointers("unsigned int * (*f)();")); + ASSERT_EQUALS("void * f ;", simplifyFunctionPointers("void (*f)();")); + ASSERT_EQUALS("void * * f ;", simplifyFunctionPointers("void *(*f)();")); + ASSERT_EQUALS("unsigned int * f ;", simplifyFunctionPointers("unsigned int (*f)();")); + ASSERT_EQUALS("unsigned int * * f ;", simplifyFunctionPointers("unsigned int * (*f)();")); } void functionpointer2() { @@ -5441,9 +5357,9 @@ private: "void f1 ( ) { }" "PF pf = &f1;" "PF pfs[] = { &f1, &f1 };"; - const char expected[] = " void f1(){} " - "void* pf; pf=& f1; " - "void* pfs[2]={& f1,& f1};"; + const char expected[] = "void f1 ( ) { } " + "void * pf ; pf = & f1 ; " + "void * pfs [ 2 ] = { & f1 , & f1 } ;"; ASSERT_EQUALS(expected, simplifyFunctionPointers(code)); } @@ -5452,8 +5368,8 @@ private: const char code[] = "void f() {\n" "(void)(xy(*p)(0);)" "\n}"; - const char expected[] = " void f(){" - "( void)( xy(* p)(0);)" + const char expected[] = "void f ( ) { " + "( void ) ( xy ( * p ) ( 0 ) ; ) " "}"; ASSERT_EQUALS(expected, simplifyFunctionPointers(code)); } @@ -5517,7 +5433,7 @@ private: "{\n" " fn2();\n" "}\n"; - ASSERT_EQUALS("int main ( )\n{\nfn2 ( ) ;\n}void fn2 ( int t = [ ] { return 1 ; } ( ) )\n{ }", tokenizeAndStringify(code)); + ASSERT_EQUALS("int main ( )\n{\nfn2 ( ) ;\n} void fn2 ( int t = [ ] { return 1 ; } ( ) )\n{ }", tokenizeAndStringify(code)); } void cpp0xtemplate2() { @@ -5589,7 +5505,7 @@ private: std::ostringstream ostr; for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { - if (tok->isName()) + if (tok->isName() && tok->previous()) ostr << " "; ostr << tok->str(); } @@ -5603,7 +5519,7 @@ private: ASSERT_EQUALS("; foo a[3]={{1,2},{3,4},{5,6}};", arraySize_(";foo a[]={{1,2},{3,4},{5,6}};")); TODO_ASSERT_EQUALS("; int a[1]={ foo< bar1, 
bar2>(123,4)};", "; int a[]={ foo< bar1, bar2>(123,4)};", arraySize_(";int a[]={foo(123,4)};")); ASSERT_EQUALS("; int a[2]={ b> c?1:2,3};", arraySize_(";int a[]={ b>c?1:2,3};")); - TODO_ASSERT_EQUALS(" int main(){ int a[2]={ b< c?1:2,3}}", " int main(){ int a[]={ b< c?1:2,3}}", arraySize_("int main(){int a[]={bx)=0; }")); + ASSERT_EQUALS("void f(){ ab:;& b=0;}", labels_("void f() { ab: &b=0; }")); + ASSERT_EQUALS("void f(){ ab:;&( b. x)=0;}", labels_("void f() { ab: &(b->x)=0; }")); //with '(' parenthesis - ASSERT_EQUALS(" void f(){ ab:;*(* b). x=0;}", labels_("void f() { ab: *(* b)->x=0; }")); - ASSERT_EQUALS(" void f(){ ab:;(** b). x=0;}", labels_("void f() { ab: (** b).x=0; }")); - ASSERT_EQUALS(" void f(){ ab:;&(* b. x)=0;}", labels_("void f() { ab: &(*b.x)=0; }")); + ASSERT_EQUALS("void f(){ ab:;*(* b). x=0;}", labels_("void f() { ab: *(* b)->x=0; }")); + ASSERT_EQUALS("void f(){ ab:;(** b). x=0;}", labels_("void f() { ab: (** b).x=0; }")); + ASSERT_EQUALS("void f(){ ab:;&(* b. x)=0;}", labels_("void f() { ab: &(*b.x)=0; }")); //with '{' parenthesis - ASSERT_EQUALS(" void f(){ ab:;{ b=0;}}", labels_("void f() { ab: {b=0;} }")); - ASSERT_EQUALS(" void f(){ ab:;{* b=0;}}", labels_("void f() { ab: { *b=0;} }")); - ASSERT_EQUALS(" void f(){ ab:;{& b=0;}}", labels_("void f() { ab: { &b=0;} }")); - ASSERT_EQUALS(" void f(){ ab:;{&(* b. x)=0;}}", labels_("void f() { ab: {&(*b.x)=0;} }")); + ASSERT_EQUALS("void f(){ ab:;{ b=0;}}", labels_("void f() { ab: {b=0;} }")); + ASSERT_EQUALS("void f(){ ab:;{* b=0;}}", labels_("void f() { ab: { *b=0;} }")); + ASSERT_EQUALS("void f(){ ab:;{& b=0;}}", labels_("void f() { ab: { &b=0;} }")); + ASSERT_EQUALS("void f(){ ab:;{&(* b. x)=0;}}", labels_("void f() { ab: {&(*b.x)=0;} }")); //with unhandled MACRO() code - ASSERT_EQUALS(" void f(){ MACRO( ab: b=0;, foo)}", labels_("void f() { MACRO(ab: b=0;, foo)}")); - ASSERT_EQUALS(" void f(){ MACRO( bar, ab:{&(* b. x)=0;})}", labels_("void f() { MACRO(bar, ab: {&(*b.x)=0;})}")); + ASSERT_EQUALS("void f(){ MACRO( ab: b=0;, foo)}", labels_("void f() { MACRO(ab: b=0;, foo)}")); + ASSERT_EQUALS("void f(){ MACRO( bar, ab:{&(* b. x)=0;})}", labels_("void f() { MACRO(bar, ab: {&(*b.x)=0;})}")); //don't crash with garbage code - ASSERT_EQUALS(" switch(){ case}", labels_("switch(){case}")); + ASSERT_EQUALS("switch(){ case}", labels_("switch(){case}")); } // Check simplifyInitVar @@ -6386,7 +6302,7 @@ private: bool simplify = false; bool expand = true; Settings::PlatformType platform = Settings::Unspecified; - const std::string filename="test.cs"; + const char filename[] = "test.cs"; ASSERT_EQUALS("int * x ;", tokenizeAndStringify("int [] x;", simplify, expand, platform, filename)); ASSERT_EQUALS("; int * x , int * y ;", tokenizeAndStringify("; int [] x, int [] y;", simplify, expand, platform, filename)); ASSERT_EQUALS("; int * * x ;", tokenizeAndStringify("; int [][] x;", simplify, expand, platform, filename)); @@ -6406,14 +6322,7 @@ private: std::istringstream istr(javacode); tokenizer.tokenize(istr, "test.java"); - std::ostringstream ostr; - for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { - ostr << tok->str(); - if (tok->next()) - ostr << " "; - } - - return ostr.str(); + return tokenizer.tokens()->stringifyList(0, false); }