diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index de3525972..a95840453 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5018,9 +5018,6 @@ void Tokenizer::dump(std::ostream &out) const
 
 void Tokenizer::simplifyHeaders()
 {
-    // TODO : can we remove anything in headers here? Like unused declarations.
-    // Maybe if --dump is used we want to have _everything_.
-
     if (mSettings->checkHeaders && mSettings->checkUnusedTemplates)
         // Full analysis. All information in the headers are kept.
         return;
@@ -5031,11 +5028,16 @@ void Tokenizer::simplifyHeaders()
     const bool removeUnusedIncludedTemplates = !mSettings->checkUnusedTemplates || !mSettings->checkHeaders;
     const bool removeUnusedTemplates = !mSettings->checkUnusedTemplates;
 
-    // We want to remove selected stuff from the headers but not *everything*.
-    // The intention here is to not damage the analysis of the source file.
-    // You should get all warnings in the source file.
-
-    // TODO: Remove unused types/variables/etc in headers..
+    // checkHeaders:
+    //
+    // If it is true then keep all code in the headers. It's possible
+    // to remove unused types/variables if false positives / false
+    // negatives can be avoided.
+    //
+    // If it is false, then we want to remove selected stuff from the
+    // headers but not *everything*. The intention here is to not damage
+    // the analysis of the source file. You should get all warnings in
+    // the source file. You should not get false positives.
 
     // functions and types to keep
     std::set<std::string> keep;
@@ -5043,7 +5045,7 @@ void Tokenizer::simplifyHeaders()
         if (!tok->isName())
             continue;
 
-        if (checkHeaders && tok->fileIndex() != 0)
+        if (!checkHeaders && tok->fileIndex() != 0)
             continue;
 
         if (Token::Match(tok, "%name% (") && !Token::simpleMatch(tok->linkAt(1), ") {")) {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 50632a02e..e145ad033 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -505,6 +505,8 @@ private:
         TEST_CASE(unknownMacroBeforeReturn);
 
         TEST_CASE(cppcast);
+
+        TEST_CASE(checkHeader1);
     }
 
     std::string tokenizeAndStringify(const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) {
@@ -8601,6 +8603,60 @@ private:
             ASSERT_EQUALS(tok->str() == "(", tok->isCast());
         }
     }
+
+    std::string checkHeaders(const char code[], bool f) {
+        // Clear the error buffer..
+        errout.str("");
+
+        Settings settings;
+        settings.checkHeaders = f;
+
+        // Raw tokens..
+        std::vector<std::string> files(1, "test.cpp");
+        std::istringstream istr(code);
+        const simplecpp::TokenList tokens1(istr, files, files[0]);
+
+        // Preprocess..
+        simplecpp::TokenList tokens2(files);
+        std::map<std::string, simplecpp::TokenList*> filedata;
+        simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI());
+
+        Preprocessor preprocessor(settings0, nullptr);
+        preprocessor.setDirectives(tokens1);
+
+        // Tokenizer..
+        Tokenizer tokenizer(&settings0, this);
+        tokenizer.createTokens(std::move(tokens2));
+        tokenizer.simplifyTokens1("");
+
+        return tokenizer.tokens()->stringifyList();
+    }
+
+    void checkHeader1() {
+        // #9977
+        const char code[] = "# 1 \"test.h\"\n"
+                            "struct A {\n"
+                            "  int a = 1;\n"
+                            "  void f() { g(1); }\n"
+                            "  template <class T> void g(T x) { a = 2; }\n" // <- template is used and should be kept
+                            "};";
+
+        ASSERT_EQUALS("\n\n##file 1\n"
+                      "1: struct A {\n"
+                      "2: int a ; a = 1 ;\n"
+                      "3: void f ( ) { g ( 1 ) ; }\n"
+                      "4: void g ( int x ) ;\n"
+                      "5: } ; void A :: g ( int x ) { a = 2 ; }\n",
+                      checkHeaders(code, true));
+
+        ASSERT_EQUALS("\n\n##file 1\n"
+                      "1: struct A {\n"
+                      "2: int a ; a = 1 ;\n"
+                      "3: void f ( ) { g ( 1 ) ; }\n"
+                      "4: void g ( int x ) ;\n"
+                      "5: } ; void A :: g ( int x ) { a = 2 ; }\n",
+                      checkHeaders(code, false));
+    }
 };
 
 REGISTER_TEST(TestTokenizer)