From 8eb5295c2345c14283af999b9d47be7c0fa5ca20 Mon Sep 17 00:00:00 2001 From: firewave Date: Wed, 10 Apr 2024 14:34:32 +0200 Subject: [PATCH 1/3] moved directives from `Preprocessor` to `Tokenizer` --- lib/cppcheck.cpp | 5 +- lib/preprocessor.cpp | 19 ++----- lib/preprocessor.h | 12 +---- lib/tokenize.cpp | 31 ++++++++++-- lib/tokenize.h | 5 ++ test/helpers.cpp | 4 +- test/testpreprocessor.cpp | 87 -------------------------------- test/testtokenize.cpp | 101 ++++++++++++++++++++++++++++++++++++++ test/testunusedvar.cpp | 4 +- 9 files changed, 147 insertions(+), 121 deletions(-) diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp index 188d54977d2..313b6d5d1ac 100644 --- a/lib/cppcheck.cpp +++ b/lib/cppcheck.cpp @@ -796,7 +796,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string } // Get directives - preprocessor.setDirectives(tokens1); + std::list directives = preprocessor.createDirectives(tokens1); preprocessor.simplifyPragmaAsm(&tokens1); preprocessor.setPlatformInfo(&tokens1); @@ -821,7 +821,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string // Run define rules on raw code if (hasRule("define")) { std::string code; - for (const Directive &dir : preprocessor.getDirectives()) { + for (const Directive &dir : directives) { if (startsWith(dir.str,"#define ") || startsWith(dir.str,"#include ")) code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n'; } @@ -890,6 +890,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string Tokenizer tokenizer(mSettings, this, &preprocessor); if (mSettings.showtime != SHOWTIME_MODES::SHOWTIME_NONE) tokenizer.setTimerResults(&s_timerResults); + tokenizer.setDirectives(directives); // TODO: how to avoid repeated copies? try { // Create tokens, skip rest of iteration if failed diff --git a/lib/preprocessor.cpp b/lib/preprocessor.cpp index dd449ce3e92..ba5213470aa 100644 --- a/lib/preprocessor.cpp +++ b/lib/preprocessor.cpp @@ -308,10 +308,10 @@ void Preprocessor::inlineSuppressions(const simplecpp::TokenList &tokens, Suppre } } -void Preprocessor::setDirectives(const simplecpp::TokenList &tokens) +std::list Preprocessor::createDirectives(const simplecpp::TokenList &tokens) const { // directive list.. - mDirectives.clear(); + std::list directives; std::vector list; list.reserve(1U + mTokenLists.size()); @@ -337,9 +337,11 @@ void Preprocessor::setDirectives(const simplecpp::TokenList &tokens) else directive.str += tok2->str(); } - mDirectives.push_back(std::move(directive)); + directives.push_back(std::move(directive)); } } + + return directives; } static std::string readcondition(const simplecpp::Token *iftok, const std::set &defined, const std::set &undefined) @@ -912,17 +914,6 @@ void Preprocessor::dump(std::ostream &out) const { // Create a xml dump. - out << " " << std::endl; - for (const Directive &dir : mDirectives) { - out << " ' which - // could result in invalid XML, so run it through toxml(). 
- << "str=\"" << ErrorLogger::toxml(dir.str) << "\"/>" << std::endl; - } - out << " " << std::endl; - if (!mMacroUsage.empty()) { out << " " << std::endl; for (const simplecpp::MacroUsage ¯oUsage: mMacroUsage) { diff --git a/lib/preprocessor.h b/lib/preprocessor.h index 03b84af5d67..45e9b873f4f 100644 --- a/lib/preprocessor.h +++ b/lib/preprocessor.h @@ -92,12 +92,7 @@ class CPPCHECKLIB Preprocessor { void inlineSuppressions(const simplecpp::TokenList &tokens, SuppressionList &suppressions); - void setDirectives(const simplecpp::TokenList &tokens); - - /** list of all directives met while preprocessing file */ - const std::list &getDirectives() const { - return mDirectives; - } + std::list createDirectives(const simplecpp::TokenList &tokens) const; std::set getConfigs(const simplecpp::TokenList &tokens) const; @@ -143,15 +138,10 @@ class CPPCHECKLIB Preprocessor { static bool hasErrors(const simplecpp::OutputList &outputList); - void setDirectives(const std::list &directives) { - mDirectives = directives; - } - const Settings& mSettings; ErrorLogger *mErrorLogger; /** list of all directives met while preprocessing file */ - std::list mDirectives; std::map mTokenLists; diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index 56498b0d786..46487727719 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -5913,6 +5913,26 @@ void Tokenizer::dump(std::ostream &out) const std::set containers; + outs += " "; + outs += '\n'; + for (const Directive &dir : mDirectives) { + outs += " ' which + // could result in invalid XML, so run it through toxml(). + outs += "str=\""; + outs += ErrorLogger::toxml(dir.str); + outs +="\"/>"; + outs += '\n'; + } + outs += " "; + outs += '\n'; + // tokens.. outs += " "; outs += '\n'; @@ -10649,12 +10669,17 @@ void Tokenizer::simplifyNamespaceAliases() } } -// TODO: how to move the Preprocessor dependency out of here? +void Tokenizer::setDirectives(std::list directives) +{ + mDirectives = std::move(directives); +} + bool Tokenizer::hasIfdef(const Token *start, const Token *end) const { assert(mPreprocessor); - return std::any_of(mPreprocessor->getDirectives().cbegin(), mPreprocessor->getDirectives().cend(), [&](const Directive& d) { + const auto& directives = mDirectives; + return std::any_of(directives.cbegin(), directives.cend(), [&](const Directive& d) { return startsWith(d.str, "#if") && d.linenr >= start->linenr() && d.linenr <= end->linenr() && @@ -10667,7 +10692,7 @@ bool Tokenizer::isPacked(const Token * bodyStart) const { assert(mPreprocessor); - const auto& directives = mPreprocessor->getDirectives(); + const auto& directives = mDirectives; // TODO: should this return true if the #pragma exists in any line before the start token? 
return std::any_of(directives.cbegin(), directives.cend(), [&](const Directive& d) { return d.linenr < bodyStart->linenr() && d.str == "#pragma pack(1)" && d.file == list.getFiles().front(); diff --git a/lib/tokenize.h b/lib/tokenize.h index 304f66f92ef..7a86b6c8d02 100644 --- a/lib/tokenize.h +++ b/lib/tokenize.h @@ -37,6 +37,7 @@ class Token; class TemplateSimplifier; class ErrorLogger; class Preprocessor; +struct Directive; enum class Severity; /// @addtogroup Core @@ -624,6 +625,8 @@ class CPPCHECKLIB Tokenizer { /** Disable assignment operator */ Tokenizer &operator=(const Tokenizer &) = delete; + void setDirectives(std::list directives); + private: const Token *processFunc(const Token *tok2, bool inOperator) const; Token *processFunc(Token *tok2, bool inOperator); @@ -666,6 +669,8 @@ class CPPCHECKLIB Tokenizer { }; std::vector mTypedefInfo; + std::list mDirectives; + /** variable count */ nonneg int mVarId{}; diff --git a/test/helpers.cpp b/test/helpers.cpp index 43117a7a3a8..e63d00c014e 100644 --- a/test/helpers.cpp +++ b/test/helpers.cpp @@ -124,7 +124,6 @@ std::string PreprocessorHelper::getcode(Preprocessor &preprocessor, const std::s tokens1.removeComments(); preprocessor.simplifyPragmaAsm(&tokens1); preprocessor.removeComments(); - preprocessor.setDirectives(tokens1); preprocessor.reportOutput(outputList, true); @@ -179,5 +178,6 @@ void PreprocessorHelper::preprocess(Preprocessor &preprocessor, const char code[ // Tokenizer.. tokenizer.list.createTokens(std::move(tokens2)); - preprocessor.setDirectives(tokens1); + std::list directives = preprocessor.createDirectives(tokens1); + tokenizer.setDirectives(std::move(directives)); } diff --git a/test/testpreprocessor.cpp b/test/testpreprocessor.cpp index 2144d0a9885..a631ca5dfa5 100644 --- a/test/testpreprocessor.cpp +++ b/test/testpreprocessor.cpp @@ -245,10 +245,6 @@ class TestPreprocessor : public TestFixture { TEST_CASE(wrongPathOnErrorDirective); - TEST_CASE(testDirectiveIncludeTypes); - TEST_CASE(testDirectiveIncludeLocations); - TEST_CASE(testDirectiveIncludeComments); - TEST_CASE(testMissingInclude); TEST_CASE(testMissingInclude2); TEST_CASE(testMissingInclude3); @@ -275,7 +271,6 @@ class TestPreprocessor : public TestFixture { tokens.removeComments(); preprocessor0.simplifyPragmaAsm(&tokens); preprocessor0.removeComments(); - preprocessor0.setDirectives(tokens); preprocessor0.reportOutput(outputList, true); @@ -2313,88 +2308,6 @@ class TestPreprocessor : public TestFixture { ASSERT_EQUALS("[test.c:1]: (error) #error hello world!\n", errout_str()); } - void testDirectiveIncludeTypes() { - const char filedata[] = "#define macro some definition\n" - "#undef macro\n" - "#ifdef macro\n" - "#elif some (complex) condition\n" - "#else\n" - "#endif\n" - "#if some other condition\n" - "#pragma some proprietary content\n" - "#\n" /* may appear in old C code */ - "#ident some text\n" /* may appear in old C code */ - "#unknownmacro some unpredictable text\n" - "#warning some warning message\n" - "#error some error message\n"; - const char dumpdata[] = " \n" - - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n"; - - std::ostringstream ostr; - Preprocessor preprocessor(settings0, this); - PreprocessorHelper::getcode(preprocessor, filedata, "", "test.c"); - preprocessor.dump(ostr); - ASSERT_EQUALS(dumpdata, ostr.str()); - } - - void testDirectiveIncludeLocations() { - const char filedata[] = "#define macro1 val\n" - "#file \"inc1.h\"\n" - "#define macro2 val\n" - "#file 
\"inc2.h\"\n" - "#define macro3 val\n" - "#endfile\n" - "#define macro4 val\n" - "#endfile\n" - "#define macro5 val\n"; - const char dumpdata[] = " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n" - " \n"; - - std::ostringstream ostr; - Preprocessor preprocessor(settings0, this); - PreprocessorHelper::getcode(preprocessor, filedata, "", "test.c"); - preprocessor.dump(ostr); - ASSERT_EQUALS(dumpdata, ostr.str()); - } - - void testDirectiveIncludeComments() { - const char filedata[] = "#ifdef macro2 /* this will be removed */\n" - "#else /* this will be removed too */\n" - "#endif /* this will also be removed */\n"; - const char dumpdata[] = " \n" - " \n" - " \n" - " \n" - " \n"; - - std::ostringstream ostr; - Preprocessor preprocessor(settings0, this); - PreprocessorHelper::getcode(preprocessor, filedata, "", "test.c"); - preprocessor.dump(ostr); - ASSERT_EQUALS(dumpdata, ostr.str()); - } - // test for existing local include void testMissingInclude() { /*const*/ Settings settings; diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index 81c850b3613..eab7901084b 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -449,6 +449,10 @@ class TestTokenizer : public TestFixture { TEST_CASE(cpp20_default_bitfield_initializer); TEST_CASE(cpp11init); + + TEST_CASE(testDirectiveIncludeTypes); + TEST_CASE(testDirectiveIncludeLocations); + TEST_CASE(testDirectiveIncludeComments); } #define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__) @@ -497,6 +501,21 @@ class TestTokenizer : public TestFixture { return tokenizer.tokens()->stringifyList(true,true,true,true,false); } + void directiveDump(const char filedata[], std::ostream& ostr) { + Preprocessor preprocessor(settingsDefault, this); + std::istringstream istr(filedata); + simplecpp::OutputList outputList; + std::vector files; + simplecpp::TokenList tokens1(istr, files, "test.c", &outputList); + std::list directives = preprocessor.createDirectives(tokens1); + + const Settings s = settingsBuilder().severity(Severity::information).build(); + Tokenizer tokenizer(s, this); + tokenizer.setDirectives(std::move(directives)); + + tokenizer.dump(ostr); + } + void tokenize1() { const char code[] = "void f ( )\n" "{ if ( p . 
y ( ) > yof ) { } }"; @@ -7895,6 +7914,88 @@ class TestTokenizer : public TestFixture { ASSERT_EQUALS("[test.cpp:2]: (debug) auto token with no type.\n", errout_str()); #undef testIsCpp11init } + + void testDirectiveIncludeTypes() { + const char filedata[] = "#define macro some definition\n" + "#undef macro\n" + "#ifdef macro\n" + "#elif some (complex) condition\n" + "#else\n" + "#endif\n" + "#if some other condition\n" + "#pragma some proprietary content\n" + "#\n" /* may appear in old C code */ + "#ident some text\n" /* may appear in old C code */ + "#unknownmacro some unpredictable text\n" + "#warning some warning message\n" + "#error some error message\n"; + const char dumpdata[] = " \n" + + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n"; + + std::ostringstream ostr; + directiveDump(filedata, ostr); + ASSERT_EQUALS(dumpdata, ostr.str()); + } + + void testDirectiveIncludeLocations() { + const char filedata[] = "#define macro1 val\n" + "#file \"inc1.h\"\n" + "#define macro2 val\n" + "#file \"inc2.h\"\n" + "#define macro3 val\n" + "#endfile\n" + "#define macro4 val\n" + "#endfile\n" + "#define macro5 val\n"; + const char dumpdata[] = " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n"; + + std::ostringstream ostr; + directiveDump(filedata, ostr); + ASSERT_EQUALS(dumpdata, ostr.str()); + } + + void testDirectiveIncludeComments() { + const char filedata[] = "#ifdef macro2 /* this will be removed */\n" + "#else /* this will be removed too */\n" + "#endif /* this will also be removed */\n"; + const char dumpdata[] = " \n" + " \n" + " \n" + " \n" + " \n" + " \n" + " \n"; + + std::ostringstream ostr; + directiveDump(filedata, ostr); + ASSERT_EQUALS(dumpdata, ostr.str()); + } }; REGISTER_TEST(TestTokenizer) diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp index 24639cb97be..ba87fe9e160 100644 --- a/test/testunusedvar.cpp +++ b/test/testunusedvar.cpp @@ -261,13 +261,13 @@ class TestUnusedVar : public TestFixture { #define checkStructMemberUsage(...) checkStructMemberUsage_(__FILE__, __LINE__, __VA_ARGS__) void checkStructMemberUsage_(const char* file, int line, const char code[], const std::list* directives = nullptr, const Settings *s = nullptr) { Preprocessor preprocessor(settings); - if (directives) - preprocessor.setDirectives(*directives); const Settings *settings1 = s ? s : &settings; // Tokenize.. SimpleTokenizer tokenizer(*settings1, *this, &preprocessor); + if (directives) + tokenizer.setDirectives(*directives); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check for unused variables.. 
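
Seen end to end, PATCH 1/3 moves ownership of the directive list out of the Preprocessor: Preprocessor::createDirectives() now returns the list and the caller forwards it to each Tokenizer via setDirectives(), while the dump of the directives moves from Preprocessor::dump() to Tokenizer::dump(). The following is a condensed sketch of the resulting call flow in CppCheck::checkFile(), not the literal patched code: template arguments are restored where the diff text above lost them, error handling is omitted, and mSettings, tokens1 and startsWith() are assumed to be the surrounding members and helpers shown in the hunks above.

    // Directives are created once per file, owned by the caller.
    std::list<Directive> directives = preprocessor.createDirectives(tokens1);

    // Raw-code rules ("define") still iterate the same data, now from the
    // returned list instead of Preprocessor::getDirectives():
    std::string code;
    for (const Directive &dir : directives) {
        if (startsWith(dir.str, "#define ") || startsWith(dir.str, "#include "))
            code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n';
    }

    // Each Tokenizer receives a copy of the list.
    Tokenizer tokenizer(mSettings, this, &preprocessor);   // &preprocessor is dropped again in PATCH 2/3
    tokenizer.setDirectives(directives);                   // TODO in the patch: how to avoid repeated copies?

The "repeated copies" TODO presumably refers to a Tokenizer being constructed per checked configuration while the directive list stays the same for the whole file.
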
From e09bcc62d7c980160f872612d5c543ef473448d1 Mon Sep 17 00:00:00 2001 From: firewave Date: Wed, 10 Apr 2024 14:42:06 +0200 Subject: [PATCH 2/3] Tokenizer: removed `Preprocessor` dependency --- lib/cppcheck.cpp | 5 ++- lib/preprocessor.h | 2 +- lib/tokenize.cpp | 9 ++--- lib/tokenize.h | 5 +-- test/helpers.cpp | 1 - test/helpers.h | 9 ++--- test/testclass.cpp | 74 ++++++++++-------------------------------- test/testcondition.cpp | 2 +- test/testgarbage.cpp | 5 +-- test/testother.cpp | 14 +++----- test/testtype.cpp | 2 +- test/testunusedvar.cpp | 8 ++--- 12 files changed, 36 insertions(+), 100 deletions(-) diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp index 313b6d5d1ac..33599b53542 100644 --- a/lib/cppcheck.cpp +++ b/lib/cppcheck.cpp @@ -506,8 +506,7 @@ unsigned int CppCheck::checkClang(const std::string &path) } try { - Preprocessor preprocessor(mSettings, this); - Tokenizer tokenizer(mSettings, this, &preprocessor); + Tokenizer tokenizer(mSettings, this); tokenizer.list.appendFileIfNew(path); std::istringstream ast(output2); clangimport::parseClangAstDump(tokenizer, ast); @@ -887,7 +886,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string continue; } - Tokenizer tokenizer(mSettings, this, &preprocessor); + Tokenizer tokenizer(mSettings, this); if (mSettings.showtime != SHOWTIME_MODES::SHOWTIME_NONE) tokenizer.setTimerResults(&s_timerResults); tokenizer.setDirectives(directives); // TODO: how to avoid repeated copies? diff --git a/lib/preprocessor.h b/lib/preprocessor.h index 45e9b873f4f..b8aad5e0916 100644 --- a/lib/preprocessor.h +++ b/lib/preprocessor.h @@ -68,7 +68,7 @@ struct CPPCHECKLIB Directive { * The preprocessor has special functionality for extracting the various ifdef * configurations that exist in a source file. */ -class CPPCHECKLIB Preprocessor { +class CPPCHECKLIB WARN_UNUSED Preprocessor { // TODO: get rid of this friend class PreprocessorHelper; friend class TestPreprocessor; diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp index 46487727719..53a08a3a6f1 100644 --- a/lib/tokenize.cpp +++ b/lib/tokenize.cpp @@ -152,12 +152,11 @@ static bool isClassStructUnionEnumStart(const Token * tok) //--------------------------------------------------------------------------- -Tokenizer::Tokenizer(const Settings &settings, ErrorLogger *errorLogger, const Preprocessor *preprocessor) : +Tokenizer::Tokenizer(const Settings &settings, ErrorLogger *errorLogger) : list(&settings), mSettings(settings), mErrorLogger(errorLogger), - mTemplateSimplifier(new TemplateSimplifier(*this)), - mPreprocessor(preprocessor) + mTemplateSimplifier(new TemplateSimplifier(*this)) {} Tokenizer::~Tokenizer() @@ -10676,8 +10675,6 @@ void Tokenizer::setDirectives(std::list directives) bool Tokenizer::hasIfdef(const Token *start, const Token *end) const { - assert(mPreprocessor); - const auto& directives = mDirectives; return std::any_of(directives.cbegin(), directives.cend(), [&](const Directive& d) { return startsWith(d.str, "#if") && @@ -10690,8 +10687,6 @@ bool Tokenizer::hasIfdef(const Token *start, const Token *end) const bool Tokenizer::isPacked(const Token * bodyStart) const { - assert(mPreprocessor); - const auto& directives = mDirectives; // TODO: should this return true if the #pragma exists in any line before the start token? 
return std::any_of(directives.cbegin(), directives.cend(), [&](const Directive& d) { diff --git a/lib/tokenize.h b/lib/tokenize.h index 7a86b6c8d02..79051884961 100644 --- a/lib/tokenize.h +++ b/lib/tokenize.h @@ -36,7 +36,6 @@ class TimerResults; class Token; class TemplateSimplifier; class ErrorLogger; -class Preprocessor; struct Directive; enum class Severity; @@ -54,7 +53,7 @@ class CPPCHECKLIB Tokenizer { friend class TestTokenizer; public: - explicit Tokenizer(const Settings & settings, ErrorLogger *errorLogger, const Preprocessor *preprocessor = nullptr); + explicit Tokenizer(const Settings & settings, ErrorLogger *errorLogger); ~Tokenizer(); void setTimerResults(TimerResults *tr) { @@ -681,8 +680,6 @@ class CPPCHECKLIB Tokenizer { * TimerResults */ TimerResults* mTimerResults{}; - - const Preprocessor * const mPreprocessor; }; /// @} diff --git a/test/helpers.cpp b/test/helpers.cpp index e63d00c014e..e0b5fdbdf6d 100644 --- a/test/helpers.cpp +++ b/test/helpers.cpp @@ -45,7 +45,6 @@ class SuppressionList; const Settings SimpleTokenizer::s_settings; -const Preprocessor SimpleTokenizer::s_preprocessor{s_settings, nullptr}; // TODO: provide ErrorLogger // TODO: better path-only usage ScopedFile::ScopedFile(std::string name, const std::string &content, std::string path) diff --git a/test/helpers.h b/test/helpers.h index eb37021c1bb..130748f4fc0 100644 --- a/test/helpers.h +++ b/test/helpers.h @@ -42,18 +42,14 @@ namespace simplecpp { class SimpleTokenizer : public Tokenizer { public: SimpleTokenizer(ErrorLogger& errorlogger, const char code[], bool cpp = true) - : Tokenizer{s_settings, &errorlogger, &s_preprocessor} + : Tokenizer{s_settings, &errorlogger} { if (!tokenize(code, cpp)) throw std::runtime_error("creating tokens failed"); } SimpleTokenizer(const Settings& settings, ErrorLogger& errorlogger) - : Tokenizer{settings, &errorlogger, &s_preprocessor} - {} - - SimpleTokenizer(const Settings& settings, ErrorLogger& errorlogger, const Preprocessor* preprocessor) - : Tokenizer{settings, &errorlogger, preprocessor} + : Tokenizer{settings, &errorlogger} {} /* @@ -87,7 +83,6 @@ class SimpleTokenizer : public Tokenizer { private: // TODO. find a better solution static const Settings s_settings; - static const Preprocessor s_preprocessor; }; class SimpleTokenList diff --git a/test/testclass.cpp b/test/testclass.cpp index 989dfa516a0..a6478139074 100644 --- a/test/testclass.cpp +++ b/test/testclass.cpp @@ -21,7 +21,6 @@ #include "errortypes.h" #include "fixture.h" #include "helpers.h" -#include "preprocessor.h" #include "settings.h" #include "tokenize.h" @@ -248,10 +247,8 @@ class TestClass : public TestFixture { void checkCopyCtorAndEqOperator_(const char code[], const char* file, int line) { const Settings settings = settingsBuilder().severity(Severity::warning).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -350,10 +347,8 @@ class TestClass : public TestFixture { #define checkExplicitConstructors(code) checkExplicitConstructors_(code, __FILE__, __LINE__) void checkExplicitConstructors_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings0); - // Tokenize.. - SimpleTokenizer tokenizer(settings0, *this, &preprocessor); + SimpleTokenizer tokenizer(settings0, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. 
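
The remaining hunks of PATCH 2/3 repeat one mechanical change across the test fixtures. A sketch of that pattern, using the names from the hunks below (settings, code, directives) and assuming the SimpleTokenizer helper shown in test/helpers.h: the per-check Preprocessor objects are dropped now that Tokenizer's constructor no longer takes one, and a test that still needs directives injects them directly, as checkStructMemberUsage_ in test/testunusedvar.cpp does.

    // before:  Preprocessor preprocessor(settings);
    //          SimpleTokenizer tokenizer(settings, *this, &preprocessor);
    // after:
    SimpleTokenizer tokenizer(settings, *this);
    if (directives)                          // std::list<Directive>*, optional test input
        tokenizer.setDirectives(*directives);
    ASSERT_LOC(tokenizer.tokenize(code), file, line);
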
@@ -500,10 +495,8 @@ class TestClass : public TestFixture { #define checkDuplInheritedMembers(code) checkDuplInheritedMembers_(code, __FILE__, __LINE__) void checkDuplInheritedMembers_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings1); - // Tokenize.. - SimpleTokenizer tokenizer(settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(settings1, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -718,10 +711,8 @@ class TestClass : public TestFixture { #define checkCopyConstructor(code) checkCopyConstructor_(code, __FILE__, __LINE__) void checkCopyConstructor_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings3); - // Tokenize.. - SimpleTokenizer tokenizer(settings3, *this, &preprocessor); + SimpleTokenizer tokenizer(settings3, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -1163,10 +1154,8 @@ class TestClass : public TestFixture { // Check that operator Equal returns reference to this #define checkOpertorEqRetRefThis(code) checkOpertorEqRetRefThis_(code, __FILE__, __LINE__) void checkOpertorEqRetRefThis_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings0); - // Tokenize.. - SimpleTokenizer tokenizer(settings0, *this, &preprocessor); + SimpleTokenizer tokenizer(settings0, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -1635,10 +1624,8 @@ class TestClass : public TestFixture { // Check that operator Equal checks for assignment to self #define checkOpertorEqToSelf(code) checkOpertorEqToSelf_(code, __FILE__, __LINE__) void checkOpertorEqToSelf_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings1); - // Tokenize.. - SimpleTokenizer tokenizer(settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(settings1, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -2594,10 +2581,8 @@ class TestClass : public TestFixture { void checkVirtualDestructor_(const char* file, int line, const char code[], bool inconclusive = false) { const Settings s = settingsBuilder(settings0).certainty(Certainty::inconclusive, inconclusive).severity(Severity::warning).build(); - Preprocessor preprocessor(s); - // Tokenize.. - SimpleTokenizer tokenizer(s, *this, &preprocessor); + SimpleTokenizer tokenizer(s, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -2932,10 +2917,8 @@ class TestClass : public TestFixture { } void checkNoMemset_(const char* file, int line, const char code[], const Settings &settings) { - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -3579,10 +3562,8 @@ class TestClass : public TestFixture { #define checkThisSubtraction(code) checkThisSubtraction_(code, __FILE__, __LINE__) void checkThisSubtraction_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings1); - // Tokenize.. - SimpleTokenizer tokenizer(settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(settings1, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -3612,10 +3593,8 @@ class TestClass : public TestFixture { void checkConst_(const char* file, int line, const char code[], const Settings *s = nullptr, bool inconclusive = true) { const Settings settings = settingsBuilder(s ? 
*s : settings0).certainty(Certainty::inconclusive, inconclusive).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckClass checkClass(&tokenizer, &settings, this); @@ -7532,11 +7511,8 @@ class TestClass : public TestFixture { #define checkInitializerListOrder(code) checkInitializerListOrder_(code, __FILE__, __LINE__) void checkInitializerListOrder_(const char code[], const char* file, int line) { - // Check.. - Preprocessor preprocessor(settings2); - // Tokenize.. - SimpleTokenizer tokenizer(settings2, *this, &preprocessor); + SimpleTokenizer tokenizer(settings2, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckClass checkClass(&tokenizer, &settings2, this); @@ -7663,10 +7639,8 @@ class TestClass : public TestFixture { // Check.. const Settings settings = settingsBuilder().severity(Severity::performance).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckClass checkClass(&tokenizer, &settings, this); @@ -7873,10 +7847,8 @@ class TestClass : public TestFixture { #define checkSelfInitialization(code) checkSelfInitialization_(code, __FILE__, __LINE__) void checkSelfInitialization_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings0); - // Tokenize.. - SimpleTokenizer tokenizer(settings0, *this, &preprocessor); + SimpleTokenizer tokenizer(settings0, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckClass checkClass(&tokenizer, &settings0, this); @@ -7985,10 +7957,8 @@ class TestClass : public TestFixture { // Check.. const Settings settings = settingsBuilder().severity(Severity::warning).severity(Severity::style).certainty(Certainty::inconclusive, inconclusive).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckClass checkClass(&tokenizer, &settings, this); @@ -8330,10 +8300,8 @@ class TestClass : public TestFixture { void checkOverride_(const char code[], const char* file, int line) { const Settings settings = settingsBuilder().severity(Severity::style).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -8704,10 +8672,8 @@ class TestClass : public TestFixture { /*const*/ Settings settings = settingsBuilder().severity(Severity::warning).build(); settings.safeChecks.classes = true; - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -8723,10 +8689,8 @@ class TestClass : public TestFixture { #define checkThisUseAfterFree(code) checkThisUseAfterFree_(code, __FILE__, __LINE__) void checkThisUseAfterFree_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings1); - // Tokenize.. 
- SimpleTokenizer tokenizer(settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(settings1, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. @@ -8924,10 +8888,8 @@ class TestClass : public TestFixture { #define getFileInfo(code) getFileInfo_(code, __FILE__, __LINE__) void getFileInfo_(const char code[], const char* file, int line) { - Preprocessor preprocessor(settings1); - // Tokenize.. - SimpleTokenizer tokenizer(settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(settings1, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); // Check.. diff --git a/test/testcondition.cpp b/test/testcondition.cpp index 7ae5338448f..d1b42b18cc6 100644 --- a/test/testcondition.cpp +++ b/test/testcondition.cpp @@ -129,7 +129,7 @@ class TestCondition : public TestFixture { void check_(const char* file, int line, const char code[], const Settings &settings, const char* filename = "test.cpp") { Preprocessor preprocessor(settings); std::vector files(1, filename); - Tokenizer tokenizer(settings, this, &preprocessor); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. diff --git a/test/testgarbage.cpp b/test/testgarbage.cpp index 8972da28463..887f08bd14e 100644 --- a/test/testgarbage.cpp +++ b/test/testgarbage.cpp @@ -20,7 +20,6 @@ #include "errortypes.h" #include "fixture.h" #include "helpers.h" -#include "preprocessor.h" #include "settings.h" #include "token.h" @@ -283,10 +282,8 @@ class TestGarbage : public TestFixture { } std::string checkCodeInternal_(const char code[], bool cpp, const char* file, int line) { - Preprocessor preprocessor(settings); - // tokenize.. - SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code, cpp), file, line); // call all "runChecks" in all registered Check classes diff --git a/test/testother.cpp b/test/testother.cpp index eaddae272c6..6171845505e 100644 --- a/test/testother.cpp +++ b/test/testother.cpp @@ -306,10 +306,8 @@ class TestOther : public TestFixture { settings->certainty.setEnabled(Certainty::inconclusive, inconclusive); settings->verbose = verbose; - Preprocessor preprocessor(*settings); - // Tokenize.. - SimpleTokenizer tokenizer(*settings, *this, &preprocessor); + SimpleTokenizer tokenizer(*settings, *this); ASSERT_LOC(tokenizer.tokenize(code, cpp), file, line); // Check.. @@ -335,7 +333,7 @@ class TestOther : public TestFixture { Preprocessor preprocessor(*settings); std::vector files(1, filename); - Tokenizer tokenizer(*settings, this, &preprocessor); + Tokenizer tokenizer(*settings, this); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. @@ -1703,10 +1701,8 @@ class TestOther : public TestFixture { // #5560 - set c++03 const Settings settings = settingsBuilder().severity(Severity::style).cpp(Standards::CPP03).build(); - Preprocessor preprocessor(settings); - // Tokenize.. - SimpleTokenizer tokenizerCpp(settings, *this, &preprocessor); + SimpleTokenizer tokenizerCpp(settings, *this); ASSERT_LOC(tokenizerCpp.tokenize(code), file, line); CheckOther checkOtherCpp(&tokenizerCpp, &settings, this); @@ -1924,10 +1920,8 @@ class TestOther : public TestFixture { /*const*/ Settings settings = settingsBuilder().severity(Severity::warning).severity(Severity::portability, portability).certainty(Certainty::inconclusive, inconclusive).build(); settings.platform.defaultSign = 's'; - Preprocessor preprocessor(settings); - // Tokenize.. 
- SimpleTokenizer tokenizer(settings, *this, &preprocessor); + SimpleTokenizer tokenizer(settings, *this); ASSERT_LOC(tokenizer.tokenize(code), file, line); CheckOther checkOtherCpp(&tokenizer, &settings, this); diff --git a/test/testtype.cpp b/test/testtype.cpp index 17eebb60c9b..d96ad4661fc 100644 --- a/test/testtype.cpp +++ b/test/testtype.cpp @@ -67,7 +67,7 @@ class TestType : public TestFixture { Preprocessor preprocessor(settings1); std::vector files(1, filename); - Tokenizer tokenizer(settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer, dui); // Tokenizer.. diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp index ba87fe9e160..8ae6de2019b 100644 --- a/test/testunusedvar.cpp +++ b/test/testunusedvar.cpp @@ -260,12 +260,10 @@ class TestUnusedVar : public TestFixture { #define functionVariableUsage(...) functionVariableUsage_(__FILE__, __LINE__, __VA_ARGS__) #define checkStructMemberUsage(...) checkStructMemberUsage_(__FILE__, __LINE__, __VA_ARGS__) void checkStructMemberUsage_(const char* file, int line, const char code[], const std::list* directives = nullptr, const Settings *s = nullptr) { - Preprocessor preprocessor(settings); - const Settings *settings1 = s ? s : &settings; // Tokenize.. - SimpleTokenizer tokenizer(*settings1, *this, &preprocessor); + SimpleTokenizer tokenizer(*settings1, *this); if (directives) tokenizer.setDirectives(*directives); ASSERT_LOC(tokenizer.tokenize(code), file, line); @@ -279,7 +277,7 @@ class TestUnusedVar : public TestFixture { void checkStructMemberUsageP_(const char* file, int line, const char code[]) { std::vector files(1, "test.cpp"); Preprocessor preprocessor(settings); - Tokenizer tokenizer(settings, this, &preprocessor); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. @@ -294,7 +292,7 @@ class TestUnusedVar : public TestFixture { void checkFunctionVariableUsageP_(const char* file, int line, const char code[], const char* filename = "test.cpp") { Preprocessor preprocessor(settings); std::vector files(1, filename); - Tokenizer tokenizer(settings, this, &preprocessor); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. From adc5e46ee2f4a4ee1153565dba31c56fbc59b74a Mon Sep 17 00:00:00 2001 From: firewave Date: Wed, 10 Apr 2024 14:59:44 +0200 Subject: [PATCH 3/3] testrunner: small `PreprocessorHelper` cleanup --- test/helpers.cpp | 20 +++----------------- test/helpers.h | 3 +-- test/testcondition.cpp | 4 +--- test/testother.cpp | 4 +--- test/testtokenize.cpp | 3 +-- test/testtype.cpp | 4 +--- test/testunusedvar.cpp | 6 ++---- 7 files changed, 10 insertions(+), 34 deletions(-) diff --git a/test/helpers.cpp b/test/helpers.cpp index e0b5fdbdf6d..a639d27ec42 100644 --- a/test/helpers.cpp +++ b/test/helpers.cpp @@ -146,25 +146,10 @@ std::string PreprocessorHelper::getcode(Preprocessor &preprocessor, const std::s void PreprocessorHelper::preprocess(const char code[], std::vector &files, Tokenizer& tokenizer) { - // Raw Tokens.. - std::istringstream istr(code); - const simplecpp::TokenList tokens1(istr, files, files[0]); - - // Preprocess.. - simplecpp::TokenList tokens2(files); - std::map filedata; - simplecpp::preprocess(tokens2, tokens1, files, filedata, simplecpp::DUI()); - - // Tokenizer.. 
- tokenizer.list.createTokens(std::move(tokens2)); -} - -void PreprocessorHelper::preprocess(Preprocessor &preprocessor, const char code[], std::vector &files, Tokenizer& tokenizer) -{ - preprocess(preprocessor, code, files, tokenizer, simplecpp::DUI()); + preprocess(code, files, tokenizer, simplecpp::DUI()); } -void PreprocessorHelper::preprocess(Preprocessor &preprocessor, const char code[], std::vector &files, Tokenizer& tokenizer, const simplecpp::DUI& dui) +void PreprocessorHelper::preprocess(const char code[], std::vector &files, Tokenizer& tokenizer, const simplecpp::DUI& dui) { std::istringstream istr(code); const simplecpp::TokenList tokens1(istr, files, files[0]); @@ -177,6 +162,7 @@ void PreprocessorHelper::preprocess(Preprocessor &preprocessor, const char code[ // Tokenizer.. tokenizer.list.createTokens(std::move(tokens2)); + const Preprocessor preprocessor(tokenizer.getSettings()); std::list directives = preprocessor.createDirectives(tokens1); tokenizer.setDirectives(std::move(directives)); } diff --git a/test/helpers.h b/test/helpers.h index 130748f4fc0..fbc8b755ec1 100644 --- a/test/helpers.h +++ b/test/helpers.h @@ -151,8 +151,7 @@ class PreprocessorHelper static std::string getcode(Preprocessor &preprocessor, const std::string &filedata, const std::string &cfg, const std::string &filename, SuppressionList *inlineSuppression = nullptr); static void preprocess(const char code[], std::vector &files, Tokenizer& tokenizer); - static void preprocess(Preprocessor &preprocessor, const char code[], std::vector &files, Tokenizer& tokenizer); - static void preprocess(Preprocessor &preprocessor, const char code[], std::vector &files, Tokenizer& tokenizer, const simplecpp::DUI& dui); + static void preprocess(const char code[], std::vector &files, Tokenizer& tokenizer, const simplecpp::DUI& dui); }; namespace cppcheck { diff --git a/test/testcondition.cpp b/test/testcondition.cpp index d1b42b18cc6..05266e8db93 100644 --- a/test/testcondition.cpp +++ b/test/testcondition.cpp @@ -21,7 +21,6 @@ #include "fixture.h" #include "helpers.h" #include "platform.h" -#include "preprocessor.h" #include "settings.h" #include "tokenize.h" @@ -127,10 +126,9 @@ class TestCondition : public TestFixture { #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) void check_(const char* file, int line, const char code[], const Settings &settings, const char* filename = "test.cpp") { - Preprocessor preprocessor(settings); std::vector files(1, filename); Tokenizer tokenizer(settings, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); + PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); diff --git a/test/testother.cpp b/test/testother.cpp index 6171845505e..2caae6262d3 100644 --- a/test/testother.cpp +++ b/test/testother.cpp @@ -21,7 +21,6 @@ #include "fixture.h" #include "helpers.h" #include "platform.h" -#include "preprocessor.h" #include "settings.h" #include "standards.h" #include "tokenize.h" @@ -331,10 +330,9 @@ class TestOther : public TestFixture { settings->standards.cpp = Standards::CPPLatest; settings->certainty.enable(Certainty::inconclusive); - Preprocessor preprocessor(*settings); std::vector files(1, filename); Tokenizer tokenizer(*settings, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); + PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. 
ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index eab7901084b..a79e168e049 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -7674,10 +7674,9 @@ class TestTokenizer : public TestFixture { std::string checkHdrs_(const char* file, int line, const char code[], bool checkHeadersFlag) { const Settings settings = settingsBuilder().checkHeaders(checkHeadersFlag).build(); - Preprocessor preprocessor(settings0); std::vector files(1, "test.cpp"); Tokenizer tokenizer(settings, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); + PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); diff --git a/test/testtype.cpp b/test/testtype.cpp index d96ad4661fc..34ba2f55268 100644 --- a/test/testtype.cpp +++ b/test/testtype.cpp @@ -21,7 +21,6 @@ #include "fixture.h" #include "helpers.h" #include "platform.h" -#include "preprocessor.h" #include "settings.h" #include "standards.h" #include "tokenize.h" @@ -65,10 +64,9 @@ class TestType : public TestFixture { void checkP_(const char* file, int line, const char code[], const Settings& settings, const char filename[] = "test.cpp", const simplecpp::DUI& dui = simplecpp::DUI()) { const Settings settings1 = settingsBuilder(settings).severity(Severity::warning).severity(Severity::portability).build(); - Preprocessor preprocessor(settings1); std::vector files(1, filename); Tokenizer tokenizer(settings1, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer, dui); + PreprocessorHelper::preprocess(code, files, tokenizer, dui); // Tokenizer.. ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp index 8ae6de2019b..d390ed628a3 100644 --- a/test/testunusedvar.cpp +++ b/test/testunusedvar.cpp @@ -276,9 +276,8 @@ class TestUnusedVar : public TestFixture { #define checkStructMemberUsageP(...) checkStructMemberUsageP_(__FILE__, __LINE__, __VA_ARGS__) void checkStructMemberUsageP_(const char* file, int line, const char code[]) { std::vector files(1, "test.cpp"); - Preprocessor preprocessor(settings); Tokenizer tokenizer(settings, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); + PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); @@ -290,10 +289,9 @@ class TestUnusedVar : public TestFixture { #define checkFunctionVariableUsageP(...) checkFunctionVariableUsageP_(__FILE__, __LINE__, __VA_ARGS__) void checkFunctionVariableUsageP_(const char* file, int line, const char code[], const char* filename = "test.cpp") { - Preprocessor preprocessor(settings); std::vector files(1, filename); Tokenizer tokenizer(settings, this); - PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); + PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line);
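
After PATCH 3/3 the test helper owns the whole preprocessing pipeline itself: PreprocessorHelper::preprocess() builds its own Preprocessor from tokenizer.getSettings() instead of taking one as a parameter. A condensed view of the final helper and a typical caller follows; it is a sketch rather than the literal patched source, with the template arguments that the diff text lost restored, output/error handling omitted, and the simplecpp calls assumed to behave as in the original helper.

    void PreprocessorHelper::preprocess(const char code[], std::vector<std::string> &files,
                                        Tokenizer &tokenizer, const simplecpp::DUI &dui)
    {
        // Raw tokens..
        std::istringstream istr(code);
        const simplecpp::TokenList tokens1(istr, files, files[0]);

        // Preprocess..
        simplecpp::TokenList tokens2(files);
        std::map<std::string, simplecpp::TokenList*> filedata;
        simplecpp::preprocess(tokens2, tokens1, files, filedata, dui);

        // Tokenizer..
        tokenizer.list.createTokens(std::move(tokens2));

        // The helper now constructs the Preprocessor itself, so callers no
        // longer pass one in.
        const Preprocessor preprocessor(tokenizer.getSettings());
        tokenizer.setDirectives(preprocessor.createDirectives(tokens1));
    }

    // Caller side after this patch (e.g. test/testcondition.cpp):
    std::vector<std::string> files(1, filename);
    Tokenizer tokenizer(settings, this);
    PreprocessorHelper::preprocess(code, files, tokenizer);
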