diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp
index 632d86db0ac..1d736a6b989 100644
--- a/lib/cppcheck.cpp
+++ b/lib/cppcheck.cpp
@@ -506,7 +506,7 @@ unsigned int CppCheck::checkClang(const std::string &path)
     }

     try {
-        Tokenizer tokenizer(mSettings, this);
+        Tokenizer tokenizer(mSettings, *this);
         tokenizer.list.appendFileIfNew(path);
         std::istringstream ast(output2);
         clangimport::parseClangAstDump(tokenizer, ast);
@@ -657,7 +657,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
     if (mSettings.library.markupFile(filename)) {
         if (mUnusedFunctionsCheck && mSettings.useSingleJob() && mSettings.buildDir.empty()) {
             // this is not a real source file - we just want to tokenize it. treat it as C anyways as the language needs to be determined.
-            Tokenizer tokenizer(mSettings, this);
+            Tokenizer tokenizer(mSettings, *this);
             tokenizer.list.setLang(Standards::Language::C);
             if (fileStream) {
                 tokenizer.list.createTokens(*fileStream, filename);
@@ -886,7 +886,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
                 continue;
             }

-            Tokenizer tokenizer(mSettings, this);
+            Tokenizer tokenizer(mSettings, *this);
             if (mSettings.showtime != SHOWTIME_MODES::SHOWTIME_NONE)
                 tokenizer.setTimerResults(&s_timerResults);
             tokenizer.setDirectives(directives); // TODO: how to avoid repeated copies?
diff --git a/lib/symboldatabase.cpp b/lib/symboldatabase.cpp
index b4d30fa2591..4f86497551c 100644
--- a/lib/symboldatabase.cpp
+++ b/lib/symboldatabase.cpp
@@ -52,7 +52,7 @@
 #include
 //---------------------------------------------------------------------------

-SymbolDatabase::SymbolDatabase(Tokenizer& tokenizer, const Settings& settings, ErrorLogger* errorLogger)
+SymbolDatabase::SymbolDatabase(Tokenizer& tokenizer, const Settings& settings, ErrorLogger& errorLogger)
     : mTokenizer(tokenizer), mSettings(settings), mErrorLogger(errorLogger)
 {
     if (!mTokenizer.tokens())
@@ -169,10 +169,9 @@ void SymbolDatabase::createSymbolDatabaseFindAllScopes()
     // find all scopes
     for (const Token *tok = mTokenizer.tokens(); tok; tok = tok ? tok->next() : nullptr) {
         // #5593 suggested to add here:
-        if (mErrorLogger)
-            mErrorLogger->reportProgress(mTokenizer.list.getSourceFilePath(),
-                                         "SymbolDatabase",
-                                         tok->progressValue());
+        mErrorLogger.reportProgress(mTokenizer.list.getSourceFilePath(),
+                                    "SymbolDatabase",
+                                    tok->progressValue());
         // Locate next class
         if ((tok->isCpp() && tok->isKeyword() &&
             ((Token::Match(tok, "class|struct|union|namespace ::| %name% final| {|:|::|<") &&
@@ -2087,14 +2086,14 @@ void SymbolDatabase::validateExecutableScopes() const
     for (std::size_t i = 0; i < functions; ++i) {
         const Scope* const scope = functionScopes[i];
         const Function* const function = scope->function;
-        if (mErrorLogger && scope->isExecutable() && !function) {
+        if (scope->isExecutable() && !function) {
             const std::list callstack(1, scope->classDef);
             const std::string msg = std::string("Executable scope '") + scope->classDef->str() + "' with unknown function.";
             const ErrorMessage errmsg(callstack, &mTokenizer.list, Severity::debug, "symbolDatabaseWarning", msg, Certainty::normal);
-            mErrorLogger->reportErr(errmsg);
+            mErrorLogger.reportErr(errmsg);
         }
     }
 }
@@ -2152,7 +2151,7 @@ void SymbolDatabase::debugSymbolDatabase() const
     for (const Token* tok = mTokenizer.list.front(); tok != mTokenizer.list.back(); tok = tok->next()) {
         if (tok->astParent() && tok->astParent()->getTokenDebug() == tok->getTokenDebug())
             continue;
-        if (mErrorLogger && tok->getTokenDebug() == TokenDebug::ValueType) {
+        if (tok->getTokenDebug() == TokenDebug::ValueType) {
             std::string msg = "Value type is ";
             ErrorPath errorPath;
@@ -2164,7 +2163,7 @@ void SymbolDatabase::debugSymbolDatabase() const
                 msg += "missing";
             }
             errorPath.emplace_back(tok, "");
-            mErrorLogger->reportErr(
+            mErrorLogger.reportErr(
                 {errorPath, &mTokenizer.list, Severity::debug, "valueType", msg, CWE{0}, Certainty::normal});
         }
     }
@@ -3584,27 +3583,27 @@ std::string Type::name() const

 void SymbolDatabase::debugMessage(const Token *tok, const std::string &type, const std::string &msg) const
 {
-    if (tok && mSettings.debugwarnings && mErrorLogger) {
+    if (tok && mSettings.debugwarnings) {
         const std::list locationList(1, tok);
         const ErrorMessage errmsg(locationList, &mTokenizer.list, Severity::debug, type, msg, Certainty::normal);
-        mErrorLogger->reportErr(errmsg);
+        mErrorLogger.reportErr(errmsg);
     }
 }

 void SymbolDatabase::returnImplicitIntError(const Token *tok) const
 {
-    if (tok && mSettings.severity.isEnabled(Severity::portability) && (tok->isC() && mSettings.standards.c != Standards::C89) && mErrorLogger) {
+    if (tok && mSettings.severity.isEnabled(Severity::portability) && (tok->isC() && mSettings.standards.c != Standards::C89)) {
         const std::list locationList(1, tok);
         const ErrorMessage errmsg(locationList, &mTokenizer.list, Severity::portability, "returnImplicitInt", "Omitted return type of function '" + tok->str() + "' defaults to int, this is not supported by ISO C99 and later standards.", Certainty::normal);
-        mErrorLogger->reportErr(errmsg);
+        mErrorLogger.reportErr(errmsg);
     }
 }
diff --git a/lib/symboldatabase.h b/lib/symboldatabase.h
index ffa0777ecf5..901991313d4 100644
--- a/lib/symboldatabase.h
+++ b/lib/symboldatabase.h
@@ -1319,7 +1319,7 @@ class CPPCHECKLIB ValueType {
 class CPPCHECKLIB SymbolDatabase {
     friend class TestSymbolDatabase;
 public:
-    SymbolDatabase(Tokenizer& tokenizer, const Settings& settings, ErrorLogger* errorLogger);
+    SymbolDatabase(Tokenizer& tokenizer, const Settings& settings, ErrorLogger& errorLogger);
     ~SymbolDatabase();

     /** @brief Information about all namespaces/classes/structures */
@@ -1467,7 +1467,7 @@ class CPPCHECKLIB SymbolDatabase {

     Tokenizer& mTokenizer;
     const Settings &mSettings;
-    ErrorLogger *mErrorLogger;
+    ErrorLogger &mErrorLogger;

     /** variable symbol table */
     std::vector mVariableList;
diff --git a/lib/templatesimplifier.cpp b/lib/templatesimplifier.cpp
index 173e3b961c1..ece540930ee 100644
--- a/lib/templatesimplifier.cpp
+++ b/lib/templatesimplifier.cpp
@@ -1170,14 +1170,14 @@ void TemplateSimplifier::useDefaultArgumentValues(TokenAndName &declaration)
     while (it != eq.cend()) {
         // check for end
         if (!it->end) {
-            if (mSettings.debugwarnings && mErrorLogger && mSettings.severity.isEnabled(Severity::debug)) {
+            if (mSettings.debugwarnings && mSettings.severity.isEnabled(Severity::debug)) {
                 const std::list locationList(1, it->eq);
                 const ErrorMessage errmsg(locationList, &mTokenizer.list, Severity::debug, "noparamend", "TemplateSimplifier couldn't find end of template parameter.", Certainty::normal);
-                mErrorLogger->reportErr(errmsg);
+                mErrorLogger.reportErr(errmsg);
             }
             break;
         }
@@ -3078,7 +3078,7 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
         numberOfTemplateInstantiations = mTemplateInstantiations.size();
         ++recursiveCount;
         if (recursiveCount > mSettings.maxTemplateRecursion) {
-            if (mErrorLogger && mSettings.severity.isEnabled(Severity::information)) {
+            if (mSettings.severity.isEnabled(Severity::information)) {
                 std::list typeStringsUsedInTemplateInstantiation;
                 const std::string typeForNewName = templateDeclaration.name() + "<" + getNewName(instantiation.token(), typeStringsUsedInTemplateInstantiation) + ">";
@@ -3091,7 +3091,7 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
                                           + std::to_string(mSettings.maxTemplateRecursion)
                                           + ") reached for template '"+typeForNewName+"'. You might want to limit Cppcheck recursion.",
                                           Certainty::normal);
-                mErrorLogger->reportErr(errmsg);
+                mErrorLogger.reportErr(errmsg);
             }

             // bail out..
@@ -3159,8 +3159,8 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
                 continue;

             Token * const tok2 = instantiation.token();
-            if (mErrorLogger && !mTokenList.getFiles().empty())
-                mErrorLogger->reportProgress(mTokenList.getFiles()[0], "TemplateSimplifier::simplifyTemplateInstantiations()", tok2->progressValue());
+            if (!mTokenList.getFiles().empty())
+                mErrorLogger.reportProgress(mTokenList.getFiles()[0], "TemplateSimplifier::simplifyTemplateInstantiations()", tok2->progressValue());

             if (maxtime > 0 && std::time(nullptr) > maxtime) {
                 if (mSettings.debugwarnings) {
@@ -3171,7 +3171,7 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
                                              "Template instantiation maximum time exceeded",
                                              "templateMaxTime",
                                              Certainty::normal);
-                    mErrorLogger->reportErr(errmsg);
+                    mErrorLogger.reportErr(errmsg);
                 }
                 return false;
             }
@@ -3201,10 +3201,10 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
             if ((typeForNewName.empty() && !templateDeclaration.isVariadic()) ||
                 (!typeParametersInDeclaration.empty() && !instantiateMatch(tok2, typeParametersInDeclaration.size(), templateDeclaration.isVariadic(), nullptr))) {
-                if (printDebug && mErrorLogger) {
+                if (printDebug) {
                     std::list callstack(1, tok2);
-                    mErrorLogger->reportErr(ErrorMessage(callstack, &mTokenList, Severity::debug, "templateInstantiation",
-                                                         "Failed to instantiate template \"" + instantiation.name() + "\". The checking continues anyway.", Certainty::normal));
+                    mErrorLogger.reportErr(ErrorMessage(callstack, &mTokenList, Severity::debug, "templateInstantiation",
+                                                        "Failed to instantiate template \"" + instantiation.name() + "\". The checking continues anyway.", Certainty::normal));
                 }
                 if (typeForNewName.empty())
                     continue;
@@ -3229,8 +3229,8 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
         // TODO: remove the specialized check and handle all uninstantiated templates someday.
         if (!instantiated && specialized) {
             auto * tok2 = const_cast(templateDeclaration.nameToken());
-            if (mErrorLogger && !mTokenList.getFiles().empty())
-                mErrorLogger->reportProgress(mTokenList.getFiles()[0], "TemplateSimplifier::simplifyTemplateInstantiations()", tok2->progressValue());
+            if (!mTokenList.getFiles().empty())
+                mErrorLogger.reportProgress(mTokenList.getFiles()[0], "TemplateSimplifier::simplifyTemplateInstantiations()", tok2->progressValue());

             if (maxtime > 0 && std::time(nullptr) > maxtime) {
                 if (mSettings.debugwarnings) {
@@ -3241,7 +3241,7 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
                                              "Template instantiation maximum time exceeded",
                                              "templateMaxTime",
                                              Certainty::normal);
-                    mErrorLogger->reportErr(errmsg);
+                    mErrorLogger.reportErr(errmsg);
                 }
                 return false;
             }
@@ -3278,10 +3278,10 @@ bool TemplateSimplifier::simplifyTemplateInstantiations(
             std::string typeForNewName = getNewName(tok2, typeStringsUsedInTemplateInstantiation);

             if (typeForNewName.empty()) {
-                if (printDebug && mErrorLogger) {
+                if (printDebug) {
                     std::list callstack(1, tok2);
-                    mErrorLogger->reportErr(ErrorMessage(callstack, &mTokenList, Severity::debug, "templateInstantiation",
-                                                         "Failed to instantiate template \"" + templateDeclaration.name() + "\". The checking continues anyway.", Certainty::normal));
+                    mErrorLogger.reportErr(ErrorMessage(callstack, &mTokenList, Severity::debug, "templateInstantiation",
+                                                        "Failed to instantiate template \"" + templateDeclaration.name() + "\". The checking continues anyway.", Certainty::normal));
                 }
                 return false;
             }
@@ -3972,14 +3972,14 @@ void TemplateSimplifier::simplifyTemplates(const std::time_t maxtime)
         }

         if (passCount == passCountMax) {
-            if (mSettings.debugwarnings && mErrorLogger) {
+            if (mSettings.debugwarnings) {
                 const std::list locationList(1, mTokenList.front());
                 const ErrorMessage errmsg(locationList, &mTokenizer.list, Severity::debug, "debug", "TemplateSimplifier: pass count limit hit before simplifications were finished.", Certainty::normal);
-                mErrorLogger->reportErr(errmsg);
+                mErrorLogger.reportErr(errmsg);
             }
         }
diff --git a/lib/templatesimplifier.h b/lib/templatesimplifier.h
index d3a7bcd6cf6..cc68dbc4a88 100644
--- a/lib/templatesimplifier.h
+++ b/lib/templatesimplifier.h
@@ -494,7 +494,7 @@ class CPPCHECKLIB TemplateSimplifier {
     Tokenizer &mTokenizer;
     TokenList &mTokenList;
     const Settings &mSettings;
-    ErrorLogger *mErrorLogger;
+    ErrorLogger &mErrorLogger;
     bool mChanged{};

     std::list mTemplateDeclarations;
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 40b74271b1b..71220142b01 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -152,7 +152,7 @@ static bool isClassStructUnionEnumStart(const Token * tok)

 //---------------------------------------------------------------------------

-Tokenizer::Tokenizer(const Settings &settings, ErrorLogger *errorLogger) :
+Tokenizer::Tokenizer(const Settings &settings, ErrorLogger &errorLogger) :
     list(&settings),
     mSettings(settings),
     mErrorLogger(errorLogger),
@@ -1141,14 +1141,14 @@ void Tokenizer::simplifyTypedefCpp()
     const std::time_t maxTime = mSettings.typedefMaxTime > 0 ? std::time(nullptr) + mSettings.typedefMaxTime: 0;

     for (Token *tok = list.front(); tok; tok = tok->next()) {
-        if (mErrorLogger && !list.getFiles().empty())
-            mErrorLogger->reportProgress(list.getFiles()[0], "Tokenize (typedef)", tok->progressValue());
+        if (!list.getFiles().empty())
+            mErrorLogger.reportProgress(list.getFiles()[0], "Tokenize (typedef)", tok->progressValue());

         if (Settings::terminated())
             return;

         if (maxTime > 0 && std::time(nullptr) > maxTime) {
-            if (mErrorLogger && mSettings.debugwarnings) {
+            if (mSettings.debugwarnings) {
                 ErrorMessage::FileLocation loc(list.getFiles()[0], 0, 0);
                 ErrorMessage errmsg({std::move(loc)},
                                     emptyString,
@@ -1156,7 +1156,7 @@ void Tokenizer::simplifyTypedefCpp()
                                     "Typedef simplification instantiation maximum time exceeded",
                                     "typedefMaxTime",
                                     Certainty::normal);
-                mErrorLogger->reportErr(errmsg);
+                mErrorLogger.reportErr(errmsg);
             }
             return;
         }
@@ -2876,8 +2876,8 @@ bool Tokenizer::simplifyUsing()
     std::list usingList;

     for (Token *tok = list.front(); tok; tok = tok->next()) {
-        if (mErrorLogger && !list.getFiles().empty())
-            mErrorLogger->reportProgress(list.getFiles()[0], "Tokenize (using)", tok->progressValue());
+        if (!list.getFiles().empty())
+            mErrorLogger.reportProgress(list.getFiles()[0], "Tokenize (using)", tok->progressValue());

         if (Settings::terminated())
             return substitute;
@@ -3342,7 +3342,7 @@
 void Tokenizer::simplifyUsingError(const Token* usingStart, const Token* usingEnd)
 {
-    if (mSettings.debugwarnings && mErrorLogger) {
+    if (mSettings.debugwarnings) {
         std::string str;
         for (const Token *tok = usingStart; tok && tok != usingEnd; tok = tok->next()) {
             if (!str.empty())
@@ -3351,8 +3351,8 @@ void Tokenizer::simplifyUsingError(const Token* usingStart, const Token* usingEn
         str += " ;";
         std::list callstack(1, usingStart);
-        mErrorLogger->reportErr(ErrorMessage(callstack, &list, Severity::debug, "simplifyUsing",
-                                             "Failed to parse \'" + str + "\'. The checking continues anyway.", Certainty::normal));
+        mErrorLogger.reportErr(ErrorMessage(callstack, &list, Severity::debug, "simplifyUsing",
+                                            "Failed to parse \'" + str + "\'. The checking continues anyway.", Certainty::normal));
     }
 }
@@ -3405,12 +3405,11 @@ bool Tokenizer::simplifyTokens1(const std::string &configuration)
     const bool doValueFlow = !disableValueflowEnv || (std::strcmp(disableValueflowEnv, "1") != 0);

     if (doValueFlow) {
-        assert(mErrorLogger);
         if (mTimerResults) {
             Timer t("Tokenizer::simplifyTokens1::ValueFlow", mSettings.showtime, mTimerResults);
-            ValueFlow::setValues(list, *mSymbolDatabase, *mErrorLogger, mSettings, mTimerResults);
+            ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, mSettings, mTimerResults);
         } else {
-            ValueFlow::setValues(list, *mSymbolDatabase, *mErrorLogger, mSettings, mTimerResults);
+            ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, mSettings, mTimerResults);
         }

         arraySizeAfterValueFlow();
@@ -10428,10 +10427,7 @@ void Tokenizer::reportError(const Token* tok, const Severity severity, const std
 void Tokenizer::reportError(const std::list& callstack, Severity severity, const std::string& id, const std::string& msg, bool inconclusive) const
 {
     const ErrorMessage errmsg(callstack, &list, severity, id, msg, inconclusive ? Certainty::inconclusive : Certainty::normal);
-    if (mErrorLogger)
-        mErrorLogger->reportErr(errmsg);
-    else
-        Check::writeToErrorList(errmsg);
+    mErrorLogger.reportErr(errmsg);
 }

 void Tokenizer::setPodTypes()
diff --git a/lib/tokenize.h b/lib/tokenize.h
index 79051884961..5eda6a888d1 100644
--- a/lib/tokenize.h
+++ b/lib/tokenize.h
@@ -53,7 +53,7 @@ class CPPCHECKLIB Tokenizer {
     friend class TestTokenizer;

 public:
-    explicit Tokenizer(const Settings & settings, ErrorLogger *errorLogger);
+    explicit Tokenizer(const Settings & settings, ErrorLogger &errorLogger);
     ~Tokenizer();

     void setTimerResults(TimerResults *tr) {
@@ -645,7 +645,7 @@ class CPPCHECKLIB Tokenizer {
     const Settings & mSettings;

     /** errorlogger */
-    ErrorLogger* const mErrorLogger;
+    ErrorLogger& mErrorLogger;

     /** Symbol database that all checks etc can use */
     SymbolDatabase* mSymbolDatabase{};
diff --git a/test/helpers.h b/test/helpers.h
index 15eeebf4a65..d7484762ba7 100644
--- a/test/helpers.h
+++ b/test/helpers.h
@@ -42,14 +42,14 @@ namespace simplecpp {
 class SimpleTokenizer : public Tokenizer {
 public:
     SimpleTokenizer(ErrorLogger& errorlogger, const char code[], bool cpp = true)
-        : Tokenizer{s_settings, &errorlogger}
+        : Tokenizer{s_settings, errorlogger}
     {
         if (!tokenize(code, cpp))
             throw std::runtime_error("creating tokens failed");
     }

     SimpleTokenizer(const Settings& settings, ErrorLogger& errorlogger)
-        : Tokenizer{settings, &errorlogger}
+        : Tokenizer{settings, errorlogger}
     {}

     /*
diff --git a/test/testbufferoverrun.cpp b/test/testbufferoverrun.cpp
index f16bc2ac167..7c845c82722 100644
--- a/test/testbufferoverrun.cpp
+++ b/test/testbufferoverrun.cpp
@@ -66,7 +66,7 @@ class TestBufferOverrun : public TestFixture {
             .c(Standards::CLatest).cpp(Standards::CPPLatest).certainty(Certainty::inconclusive).build();

         std::vector files(1, filename);
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testclangimport.cpp b/test/testclangimport.cpp
index 65686e7d262..4f8aeb2f685 100644
--- a/test/testclangimport.cpp
+++ b/test/testclangimport.cpp
@@ -140,7 +140,7 @@ class TestClangImport : public TestFixture {
     std::string parse(const char clang[]) {
         const Settings settings = settingsBuilder().clang().build();
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         std::istringstream istr(clang);
         clangimport::parseClangAstDump(tokenizer, istr);
         if (!tokenizer.tokens()) {
@@ -1054,7 +1054,7 @@ class TestClangImport : public TestFixture {

 #define GET_SYMBOL_DB(AST) \
     const Settings settings = settingsBuilder().clang().platform(Platform::Type::Unix64).build(); \
-    Tokenizer tokenizer(settings, this); \
+    Tokenizer tokenizer(settings, *this); \
     { \
         std::istringstream istr(AST); \
         clangimport::parseClangAstDump(tokenizer, istr); \
diff --git a/test/testclass.cpp b/test/testclass.cpp
index 6c90e12407a..344053a53f5 100644
--- a/test/testclass.cpp
+++ b/test/testclass.cpp
@@ -8504,7 +8504,7 @@ class TestClass : public TestFixture {
         const Settings settings = settingsBuilder().severity(Severity::style).build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line);
@@ -8847,7 +8847,7 @@ class TestClass : public TestFixture {
         // getFileInfo
         std::list fileInfo;
         for (const std::string& c: code) {
-            Tokenizer tokenizer(settingsDefault, this);
+            Tokenizer tokenizer(settingsDefault, *this);
             std::istringstream istr(c);
             const std::string filename = std::to_string(fileInfo.size()) + ".cpp";
             ASSERT(tokenizer.list.createTokens(istr, filename));
@@ -8911,7 +8911,7 @@ class TestClass : public TestFixture {
         const Settings settings = settingsBuilder().severity(Severity::performance).library("std.cfg").build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line);
diff --git a/test/testcondition.cpp b/test/testcondition.cpp
index d14238065f0..2c84fedd7a2 100644
--- a/test/testcondition.cpp
+++ b/test/testcondition.cpp
@@ -127,7 +127,7 @@ class TestCondition : public TestFixture {
 #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__)
     void check_(const char* file, int line, const char code[], const Settings &settings, const char* filename = "test.cpp") {
         std::vector files(1, filename);
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testincompletestatement.cpp b/test/testincompletestatement.cpp
index 87b0ec84875..44cde2d09ea 100644
--- a/test/testincompletestatement.cpp
+++ b/test/testincompletestatement.cpp
@@ -39,7 +39,7 @@ class TestIncompleteStatement : public TestFixture {
         const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
diff --git a/test/testleakautovar.cpp b/test/testleakautovar.cpp
index ed47ce67ce0..c74bd22e023 100644
--- a/test/testleakautovar.cpp
+++ b/test/testleakautovar.cpp
@@ -3071,7 +3071,7 @@ class TestLeakAutoVarRecursiveCountLimit : public TestFixture {
 #define checkP(...) checkP_(__FILE__, __LINE__, __VA_ARGS__)
     void checkP_(const char* file, int line, const char code[], bool cpp = false) {
         std::vector files(1, cpp?"test.cpp":"test.c");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testnullpointer.cpp b/test/testnullpointer.cpp
index 77a6438ef65..3dfaefb06c5 100644
--- a/test/testnullpointer.cpp
+++ b/test/testnullpointer.cpp
@@ -192,7 +192,7 @@ class TestNullPointer : public TestFixture {
         const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, false).build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testother.cpp b/test/testother.cpp
index 113de252214..a9112277cde 100644
--- a/test/testother.cpp
+++ b/test/testother.cpp
@@ -331,7 +331,7 @@ class TestOther : public TestFixture {
             settings->certainty.enable(Certainty::inconclusive);

         std::vector files(1, filename);
-        Tokenizer tokenizer(*settings, this);
+        Tokenizer tokenizer(*settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testsimplifytemplate.cpp b/test/testsimplifytemplate.cpp
index 1be4c361950..23a8798ed92 100644
--- a/test/testsimplifytemplate.cpp
+++ b/test/testsimplifytemplate.cpp
@@ -5302,7 +5302,7 @@ class TestSimplifyTemplate : public TestFixture {
     }

     unsigned int templateParameters(const char code[]) {
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, "test.cpp"))
@@ -5370,7 +5370,7 @@ class TestSimplifyTemplate : public TestFixture {

     // Helper function to unit test TemplateSimplifier::getTemplateNamePosition
     int templateNamePositionHelper(const char code[], unsigned offset = 0) {
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, "test.cpp"))
@@ -5441,7 +5441,7 @@ class TestSimplifyTemplate : public TestFixture {

     // Helper function to unit test TemplateSimplifier::findTemplateDeclarationEnd
     bool findTemplateDeclarationEndHelper(const char code[], const char pattern[], unsigned offset = 0) {
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, "test.cpp"))
@@ -5471,7 +5471,7 @@ class TestSimplifyTemplate : public TestFixture {

     // Helper function to unit test TemplateSimplifier::getTemplateParametersInDeclaration
     bool getTemplateParametersInDeclarationHelper(const char code[], const std::vector & params) {
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, "test.cpp"))
diff --git a/test/testsimplifytypedef.cpp b/test/testsimplifytypedef.cpp
index 310d67b5858..3118afa31ce 100644
--- a/test/testsimplifytypedef.cpp
+++ b/test/testsimplifytypedef.cpp
@@ -247,7 +247,7 @@ class TestSimplifyTypedef : public TestFixture {
     }

     std::string simplifyTypedef(const char code[]) {
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, Standards::Language::CPP))
@@ -261,7 +261,7 @@ class TestSimplifyTypedef : public TestFixture {

     std::string simplifyTypedefP(const char code[]) {
         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings0, this);
+        Tokenizer tokenizer(settings0, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
@@ -282,7 +282,7 @@ class TestSimplifyTypedef : public TestFixture {

     std::string simplifyTypedefC(const char code[]) {
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);

         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr, "file.c"))
@@ -4173,7 +4173,7 @@ class TestSimplifyTypedef : public TestFixture {
                            "uint8_t t;"
                            "void test(rFunctionPointer_fp functionPointer);";

-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);
         std::istringstream istr(code);
         ASSERT(tokenizer.list.createTokens(istr, "file.c"));
         tokenizer.createLinks();
diff --git a/test/testsimplifyusing.cpp b/test/testsimplifyusing.cpp
index fde48a0a9d6..d9f1f241a21 100644
--- a/test/testsimplifyusing.cpp
+++ b/test/testsimplifyusing.cpp
@@ -98,7 +98,7 @@ class TestSimplifyUsing : public TestFixture {
         const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).debugwarnings(debugwarnings).platform(type).build();

         if (preprocess) {
-            Tokenizer tokenizer(settings, this);
+            Tokenizer tokenizer(settings, *this);
             std::vector files(1, "test.cpp");
             PreprocessorHelper::preprocess(code, files, tokenizer, *this);
             std::istringstream istr(code);
diff --git a/test/testsizeof.cpp b/test/testsizeof.cpp
index 9822adae4c3..ea33408911a 100644
--- a/test/testsizeof.cpp
+++ b/test/testsizeof.cpp
@@ -60,7 +60,7 @@ class TestSizeof : public TestFixture {
 #define checkP(...) checkP_(__FILE__, __LINE__, __VA_ARGS__)
     void checkP_(const char* file, int line, const char code[]) {
         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
diff --git a/test/teststring.cpp b/test/teststring.cpp
index ca579783389..931faaa4321 100644
--- a/test/teststring.cpp
+++ b/test/teststring.cpp
@@ -65,7 +65,7 @@ class TestString : public TestFixture {
 #define check(...) check_(__FILE__, __LINE__, __VA_ARGS__)
     void check_(const char* file, int line, const char code[], const char filename[] = "test.cpp") {
         std::vector files(1, filename);
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 5536e40698f..9ec0d0a87e5 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -510,7 +510,7 @@ class TestTokenizer : public TestFixture {
         std::list directives = preprocessor.createDirectives(tokens1);

         const Settings s = settingsBuilder().severity(Severity::information).build();
-        Tokenizer tokenizer(s, this);
+        Tokenizer tokenizer(s, *this);
         tokenizer.setDirectives(std::move(directives));

         tokenizer.dump(ostr);
@@ -804,7 +804,7 @@ class TestTokenizer : public TestFixture {
         ASSERT_THROW_INTERNAL(tokenizeAndStringify(";template class X { };",false,Platform::Type::Native,false), SYNTAX);
         ASSERT_THROW_INTERNAL(tokenizeAndStringify("int X() {};",false,Platform::Type::Native,false), SYNTAX);
         {
-            Tokenizer tokenizer(settings1, this);
+            Tokenizer tokenizer(settings1, *this);
             const char code[] = "void foo(int i) { reinterpret_cast(i) };";
             std::istringstream istr(code);
             ASSERT(tokenizer.list.createTokens(istr, "test.h"));
@@ -3560,7 +3560,7 @@ class TestTokenizer : public TestFixture {
     }

     void simplifyString() {
-        Tokenizer tokenizer(settings0, this);
+        Tokenizer tokenizer(settings0, *this);
         ASSERT_EQUALS("\"abc\"", tokenizer.simplifyString("\"abc\""));
         ASSERT_EQUALS("\"\n\"", tokenizer.simplifyString("\"\\xa\""));
         ASSERT_EQUALS("\"3\"", tokenizer.simplifyString("\"\\x33\""));
@@ -5923,7 +5923,7 @@ class TestTokenizer : public TestFixture {

     std::string testAst(const char code[], AstStyle style = AstStyle::Simple) {
         // tokenize given code..
-        Tokenizer tokenizer(settings0, this);
+        Tokenizer tokenizer(settings0, *this);
         std::istringstream istr(code);
         if (!tokenizer.list.createTokens(istr,"test.cpp"))
             return "ERROR";
@@ -7701,7 +7701,7 @@ class TestTokenizer : public TestFixture {
         const Settings settings = settingsBuilder().checkHeaders(checkHeadersFlag).build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testtype.cpp b/test/testtype.cpp
index 1a4a3261acf..878e7ac819b 100644
--- a/test/testtype.cpp
+++ b/test/testtype.cpp
@@ -65,7 +65,7 @@ class TestType : public TestFixture {
         const Settings settings1 = settingsBuilder(settings).severity(Severity::warning).severity(Severity::portability).build();

         std::vector files(1, filename);
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this, dui);

         // Tokenizer..
diff --git a/test/testunusedfunctions.cpp b/test/testunusedfunctions.cpp
index 9ad7d655b5d..8f25ca3c931 100644
--- a/test/testunusedfunctions.cpp
+++ b/test/testunusedfunctions.cpp
@@ -534,7 +534,7 @@ class TestUnusedFunctions : public TestFixture {

         for (int i = 1; i <= 2; ++i) {
             const std::string fname = "test" + std::to_string(i) + ".cpp";
-            Tokenizer tokenizer(settings, this);
+            Tokenizer tokenizer(settings, *this);
             std::istringstream istr(code);
             ASSERT(tokenizer.list.createTokens(istr, fname));
             ASSERT(tokenizer.simplifyTokens1(""));
diff --git a/test/testunusedprivfunc.cpp b/test/testunusedprivfunc.cpp
index a9a5357ed25..24494e99d9c 100644
--- a/test/testunusedprivfunc.cpp
+++ b/test/testunusedprivfunc.cpp
@@ -91,7 +91,7 @@ class TestUnusedPrivateFunction : public TestFixture {
         const Settings settings1 = settingsBuilder(settings).platform(platform).build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings1, this);
+        Tokenizer tokenizer(settings1, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp
index deaf3795b2e..d6b1ba8dc5d 100644
--- a/test/testunusedvar.cpp
+++ b/test/testunusedvar.cpp
@@ -276,7 +276,7 @@ class TestUnusedVar : public TestFixture {
 #define checkStructMemberUsageP(...) checkStructMemberUsageP_(__FILE__, __LINE__, __VA_ARGS__)
     void checkStructMemberUsageP_(const char* file, int line, const char code[]) {
         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
@@ -290,7 +290,7 @@ class TestUnusedVar : public TestFixture {
 #define checkFunctionVariableUsageP(...) checkFunctionVariableUsageP_(__FILE__, __LINE__, __VA_ARGS__)
     void checkFunctionVariableUsageP_(const char* file, int line, const char code[], const char* filename = "test.cpp") {
         std::vector files(1, filename);
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenizer..
diff --git a/test/testvalueflow.cpp b/test/testvalueflow.cpp
index d7606db0292..1bb631581fa 100644
--- a/test/testvalueflow.cpp
+++ b/test/testvalueflow.cpp
@@ -476,7 +476,7 @@ class TestValueFlow : public TestFixture {
         const Settings s = settingsBuilder().debugwarnings().build();

         std::vector files(1, "test.cpp");
-        Tokenizer tokenizer(s, this);
+        Tokenizer tokenizer(s, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         // Tokenize..
diff --git a/test/testvarid.cpp b/test/testvarid.cpp
index 831b74cf5e4..c65a3829ced 100644
--- a/test/testvarid.cpp
+++ b/test/testvarid.cpp
@@ -265,7 +265,7 @@ class TestVarID : public TestFixture {
 #define tokenizeHeader(...) tokenizeHeader_(__FILE__, __LINE__, __VA_ARGS__)
     std::string tokenizeHeader_(const char* file, int line, const char code[], const char filename[]) {
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         std::istringstream istr(code);
         ASSERT_LOC(tokenizer.list.createTokens(istr, filename), file, line);
         EXPECT_EQ(true, tokenizer.simplifyTokens1(""));
@@ -279,7 +279,7 @@ class TestVarID : public TestFixture {
 #define tokenizeExpr(...) tokenizeExpr_(__FILE__, __LINE__, __VA_ARGS__)
     std::string tokenizeExpr_(const char* file, int line, const char code[], const char filename[] = "test.cpp") {
         std::vector files(1, filename);
-        Tokenizer tokenizer(settings, this);
+        Tokenizer tokenizer(settings, *this);
         PreprocessorHelper::preprocess(code, files, tokenizer, *this);

         ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line);
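
The diff applies one pattern throughout Tokenizer, SymbolDatabase and TemplateSimplifier: the ErrorLogger* member becomes an ErrorLogger&, the defensive `if (mErrorLogger)` checks disappear, and call sites pass `*this` (or another lvalue) instead of `this`. Below is a minimal, self-contained sketch of that pattern; the ErrorLogger and Tokenizer types here are simplified stand-ins for illustration, not the real cppcheck interfaces.

#include <iostream>
#include <string>

// Simplified stand-in for illustration; the real cppcheck ErrorLogger is larger.
struct ErrorLogger {
    void reportProgress(const std::string& stage, int value) {
        std::cout << stage << ": " << value << "%\n";
    }
};

// Before: logger held by pointer, null-checked at every use, callers pass `this`.
class TokenizerBefore {
public:
    explicit TokenizerBefore(ErrorLogger* errorLogger) : mErrorLogger(errorLogger) {}
    void run() {
        if (mErrorLogger) // defensive check repeated at every call site
            mErrorLogger->reportProgress("tokenize", 50);
    }
private:
    ErrorLogger* const mErrorLogger;
};

// After: logger held by reference, cannot be null, no checks needed.
class TokenizerAfter {
public:
    explicit TokenizerAfter(ErrorLogger& errorLogger) : mErrorLogger(errorLogger) {}
    void run() {
        mErrorLogger.reportProgress("tokenize", 50);
    }
private:
    ErrorLogger& mErrorLogger;
};

int main() {
    ErrorLogger logger;
    TokenizerBefore before(&logger); // old style: pass a pointer (e.g. `this`)
    before.run();
    TokenizerAfter after(logger);    // new style: pass an lvalue (e.g. `*this`)
    after.run();
}

One trade-off of a reference member is that the class is no longer copy-assignable or default-constructible; that fits this code base, where the logger is always supplied at construction and never reseated.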