diff --git a/lib/cppcheck.cpp b/lib/cppcheck.cpp
index 521b3f106030..ba05b2ef96a7 100644
--- a/lib/cppcheck.cpp
+++ b/lib/cppcheck.cpp
@@ -492,7 +492,7 @@ unsigned int CppCheck::checkClang(const std::string &path)
 
     try {
         std::istringstream ast(output2);
-        Tokenizer tokenizer(&mSettings, this);
+        Tokenizer tokenizer(mSettings, this);
         tokenizer.list.appendFileIfNew(path);
         clangimport::parseClangAstDump(&tokenizer, ast);
         ValueFlow::setValues(tokenizer.list,
@@ -660,7 +660,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
         }
 
         if (mSettings.library.markupFile(filename)) {
-            Tokenizer tokenizer(&mSettings, this, &preprocessor);
+            Tokenizer tokenizer(mSettings, this, &preprocessor);
             tokenizer.createTokens(std::move(tokens1));
             checkUnusedFunctions.getFileInfo(&tokenizer, &mSettings);
             return EXIT_SUCCESS;
@@ -792,7 +792,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
                 if (startsWith(dir.str,"#define ") || startsWith(dir.str,"#include "))
                     code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n';
             }
-            Tokenizer tokenizer2(&mSettings, this);
+            Tokenizer tokenizer2(mSettings, this);
             std::istringstream istr2(code);
             tokenizer2.list.createTokens(istr2);
             executeRules("define", tokenizer2);
@@ -853,7 +853,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
                 continue;
             }
 
-            Tokenizer tokenizer(&mSettings, this, &preprocessor);
+            Tokenizer tokenizer(mSettings, this, &preprocessor);
             if (mSettings.showtime != SHOWTIME_MODES::SHOWTIME_NONE)
                 tokenizer.setTimerResults(&s_timerResults);
diff --git a/lib/importproject.cpp b/lib/importproject.cpp
index 16bcc2e471f7..d61718322941 100644
--- a/lib/importproject.cpp
+++ b/lib/importproject.cpp
@@ -561,7 +561,7 @@ namespace {
             // TODO : Better evaluation
             Settings s;
             std::istringstream istr(c);
-            Tokenizer tokenizer(&s);
+            Tokenizer tokenizer(s);
             tokenizer.tokenize(istr,"vcxproj");
             for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {
                 if (tok->str() == "(" && tok->astOperand1() && tok->astOperand2()) {
diff --git a/lib/templatesimplifier.cpp b/lib/templatesimplifier.cpp
index d04a79769be7..b78b41ac7298 100644
--- a/lib/templatesimplifier.cpp
+++ b/lib/templatesimplifier.cpp
@@ -274,7 +274,7 @@ bool TemplateSimplifier::TokenAndName::isAliasToken(const Token *tok) const
 }
 
 TemplateSimplifier::TemplateSimplifier(Tokenizer &tokenizer)
-    : mTokenizer(tokenizer), mTokenList(mTokenizer.list), mSettings(*mTokenizer.mSettings),
+    : mTokenizer(tokenizer), mTokenList(mTokenizer.list), mSettings(*mTokenizer.getSettings()),
       mErrorLogger(mTokenizer.mErrorLogger)
 {}
 
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 15c747b7fe03..e7641622d1a1 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -152,16 +152,13 @@ static bool isClassStructUnionEnumStart(const Token * tok)
 
 //---------------------------------------------------------------------------
 
-Tokenizer::Tokenizer(const Settings *settings, ErrorLogger *errorLogger, const Preprocessor *preprocessor) :
-    list(settings),
+Tokenizer::Tokenizer(const Settings &settings, ErrorLogger *errorLogger, const Preprocessor *preprocessor) :
+    list(&settings),
     mSettings(settings),
     mErrorLogger(errorLogger),
     mTemplateSimplifier(new TemplateSimplifier(*this)),
     mPreprocessor(preprocessor)
-{
-    // make sure settings are specified
-    assert(mSettings);
-}
+{}
 
 Tokenizer::~Tokenizer()
 {
@@ -178,7 +175,7 @@ nonneg int Tokenizer::sizeOfType(const std::string& type) const
 {
     const std::map<std::string, int>::const_iterator it = mTypeSize.find(type);
     if (it == mTypeSize.end()) {
-        const Library::PodType* podtype = mSettings->library.podtype(type);
+        const Library::PodType* podtype = mSettings.library.podtype(type);
         if (!podtype)
             return 0;
 
@@ -197,7 +194,7 @@ nonneg int Tokenizer::sizeOfType(const Token *type) const
 
     const std::map<std::string, int>::const_iterator it = mTypeSize.find(type->str());
     if (it == mTypeSize.end()) {
-        const Library::PodType* podtype = mSettings->library.podtype(type->str());
+        const Library::PodType* podtype = mSettings.library.podtype(type->str());
         if (!podtype)
             return 0;
 
@@ -205,9 +202,9 @@ nonneg int Tokenizer::sizeOfType(const Token *type) const
     }
     if (type->isLong()) {
         if (type->str() == "double")
-            return mSettings->platform.sizeof_long_double;
+            return mSettings.platform.sizeof_long_double;
         if (type->str() == "long")
-            return mSettings->platform.sizeof_long_long;
+            return mSettings.platform.sizeof_long_long;
     }
 
     return it->second;
@@ -335,7 +332,7 @@ bool Tokenizer::duplicateTypedef(Token **tokPtr, const Token *name, const Token
 
 void Tokenizer::unsupportedTypedef(const Token *tok) const
 {
-    if (!mSettings->debugwarnings)
+    if (!mSettings.debugwarnings)
         return;
 
     std::ostringstream str;
@@ -543,7 +540,7 @@ Token *Tokenizer::processFunc(Token *tok2, bool inOperator)
 
 void Tokenizer::simplifyUsingToTypedef()
 {
-    if (!isCPP() || mSettings->standards.cpp < Standards::CPP11)
+    if (!isCPP() || mSettings.standards.cpp < Standards::CPP11)
         return;
 
     for (Token *tok = list.front(); tok; tok = tok->next()) {
@@ -1075,7 +1072,7 @@ void Tokenizer::simplifyTypedef()
         if (indentlevel == 0 && tok->str() == "typedef") {
             TypedefSimplifier ts(tok);
             if (!ts.fail() && numberOfTypedefs[ts.name()] == 1) {
-                if (mSettings->severity.isEnabled(Severity::portability) && ts.isInvalidConstFunctionType(typedefs))
+                if (mSettings.severity.isEnabled(Severity::portability) && ts.isInvalidConstFunctionType(typedefs))
                     reportError(tok->next(), Severity::portability, "invalidConstFunctionType", "It is unspecified behavior to const qualify a function type.");
                 typedefs.emplace(ts.name(), ts);
@@ -1140,7 +1137,7 @@ void Tokenizer::simplifyTypedefCpp()
     // Convert "using a::b;" to corresponding typedef statements
     simplifyUsingToTypedef();
 
-    const std::time_t maxTime = mSettings->typedefMaxTime > 0 ? std::time(nullptr) + mSettings->typedefMaxTime: 0;
+    const std::time_t maxTime = mSettings.typedefMaxTime > 0 ?
std::time(nullptr) + mSettings.typedefMaxTime: 0; for (Token *tok = list.front(); tok; tok = tok->next()) { if (mErrorLogger && !list.getFiles().empty()) @@ -1150,7 +1147,7 @@ void Tokenizer::simplifyTypedefCpp() return; if (maxTime > 0 && std::time(nullptr) > maxTime) { - if (mSettings->debugwarnings) { + if (mSettings.debugwarnings) { ErrorMessage::FileLocation loc; loc.setfile(list.getFiles()[0]); ErrorMessage errmsg({std::move(loc)}, @@ -2838,7 +2835,7 @@ static unsigned int tokDistance(const Token* tok1, const Token* tok2) { bool Tokenizer::simplifyUsing() { - if (!isCPP() || mSettings->standards.cpp < Standards::CPP11) + if (!isCPP() || mSettings.standards.cpp < Standards::CPP11) return false; const unsigned int maxReplacementTokens = 1000; // limit the number of tokens we replace @@ -2875,7 +2872,7 @@ bool Tokenizer::simplifyUsing() if (Token::Match(tok, "{|}|namespace|class|struct|union") || Token::Match(tok, "using namespace %name% ;|::")) { try { - setScopeInfo(tok, ¤tScope, mSettings->debugwarnings); + setScopeInfo(tok, ¤tScope, mSettings.debugwarnings); } catch (const std::runtime_error &) { reportError(tok, Severity::debug, "simplifyUsingUnmatchedBodyEnd", "simplifyUsing: unmatched body end"); @@ -3029,7 +3026,7 @@ bool Tokenizer::simplifyUsing() if ((Token::Match(tok1, "{|}|namespace|class|struct|union") && tok1->strAt(-1) != "using") || Token::Match(tok1, "using namespace %name% ;|::")) { try { - setScopeInfo(tok1, ¤tScope1, mSettings->debugwarnings); + setScopeInfo(tok1, ¤tScope1, mSettings.debugwarnings); } catch (const std::runtime_error &) { reportError(tok1, Severity::debug, "simplifyUsingUnmatchedBodyEnd", "simplifyUsing: unmatched body end"); @@ -3317,7 +3314,7 @@ bool Tokenizer::simplifyUsing() void Tokenizer::simplifyUsingError(const Token* usingStart, const Token* usingEnd) { - if (mSettings->debugwarnings && mErrorLogger) { + if (mSettings.debugwarnings && mErrorLogger) { std::string str; for (const Token *tok = usingStart; tok && tok != usingEnd; tok = tok->next()) { if (!str.empty()) @@ -3350,7 +3347,7 @@ bool Tokenizer::simplifyTokens1(const std::string &configuration) mConfiguration = configuration; if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1", mSettings.showtime, mTimerResults); if (!simplifyTokenList1(list.getFiles().front().c_str())) return false; } else { @@ -3359,23 +3356,23 @@ bool Tokenizer::simplifyTokens1(const std::string &configuration) } if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::createAst", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::createAst", mSettings.showtime, mTimerResults); list.createAst(); - list.validateAst(mSettings->debugnormal); + list.validateAst(mSettings.debugnormal); } else { list.createAst(); - list.validateAst(mSettings->debugnormal); + list.validateAst(mSettings.debugnormal); } if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::createSymbolDatabase", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::createSymbolDatabase", mSettings.showtime, mTimerResults); createSymbolDatabase(); } else { createSymbolDatabase(); } if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::setValueType", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::setValueType", mSettings.showtime, mTimerResults); mSymbolDatabase->setValueTypeInTokenList(false); mSymbolDatabase->setValueTypeInTokenList(true); } else 
{ @@ -3383,7 +3380,7 @@ bool Tokenizer::simplifyTokens1(const std::string &configuration) mSymbolDatabase->setValueTypeInTokenList(true); } - if (!mSettings->buildDir.empty()) + if (!mSettings.buildDir.empty()) Summaries::create(this, configuration); // TODO: do not run valueflow if no checks are being performed at all - e.g. unusedFunctions only @@ -3392,17 +3389,17 @@ bool Tokenizer::simplifyTokens1(const std::string &configuration) if (doValueFlow) { if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::ValueFlow", mSettings->showtime, mTimerResults); - ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, mSettings, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::ValueFlow", mSettings.showtime, mTimerResults); + ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, &mSettings, mTimerResults); } else { - ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, mSettings, mTimerResults); + ValueFlow::setValues(list, *mSymbolDatabase, mErrorLogger, &mSettings, mTimerResults); } arraySizeAfterValueFlow(); } // Warn about unhandled character literals - if (mSettings->severity.isEnabled(Severity::portability)) { + if (mSettings.severity.isEnabled(Severity::portability)) { for (const Token *tok = tokens(); tok; tok = tok->next()) { if (tok->tokType() == Token::eChar && tok->values().empty()) { try { @@ -3457,18 +3454,18 @@ void Tokenizer::fillTypeSizes() { mTypeSize.clear(); mTypeSize["char"] = 1; - mTypeSize["_Bool"] = mSettings->platform.sizeof_bool; - mTypeSize["bool"] = mSettings->platform.sizeof_bool; - mTypeSize["short"] = mSettings->platform.sizeof_short; - mTypeSize["int"] = mSettings->platform.sizeof_int; - mTypeSize["long"] = mSettings->platform.sizeof_long; - mTypeSize["long long"] = mSettings->platform.sizeof_long_long; - mTypeSize["float"] = mSettings->platform.sizeof_float; - mTypeSize["double"] = mSettings->platform.sizeof_double; - mTypeSize["long double"] = mSettings->platform.sizeof_long_double; - mTypeSize["wchar_t"] = mSettings->platform.sizeof_wchar_t; - mTypeSize["size_t"] = mSettings->platform.sizeof_size_t; - mTypeSize["*"] = mSettings->platform.sizeof_pointer; + mTypeSize["_Bool"] = mSettings.platform.sizeof_bool; + mTypeSize["bool"] = mSettings.platform.sizeof_bool; + mTypeSize["short"] = mSettings.platform.sizeof_short; + mTypeSize["int"] = mSettings.platform.sizeof_int; + mTypeSize["long"] = mSettings.platform.sizeof_long; + mTypeSize["long long"] = mSettings.platform.sizeof_long_long; + mTypeSize["float"] = mSettings.platform.sizeof_float; + mTypeSize["double"] = mSettings.platform.sizeof_double; + mTypeSize["long double"] = mSettings.platform.sizeof_long_double; + mTypeSize["wchar_t"] = mSettings.platform.sizeof_wchar_t; + mTypeSize["size_t"] = mSettings.platform.sizeof_size_t; + mTypeSize["*"] = mSettings.platform.sizeof_pointer; } void Tokenizer::combineOperators() @@ -3558,7 +3555,7 @@ void Tokenizer::combineStringAndCharLiterals() while (Token::Match(tok->next(), "%str%") || Token::Match(tok->next(), "_T|_TEXT|TEXT ( %str% )")) { if (tok->next()->isName()) { - if (!mSettings->platform.isWindows()) + if (!mSettings.platform.isWindows()) break; tok->deleteNext(2); tok->next()->deleteNext(); @@ -4082,7 +4079,7 @@ void Tokenizer::simplifyTemplates() if (isC()) return; - const std::time_t maxTime = mSettings->templateMaxTime > 0 ? std::time(nullptr) + mSettings->templateMaxTime : 0; + const std::time_t maxTime = mSettings.templateMaxTime > 0 ? 
std::time(nullptr) + mSettings.templateMaxTime : 0; mTemplateSimplifier->simplifyTemplates( maxTime); } @@ -4668,7 +4665,7 @@ void Tokenizer::setVarIdPass1() continue; bool decl; - if (isCPP() && mSettings->standards.cpp >= Standards::CPP17 && Token::Match(tok, "[(;{}] const| auto &|&&| [")) { + if (isCPP() && mSettings.standards.cpp >= Standards::CPP17 && Token::Match(tok, "[(;{}] const| auto &|&&| [")) { // Structured bindings tok2 = Token::findsimplematch(tok, "["); if ((Token::simpleMatch(tok->previous(), "for (") && Token::simpleMatch(tok2->link(), "] :")) || @@ -5466,7 +5463,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) simplifyAsm(); // foo < bar < >> => foo < bar < > > - if (isCPP() || mSettings->daca) + if (isCPP() || mSettings.daca) splitTemplateRightAngleBrackets(!isCPP()); // Remove extra "template" tokens that are not used by cppcheck @@ -5476,7 +5473,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) // Bail out if code is garbage if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::findGarbageCode", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::findGarbageCode", mSettings.showtime, mTimerResults); findGarbageCode(); } else { findGarbageCode(); @@ -5591,7 +5588,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) // simplify labels and 'case|default'-like syntaxes simplifyLabelsCaseDefault(); - if (!isC() && !mSettings->library.markupFile(FileName)) { + if (!isC() && !mSettings.library.markupFile(FileName)) { findComplicatedSyntaxErrorsInTemplates(); } @@ -5647,7 +5644,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) // typedef.. if (mTimerResults) { - Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::simplifyTypedef", mSettings->showtime, mTimerResults); + Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::simplifyTypedef", mSettings.showtime, mTimerResults); simplifyTypedef(); } else { simplifyTypedef(); @@ -5663,7 +5660,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) prepareTernaryOpForAST(); // class x y { - if (isCPP() && mSettings->severity.isEnabled(Severity::information)) { + if (isCPP() && mSettings.severity.isEnabled(Severity::information)) { for (const Token *tok = list.front(); tok; tok = tok->next()) { if (Token::Match(tok, "class %type% %type% [:{]")) { unhandled_macro_class_x_y(tok); @@ -5748,7 +5745,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[]) if (!isC()) { // Handle templates.. 
         if (mTimerResults) {
-            Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::simplifyTemplates", mSettings->showtime, mTimerResults);
+            Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::simplifyTemplates", mSettings.showtime, mTimerResults);
             simplifyTemplates();
         } else {
             simplifyTemplates();
@@ -5778,7 +5775,7 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
     validate(); // #6772 "segmentation fault (invalid code) in Tokenizer::setVarId"
 
     if (mTimerResults) {
-        Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::setVarId", mSettings->showtime, mTimerResults);
+        Timer t("Tokenizer::simplifyTokens1::simplifyTokenList1::setVarId", mSettings.showtime, mTimerResults);
         setVarId();
     } else {
         setVarId();
@@ -5836,33 +5833,33 @@ bool Tokenizer::simplifyTokenList1(const char FileName[])
 
 void Tokenizer::printDebugOutput(int simplification) const
 {
-    const bool debug = (simplification != 1U && mSettings->debugSimplified) ||
-                       (simplification != 2U && mSettings->debugnormal);
+    const bool debug = (simplification != 1U && mSettings.debugSimplified) ||
+                       (simplification != 2U && mSettings.debugnormal);
 
     if (debug && list.front()) {
         list.front()->printOut(nullptr, list.getFiles());
 
-        if (mSettings->xml)
+        if (mSettings.xml)
             std::cout << "<debug>" << std::endl;
 
         if (mSymbolDatabase) {
-            if (mSettings->xml)
+            if (mSettings.xml)
                 mSymbolDatabase->printXml(std::cout);
-            else if (mSettings->verbose) {
+            else if (mSettings.verbose) {
                 mSymbolDatabase->printOut("Symbol database");
             }
         }
 
-        if (mSettings->verbose)
-            list.front()->printAst(mSettings->verbose, mSettings->xml, list.getFiles(), std::cout);
+        if (mSettings.verbose)
+            list.front()->printAst(mSettings.verbose, mSettings.xml, list.getFiles(), std::cout);
 
-        list.front()->printValueFlow(mSettings->xml, std::cout);
+        list.front()->printValueFlow(mSettings.xml, std::cout);
 
-        if (mSettings->xml)
+        if (mSettings.xml)
             std::cout << "</debug>" << std::endl;
     }
 
-    if (mSymbolDatabase && simplification == 2U && mSettings->debugwarnings) {
+    if (mSymbolDatabase && simplification == 2U && mSettings.debugwarnings) {
         printUnknownTypes();
 
         // the typeStartToken() should come before typeEndToken()
@@ -6035,7 +6032,7 @@ void Tokenizer::dump(std::ostream &out) const
                 containers.insert(tok->valueType()->container);
         }
         if (!tok->varId() && tok->scope()->isExecutable() && Token::Match(tok, "%name% (")) {
-            if (mSettings->library.isnoreturn(tok))
+            if (mSettings.library.isnoreturn(tok))
                 outs += " noreturn=\"true\"";
         }
 
@@ -6111,15 +6108,15 @@ void Tokenizer::dump(std::ostream &out) const
 
 void Tokenizer::simplifyHeadersAndUnusedTemplates()
 {
-    if (mSettings->checkHeaders && mSettings->checkUnusedTemplates)
+    if (mSettings.checkHeaders && mSettings.checkUnusedTemplates)
         // Full analysis. All information in the headers are kept.
return; - const bool checkHeaders = mSettings->checkHeaders; - const bool removeUnusedIncludedFunctions = !mSettings->checkHeaders; - const bool removeUnusedIncludedClasses = !mSettings->checkHeaders; - const bool removeUnusedIncludedTemplates = !mSettings->checkUnusedTemplates || !mSettings->checkHeaders; - const bool removeUnusedTemplates = !mSettings->checkUnusedTemplates; + const bool checkHeaders = mSettings.checkHeaders; + const bool removeUnusedIncludedFunctions = !mSettings.checkHeaders; + const bool removeUnusedIncludedClasses = !mSettings.checkHeaders; + const bool removeUnusedIncludedTemplates = !mSettings.checkUnusedTemplates || !mSettings.checkHeaders; + const bool removeUnusedTemplates = !mSettings.checkUnusedTemplates; // checkHeaders: // @@ -6163,7 +6160,7 @@ void Tokenizer::simplifyHeadersAndUnusedTemplates() const bool isIncluded = (tok->fileIndex() != 0); // Remove executable code - if (isIncluded && !mSettings->checkHeaders && tok->str() == "{") { + if (isIncluded && !mSettings.checkHeaders && tok->str() == "{") { // TODO: We probably need to keep the executable code if this function is called from the source file. const Token *prev = tok->previous(); while (prev && prev->isName()) @@ -6398,9 +6395,9 @@ void Tokenizer::removeMacrosInGlobalScope() void Tokenizer::removePragma() { - if (isC() && mSettings->standards.c == Standards::C89) + if (isC() && mSettings.standards.c == Standards::C89) return; - if (isCPP() && mSettings->standards.cpp == Standards::CPP03) + if (isCPP() && mSettings.standards.cpp == Standards::CPP03) return; for (Token *tok = list.front(); tok; tok = tok->next()) { while (Token::simpleMatch(tok, "_Pragma (")) { @@ -6978,7 +6975,7 @@ void Tokenizer::simplifyVarDecl(const bool only_k_r_fpar) void Tokenizer::simplifyVarDecl(Token * tokBegin, const Token * const tokEnd, const bool only_k_r_fpar) { - const bool isCPP11 = mSettings->standards.cpp >= Standards::CPP11; + const bool isCPP11 = mSettings.standards.cpp >= Standards::CPP11; // Split up variable declarations.. 
// "int a=4;" => "int a; a=4;" @@ -7659,10 +7656,10 @@ void Tokenizer::elseif() void Tokenizer::simplifyIfSwitchForInit() { - if (!isCPP() || mSettings->standards.cpp < Standards::CPP17) + if (!isCPP() || mSettings.standards.cpp < Standards::CPP17) return; - const bool forInit = (mSettings->standards.cpp >= Standards::CPP20); + const bool forInit = (mSettings.standards.cpp >= Standards::CPP20); for (Token *tok = list.front(); tok; tok = tok->next()) { if (!Token::Match(tok, "if|switch|for (")) @@ -7946,13 +7943,13 @@ void Tokenizer::simplifyTypeIntrinsics() bool Tokenizer::isScopeNoReturn(const Token *endScopeToken, bool *unknown) const { std::string unknownFunc; - const bool ret = mSettings->library.isScopeNoReturn(endScopeToken,&unknownFunc); - if (!unknownFunc.empty() && mSettings->summaryReturn.find(unknownFunc) != mSettings->summaryReturn.end()) { + const bool ret = mSettings.library.isScopeNoReturn(endScopeToken,&unknownFunc); + if (!unknownFunc.empty() && mSettings.summaryReturn.find(unknownFunc) != mSettings.summaryReturn.end()) { return false; } if (unknown) *unknown = !unknownFunc.empty(); - if (!unknownFunc.empty() && mSettings->checkLibrary) { + if (!unknownFunc.empty() && mSettings.checkLibrary) { bool warn = true; if (Token::simpleMatch(endScopeToken->tokAt(-2), ") ; }")) { const Token * const ftok = endScopeToken->linkAt(-2)->previous(); @@ -8072,7 +8069,7 @@ bool Tokenizer::isOneNumber(const std::string &s) // ------------------------------------------------------------------------ void Tokenizer::checkConfiguration() const { - if (!mSettings->checkConfiguration) + if (!mSettings.checkConfiguration) return; for (const Token *tok = tokens(); tok; tok = tok->next()) { if (!Token::Match(tok, "%name% (")) @@ -8350,7 +8347,7 @@ void Tokenizer::reportUnknownMacros() const void Tokenizer::findGarbageCode() const { - const bool isCPP11 = isCPP() && mSettings->standards.cpp >= Standards::CPP11; + const bool isCPP11 = isCPP() && mSettings.standards.cpp >= Standards::CPP11; static const std::unordered_set nonConsecutiveKeywords{ "break", "continue", @@ -8392,7 +8389,7 @@ void Tokenizer::findGarbageCode() const // Assign/increment/decrement literal else if (Token::Match(tok, "!!) 
%num%|%str%|%char% %assign%|++|--")) { - if (!isCPP() || mSettings->standards.cpp < Standards::CPP20 || !Token::Match(tok->previous(), "%name% : %num% =")) + if (!isCPP() || mSettings.standards.cpp < Standards::CPP20 || !Token::Match(tok->previous(), "%name% : %num% =")) syntaxError(tok, tok->next()->str() + " " + tok->strAt(2)); } else if (Token::simpleMatch(tok, ") return") && !Token::Match(tok->link()->previous(), "if|while|for (")) { @@ -8420,7 +8417,7 @@ void Tokenizer::findGarbageCode() const if (!Token::Match(tok->next(), "( !!)")) syntaxError(tok); if (tok->str() != "for") { - if (isGarbageExpr(tok->next(), tok->linkAt(1), mSettings->standards.cpp>=Standards::cppstd_t::CPP17)) + if (isGarbageExpr(tok->next(), tok->linkAt(1), mSettings.standards.cpp>=Standards::cppstd_t::CPP17)) syntaxError(tok); } } @@ -8527,7 +8524,7 @@ void Tokenizer::findGarbageCode() const // if we have an invalid number of semicolons inside for( ), assume syntax error if (semicolons > 2) syntaxError(tok); - if (semicolons == 1 && !(isCPP() && mSettings->standards.cpp >= Standards::CPP20)) + if (semicolons == 1 && !(isCPP() && mSettings.standards.cpp >= Standards::CPP20)) syntaxError(tok); } @@ -8900,7 +8897,7 @@ void Tokenizer::simplifyStructDecl() void Tokenizer::simplifyCallingConvention() { - const bool windows = mSettings->platform.isWindows(); + const bool windows = mSettings.platform.isWindows(); for (Token *tok = list.front(); tok; tok = tok->next()) { while (Token::Match(tok, "__cdecl|__stdcall|__fastcall|__thiscall|__clrcall|__syscall|__pascal|__fortran|__far|__near") || (windows && Token::Match(tok, "WINAPI|APIENTRY|CALLBACK"))) { @@ -8985,10 +8982,10 @@ void Tokenizer::simplifyDeclspec() void Tokenizer::simplifyAttribute() { for (Token *tok = list.front(); tok; tok = tok->next()) { - if (!tok->isKeyword() && Token::Match(tok, "%type% (") && !mSettings->library.isNotLibraryFunction(tok)) { - if (mSettings->library.isFunctionConst(tok->str(), true)) + if (!tok->isKeyword() && Token::Match(tok, "%type% (") && !mSettings.library.isNotLibraryFunction(tok)) { + if (mSettings.library.isFunctionConst(tok->str(), true)) tok->isAttributePure(true); - if (mSettings->library.isFunctionConst(tok->str(), false)) + if (mSettings.library.isFunctionConst(tok->str(), false)) tok->isAttributeConst(true); } while (isAttribute(tok, true)) { @@ -9109,7 +9106,7 @@ void Tokenizer::simplifyCppcheckAttribute() void Tokenizer::simplifyCPPAttribute() { - if (mSettings->standards.cpp < Standards::CPP11 || isC()) + if (mSettings.standards.cpp < Standards::CPP11 || isC()) return; for (Token *tok = list.front(); tok;) { @@ -9180,7 +9177,7 @@ void Tokenizer::simplifyCPPAttribute() void Tokenizer::simplifySpaceshipOperator() { - if (isCPP() && mSettings->standards.cpp >= Standards::CPP20) { + if (isCPP() && mSettings.standards.cpp >= Standards::CPP20) { for (Token *tok = list.front(); tok && tok->next(); tok = tok->next()) { if (Token::simpleMatch(tok, "<= >")) { tok->str("<=>"); @@ -9212,9 +9209,9 @@ void Tokenizer::simplifyKeyword() // linux kernel code at least uses "_inline" as struct member name at some // places. 
-    const bool c99 = isC() && mSettings->standards.c >= Standards::C99;
-    const bool cpp11 = isCPP() && mSettings->standards.cpp >= Standards::CPP11;
-    const bool cpp20 = isCPP() && mSettings->standards.cpp >= Standards::CPP20;
+    const bool c99 = isC() && mSettings.standards.c >= Standards::C99;
+    const bool cpp11 = isCPP() && mSettings.standards.cpp >= Standards::CPP11;
+    const bool cpp20 = isCPP() && mSettings.standards.cpp >= Standards::CPP20;
 
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (keywords.find(tok->str()) != keywords.end()) {
@@ -9234,7 +9231,7 @@ void Tokenizer::simplifyKeyword()
             }
         }
 
-        if (isC() || mSettings->standards.cpp == Standards::CPP03) {
+        if (isC() || mSettings.standards.cpp == Standards::CPP03) {
            if (tok->str() == "auto")
                 tok->deleteThis();
         }
@@ -9264,7 +9261,7 @@ void Tokenizer::simplifyKeyword()
                 tok->deleteThis();
             }
 
-            if (mSettings->standards.c >= Standards::C11) {
+            if (mSettings.standards.c >= Standards::C11) {
                 while (tok->str() == "_Atomic") {
                     for (Token* temp: getTypeTokens())
                         temp->isAtomic(true);
@@ -9342,7 +9339,7 @@ static Token* setTokenDebug(Token* start, TokenDebug td)
 
 void Tokenizer::simplifyDebug()
 {
-    if (!mSettings->debugnormal && !mSettings->debugwarnings)
+    if (!mSettings.debugnormal && !mSettings.debugwarnings)
         return;
 
     static const std::unordered_map<std::string, TokenDebug> m = {{"debug_valueflow", TokenDebug::ValueFlow}, {"debug_valuetype", TokenDebug::ValueType}};
@@ -9670,14 +9667,14 @@ void Tokenizer::simplifyNamespaceStd()
                 if (start != tok && start->isName() && !start->isKeyword() && (!start->previous() || Token::Match(start->previous(), "[;{}]")))
                     userFunctions.insert(tok->str());
             }
-            if (userFunctions.find(tok->str()) == userFunctions.end() && mSettings->library.matchArguments(tok, "std::" + tok->str()))
+            if (userFunctions.find(tok->str()) == userFunctions.end() && mSettings.library.matchArguments(tok, "std::" + tok->str()))
                 insert = true;
         } else if (Token::simpleMatch(tok->next(), "<") &&
-                   (mSettings->library.detectContainerOrIterator(tok, nullptr, /*withoutStd*/ true) || mSettings->library.detectSmartPointer(tok, /*withoutStd*/ true)))
+                   (mSettings.library.detectContainerOrIterator(tok, nullptr, /*withoutStd*/ true) || mSettings.library.detectSmartPointer(tok, /*withoutStd*/ true)))
             insert = true;
-        else if (mSettings->library.hasAnyTypeCheck("std::" + tok->str()) ||
-                 mSettings->library.podtype("std::" + tok->str()) ||
-                 mSettings->library.detectContainerOrIterator(tok, nullptr, /*withoutStd*/ true))
+        else if (mSettings.library.hasAnyTypeCheck("std::" + tok->str()) ||
+                 mSettings.library.podtype("std::" + tok->str()) ||
+                 mSettings.library.detectContainerOrIterator(tok, nullptr, /*withoutStd*/ true))
             insert = true;
 
         if (insert) {
@@ -9700,7 +9697,7 @@ void Tokenizer::simplifyNamespaceStd()
 
 void Tokenizer::simplifyMicrosoftMemoryFunctions()
 {
     // skip if not Windows
-    if (!mSettings->platform.isWindows())
+    if (!mSettings.platform.isWindows())
         return;
 
     for (Token *tok = list.front(); tok; tok = tok->next()) {
@@ -9795,10 +9792,10 @@ namespace {
 void Tokenizer::simplifyMicrosoftStringFunctions()
 {
     // skip if not Windows
-    if (!mSettings->platform.isWindows())
+    if (!mSettings.platform.isWindows())
         return;
 
-    const bool ansi = mSettings->platform.type == Platform::Type::Win32A;
+    const bool ansi = mSettings.platform.type == Platform::Type::Win32A;
 
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (tok->strAt(1) != "(")
             continue;
@@ -9831,7 +9828,7 @@ void Tokenizer::simplifyMicrosoftStringFunctions()
 
 void Tokenizer::simplifyBorland()
 {
     // skip if not Windows
-    if (!mSettings->platform.isWindows())
+    if (!mSettings.platform.isWindows())
         return;
     if (isC())
         return;
@@ -9882,7 +9879,7 @@ void Tokenizer::simplifyBorland()
 
 void Tokenizer::createSymbolDatabase()
 {
     if (!mSymbolDatabase)
-        mSymbolDatabase = new SymbolDatabase(*this, *mSettings, mErrorLogger);
+        mSymbolDatabase = new SymbolDatabase(*this, mSettings, mErrorLogger);
 
     mSymbolDatabase->validate();
 }
@@ -10034,7 +10031,7 @@ void Tokenizer::simplifyOperatorName()
         }
     }
 
-    if (mSettings->debugwarnings) {
+    if (mSettings.debugwarnings) {
         const Token *tok = list.front();
 
         while ((tok = Token::findsimplematch(tok, "operator")) != nullptr) {
@@ -10323,14 +10320,12 @@ void Tokenizer::reportError(const std::list<const Token*>& callstack, Severity s
 
 void Tokenizer::setPodTypes()
 {
-    if (!mSettings)
-        return;
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (!tok->isName() || tok->varId())
             continue;
 
         // pod type
-        const Library::PodType *podType = mSettings->library.podtype(tok->str());
+        const Library::PodType *podType = mSettings.library.podtype(tok->str());
         if (podType) {
             const Token *prev = tok->previous();
             while (prev && prev->isName())
@@ -10402,7 +10397,7 @@ void Tokenizer::simplifyNestedNamespace()
 
 void Tokenizer::simplifyCoroutines()
 {
-    if (!isCPP() || mSettings->standards.cpp < Standards::CPP20)
+    if (!isCPP() || mSettings.standards.cpp < Standards::CPP20)
         return;
     for (Token *tok = list.front(); tok; tok = tok->next()) {
         if (!tok->isName() || !Token::Match(tok, "co_return|co_yield|co_await"))
diff --git a/lib/tokenize.h b/lib/tokenize.h
index 8da9789f83f4..d7ece866fb69 100644
--- a/lib/tokenize.h
+++ b/lib/tokenize.h
@@ -59,7 +59,7 @@ class CPPCHECKLIB Tokenizer {
     friend class TemplateSimplifier;
 
 public:
-    explicit Tokenizer(const Settings * settings, ErrorLogger *errorLogger = nullptr, const Preprocessor *preprocessor = nullptr);
+    explicit Tokenizer(const Settings & settings, ErrorLogger *errorLogger = nullptr, const Preprocessor *preprocessor = nullptr);
     ~Tokenizer();
 
     void setTimerResults(TimerResults *tr) {
@@ -640,8 +640,9 @@ class CPPCHECKLIB Tokenizer {
      */
     static const Token * startOfExecutableScope(const Token * tok);
 
+    // TODO: return reference
     const Settings *getSettings() const {
-        return mSettings;
+        return &mSettings;
     }
 
     void calculateScopes();
@@ -668,7 +669,7 @@ class CPPCHECKLIB Tokenizer {
     void setPodTypes();
 
     /** settings */
-    const Settings * const mSettings;
+    const Settings & mSettings;
 
     /** errorlogger */
     ErrorLogger* const mErrorLogger;
diff --git a/test/helpers.h b/test/helpers.h
index d1e7022b377c..89503254f0b9 100644
--- a/test/helpers.h
+++ b/test/helpers.h
@@ -42,7 +42,7 @@ class givenACodeSampleToTokenize {
 
 public:
     explicit givenACodeSampleToTokenize(const char sample[], bool createOnly = false, bool cpp = true)
-        : tokenizer(&settings, nullptr) {
+        : tokenizer(settings, nullptr) {
         std::istringstream iss(sample);
         if (createOnly)
             tokenizer.list.createTokens(iss, cpp ? "test.cpp" : "test.c");
diff --git a/test/test64bit.cpp b/test/test64bit.cpp
index 72d54f91d1e3..041e3228fd79 100644
--- a/test/test64bit.cpp
+++ b/test/test64bit.cpp
@@ -48,7 +48,7 @@ class Test64BitPortability : public TestFixture {
         errout.str("");
 
         // Tokenize..
- Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testassert.cpp b/test/testassert.cpp index 589b53641236..8c4864ef7181 100644 --- a/test/testassert.cpp +++ b/test/testassert.cpp @@ -39,7 +39,7 @@ class TestAssert : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); diff --git a/test/testastutils.cpp b/test/testastutils.cpp index d6ba495b482b..53ea0643a6f3 100644 --- a/test/testastutils.cpp +++ b/test/testastutils.cpp @@ -51,7 +51,7 @@ class TestAstUtils : public TestFixture { #define findLambdaEndToken(...) findLambdaEndToken_(__FILE__, __LINE__, __VA_ARGS__) bool findLambdaEndToken_(const char* file, int line, const char code[], const char pattern[] = nullptr, bool checkNext = true) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token* const tokStart = pattern ? Token::findsimplematch(tokenizer.tokens(), pattern, strlen(pattern)) : tokenizer.tokens(); @@ -90,7 +90,7 @@ class TestAstUtils : public TestFixture { #define findLambdaStartToken(code) findLambdaStartToken_(code, __FILE__, __LINE__) bool findLambdaStartToken_(const char code[], const char* file, int line) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tokStart = (::findLambdaStartToken)(tokenizer.list.back()); @@ -123,7 +123,7 @@ class TestAstUtils : public TestFixture { #define isNullOperand(code) isNullOperand_(code, __FILE__, __LINE__) bool isNullOperand_(const char code[], const char* file, int line) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); return (::isNullOperand)(tokenizer.tokens()); @@ -145,7 +145,7 @@ class TestAstUtils : public TestFixture { #define isReturnScope(code, offset) isReturnScope_(code, offset, __FILE__, __LINE__) bool isReturnScope_(const char code[], int offset, const char* file, int line) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tok = (offset < 0) @@ -177,7 +177,7 @@ class TestAstUtils : public TestFixture { bool isSameExpression_(const char* file, int line, const char code[], const char tokStr1[], const char tokStr2[]) { const Settings settings; Library library; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), tokStr1, strlen(tokStr1)); @@ -215,7 +215,7 @@ class TestAstUtils : public TestFixture { #define isVariableChanged(code, startPattern, endPattern) isVariableChanged_(code, startPattern, endPattern, __FILE__, __LINE__) bool isVariableChanged_(const char code[], const char startPattern[], const char endPattern[], const char* file, int line) { const 
Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), startPattern, strlen(startPattern)); @@ -250,7 +250,7 @@ class TestAstUtils : public TestFixture { #define isVariableChangedByFunctionCall(code, pattern, inconclusive) isVariableChangedByFunctionCall_(code, pattern, inconclusive, __FILE__, __LINE__) bool isVariableChangedByFunctionCall_(const char code[], const char pattern[], bool *inconclusive, const char* file, int line) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const argtok = Token::findmatch(tokenizer.tokens(), pattern); @@ -393,7 +393,7 @@ class TestAstUtils : public TestFixture { int line) { const Settings settings = settingsBuilder().library("std.cfg").build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token* const start = Token::findsimplematch(tokenizer.tokens(), startPattern, strlen(startPattern)); @@ -425,7 +425,7 @@ class TestAstUtils : public TestFixture { #define nextAfterAstRightmostLeaf(code, parentPattern, rightPattern) nextAfterAstRightmostLeaf_(code, parentPattern, rightPattern, __FILE__, __LINE__) bool nextAfterAstRightmostLeaf_(const char code[], const char parentPattern[], const char rightPattern[], const char* file, int line) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * tok = Token::findsimplematch(tokenizer.tokens(), parentPattern, strlen(parentPattern)); @@ -450,7 +450,7 @@ class TestAstUtils : public TestFixture { Result isUsedAsBool(const char code[], const char pattern[]) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); if (!tokenizer.tokenize(istr, "test.cpp")) return Result::Fail; diff --git a/test/testautovariables.cpp b/test/testautovariables.cpp index 8ba1298158e1..86488536a4ed 100644 --- a/test/testautovariables.cpp +++ b/test/testautovariables.cpp @@ -40,7 +40,7 @@ class TestAutoVariables : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build(); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); diff --git a/test/testbool.cpp b/test/testbool.cpp index c16477fb60eb..443fefa7097e 100644 --- a/test/testbool.cpp +++ b/test/testbool.cpp @@ -82,7 +82,7 @@ class TestBool : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); diff --git a/test/testboost.cpp b/test/testboost.cpp index fd6d8ef4b6a8..080cf45303ae 100644 --- a/test/testboost.cpp +++ b/test/testboost.cpp @@ -42,7 +42,7 @@ class TestBoost : public TestFixture { errout.str(""); // Tokenize.. 
- Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testbufferoverrun.cpp b/test/testbufferoverrun.cpp index c655c3d8f7a6..128b8120a6fc 100644 --- a/test/testbufferoverrun.cpp +++ b/test/testbufferoverrun.cpp @@ -48,7 +48,7 @@ class TestBufferOverrun : public TestFixture { const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).build(); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -57,7 +57,7 @@ class TestBufferOverrun : public TestFixture { } void check_(const char* file, int line, const char code[], const Settings &settings, const char filename[] = "test.cpp") { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -78,7 +78,7 @@ class TestBufferOverrun : public TestFixture { .c(Standards::CLatest).cpp(Standards::CPPLatest).certainty(Certainty::inconclusive).build(); std::vector files(1, filename); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. @@ -5175,7 +5175,7 @@ class TestBufferOverrun : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testcharvar.cpp b/test/testcharvar.cpp index 1936720e5614..47fe3b3cabc9 100644 --- a/test/testcharvar.cpp +++ b/test/testcharvar.cpp @@ -45,7 +45,7 @@ class TestCharVar : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testclangimport.cpp b/test/testclangimport.cpp index e301c41db023..fa570321b0e5 100644 --- a/test/testclangimport.cpp +++ b/test/testclangimport.cpp @@ -140,7 +140,7 @@ class TestClangImport : public TestFixture { std::string parse(const char clang[]) { const Settings settings = settingsBuilder().clang().build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(clang); clangimport::parseClangAstDump(&tokenizer, istr); if (!tokenizer.tokens()) { @@ -1054,7 +1054,7 @@ class TestClangImport : public TestFixture { #define GET_SYMBOL_DB(AST) \ const Settings settings = settingsBuilder().clang().platform(Platform::Type::Unix64).build(); \ - Tokenizer tokenizer(&settings, this); \ + Tokenizer tokenizer(settings, this); \ { \ std::istringstream istr(AST); \ clangimport::parseClangAstDump(&tokenizer, istr); \ diff --git a/test/testclass.cpp b/test/testclass.cpp index 528188947222..a34db18d0de7 100644 --- a/test/testclass.cpp +++ b/test/testclass.cpp @@ -251,7 +251,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -357,7 +357,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. 
- Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -511,7 +511,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings1); // Tokenize.. - Tokenizer tokenizer(&settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -722,7 +722,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. - Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -1171,7 +1171,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. - Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -1647,7 +1647,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings1); // Tokenize.. - Tokenizer tokenizer(&settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -2612,7 +2612,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. - Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -2954,7 +2954,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -3585,7 +3585,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings1); // Tokenize.. - Tokenizer tokenizer(&settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -3622,7 +3622,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -7547,7 +7547,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. - Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -7584,7 +7584,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -7798,7 +7798,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings0); // Tokenize.. 
- Tokenizer tokenizer(&settings0, this, &preprocessor); + Tokenizer tokenizer(settings0, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -7914,7 +7914,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -8263,7 +8263,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -8449,7 +8449,7 @@ class TestClass : public TestFixture { const Settings settings = settingsBuilder().severity(Severity::style).build(); std::vector files(1, "test.cpp"); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(code, files, tokenizer); ASSERT_LOC(tokenizer.simplifyTokens1(""), file, line); @@ -8625,7 +8625,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -8648,7 +8648,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings1); // Tokenize.. - Tokenizer tokenizer(&settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -8805,7 +8805,7 @@ class TestClass : public TestFixture { // getFileInfo std::list fileInfo; for (const std::string& c: code) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(c); ASSERT(tokenizer.tokenize(istr, (std::to_string(fileInfo.size()) + ".cpp").c_str())); fileInfo.push_back(check.getFileInfo(&tokenizer, &settings)); @@ -8853,7 +8853,7 @@ class TestClass : public TestFixture { Preprocessor preprocessor(settings1); // Tokenize.. - Tokenizer tokenizer(&settings1, this, &preprocessor); + Tokenizer tokenizer(settings1, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testcondition.cpp b/test/testcondition.cpp index 30cbfd2a5d8d..ab40dd5e817a 100644 --- a/test/testcondition.cpp +++ b/test/testcondition.cpp @@ -131,7 +131,7 @@ class TestCondition : public TestFixture { Preprocessor preprocessor(settings); std::vector files(1, filename); - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. @@ -547,7 +547,7 @@ class TestCondition : public TestFixture { errout.str(""); // Tokenize.. 
- Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testconstructors.cpp b/test/testconstructors.cpp index 77ab92d86778..55a3bf732c21 100644 --- a/test/testconstructors.cpp +++ b/test/testconstructors.cpp @@ -42,7 +42,7 @@ class TestConstructors : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build(); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -56,7 +56,7 @@ class TestConstructors : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&s, this); + Tokenizer tokenizer(s, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testexceptionsafety.cpp b/test/testexceptionsafety.cpp index 76291ee8541a..117ebc8015cd 100644 --- a/test/testexceptionsafety.cpp +++ b/test/testexceptionsafety.cpp @@ -66,7 +66,7 @@ class TestExceptionSafety : public TestFixture { const Settings settings1 = settingsBuilder(s ? *s : settings).certainty(Certainty::inconclusive, inconclusive).build(); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testfunctions.cpp b/test/testfunctions.cpp index 636df6e8d06e..422d50ab14a5 100644 --- a/test/testfunctions.cpp +++ b/test/testfunctions.cpp @@ -113,7 +113,7 @@ class TestFunctions : public TestFixture { settings_ = &settings; // Tokenize.. - Tokenizer tokenizer(settings_, this); + Tokenizer tokenizer(*settings_, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); diff --git a/test/testgarbage.cpp b/test/testgarbage.cpp index 88387d4a4251..75f3ff2abeb0 100644 --- a/test/testgarbage.cpp +++ b/test/testgarbage.cpp @@ -289,7 +289,7 @@ class TestGarbage : public TestFixture { Preprocessor preprocessor(settings); // tokenize.. 
- Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -303,7 +303,7 @@ class TestGarbage : public TestFixture { #define getSyntaxError(code) getSyntaxError_(code, __FILE__, __LINE__) std::string getSyntaxError_(const char code[], const char* file, int line) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); try { ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -321,7 +321,7 @@ class TestGarbage : public TestFixture { const char code[] = "class __declspec(dllexport) x final { };"; { errout.str(""); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS("", errout.str()); @@ -368,7 +368,7 @@ class TestGarbage : public TestFixture { " )\n" "}"; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); try { ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -403,14 +403,14 @@ class TestGarbage : public TestFixture { { errout.str(""); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.c")); ASSERT_EQUALS("", errout.str()); } { errout.str(""); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS("[test.cpp:1]: (information) The code 'class x y {' is not handled. You can use -I or --include to add handling of this code.\n", errout.str()); diff --git a/test/testincompletestatement.cpp b/test/testincompletestatement.cpp index 1cf105b0ace5..9b895c517ee7 100644 --- a/test/testincompletestatement.cpp +++ b/test/testincompletestatement.cpp @@ -42,7 +42,7 @@ class TestIncompleteStatement : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build(); std::vector files(1, "test.cpp"); - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenize.. diff --git a/test/testinternal.cpp b/test/testinternal.cpp index 8887b701f06f..ee271f0d3089 100644 --- a/test/testinternal.cpp +++ b/test/testinternal.cpp @@ -54,7 +54,7 @@ class TestInternal : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testio.cpp b/test/testio.cpp index 4f01d6b6fc70..024757bf6c90 100644 --- a/test/testio.cpp +++ b/test/testio.cpp @@ -96,7 +96,7 @@ class TestIO : public TestFixture { PLATFORM(settings1.platform, platform); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); const std::string file_in = cpp ? "test.cpp" : "test.c"; ASSERT_LOC(tokenizer.tokenize(istr, file_in.c_str()), file, line); diff --git a/test/testleakautovar.cpp b/test/testleakautovar.cpp index c6815db4bab3..5d3ab7b3bba6 100644 --- a/test/testleakautovar.cpp +++ b/test/testleakautovar.cpp @@ -239,7 +239,7 @@ class TestLeakAutoVar : public TestFixture { const Settings settings1 = settingsBuilder(s ? *s : settings).checkLibrary().build(); // Tokenize.. 
- Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, cpp ? "test.cpp" : "test.c"), file, line); @@ -254,7 +254,7 @@ class TestLeakAutoVar : public TestFixture { const Settings settings0 = settingsBuilder(s).checkLibrary().build(); // Tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -2994,7 +2994,7 @@ class TestLeakAutoVarRecursiveCountLimit : public TestFixture { errout.str(""); std::vector files(1, cpp?"test.cpp":"test.c"); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. @@ -3045,7 +3045,7 @@ class TestLeakAutoVarStrcpy : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -3130,7 +3130,7 @@ class TestLeakAutoVarWindows : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.c"), file, line); diff --git a/test/testlibrary.cpp b/test/testlibrary.cpp index 11870ba90294..fc5b2f1fda0d 100644 --- a/test/testlibrary.cpp +++ b/test/testlibrary.cpp @@ -580,14 +580,14 @@ class TestLibrary : public TestFixture { ASSERT_EQUALS(library.functions.size(), 1U); { - Tokenizer tokenizer(&settings, nullptr); + Tokenizer tokenizer(settings, nullptr); std::istringstream istr("CString str; str.Format();"); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(library.isnotnoreturn(Token::findsimplematch(tokenizer.tokens(), "Format"))); } { - Tokenizer tokenizer(&settings, nullptr); + Tokenizer tokenizer(settings, nullptr); std::istringstream istr("HardDrive hd; hd.Format();"); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(!library.isnotnoreturn(Token::findsimplematch(tokenizer.tokens(), "Format"))); @@ -606,14 +606,14 @@ class TestLibrary : public TestFixture { ASSERT(loadxmldata(library, xmldata, sizeof(xmldata))); { - Tokenizer tokenizer(&settings, nullptr); + Tokenizer tokenizer(settings, nullptr); std::istringstream istr("struct X : public Base { void dostuff() { f(0); } };"); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(library.isnullargbad(Token::findsimplematch(tokenizer.tokens(), "f"),1)); } { - Tokenizer tokenizer(&settings, nullptr); + Tokenizer tokenizer(settings, nullptr); std::istringstream istr("struct X : public Base { void dostuff() { f(1,2); } };"); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(!library.isnullargbad(Token::findsimplematch(tokenizer.tokens(), "f"),1)); diff --git a/test/testmemleak.cpp b/test/testmemleak.cpp index 7358cd97fb6f..6564db786562 100644 --- a/test/testmemleak.cpp +++ b/test/testmemleak.cpp @@ -50,7 +50,7 @@ class TestMemleak : private TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -102,7 +102,7 @@ class TestMemleak : private TestFixture { // Clear the error buffer.. 
errout.str(""); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -132,7 +132,7 @@ class TestMemleakInFunction : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -471,7 +471,7 @@ class TestMemleakInClass : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -1678,7 +1678,7 @@ class TestMemleakStructMember : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, isCPP ? "test.cpp" : "test.c"), file, line); @@ -2291,7 +2291,7 @@ class TestMemleakNoVar : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testnullpointer.cpp b/test/testnullpointer.cpp index f2ad99a9ede5..d67cd8329d86 100644 --- a/test/testnullpointer.cpp +++ b/test/testnullpointer.cpp @@ -184,7 +184,7 @@ class TestNullPointer : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build(); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -200,7 +200,7 @@ class TestNullPointer : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, false).build(); std::vector files(1, "test.cpp"); - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenizer.. @@ -4148,7 +4148,7 @@ class TestNullPointer : public TestFixture { void functioncalllibrary() { const Settings settings1; - Tokenizer tokenizer(&settings1,this); + Tokenizer tokenizer(settings1,this); std::istringstream code("void f() { int a,b,c; x(a,b,c); }"); ASSERT_EQUALS(true, tokenizer.tokenize(code, "test.c")); const Token *xtok = Token::findsimplematch(tokenizer.tokens(), "x"); @@ -4492,7 +4492,7 @@ class TestNullPointer : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testother.cpp b/test/testother.cpp index 8e33d2d099d1..6bce0a87d2f4 100644 --- a/test/testother.cpp +++ b/test/testother.cpp @@ -314,7 +314,7 @@ class TestOther : public TestFixture { Preprocessor preprocessor(*settings); // Tokenize.. - Tokenizer tokenizer(settings, this, &preprocessor); + Tokenizer tokenizer(*settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename ? 
filename : "test.cpp"), file, line); @@ -344,7 +344,7 @@ class TestOther : public TestFixture { Preprocessor preprocessor(*settings); std::vector files(1, filename); - Tokenizer tokenizer(settings, this, &preprocessor); + Tokenizer tokenizer(*settings, this, &preprocessor); PreprocessorHelper::preprocess(preprocessor, code, files, tokenizer); // Tokenizer.. @@ -1709,7 +1709,7 @@ class TestOther : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizerCpp(&settings, this, &preprocessor); + Tokenizer tokenizerCpp(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizerCpp.tokenize(istr, "test.cpp"), file, line); @@ -1911,7 +1911,7 @@ class TestOther : public TestFixture { Preprocessor preprocessor(settings); // Tokenize.. - Tokenizer tokenizer(&settings, this, &preprocessor); + Tokenizer tokenizer(settings, this, &preprocessor); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testpostfixoperator.cpp b/test/testpostfixoperator.cpp index 48ff941f542e..622bdff486b9 100644 --- a/test/testpostfixoperator.cpp +++ b/test/testpostfixoperator.cpp @@ -38,7 +38,7 @@ class TestPostfixOperator : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testsimplifytemplate.cpp b/test/testsimplifytemplate.cpp index 03d62dbe7bab..3894ef3d3068 100644 --- a/test/testsimplifytemplate.cpp +++ b/test/testsimplifytemplate.cpp @@ -312,7 +312,7 @@ class TestSimplifyTemplate : public TestFixture { errout.str(""); const Settings settings1 = settingsBuilder(settings).library("std.cfg").debugwarnings(debugwarnings).platform(type).build(); - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -5287,7 +5287,7 @@ class TestSimplifyTemplate : public TestFixture { } unsigned int templateParameters(const char code[]) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); tokenizer.createTokens(istr, "test.cpp"); @@ -5354,7 +5354,7 @@ class TestSimplifyTemplate : public TestFixture { // Helper function to unit test TemplateSimplifier::getTemplateNamePosition int templateNamePositionHelper(const char code[], unsigned offset = 0) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); tokenizer.createTokens(istr, "test.cpp"); @@ -5424,7 +5424,7 @@ class TestSimplifyTemplate : public TestFixture { // Helper function to unit test TemplateSimplifier::findTemplateDeclarationEnd bool findTemplateDeclarationEndHelper(const char code[], const char pattern[], unsigned offset = 0) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); tokenizer.createTokens(istr, "test.cpp"); @@ -5453,7 +5453,7 @@ class TestSimplifyTemplate : public TestFixture { // Helper function to unit test TemplateSimplifier::getTemplateParametersInDeclaration bool getTemplateParametersInDeclarationHelper(const char code[], const std::vector & params) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); tokenizer.createTokens(istr, "test.cpp"); @@ -5756,7 +5756,7 @@ class TestSimplifyTemplate : public TestFixture 
{ #define instantiateMatch(code, numberOfArguments, patternAfter) instantiateMatch_(code, numberOfArguments, patternAfter, __FILE__, __LINE__) bool instantiateMatch_(const char code[], const std::size_t numberOfArguments, const char patternAfter[], const char* file, int line) { - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp", ""), file, line); diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp index 3cb4dd7c24f2..865d01b41897 100644 --- a/test/testsimplifytokens.cpp +++ b/test/testsimplifytokens.cpp @@ -167,7 +167,7 @@ class TestSimplifyTokens : public TestFixture { errout.str(""); const Settings settings = settingsBuilder(settings0).platform(type).build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -178,7 +178,7 @@ class TestSimplifyTokens : public TestFixture { std::string tok_(const char* file, int line, const char code[], const char filename[], bool simplify = true) { errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -195,7 +195,7 @@ class TestSimplifyTokens : public TestFixture { const Settings settings = settingsBuilder(settings1).debugwarnings().platform(platform).cpp(cpp11 ? Standards::CPP11 : Standards::CPP03).build(); // tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); @@ -221,7 +221,7 @@ class TestSimplifyTokens : public TestFixture { std::string tokenizeDebugListing_(const char* file, int line, const char code[], bool simplify = false, const char filename[] = "test.cpp") { errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -264,7 +264,7 @@ class TestSimplifyTokens : public TestFixture { const char expected[] = "a = L\"hello world\" ;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -276,7 +276,7 @@ class TestSimplifyTokens : public TestFixture { const char expected[] = "abcd = u\"abcd\" ;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -288,7 +288,7 @@ class TestSimplifyTokens : public TestFixture { const char expected[] = "abcd = U\"abcd\" ;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -300,7 +300,7 @@ class TestSimplifyTokens : public TestFixture { const char expected[] = "abcd = u8\"abcd\" ;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -312,7 +312,7 @@ class TestSimplifyTokens : public TestFixture { const char expected[] = "abcdef = L\"abcdef\" ;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); @@ -1492,7 +1492,7 @@ class TestSimplifyTokens : 
public TestFixture { std::string simplifyKnownVariables_(const char code[], const char* file, int line) { errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testsimplifytypedef.cpp b/test/testsimplifytypedef.cpp index 234a58a2dbc8..3f68230420f7 100644 --- a/test/testsimplifytypedef.cpp +++ b/test/testsimplifytypedef.cpp @@ -238,7 +238,7 @@ class TestSimplifyTypedef : public TestFixture { // show warnings about unhandled typedef const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).debugwarnings(debugwarnings).platform(type).build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -249,7 +249,7 @@ class TestSimplifyTypedef : public TestFixture { std::string simplifyTypedef(const char code[]) { errout.str(""); - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); tokenizer.list.createTokens(istr); @@ -265,7 +265,7 @@ class TestSimplifyTypedef : public TestFixture { errout.str(""); std::vector files(1, "test.cpp"); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenize.. @@ -281,7 +281,7 @@ class TestSimplifyTypedef : public TestFixture { // Tokenize.. // show warnings about unhandled typedef const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).debugwarnings().build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); } @@ -290,7 +290,7 @@ class TestSimplifyTypedef : public TestFixture { std::string simplifyTypedefC(const char code[]) { errout.str(""); - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); tokenizer.list.createTokens(istr, "file.c"); diff --git a/test/testsimplifyusing.cpp b/test/testsimplifyusing.cpp index 5e3a5427a576..2585547c80c7 100644 --- a/test/testsimplifyusing.cpp +++ b/test/testsimplifyusing.cpp @@ -99,7 +99,7 @@ class TestSimplifyUsing : public TestFixture { const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).debugwarnings(debugwarnings).platform(type).build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); if (preprocess) { std::vector files(1, "test.cpp"); diff --git a/test/testsizeof.cpp b/test/testsizeof.cpp index 22bd7e7ed421..9cd18196529f 100644 --- a/test/testsizeof.cpp +++ b/test/testsizeof.cpp @@ -54,7 +54,7 @@ class TestSizeof : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -68,7 +68,7 @@ class TestSizeof : public TestFixture { errout.str(""); std::vector files(1, "test.cpp"); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenize.. 
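The test-suite churn above follows one mechanical rule: wherever a test constructed a Tokenizer from the address of a Settings object, it now passes the object itself. A minimal sketch of the before/after call pattern, assuming the usual cppcheck headers and a default-constructible Settings; the helper name tokenizeSnippet is illustrative only and not part of the test suite:

    #include <sstream>
    #include "settings.h"
    #include "tokenize.h"

    // Hypothetical helper, shown only to illustrate the constructor change.
    static bool tokenizeSnippet(const Settings &settings, ErrorLogger *logger)
    {
        // Previously: Tokenizer tokenizer(&settings, logger);
        // The constructor now takes const Settings&, so the object is passed directly.
        Tokenizer tokenizer(settings, logger);
        std::istringstream istr("int x ;");
        return tokenizer.tokenize(istr, "test.cpp");
    }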
diff --git a/test/teststl.cpp b/test/teststl.cpp index 1b26e57ebb45..9106f8319532 100644 --- a/test/teststl.cpp +++ b/test/teststl.cpp @@ -183,7 +183,7 @@ class TestStl : public TestFixture { const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).cpp(cppstandard).build(); // Tokenize.. - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); @@ -201,7 +201,7 @@ class TestStl : public TestFixture { errout.str(""); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/teststring.cpp b/test/teststring.cpp index b4d8d815646c..ba843bcd8803 100644 --- a/test/teststring.cpp +++ b/test/teststring.cpp @@ -68,7 +68,7 @@ class TestString : public TestFixture { errout.str(""); std::vector files(1, filename); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); PreprocessorHelper::preprocess(code, files, tokenizer); // Tokenize.. diff --git a/test/testsummaries.cpp b/test/testsummaries.cpp index cfead86fdd75..ecaa893d6874 100644 --- a/test/testsummaries.cpp +++ b/test/testsummaries.cpp @@ -45,7 +45,7 @@ class TestSummaries : public TestFixture { // tokenize.. const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); return Summaries::create(&tokenizer, ""); diff --git a/test/testsymboldatabase.cpp b/test/testsymboldatabase.cpp index b59ed1fbfb71..8f05b976ab9a 100644 --- a/test/testsymboldatabase.cpp +++ b/test/testsymboldatabase.cpp @@ -45,20 +45,20 @@ class TestSymbolDatabase; #define GET_SYMBOL_DB_STD(code) \ - Tokenizer tokenizer(&settings1, this); \ + Tokenizer tokenizer(settings1, this); \ LOAD_LIB_2(settings1.library, "std.cfg"); \ const SymbolDatabase *db = getSymbolDB_inner(tokenizer, code, "test.cpp"); \ ASSERT(db); \ do {} while (false) #define GET_SYMBOL_DB(code) \ - Tokenizer tokenizer(&settings1, this); \ + Tokenizer tokenizer(settings1, this); \ const SymbolDatabase *db = getSymbolDB_inner(tokenizer, code, "test.cpp"); \ ASSERT(db); \ do {} while (false) #define GET_SYMBOL_DB_C(code) \ - Tokenizer tokenizer(&settings1, this); \ + Tokenizer tokenizer(settings1, this); \ const SymbolDatabase *db = getSymbolDB_inner(tokenizer, code, "test.c"); \ do {} while (false) @@ -105,7 +105,7 @@ class TestSymbolDatabase : public TestFixture { unsigned int exprline2, SourceLocation loc = SourceLocation::current()) { - Tokenizer tokenizer(&settings1, this); + Tokenizer tokenizer(settings1, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), loc.file_name(), loc.line()); @@ -2447,7 +2447,7 @@ class TestSymbolDatabase : public TestFixture { const Settings settings = settingsBuilder(pSettings ? *pSettings : settings1).debugwarnings(debug).build(); // Tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -8423,7 +8423,7 @@ class TestSymbolDatabase : public TestFixture { } #define typeOf(...) 
typeOf_(__FILE__, __LINE__, __VA_ARGS__) std::string typeOf_(const char* file, int line, const char code[], const char pattern[], const char filename[] = "test.cpp", const Settings *settings = nullptr) { - Tokenizer tokenizer(settings ? settings : &settings2, this); + Tokenizer tokenizer(settings ? *settings : settings2, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); const Token* tok; diff --git a/test/testtoken.cpp b/test/testtoken.cpp index 213c9fd12043..05fb01d0b715 100644 --- a/test/testtoken.cpp +++ b/test/testtoken.cpp @@ -137,7 +137,7 @@ class TestToken : public TestFixture { #define MatchCheck(...) MatchCheck_(__FILE__, __LINE__, __VA_ARGS__) bool MatchCheck_(const char* file, int line, const std::string& code, const std::string& pattern, unsigned int varid = 0) { const Settings settings; - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(";" + code + ";"); try { ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index 43d534c3823e..03b780eab47b 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -457,7 +457,7 @@ class TestTokenizer : public TestFixture { const Settings settings = settingsBuilder(settings1).debugwarnings().cpp(std).platform(platform).build(); // tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); @@ -484,7 +484,7 @@ class TestTokenizer : public TestFixture { const Settings settings = settingsBuilder(settings_windows).debugwarnings().cpp(cpp11 ? Standards::CPP11 : Standards::CPP03).platform(platform).build(); // tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); @@ -507,7 +507,7 @@ class TestTokenizer : public TestFixture { errout.str(""); // tokenize.. - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); if (!tokenizer.tokens()) @@ -521,7 +521,7 @@ class TestTokenizer : public TestFixture { const Settings settings = settingsBuilder(settings0).c(Standards::C89).cpp(Standards::CPP03).build(); - Tokenizer tokenizer(&settings, this); + Tokenizer tokenizer(settings, this); std::istringstream istr(code); ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); @@ -1552,7 +1552,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "extern \"C\" int foo();"; // tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. @@ -1562,7 +1562,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "extern \"C\" { int foo(); }"; // tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. @@ -1572,7 +1572,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "extern \"C++\" int foo();"; // tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. 
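Helpers that keep an optional const Settings* parameter, such as typeOf_ above, only need their call site adjusted: the ternary now selects between the dereferenced override and the default Settings object, and either branch binds to the new const-reference parameter without copying. A hedged sketch of that pattern; defaultSettings and checkSnippet are illustrative names, not cppcheck API:

    #include <sstream>
    #include "settings.h"
    #include "tokenize.h"

    static const Settings defaultSettings;  // default-constructed, as in the fixtures above

    // Hypothetical wrapper showing the pointer-to-reference adaptation.
    static bool checkSnippet(const char code[], ErrorLogger *logger,
                             const Settings *settings = nullptr)
    {
        // Both branches of the conditional are lvalues of type const Settings,
        // so the expression binds directly to the Tokenizer's const Settings& parameter.
        Tokenizer tokenizer(settings ? *settings : defaultSettings, logger);
        std::istringstream istr(code);
        return tokenizer.tokenize(istr, "test.cpp");
    }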
@@ -1582,7 +1582,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "extern \"C++\" { int foo(); }"; // tokenize.. - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. @@ -2909,7 +2909,7 @@ class TestTokenizer : public TestFixture { " void f() {}\n" "};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -2934,7 +2934,7 @@ class TestTokenizer : public TestFixture { " char *b ; b = new char[a[0]];\n" "};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -2958,7 +2958,7 @@ class TestTokenizer : public TestFixture { " foo(g());\n" "};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -2978,7 +2978,7 @@ class TestTokenizer : public TestFixture { " return(af);\n" "}"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3006,7 +3006,7 @@ class TestTokenizer : public TestFixture { " return static_cast(a);\n" "}"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3023,7 +3023,7 @@ class TestTokenizer : public TestFixture { " nvwa<(x > y)> ERROR_nnn;\n" "}"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3039,7 +3039,7 @@ class TestTokenizer : public TestFixture { // #4860 const char code[] = "class A : public B {};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3055,7 +3055,7 @@ class TestTokenizer : public TestFixture { // #4860 const char code[] = "Bar>>>::set(1, 2, 3);"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3072,7 +3072,7 @@ class TestTokenizer : public TestFixture { // #5627 const char code[] = "new Foo[10];"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3088,7 +3088,7 @@ class TestTokenizer : public TestFixture { // #6242 const char code[] = "func = integral_;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3103,7 +3103,7 @@ class TestTokenizer : public TestFixture { // if (a < b || c 
> d) { } const char code[] = "{ if (a < b || c > d); }"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3115,7 +3115,7 @@ class TestTokenizer : public TestFixture { // bool f = a < b || c > d const char code[] = "bool f = a < b || c > d;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3127,7 +3127,7 @@ class TestTokenizer : public TestFixture { // template const char code[] = "a < b || c > d;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3139,7 +3139,7 @@ class TestTokenizer : public TestFixture { // if (a < ... > d) { } const char code[] = "{ if (a < b || c == 3 || d > e); }"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3151,7 +3151,7 @@ class TestTokenizer : public TestFixture { // template const char code[] = "a d;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3162,7 +3162,7 @@ class TestTokenizer : public TestFixture { // template const char code[] = "a d;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3172,7 +3172,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "template < f = b || c > struct S;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3183,7 +3183,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "struct A : B {};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3194,7 +3194,7 @@ class TestTokenizer : public TestFixture { { const char code[] = "Data;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3206,7 +3206,7 @@ class TestTokenizer : public TestFixture { // #6601 const char code[] = "template struct FuncType : FuncType { };"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); @@ -3226,7 +3226,7 @@ class TestTokenizer : public TestFixture { // #7158 const char code[] = "enum { value = boost::mpl::at_c };"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); 
ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = Token::findsimplematch(tokenizer.tokens(), "<"); @@ -3240,7 +3240,7 @@ class TestTokenizer : public TestFixture { "struct CheckedDivOp< T, U, typename std::enable_if::value || std::is_floating_point::value>::type> {\n" "};\n"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "struct")->tokAt(2); @@ -3253,7 +3253,7 @@ class TestTokenizer : public TestFixture { // #7975 const char code[] = "template X copy() {};\n"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "< Y"); @@ -3266,7 +3266,7 @@ class TestTokenizer : public TestFixture { // #8006 const char code[] = "C && a = b;"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = tokenizer.tokens()->next(); @@ -3279,7 +3279,7 @@ class TestTokenizer : public TestFixture { // #8115 const char code[] = "void Test(C && c);"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "<"); @@ -3292,7 +3292,7 @@ class TestTokenizer : public TestFixture { const char code[] = "template struct A {}; " "template struct foo : A... {};"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *A = Token::findsimplematch(tokenizer.tokens(), "A <"); @@ -3303,7 +3303,7 @@ class TestTokenizer : public TestFixture { const char code[] = "template::type>" "void basic_json() {}"; errout.str(""); - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(true, Token::simpleMatch(tokenizer.tokens()->next()->link(), "> void")); @@ -3312,7 +3312,7 @@ class TestTokenizer : public TestFixture { { // #9094 - template usage or comparison? const char code[] = "a = f(x%x<--a==x>x);"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr == Token::findsimplematch(tokenizer.tokens(), "<")->link()); @@ -3323,7 +3323,7 @@ class TestTokenizer : public TestFixture { const char code[] = "using std::same_as;\n" "template T>\n" "void f();"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "template <"); @@ -3335,7 +3335,7 @@ class TestTokenizer : public TestFixture { { // #9131 - template usage or comparison? 
const char code[] = "using std::list; list l;"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "<")->link()); @@ -3347,7 +3347,7 @@ class TestTokenizer : public TestFixture { "{\n" " for (set::iterator i = sources.begin(); i != sources.end(); ++i) {}\n" "}"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "<")->link()); @@ -3359,7 +3359,7 @@ class TestTokenizer : public TestFixture { " a<> b;\n" " b.a<>::c();\n" "}\n"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ::")->link()); @@ -3371,7 +3371,7 @@ class TestTokenizer : public TestFixture { "template struct c {\n" " void d() { a[0]; }\n" "};\n"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> [")->link()); @@ -3384,7 +3384,7 @@ class TestTokenizer : public TestFixture { "template using f = c;\n" "template > struct g {};\n" "template using baz = g;\n"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ;")->link()); @@ -3399,7 +3399,7 @@ class TestTokenizer : public TestFixture { "template using c = a;\n" "template c e;\n" "auto f = -e<1> == 0;\n"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ==")->link()); @@ -3418,7 +3418,7 @@ class TestTokenizer : public TestFixture { "constexpr void b::operator()(c &&) const {\n" " i<3>.f([] {});\n" "}\n"; - Tokenizer tokenizer(&settings0, this); + Tokenizer tokenizer(settings0, this); std::istringstream istr(code); ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> . f (")->link()); @@ -3428,7 +3428,7 @@ class TestTokenizer : public TestFixture { // #10491 const char code[] = "template