Commit

pass Settings by reference into Tokenizer
firewave committed Jan 7, 2024
1 parent bbd2b2a commit 8f62d83
Showing 50 changed files with 354 additions and 357 deletions.
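
The change repeated across the files below is mechanical: the Tokenizer constructor now takes const Settings & instead of const Settings *, the stored member becomes a reference, and every construction site drops the address-of operator. A minimal sketch of the new shape, using stand-in types rather than the real cppcheck classes:

    // Simplified stand-ins for illustration only; the real declarations live in
    // lib/settings.h and lib/tokenize.h.
    struct Settings { bool debug = false; };

    class Tokenizer {
    public:
        explicit Tokenizer(const Settings &settings) : mSettings(settings) {}
        // Still returns a pointer, matching the TODO kept in the real header.
        const Settings *getSettings() const { return &mSettings; }
    private:
        const Settings &mSettings; // was: const Settings * const mSettings;
    };

    int main() {
        Settings settings;
        Tokenizer tokenizer(settings); // was: Tokenizer tokenizer(&settings);
        return tokenizer.getSettings()->debug ? 1 : 0;
    }

Because the member is now a reference, a null Settings is no longer representable and the Settings object must outlive the Tokenizer that uses it.
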
8 changes: 4 additions & 4 deletions lib/cppcheck.cpp
@@ -492,7 +492,7 @@ unsigned int CppCheck::checkClang(const std::string &path)

try {
std::istringstream ast(output2);
- Tokenizer tokenizer(&mSettings, this);
+ Tokenizer tokenizer(mSettings, this);
tokenizer.list.appendFileIfNew(path);
clangimport::parseClangAstDump(&tokenizer, ast);
ValueFlow::setValues(tokenizer.list,
@@ -660,7 +660,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
}

if (mSettings.library.markupFile(filename)) {
- Tokenizer tokenizer(&mSettings, this, &preprocessor);
+ Tokenizer tokenizer(mSettings, this, &preprocessor);
tokenizer.createTokens(std::move(tokens1));
checkUnusedFunctions.getFileInfo(&tokenizer, &mSettings);
return EXIT_SUCCESS;
@@ -792,7 +792,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
if (startsWith(dir.str,"#define ") || startsWith(dir.str,"#include "))
code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n';
}
- Tokenizer tokenizer2(&mSettings, this);
+ Tokenizer tokenizer2(mSettings, this);
std::istringstream istr2(code);
tokenizer2.list.createTokens(istr2);
executeRules("define", tokenizer2);
@@ -853,7 +853,7 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
continue;
}

- Tokenizer tokenizer(&mSettings, this, &preprocessor);
+ Tokenizer tokenizer(mSettings, this, &preprocessor);
if (mSettings.showtime != SHOWTIME_MODES::SHOWTIME_NONE)
tokenizer.setTimerResults(&s_timerResults);

2 changes: 1 addition & 1 deletion lib/importproject.cpp
@@ -561,7 +561,7 @@ namespace {
// TODO : Better evaluation
Settings s;
std::istringstream istr(c);
- Tokenizer tokenizer(&s);
+ Tokenizer tokenizer(s);
tokenizer.tokenize(istr,"vcxproj");
for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {
if (tok->str() == "(" && tok->astOperand1() && tok->astOperand2()) {
2 changes: 1 addition & 1 deletion lib/templatesimplifier.cpp
@@ -274,7 +274,7 @@ bool TemplateSimplifier::TokenAndName::isAliasToken(const Token *tok) const
}

TemplateSimplifier::TemplateSimplifier(Tokenizer &tokenizer)
- : mTokenizer(tokenizer), mTokenList(mTokenizer.list), mSettings(*mTokenizer.mSettings),
+ : mTokenizer(tokenizer), mTokenList(mTokenizer.list), mSettings(*mTokenizer.getSettings()),
mErrorLogger(mTokenizer.mErrorLogger)
{}

212 changes: 104 additions & 108 deletions lib/tokenize.cpp

Large diffs are not rendered by default.

7 changes: 4 additions & 3 deletions lib/tokenize.h
@@ -59,7 +59,7 @@ class CPPCHECKLIB Tokenizer {
friend class TemplateSimplifier;

public:
- explicit Tokenizer(const Settings * settings, ErrorLogger *errorLogger = nullptr, const Preprocessor *preprocessor = nullptr);
+ explicit Tokenizer(const Settings & settings, ErrorLogger *errorLogger = nullptr, const Preprocessor *preprocessor = nullptr);
~Tokenizer();

void setTimerResults(TimerResults *tr) {
@@ -640,8 +640,9 @@ class CPPCHECKLIB Tokenizer {
*/
static const Token * startOfExecutableScope(const Token * tok);

+ // TODO: return reference
const Settings *getSettings() const {
- return mSettings;
+ return &mSettings;
}

void calculateScopes();
@@ -668,7 +669,7 @@ class CPPCHECKLIB Tokenizer {
void setPodTypes();

/** settings */
- const Settings * const mSettings;
+ const Settings & mSettings;

/** errorlogger */
ErrorLogger* const mErrorLogger;
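
For illustration, a hypothetical call site written against the new header (the helper name and code snippet are made up; the constructor and tokenize() overload are the ones used elsewhere in this diff):

    #include <sstream>

    #include "settings.h"
    #include "tokenize.h"

    // Not part of the commit: the Settings object is bound by reference, so it
    // must outlive the tokenizer; the ErrorLogger and Preprocessor arguments
    // default to nullptr.
    static bool tokenizeSnippet(const Settings &settings) {
        Tokenizer tokenizer(settings);
        std::istringstream istr("int x ;");
        return tokenizer.tokenize(istr, "snippet.cpp");
    }
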
2 changes: 1 addition & 1 deletion test/helpers.h
@@ -42,7 +42,7 @@ class givenACodeSampleToTokenize {

public:
explicit givenACodeSampleToTokenize(const char sample[], bool createOnly = false, bool cpp = true)
- : tokenizer(&settings, nullptr) {
+ : tokenizer(settings, nullptr) {
std::istringstream iss(sample);
if (createOnly)
tokenizer.list.createTokens(iss, cpp ? "test.cpp" : "test.c");
2 changes: 1 addition & 1 deletion test/test64bit.cpp
@@ -48,7 +48,7 @@ class Test64BitPortability : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);

2 changes: 1 addition & 1 deletion test/testassert.cpp
@@ -39,7 +39,7 @@ class TestAssert : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line);

20 changes: 10 additions & 10 deletions test/testastutils.cpp
@@ -51,7 +51,7 @@ class TestAstUtils : public TestFixture {
#define findLambdaEndToken(...) findLambdaEndToken_(__FILE__, __LINE__, __VA_ARGS__)
bool findLambdaEndToken_(const char* file, int line, const char code[], const char pattern[] = nullptr, bool checkNext = true) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token* const tokStart = pattern ? Token::findsimplematch(tokenizer.tokens(), pattern, strlen(pattern)) : tokenizer.tokens();
@@ -90,7 +90,7 @@ class TestAstUtils : public TestFixture {
#define findLambdaStartToken(code) findLambdaStartToken_(code, __FILE__, __LINE__)
bool findLambdaStartToken_(const char code[], const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * const tokStart = (::findLambdaStartToken)(tokenizer.list.back());
@@ -123,7 +123,7 @@ class TestAstUtils : public TestFixture {
#define isNullOperand(code) isNullOperand_(code, __FILE__, __LINE__)
bool isNullOperand_(const char code[], const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
return (::isNullOperand)(tokenizer.tokens());
@@ -145,7 +145,7 @@ class TestAstUtils : public TestFixture {
#define isReturnScope(code, offset) isReturnScope_(code, offset, __FILE__, __LINE__)
bool isReturnScope_(const char code[], int offset, const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * const tok = (offset < 0)
@@ -177,7 +177,7 @@ class TestAstUtils : public TestFixture {
bool isSameExpression_(const char* file, int line, const char code[], const char tokStr1[], const char tokStr2[]) {
const Settings settings;
Library library;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), tokStr1, strlen(tokStr1));
@@ -215,7 +215,7 @@ class TestAstUtils : public TestFixture {
#define isVariableChanged(code, startPattern, endPattern) isVariableChanged_(code, startPattern, endPattern, __FILE__, __LINE__)
bool isVariableChanged_(const char code[], const char startPattern[], const char endPattern[], const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), startPattern, strlen(startPattern));
@@ -250,7 +250,7 @@ class TestAstUtils : public TestFixture {
#define isVariableChangedByFunctionCall(code, pattern, inconclusive) isVariableChangedByFunctionCall_(code, pattern, inconclusive, __FILE__, __LINE__)
bool isVariableChangedByFunctionCall_(const char code[], const char pattern[], bool *inconclusive, const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * const argtok = Token::findmatch(tokenizer.tokens(), pattern);
@@ -393,7 +393,7 @@ class TestAstUtils : public TestFixture {
int line)
{
const Settings settings = settingsBuilder().library("std.cfg").build();
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token* const start = Token::findsimplematch(tokenizer.tokens(), startPattern, strlen(startPattern));
@@ -425,7 +425,7 @@ class TestAstUtils : public TestFixture {
#define nextAfterAstRightmostLeaf(code, parentPattern, rightPattern) nextAfterAstRightmostLeaf_(code, parentPattern, rightPattern, __FILE__, __LINE__)
bool nextAfterAstRightmostLeaf_(const char code[], const char parentPattern[], const char rightPattern[], const char* file, int line) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);
const Token * tok = Token::findsimplematch(tokenizer.tokens(), parentPattern, strlen(parentPattern));
@@ -450,7 +450,7 @@ class TestAstUtils : public TestFixture {

Result isUsedAsBool(const char code[], const char pattern[]) {
const Settings settings;
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
if (!tokenizer.tokenize(istr, "test.cpp"))
return Result::Fail;
2 changes: 1 addition & 1 deletion test/testautovariables.cpp
@@ -40,7 +40,7 @@ class TestAutoVariables : public TestFixture {
const Settings settings1 = settingsBuilder(settings).certainty(Certainty::inconclusive, inconclusive).build();

// Tokenize..
- Tokenizer tokenizer(&settings1, this);
+ Tokenizer tokenizer(settings1, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line);

2 changes: 1 addition & 1 deletion test/testbool.cpp
@@ -82,7 +82,7 @@ class TestBool : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line);

2 changes: 1 addition & 1 deletion test/testboost.cpp
@@ -42,7 +42,7 @@ class TestBoost : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);

8 changes: 4 additions & 4 deletions test/testbufferoverrun.cpp
@@ -48,7 +48,7 @@ class TestBufferOverrun : public TestFixture {
const Settings settings = settingsBuilder(settings0).certainty(Certainty::inconclusive).build();

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line);

@@ -57,7 +57,7 @@ class TestBufferOverrun : public TestFixture {
}

void check_(const char* file, int line, const char code[], const Settings &settings, const char filename[] = "test.cpp") {
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line);

@@ -78,7 +78,7 @@ class TestBufferOverrun : public TestFixture {
.c(Standards::CLatest).cpp(Standards::CPPLatest).certainty(Certainty::inconclusive).build();

std::vector<std::string> files(1, filename);
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
PreprocessorHelper::preprocess(code, files, tokenizer);

// Tokenizer..
@@ -5175,7 +5175,7 @@ class TestBufferOverrun : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings0, this);
+ Tokenizer tokenizer(settings0, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);

2 changes: 1 addition & 1 deletion test/testcharvar.cpp
@@ -45,7 +45,7 @@ class TestCharVar : public TestFixture {
errout.str("");

// Tokenize..
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(code);
ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line);

4 changes: 2 additions & 2 deletions test/testclangimport.cpp
@@ -140,7 +140,7 @@ class TestClangImport : public TestFixture {

std::string parse(const char clang[]) {
const Settings settings = settingsBuilder().clang().build();
- Tokenizer tokenizer(&settings, this);
+ Tokenizer tokenizer(settings, this);
std::istringstream istr(clang);
clangimport::parseClangAstDump(&tokenizer, istr);
if (!tokenizer.tokens()) {
@@ -1054,7 +1054,7 @@ class TestClangImport : public TestFixture {

#define GET_SYMBOL_DB(AST) \
const Settings settings = settingsBuilder().clang().platform(Platform::Type::Unix64).build(); \
- Tokenizer tokenizer(&settings, this); \
+ Tokenizer tokenizer(settings, this); \
{ \
std::istringstream istr(AST); \
clangimport::parseClangAstDump(&tokenizer, istr); \