diff --git a/addons/cppcheckdata.py b/addons/cppcheckdata.py
index 38f9a0f3169..2de7a3512e3 100755
--- a/addons/cppcheckdata.py
+++ b/addons/cppcheckdata.py
@@ -1281,6 +1281,7 @@ def iterconfigurations(self):
         # Iterating <typedef-info>
         iter_typedef_info = False
 
+        # Iterating <directive>
         iter_directive = False
 
         # Use iterable objects to traverse XML tree for dump files incrementally.
diff --git a/lib/preprocessor.cpp b/lib/preprocessor.cpp
index f3227d9f62a..473d5c658ec 100644
--- a/lib/preprocessor.cpp
+++ b/lib/preprocessor.cpp
@@ -43,16 +43,22 @@ static bool sameline(const simplecpp::Token *tok1, const simplecpp::Token *tok2)
     return tok1 && tok2 && tok1->location.sameline(tok2->location);
 }
 
-Directive::Directive(std::string _file, const int _linenr, const std::string &_str) :
+Directive::Directive(const simplecpp::Location & _loc, const std::string & _str) :
+    file(_loc.file()),
+    linenr(_loc.line),
+    str(_str)
+{}
+
+Directive::Directive(std::string _file, const int _linenr, const std::string & _str) :
     file(std::move(_file)),
     linenr(_linenr),
-    str(trim(_str))
+    str(_str)
 {}
 
-Directive::DirectiveToken::DirectiveToken(std::string _str, int _line, int _column) :
-    line(_line),
-    column(_column),
-    tokStr(std::move(_str))
+Directive::DirectiveToken::DirectiveToken(const simplecpp::Token & _tok) :
+    line(_tok.location.line),
+    column(_tok.location.col),
+    tokStr(_tok.str())
 {}
 
 char Preprocessor::macroChar = char(1);
@@ -334,7 +340,7 @@ std::list<Directive> Preprocessor::createDirectives(const simplecpp::TokenList &
             continue;
         if (tok->next && tok->next->str() == "endfile")
             continue;
-        Directive directive(tok->location.file(), tok->location.line, emptyString);
+        Directive directive(tok->location, emptyString);
         for (const simplecpp::Token *tok2 = tok; tok2 && tok2->location.line == directive.linenr; tok2 = tok2->next) {
             if (tok2->comment)
                 continue;
@@ -345,7 +351,7 @@ std::list<Directive> Preprocessor::createDirectives(const simplecpp::TokenList &
             else
                 directive.str += tok2->str();
 
-            directive.strTokens.emplace_back(tok2->str(), tok2->location.line, tok2->location.col);
+            directive.strTokens.emplace_back(*tok2);
         }
         directives.push_back(std::move(directive));
     }
diff --git a/lib/preprocessor.h b/lib/preprocessor.h
index bd05d0fcf65..ad6d98f9f5c 100644
--- a/lib/preprocessor.h
+++ b/lib/preprocessor.h
@@ -57,7 +57,7 @@ struct CPPCHECKLIB Directive {
     std::string str;
 
     struct DirectiveToken {
-        explicit DirectiveToken(std::string _str, int _line, int _column);
+        explicit DirectiveToken(const simplecpp::Token & _tok);
         int line;
         int column;
         std::string tokStr;
@@ -66,6 +66,7 @@ struct CPPCHECKLIB Directive {
     std::vector<DirectiveToken> strTokens;
 
     /** record a directive (possibly filtering src) */
+    Directive(const simplecpp::Location & _loc, const std::string & _str);
     Directive(std::string _file, const int _linenr, const std::string &_str);
 };
 
diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 0efe8a49db4..aab4a4913b1 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -5961,9 +5961,6 @@ void Tokenizer::dump(std::ostream &out) const
         outs += '\n';
         for (const auto & strToken : dir.strTokens) {
[the remaining lines of this hunk, the header of the test-file diff that follows, and its first hunk are not recoverable: the quoted XML dump strings were stripped during extraction]
@@ -8170,44 +8170,44 @@ class TestTokenizer : public TestFixture {
                                "#define macro5 val\n";
         const char dumpdata[] =
[expected XML dump strings for this test were stripped during extraction and are not recoverable]
@@ -8223,17 +8223,17 @@ class TestTokenizer : public TestFixture {
                                "#endif /* this will also be removed */\n";
         const char dumpdata[] =
[expected XML dump strings for this test were stripped during extraction and are not recoverable]
@@ -8248,10 +8248,10 @@ class TestTokenizer : public TestFixture {
         const char filedata[] = "#define macro 1\n";
         const char dumpdata[] =
[expected XML dump strings for this test were stripped during extraction and are not recoverable]
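
Note on the dump side of this change: the expected XML strings in the test hunks above were lost, but preprocessor.h shows that each Directive now carries its tokens (strTokens) with line and column information, and the Tokenizer::dump loop in tokenize.cpp writes them out. Below is a minimal sketch of how a dump consumer could read that data with ElementTree; the element and attribute names used (directive, token, file, linenr, str, column) are assumptions inferred from the Directive/DirectiveToken members, not taken from the stripped test data.

# Sketch only: iterate directive elements and their nested token children in a
# cppcheck dump file. Tag/attribute names are assumptions (see note above).
import xml.etree.ElementTree as ElementTree

def iter_directives(dump_file):
    for _event, node in ElementTree.iterparse(dump_file, events=('end',)):
        if node.tag == 'directive':
            # Collect (column, str) for each token child of the directive.
            tokens = [(tok.get('column'), tok.get('str')) for tok in node.findall('token')]
            yield node.get('file'), node.get('linenr'), node.get('str'), tokens
            node.clear()  # keep memory bounded while iterating large dumps

# Example usage (hypothetical dump path):
# for file, linenr, text, tokens in iter_directives('example.c.dump'):
#     print(file, linenr, text, tokens)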