Skip to content

Commit

Permalink
bail out on invalid tokenlist in --rule-file / some rule related cleanups (#6205)
Browse files Browse the repository at this point in the history
  • Loading branch information
firewave authored Mar 30, 2024
1 parent 356b2bd commit aef62ee
Show file tree
Hide file tree
Showing 10 changed files with 120 additions and 59 deletions.
4 changes: 3 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ ifndef VERBOSE
endif
# To compile with rules, use 'make HAVE_RULES=yes'
ifndef HAVE_RULES
HAVE_RULES=no
HAVE_RULES=
endif

ifndef MATCHCOMPILER
Expand Down Expand Up @@ -160,6 +160,8 @@ ifeq ($(HAVE_RULES),yes)
else
LIBS=$(shell $(PCRE_CONFIG) --libs)
endif
else ifneq ($(HAVE_RULES),)
$(error invalid HAVE_RULES value '$(HAVE_RULES)')
endif

ifndef PREFIX
Expand Down
10 changes: 10 additions & 0 deletions cli/cmdlineparser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1121,6 +1121,16 @@ CmdLineParser::Result CmdLineParser::parseFromArgs(int argc, const char* const a
return Result::Fail;
}

if (rule.tokenlist.empty()) {
mLogger.printError("unable to load rule-file '" + ruleFile + "' - a rule is lacking a tokenlist.");
return Result::Fail;
}

if (rule.tokenlist != "normal" && rule.tokenlist != "define" && rule.tokenlist != "raw") {
mLogger.printError("unable to load rule-file '" + ruleFile + "' - a rule is using the unsupported tokenlist '" + rule.tokenlist + "'.");
return Result::Fail;
}

mSettings.rules.emplace_back(std::move(rule));
}
} else {
Expand Down
55 changes: 17 additions & 38 deletions lib/cppcheck.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -819,20 +819,17 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string

#ifdef HAVE_RULES
// Run define rules on raw code
const auto rules_it = std::find_if(mSettings.rules.cbegin(), mSettings.rules.cend(), [](const Settings::Rule& rule) {
return rule.tokenlist == "define";
});
if (rules_it != mSettings.rules.cend()) {
if (hasRule("define")) {
std::string code;
const std::list<Directive> &directives = preprocessor.getDirectives();
for (const Directive &dir : directives) {
for (const Directive &dir : preprocessor.getDirectives()) {
if (startsWith(dir.str,"#define ") || startsWith(dir.str,"#include "))
code += "#line " + std::to_string(dir.linenr) + " \"" + dir.file + "\"\n" + dir.str + '\n';
}
Tokenizer tokenizer2(mSettings, this);
TokenList tokenlist(&mSettings);
std::istringstream istr2(code);
tokenizer2.list.createTokens(istr2, Path::identify(*files.begin()));
executeRules("define", tokenizer2);
// TODO: asserts when file has unknown extension
tokenlist.createTokens(istr2, Path::identify(*files.begin())); // TODO: check result?
executeRules("define", tokenlist);
}
#endif

Expand Down Expand Up @@ -924,8 +921,10 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string
if (mSettings.checkConfiguration)
continue;

// Check raw tokens
checkRawTokens(tokenizer);
#ifdef HAVE_RULES
// Execute rules for "raw" code
executeRules("raw", tokenizer.list);
#endif

// Simplify tokens into normal form, skip rest of iteration if failed
if (!tokenizer.simplifyTokens1(mCurrentConfig))
Expand Down Expand Up @@ -959,13 +958,6 @@ unsigned int CppCheck::checkFile(const std::string& filename, const std::string

// Check normal tokens
checkNormalTokens(tokenizer);

#ifdef HAVE_RULES
// handling of "simple" rules has been removed.
if (hasRule("simple"))
throw InternalError(nullptr, "Handling of \"simple\" rules has been removed in Cppcheck. Use --addon instead.");
#endif

} catch (const simplecpp::Output &o) {
// #error etc during preprocessing
configurationError.push_back((mCurrentConfig.empty() ? "\'\'" : mCurrentConfig) + " : [" + o.location.file() + ':' + std::to_string(o.location.line) + "] " + o.msg);
Expand Down Expand Up @@ -1072,19 +1064,6 @@ void CppCheck::internalError(const std::string &filename, const std::string &msg
mErrorLogger.reportErr(errmsg);
}

//---------------------------------------------------------------------------
// CppCheck - A function that checks a raw token list
//---------------------------------------------------------------------------
void CppCheck::checkRawTokens(const Tokenizer &tokenizer)
{
#ifdef HAVE_RULES
// Execute rules for "raw" code
executeRules("raw", tokenizer);
#else
(void)tokenizer;
#endif
}

//---------------------------------------------------------------------------
// CppCheck - A function that checks a normal token list
//---------------------------------------------------------------------------
Expand Down Expand Up @@ -1169,7 +1148,7 @@ void CppCheck::checkNormalTokens(const Tokenizer &tokenizer)
}

#ifdef HAVE_RULES
executeRules("normal", tokenizer);
executeRules("normal", tokenizer.list);
#endif
}

Expand Down Expand Up @@ -1312,15 +1291,15 @@ static const char * pcreErrorCodeToString(const int pcreExecRet)
return "";
}

void CppCheck::executeRules(const std::string &tokenlist, const Tokenizer &tokenizer)
void CppCheck::executeRules(const std::string &tokenlist, const TokenList &list)
{
// There is no rule to execute
if (!hasRule(tokenlist))
return;

// Write all tokens in a string that can be parsed by pcre
std::ostringstream ostr;
for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next())
for (const Token *tok = list.front(); tok; tok = tok->next())
ostr << " " << tok->str();
const std::string str(ostr.str());

Expand Down Expand Up @@ -1400,14 +1379,14 @@ void CppCheck::executeRules(const std::string &tokenlist, const Tokenizer &token
pos = (int)pos2;

// determine location..
std::string file = tokenizer.list.getSourceFilePath();
std::string file = list.getSourceFilePath();
int line = 0;

std::size_t len = 0;
for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {
for (const Token *tok = list.front(); tok; tok = tok->next()) {
len = len + 1U + tok->str().size();
if (len > pos1) {
file = tokenizer.list.getFiles().at(tok->fileIndex());
file = list.getFiles().at(tok->fileIndex());
line = tok->linenr();
break;
}
Expand All @@ -1421,7 +1400,7 @@ void CppCheck::executeRules(const std::string &tokenlist, const Tokenizer &token
summary = "found '" + str.substr(pos1, pos2 - pos1) + "'";
else
summary = rule.summary;
const ErrorMessage errmsg({std::move(loc)}, tokenizer.list.getSourceFilePath(), rule.severity, summary, rule.id, Certainty::normal);
const ErrorMessage errmsg({std::move(loc)}, list.getSourceFilePath(), rule.severity, summary, rule.id, Certainty::normal);

// Report error
reportErr(errmsg);
Expand Down
14 changes: 4 additions & 10 deletions lib/cppcheck.h
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@
#include <unordered_set>
#include <vector>

class Tokenizer;
class TokenList;
enum class SHOWTIME_MODES;
struct FileSettings;
class CheckUnusedFunctions;
Expand Down Expand Up @@ -169,12 +169,6 @@ class CPPCHECKLIB CppCheck : ErrorLogger {
*/
unsigned int checkFile(const std::string& filename, const std::string &cfgname, std::istream* fileStream = nullptr);

/**
* @brief Check raw tokens
* @param tokenizer tokenizer instance
*/
void checkRawTokens(const Tokenizer &tokenizer);

/**
* @brief Check normal tokens
* @param tokenizer tokenizer instance
Expand All @@ -195,10 +189,10 @@ class CPPCHECKLIB CppCheck : ErrorLogger {
#ifdef HAVE_RULES
/**
* @brief Execute rules, if any
* @param tokenlist token list to use (normal / simple)
* @param tokenizer tokenizer
* @param tokenlist token list to use (define / normal / raw)
* @param list token list
*/
void executeRules(const std::string &tokenlist, const Tokenizer &tokenizer);
void executeRules(const std::string &tokenlist, const TokenList &list);
#endif

unsigned int checkClang(const std::string &path);
Expand Down
2 changes: 1 addition & 1 deletion lib/library.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ static std::vector<std::string> getnames(const char *names)
static void gettokenlistfromvalid(const std::string& valid, bool cpp, TokenList& tokenList)
{
std::istringstream istr(valid + ',');
tokenList.createTokens(istr, cpp ? Standards::Language::CPP : Standards::Language::C);
tokenList.createTokens(istr, cpp ? Standards::Language::CPP : Standards::Language::C); // TODO: check result?
for (Token *tok = tokenList.front(); tok; tok = tok->next()) {
if (Token::Match(tok,"- %num%")) {
tok->str("-" + tok->strAt(1));
Expand Down
2 changes: 1 addition & 1 deletion lib/symboldatabase.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -7583,7 +7583,7 @@ void SymbolDatabase::setValueTypeInTokenList(bool reportDebugWarnings, Token *to
ValueType valuetype;
TokenList tokenList(&mSettings);
std::istringstream istr(typestr+";");
tokenList.createTokens(istr, tok->isCpp() ? Standards::Language::CPP : Standards::Language::C);
tokenList.createTokens(istr, tok->isCpp() ? Standards::Language::CPP : Standards::Language::C); // TODO: check result?
tokenList.simplifyStdType();
if (parsedecl(tokenList.front(), &valuetype, mDefaultSignedness, mSettings)) {
valuetype.originalTypeName = typestr;
Expand Down
2 changes: 1 addition & 1 deletion lib/valueflow.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3815,7 +3815,7 @@ static bool isNotEqual(std::pair<const Token*, const Token*> x, const std::strin
{
TokenList tokenList(nullptr);
std::istringstream istr(y);
tokenList.createTokens(istr, cpp ? Standards::Language::CPP : Standards::Language::C);
tokenList.createTokens(istr, cpp ? Standards::Language::CPP : Standards::Language::C); // TODO: check result?
return isNotEqual(x, std::make_pair(tokenList.front(), tokenList.back()));
}
static bool isNotEqual(std::pair<const Token*, const Token*> x, const ValueType* y, bool cpp)
Expand Down
48 changes: 48 additions & 0 deletions test/cli/other_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -1180,3 +1180,51 @@ def test_unknown_extension(tmpdir):
assert exitcode == 0, stderr
assert stdout == ''
assert stderr == ''


def test_multiple_define_rules(tmpdir):
    # Two "define" rules in a single rule file: cppcheck must process both
    # and report one finding per matching #define directive.
    rules_path = os.path.join(tmpdir, 'rule_file.xml')
    with open(rules_path, 'wt') as f:
        f.write("""
<rules>
    <rule>
        <tokenlist>define</tokenlist>
        <pattern>DEF_1</pattern>
        <message>
            <severity>error</severity>
            <id>ruleId1</id>
        </message>
    </rule>
    <rule>
        <tokenlist>define</tokenlist>
        <pattern>DEF_2</pattern>
        <message>
            <severity>error</severity>
            <id>ruleId2</id>
        </message>
    </rule>
</rules>""")

    source_path = os.path.join(tmpdir, 'test.c')
    with open(source_path, 'wt') as f:
        f.write('''
#define DEF_1
#define DEF_2
void f() { }
''')

    exitcode, stdout, stderr = cppcheck(['--template=simple', '--rule-file={}'.format(rules_path), source_path])
    assert exitcode == 0, stderr
    # stdout announces each rule being processed, in rule-file order
    assert stdout.splitlines() == [
        'Checking {} ...'.format(source_path),
        'Processing rule: DEF_1',
        'Processing rule: DEF_2'
    ]
    # stderr carries one report per matched pattern
    assert stderr.splitlines() == [
        "{}:2:0: error: found 'DEF_1' [ruleId1]".format(source_path),
        "{}:3:0: error: found 'DEF_2' [ruleId2]".format(source_path)
    ]

# TODO: test "raw" and "normal" rules
38 changes: 32 additions & 6 deletions test/testcmdlineparser.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -344,6 +344,8 @@ class TestCmdlineParser : public TestFixture {
TEST_CASE(ruleFileEmptyElements2);
TEST_CASE(ruleFileUnknownElement1);
TEST_CASE(ruleFileUnknownElement2);
TEST_CASE(ruleFileMissingTokenList);
TEST_CASE(ruleFileUnknownTokenList);
#else
TEST_CASE(ruleFileNotSupported);
#endif
Expand Down Expand Up @@ -2157,7 +2159,7 @@ class TestCmdlineParser : public TestFixture {
ScopedFile file("rule.xml",
"<rules>\n"
"<rule>\n"
"<tokenlist>toklist1</tokenlist>\n"
"<tokenlist>raw</tokenlist>\n"
"<pattern>.+</pattern>\n"
"<message>\n"
"<severity>error</severity>\n"
Expand All @@ -2166,7 +2168,7 @@ class TestCmdlineParser : public TestFixture {
"</message>\n"
"</rule>\n"
"<rule>\n"
"<tokenlist>toklist2</tokenlist>\n"
"<tokenlist>define</tokenlist>\n"
"<pattern>.*</pattern>\n"
"<message>\n"
"<severity>warning</severity>\n"
Expand All @@ -2179,13 +2181,13 @@ class TestCmdlineParser : public TestFixture {
ASSERT_EQUALS_ENUM(CmdLineParser::Result::Success, parser->parseFromArgs(3, argv));
ASSERT_EQUALS(2, settings->rules.size());
auto it = settings->rules.cbegin();
ASSERT_EQUALS("toklist1", it->tokenlist);
ASSERT_EQUALS("raw", it->tokenlist);
ASSERT_EQUALS(".+", it->pattern);
ASSERT_EQUALS_ENUM(Severity::error, it->severity);
ASSERT_EQUALS("ruleId1", it->id);
ASSERT_EQUALS("ruleSummary1", it->summary);
++it;
ASSERT_EQUALS("toklist2", it->tokenlist);
ASSERT_EQUALS("define", it->tokenlist);
ASSERT_EQUALS(".*", it->pattern);
ASSERT_EQUALS_ENUM(Severity::warning, it->severity);
ASSERT_EQUALS("ruleId2", it->id);
Expand All @@ -2196,7 +2198,7 @@ class TestCmdlineParser : public TestFixture {
REDIRECT;
ScopedFile file("rule.xml",
"<rule>\n"
"<tokenlist>toklist</tokenlist>\n"
"<tokenlist>define</tokenlist>\n"
"<pattern>.+</pattern>\n"
"<message>\n"
"<severity>error</severity>\n"
Expand All @@ -2208,7 +2210,7 @@ class TestCmdlineParser : public TestFixture {
ASSERT_EQUALS_ENUM(CmdLineParser::Result::Success, parser->parseFromArgs(3, argv));
ASSERT_EQUALS(1, settings->rules.size());
auto it = settings->rules.cbegin();
ASSERT_EQUALS("toklist", it->tokenlist);
ASSERT_EQUALS("define", it->tokenlist);
ASSERT_EQUALS(".+", it->pattern);
ASSERT_EQUALS_ENUM(Severity::error, it->severity);
ASSERT_EQUALS("ruleId", it->id);
Expand Down Expand Up @@ -2300,6 +2302,30 @@ class TestCmdlineParser : public TestFixture {
ASSERT_EQUALS_ENUM(CmdLineParser::Result::Fail, parser->parseFromArgs(3, argv));
ASSERT_EQUALS("cppcheck: error: unable to load rule-file 'rule.xml' - unknown element 'pattern' encountered in 'message'.\n", logger->str());
}

// A rule whose <tokenlist/> element is empty must make rule-file loading fail.
void ruleFileMissingTokenList() {
    REDIRECT;
    const char xml[] =
        "<rule>\n"
        "<tokenlist/>\n"
        "<pattern>.+</pattern>\n"
        "</rule>\n";
    ScopedFile ruleFile("rule.xml", xml);
    const char * const args[] = {"cppcheck", "--rule-file=rule.xml", "file.cpp"};
    ASSERT_EQUALS_ENUM(CmdLineParser::Result::Fail, parser->parseFromArgs(3, args));
    ASSERT_EQUALS("cppcheck: error: unable to load rule-file 'rule.xml' - a rule is lacking a tokenlist.\n", logger->str());
}

// A rule naming a tokenlist other than define/normal/raw (here the removed
// "simple" list) must make rule-file loading fail.
void ruleFileUnknownTokenList() {
    REDIRECT;
    const char xml[] =
        "<rule>\n"
        "<tokenlist>simple</tokenlist>\n"
        "<pattern>.+</pattern>\n"
        "</rule>\n";
    ScopedFile ruleFile("rule.xml", xml);
    const char * const args[] = {"cppcheck", "--rule-file=rule.xml", "file.cpp"};
    ASSERT_EQUALS_ENUM(CmdLineParser::Result::Fail, parser->parseFromArgs(3, args));
    ASSERT_EQUALS("cppcheck: error: unable to load rule-file 'rule.xml' - a rule is using the unsupported tokenlist 'simple'.\n", logger->str());
}
#else
void ruleFileNotSupported() {
REDIRECT;
Expand Down
4 changes: 3 additions & 1 deletion tools/dmake/dmake.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -584,7 +584,7 @@ int main(int argc, char **argv)
<< "endif\n";

fout << "# To compile with rules, use 'make HAVE_RULES=yes'\n";
makeConditionalVariable(fout, "HAVE_RULES", "no");
makeConditionalVariable(fout, "HAVE_RULES", "");

makeMatchcompiler(fout, emptyString, emptyString);

Expand Down Expand Up @@ -743,6 +743,8 @@ int main(int argc, char **argv)
<< " else\n"
<< " LIBS=$(shell $(PCRE_CONFIG) --libs)\n"
<< " endif\n"
<< "else ifneq ($(HAVE_RULES),)\n"
<< " $(error invalid HAVE_RULES value '$(HAVE_RULES)')\n"
<< "endif\n\n";

makeConditionalVariable(fout, "PREFIX", "/usr");
Expand Down

0 comments on commit aef62ee

Please sign in to comment.