diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 92841e7605f..c54bc1ef688 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8674,8 +8674,6 @@ void Tokenizer::findGarbageCode() const
             if (Token::Match(tok->next(), ")|]|>|%assign%|%or%|%oror%|==|!=|/|>=|<=|&&"))
                 syntaxError(tok);
         }
-        if ((!isCPP() || !Token::simpleMatch(tok->previous(), "operator")) && Token::Match(tok, "[,;] ,"))
-            syntaxError(tok);
         if (Token::simpleMatch(tok, ".") &&
             !Token::simpleMatch(tok->previous(), ".") &&
             !Token::simpleMatch(tok->next(), ".") &&
@@ -8703,15 +8701,19 @@ void Tokenizer::findGarbageCode() const
             syntaxError(tok);
         if (Token::Match(tok, "! %comp%"))
             syntaxError(tok);
-        if (Token::Match(tok, "] %name%") && (!isCPP() || !(tok->tokAt(-1) && Token::simpleMatch(tok->tokAt(-2), "delete ["))))
-            syntaxError(tok);
+        if (Token::Match(tok, "] %name%") && (!isCPP() || !(tok->tokAt(-1) && Token::simpleMatch(tok->tokAt(-2), "delete [")))) {
+            if (tok->next()->isUpperCaseName())
+                unknownMacroError(tok->next());
+            else
+                syntaxError(tok);
+        }
         if (tok->link() && Token::Match(tok, "[([]") && (!tok->tokAt(-1) || !tok->tokAt(-1)->isControlFlowKeyword())) {
             const Token* const end = tok->link();
             for (const Token* inner = tok->next(); inner != end; inner = inner->next()) {
                 if (inner->str() == "{")
                     inner = inner->link();
-                else if (inner->str() == ";") {
+                else if (inner->str() == ";" || (Token::simpleMatch(inner, ", ,") && (!isCPP() || !Token::simpleMatch(inner->previous(), "operator")))) {
                     if (tok->tokAt(-1) && tok->tokAt(-1)->isUpperCaseName())
                         unknownMacroError(tok->tokAt(-1));
                     else
@@ -8719,6 +8721,8 @@ void Tokenizer::findGarbageCode() const
                 }
             }
         }
+        if ((!isCPP() || !Token::simpleMatch(tok->previous(), "operator")) && Token::Match(tok, "[,;] ,"))
+            syntaxError(tok);
     }
 
     // ternary operator without :
diff --git a/test/testgarbage.cpp b/test/testgarbage.cpp
index 2ad697cea0b..15c42f46964 100644
--- a/test/testgarbage.cpp
+++ b/test/testgarbage.cpp
@@ -1214,7 +1214,7 @@ class TestGarbage : public TestFixture {
                                          "    typedef S0 b[][1][1] != 0\n"
                                          "};\n"
                                          "b[K][0] S0 b[][1][1] != 4{ 0 };\n"
-                                         "b[0][0]"), SYNTAX);
+                                         "b[0][0]"), UNKNOWN_MACRO);
     }
 
     void garbageCode149() { // #7085
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index eb1a6b01b41..b07114d7f79 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -7133,6 +7133,14 @@ class TestTokenizer : public TestFixture {
                             InternalError,
                             "There is an unknown macro here somewhere. Configuration is required. If MACRO is a macro then please configure it.");
 
+        ASSERT_THROW_EQUALS(tokenizeAndStringify("struct S { int a[2] PACKED; };\n"),
+                            InternalError,
+                            "There is an unknown macro here somewhere. Configuration is required. If PACKED is a macro then please configure it.");
+
+        ASSERT_THROW_EQUALS(tokenizeAndStringify("MACRO(a, b,,)\n"),
+                            InternalError,
+                            "There is an unknown macro here somewhere. Configuration is required. If MACRO is a macro then please configure it.");
+
         ASSERT_THROW_INTERNAL(tokenizeAndStringify("{ for (()()) }"), SYNTAX); // #11643
 
         ASSERT_NO_THROW(tokenizeAndStringify("S* g = ::new(ptr) S();")); // #12552