Skip to content

Commit 9fca821

Browse files
authored
converted TestTokenizerCompileLimits into a Python test (danmar#7847)
1 parent 8da9e50 commit 9fca821

File tree

2 files changed

+32
-48
lines changed

2 files changed

+32
-48
lines changed

test/cli/other_test.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3653,3 +3653,35 @@ def test_debug_syntaxerror_c(tmp_path):
36533653
assert stderr.splitlines() == [
36543654
"{}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]".format(test_file)
36553655
]
3656+
3657+
3658+
def test_ast_max_depth(tmp_path):
3659+
test_file = tmp_path / 'test.cpp'
3660+
with open(test_file, "w") as f:
3661+
f.write(
3662+
"""
3663+
#define PTR1 (* (* (* (*
3664+
#define PTR2 PTR1 PTR1 PTR1 PTR1
3665+
#define PTR3 PTR2 PTR2 PTR2 PTR2
3666+
#define PTR4 PTR3 PTR3 PTR3 PTR3
3667+
3668+
#define RBR1 ) ) ) )
3669+
#define RBR2 RBR1 RBR1 RBR1 RBR1
3670+
#define RBR3 RBR2 RBR2 RBR2 RBR2
3671+
#define RBR4 RBR3 RBR3 RBR3 RBR3
3672+
3673+
int PTR4 q4_var RBR4 = 0;
3674+
""")
3675+
3676+
args = [
3677+
'-q',
3678+
'--template=simple',
3679+
str(test_file)
3680+
]
3681+
3682+
exitcode, stdout, stderr = cppcheck(args)
3683+
assert exitcode == 0, stdout
3684+
assert stdout.splitlines() == []
3685+
assert stderr.splitlines() == [
3686+
'{}:12:5: error: maximum AST depth exceeded [internalAstError]'.format(test_file)
3687+
]

test/testtokenize.cpp

Lines changed: 0 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -8704,51 +8704,3 @@ class TestTokenizer : public TestFixture {
87048704
};
87058705

87068706
REGISTER_TEST(TestTokenizer)
8707-
8708-
class TestTokenizerCompileLimits : public TestFixture
8709-
{
8710-
public:
8711-
TestTokenizerCompileLimits() : TestFixture("TestTokenizerCompileLimits") {}
8712-
8713-
private:
8714-
void run() override
8715-
{
8716-
TEST_CASE(test); // #5592 crash: gcc: testsuit: gcc.c-torture/compile/limits-declparen.c
8717-
}
8718-
8719-
#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
8720-
std::string tokenizeAndStringify_(const char* file, int linenr, const std::string& code) {
8721-
// tokenize..
8722-
SimpleTokenizer tokenizer(settingsDefault, *this);
8723-
ASSERT_LOC(tokenizer.tokenize(code), file, linenr);
8724-
8725-
if (tokenizer.tokens())
8726-
return tokenizer.tokens()->stringifyList(false, true, false, true, false, nullptr, nullptr);
8727-
return "";
8728-
}
8729-
8730-
void test() {
8731-
const char raw_code[] = "#define PTR1 (* (* (* (*\n"
8732-
"#define PTR2 PTR1 PTR1 PTR1 PTR1\n"
8733-
"#define PTR3 PTR2 PTR2 PTR2 PTR2\n"
8734-
"#define PTR4 PTR3 PTR3 PTR3 PTR3\n"
8735-
"\n"
8736-
"#define RBR1 ) ) ) )\n"
8737-
"#define RBR2 RBR1 RBR1 RBR1 RBR1\n"
8738-
"#define RBR3 RBR2 RBR2 RBR2 RBR2\n"
8739-
"#define RBR4 RBR3 RBR3 RBR3 RBR3\n"
8740-
"\n"
8741-
"int PTR4 q4_var RBR4 = 0;\n";
8742-
8743-
// Preprocess file..
8744-
simplecpp::OutputList outputList;
8745-
std::vector<std::string> files;
8746-
const simplecpp::TokenList tokens1(raw_code, sizeof(raw_code), files, "", &outputList);
8747-
const std::string filedata = tokens1.stringify();
8748-
const std::string code = PreprocessorHelper::getcodeforcfg(settingsDefault, *this, filedata, "", "test.c");
8749-
8750-
ASSERT_THROW_INTERNAL_EQUALS(tokenizeAndStringify(code), AST, "maximum AST depth exceeded");
8751-
}
8752-
};
8753-
8754-
REGISTER_TEST(TestTokenizerCompileLimits)

0 commit comments

Comments (0)