From 8f2e3e7af0360cca7f8918ae41cc573f8cd88d7f Mon Sep 17 00:00:00 2001
From: Roland Reichwein
Date: Mon, 9 Nov 2020 16:55:10 +0100
Subject: Support empty translation unit

---
 Makefile           | 2 +-
 grammer.cpp        | 3 ---
 tests/test-cpp.cpp | 4 +++-
 3 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/Makefile b/Makefile
index 8cc8c0f..be048c4 100644
--- a/Makefile
+++ b/Makefile
@@ -91,7 +91,7 @@ TESTSRC=\
 SRC=$(PROGSRC) mcc.cpp
 
 all: test-$(PROJECTNAME) mcc
-	./test-$(PROJECTNAME) --gtest_filter='CppTest.compile_2_times'
+	./test-$(PROJECTNAME) #--gtest_filter='CppTest.compile_2_times'
 
 # testsuite ----------------------------------------------
 test-$(PROJECTNAME): $(TESTSRC:.cpp=.o)
diff --git a/grammer.cpp b/grammer.cpp
index 31a4bbf..3f3a0f1 100644
--- a/grammer.cpp
+++ b/grammer.cpp
@@ -392,9 +392,6 @@ std::vector Compiler::compile(std::vector p_tokens)
  clear();
  tokens = p_tokens;
 
- if (tokens.size() == 0)
-  throw std::runtime_error("No tokens");
-
  //
  // top-down algorithm:
  //
diff --git a/tests/test-cpp.cpp b/tests/test-cpp.cpp
index 0a0276e..e80f2d6 100644
--- a/tests/test-cpp.cpp
+++ b/tests/test-cpp.cpp
@@ -57,7 +57,9 @@ TEST_F(CppTest, preprocessing_tokenize_empty) {
 
  auto nodes = cpp.analysis(tokens);
 
- ASSERT_EQ(nodes.size(), 0);
+ ASSERT_EQ(nodes.size(), 1);
+
+ ASSERT_EQ(nodes[0].type, "translation-unit");
 }
 
 TEST_F(CppTest, preprocessing_tokenize_compile_error) {
-- 
cgit v1.2.3
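
Note (not part of the patch): the guard removed from grammer.cpp is what used to reject
empty input. Since the C++ grammar defines translation-unit as an optional declaration-seq,
an empty token stream is itself a valid translation unit, so the parser only needs to emit
the root node. Below is a self-contained sketch of that idea; Node and parse are hypothetical
names for illustration, not this project's real types or API.

// Illustrative sketch only -- hypothetical Node/parse, not code from this repository.
#include <string>
#include <vector>

struct Node {
    std::string type;
    std::vector<Node> children;
};

// translation-unit: declaration-seq(opt)  =>  empty input is a valid match.
std::vector<Node> parse(const std::vector<std::string>& tokens)
{
    Node root{"translation-unit", {}};
    if (!tokens.empty()) {
        // ... top-down matching of declaration-seq would go here ...
    }
    return {root}; // exactly one root node, even for an empty token stream
}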