1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
|
#include "bnf.h"
#include "cpp.h"
#include "cppbnf.h"
#include "lexer.h"
#include "grammer.h"
#include "minicc.h"
#include "debug.h"
#include <boost/algorithm/string.hpp>
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include <algorithm>
#include <cctype>
#include <deque>
#include <map>
#include <memory>
#include <string>
#include <utility>
#include <vector>
// Fixture shared by all CPP front-end tests.
// Its only job is to switch on verbose tracing for the compiler phases.
class CppTest: public ::testing::Test
{
protected:
    CppTest()
    {
        // Global flag from debug.h — makes the compiler phases log their work.
        debug = true;
    }

    ~CppTest()
    {
        // Nothing to tear down; the flag intentionally stays set.
    }
};
// Drives the full pipeline on a minimal program:
// source -> preprocessing tokens -> language tokens -> parse nodes.
TEST_F(CppTest, preprocessing_tokenize) {
    CPP cpp;
    // "int main() { return 1; }" lexes into 9 preprocessing tokens.
    auto ppToks = cpp.preprocessing_tokenize("int main() { return 1; }");
    ASSERT_EQ(ppToks.size(), 9);
    // Conversion to language tokens is 1:1 for this input.
    auto langToks = cpp.tokens_from_pptokens(ppToks);
    ASSERT_EQ(langToks.size(), 9);
    // Syntax analysis expands the token stream into the parse-node list.
    auto parseNodes = cpp.analysis(langToks);
    ASSERT_EQ(parseNodes.size(), 58/*44*/);
}
// An empty source yields no tokens at any stage, yet the analysis
// still produces the grammar's root node.
TEST_F(CppTest, preprocessing_tokenize_empty) {
    CPP cpp;
    auto ppToks = cpp.preprocessing_tokenize("");
    ASSERT_EQ(ppToks.size(), 0);
    auto langToks = cpp.tokens_from_pptokens(ppToks);
    ASSERT_EQ(langToks.size(), 0);
    // Even with zero tokens the tree contains exactly the root.
    auto tree = cpp.analysis(langToks);
    ASSERT_EQ(tree.size(), 1);
    ASSERT_EQ(tree[0].type, "translation-unit");
}
// Syntactically broken input must make the analysis phase throw.
TEST_F(CppTest, preprocessing_tokenize_compile_error) {
    CPP cpp;
    // "in ma": two identifiers that cannot form a translation unit.
    auto langToks = cpp.tokens_from_pptokens(cpp.preprocessing_tokenize("in ma"));
    ASSERT_EQ(langToks.size(), 2);
    try {
        auto parseNodes = cpp.analysis(langToks);
    } catch (const std::exception& ex) {
        // The parser signals every syntax problem with this one message.
        EXPECT_EQ(ex.what(), "Compile error"s);
        return;
    }
    FAIL() << "Exception expected";
}
// Smoke test: a well-formed program compiles without throwing.
TEST_F(CppTest, compile) {
    CPP compiler;
    compiler.compile("int main() { return 1 + 1; }");
}
// One CPP instance must stay usable across consecutive compilations
// (no state from the first run may leak into the second).
TEST_F(CppTest, compile_2_times) {
    CPP compiler;
    compiler.compile("int main() { return (1 + 2) * 2; }");
    compiler.compile("int main() { return 1 + 2 * 2; }");
}
|