/*
* Copyright (C) 2023 Carlos Maniero
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/

#define MUNIT_ENABLE_ASSERT_ALIASES
#include "lexer.h"
#include "munit.h"
#include <string.h>
void
make_lexer_from_static_src(lexer_t *lexer, char *src)
{
    lexer->cur = 0;
    lexer->row = 0;
    lexer->bol = 0;
    lexer->src = src;
    lexer->srclen = strlen(src);
}
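
/* Skips token_index tokens, then asserts the next token's kind and lexeme. */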
void
assert_token_at(char *source, int token_index, token_kind_t expected_kind, char *expected)
{
    lexer_t lexer;
    token_t token;

    make_lexer_from_static_src(&lexer, source);

    while (token_index > 0) {
        lexer_next_token(&lexer, &token);
        token_index--;
    }

    lexer_next_token(&lexer, &token);

    char actual[token.value.size + 1];
    string_view_to_str(&token.value, actual);

    assert_string_equal(token_kind_to_str(expected_kind), token_kind_to_str(token.kind));
    assert_int(expected_kind, ==, token.kind);
    assert_string_equal(expected, actual);
}
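
/* A keyword source string must tokenize as a single token of the expected kind. */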
void
assert_keyword(char *source, token_kind_t expected_kind)
{
    assert_token_at(source, 0, expected_kind, source);
}

static MunitResult
test_tokenize_keywords(const MunitParameter params[], void *user_data_or_fixture)
{
    assert_keyword("return", TOKEN_KEYWORD_RETURN);
    assert_keyword("fn", TOKEN_KEYWORD_FN);
    assert_keyword("let", TOKEN_KEYWORD_LET);
    return MUNIT_OK;
}

static MunitResult
test_tokenize_number(const MunitParameter params[], void *user_data_or_fixture)
{
    assert_token_at("1", 0, TOKEN_NUMBER, "1");
    assert_token_at(" 13 ", 0, TOKEN_NUMBER, "13");
    assert_token_at(" \n 13 ", 0, TOKEN_NUMBER, "13");
    return MUNIT_OK;
}

static MunitResult
test_tokenize_name(const MunitParameter params[], void *user_data_or_fixture)
{
    assert_token_at("myname", 0, TOKEN_NAME, "myname");
    assert_token_at("my_name", 0, TOKEN_NAME, "my_name");
    assert_token_at("myname2", 0, TOKEN_NAME, "myname2");
    return MUNIT_OK;
}

static MunitResult
test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture)
{
    assert_token_at(" + 2", 0, TOKEN_PLUS, "+");
    assert_token_at(" - \n", 0, TOKEN_MINUS, "-");
    assert_token_at(" * ;", 0, TOKEN_STAR, "*");
    assert_token_at(" / ", 0, TOKEN_SLASH, "/");
    assert_token_at(" = ", 0, TOKEN_EQUAL, "=");
    assert_token_at("1 * 2", 0, TOKEN_NUMBER, "1");
    assert_token_at("1 * 2", 1, TOKEN_STAR, "*");
    assert_token_at("1 * 2", 2, TOKEN_NUMBER, "2");
    return MUNIT_OK;
}

static MunitResult
test_tokenize_unknown(const MunitParameter params[], void *user_data_or_fixture)
{
    assert_token_at(" @ ", 0, TOKEN_UNKNOWN, "@");
    assert_token_at(" $ ", 0, TOKEN_UNKNOWN, "$");
    return MUNIT_OK;
}
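
/* Peeking must yield the same token that the following lexer_next_token call returns. */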
static MunitResult
test_peek_next_token(const MunitParameter params[], void *user_data_or_fixture)
{
    lexer_t lexer;
    token_t peek_token;
    token_t next_token;

    make_lexer_from_static_src(&lexer, "()");

    lexer_peek_next_token(&lexer, &peek_token);
    lexer_next_token(&lexer, &next_token);

    assert_int(peek_token.kind, ==, next_token.kind);
    assert_char((char)peek_token.value.str[0], ==, (char)next_token.value.str[0]);
    return MUNIT_OK;
}
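
/* Test registry; munit requires the NULL entry as the array terminator. */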
static MunitTest tests[] = {
    { "/test_tokenize_number", test_tokenize_number, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { "/test_tokenize_keywords", test_tokenize_keywords, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { "/test_tokenize_name", test_tokenize_name, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { "/test_tokenize_op", test_tokenize_op, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { "/test_tokenize_unknown", test_tokenize_unknown, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { "/test_peek_next_token", test_peek_next_token, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
    { NULL, NULL, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }
};
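
/* Suite rooted at "/lexer_test"; each test runs a single iteration. */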
static const MunitSuite suite = { "/lexer_test", tests, NULL, 1, MUNIT_SUITE_OPTION_NONE };

int
main(int argc, char *argv[])
{
    return munit_suite_main(&suite, NULL, argc, argv);
}