author    | Carlos Maniero <carlos@maniero.me>        | 2023-05-06 12:29:05 -0300
committer | Johnny Richard <johnny@johnnyrichard.com> | 2023-05-06 22:53:49 +0200
commit    | ccd5e8585f10488eed72c772cc1804efea6b8fb4 (patch)
tree      | f5690cb92e94758d44f5c29715c0d2415659fa7c /test
parent    | 6a297e0ef57f2ae6d6134bd44a33c55fa9628cfe (diff)
lexer: Tokenize logical and bitwise operators
The following logical operators were added to the lexer (a sketch of the resulting token-kind declarations follows the two lists below):
TOKEN_EQUAL                ==
TOKEN_NOT                  !
TOKEN_NOT_EQUAL            !=
TOKEN_GT                   >
TOKEN_GT_EQUAL             >=
TOKEN_LT                   <
TOKEN_LT_EQUAL             <=
TOKEN_AND                  &&
TOKEN_OR                   ||
The following bitwise operators were also added:
TOKEN_BITWISE_AND          &
TOKEN_BITWISE_OR           |
TOKEN_BITWISE_SHIFT_LEFT   <<
TOKEN_BITWISE_SHIFT_RIGHT  >>
TOKEN_BITWISE_XOR          ^
TOKEN_BITWISE_NOT          ~
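As a rough sketch of where these end up in the code (not taken from the project's actual lexer.h, which is not part of this diff), the additions could be declared along these lines; the enum name token_kind_t and the elided pre-existing members are assumptions, while the operator names and lexemes are the ones listed above:

    /* Sketch only: the enum name and the elided pre-existing members are
     * assumptions; the operator names and lexemes come from the lists above. */
    typedef enum {
        TOKEN_OPAREN,               /* presumably '(' -- first pre-existing kind */
        /* ... other pre-existing kinds (TOKEN_PLUS, TOKEN_STAR, ...) ... */
        TOKEN_ASSIGN,               /* =  (see the rename note below) */
        TOKEN_EQUAL,                /* == */
        TOKEN_NOT,                  /* !  */
        TOKEN_NOT_EQUAL,            /* != */
        TOKEN_GT,                   /* >  */
        TOKEN_GT_EQUAL,             /* >= */
        TOKEN_LT,                   /* <  */
        TOKEN_LT_EQUAL,             /* <= */
        TOKEN_AND,                  /* && */
        TOKEN_OR,                   /* || */
        TOKEN_BITWISE_AND,          /* &  */
        TOKEN_BITWISE_OR,           /* |  */
        TOKEN_BITWISE_SHIFT_LEFT,   /* << */
        TOKEN_BITWISE_SHIFT_RIGHT,  /* >> */
        TOKEN_BITWISE_XOR,          /* ^  */
        TOKEN_BITWISE_NOT,          /* ~  */
        TOKEN_UNKNOWN               /* sentinel; upper bound of the loop in the test below */
    } token_kind_t;

The updated test iterates every kind from TOKEN_OPAREN up to, but not including, TOKEN_UNKNOWN, so whatever the real ordering is, the new operator kinds have to sit inside that contiguous range and TOKEN_UNKNOWN has to stay the upper bound.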
TOKEN_EQUAL ('=') was renamed to TOKEN_ASSIGN; TOKEN_EQUAL now denotes the
logical comparator '=='.
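Since '=', '!', '<', '>', '&' and '|' are now prefixes of longer operators, the lexer has to look one character ahead before deciding on a kind. Below is a minimal sketch of that disambiguation, assuming the token_kind_t values sketched above; scan_operator and its signature are hypothetical and not the project's actual lexer code, which is not part of this diff:

    #include <stddef.h>

    /* Hypothetical sketch: classify the operator starting at src using one
     * character of lookahead.  Writes the kind through *kind and returns how
     * many characters were consumed. */
    static size_t
    scan_operator(const char *src, token_kind_t *kind)
    {
        char c = src[0];
        char next = (c != '\0') ? src[1] : '\0';  /* one character of lookahead */

        switch (c) {
        case '=':
            if (next == '=') { *kind = TOKEN_EQUAL; return 2; }               /* == */
            *kind = TOKEN_ASSIGN; return 1;                                   /* =  */
        case '!':
            if (next == '=') { *kind = TOKEN_NOT_EQUAL; return 2; }           /* != */
            *kind = TOKEN_NOT; return 1;                                      /* !  */
        case '<':
            if (next == '=') { *kind = TOKEN_LT_EQUAL; return 2; }            /* <= */
            if (next == '<') { *kind = TOKEN_BITWISE_SHIFT_LEFT; return 2; }  /* << */
            *kind = TOKEN_LT; return 1;                                       /* <  */
        case '>':
            if (next == '=') { *kind = TOKEN_GT_EQUAL; return 2; }            /* >= */
            if (next == '>') { *kind = TOKEN_BITWISE_SHIFT_RIGHT; return 2; } /* >> */
            *kind = TOKEN_GT; return 1;                                       /* >  */
        case '&':
            if (next == '&') { *kind = TOKEN_AND; return 2; }                 /* && */
            *kind = TOKEN_BITWISE_AND; return 1;                              /* &  */
        case '|':
            if (next == '|') { *kind = TOKEN_OR; return 2; }                  /* || */
            *kind = TOKEN_BITWISE_OR; return 1;                               /* |  */
        case '^':
            *kind = TOKEN_BITWISE_XOR; return 1;                              /* ^  */
        case '~':
            *kind = TOKEN_BITWISE_NOT; return 1;                              /* ~  */
        default:
            *kind = TOKEN_UNKNOWN; return 1;
        }
    }

Each case only consumes the second character when it completes a two-character operator, which is what keeps '&' and '&&' (or '<' and '<<') from colliding.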
Signed-off-by: Carlos Maniero <carlos@maniero.me>
Diffstat (limited to 'test')
-rw-r--r-- | test/lexer_test.c | 17
1 file changed, 11 insertions, 6 deletions
diff --git a/test/lexer_test.c b/test/lexer_test.c
index 3c43342..87928b4 100644
--- a/test/lexer_test.c
+++ b/test/lexer_test.c
@@ -17,6 +17,7 @@
 #define MUNIT_ENABLE_ASSERT_ALIASES
 #include "lexer.h"
 #include "munit.h"
+#include <stdio.h>

 void
 make_lexer_from_static_src(lexer_t *lexer, char *src)
@@ -88,17 +89,21 @@ test_tokenize_name(const MunitParameter params[], void *user_data_or_fixture)
 }

 static MunitResult
-test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture)
+test_tokenize_basic_check(const MunitParameter params[], void *user_data_or_fixture)
 {
     assert_token_at(" + 2", 0, TOKEN_PLUS, "+");
     assert_token_at(" - \n", 0, TOKEN_MINUS, "-");
     assert_token_at(" * ;", 0, TOKEN_STAR, "*");
     assert_token_at(" / ", 0, TOKEN_SLASH, "/");
-    assert_token_at(" = ", 0, TOKEN_EQUAL, "=");

-    assert_token_at("1 * 2", 0, TOKEN_NUMBER, "1");
-    assert_token_at("1 * 2", 1, TOKEN_STAR, "*");
-    assert_token_at("1 * 2", 2, TOKEN_NUMBER, "2");
+    for (size_t kind = TOKEN_OPAREN; kind < TOKEN_UNKNOWN; kind++) {
+        char source[128];
+        sprintf(source, "1 %s 2", token_kind_to_str(kind));
+
+        assert_token_at(source, 0, TOKEN_NUMBER, "1");
+        assert_token_at(source, 1, kind, token_kind_to_str(kind));
+        assert_token_at(source, 2, TOKEN_NUMBER, "2");
+    }

     return MUNIT_OK;
 }
@@ -134,7 +139,7 @@ static MunitTest tests[] = {
     { "/test_tokenize_digit", test_tokenize_number, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_keywords", test_tokenize_keywords, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_name", test_tokenize_name, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
-    { "/test_tokenize_op", test_tokenize_op, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
+    { "/test_tokenize_basic_check", test_tokenize_basic_check, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_unknown", test_tokenize_unknown, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_peek_next_token", test_peek_next_token, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { NULL, NULL, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }
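The rewritten test relies on token_kind_to_str() returning the literal lexeme for every kind in the iterated range, since the same string is used both to build the source under test (via sprintf) and as the expected token text. The real implementation lives in lexer.c, which is not shown in this diff; a plausible sketch for the new operator kinds, assuming the token_kind_t above, would be:

    /* Plausible sketch of the lexeme mapping the loop-based test depends on;
     * not the project's actual implementation. */
    const char *
    token_kind_to_str(token_kind_t kind)
    {
        switch (kind) {
        case TOKEN_ASSIGN:              return "=";
        case TOKEN_EQUAL:               return "==";
        case TOKEN_NOT:                 return "!";
        case TOKEN_NOT_EQUAL:           return "!=";
        case TOKEN_GT:                  return ">";
        case TOKEN_GT_EQUAL:            return ">=";
        case TOKEN_LT:                  return "<";
        case TOKEN_LT_EQUAL:            return "<=";
        case TOKEN_AND:                 return "&&";
        case TOKEN_OR:                  return "||";
        case TOKEN_BITWISE_AND:         return "&";
        case TOKEN_BITWISE_OR:          return "|";
        case TOKEN_BITWISE_SHIFT_LEFT:  return "<<";
        case TOKEN_BITWISE_SHIFT_RIGHT: return ">>";
        case TOKEN_BITWISE_XOR:         return "^";
        case TOKEN_BITWISE_NOT:         return "~";
        default:                        return "<?>"; /* remaining kinds elided in this sketch */
        }
    }

With a mapping like this in place, any operator later added between TOKEN_OPAREN and TOKEN_UNKNOWN is exercised by test_tokenize_basic_check automatically, without a hand-written assertion.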