From ccd5e8585f10488eed72c772cc1804efea6b8fb4 Mon Sep 17 00:00:00 2001
From: Carlos Maniero
Date: Sat, 6 May 2023 12:29:05 -0300
Subject: lexer: Tokenize logical and bitwise operators

The following logical operators were added to the lexer:

    TOKEN_EQUAL      ==
    TOKEN_NOT        !
    TOKEN_NOT_EQUAL  !=
    TOKEN_GT         >
    TOKEN_GT_EQUAL   >=
    TOKEN_LT         <
    TOKEN_LT_EQUAL   <=
    TOKEN_AND        &&
    TOKEN_OR         ||

Bitwise operators were also added:

    TOKEN_BITWISE_AND          &
    TOKEN_BITWISE_OR           |
    TOKEN_BITWISE_SHIFT_LEFT   <<
    TOKEN_BITWISE_SHIFT_RIGHT  >>
    TOKEN_BITWISE_XOR          ^
    TOKEN_BITWISE_NOT          ~

The former TOKEN_EQUAL ('=') was renamed to TOKEN_ASSIGN; TOKEN_EQUAL now
stands for the equality comparator '=='.

Signed-off-by: Carlos Maniero
---
 test/lexer_test.c | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

(limited to 'test/lexer_test.c')

diff --git a/test/lexer_test.c b/test/lexer_test.c
index 3c43342..87928b4 100644
--- a/test/lexer_test.c
+++ b/test/lexer_test.c
@@ -17,6 +17,7 @@
 #define MUNIT_ENABLE_ASSERT_ALIASES
 #include "lexer.h"
 #include "munit.h"
+#include <stdio.h>
 
 void
 make_lexer_from_static_src(lexer_t *lexer, char *src)
@@ -88,17 +89,21 @@ test_tokenize_name(const MunitParameter params[], void *user_data_or_fixture)
 }
 
 static MunitResult
-test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture)
+test_tokenize_basic_check(const MunitParameter params[], void *user_data_or_fixture)
 {
     assert_token_at(" + 2", 0, TOKEN_PLUS, "+");
     assert_token_at(" - \n", 0, TOKEN_MINUS, "-");
     assert_token_at(" * ;", 0, TOKEN_STAR, "*");
     assert_token_at(" / ", 0, TOKEN_SLASH, "/");
-    assert_token_at(" = ", 0, TOKEN_EQUAL, "=");
 
-    assert_token_at("1 * 2", 0, TOKEN_NUMBER, "1");
-    assert_token_at("1 * 2", 1, TOKEN_STAR, "*");
-    assert_token_at("1 * 2", 2, TOKEN_NUMBER, "2");
+    for (size_t kind = TOKEN_OPAREN; kind < TOKEN_UNKNOWN; kind++) {
+        char source[128];
+        sprintf(source, "1 %s 2", token_kind_to_str(kind));
+
+        assert_token_at(source, 0, TOKEN_NUMBER, "1");
+        assert_token_at(source, 1, kind, token_kind_to_str(kind));
+        assert_token_at(source, 2, TOKEN_NUMBER, "2");
+    }
 
     return MUNIT_OK;
 }
@@ -134,7 +139,7 @@ static MunitTest tests[] = {
     { "/test_tokenize_digit", test_tokenize_number, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_keywords", test_tokenize_keywords, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_name", test_tokenize_name, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
-    { "/test_tokenize_op", test_tokenize_op, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
+    { "/test_tokenize_basic_check", test_tokenize_basic_check, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_tokenize_unknown", test_tokenize_unknown, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { "/test_peek_next_token", test_peek_next_token, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
     { NULL, NULL, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }
-- 
cgit v1.2.3
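
The diff above is limited to test/lexer_test.c, so the tokenizer changes
themselves are not visible in this view. As a rough illustration only, the
standalone sketch below shows one common way a hand-written lexer separates
one-character operators from their two-character forms ('=' vs '==',
'!' vs '!=', '<' vs '<=' vs '<<', '&' vs '&&') by peeking one character
ahead. The token names mirror the commit message, but scan_operator and the
rest of this program are assumptions made for illustration and are not taken
from the project's lexer.c.

/*
 * Minimal sketch: disambiguating one- vs two-character operators.
 * All names here are hypothetical; only the token kinds come from the
 * commit message above.
 */
#include <stdio.h>

typedef enum {
    TOKEN_ASSIGN,             /* =  */
    TOKEN_EQUAL,              /* == */
    TOKEN_NOT,                /* !  */
    TOKEN_NOT_EQUAL,          /* != */
    TOKEN_LT,                 /* <  */
    TOKEN_LT_EQUAL,           /* <= */
    TOKEN_BITWISE_SHIFT_LEFT, /* << */
    TOKEN_BITWISE_AND,        /* &  */
    TOKEN_AND,                /* && */
    TOKEN_UNKNOWN
} token_kind_t;

/* Hypothetical helper: classify the operator starting at src[*pos],
 * advancing *pos by one or two characters depending on what follows. */
static token_kind_t
scan_operator(const char *src, size_t *pos)
{
    char c = src[*pos];
    char next = src[*pos + 1]; /* '\0' at end of input, never past it */

    (*pos)++;

    switch (c) {
    case '=':
        if (next == '=') { (*pos)++; return TOKEN_EQUAL; }
        return TOKEN_ASSIGN;
    case '!':
        if (next == '=') { (*pos)++; return TOKEN_NOT_EQUAL; }
        return TOKEN_NOT;
    case '<':
        if (next == '=') { (*pos)++; return TOKEN_LT_EQUAL; }
        if (next == '<') { (*pos)++; return TOKEN_BITWISE_SHIFT_LEFT; }
        return TOKEN_LT;
    case '&':
        if (next == '&') { (*pos)++; return TOKEN_AND; }
        return TOKEN_BITWISE_AND;
    default:
        return TOKEN_UNKNOWN;
    }
}

int
main(void)
{
    const char *examples[] = { "=", "==", "!", "!=", "<", "<=", "<<", "&", "&&" };

    for (size_t i = 0; i < sizeof(examples) / sizeof(examples[0]); i++) {
        size_t pos = 0;
        token_kind_t kind = scan_operator(examples[i], &pos);

        printf("%-2s -> kind %d (consumed %zu chars)\n",
               examples[i], (int)kind, pos);
    }
    return 0;
}

The important detail is that each two-character form is checked before the
one-character fallback, so "==" is never tokenized as two TOKEN_ASSIGN
tokens and "<<" is never split into two TOKEN_LT tokens.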