diff options
-rw-r--r-- | src/lexer.c | 2 | ||||
-rw-r--r-- | test/lexer_test.c | 11 |
2 files changed, 12 insertions, 1 deletion
diff --git a/src/lexer.c b/src/lexer.c index 56e24af..5a6d245 100644 --- a/src/lexer.c +++ b/src/lexer.c @@ -82,7 +82,7 @@ lexer_tokenize_name(lexer_t *lexer, token_t *token) { size_t begin = lexer->cur; - while (lexer_is_not_eof(lexer) && isalnum(lexer_current_char(lexer))) { + while (lexer_is_not_eof(lexer) && (isalnum(lexer_current_char(lexer)) || lexer_current_char(lexer) == '_')) { lexer_drop_char(lexer); } diff --git a/test/lexer_test.c b/test/lexer_test.c index a3f644d..3c43342 100644 --- a/test/lexer_test.c +++ b/test/lexer_test.c @@ -78,6 +78,16 @@ test_tokenize_number(const MunitParameter params[], void *user_data_or_fixture) } static MunitResult +test_tokenize_name(const MunitParameter params[], void *user_data_or_fixture) +{ + assert_token_at("myname", 0, TOKEN_NAME, "myname"); + assert_token_at("my_name", 0, TOKEN_NAME, "my_name"); + assert_token_at("myname2", 0, TOKEN_NAME, "myname2"); + + return MUNIT_OK; +} + +static MunitResult test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture) { assert_token_at(" + 2", 0, TOKEN_PLUS, "+"); @@ -123,6 +133,7 @@ test_peek_next_token(const MunitParameter params[], void *user_data_or_fixture) static MunitTest tests[] = { { "/test_tokenize_digit", test_tokenize_number, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, { "/test_tokenize_keywords", test_tokenize_keywords, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, + { "/test_tokenize_name", test_tokenize_name, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, { "/test_tokenize_op", test_tokenize_op, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, { "/test_tokenize_unknown", test_tokenize_unknown, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, { "/test_peek_next_token", test_peek_next_token, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL }, |