summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorCarlos Maniero <carlos@maniero.me>2023-05-03 23:56:48 -0300
committerJohnny Richard <johnny@johnnyrichard.com>2023-05-04 21:50:03 +0200
commit17ae189d4a6aa926d8931b1e4f7db8de6caddd90 (patch)
tree2b135502ce598046e2c4e0394a95ada18b8c26bb
parente2e0ed950bb147ebca3b9ac879268feeb185e20b (diff)
lexer: Allows snake_case token names
Signed-off-by: Carlos Maniero <carlos@maniero.me>
Reviewed-by: Johnny Richard <johnny@johnnyrichard.com>
-rw-r--r--src/lexer.c2
-rw-r--r--test/lexer_test.c11
2 files changed, 12 insertions, 1 deletion
diff --git a/src/lexer.c b/src/lexer.c
index 56e24af..5a6d245 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -82,7 +82,7 @@ lexer_tokenize_name(lexer_t *lexer, token_t *token)
{
size_t begin = lexer->cur;
- while (lexer_is_not_eof(lexer) && isalnum(lexer_current_char(lexer))) {
+ while (lexer_is_not_eof(lexer) && (isalnum(lexer_current_char(lexer)) || lexer_current_char(lexer) == '_')) {
lexer_drop_char(lexer);
}
diff --git a/test/lexer_test.c b/test/lexer_test.c
index a3f644d..3c43342 100644
--- a/test/lexer_test.c
+++ b/test/lexer_test.c
@@ -78,6 +78,16 @@ test_tokenize_number(const MunitParameter params[], void *user_data_or_fixture)
}
static MunitResult
+test_tokenize_name(const MunitParameter params[], void *user_data_or_fixture)
+{
+ assert_token_at("myname", 0, TOKEN_NAME, "myname");
+ assert_token_at("my_name", 0, TOKEN_NAME, "my_name");
+ assert_token_at("myname2", 0, TOKEN_NAME, "myname2");
+
+ return MUNIT_OK;
+}
+
+static MunitResult
test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture)
{
assert_token_at(" + 2", 0, TOKEN_PLUS, "+");
@@ -123,6 +133,7 @@ test_peek_next_token(const MunitParameter params[], void *user_data_or_fixture)
static MunitTest tests[] = {
{ "/test_tokenize_digit", test_tokenize_number, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
{ "/test_tokenize_keywords", test_tokenize_keywords, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
+ { "/test_tokenize_name", test_tokenize_name, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
{ "/test_tokenize_op", test_tokenize_op, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
{ "/test_tokenize_unknown", test_tokenize_unknown, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },
{ "/test_peek_next_token", test_peek_next_token, NULL, NULL, MUNIT_TEST_OPTION_NONE, NULL },