From b8e76aa1e888a4602d4220206bbea8d04882fae2 Mon Sep 17 00:00:00 2001 From: Carlos Maniero Date: Wed, 26 Apr 2023 01:57:40 -0300 Subject: lexer: Split operation tokens into their own tokens The +, -, *, and / tokens used to be TOKEN_OP, but the TOKEN_OP has been removed and a token for each operation has been introduced. Python's token names were followed: https://docs.python.org/3/library/token.html Signed-off-by: Carlos Maniero Reviewed-by: Johnny Richard --- test/lexer_test.c | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) (limited to 'test/lexer_test.c') diff --git a/test/lexer_test.c b/test/lexer_test.c index abfac16..bbb6784 100644 --- a/test/lexer_test.c +++ b/test/lexer_test.c @@ -64,14 +64,14 @@ test_tokenize_number(const MunitParameter params[], void *user_data_or_fixture) static MunitResult test_tokenize_op(const MunitParameter params[], void *user_data_or_fixture) { - assert_token_at(" + 2", 0, TOKEN_OP, "+"); - assert_token_at(" - \n", 0, TOKEN_OP, "-"); - assert_token_at(" * ;", 0, TOKEN_OP, "*"); - assert_token_at(" / ", 0, TOKEN_OP, "/"); - assert_token_at(" = ", 0, TOKEN_OP, "="); + assert_token_at(" + 2", 0, TOKEN_PLUS, "+"); + assert_token_at(" - \n", 0, TOKEN_MINUS, "-"); + assert_token_at(" * ;", 0, TOKEN_STAR, "*"); + assert_token_at(" / ", 0, TOKEN_SLASH, "/"); + assert_token_at(" = ", 0, TOKEN_EQUAL, "="); assert_token_at("1 * 2", 0, TOKEN_NUMBER, "1"); - assert_token_at("1 * 2", 1, TOKEN_OP, "*"); + assert_token_at("1 * 2", 1, TOKEN_STAR, "*"); assert_token_at("1 * 2", 2, TOKEN_NUMBER, "2"); return MUNIT_OK; -- cgit v1.2.3