diff --git a/Makefile b/Makefile
index e567b37..d685209 100644
--- a/Makefile
+++ b/Makefile
@@ -10,7 +10,7 @@ OBJECTS = $(SOURCES:.c=.o)
 DEPENDENCIES = $(SOURCES:.c=.d)
 TARGET?=oas
 OUTPUTS=oas oas-asan oas-msan oas-afl
-RUNARGUMENTS?=-tokens tests/input/valid.asm
+RUNARGUMENTS?=ast tests/input/valid.asm
 
 all: $(TARGET)
 
diff --git a/src/main.c b/src/main.c
index d74d710..7b09662 100644
--- a/src/main.c
+++ b/src/main.c
@@ -1,5 +1,6 @@
 #include "error.h"
 #include "lexer.h"
+#include "parser/parser.h"
 #include "tokenlist.h"
 
 #include <stdbool.h>
@@ -7,38 +8,66 @@
 #include <stdlib.h>
 #include <string.h>
 
-bool print_token(lexer_token_t *token) {
-    lexer_token_print(token);
-    return true;
+// Execution mode from argv[1] ("run_mode_t" avoids clashing with POSIX mode_t).
+typedef enum run_mode { MODE_AST, MODE_TEXT, MODE_TOKENS } run_mode_t;
+
+void print_tokens(tokenlist_t *list) {
+    for (auto entry = list->head; entry; entry = entry->next) {
+        auto token = &entry->token;
+        lexer_token_print(token);
+    }
 }
 
-bool print_value(lexer_token_t *token) {
-    if (token->id == TOKEN_ERROR) {
-        printf("%s\n", token->value);
-        for (size_t i = 0; i < token->character_number; ++i)
-            printf(" ");
-        printf("^-- %s\n", token->explanation);
-    } else {
-        printf("%s", token->value);
+void print_text(tokenlist_t *list) {
+    for (auto entry = list->head; entry; entry = entry->next) {
+        auto token = &entry->token;
+        if (token->id == TOKEN_ERROR) {
+            printf("%s\n", token->value);
+            for (size_t i = 0; i < token->character_number; ++i)
+                printf(" ");
+            printf("^-- %s\n", token->explanation);
+            return;
+        } else {
+            printf("%s", token->value);
+        }
     }
-    return token->id != TOKEN_ERROR;
+}
+
+void print_ast(tokenlist_t *list) {
+    parse_result_t result = parse(list->head);
+    if (result.err) {
+        puts(result.err->message);
+        error_free(result.err);
+        return;
+    }
+    ast_node_print(result.node);
+
+    if (result.next != nullptr) {
+        puts("First unparsed token:");
+        lexer_token_print(&result.next->token);
+    }
+
+    ast_node_free(result.node);
+}
+
+// Map argv to a run mode; prints usage and exits on invalid arguments.
+run_mode_t get_execution_mode(int argc, char *argv[]) {
+    if (argc != 3 || (strcmp(argv[1], "tokens") != 0 &&
+                      strcmp(argv[1], "text") != 0 && strcmp(argv[1], "ast") != 0)) {
+        puts("Usage: oas [tokens|text|ast] <file>");
+        exit(1);
+    }
+
+    if (strcmp(argv[1], "tokens") == 0)
+        return MODE_TOKENS;
+    if (strcmp(argv[1], "text") == 0)
+        return MODE_TEXT;
+    return MODE_AST;
 }
 
 int main(int argc, char *argv[]) {
-    if (argc != 3 ||
-        (strcmp(argv[1], "-tokens") != 0 && strcmp(argv[1], "-text") != 0)) {
-        puts("Usage: oas -tokens <file>");
-        puts("Usage: oas -text <file>");
-        return 1;
-    }
-
-    bool (*print_fn)(lexer_token_t *);
+    run_mode_t mode = get_execution_mode(argc, argv);
     char *filename = argv[2];
-    if (strcmp(argv[1], "-tokens") == 0) {
-        print_fn = print_token;
-    } else {
-        print_fn = print_value;
-    }
 
     lexer_t *lex = &(lexer_t){};
     error_t *err = lexer_open(lex, filename);
@@ -54,9 +83,18 @@ int main(int argc, char *argv[]) {
     if (err)
         goto cleanup_tokens;
 
-    for (auto entry = list->head; entry; entry = entry->next) {
-        print_fn(&entry->token);
+    switch (mode) {
+    case MODE_TOKENS:
+        print_tokens(list);
+        break;
+    case MODE_TEXT:
+        print_text(list);
+        break;
+    case MODE_AST:
+        print_ast(list);
+        break;
     }
+
     tokenlist_free(list);
     error_free(err);
     return 0;
diff --git a/validate.sh b/validate.sh
index 10c2142..33313da 100755
--- a/validate.sh
+++ b/validate.sh
@@ -10,7 +10,7 @@ scan-build -o reports/static-analysis/ -plist-html --status-bugs make all
 
 # Run the sanitizer builds and valgrind
 make clean sanitize all
-ARGUMENTS=("-tokens" "-text")
+ARGUMENTS=("tokens" "text" "ast")
 while IFS= read -r INPUT_FILE; do
     for ARGS in ${ARGUMENTS[@]}; do
         ./oas-asan $ARGS $INPUT_FILE > /dev/null