Add functionality to main to parse and print the AST

This commit is contained in:
omicron 2025-04-01 15:03:36 +02:00
parent b4757e008c
commit 5ea942024f
3 changed files with 65 additions and 29 deletions

View File

@ -10,7 +10,7 @@ OBJECTS = $(SOURCES:.c=.o)
DEPENDENCIES = $(SOURCES:.c=.d) DEPENDENCIES = $(SOURCES:.c=.d)
TARGET?=oas TARGET?=oas
OUTPUTS=oas oas-asan oas-msan oas-afl OUTPUTS=oas oas-asan oas-msan oas-afl
RUNARGUMENTS?=-tokens tests/input/valid.asm RUNARGUMENTS?=ast tests/input/valid.asm
all: $(TARGET) all: $(TARGET)

View File

@ -1,5 +1,6 @@
#include "error.h" #include "error.h"
#include "lexer.h" #include "lexer.h"
#include "parser/parser.h"
#include "tokenlist.h" #include "tokenlist.h"
#include <limits.h> #include <limits.h>
@ -7,38 +8,64 @@
#include <stdlib.h> #include <stdlib.h>
#include <string.h> #include <string.h>
bool print_token(lexer_token_t *token) { typedef enum mode { MODE_AST, MODE_TEXT, MODE_TOKENS } mode_t;
lexer_token_print(token);
return true; void print_tokens(tokenlist_t *list) {
for (auto entry = list->head; entry; entry = entry->next) {
auto token = &entry->token;
lexer_token_print(token);
}
} }
bool print_value(lexer_token_t *token) { void print_text(tokenlist_t *list) {
if (token->id == TOKEN_ERROR) { for (auto entry = list->head; entry; entry = entry->next) {
printf("%s\n", token->value); auto token = &entry->token;
for (size_t i = 0; i < token->character_number; ++i) if (token->id == TOKEN_ERROR) {
printf(" "); printf("%s\n", token->value);
printf("^-- %s\n", token->explanation); for (size_t i = 0; i < token->character_number; ++i)
} else { printf(" ");
printf("%s", token->value); printf("^-- %s\n", token->explanation);
return;
} else {
printf("%s", token->value);
}
} }
return token->id != TOKEN_ERROR; }
/* Parse the token list and dump the resulting AST.
 * On a parse error the message is printed and the error freed; after a
 * successful parse any leftover (unparsed) token is reported before the
 * tree is released. */
void print_ast(tokenlist_t *list) {
    parse_result_t res = parse(list->head);

    if (res.err != nullptr) {
        puts(res.err->message);
        error_free(res.err);
        return;
    }

    ast_node_print(res.node);

    if (res.next != nullptr) {
        puts("First unparsed token:");
        lexer_token_print(&res.next->token);
    }

    ast_node_free(res.node);
}
/* Map the command line onto an execution mode.
 *
 * Expects exactly: oas [tokens|text|ast] <filename>.  On any other
 * invocation it prints the usage line and exits with status 1, so a
 * return always means argv[1] was one of the three keywords.
 *
 * Returns MODE_TOKENS, MODE_TEXT or MODE_AST.
 *
 * Fix vs. original: the validity check used a bare
 * `strcmp(argv[1], "ast")` with the `!= 0` dropped — correct only by
 * accident of strcmp's truthiness and inconsistent with the adjacent
 * comparisons.  Restructured so each keyword is compared exactly once,
 * with explicit `== 0` tests.
 */
int get_execution_mode(int argc, char *argv[]) {
    if (argc == 3) {
        if (strcmp(argv[1], "tokens") == 0) {
            return MODE_TOKENS;
        }
        if (strcmp(argv[1], "text") == 0) {
            return MODE_TEXT;
        }
        if (strcmp(argv[1], "ast") == 0) {
            return MODE_AST;
        }
    }
    puts("Usage: oas [tokens|text|ast] <filename>");
    exit(1);
}
int main(int argc, char *argv[]) { int main(int argc, char *argv[]) {
if (argc != 3 || mode_t mode = get_execution_mode(argc, argv);
(strcmp(argv[1], "-tokens") != 0 && strcmp(argv[1], "-text") != 0)) {
puts("Usage: oas -tokens <filename>");
puts("Usage: oas -text <filename>");
return 1;
}
bool (*print_fn)(lexer_token_t *);
char *filename = argv[2]; char *filename = argv[2];
if (strcmp(argv[1], "-tokens") == 0) {
print_fn = print_token;
} else {
print_fn = print_value;
}
lexer_t *lex = &(lexer_t){}; lexer_t *lex = &(lexer_t){};
error_t *err = lexer_open(lex, filename); error_t *err = lexer_open(lex, filename);
@ -54,9 +81,18 @@ int main(int argc, char *argv[]) {
if (err) if (err)
goto cleanup_tokens; goto cleanup_tokens;
for (auto entry = list->head; entry; entry = entry->next) { switch (mode) {
print_fn(&entry->token); case MODE_TOKENS:
print_tokens(list);
break;
case MODE_TEXT:
print_text(list);
break;
case MODE_AST:
print_ast(list);
break;
} }
tokenlist_free(list); tokenlist_free(list);
error_free(err); error_free(err);
return 0; return 0;

View File

@ -10,7 +10,7 @@ scan-build -o reports/static-analysis/ -plist-html --status-bugs make all
# Run the sanitizer builds and valgrind # Run the sanitizer builds and valgrind
make clean sanitize all make clean sanitize all
ARGUMENTS=("-tokens" "-text") ARGUMENTS=("tokens" "text" "ast")
while IFS= read -r INPUT_FILE; do while IFS= read -r INPUT_FILE; do
for ARGS in ${ARGUMENTS[@]}; do for ARGS in ${ARGUMENTS[@]}; do
./oas-asan $ARGS $INPUT_FILE > /dev/null ./oas-asan $ARGS $INPUT_FILE > /dev/null