diff --git a/src/ast.c b/src/ast.c
index 769d3b7..88caf60 100644
--- a/src/ast.c
+++ b/src/ast.c
@@ -3,7 +3,7 @@
 #include
 #include
 
-error_t *err_node_children_cap = &(error_t){
+error_t *const err_ast_children_cap = &(error_t){
     .message = "Failed to increase ast node children, max capacity reached"};
 
 error_t *ast_node_alloc(ast_node_t **output) {
@@ -50,7 +50,7 @@ error_t *ast_node_alloc_children(ast_node_t *node) {
 
 error_t *ast_node_grow_cap(ast_node_t *node) {
     if (node->cap >= node_max_children_cap) {
-        return err_node_children_cap;
+        return err_ast_children_cap;
     }
 
     size_t new_cap = node->cap * 2;
diff --git a/src/ast.h b/src/ast.h
index 448c274..fb6371c 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -7,6 +7,8 @@
 #include
 #include
 
+extern error_t *const err_ast_children_cap;
+
 typedef enum node_id {
     NODE_INVALID,
 
diff --git a/src/error.c b/src/error.c
index 9ede6cb..31bf014 100644
--- a/src/error.c
+++ b/src/error.c
@@ -9,8 +9,13 @@ error_t *const err_errorf_alloc = &(error_t){
     .message = "Formatting of another error failed to determine the error length"};
 error_t *const err_errorf_length = &(error_t){
     .message = "Formatting of another error failed to determine the error length"};
+error_t *const err_eof =
+    &(error_t){.message = "Read failed because EOF is reached"};
 
-error_t *err_allocation_failed =
+error_t *const err_unknown_read_failure =
+    &(error_t){.message = "Unknown read error"};
+
+error_t *const err_allocation_failed =
     &(error_t){.message = "Memory allocation failed"};
 
 error_t *errorf(const char *fmt, ...) {
diff --git a/src/error.h b/src/error.h
index 96374e0..e637c27 100644
--- a/src/error.h
+++ b/src/error.h
@@ -19,6 +19,8 @@ static inline void error_free(error_t *err) {
 }
 
 /* Some global errors */
-extern error_t *err_allocation_failed;
+extern error_t *const err_allocation_failed;
+extern error_t *const err_eof;
+extern error_t *const err_unknown_read_failure;
 
 #endif // INCLUDE_SRC_ERROR_H_
diff --git a/src/lexer.c b/src/lexer.c
index 27aef0c..d183821 100644
--- a/src/lexer.c
+++ b/src/lexer.c
@@ -5,21 +5,16 @@
 #include
 #include
 
-error_t *err_lexer_already_open = &(error_t){
+error_t *const err_lexer_already_open = &(error_t){
     .message = "Can't open on a lexer object that is already opened. "
                "Close it first."};
-error_t *err_prefix_too_large =
+error_t *const err_prefix_too_large =
     &(error_t){.message = "Prefix too large for internal lexer buffer"};
 
-error_t *err_buffer_underrun = &(error_t){
+error_t *const err_buffer_underrun = &(error_t){
     .message = "Buffer does not contain enough characters for lexer_consume_n"};
 
-error_t *err_consume_excessive_length =
+error_t *const err_consume_excessive_length =
     &(error_t){.message = "Too many valid characters to consume"};
 
-error_t *err_eof =
-    &(error_t){.message = "Can't read from file because EOF is reached"};
-
-error_t *err_unknown_read = &(error_t){.message = "Unknown read error"};
-
 typedef bool (*char_predicate_t)(char);
@@ -112,7 +107,7 @@ error_t *lexer_fill_buffer(lexer_t *lex) {
         if (n == 0 && ferror(lex->fp))
             return errorf("Read error: %s", strerror(errno));
         if (n == 0)
-            return err_unknown_read;
+            return err_unknown_read_failure;
         remaining -= n;
         lex->buffer_count += n;
     }
diff --git a/src/lexer.h b/src/lexer.h
index 3265f37..6cbbab1 100644
--- a/src/lexer.h
+++ b/src/lexer.h
@@ -5,7 +5,10 @@
 #include
 #include
 
-extern error_t *err_eof;
+extern error_t *const err_lexer_already_open;
+extern error_t *const err_prefix_too_large;
+extern error_t *const err_buffer_underrun;
+extern error_t *const err_consume_excessive_length;
 
 typedef enum {
     TOKEN_ERROR,
diff --git a/src/parser/util.c b/src/parser/util.c
index 09741b2..29d665d 100644
--- a/src/parser/util.c
+++ b/src/parser/util.c
@@ -1,7 +1,7 @@
 #include "util.h"
 #include "../tokenlist.h"
 
-error_t *err_parse_no_match =
+error_t *const err_parse_no_match =
     &(error_t){.message = "parsing failed to find the correct token sequence"};
 
 parse_result_t parse_error(error_t *err) {
diff --git a/src/parser/util.h b/src/parser/util.h
index 5c074b0..18b4684 100644
--- a/src/parser/util.h
+++ b/src/parser/util.h
@@ -21,6 +21,6 @@ parse_result_t parse_token(tokenlist_entry_t *current,
                            token_validator_t is_valid);
 parse_result_t parse_result_wrap(node_id_t id, parse_result_t result);
 
-extern error_t *err_parse_no_match;
+extern error_t *const err_parse_no_match;
 
 #endif // INCLUDE_PARSER_UTIL_H_
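
Usage sketch (not part of the diff above): one way a caller might consume the const-qualified sentinel errors this change introduces. It assumes lexer_t and lexer_fill_buffer() are visible through lexer.h and that sentinels are told apart from errorf()-allocated errors by pointer identity; the refill_or_report() helper itself is hypothetical.

#include <stdio.h>

#include "error.h"
#include "lexer.h"

/* Hypothetical helper: refill the lexer buffer and log any failure other
 * than end-of-file, which callers are expected to treat as a normal
 * condition rather than an error worth reporting. */
static error_t *refill_or_report(lexer_t *lex) {
    error_t *err = lexer_fill_buffer(lex);
    if (err != NULL && err != err_eof) {
        /* Sentinels are statically allocated and recognised by address;
         * only genuinely unexpected failures are logged here. */
        fprintf(stderr, "lexer: %s\n", err->message);
    }
    return err; /* err_eof, err_unknown_read_failure, errorf() results, ... */
}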