lexer: fix inverted length check in lexer_consume_n

The excessive-length guard compared in the wrong direction (`len > n`
instead of `n > len`). As a result, consuming n tokens was rejected with
err_consume_excessive_length whenever the destination buffer was larger
than the request — the normal case — and was allowed when the request
exceeded the buffer's capacity, letting the subsequent memcpy write past
the end of `buffer`. Flip the comparison so the error fires only when
the caller asks for more bytes (n) than the buffer can hold (len).
This commit is contained in:
		| @@ -183,7 +183,7 @@ error_t *lexer_consume_n(lexer_t *lex, const size_t len, | ||||
|                          char buffer[static len], const size_t n) { | ||||
|     if (lex->buffer_count < n) | ||||
|         return err_buffer_underrun; | ||||
|     if (len > n) | ||||
|     if (n > len) | ||||
|         return err_consume_excessive_length; | ||||
|  | ||||
|     memcpy(buffer, lex->buffer, n); | ||||
|   | ||||
		Reference in New Issue
	
	Block a user