Change over to a linked-list system and start trying to fix a bug that causes the parser to output nothing meaningful due to premature memory deallocation

This commit is contained in:
2025-05-28 22:51:27 +01:00
parent 43bc7663fc
commit b82e351daf
13 changed files with 465 additions and 176 deletions

View File

@@ -1,4 +1,5 @@
#include "lexer/lexer.h"
#include "lexer/token.h"
#include "parser/parser.h"
#include <stdbool.h>
@@ -37,22 +38,28 @@ int main() {
// NOTE(review): this is a unified-diff rendering whose +/- markers were lost,
// so removed (old) and added (new) lines are interleaved below; the block is
// not valid C as shown. Comments mark which lines appear pre-/post-change —
// confirm against the actual commit b82e351daf.
const char * path = "test.ar";
char *content = read_file_as_text(path);
// OLD (apparently removed): token storage via TokenStruct.
TokenStruct* tokenStruct = init_token();
// NEW (apparently added): tokens now live in a generic linked list.
LinkedList* tokens = create_list(sizeof(Token));
if (!content) return 1;
// LexerState is brace-initialized positionally: path, source text,
// starting line number (1), then the token sink.
LexerState state = {
path,
content,
1,
// OLD initializer (removed) vs NEW initializer (added) — only one of
// these two lines exists in each version; as rendered they conflict.
tokenStruct
tokens
};
lexer(state);
// NEW: parser output collected into a second linked list of TaggedValue.
LinkedList * parsed = create_list(sizeof(TaggedValue));
parser(parsed, tokens, false);
// NOTE(review): tokens are freed before `parsed` is consumed — if TaggedValue
// nodes alias token memory, this is likely the deallocation bug the commit
// message describes. TODO confirm ownership in parser().
free_list(tokens);
free(content);
// OLD (apparently removed): TaggedValueStruct-based output path.
TaggedValueStruct taggedValueStruct = init_TaggedValueStruct();
// NEW: walk the parsed list and print each node's payload.
// NOTE(review): data is printed as a C string, but the list was created with
// sizeof(TaggedValue) — presumably TaggedValue starts with (or is) a char*;
// verify, otherwise this prints garbage.
Node *current = parsed->head;
while (current) {
printf("%s\n", (char*)current->data);
current = current->next;
}
// OLD (apparently removed): previous parser call and token cleanup.
parser(&taggedValueStruct, tokenStruct, false);
free_tokens(tokenStruct);
// NOTE(review): `parsed` is never freed in the new code — memory leak;
// consider free_list(parsed) before returning.
return 0;
}