From e9ffff6e9ef0cc002486dddf07bb1b5ce81cbcef Mon Sep 17 00:00:00 2001 From: timerix Date: Wed, 15 Mar 2023 05:53:43 +0600 Subject: [PATCH] LinkedList(Tokens) and memory leak fixes --- cbuild | 2 +- default.config | 2 +- kerep | 2 +- src/cb2c/cb2c.c | 8 ++++---- src/cb2c/main.c | 16 +++++++++------ src/lexer/analize.c | 49 ++++++++++++++++++--------------------------- src/lexer/context.h | 8 ++++---- src/lexer/init.c | 3 ++- src/lexer/lexer.c | 12 ++++++----- src/lexer/lexer.h | 8 ++++---- src/lexer/tokens.c | 2 +- src/lexer/tokens.h | 6 ++++-- 12 files changed, 58 insertions(+), 60 deletions(-) diff --git a/cbuild b/cbuild index 112fcc0..9f04507 160000 --- a/cbuild +++ b/cbuild @@ -1 +1 @@ -Subproject commit 112fcc04652d6ce65fbde215cd3d1f8935db2a7c +Subproject commit 9f04507bd880343b5a99194d5c744879cdb9efe4 diff --git a/default.config b/default.config index 52dc182..fd0a8d0 100644 --- a/default.config +++ b/default.config @@ -7,7 +7,7 @@ CMP_C=gcc CMP_CPP=g++ STD_C=c11 STD_CPP=c++17 -WARN_C="-Wall -Wno-discarded-qualifiers -ftrack-macro-expansion=0" +WARN_C="-Wall -Wno-discarded-qualifiers" WARN_CPP="-Wall" SRC_C="$( find src -name '*.c')" SRC_CPP="$( find src -name '*.cpp')" diff --git a/kerep b/kerep index 2a9214f..a200f2c 160000 --- a/kerep +++ b/kerep @@ -1 +1 @@ -Subproject commit 2a9214fb795e97aa754ac46346310f94b7e61bfd +Subproject commit a200f2c96563f5782ed7b56de5a5889b7d1c31cd diff --git a/src/cb2c/cb2c.c b/src/cb2c/cb2c.c index d2aed5d..c8bb721 100644 --- a/src/cb2c/cb2c.c +++ b/src/cb2c/cb2c.c @@ -4,11 +4,11 @@ #define addc(C) StringBuilder_append_char(b, C); #define adds(S) StringBuilder_append_cptr(b, S); -void appendToken(StringBuilder* b, Token tok, u16* _tab_count){ +void appendToken(StringBuilder* b, LLNode(Token)* tokNode, u16* _tab_count){ u16 tab_count=*_tab_count; - adds(tok.value) + adds(tokNode->value.value) - switch(tok.id){ + switch(tokNode->value.id){ case tok_lbracket_fi: tab_count++; goto add_new_line; @@ -48,7 +48,7 @@ Maybe appendNamespaceContext(StringBuilder* b, NamespaceContext* context){ adds(" */\n\n") u16 tab_count=0; - Autoarr_foreach(context->base.tokens, tok, appendToken(b, tok, &tab_count)); + LinkedList_foreach(context->base.tokens, tok, appendToken(b, tok, &tab_count)); addc('\n'); return MaybeNull; diff --git a/src/cb2c/main.c b/src/cb2c/main.c index 4e17ee8..ea034dd 100644 --- a/src/cb2c/main.c +++ b/src/cb2c/main.c @@ -1,7 +1,6 @@ #include "../../kerep/src/kprint/kprint.h" #include "../../kerep/src/Filesystem/filesystem.h" #include "cb2c.h" -#include "../lexer/lexer.h" char cb2c_version[]="0.0.1"; @@ -117,12 +116,14 @@ i32 main(const int argc, const char* const* argv){ Autoarr_foreach(source_files, src_file_name, ({ tryLast(Lexer_parseFile(lexer, src_file_name), m_tokens) - Autoarr(Token)* tokens=m_tokens.value.VoidPtr; - kprintf("tokens: %u\n", Autoarr_length(tokens)); - Autoarr_foreach(tokens, tok, kprintf("%u %s\n",tok.id, tok.value)); + LinkedList(Token)* tokens=m_tokens.value.VoidPtr; + kprintf("tokens count: %u\n", tokens->count); + LinkedList_foreach(tokens, tokNode, + kprintf("%u %s\n", tokNode->value.id, tokNode->value.value)); + char* basename=path_basename(src_file_name, false); NamespaceContext file_context={ .base={ - .name=path_basename(src_file_name, false), + .name=basename, .namespace=NULL, .parent=NULL, .type=ContextType_Namespace, @@ -138,16 +139,19 @@ i32 main(const int argc, const char* const* argv){ generated_file_name=cptr_concat(file_context.base.namespace,".",file_context.base.name,".g.c"); else 
             generated_file_name=cptr_concat(file_context.base.name,".g.c");
         tryLast(file_open(generated_file_name, FileOpenMode_Write), m_generated_file)
-        File* generated_file=m_generated_file.value.VoidPtr;
+        FileHandle generated_file=m_generated_file.value.VoidPtr;
         kprintf("created file %s\n", generated_file_name);
         tryLast(file_writeCptr(generated_file, generated_code),_m_1885);
         tryLast(file_close(generated_file),_m_14415);
         kprintf("source code has been written to the file\n");
+        free(basename);
+        LinkedList_free(tokens);
         free(generated_code);
         free(generated_file_name);
     }));
     Autoarr_free(source_files, true);
     Lexer_destroy(lexer);
+    kt_free();
     return 0;
 }
diff --git a/src/lexer/analize.c b/src/lexer/analize.c
index 098d11e..4e07970 100644
--- a/src/lexer/analize.c
+++ b/src/lexer/analize.c
@@ -15,28 +15,24 @@ void _tryAddLabel(Lexer* lex){
     Unitype uni=ST_pullString(lex->keywordSearchTree, lex->label);
     if(uni.VoidPtr!=NULL) // built-in keyword
-        Autoarr_add(lex->tokens, *(Token*)uni.VoidPtr);
+        LinkedList_addToEnd(lex->tokens, LLNode_create(Token,*(Token*)uni.VoidPtr));
     else { // user-defined lex->label
-        Token ut;
-        ut.value=string_extract(lex->label);
-        ut.on_heap=true;
-        switch(*lex->label.ptr){
+        TokenId udt_id=tok_label;
+        switch(*lex->label.ptr){ // starts with number
             case '0': case '1': case '2': case '3': case '4':
             case '5': case '6': case '7': case '8': case '9':
-                ut.id=tok_number;
-                break;
-            default:
-                ut.id=tok_label;
+                udt_id=tok_number;
                 break;
         }
-        Autoarr_add(lex->tokens, ut);
+        Token udt=Token_createUD(udt_id, string_extract(lex->label));
+        LinkedList_addToEnd(lex->tokens, LLNode_create(Token,udt));
     }
     lex->label=(string){lex->source, 0};
 };
 
 #define tryAddLabel() _tryAddLabel(lex)
-#define addDefTok(id) { tryAddLabel(); Autoarr_add(lex->tokens, default_tokens[id]); }
+#define addDefTok(id) { tryAddLabel(); LinkedList_addToEnd(lex->tokens, LLNode_create(Token,default_tokens[id])); }
 
 void _addDefTok_ifnext(Lexer* lex, char next, TokenId yes, TokenId no){
     if(*(lex->source+1)==next){
@@ -73,7 +69,9 @@ Maybe _readString(Lexer* lex, char quotChar){
 Maybe __Lexer_analize(Lexer* lex, char* _filename, char* _source){
     lex->filename=_filename;
     lex->source=_source;
-    lex->tokens=Autoarr_create(Token, 64, 1024);
+    if(lex->tokens!=NULL)
+        LinkedList_free(lex->tokens);
+    lex->tokens=LinkedList_create(Token);
     lex->label=(string){_source, 0};
     lex->line=(string){_source, 0};
     lex->linenum=0;
@@ -101,22 +99,16 @@ Maybe __Lexer_analize(Lexer* lex, char* _filename, char* _source){
 
         case '\'':
             tryAddLabel();
-            try(readString('\''), maybeC, ;){
-                Token ctok={
-                    .id=tok_character,
-                    .value=(char*)maybeC.value.VoidPtr
-                };
-                Autoarr_add(lex->tokens, ctok);
+            try(readString('\''), m_ch, ;){
+                Token ctok=Token_createUD(tok_character, m_ch.value.VoidPtr);
+                LinkedList_addToEnd(lex->tokens, LLNode_create(Token,ctok));
             }
             break;
         case '"':
             tryAddLabel();
-            try(readString('"'), maybeS, ;){
-                Token stok={
-                    .id=tok_string,
-                    .value=(char*)maybeS.value.VoidPtr
-                };
-                Autoarr_add(lex->tokens, stok);
+            try(readString('"'), m_Str, ;){
+                Token stok=Token_createUD(tok_string, m_Str.value.VoidPtr);
+                LinkedList_addToEnd(lex->tokens, LLNode_create(Token,stok));
             }
             break;
 
@@ -150,11 +142,8 @@ Maybe __Lexer_analize(Lexer* lex, char* _filename, char* _source){
                 addDefTok(tok_slash);
                 break;
             }
-            Token comTok={
-                .value=string_extract(commentStr),
-                .id=tok_comment
-            };
-            Autoarr_add(lex->tokens, comTok);
+            Token comTok=Token_createUD(tok_comment, string_extract(commentStr));
+            LinkedList_addToEnd(lex->tokens, LLNode_create(Token,comTok));
             break;
 
         case '=': addDefTok_ifnext('=', tok_equal, tok_assign); break;
@@ -180,5 +169,5 @@ Maybe __Lexer_analize(Lexer* lex, char* _filename, char* _source){
     }
 
     tryAddLabel();
-    return SUCCESS(UniHeapPtr(Autoarr(Token), lex->tokens));
+    return SUCCESS(UniHeapPtr(LinkedList(Token), lex->tokens));
 }
diff --git a/src/lexer/context.h b/src/lexer/context.h
index 4c8c0f1..2a4de07 100644
--- a/src/lexer/context.h
+++ b/src/lexer/context.h
@@ -14,7 +14,7 @@ STRUCT(Context,
     char* name;
     char* namespace; /* nullable */
     Context* parent; /* nullable */
-    Autoarr(Token)* tokens;
+    LinkedList(Token)* tokens;
     ContextType type;
 )
 
@@ -24,14 +24,14 @@ STRUCT(NamespaceContext,
 
 STRUCT(ClassContext,
     Context base;
-    Autoarr(Token)* attributes;
+    LinkedList(Token)* attributes;
     Token accessModifier;
 )
 
 STRUCT(FunctionContext,
     Context base;
-    Autoarr(Token)* arguments;
-    Autoarr(Token)* attributes;
+    LinkedList(Token)* arguments;
+    LinkedList(Token)* attributes;
     Token accessModifier;
     Token returnType;
 )
diff --git a/src/lexer/init.c b/src/lexer/init.c
index 8f8c540..a25013c 100644
--- a/src/lexer/init.c
+++ b/src/lexer/init.c
@@ -2,5 +2,6 @@
 
 void kt_initLexerTypes(){
     kt_register(Token);
-    kt_register(Autoarr_Token);
+    kt_register(LinkedList_Token);
+    kt_register(LLNode_Token);
 }
diff --git a/src/lexer/lexer.c b/src/lexer/lexer.c
index 1bb803f..c304f4e 100644
--- a/src/lexer/lexer.c
+++ b/src/lexer/lexer.c
@@ -29,15 +29,17 @@ void Lexer_destroy(Lexer* lex){
     free(lex);
 }
 
-///@return Maybe<Autoarr(Token)*>
+///@return Maybe<LinkedList(Token)*>
 Maybe Lexer_parseFile(Lexer* lex, char* src_file_name){
     try(file_open(src_file_name, FileOpenMode_Read), m_src_file,;)
-    File* src_file=m_src_file.value.VoidPtr;
+    FileHandle src_file=m_src_file.value.VoidPtr;
     char* src_text;
     try(file_readAll(src_file, &src_text), m_src_len, file_close(src_file))
     u64 src_len=m_src_len.value.UInt64;
+    try(file_close(src_file),_m_215, free(src_text));
     kprintf("srclen: %lu\n", src_len);
-    try(Lexer_parseText(lex, src_file_name, src_text), m_tokens, file_close(src_file))
-    Autoarr(Token)* tokens=m_tokens.value.VoidPtr;
-    return SUCCESS(UniHeapPtr(Autoarr(Token), tokens));
+    try(Lexer_parseText(lex, src_file_name, src_text), m_tokens,;)
+    LinkedList(Token)* tokens=m_tokens.value.VoidPtr;
+    free(src_text);
+    return SUCCESS(UniHeapPtr(LinkedList(Token), tokens));
 }
diff --git a/src/lexer/lexer.h b/src/lexer/lexer.h
index 756d6cf..cc10597 100644
--- a/src/lexer/lexer.h
+++ b/src/lexer/lexer.h
@@ -11,7 +11,7 @@ STRUCT(Lexer,
     char* source;
     char* filename;
-    Autoarr(Token)* tokens;
+    LinkedList(Token)* tokens;
     string context;
     string line;
     string label;
@@ -21,12 +21,12 @@ STRUCT(Lexer,
 Lexer* Lexer_create();
 void Lexer_destroy(Lexer* lex);
 
-///@return Maybe<Autoarr(Token)*>
+///@return Maybe<LinkedList(Token)*>
 
-///@return Maybe<Autoarr(Token)*>
+///@return Maybe<LinkedList(Token)*>
 Maybe Lexer_parseFile(Lexer* lex, char* src_file_name);
 
-///@return Maybe<Autoarr(Token)*>
+///@return Maybe<LinkedList(Token)*>
 static inline Maybe Lexer_parseText(Lexer* lex, char* src_file_name, char* src_file_text){
     return lex->analize(lex, src_file_name, src_file_text);
 }
diff --git a/src/lexer/tokens.c b/src/lexer/tokens.c
index 8053781..c3475d8 100644
--- a/src/lexer/tokens.c
+++ b/src/lexer/tokens.c
@@ -11,4 +11,4 @@ char* Token_toString(void* _t, u32 fmt){
 }
 
 kt_define(Token, Token_freeMembers, Token_toString);
-Autoarr_define(Token, false)
+LinkedList_define(Token, false)
diff --git a/src/lexer/tokens.h b/src/lexer/tokens.h
index 3b56729..e3eb4e0 100644
--- a/src/lexer/tokens.h
+++ b/src/lexer/tokens.h
@@ -1,6 +1,6 @@
 #pragma once
 
-#include "../../kerep/src/Autoarr/Autoarr.h"
+#include "../../kerep/src/LinkedList/LinkedList.h"
 #include "../../kerep/src/SearchTree/SearchTree.h"
 
 PACKED_ENUM(TokenId,
@@ -104,7 +104,9 @@ STRUCT(Token,
     TokenId id;
     bool on_heap; // allocated on stack or heap
 )
-Autoarr_declare(Token)
+LinkedList_declare(Token)
+/// user-defined token constructor
+#define Token_createUD(ID, VALUE) (Token){ .id=ID, .value=VALUE, .on_heap=true }
 
 static const Token default_tokens[]={
     /* base types */