fixed a memory leak in the lexer; all tests are now memory safe
parent cb48a9f573
commit a9a26ec122
8 changed files with 378 additions and 518 deletions
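
The leak was in the longest-match loop of ARC_Lexer_LexString: each rule's automaton hands back a freshly allocated candidate string (tokenData), but the paths that abandon a candidate (no match at all, or a match no longer than the current best) never destroyed it. The following is a minimal, compilable sketch of the ownership pattern the fix adopts; match_rule, the per-rule match lengths, and main are hypothetical stand-ins for the ARC automata and are not part of this commit.

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

/* Hypothetical stand-in for a token rule's automaton: returns a heap copy of
   a matched prefix (here simply a fixed-length prefix per rule) or NULL.
   Mirrors how the real code can return non-NULL tokenData with length 0. */
static char *match_rule(const char *input, size_t ruleLen, size_t *outLen){
    *outLen = 0;
    if(strlen(input) < ruleLen) return NULL;
    char *tok = malloc(ruleLen + 1);
    memcpy(tok, input, ruleLen);
    tok[ruleLen] = '\0';
    *outLen = ruleLen;
    return tok;
}

int main(void){
    const char *input = "example";
    size_t lastTokenLength = 0;
    char *best = NULL;

    for(size_t rule = 0; rule < 4; rule++){
        size_t tokenLength;
        char *tokenData = match_rule(input, rule * 2, &tokenLength);

        //no match: destroy the candidate before continuing (cf. the -208 hunk below)
        if(tokenLength == 0){
            free(tokenData); //free(NULL) is a no-op
            continue;
        }

        //longest match so far: take ownership and move to the next rule (cf. -225)
        if(tokenLength > lastTokenLength){
            free(best);
            best = tokenData;
            lastTokenLength = tokenLength;
            continue;
        }

        //shorter or equal match: this candidate is abandoned, so it must be
        //freed here -- the missing free on this path was the leak
        free(tokenData);
    }

    printf("best match: %s (%zu chars)\n", best ? best : "(none)", lastTokenLength);
    free(best);
    return 0;
}

The invariant is the same one the diff below enforces with ARC_String_Destroy: every branch that does not take ownership of tokenData frees it before leaving the iteration.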
@@ -208,6 +208,10 @@ void ARC_Lexer_LexString(ARC_Lexer *lexer, ARC_String **data){
     //check if a token was found, if it wasn't continue. I'm doing this to try to cut down on the amount of indentation
     if(tokenLength == 0){
+        if(tokenData != NULL){
+            ARC_String_Destroy(tokenData);
+        }
+
         continue;
     }
 
 
@@ -225,6 +229,12 @@ void ARC_Lexer_LexString(ARC_Lexer *lexer, ARC_String **data){
         //update the last found tokenLength to the max length
         lastTokenLength = tokenLength;
+
+        continue;
     }
+
+    if(tokenData != NULL){
+        ARC_String_Destroy(tokenData);
+    }
 }
 
 
@@ -397,6 +407,11 @@ void ARC_LexerTokenRule_DestroyCharAutomataDataFn(void *automataData){
     free((char *)automataData);
 }
 
+//private function to free automataData stored as an ARC_String
+void ARC_LexerTokenRule_DestroyStringAutomataDataFn(void *automataData){
+    ARC_String_Destroy((ARC_String *)automataData);
+}
+
 ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchCharRule(uint32_t id, char character){
     //create the token rule
     ARC_LexerTokenRule tokenRule;
@@ -442,11 +457,6 @@ ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchCharOrBetween(uint32_t
     return tokenRule;
 }
 
-//private function to free automataData stored as an ARC_String
-void ARC_LexerTokenRule_DestroyStringAutomataDataFn(void *automataData){
-    ARC_String_Destroy((ARC_String *)automataData);
-}
-
 ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchStringRule(uint32_t id, ARC_String *string){
     //create the token rule
     ARC_LexerTokenRule tokenRule;
@@ -490,39 +500,3 @@ ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchCharInStringRule(uint3
     //return the created tokenRule
     return tokenRule;
 }
-
-void ARC_Lexer_InitBasicTokenRules(ARC_Lexer *lexer){
-    //null
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_NULL, 0));
-
-    //number
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharOrBetween(ARC_LEXER_TOKEN_NUMBER, '0', '9'));
-
-    //alpha char
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharOrBetween(ARC_LEXER_TOKEN_ALPHA_LOWER_CHAR, 'a', 'z'));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharOrBetween(ARC_LEXER_TOKEN_ALPHA_UPPER_CHAR, 'A', 'Z'));
-
-    //whitespace
-    //TODO: fix this
-    ARC_String *whitespaceString;
-    ARC_String_CreateWithStrlen(&whitespaceString, " \t");
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharInStringRule(ARC_LEXER_TOKEN_WHITESPACE, whitespaceString));
-    ARC_String_Destroy(whitespaceString);
-
-    //single char tokens
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_NEWLINE_ID          , ARC_LEXER_TOKEN_NEWLINE_CHAR          ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COLON_ID            , ARC_LEXER_TOKEN_COLON_CHAR            ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_SEMICOLON_ID        , ARC_LEXER_TOKEN_SEMICOLON_CHAR        ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COMMA_ID            , ARC_LEXER_TOKEN_COMMA_CHAR            ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERIOD_ID           , ARC_LEXER_TOKEN_PERIOD_CHAR           ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_FORWARD_SLASH_ID    , ARC_LEXER_TOKEN_FORWARD_SLASH_CHAR    ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BACK_SLASH_ID       , ARC_LEXER_TOKEN_BACK_SLASH_CHAR       ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_PARENTHESIS_ID , ARC_LEXER_TOKEN_LEFT_PARENTHESIS_CHAR ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_ID, ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_ID , ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_ID, ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BANG_ID             , ARC_LEXER_TOKEN_BANG_CHAR             ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_AT_ID               , ARC_LEXER_TOKEN_AT_CHAR               ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_HASH_ID             , ARC_LEXER_TOKEN_HASH_CHAR             ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERCENT_ID          , ARC_LEXER_TOKEN_PERCENT_CHAR          ));
-}
@@ -23,7 +23,7 @@ void ARC_ParserLang_InitLexerRulesFn(ARC_Lexer *lexer){
     //whitespace
     ARC_String *whitespaceString;
     ARC_String_CreateWithStrlen(&whitespaceString, " \t");
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharInStringRule(ARC_LEXER_TOKEN_WHITESPACE, whitespaceString));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharInStringRule(ARC_PARSERLANG_TOKEN_WHITESPACE, whitespaceString));
     ARC_String_Destroy(whitespaceString);
 
     //single char tokens