testing working, and fixed memory leak in replace with substring function
parent 4c0c5d377d
commit 5de968688c
7 changed files with 131 additions and 75 deletions
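Note: the commit title mentions a memory-leak fix in a replace-with-substring function. That change sits in one of the seven files but not in the hunks shown below. The usual shape of such a leak is that a heap-allocated string is overwritten with a substring of itself without the original buffer being freed first. A minimal self-contained sketch of that pattern and its fix, using a hypothetical counted-string type rather than the actual ARC_String API:

#include <stdlib.h>
#include <string.h>

/* Hypothetical counted-string type used only for this sketch. */
typedef struct {
    char   *data;
    size_t  length;
} Str;

static Str *str_create(const char *chars, size_t length){
    Str *s  = malloc(sizeof(Str));
    s->data = malloc(length + 1);
    memcpy(s->data, chars, length);
    s->data[length] = '\0';
    s->length       = length;
    return s;
}

static void str_destroy(Str *s){
    free(s->data);
    free(s);
}

/* Replace *str with its substring [start, start + length).
 * Caller guarantees start + length <= (*str)->length.
 * The leak-prone version simply overwrites *str; the fix is to
 * destroy the old string once the substring has been copied out. */
static void str_replace_with_substring(Str **str, size_t start, size_t length){
    Str *old = *str;
    Str *sub = str_create(old->data + start, length);
    str_destroy(old);   /* without this line, the old buffer leaks */
    *str = sub;
}

int main(void){
    Str *s = str_create("hello", 5);
    str_replace_with_substring(&s, 1, 3);   /* s now holds "ell"; the "hello" buffer was freed */
    str_destroy(s);
    return 0;
}

The single str_destroy(old) call is the whole fix in this sketch; everything else exists only to make it compile.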
@@ -85,7 +85,7 @@ void ARC_Lexer_LexString(ARC_Lexer *lexer, ARC_String **data){
     }
 
     //this will run until every token is stripped or there is an error
-    while(data != NULL){
+    while(*data != NULL){
         ARC_Bool tokenFound = ARC_False;
         for(uint32_t index = 0; index < ARC_Vector_GetSize(lexer->tokenRules); index++){
             //check if the token rule is found
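Note on the change above: data is an ARC_String **, so the outer pointer never becomes NULL inside the loop; it is the pointed-to string that is replaced (and eventually set to NULL) as each matched token is stripped from the front. while(*data != NULL) therefore terminates once the input is fully consumed, whereas while(data != NULL) would never exit. A minimal sketch of the same loop shape with a plain C string (names are illustrative, not the ARC API):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Strip one leading character; free and NULL the string when it is empty.
 * Stand-in for the lexer stripping one matched token off the front. */
static void consume_one(char **str){
    size_t len = strlen(*str);
    if(len <= 1){
        free(*str);
        *str = NULL;                  /* signals "input fully consumed" */
        return;
    }
    memmove(*str, *str + 1, len);     /* also moves the trailing '\0' */
}

int main(void){
    char *input = malloc(4);
    strcpy(input, "abc");
    char **data = &input;

    /* data itself stays non-NULL; *data becomes NULL once everything
     * has been consumed, which is what ends the loop. */
    while(*data != NULL){
        printf("remaining: %s\n", *data);
        consume_one(data);
    }
    return 0;
}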
@@ -106,7 +106,7 @@ void ARC_Lexer_LexString(ARC_Lexer *lexer, ARC_String **data){
             token->data = tokenData;
 
             //add to the vector and check for error (I'd be surprised if the error ever happened because that would most likely mean overflow)
-            ARC_Vector_Add(lexer->tokens, token);
+            ARC_Vector_Add(lexer->tokens, (void *)token);
             if(arc_errno){
                 ARC_DEBUG_LOG_ERROR("ARC_Lexer_LexString(lexer, data), errored when running ARC_Vector_Add(lexer->tokens, token);. check logs for more info");
                 free(token);
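The (void *) cast above only makes the conversion explicit, assuming ARC_Vector_Add takes a generic void * element; behavior is unchanged. The surrounding add-then-check-then-free pattern is what keeps a failed insert from leaking the token. A self-contained sketch of that pattern with a hypothetical void * vector (not the ARC_Vector API; a return code stands in for arc_errno):

#include <stdio.h>
#include <stdlib.h>

/* Minimal generic vector over void * elements (hypothetical, not the ARC API). */
typedef struct {
    void   **items;
    size_t   size;
    size_t   capacity;
} Vec;

static int vec_add(Vec *v, void *item){
    if(v->size == v->capacity){
        size_t newCap = v->capacity ? v->capacity * 2 : 4;
        void **grown  = realloc(v->items, newCap * sizeof(void *));
        if(grown == NULL){
            return -1;                /* report failure instead of setting a global errno */
        }
        v->items    = grown;
        v->capacity = newCap;
    }
    v->items[v->size++] = item;
    return 0;
}

int main(void){
    Vec tokens = {0};
    int *token = malloc(sizeof(int));
    *token = 42;

    /* Same shape as the diff: add the element, and on failure free it
     * so the half-constructed token is not leaked. */
    if(vec_add(&tokens, (void *)token) != 0){
        fprintf(stderr, "vec_add failed\n");
        free(token);
        return 1;
    }

    printf("stored %d token(s)\n", (int)tokens.size);
    free(tokens.items[0]);
    free(tokens.items);
    return 0;
}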
@@ -205,6 +205,7 @@ ARC_Bool ARC_Lexer_AutomataMatchStringFn(ARC_String **string, ARC_String **token
 
     //check to see if there is a match with automataData as a string
     ARC_String *automataDataString = (ARC_String *)automataData;
 
     //to keep from erroring instead of stripping from a same length string we can just delete it
-    if(ARC_String_Equals(*string, automataDataString)){
+    if((*string)->length == automataDataString->length){
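The new comparison takes a shortcut when the remaining input is exactly as long as the matched string: rather than asking a substring routine to produce a zero-length remainder (which, per the comment, would error), the string can simply be deleted, which also ends the while(*data != NULL) loop above. A hedged sketch of that strip-or-delete logic with a hypothetical counted-string type (not the ARC implementation):

#include <stdlib.h>
#include <string.h>

/* Hypothetical counted string, as in the earlier sketch. */
typedef struct {
    char   *data;
    size_t  length;
} Str;

static void str_destroy(Str *s){
    free(s->data);
    free(s);
}

/* Strip matchLength characters from the front of *str.
 * If the whole string was matched, delete it and set *str to NULL
 * instead of building an empty substring. */
static void strip_prefix(Str **str, size_t matchLength){
    if((*str)->length == matchLength){
        str_destroy(*str);
        *str = NULL;
        return;
    }
    size_t rest = (*str)->length - matchLength;
    memmove((*str)->data, (*str)->data + matchLength, rest);
    (*str)->data[rest] = '\0';
    (*str)->length     = rest;
}

int main(void){
    Str *s = malloc(sizeof(Str));
    s->data = malloc(3);
    memcpy(s->data, "::", 2);
    s->data[2] = '\0';
    s->length  = 2;

    strip_prefix(&s, 2);        /* whole string matched: s is now NULL */
    return s == NULL ? 0 : 1;
}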
@@ -276,18 +277,18 @@ ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchStringRule(uint32_t id
 }
 
 void ARC_Lexer_InitBasicTokenRules(ARC_Lexer *lexer){
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_COLON_ID            , LEXER_TOKEN_COLON_CHAR            ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_SEMICOLON_ID        , LEXER_TOKEN_SEMICOLON_CHAR        ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_COMMA_ID            , LEXER_TOKEN_COMMA_CHAR            ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_PERIOD_ID           , LEXER_TOKEN_PERIOD_CHAR           ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_FORWARD_SLASH_ID    , LEXER_TOKEN_FORWARD_SLASH_CHAR    ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_BACK_SLASH_ID       , LEXER_TOKEN_BACK_SLASH_CHAR       ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_LEFT_PARENTHESIS_ID , LEXER_TOKEN_LEFT_PARENTHESIS_CHAR ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_RIGHT_PARENTHESIS_ID, LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_LEFT_CURLY_BRACE_ID , LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_RIGHT_CURLY_BRACE_ID, LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_BANG_ID             , LEXER_TOKEN_BANG_CHAR             ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_AT_ID               , LEXER_TOKEN_AT_CHAR               ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_HASH_ID             , LEXER_TOKEN_HASH_CHAR             ));
-    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_PERCENT_ID          , LEXER_TOKEN_PERCENT_CHAR          ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COLON_ID            , ARC_LEXER_TOKEN_COLON_CHAR            ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_SEMICOLON_ID        , ARC_LEXER_TOKEN_SEMICOLON_CHAR        ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COMMA_ID            , ARC_LEXER_TOKEN_COMMA_CHAR            ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERIOD_ID           , ARC_LEXER_TOKEN_PERIOD_CHAR           ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_FORWARD_SLASH_ID    , ARC_LEXER_TOKEN_FORWARD_SLASH_CHAR    ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BACK_SLASH_ID       , ARC_LEXER_TOKEN_BACK_SLASH_CHAR       ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_PARENTHESIS_ID , ARC_LEXER_TOKEN_LEFT_PARENTHESIS_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_ID, ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_ID , ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_ID, ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BANG_ID             , ARC_LEXER_TOKEN_BANG_CHAR             ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_AT_ID               , ARC_LEXER_TOKEN_AT_CHAR               ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_HASH_ID             , ARC_LEXER_TOKEN_HASH_CHAR             ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERCENT_ID          , ARC_LEXER_TOKEN_PERCENT_CHAR          ));
 }
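The hunk above is a pure rename: the single-character token IDs and characters gain the ARC_ prefix so they sit in the library's namespace, with no change in behavior. The constant definitions themselves are not part of this diff, but the pattern behind ID/char pairs and single-character match rules looks roughly like the following self-contained sketch (all names and values here are illustrative, not the ARC definitions):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical single-character token rule, mirroring the shape of the
 * CreateAndReturnMatchCharRule calls above. */
typedef struct {
    uint32_t id;
    char     character;
} CharTokenRule;

#define TOKEN_COLON_ID       1u
#define TOKEN_COLON_CHAR     ':'
#define TOKEN_SEMICOLON_ID   2u
#define TOKEN_SEMICOLON_CHAR ';'

static CharTokenRule make_char_rule(uint32_t id, char character){
    CharTokenRule rule = { id, character };
    return rule;
}

/* A rule matches when the input starts with its character. */
static bool rule_matches(const CharTokenRule *rule, const char *input){
    return input[0] == rule->character;
}

int main(void){
    CharTokenRule rules[] = {
        make_char_rule(TOKEN_COLON_ID    , TOKEN_COLON_CHAR    ),
        make_char_rule(TOKEN_SEMICOLON_ID, TOKEN_SEMICOLON_CHAR),
    };
    const char *input = ";x";
    for(size_t i = 0; i < sizeof(rules) / sizeof(rules[0]); i++){
        if(rule_matches(&rules[i], input)){
            printf("matched token id %u\n", (unsigned)rules[i].id);
        }
    }
    return 0;
}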