diff --git a/CMakeLists.txt b/CMakeLists.txt
index e99eb0f..0c56225 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -3,6 +3,8 @@ cmake_minimum_required(VERSION 3.25)
 set(ARCHEUS_STD_VERSION 0.0.0)
 project(archeus_std LANGUAGES C VERSION ${ARCHEUS_STD_VERSION} DESCRIPTION "libarcheus_std standard archeus c library")
 
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+
 include(GNUInstallDirs)
 
 function(print var)
@@ -23,6 +25,7 @@ add_compile_options(
 
 # ~ OPTIONS ~ #
 option(ARCHEUS_STD_DEFAULT_CONFIG "Build with default config keys" ON)
+option(ARCHEUS_STD_TESTS "Build with tests" OFF)
 
 set(ARCHEUS_STD_CONSOLE_BACKEND "NONE" CACHE STRING "Console Backend to build with")
 set_property(CACHE ARCHEUS_STD_CONSOLE_BACKEND PROPERTY STRINGS NONE NCURSES)
@@ -59,6 +62,7 @@ set(ARCHEUS_STD_SOURCES
     src/std/handler.c
     src/std/hashtable.c
     src/std/io.c
+    src/std/lexer.c
     src/std/queue.c
     src/std/stack.c
     src/std/string.c
@@ -79,15 +83,6 @@ set(ARCHEUS_STD_SOURCES
     src/engine/state.c
 )
 
-#TODO: add this
-#add_executable(tests
-#    tests/test.c
-#
-#    tests/std/vector.c
-#
-#    ${ARCHEUS_STD_SOURCES}
-#)
-
 if(CMAKE_BUILD_TYPE STREQUAL "Debug")
     string(APPEND ARCHEUS_STD_FLAGS "-Wall -Werror -g -ggdb -DARC_DEBUG ")
 endif()
@@ -127,6 +122,21 @@ libssh_check_and_init_needed(ARCHEUS_STD_FLAGS ARCHEUS_STD_SOURCES ${ARCHEUS_STD
 
 set(CMAKE_C_FLAGS ${ARCHEUS_STD_FLAGS})
 
+if(ARCHEUS_STD_TESTS)
+    add_executable(tests
+        tests/test.c
+
+        #tests/std/vector.c
+        tests/std/lexer.c
+
+        ${ARCHEUS_STD_SOURCES}
+    )
+
+    target_compile_options(tests PUBLIC "-DARC_DEBUG_LOG_STREAM_OVERRIDE")
+    target_include_directories(tests ${ARCHEUS_STD_INCLUDE_DIRECTORIES})
+    target_link_libraries(tests ${ARCHEUS_STD_LINK_LIBRARIES})
+endif()
+
 if(WIN32 AND NOT MSVC)
     add_library(archeus_std STATIC ${ARCHEUS_STD_SOURCES})
 else()
diff --git a/include/arc/std/lexer.h b/include/arc/std/lexer.h
index 8680b19..c94d768 100644
--- a/include/arc/std/lexer.h
+++ b/include/arc/std/lexer.h
@@ -176,48 +176,48 @@ ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchStringRule(uint32_t id
 /**
  * @brief basic token type ids, chars, and tags
  */
-#define LEXER_TOKEN_COLON_ID 1
-#define LEXER_TOKEN_COLON_CHAR ':'
-#define LEXER_TOKEN_COLON_TAG "COLON"
-#define LEXER_TOKEN_SEMICOLON_ID 2
-#define LEXER_TOKEN_SEMICOLON_CHAR ';'
-#define LEXER_TOKEN_SEMICOLON_TAG "SEMICOLON"
-#define LEXER_TOKEN_COMMA_ID 3
-#define LEXER_TOKEN_COMMA_CHAR ','
-#define LEXER_TOKEN_COMMA_TAG "COMMA"
-#define LEXER_TOKEN_PERIOD_ID 4
-#define LEXER_TOKEN_PERIOD_CHAR '.'
-#define LEXER_TOKEN_PERIOD_TAG "PERIOD"
-#define LEXER_TOKEN_FORWARD_SLASH_ID 5
-#define LEXER_TOKEN_FORWARD_SLASH_CHAR '/'
-#define LEXER_TOKEN_FORWARD_SLASH_TAG "FORWARD_SLASH"
-#define LEXER_TOKEN_BACK_SLASH_ID 6
-#define LEXER_TOKEN_BACK_SLASH_CHAR '\\'
-#define LEXER_TOKEN_BACK_SLASH_TAG "BACK_SLASH"
-#define LEXER_TOKEN_LEFT_PARENTHESIS_ID 7
-#define LEXER_TOKEN_LEFT_PARENTHESIS_CHAR '('
-#define LEXER_TOKEN_LEFT_PARENTHESIS_TAG "LEFT_PARENTHESIS"
-#define LEXER_TOKEN_RIGHT_PARENTHESIS_ID 8
-#define LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR ')'
-#define LEXER_TOKEN_RIGHT_PARENTHESIS_TAG "RIGHT_PARENTHESIS"
-#define LEXER_TOKEN_LEFT_CURLY_BRACE_ID 9
-#define LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR '{'
-#define LEXER_TOKEN_LEFT_CURLY_BRACE_TAG "LEFT_CURLY_BRACE"
-#define LEXER_TOKEN_RIGHT_CURLY_BRACE_ID 10
-#define LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR '}'
-#define LEXER_TOKEN_RIGHT_CURLY_BRACE_TAG "RIGHT_CURLY_BRACE"
-#define LEXER_TOKEN_BANG_ID 11
-#define LEXER_TOKEN_BANG_CHAR '!'
-#define LEXER_TOKEN_BANG_TAG "BANG"
-#define LEXER_TOKEN_AT_ID 12
-#define LEXER_TOKEN_AT_CHAR '!'
-#define LEXER_TOKEN_AT_TAG "AT"
-#define LEXER_TOKEN_HASH_ID 13
-#define LEXER_TOKEN_HASH_CHAR '#'
-#define LEXER_TOKEN_HASH_TAG "HASH"
-#define LEXER_TOKEN_PERCENT_ID 14
-#define LEXER_TOKEN_PERCENT_CHAR '%'
-#define LEXER_TOKEN_PERCENT_TAG "PERCENT"
+#define ARC_LEXER_TOKEN_COLON_ID 1
+#define ARC_LEXER_TOKEN_COLON_CHAR ':'
+#define ARC_LEXER_TOKEN_COLON_TAG "COLON"
+#define ARC_LEXER_TOKEN_SEMICOLON_ID 2
+#define ARC_LEXER_TOKEN_SEMICOLON_CHAR ';'
+#define ARC_LEXER_TOKEN_SEMICOLON_TAG "SEMICOLON"
+#define ARC_LEXER_TOKEN_COMMA_ID 3
+#define ARC_LEXER_TOKEN_COMMA_CHAR ','
+#define ARC_LEXER_TOKEN_COMMA_TAG "COMMA"
+#define ARC_LEXER_TOKEN_PERIOD_ID 4
+#define ARC_LEXER_TOKEN_PERIOD_CHAR '.'
+#define ARC_LEXER_TOKEN_PERIOD_TAG "PERIOD"
+#define ARC_LEXER_TOKEN_FORWARD_SLASH_ID 5
+#define ARC_LEXER_TOKEN_FORWARD_SLASH_CHAR '/'
+#define ARC_LEXER_TOKEN_FORWARD_SLASH_TAG "FORWARD_SLASH"
+#define ARC_LEXER_TOKEN_BACK_SLASH_ID 6
+#define ARC_LEXER_TOKEN_BACK_SLASH_CHAR '\\'
+#define ARC_LEXER_TOKEN_BACK_SLASH_TAG "BACK_SLASH"
+#define ARC_LEXER_TOKEN_LEFT_PARENTHESIS_ID 7
+#define ARC_LEXER_TOKEN_LEFT_PARENTHESIS_CHAR '('
+#define ARC_LEXER_TOKEN_LEFT_PARENTHESIS_TAG "LEFT_PARENTHESIS"
+#define ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_ID 8
+#define ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR ')'
+#define ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_TAG "RIGHT_PARENTHESIS"
+#define ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_ID 9
+#define ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR '{'
+#define ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_TAG "LEFT_CURLY_BRACE"
+#define ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_ID 10
+#define ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR '}'
+#define ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_TAG "RIGHT_CURLY_BRACE"
+#define ARC_LEXER_TOKEN_BANG_ID 11
+#define ARC_LEXER_TOKEN_BANG_CHAR '!'
+#define ARC_LEXER_TOKEN_BANG_TAG "BANG"
+#define ARC_LEXER_TOKEN_AT_ID 12
+#define ARC_LEXER_TOKEN_AT_CHAR '@'
+#define ARC_LEXER_TOKEN_AT_TAG "AT"
+#define ARC_LEXER_TOKEN_HASH_ID 13
+#define ARC_LEXER_TOKEN_HASH_CHAR '#'
+#define ARC_LEXER_TOKEN_HASH_TAG "HASH"
+#define ARC_LEXER_TOKEN_PERCENT_ID 14
+#define ARC_LEXER_TOKEN_PERCENT_CHAR '%'
+#define ARC_LEXER_TOKEN_PERCENT_TAG "PERCENT"
 
 /**
  * @brief adds a bunch of basic token rules (matching the BasicTokens above)
diff --git a/src/std/handler.c b/src/std/handler.c
index 8f82b2b..78588d5 100644
--- a/src/std/handler.c
+++ b/src/std/handler.c
@@ -13,8 +13,8 @@ struct ARC_Handler {
 void ARC_Handler_Create(ARC_Handler **handler, ARC_Handler_CompareDataFn *compareFn, ARC_Handler_CleanDataFn cleanfn){
     *handler = (ARC_Handler *) malloc(sizeof(ARC_Handler));
 
-    ARC_Vector_Create(&((*handler)->data), NULL);
-    ARC_Vector_Create(&((*handler)->trash), compareFn);
+    ARC_Vector_Create(&((*handler)->data), NULL, NULL);
+    ARC_Vector_Create(&((*handler)->trash), compareFn, NULL);
 
     (*handler)->cleanfn = cleanfn;
 }
diff --git a/src/std/lexer.c b/src/std/lexer.c
index 4e53cae..6a513b0 100644
--- a/src/std/lexer.c
+++ b/src/std/lexer.c
@@ -85,7 +85,7 @@ void ARC_Lexer_LexString(ARC_Lexer *lexer, ARC_String **data){
     }
 
     //this will run untill everything token is stripped or there is an error
-    while(data != NULL){
+    while(*data != NULL){
        ARC_Bool tokenFound = ARC_False;
        for(uint32_t index = 0; index < ARC_Vector_GetSize(lexer->tokenRules); index++){
            //check if the token rule is found
@@ -106,7 +106,7 @@
            token->data = tokenData;
 
            //add to the vector and check for error (I'd be surprised if the error ever happened because that would most likely mean overflow)
-           ARC_Vector_Add(lexer->tokens, token);
+           ARC_Vector_Add(lexer->tokens, (void *)token);
            if(arc_errno){
                ARC_DEBUG_LOG_ERROR("ARC_Lexer_LexString(lexer, data), errored when running ARC_Vector_Add(lexer->tokens, token);. check logs for more info");
check logs for more info"); free(token); @@ -205,6 +205,7 @@ ARC_Bool ARC_Lexer_AutomataMatchStringFn(ARC_String **string, ARC_String **token //check to see if there is a match with automataData as a string ARC_String *automataDataString = (ARC_String *)automataData; + //to keep from erroring instead of stripping from a same length string we can just delete it if(ARC_String_Equals(*string, automataDataString)){ if((*string)->length == automataDataString->length){ @@ -276,18 +277,18 @@ ARC_LexerTokenRule ARC_LexerTokenRule_CreateAndReturnMatchStringRule(uint32_t id } void ARC_Lexer_InitBasicTokenRules(ARC_Lexer *lexer){ - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_COLON_ID , LEXER_TOKEN_COLON_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_SEMICOLON_ID , LEXER_TOKEN_SEMICOLON_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_COMMA_ID , LEXER_TOKEN_COMMA_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_PERIOD_ID , LEXER_TOKEN_PERIOD_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_FORWARD_SLASH_ID , LEXER_TOKEN_FORWARD_SLASH_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_BACK_SLASH_ID , LEXER_TOKEN_BACK_SLASH_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_LEFT_PARENTHESIS_ID , LEXER_TOKEN_LEFT_PARENTHESIS_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_RIGHT_PARENTHESIS_ID, LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR)); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_LEFT_CURLY_BRACE_ID , LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_RIGHT_CURLY_BRACE_ID, LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR)); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_BANG_ID , LEXER_TOKEN_BANG_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_AT_ID , LEXER_TOKEN_AT_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_HASH_ID , LEXER_TOKEN_HASH_CHAR )); - ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(LEXER_TOKEN_PERCENT_ID , LEXER_TOKEN_PERCENT_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COLON_ID , ARC_LEXER_TOKEN_COLON_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_SEMICOLON_ID , ARC_LEXER_TOKEN_SEMICOLON_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_COMMA_ID , ARC_LEXER_TOKEN_COMMA_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERIOD_ID , ARC_LEXER_TOKEN_PERIOD_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_FORWARD_SLASH_ID , ARC_LEXER_TOKEN_FORWARD_SLASH_CHAR )); + ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BACK_SLASH_ID , ARC_LEXER_TOKEN_BACK_SLASH_CHAR )); + 
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_PARENTHESIS_ID , ARC_LEXER_TOKEN_LEFT_PARENTHESIS_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_ID, ARC_LEXER_TOKEN_RIGHT_PARENTHESIS_CHAR));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_ID , ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_ID, ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_CHAR));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_BANG_ID , ARC_LEXER_TOKEN_BANG_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_AT_ID , ARC_LEXER_TOKEN_AT_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_HASH_ID , ARC_LEXER_TOKEN_HASH_CHAR ));
+    ARC_Lexer_RegisterTokenRule(lexer, ARC_LexerTokenRule_CreateAndReturnMatchCharRule(ARC_LEXER_TOKEN_PERCENT_ID , ARC_LEXER_TOKEN_PERCENT_CHAR ));
 }
diff --git a/src/std/string.c b/src/std/string.c
index cf6425c..310ccc8 100644
--- a/src/std/string.c
+++ b/src/std/string.c
@@ -89,12 +89,12 @@ void ARC_String_ReplaceWithSubstring(ARC_String **string, uint64_t start, uint64
     //if error or substring is null free memory and return
     if(arc_errno || substring == NULL){
         if(substring != NULL){
-            free(substring);
+            ARC_String_Destroy(substring);
         }
 
         return;
     }
 
-    free(*string);
+    ARC_String_Destroy(*string);
     *string = substring;
 }
diff --git a/tests/std/lexer.c b/tests/std/lexer.c
new file mode 100644
index 0000000..e78a7b8
--- /dev/null
+++ b/tests/std/lexer.c
@@ -0,0 +1,39 @@
+#include "../test.h"
+#include "arc/std/lexer.h"
+
+ARC_TEST(Lexer_Char_Match){
+    ARC_Lexer *lexer;
+    ARC_Lexer_Create(&lexer);
+
+    ARC_Lexer_InitBasicTokenRules(lexer);
+
+    ARC_String *simple;
+    ARC_String_CreateWithStrlen(&simple, "::{}!/.");
+
+    ARC_Lexer_LexString(lexer, &simple);
+
+    ARC_LexerToken token;
+
+    token = ARC_Lexer_GetToken(lexer, 0);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_COLON_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 1);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_COLON_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 2);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_LEFT_CURLY_BRACE_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 3);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_RIGHT_CURLY_BRACE_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 4);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_BANG_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 5);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_FORWARD_SLASH_ID);
+
+    token = ARC_Lexer_GetToken(lexer, 6);
+    ARC_CHECK(token.rule == ARC_LEXER_TOKEN_PERIOD_ID);
+
+    ARC_Lexer_Destroy(lexer);
+}
diff --git a/tests/src/vector.c b/tests/std/vector.c
similarity index 93%
rename from tests/src/vector.c
rename to tests/std/vector.c
index 15fa581..77677e1 100644
--- a/tests/src/vector.c
+++ b/tests/std/vector.c
@@ -3,6 +3,7 @@
 #include "arc/std/errno.h"
 #include "arc/std/vector.h"
 #include <stdint.h>
+#include <stdlib.h>
 
 ARC_Bool TEST_Vector_CompareDataFn(void *dataA, void *dataB){
     if(*(int32_t *)dataA == *(int32_t *)dataB){
@@ -12,9 +13,14 @@
     return ARC_False;
 }
 
+//TODO: more tests with destroy data fn added
+void TEST_Vector_DestroyDataFn(void *data){
+    free((int32_t *)data);
+}
+
 ARC_TEST(Vector_Add_RemoveIndex_Get){
     ARC_Vector *vector;
-    ARC_Vector_Create(&vector, NULL);
+    ARC_Vector_Create(&vector, NULL, NULL);
 
     int32_t val0 = 0;
     int32_t val1 = 1;
@@ -59,7 +65,7 @@ ARC_TEST(Vector_Add_RemoveIndex_Get){
 ARC_TEST(Vector_Add_Remove_Get){
     ARC_Vector *vector;
     ARC_Vector_CompareDataFn testCompareDataFn = TEST_Vector_CompareDataFn;
-    ARC_Vector_Create(&vector, &testCompareDataFn);
+    ARC_Vector_Create(&vector, &testCompareDataFn, NULL);
 
     int32_t val0 = 0;
     int32_t val1 = 1;
@@ -103,7 +109,7 @@ ARC_TEST(Vector_Add_Remove_Get){
 
 ARC_TEST(Vector_Add_RemoveIndex_GetSize){
     ARC_Vector *vector;
-    ARC_Vector_Create(&vector, NULL);
+    ARC_Vector_Create(&vector, NULL, NULL);
 
     int32_t val0 = 0;
     int32_t val1 = 1;
@@ -139,7 +145,7 @@ ARC_TEST(Vector_Add_RemoveIndex_GetSize){
 
 ARC_TEST(Vector_Add_RemoveIndex_Get_Try_Out_Of_Bounds){
     ARC_Vector *vector;
-    ARC_Vector_Create(&vector, NULL);
+    ARC_Vector_Create(&vector, NULL, NULL);
 
     int32_t val0 = 0;