updated hashtable and started on basic hashtable testing

This commit is contained in:
herbglitch 2025-02-14 03:23:42 -07:00
parent 122eb1d351
commit c344594af7
7 changed files with 453 additions and 187 deletions

View file

@@ -126,12 +126,13 @@ if(ARCHEUS_STD_TESTS)
add_executable(tests
tests/test.c
tests/std/chemical.c
tests/std/vector.c
tests/std/lexer.c
tests/std/parser.c
tests/std/parser/csv.c
tests/std/parser/parserlang.c
#tests/std/chemical.c
tests/std/hashtable.c
#tests/std/lexer.c
#tests/std/parser.c
#tests/std/parser/csv.c
#tests/std/parser/parserlang.c
#tests/std/vector.c
${ARCHEUS_STD_SOURCES}
)

15
doc/diagrams/vector.txt Normal file
View file

@@ -0,0 +1,15 @@
┌───────────────────────────────────────────┐
│ │ │
│ The Problem │ A Fix With a Vector │
│ │ │
│ ┌────────┐ │ ┌────────┐ ┌─────────┐ │
│ │ Hat │ │ │ Hat │────►│ Hat │ │
│ │ Pants │ │ │ Pants │────►│ Pants │ │
│ │ Jacket │ │ │ Jacket │────►│ Jacket │ │
│ │ Shoes │ │ │ Shoes │────►│ Shoes │ │
│ └────────┘ │ └────────┘┌───►│ Socks │ │
│ ▲ │ Socks───┘ │ │ │
│ │ │ │ │ │
│ Socks │ │ │ │
│ │ └─────────┘ │
└───────────────────────────────────────────┘
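
The growth step on the right-hand side of the diagram, copying a full array into a larger one so "Socks" fits, looks roughly like the following in plain C. This is a generic, hedged sketch of the idea (realloc-based doubling), not the ARC_Vector API; the StringVector type and string_vector_push function are illustrative names only.

#include <stdlib.h>

//a generic growable array of strings, purely to illustrate the diagram
typedef struct {
    char **items;
    size_t size;
    size_t capacity;
} StringVector;

//returns 0 on success, -1 when the allocation fails
static int string_vector_push(StringVector *vector, char *item){
    if(vector->size == vector->capacity){
        //the old array is full ("Socks" does not fit), so grow into a larger one
        size_t newCapacity = (vector->capacity == 0) ? 1 : vector->capacity * 2;
        char **newItems = (char **)realloc(vector->items, newCapacity * sizeof(char *));
        if(newItems == NULL){ return -1; }
        vector->items = newItems;
        vector->capacity = newCapacity;
    }
    vector->items[vector->size++] = item;
    return 0;
}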

View file

@@ -5,105 +5,115 @@
extern "C" {
#endif
#include "arc/std/bool.h"
#include <stdint.h>
#include <stddef.h>
/**
* @brief the arc hashtable data type
* @brief a hashing function ptr
*
* @param[in] key value to hash
*
* @return uint32_t hashed value of the key
*/
typedef uint32_t (* ARC_Hashtable_HashFn)(void *key);
/**
* @brief the hashtable key comparison callback function
*
* @param[in] key1 first key
* @param[in] key2 second key
*
* @return ARC_True when keys match
*/
typedef ARC_Bool (* ARC_Hashtable_KeyCompareFn)(void *key1, void *key2);
/**
* @brief a callback that cleans up a key and value's memory when it is removed from the hashtable
*
* @param[in] key the key to destroy
* @param[in] value the value to destroy
*/
typedef void (* ARC_Hashtable_DestroyKeyValueFn)(void *key, void *value);
/**
* @brief a callback to be used by ARC_Hashtable_RunIteration
*
* @param[in] key a key at the current iteration
* @param[in] value a value that matches the key at the current iteration
*/
typedef void (* ARC_Hashtable_IteratorFn)(void *key, void *value);
/**
* @brief a resizable hashtable data type (will find next open slot before resizing)
*/
typedef struct ARC_Hashtable ARC_Hashtable;
/**
* @brief a node that contains a key-value reference along with a linked list like node
*/
typedef struct ARC_HashtableNode ARC_HashtableNode;
struct ARC_HashtableNode {
void *key;
size_t keysize;
void *data;
ARC_HashtableNode *node;
};
/**
* @brief a hashing function ptr
*
* @param key value to hash
* @param keysize should be sizeof(key) before key is a void ptr
* @param hashval value of hash, does not need to be within range of buckets
*/
typedef void (* ARC_Hashtable_Hash)(void *key, size_t *keysize, uint32_t *hashval);
/**
* @brief key comparison function ptr
*
* @param key1 first key
* @param key2 second key
*
* @return 0 when keys match
*/
typedef int8_t (* ARC_Hashtable_KeyCompare)(void *key1, size_t *key1size, void *key2, size_t *key2size);
/**
* @brief callback to allow memory freeing of nodes
*
* @param node node to be destroyed
* @param userdata any data the user wants to access in the callback
*/
typedef void (* ARC_HashtableNode_DestroyExternal)(ARC_HashtableNode *node, void *userdata);
/**
* @brief creates an ARC_Hashtable
*
* @param htable where to store data
* @param bucketsize num of nodes to create in inital table
* @param hash hashing function to be used, if set to NULL, CRC32 will be used
* @param compare comparison function for checking keys, if set to NULL, addresses will be compared
* @note if the default hashing function is used (CRC32), then the key value needs to be a string or end in '\0'
* @note an error will be thrown if the key is NULL
*
* @param[out] hashtable ARC_Hashtable to initialize
* @param[in] hashFn a callback for a hashing function to be used, if set to NULL, CRC32 will be used
* @param[in] keyCompareFn a callback for checking keys, if set to NULL, addresses will be compared
* @param[in] destroyKeyValueFn a callback to free the key and value
*/
void ARC_Hashtable_Create(ARC_Hashtable **htable, uint32_t bucketsize, ARC_Hashtable_Hash hash, ARC_Hashtable_KeyCompare compare);
void ARC_Hashtable_Create(ARC_Hashtable **hashtable, ARC_Hashtable_HashFn *hashFn, ARC_Hashtable_KeyCompareFn *keyCompareFn, ARC_Hashtable_DestroyKeyValueFn *destroyKeyValueFn);
/**
* @brief destroys ARC_Hashtable type
* @brief destroys an ARC_Hashtable
*
* @param htable htable that will be destroyed
* @param external function to allow external freeing of nodes, can be NULL
* @param userdata any data the user wants access to in the callback
* @param[in] hashtable ARC_Hashtable to free
*/
void ARC_Hashtable_Destroy(ARC_Hashtable *htable, ARC_HashtableNode_DestroyExternal external, void *userdata);
void ARC_Hashtable_Destroy(ARC_Hashtable *hashtable);
/**
* @brief adds value to hashtable
* @brief adds a value at a key to the hashtable
*
* @param htable ARC_Hashtable to add to
* @param key key for node that is being added
* @param keysize sizeof key before it is passed into a void *
* @param data data for node that is being added
* @note this will error if you add more than 4,294,967,295 items (the max value of a uint32_t)
*
* @param[in] hashtable ARC_Hashtable to add to
* @param[in] key key for node that is being added
* @param[in] value value for node that is being added
*/
void ARC_Hashtable_Add(ARC_Hashtable *htable, void *key, size_t keysize, void *data);
void ARC_Hashtable_Add(ARC_Hashtable *hashtable, void *key, void *value);
/**
* @brief gets value from hashtable by key
* @brief removes value from hashtable at a given key
*
* @param htable table to get value from
* @param key key to get value from table
* @param keysize sizeof key before it is passed into a void *
* @param data data retrieved from table
* @param[in] hashtable ARC_Hashtable to remove from
* @param[in] key key of data to remove from the hashtable
*/
void ARC_Hashtable_Get(ARC_Hashtable *htable, void *key, size_t keysize, void **data);
void ARC_Hashtable_Remove(ARC_Hashtable *hashtable, void *key);
/**
* @brief removes value from hashtable
* @brief clears all values from a hashtable
*
* @param htable ARC_Hashtable to remove from
* @param key key of data to remove from hash table
* @param keysize sizeof key before it is passed into a void *
* @param external function to allow external freeing of data, can be NULL
* @param userdata any data the user wants access to in the callback
* @param[in] hashtable ARC_Hashtable to clear
*/
void ARC_Hashtable_Remove(ARC_Hashtable *htable, void *key, size_t keysize, ARC_HashtableNode_DestroyExternal external, void *userdata);
void ARC_Hashtable_Clear(ARC_Hashtable *hashtable);
/**
* @brief gets a value from hashtable by key
*
* @param[in] hashtable the hashtable to get the value from
* @param[in] key the key to match against for a value
*
* @return the value if there is a key match, otherwise NULL
*/
void *ARC_Hashtable_Get(ARC_Hashtable *hashtable, void *key);
/**
* @brief iterates through a hashtable passing available key value pairs to a callback
*
* @param[in] hashtable the hashtable to iterate through
* @param[in] iteratorFn the callback that receives each iterated key-value pair
*/
void ARC_Hashtable_RunIteration(ARC_Hashtable *hashtable, ARC_Hashtable_IteratorFn iteratorFn);
#ifdef __cplusplus
}
#endif
#endif //ARC_STD_HASHTABLE_H_
#endif //ARC_STD_HASHTABLE_H_
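
Taken together, the declarations above suggest a usage pattern like the sketch below, modeled on tests/std/hashtable.c. The compare and print callbacks are illustrative names, passing NULL for the hash callback assumes the documented CRC32 default (which expects '\0'-terminated string keys), and passing NULL for the destroy callback leaves key and value memory owned by the caller.

#include "arc/std/bool.h"
#include "arc/std/hashtable.h"
#include <stdio.h>
#include <string.h>

//illustrative compare callback: keys are C strings, so match on contents
static ARC_Bool example_key_compare(void *key1, void *key2){
    return (ARC_Bool)(strcmp((const char *)key1, (const char *)key2) == 0);
}

//illustrative iterator callback: print each key-value pair
static void example_print_entry(void *key, void *value){
    printf("%s -> %d\n", (const char *)key, *(int *)value);
}

int main(void){
    ARC_Hashtable *hashtable;
    ARC_Hashtable_KeyCompareFn keyCompareFn = example_key_compare;

    //NULL hashFn falls back to CRC32, NULL destroyKeyValueFn means no per-entry cleanup
    ARC_Hashtable_Create(&hashtable, NULL, &keyCompareFn, NULL);

    int count = 7;
    ARC_Hashtable_Add(hashtable, (void *)"socks", &count);

    int *found = (int *)ARC_Hashtable_Get(hashtable, (void *)"socks");
    if(found != NULL){
        printf("found socks: %d\n", *found);
    }

    ARC_Hashtable_RunIteration(hashtable, example_print_entry);

    ARC_Hashtable_Remove(hashtable, (void *)"socks");
    ARC_Hashtable_Destroy(hashtable);
    return 0;
}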

View file

@@ -46,8 +46,7 @@ void ARC_Vector_Create(ARC_Vector **vector, ARC_Vector_CompareDataFn *compareDat
/**
* @brief destroys an ARC_Vector
*
* @note this will not free the items stored in the vector
* @note please make sure to clear and free the children before destroying an ARC_Vector
* @note this will only free the items if destroyDataFn is passed in on creation
*
* @param[in] vector ARC_Vector to free
*/

View file

@@ -4,167 +4,363 @@
#include <stdlib.h>
#include <stdio.h>
struct ARC_Hashtable {
uint32_t size;
ARC_HashtableNode **nodes;
ARC_Hashtable_Hash hash;
ARC_Hashtable_KeyCompare compare;
//a private struct to hold the keys and values of the hashtable
typedef struct ARC_HashtableNode ARC_HashtableNode;
struct ARC_HashtableNode {
void *key;
void *value;
uint32_t hashvalue;
//records the index the key originally hashed to, used to remove displaced elements faster
uint32_t initialIndex;
};
void CRC32(void *key, size_t *keysize, uint32_t *hashval){
*hashval = 0xffffffff;
struct ARC_Hashtable {
uint32_t currentCapacity;
uint32_t currentSize;
for(size_t i = 0; i < *keysize; i++){
ARC_HashtableNode *nodes;
ARC_Hashtable_HashFn hashFn;
ARC_Hashtable_KeyCompareFn keyCompareFn;
ARC_Hashtable_DestroyKeyValueFn *destroyKeyValueFn;
};
//copied from here: https://en.wikipedia.org/wiki/Computation_of_cyclic_redundancy_checks#CRC-32_example
uint32_t CRC32Fn(void *key){
uint32_t hashvalue = 0xffffffff;
for(uint32_t i = 0; *(((char *)key) + i) != '\0'; i++){
uint8_t value = *(((uint8_t *)key) + i);
for(uint8_t j = 0; j < 8; j++){
uint8_t flag = (uint8_t)((value ^ *hashval) & 1);
*hashval >>= 1;
if(flag){ *hashval ^= 0xEDB888320; }
uint8_t flag = (uint8_t)((value ^ hashvalue) & 1);
hashvalue >>= 1;
if(flag){
hashvalue ^= 0xEDB88320; //reflected CRC-32 polynomial
}
value >>= 1;
}
}
*hashval = ~*hashval;
hashvalue = ~hashvalue;
return hashvalue;
}
int8_t ARC_Default_Key_Compare(void *key1, size_t *key1size, void *key2, size_t *key2size){
return key1 == key2;
ARC_Bool ARC_Hashtable_DefaultKeyCompareFn(void *key1, void *key2){
return (ARC_Bool)(key1 == key2);
}
void ARC_HashtableNode_Create(ARC_HashtableNode **node, void *key, size_t *keysize, void *data){
*node = (ARC_HashtableNode *) malloc(sizeof(ARC_HashtableNode));
(*node)->key = key;
(*node)->keysize = *keysize;
(*node)->data = data;
(*node)->node = NULL;
}
void ARC_Hashtable_Create(ARC_Hashtable **hashtable, ARC_Hashtable_HashFn *hashFn, ARC_Hashtable_KeyCompareFn *keyCompareFn, ARC_Hashtable_DestroyKeyValueFn *destroyKeyValueFn){
//allocate the hashtable
*hashtable = (ARC_Hashtable *) malloc(sizeof(ARC_Hashtable));
void ARC_HashtableNode_Destroy(ARC_HashtableNode *node, ARC_HashtableNode_DestroyExternal external, void *userdata){
if(node == NULL){
return;
//set current capacity and size to start
(*hashtable)->currentCapacity = 1;
(*hashtable)->currentSize = 0;
//reserve enough memory for one node
(*hashtable)->nodes = (ARC_HashtableNode *)malloc(sizeof(ARC_HashtableNode));
//set first and only key to null
(*hashtable)->nodes[0] = (ARC_HashtableNode){ NULL, NULL, 0, 0 };
//default to CRC32, then override if hashFn exists
(*hashtable)->hashFn = CRC32Fn;
if(hashFn != NULL){
(*hashtable)->hashFn = *hashFn;
}
ARC_HashtableNode_Destroy(node->node, external, userdata);
if(external){
external(node, userdata);
//default to comparing pointers, then override if keyCompareFn exists
(*hashtable)->keyCompareFn = ARC_Hashtable_DefaultKeyCompareFn;
if(keyCompareFn != NULL){
(*hashtable)->keyCompareFn = *keyCompareFn;
}
free(node);
//default to NULL, then create and copy destroyKeyValueFn if it exists
(*hashtable)->destroyKeyValueFn = NULL;
if(destroyKeyValueFn != NULL){
(*hashtable)->destroyKeyValueFn = (ARC_Hashtable_DestroyKeyValueFn *)malloc(sizeof(ARC_Hashtable_DestroyKeyValueFn));
*((*hashtable)->destroyKeyValueFn) = *destroyKeyValueFn;
}
}
void ARC_Hashtable_Create(ARC_Hashtable **htable, uint32_t bucketsize, ARC_Hashtable_Hash hash, ARC_Hashtable_KeyCompare compare){
*htable = (ARC_Hashtable *) malloc(sizeof(ARC_Hashtable));
(*htable)->size = bucketsize;
(*htable)->nodes = (ARC_HashtableNode **) calloc(bucketsize, sizeof(ARC_HashtableNode *));
(*htable)->hash = (hash)? hash : CRC32;
(*htable)->compare = (compare)? compare : ARC_Default_Key_Compare;
void ARC_Hashtable_Destroy(ARC_Hashtable *hashtable){
//remove all the contents before freeing the rest of the hashtable
ARC_Hashtable_Clear(hashtable);
//free the destroyKeyValueFn if it exists
if(hashtable->destroyKeyValueFn != NULL){
free(hashtable->destroyKeyValueFn);
}
//free the empty nodes container
free(hashtable->nodes);
//free the hashtable
free(hashtable);
}
void ARC_Hashtable_Destroy(ARC_Hashtable *htable, ARC_HashtableNode_DestroyExternal external, void *userdata){
for(uint32_t i = 0; i < htable->size; i++){
if(htable->nodes[i]){
ARC_HashtableNode_Destroy(htable->nodes[i], external, userdata);
}
}
void ARC_HashtableNode_SetNearestNodeToArray(ARC_HashtableNode *nodes, uint32_t capacity, ARC_HashtableNode node){
//get the first possible index based on the node's hashvalue
uint32_t index = node.hashvalue % capacity;
free(htable->nodes);
free(htable);
}
//get the first possible node
ARC_HashtableNode foundNode = nodes[index];
void ARC_Hashtable_Add(ARC_Hashtable *htable, void *key, size_t keysize, void *data){
uint32_t size = 0;
htable->hash(key, &keysize, &size);
//init variable for found node
uint32_t nextIndex = index;
ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
if(!bucket){
ARC_HashtableNode_Create(&bucket, key, &keysize, data);
htable->nodes[size % htable->size] = bucket;
return;
}
//check each available node for a free slot
while(foundNode.key != NULL){
//up the current index by one
nextIndex++;
if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){
arc_errno = ARC_ERRNO_EXISTS;
return;
}
while(bucket->node){
if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){
arc_errno = ARC_ERRNO_EXISTS;
return;
//cycle back to the first index if it is above the array's capacity
if(nextIndex >= capacity){
nextIndex = 0;
}
bucket = bucket->node;
//check if the loop has circled back to the starting index to stop checking
if(index == nextIndex){
break;
}
//get the next possible node
foundNode = nodes[nextIndex];
}
ARC_HashtableNode_Create(&(bucket->node), key, &keysize, data);
//place the node in the free slot and record the index it originally hashed to
nodes[nextIndex] = node;
nodes[nextIndex].initialIndex = index;
}
void ARC_Hashtable_Get(ARC_Hashtable *htable, void *key, size_t keysize, void **data){
uint32_t size = 0;
htable->hash(key, &keysize, &size);
void ARC_Hashtable_Add(ARC_Hashtable *hashtable, void *key, void *value){
//if the current size has reached the max of a uint32_t, adding another item would overflow, so throw an error
if(hashtable->currentSize == ~((uint32_t)0)){
arc_errno = ARC_ERRNO_OVERFLOW;
ARC_DEBUG_LOG_ERROR("ARC_Hashtable_Add(hashtable, key, value), hashtable at max capacity tried adding another value");
return;
}
ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
if(!bucket){
*data = NULL;
//check to make sure key is not NULL
if(key == NULL){
arc_errno = ARC_ERRNO_NULL;
ARC_DEBUG_LOG_ERROR("ARC_Hashtable_Add(hashtable, key, value), NULL was passed in for the key, this function cannot handle that");
return;
}
if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){
*data = bucket->data;
//check if we are at the max of the current capacity
if(hashtable->currentSize == hashtable->currentCapacity){
//stash the current nodes in temporaries so they can be re-added to the resized array
uint64_t oldCapacity = hashtable->currentCapacity;
ARC_HashtableNode *oldNodes = hashtable->nodes;
//double the current capacity
hashtable->currentCapacity <<= 1;
//if for some reason the capacity overflowed to 0, bump it to one so the allocation below is valid
if(hashtable->currentCapacity == 0){
hashtable->currentCapacity++;
}
//allocate the resized array for the hashtable's nodes
hashtable->nodes = (ARC_HashtableNode *)malloc(sizeof(ARC_HashtableNode) * hashtable->currentCapacity);
//set keys to null
for(uint32_t index = 0; index < hashtable->currentCapacity; index++){
hashtable->nodes[index].key = NULL;
}
//add the old nodes into the new array
for(uint32_t index = 0; index < oldCapacity; index++){
ARC_HashtableNode_SetNearestNodeToArray(hashtable->nodes, hashtable->currentCapacity, oldNodes[index]);
}
}
//get the hashvalue
uint32_t hashvalue = hashtable->hashFn(key);
//add to the hashtable's array and increase its current size
ARC_HashtableNode_SetNearestNodeToArray(hashtable->nodes, hashtable->currentCapacity, (ARC_HashtableNode){ key, value, hashvalue, 0 });
hashtable->currentSize++;
}
void ARC_Hashtable_Remove(ARC_Hashtable *hashtable, void *key){
//get the index from a hashvalue
uint32_t index = hashtable->hashFn(key) % hashtable->currentCapacity;
//get the first possible node
ARC_HashtableNode node = hashtable->nodes[index];
//check each occupied node for a match, stopping when an empty slot is reached
ARC_Bool nodeFound = ARC_False;
for(uint32_t nextIndex = index; node.key != NULL;){
if(hashtable->keyCompareFn(node.key, key) == ARC_True){
index = nextIndex;
nodeFound = ARC_True;
}
//up the current index by one
nextIndex++;
//cycle back to the first index if it is above the array's capacity
if(nextIndex >= hashtable->currentCapacity){
nextIndex = 0;
}
//check if the loop has circled back to the starting index to stop checking
if(index == nextIndex){
break;
}
//get the next possible node
node = hashtable->nodes[nextIndex];
}
//error if the node was not found
if(nodeFound == ARC_False){
arc_errno = ARC_ERRNO_DATA;
ARC_DEBUG_LOG_ERROR("ARC_Hashtable_Remove(hashtable, key), key was not found in hashtable, could not remove");
return;
}
while(bucket->node){
if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){
*data = bucket->node->data;
//call the destroy callback on the removed node's key and value if the callback exists
if(hashtable->destroyKeyValueFn != NULL){
(*(hashtable->destroyKeyValueFn))(hashtable->nodes[index].key, hashtable->nodes[index].value);
}
//set the current index to a starting index
uint32_t currentIndex = index;
//get the next possible node
node = hashtable->nodes[currentIndex];
//while the next node was displaced from the removed index, shift it back one slot
while(node.initialIndex == index){
//set the index that the displaced node will be shifted back to
uint32_t lastIndex = currentIndex;
//up the current index by one
currentIndex++;
//cycle back to the first index if it is above the array's capacity
if(currentIndex >= hashtable->currentCapacity){
currentIndex = 0;
}
//check if the loop has circled back to the starting index to stop checking and throw an error
if(index == currentIndex){
arc_errno = ARC_ERRNO_DATA;
ARC_DEBUG_LOG_ERROR("ARC_Hashtable_Remove(hashtable, key), removed index matched initalIndex of node, this should never happen");
return;
}
bucket = bucket->node;
//move the current node back one
hashtable->nodes[lastIndex] = hashtable->nodes[currentIndex];
//get the next possible node
node = hashtable->nodes[currentIndex];
}
arc_errno = ARC_ERRNO_NULL;
}
//set the current value to an empty node
hashtable->nodes[currentIndex] = (ARC_HashtableNode){ NULL, NULL, 0, currentIndex };
void ARC_Hashtable_Remove(ARC_Hashtable *htable, void *key, size_t keysize, ARC_HashtableNode_DestroyExternal external, void *userdata){
uint32_t size = 0;
htable->hash(key, &keysize, &size);
//we have removed the item so we can decrease the current size
hashtable->currentSize--;
ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
if(!bucket){
arc_errno = ARC_ERRNO_NULL;
//only shrink when the current size has dropped to half the capacity and the capacity is above its minimum
if(hashtable->currentSize != hashtable->currentCapacity >> 1 || hashtable->currentCapacity == 1){
return;
}
if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){
ARC_HashtableNode *temp = bucket;
htable->nodes[size % htable->size] = bucket->node;
//stash the current nodes in temporaries so they can be re-added to the resized array
uint64_t oldCapacity = hashtable->currentCapacity;
ARC_HashtableNode *oldNodes = hashtable->nodes;
if(external){
external(temp, userdata);
}
//halve the capacity and copy the nodes into a smaller array
hashtable->currentCapacity >>= 1;
free(temp);
return;
//allocate the resized array for the hashtable's nodes
hashtable->nodes = (ARC_HashtableNode *)malloc(sizeof(ARC_HashtableNode) * hashtable->currentCapacity);
//set keys to null
for(uint32_t index = 0; index < hashtable->currentCapacity; index++){
hashtable->nodes[index].key = NULL;
}
while(bucket->node){
if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){
ARC_HashtableNode *temp = bucket->node;
bucket->node = bucket->node->node;
if(external){
external(temp, userdata);
}
free(temp);
return;
}
bucket = bucket->node;
//add the old nodes into the new array
for(uint32_t index = 0; index < oldCapacity; index++){
ARC_HashtableNode_SetNearestNodeToArray(hashtable->nodes, hashtable->currentCapacity, oldNodes[index]);
}
}
void ARC_Hashtable_Clear(ARC_Hashtable *hashtable){
//delete the array holding all the nodes
free(hashtable->nodes);
//set current capacity and size to start
hashtable->currentCapacity = 1;
hashtable->currentSize = 0;
//reserve enough memory for one node
hashtable->nodes = (ARC_HashtableNode *)malloc(sizeof(ARC_HashtableNode));
//set first and only key to null
hashtable->nodes[0].key = NULL;
}
void *ARC_Hashtable_Get(ARC_Hashtable *hashtable, void *key){
//get the index from a hashvalue
uint32_t index = hashtable->hashFn(key) % hashtable->currentCapacity;
//get the first possible node
ARC_HashtableNode node = hashtable->nodes[index];
//check each available node for a match
for(uint32_t nextIndex = index; node.key != NULL;){
//if the key is found, return its value
if(hashtable->keyCompareFn(node.key, key) == ARC_True){
return node.value;
}
//up the current index by one
nextIndex++;
//cycle back to the first index if it is above the array's capacity
if(nextIndex >= hashtable->currentCapacity){
nextIndex = 0;
}
//check if the loop has circled back to the starting index to stop checking
if(index == nextIndex){
break;
}
//get the next possible node
node = hashtable->nodes[nextIndex];
}
//could not find node, so return NULL
return NULL;
}
void ARC_Hashtable_RunIteration(ARC_Hashtable *hashtable, ARC_Hashtable_IteratorFn iteratorFn){
//pass each non-NULL node to the iteratorFn callback
for(uint32_t index = 0; index < hashtable->currentCapacity; index++){
//get the current node
ARC_HashtableNode node = hashtable->nodes[index];
//skip past NULL keys
if(node.key == NULL){
continue;
}
//pass the current key-value pair to the callback function
iteratorFn(node.key, node.value);
}
arc_errno = ARC_ERRNO_NULL;
}
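
For reference, the collision handling implemented above is open addressing with linear probing: a key hashes to a starting index, and insertion or lookup walks forward (wrapping at the end) until a free slot or a matching key is found. A stripped-down, self-contained sketch of that idea, independent of the ARC types and with a fixed capacity instead of resizing, might look like this (the Slot type, hash_str, sketch_put, and sketch_get are illustrative names only):

#include <stddef.h>
#include <string.h>

#define SKETCH_CAP 8 //fixed capacity for the sketch; the real table resizes

typedef struct { const char *key; int value; } Slot;

//illustrative string hash (djb2), standing in for the CRC32 default above
static size_t hash_str(const char *key){
    size_t h = 5381;
    for(; *key != '\0'; key++){ h = h * 33 + (unsigned char)*key; }
    return h;
}

//returns 0 on success, -1 when the table is full
static int sketch_put(Slot slots[SKETCH_CAP], const char *key, int value){
    size_t index = hash_str(key) % SKETCH_CAP;
    for(size_t probes = 0; probes < SKETCH_CAP; probes++){
        if(slots[index].key == NULL){         //free slot found, store the entry here
            slots[index] = (Slot){ key, value };
            return 0;
        }
        index = (index + 1) % SKETCH_CAP;     //wrap around at the end of the array
    }
    return -1; //table is full (the real implementation resizes instead)
}

static int *sketch_get(Slot slots[SKETCH_CAP], const char *key){
    size_t index = hash_str(key) % SKETCH_CAP;
    for(size_t probes = 0; probes < SKETCH_CAP && slots[index].key != NULL; probes++){
        if(strcmp(slots[index].key, key) == 0){ return &slots[index].value; }
        index = (index + 1) % SKETCH_CAP;
    }
    return NULL; //not found
}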

View file

@@ -53,7 +53,7 @@ void ARC_Vector_Destroy(ARC_Vector *vector){
ARC_Vector_Clear(vector);
//free the delete data function if it exists
if(vector->destroyDataFn){
if(vector->destroyDataFn != NULL){
free(vector->destroyDataFn);
}

45
tests/std/hashtable.c Normal file
View file

@@ -0,0 +1,45 @@
#include "../test.h"
#include "arc/std/bool.h"
#include "arc/std/errno.h"
#include "arc/std/hashtable.h"
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
//TODO: add hash function for testing
ARC_Bool TEST_Hashtable_KeyCompareDataFn(void *dataA, void *dataB){
return (ARC_Bool)(strcmp((const char *)dataA, (const char *)dataB) == 0); //strcmp returns 0 on a match, so convert to ARC_True
}
//TODO: more tests with destroy data fn added
void TEST_Hashtable_DestroyKeyValueFn(void *key, void *value){
free((char *)key);
free((int32_t *)value);
}
ARC_TEST(Hashtable_Init){
ARC_Hashtable *hashtable;
ARC_Hashtable_KeyCompareFn keyCompareFn = TEST_Hashtable_KeyCompareDataFn;
ARC_Hashtable_DestroyKeyValueFn destroyKeyValueFn = TEST_Hashtable_DestroyKeyValueFn;
ARC_Hashtable_Create(&hashtable, NULL, &keyCompareFn, &destroyKeyValueFn);
ARC_CHECK(arc_errno == 0);
ARC_Hashtable_Destroy(hashtable);
}
ARC_TEST(Hashtable_Add){
ARC_Hashtable *hashtable;
ARC_Hashtable_KeyCompareFn keyCompareFn = TEST_Hashtable_KeyCompareDataFn;
ARC_Hashtable_DestroyKeyValueFn destroyKeyValueFn = TEST_Hashtable_DestroyKeyValueFn;
ARC_Hashtable_Create(&hashtable, NULL, &keyCompareFn, &destroyKeyValueFn);
char *key0 = (char *)"test";
int32_t val0 = 2;
ARC_Hashtable_Add(hashtable, &key0, &val0);
ARC_CHECK(2 == *(int32_t *)ARC_Hashtable_Get(hashtable, (char *)"test"));
ARC_Hashtable_Destroy(hashtable);
}
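
A possible follow-up test for removal, following the same ARC_TEST/ARC_CHECK pattern used above; it assumes, per the header, that ARC_Hashtable_Get returns NULL once the key has been removed, and it passes NULL for the destroy callback since the key and value here are not heap-allocated.

ARC_TEST(Hashtable_Remove){
    ARC_Hashtable *hashtable;
    ARC_Hashtable_KeyCompareFn keyCompareFn = TEST_Hashtable_KeyCompareDataFn;
    ARC_Hashtable_Create(&hashtable, NULL, &keyCompareFn, NULL);

    char *key0 = (char *)"test";
    int32_t val0 = 2;
    ARC_Hashtable_Add(hashtable, key0, &val0);
    ARC_CHECK(2 == *(int32_t *)ARC_Hashtable_Get(hashtable, key0));

    //after removal the key should no longer resolve to a value
    ARC_Hashtable_Remove(hashtable, key0);
    ARC_CHECK(NULL == ARC_Hashtable_Get(hashtable, key0));

    ARC_Hashtable_Destroy(hashtable);
}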