first

commit db1adbb838
35 changed files with 4408 additions and 0 deletions

src/std/hashtable.c (new file, 167 lines)
@@ -0,0 +1,167 @@
#include "arc/std/hashtable.h"
|
||||
|
||||
#include "arc/std/errno.h"
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
|
||||
struct ARC_Hashtable {
    uint32_t size;                     /* number of buckets */
    ARC_HashtableNode **nodes;         /* bucket array; collisions chain through node->node */
    ARC_Hashtable_Hash hash;           /* hash callback, defaults to CRC32 */
    ARC_Hashtable_KeyCompare compare;  /* key compare callback, defaults to ARC_Default_Key_Compare */
};

/* bitwise (reflected) CRC-32, used as the default hash */
int32_t CRC32(void *key, size_t *keysize, uint32_t *hashval){
    *hashval = 0xffffffff;

    for(size_t i = 0; i < *keysize; i++){
        uint8_t value = *(((uint8_t *)key) + i);
        for(uint8_t j = 0; j < 8; j++){
            uint8_t flag = (uint8_t)((value ^ *hashval) & 1);
            *hashval >>= 1;
            if(flag){ *hashval ^= 0xEDB88320; } /* reflected CRC-32 polynomial */
            value >>= 1;
        }
    }

    *hashval = ~*hashval;
    return 0;
}

int32_t ARC_Default_Key_Compare(void *key1, size_t *key1size, void *key2, size_t *key2size){
    /* compare key contents, not pointer identity; keys of different length never match */
    if(*key1size != *key2size){ return (*key1size < *key2size) ? -1 : 1; }
    return memcmp(key1, key2, *key1size);
}

void ARC_HashtableNode_Create(ARC_HashtableNode **node, void *key, size_t *keysize, void *data){
    *node = (ARC_HashtableNode *) malloc(sizeof(ARC_HashtableNode));
    (*node)->key = key;
    (*node)->keysize = *keysize;
    (*node)->data = data;
    (*node)->node = NULL;
}

int32_t ARC_HashtableNode_Destroy(ARC_HashtableNode *node, ARC_HashtableNode_DestroyExternal external, void *userdata){
    if(node == NULL){ return 0; }
    ARC_HashtableNode_Destroy(node->node, external, userdata);

    if(external){
        int32_t err = external(node, userdata);
        if(err){ return err; }
    }

    free(node);
    return 0;
}

void ARC_Hashtable_Create(ARC_Hashtable **htable, uint32_t bucketsize, ARC_Hashtable_Hash hash, ARC_Hashtable_KeyCompare compare){
    *htable = (ARC_Hashtable *) malloc(sizeof(ARC_Hashtable));
    (*htable)->size = bucketsize;
    (*htable)->nodes = (ARC_HashtableNode **) calloc(bucketsize, sizeof(ARC_HashtableNode *));
    (*htable)->hash = (hash)? hash : CRC32;
    (*htable)->compare = (compare)? compare : ARC_Default_Key_Compare;
}

int32_t ARC_Hashtable_Destroy(ARC_Hashtable *htable, ARC_HashtableNode_DestroyExternal external, void *userdata){
    for(uint32_t i = 0; i < htable->size; i++){
        if(htable->nodes[i]){
            int32_t err = ARC_HashtableNode_Destroy(htable->nodes[i], external, userdata);
            if(err){ return err; }
        }
    }

    free(htable->nodes);
    free(htable);

    return 0;
}

int32_t ARC_Hashtable_Add(ARC_Hashtable *htable, void *key, size_t keysize, void *data){
    uint32_t size = 0;
    int32_t err = htable->hash(key, &keysize, &size);
    if(err){ return err; }

    ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
    if(!bucket){
        ARC_HashtableNode_Create(&bucket, key, &keysize, data);
        htable->nodes[size % htable->size] = bucket;
        return 0;
    }

    if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){ return ARC_ERRNO_EXISTS; }

    while(bucket->node){
        if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){ return ARC_ERRNO_EXISTS; }
        bucket = bucket->node;
    }

    ARC_HashtableNode_Create(&(bucket->node), key, &keysize, data);
    return 0;
}

int32_t ARC_Hashtable_Get(ARC_Hashtable *htable, void *key, size_t keysize, void **data){
    uint32_t size = 0;
    int32_t err = htable->hash(key, &keysize, &size);
    if(err){ return err; }

    ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
    if(!bucket){
        *data = NULL;
        return ARC_ERRNO_NULL;
    }

    if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){
        *data = bucket->data;
        return 0;
    }

    while(bucket->node){
        if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){
            *data = bucket->node->data;
            return 0;
        }
        bucket = bucket->node;
    }

    return ARC_ERRNO_NULL;
}

int32_t ARC_Hashtable_Remove(ARC_Hashtable *htable, void *key, size_t keysize, ARC_HashtableNode_DestroyExternal external, void *userdata){
    uint32_t size = 0;
    int32_t err = htable->hash(key, &keysize, &size);
    if(err){ return err; }

    ARC_HashtableNode *bucket = htable->nodes[size % htable->size];
    if(!bucket){ return ARC_ERRNO_NULL; }

    if(!htable->compare(bucket->key, &bucket->keysize, key, &keysize)){
        ARC_HashtableNode *temp = bucket;
        htable->nodes[size % htable->size] = bucket->node;

        if(external){
            err = external(temp, userdata);
            if(err){ return err; }
        }

        free(temp);
        return 0;
    }

    while(bucket->node){
        if(!htable->compare(bucket->node->key, &bucket->node->keysize, key, &keysize)){
            ARC_HashtableNode *temp = bucket->node;
            bucket->node = bucket->node->node;

            if(external){
                err = external(temp, userdata);
                if(err){ return err; }
            }

            free(temp);
            return 0;
        }

        bucket = bucket->node;
    }

    return ARC_ERRNO_NULL;
}
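
Below is a minimal usage sketch for the API introduced in this file. It is not part of the commit: it assumes the declarations in "arc/std/hashtable.h" and the error codes in "arc/std/errno.h" match what the implementation above uses, and it selects the default CRC32 hash and byte-wise key compare by passing NULL for both callbacks.

/* Usage sketch (illustrative only): create a table, add a key, look it up,
 * remove it, then destroy the table. No external node destructor is used. */
#include "arc/std/hashtable.h"
#include "arc/std/errno.h"

#include <stdio.h>
#include <string.h>

int main(void){
    ARC_Hashtable *table = NULL;
    ARC_Hashtable_Create(&table, 64, NULL, NULL); /* 64 buckets, default hash and compare */

    char *key = "answer";
    int value = 42;
    if(ARC_Hashtable_Add(table, key, strlen(key), &value) == ARC_ERRNO_EXISTS){
        printf("key already present\n");
    }

    void *data = NULL;
    if(!ARC_Hashtable_Get(table, key, strlen(key), &data)){
        printf("answer = %d\n", *(int *)data);
    }

    ARC_Hashtable_Remove(table, key, strlen(key), NULL, NULL); /* no external destructor */
    ARC_Hashtable_Destroy(table, NULL, NULL);
    return 0;
}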