@@ -51,7 +51,6 @@
 #define ERS_ROOT_SIZE 256
 #define ERS_BLOCK_ENTRIES 4096
 
-typedef struct ers_instance_t;
 struct ers_list
 {
     struct ers_list *Next;
@@ -169,7 +168,7 @@ static void *ers_obj_alloc_entry(ERS self)
 
     if (instance->Cache->ReuseList != NULL)
     {
-        ret = (void *)((unsigned int)instance->Cache->ReuseList + sizeof(struct ers_list));
+        ret = (void *)((unsigned char *)instance->Cache->ReuseList + sizeof(struct ers_list));
         instance->Cache->ReuseList = instance->Cache->ReuseList->Next;
     }
     else if (instance->Cache->Free > 0)
@@ -200,7 +199,7 @@ static void *ers_obj_alloc_entry(ERS self)
 static void ers_obj_free_entry(ERS self, void *entry)
 {
     ers_instance_t *instance = (ers_instance_t *)self;
-    struct ers_list *reuse = (struct ers_list *)((unsigned int)entry - sizeof(struct ers_list));
+    struct ers_list *reuse = (struct ers_list *)((unsigned char *)entry - sizeof(struct ers_list));
 
     if (instance == NULL)
     {
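
The unsigned int casts replaced above truncate pointers on LP64 platforms (64-bit Linux, macOS, BSD), where unsigned int is 32 bits but pointers are 64 bits; doing the offset arithmetic through unsigned char * keeps the full pointer width. A minimal standalone sketch of the failure mode follows; the names (sketch.c, struct header) are hypothetical and not part of this patch:

/* sketch.c - hypothetical demo, not part of the patch.
 * On LP64 targets, unsigned int is 32 bits while pointers are 64 bits,
 * so (unsigned int)ptr keeps only the low 32 bits (compilers typically
 * warn about the pointer-to-integer size mismatch). */
#include <stdio.h>
#include <stdlib.h>

struct header { struct header *next; };  /* stand-in for struct ers_list */

int main(void)
{
    /* one allocation: bookkeeping header followed by the payload,
     * mirroring how the ERS prepends ers_list to each entry */
    struct header *h = malloc(sizeof(struct header) + 64);
    if (h == NULL)
        return 1;

    /* broken: pointer truncated to 32 bits before the offset is added */
    void *bad = (void *)((unsigned int)h + sizeof(struct header));

    /* correct: byte-wise arithmetic on the full-width pointer */
    void *good = (void *)((unsigned char *)h + sizeof(struct header));

    /* whenever malloc returns an address above 4 GiB, bad != good */
    printf("header %p  bad %p  good %p\n", (void *)h, bad, good);
    free(h);
    return 0;
}

Arithmetic on a uintptr_t would also be correct, but that type requires C99's <stdint.h>, while the unsigned char * form used in the patch needs nothing beyond C89.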