/* NOTE(review): fragment of a Windows gettimeofday()-style shim, expressed as an
 * unresolved diff; the enclosing function signature is outside this view. */
DWORD t;
/* XSI says: "If tzp is not a null pointer, the behavior is unspecified" */
- assert(tzp == NULL);
+ ks_assert(tzp == NULL);
/* timeGetTime() returns milliseconds since system start (wraps ~49.7 days). */
t = timeGetTime();
tp->tv_sec = t / 1000;
/* set default values for initialized lists */
static int ks_list_attributes_setdefaults(ks_list_t *restrict l);
/* NOTE(review): the '-' lines below drop the NDEBUG guard so that repOk/attrOk
 * remain declared in all builds — presumably because ks_assert() (unlike
 * assert()) evaluates its argument regardless of NDEBUG; confirm. */
-#ifndef NDEBUG
/* check whether the list internal REPresentation is valid -- Costs O(n) */
static int ks_list_repOk(const ks_list_t *restrict l);
/* check whether the list attribute set is valid -- Costs O(1) */
static int ks_list_attrOk(const ks_list_t *restrict l);
-#endif
/* do not inline, this is recursive */
static void ks_list_sort_quicksort(ks_list_t *restrict l, int versus,
/* NOTE(review): tail of the pool-cleanup callback (ks_list_cleanup); its
 * signature and the earlier switch cases are outside this view. */
ks_list_clear(l);
break;
case KS_MPCL_DESTROY:
+ ks_rwl_write_lock(l->lock);
for (unsigned int i = 0; i < l->spareelsnum; i++) ks_pool_free(l->pool, &l->spareels[i]);
+ l->spareelsnum = 0;
ks_pool_free(l->pool, &l->spareels);
ks_pool_free(l->pool, &l->head_sentinel);
ks_pool_free(l->pool, &l->tail_sentinel);
/* NOTE(review): unlock immediately followed by destroy is only safe if no
 * other thread can still reach this list at pool-teardown time — presumably
 * guaranteed by the pool's cleanup ordering; confirm. */
+ ks_rwl_write_unlock(l->lock);
+ ks_rwl_destroy(&l->lock);
break;
}
}
/* list initialization */
-KS_DECLARE(ks_status_t) ks_list_create(ks_list_t ** list, ks_pool_t *pool) {
+KS_DECLARE(ks_status_t) ks_list_create(ks_list_t **list, ks_pool_t *pool) {
ks_list_t *l = NULL;
- ks_assert(l);
+ ks_assert(list);
ks_assert(pool);
seed_random();
/* NOTE(review): as shown, l is still NULL here — the allocation of l (and
 * presumably the *list assignment) is among the context lines this diff
 * fragment omits; confirm against the full file. */
l->pool = pool;
l->numels = 0;
+ ks_rwl_create(&l->lock, pool);
+ ks_assert(l->lock);
+
/* head/tail sentinels and mid pointer */
l->head_sentinel = (struct ks_list_entry_s *)ks_pool_alloc(pool, sizeof(struct ks_list_entry_s));
l->tail_sentinel = (struct ks_list_entry_s *)ks_pool_alloc(pool, sizeof(struct ks_list_entry_s));
ks_list_attributes_setdefaults(l);
- assert(ks_list_repOk(l));
- assert(ks_list_attrOk(l));
+ ks_assert(ks_list_repOk(l));
+ ks_assert(ks_list_attrOk(l));
/* NOTE(review): side effect inside ks_assert — if ks_assert ever compiles to
 * a no-op, the cleanup callback is never registered; confirm ks_assert always
 * evaluates its argument. */
ks_assert(ks_pool_set_cleanup(pool, l, NULL, ks_list_cleanup) == KS_STATUS_SUCCESS);
return KS_STATUS_SUCCESS;
}
-KS_DECLARE(ks_status_t) ks_list_destroy(ks_list_t ** list) {
+KS_DECLARE(ks_status_t) ks_list_destroy(ks_list_t **list) {
ks_list_t *l = NULL;
ks_assert(list);
/* NOTE(review): as shown, l is NULL when dereferenced below — the lines that
 * assign l from *list (and the actual teardown) are omitted from this diff
 * fragment; confirm against the full file. */
l->attrs.serializer = NULL;
l->attrs.unserializer = NULL;
- assert(ks_list_attrOk(l));
+ ks_assert(ks_list_attrOk(l));
/* NOTE(review): returns literal 0 from a ks_status_t function — presumably
 * 0 == KS_STATUS_SUCCESS; confirm. */
return 0;
}
/*
 * Set (or clear, with NULL) the element comparator used by ordered
 * operations (sort, min/max scans).
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 if l is NULL.
 */
int ks_list_attributes_comparator(ks_list_t *restrict l, element_comparator comparator_fun) {
	if (l == NULL) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.comparator = comparator_fun;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Set (or clear, with NULL) the seeker callback used by ks_list_seek().
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 if l is NULL.
 */
int ks_list_attributes_seeker(ks_list_t *restrict l, element_seeker seeker_fun) {
	if (l == NULL) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.seeker = seeker_fun;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Configure element-copy semantics: metric_fun reports an element's size in
 * bytes; copy_data non-zero makes the list deep-copy elements it stores.
 * A metric is required whenever deep copying is requested.
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 on invalid arguments.
 */
int ks_list_attributes_copy(ks_list_t *restrict l, element_meter metric_fun, int copy_data) {
	if (l == NULL || (metric_fun == NULL && copy_data != 0)) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.meter = metric_fun;
	l->attrs.copy_data = copy_data;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Set (or clear, with NULL) the per-element hasher used by ks_list_hash().
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 if l is NULL.
 */
int ks_list_attributes_hash_computer(ks_list_t *restrict l, element_hash_computer hash_computer_fun) {
	if (l == NULL) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.hasher = hash_computer_fun;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Set (or clear, with NULL) the element serializer used when dumping the
 * list to a file descriptor.
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 if l is NULL.
 */
int ks_list_attributes_serializer(ks_list_t *restrict l, element_serializer serializer_fun) {
	if (l == NULL) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.serializer = serializer_fun;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Set (or clear, with NULL) the element unserializer used when restoring the
 * list from a file descriptor.
 *
 * Takes the list's write lock while mutating the attribute set.
 * Returns 0 on success, -1 if l is NULL.
 */
int ks_list_attributes_unserializer(ks_list_t *restrict l, element_unserializer unserializer_fun) {
	if (l == NULL) return -1;

	ks_rwl_write_lock(l->lock);

	l->attrs.unserializer = unserializer_fun;
	ks_assert(ks_list_attrOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/*
 * Return the element stored at position pos, or NULL when pos is out of
 * range. Holds the read lock only around the traversal so the returned
 * pointer is captured before the lock is released.
 */
KS_DECLARE(void *) ks_list_get_at(const ks_list_t *restrict l, unsigned int pos) {
	struct ks_list_entry_s *entry;
	void *data = NULL;

	ks_rwl_read_lock(l->lock);

	entry = ks_list_findpos(l, pos);
	if (entry != NULL) data = entry->data;

	ks_rwl_read_unlock(l->lock);

	return data;
}
KS_DECLARE(void *) ks_list_get_max(const ks_list_t *restrict l) {
/* NOTE(review): numels/comparator are read before the lock is taken — a
 * benign-looking race, but confirm callers guarantee stability here. */
if (l->attrs.comparator == NULL || l->numels == 0)
return NULL;
+ ks_rwl_read_lock(l->lock);
/* NOTE(review): curminmax, s and versus are not declared in this view —
 * their declarations (and presumably versus = +1/-1 for max/min selection)
 * are among the omitted context lines. */
curminmax = l->head_sentinel->next->data;
for (s = l->head_sentinel->next->next; s != l->tail_sentinel; s = s->next) {
if (l->attrs.comparator(curminmax, s->data) * versus > 0)
curminmax = s->data;
}
+ ks_rwl_read_unlock(l->lock);
return curminmax;
}
/* NOTE(review): fragment of an extract-at routine (signature not in view).
 * The iter_active/pos guard runs before the write lock is taken — confirm
 * that is the intended ordering. */
if (l->iter_active || pos >= l->numels) return NULL;
+ ks_rwl_write_lock(l->lock);
tmp = ks_list_findpos(l, pos);
data = tmp->data;
ks_list_drop_elem(l, tmp, pos);
l->numels--;
- assert(ks_list_repOk(l));
+ ks_assert(ks_list_repOk(l));
+
+ ks_rwl_write_unlock(l->lock);
return data;
}
/* NOTE(review): fragment of an insert-at routine (signature not in view);
 * interior lines — including the spareelsnum decrement after reusing a spare
 * element, and the actual linking of lent — are omitted from this diff. */
if (l->iter_active || pos > l->numels) return -1;
+ ks_rwl_write_lock(l->lock);
/* this code optimizes malloc() with a free-list */
if (l->spareelsnum > 0) {
lent = l->spareels[l->spareelsnum - 1];
}
else {
lent = (struct ks_list_entry_s *)ks_pool_alloc(l->pool, sizeof(struct ks_list_entry_s));
/* NOTE(review): the '-' lines drop the graceful -1 return on allocation
 * failure in favor of aborting via ks_assert — an intentional policy change,
 * presumably matching pool semantics; confirm. */
- if (lent == NULL)
- return -1;
+ ks_assert(lent);
}
if (l->attrs.copy_data) {
/* mid pointer rebalancing for O(n/2) positional lookups */
if (pos <= (l->numels - 1) / 2) l->mid = l->mid->prev;
}
- assert(ks_list_repOk(l));
+ ks_assert(ks_list_repOk(l));
+
+ ks_rwl_write_unlock(l->lock);
return 1;
}
/*
 * Delete the first element matching data (located via ks_list_locate with
 * prelocked == KS_TRUE, since we already hold the write lock).
 *
 * Returns 0 on success, -1 if the element is not found or deletion fails.
 */
KS_DECLARE(int) ks_list_delete(ks_list_t *restrict l, const void *data) {
	int pos, r;
	int ret = 0;

	ks_rwl_write_lock(l->lock);

	pos = ks_list_locate(l, data, KS_TRUE);
	if (pos < 0) {
		ret = -1;
		goto done;
	}

	/* NOTE(review): ks_list_delete_at() acquires the write lock again while we
	 * still hold it — safe only if ks_rwl write locks are recursive for the
	 * owning thread; confirm against the ks_rwl implementation. */
	r = ks_list_delete_at(l, pos);
	if (r < 0) ret = -1;

done:
	ks_assert(ks_list_repOk(l));

	ks_rwl_write_unlock(l->lock);

	return ret;
}
/*
 * Delete the element at position pos.
 *
 * Fails (-1) when an iteration session is active or pos is out of range;
 * note this guard runs before the write lock is taken.
 * Returns 0 on success.
 */
KS_DECLARE(int) ks_list_delete_at(ks_list_t *restrict l, unsigned int pos) {
	struct ks_list_entry_s *delendo;

	if (l->iter_active || pos >= l->numels) return -1;

	ks_rwl_write_lock(l->lock);

	delendo = ks_list_findpos(l, pos);
	ks_list_drop_elem(l, delendo, pos);
	l->numels--;

	ks_assert(ks_list_repOk(l));

	ks_rwl_write_unlock(l->lock);

	return 0;
}
/* NOTE(review): fragment of a delete-range routine (signature not in view);
 * the element-freeing loop body is omitted from this diff. */
numdel = posend - posstart + 1;
/* full-range delete delegates to clear, which takes its own write lock —
 * safe because we return before locking here */
if (numdel == l->numels) return ks_list_clear(l);
+ ks_rwl_write_lock(l->lock);
+
tmp = ks_list_findpos(l, posstart); /* first el to be deleted */
lastvalid = tmp->prev; /* last valid element */
for (i = 0; i < (unsigned int)movedx; l->mid = l->mid->prev, i++);
}
- assert(posstart == 0 || lastvalid != l->head_sentinel);
+ ks_assert(posstart == 0 || lastvalid != l->head_sentinel);
i = posstart;
if (l->attrs.copy_data) {
/* also free element data */
}
}
}
- assert(i == posend + 1 && (posend != l->numels || tmp == l->tail_sentinel));
+ ks_assert(i == posend + 1 && (posend != l->numels || tmp == l->tail_sentinel));
lastvalid->next = tmp;
tmp->prev = lastvalid;
l->numels -= posend - posstart + 1;
- assert(ks_list_repOk(l));
+ ks_assert(ks_list_repOk(l));
+
+ ks_rwl_write_unlock(l->lock);
return numdel;
}
KS_DECLARE(int) ks_list_clear(ks_list_t *restrict l) {
struct ks_list_entry_s *s;
unsigned int numels;
+ int ret = -1;
+
+ ks_rwl_write_lock(l->lock);
/* will be returned */
numels = l->numels;
- if (l->iter_active) return -1;
+ if (l->iter_active) {
+ ret = -1;
+ goto done;
+ }
/* NOTE(review): the element-freeing loops are omitted from this diff
 * fragment. Also: ret is assigned but the function returns numels below, so
 * the iter_active failure is not reported to the caller — either ret should
 * be returned or the -1 path restored; flag for the full-file review. */
if (l->attrs.copy_data) { /* also free user data */
/* spare a loop conditional with two loops: spareing elems and freeing elems */
l->numels = 0;
l->mid = NULL;
- assert(ks_list_repOk(l));
+done:
+ ks_assert(ks_list_repOk(l));
+
+ ks_rwl_write_unlock(l->lock);
return numels;
}
/* NOTE(review): tail of a separate emptiness predicate (signature omitted). */
return (l->numels == 0);
}
/* NOTE(review): the '+' signature adds a prelocked flag so callers that
 * already hold the list lock (e.g. ks_list_delete) can avoid re-locking. */
-KS_DECLARE(int) ks_list_locate(const ks_list_t *restrict l, const void *data) {
+KS_DECLARE(int) ks_list_locate(const ks_list_t *restrict l, const void *data, ks_bool_t prelocked) {
struct ks_list_entry_s *el;
int pos = 0;
+ if (!prelocked) ks_rwl_read_lock(l->lock);
+
if (l->attrs.comparator != NULL) {
/* use comparator */
/* NOTE(review): the comparator-branch body is omitted from this diff — the
 * visible loop compares raw pointers, which is the non-comparator fallback;
 * the function's closing brace is likewise outside this fragment. */
for (el = l->head_sentinel->next; el != l->tail_sentinel; el = el->next, pos++) {
if (el->data == data) break;
}
}
+ if (!prelocked) ks_rwl_read_unlock(l->lock);
if (el == l->tail_sentinel) return -1;
return pos;
/*
 * Linear scan for the first element the user-supplied seeker callback
 * accepts (non-zero return) for the given indicator.
 *
 * Returns the matching element's data pointer, or NULL when no seeker is
 * configured or nothing matches. Holds the read lock for the whole scan.
 */
KS_DECLARE(void *) ks_list_seek(ks_list_t *restrict l, const void *indicator) {
	const struct ks_list_entry_s *cursor;
	void *found = NULL;

	if (l->attrs.seeker == NULL) return NULL;

	ks_rwl_read_lock(l->lock);

	for (cursor = l->head_sentinel->next; cursor != l->tail_sentinel; cursor = cursor->next) {
		if (l->attrs.seeker(cursor->data, indicator) != 0) {
			found = cursor->data;
			break;
		}
	}

	ks_rwl_read_unlock(l->lock);

	return found;
}
/*
 * Membership test: non-zero when data is in the list.
 * ks_list_locate() takes the read lock itself (prelocked == KS_FALSE).
 */
KS_DECLARE(int) ks_list_contains(const ks_list_t *restrict l, const void *data) {
	return (ks_list_locate(l, data, KS_FALSE) >= 0);
}
KS_DECLARE(int) ks_list_concat(const ks_list_t *l1, const ks_list_t *l2, ks_list_t *restrict dest) {
/* NOTE(review): the guard that precedes this early return is omitted from
 * the diff fragment; the copy loops are likewise gutted below. */
return -1;
//ks_list_init(dest);
/* NOTE(review): fixed lock order l1 -> l2 -> dest; concurrent calls with
 * swapped source arguments could deadlock — confirm callers never do that. */
+ ks_rwl_read_lock(l1->lock);
+ ks_rwl_read_lock(l2->lock);
+ ks_rwl_write_lock(dest->lock);
dest->numels = l1->numels + l2->numels;
- if (dest->numels == 0)
- return 0;
+ if (dest->numels == 0) goto done;
/* copy list1 */
srcel = l1->head_sentinel->next;
for (cnt = 0; cnt < (unsigned int)err; cnt++) dest->mid = dest->mid->prev;
}
- assert(!(ks_list_repOk(l1) && ks_list_repOk(l2)) || ks_list_repOk(dest));
+done:
+
+ ks_assert(!(ks_list_repOk(l1) && ks_list_repOk(l2)) || ks_list_repOk(dest));
+
+ ks_rwl_write_unlock(dest->lock);
+ ks_rwl_read_unlock(l2->lock);
+ ks_rwl_read_unlock(l1->lock);
return 0;
}
/* NOTE(review): fragment of the public sort entry point (signature not in
 * view); the trivial-size check runs before the write lock is taken. */
if (l->numels <= 1)
return 0;
+
+ ks_rwl_write_lock(l->lock);
ks_list_sort_quicksort(l, versus, 0, l->head_sentinel->next, l->numels - 1, l->tail_sentinel->prev);
-
+ ks_assert(ks_list_repOk(l));
- assert(ks_list_repOk(l));
+
+ ks_rwl_write_unlock(l->lock);
+
return 0;
}
/*
 * Begin an iteration session: reset the cursor to the first element.
 * Returns 0 if a session is already active (checked before locking),
 * 1 on success.
 */
KS_DECLARE(int) ks_list_iterator_start(ks_list_t *restrict l) {
	if (l->iter_active) return 0;

	ks_rwl_write_lock(l->lock);
	l->iter_pos = 0;
	l->iter_active = 1;
	l->iter_curentry = l->head_sentinel->next;
	ks_rwl_write_unlock(l->lock);

	return 1;
}
/* NOTE(review): fragment of the iterator advance routine (signature and the
 * declaration of toret are outside this view). The write lock is taken
 * because advancing mutates the cursor state. */
if (!l->iter_active) return NULL;
+ ks_rwl_write_lock(l->lock);
toret = l->iter_curentry->data;
l->iter_curentry = l->iter_curentry->next;
l->iter_pos++;
+ ks_rwl_write_unlock(l->lock);
return toret;
}
/*
 * Non-zero while the active iteration session has elements left.
 * Returns 0 when no session is active (checked before locking).
 */
KS_DECLARE(int) ks_list_iterator_hasnext(const ks_list_t *restrict l) {
	int more = 0;

	if (!l->iter_active) return 0;

	ks_rwl_read_lock(l->lock);
	more = (l->iter_pos < l->numels);
	ks_rwl_read_unlock(l->lock);

	return more;
}
/*
 * End the active iteration session and reset the cursor position.
 * Returns 0 if no session was active (checked before locking), 1 on success.
 */
KS_DECLARE(int) ks_list_iterator_stop(ks_list_t *restrict l) {
	if (!l->iter_active) return 0;

	ks_rwl_write_lock(l->lock);
	l->iter_pos = 0;
	l->iter_active = 0;
	ks_rwl_write_unlock(l->lock);

	return 1;
}
KS_DECLARE(int) ks_list_hash(const ks_list_t *restrict l, ks_list_hash_t *restrict hash) {
struct ks_list_entry_s *x;
ks_list_hash_t tmphash;
+ int ret = 0;
+
+ ks_assert(hash != NULL);
- assert(hash != NULL);
+ ks_rwl_read_lock(l->lock);
tmphash = l->numels * 2 + 100;
/* NOTE(review): the hashing loops and the matching #if for the dangling
 * #else below are omitted from this diff fragment. */
if (l->attrs.hasher == NULL) {
tmphash += tmphash % l->numels;
}
#else
- return -1;
+ ret = -1;
#endif
}
else {
}
}
+ ks_rwl_read_unlock(l->lock);
+
/* NOTE(review): *hash is assigned even on the ret == -1 path — confirm
 * callers do not treat the output as valid when -1 is returned. */
*hash = tmphash;
- return 0;
+ return ret;
}
/* NOTE(review): scattered fragments of the dump/restore (serialization)
 * routines; the '-'/'+' pairs consistently rename the old simclist list_*
 * calls to their ks_list_* counterparts. Surrounding control flow is omitted. */
#ifndef SIMCLIST_NO_DUMPRESTORE
/* include an hash, if possible */
if (l->attrs.hasher != NULL) {
- if (htonl(list_hash(l, &header.listhash)) != 0) {
+ if (htonl(ks_list_hash(l, &header.listhash)) != 0) {
/* could not compute list hash! */
return -1;
}
buf = ks_pool_alloc(l->pool, header.elemlen);
for (cnt = 0; cnt < header.numels; cnt++) {
READ_ERRCHECK(fd, buf, header.elemlen);
- list_append(l, l->attrs.unserializer(buf, &elsize));
+ ks_list_append(l, l->attrs.unserializer(buf, &elsize));
totmemorylen += elsize;
}
}
for (cnt = 0; cnt < header.numels; cnt++) {
/* per-element pool allocation; ownership passes to the list entry */
buf = ks_pool_alloc(l->pool, header.elemlen);
READ_ERRCHECK(fd, buf, header.elemlen);
- list_append(l, buf);
+ ks_list_append(l, buf);
}
totmemorylen = header.numels * header.elemlen;
}
buf = ks_pool_alloc(l->pool, (ks_size_t)elsize);
READ_ERRCHECK(fd, buf, elsize);
totreadlen += elsize;
- list_append(l, l->attrs.unserializer(buf, &elsize));
+ ks_list_append(l, l->attrs.unserializer(buf, &elsize));
totmemorylen += elsize;
}
}
buf = ks_pool_alloc(l->pool, elsize);
READ_ERRCHECK(fd, buf, elsize);
totreadlen += elsize;
- list_append(l, buf);
+ ks_list_append(l, buf);
}
totmemorylen = totreadlen;
}
/* possibly verify the list consistency */
/* wrt hash */
/* don't do that
- if (header.listhash != 0 && header.listhash != list_hash(l)) {
+ if (header.listhash != 0 && header.listhash != ks_list_hash(l)) {
errno = ECANCELED;
return -1;
}
/* NOTE(review): fragments of the file-based dump/restore wrappers
 * (signatures not in view). The write lock is held across file I/O —
 * potentially long blocking for other threads; presumably acceptable for
 * these bulk operations, but worth confirming. */
fd = open(filename, oflag, mode);
if (fd < 0) return -1;
+ ks_rwl_write_lock(l->lock);
ks_list_dump_filedescriptor(l, fd, len);
+ ks_rwl_write_unlock(l->lock);
close(fd);
return 0;
fd = open(filename, O_RDONLY, 0);
if (fd < 0) return -1;
+ ks_rwl_write_lock(l->lock);
ks_list_restore_filedescriptor(l, fd, len);
+ ks_rwl_write_unlock(l->lock);
close(fd);
return 0;
}
/* NOTE(review): the NDEBUG guard around the debug validators is removed so
 * they are compiled in all builds (their ks_assert call sites always
 * reference them); the function body is omitted from this fragment. */
-#ifndef NDEBUG
static int ks_list_repOk(const ks_list_t *restrict l) {
int ok, i;
struct ks_list_entry_s *s;
return ok;
}
-#endif
-
/* For Emacs:
* Local Variables: