| 95 size_t elem_size, size_t index, const void *other, size_t n) { |
99 size_t elem_size, size_t index, const void *other, size_t n) { |
| 96 // out of bounds and special case check |
100 // out of bounds and special case check |
| 97 if (index > array->size) return -1; |
101 if (index > array->size) return -1; |
| 98 if (n == 0) return 0; |
102 if (n == 0) return 0; |
| 99 |
103 |
| |
104 // calculate required capacity |
| |
105 size_t req_capacity = array->size + n; |
| |
106 if (req_capacity <= array->size) { |
| |
107 errno = EOVERFLOW; |
| |
108 return -1; |
| |
109 } |
| |
110 |
| 100 // guarantee enough capacity |
111 // guarantee enough capacity |
| 101 if (array->capacity < array->size + n) { |
112 if (array->capacity < req_capacity) { |
| 102 const size_t new_capacity = cx_array_grow_capacity(array->capacity,array->size + n); |
113 const size_t new_capacity = cx_array_grow_capacity(array->capacity,req_capacity); |
| 103 if (cxReallocateArray(allocator, &array->data, new_capacity, elem_size)) { |
114 if (cxReallocateArray(allocator, &array->data, new_capacity, elem_size)) { |
| 104 return -1; // LCOV_EXCL_LINE |
115 return -1; |
| 105 } |
116 } |
| 106 array->capacity = new_capacity; |
117 array->capacity = new_capacity; |
| 107 } |
118 } |
| 108 |
119 |
| 109 // determine insert position |
120 // determine insert position |
| 110 char *dst = array->data; |
121 char *dst = array->data; |
| 111 dst += index * elem_size; |
122 dst += index * elem_size; |
| 112 |
123 |
| 113 // do we need to move some elements? |
124 // do we need to move some elements? |
| 114 if (index < array->size) { |
125 size_t elems_to_move = array->size - index; |
| 115 size_t elems_to_move = array->size - index; |
126 if (elems_to_move > 0) { |
| 116 char *target = dst + n * elem_size; |
127 char *target = dst + n * elem_size; |
| 117 memmove(target, dst, elems_to_move * elem_size); |
128 memmove(target, dst, elems_to_move * elem_size); |
| 118 } |
129 } |
| 119 |
130 |
| 120 // place the new elements, if any |
131 // place the new elements, if any |
| 337 |
350 |
| 338 int cx_array_insert_sorted_( |
351 int cx_array_insert_sorted_( |
| 339 const CxAllocator *allocator, |
352 const CxAllocator *allocator, |
| 340 CxArray *array, |
353 CxArray *array, |
| 341 size_t elem_size, |
354 size_t elem_size, |
| 342 cx_compare_func cmp_func, |
|
| 343 const void *sorted_data, |
355 const void *sorted_data, |
| 344 size_t n, |
356 size_t n, |
| |
357 cx_compare_func cmp_func, |
| 345 bool allow_duplicates |
358 bool allow_duplicates |
| 346 ) { |
359 ) { |
| 347 cx_compare_func_wrapper wrapper = {cmp_func}; |
360 cx_compare_func_wrapper wrapper = {cmp_func}; |
| 348 return cx_array_insert_sorted_s_(allocator, array, elem_size, sorted_data, |
361 return cx_array_insert_sorted_c_(allocator, array, elem_size, sorted_data, |
| 349 n, allow_duplicates, cx_acmp_wrap, &wrapper); |
362 n, cx_cmp_wrap, &wrapper, allow_duplicates); |
| |
363 } |
| |
364 |
| |
365 #ifndef WITH_QSORT_R |
| |
366 static cx_thread_local cx_compare_func2 cx_array_fn_for_qsort; |
| |
367 static cx_thread_local void *cx_array_context_for_qsort; |
| |
368 static int cx_array_qsort_wrapper(const void *l, const void *r) { |
| |
369 return cx_array_fn_for_qsort(l, r, cx_array_context_for_qsort); |
| |
370 } |
| |
371 #endif |

#if defined(WITH_QSORT_R) && defined(__APPLE__)
// macOS qsort_r() passes the context as the FIRST argument to the
// comparator, so the generic (left, right, context) signature must be
// adapted through this wrapper struct.
typedef struct QsortCmpFuncWrapper {
    cx_compare_func2 fn;
    void *context;
} QsortCmpFuncWrapper;

static int sort_comparefunc(void *context, const void *left, const void *right) {
    const QsortCmpFuncWrapper *wrap = context;
    return wrap->fn(left, right, wrap->context);
}
#endif
| |
385 |
| |
386 void cx_array_qsort_c(void *array, size_t nmemb, size_t size, |
| |
387 cx_compare_func2 fn, void *context) { |
| |
388 #ifdef WITH_QSORT_R |
| |
389 #ifndef __APPLE__ |
| |
390 qsort_r(array, nmemb, size, fn, context); |
| |
391 #else |
| |
392 QsortCmpFuncWrapper wrapper; |
| |
393 wrapper.fn = fn; |
| |
394 wrapper.context = context; |
| |
395 qsort_r(array, nmemb, size, &wrapper, sort_comparefunc); |
| |
396 #endif |
| |
397 #else |
| |
398 cx_array_fn_for_qsort = fn; |
| |
399 cx_array_context_for_qsort = context; |
| |
400 qsort(array, nmemb, size, cx_array_qsort_wrapper); |
| |
401 #endif |
| |
402 } |
| |
403 |
| |
404 void cx_array_sort_(CxArray *array, size_t elem_size, |
| |
405 cx_compare_func fn) { |
| |
406 qsort(array->data, array->size, elem_size, fn); |
| |
407 } |
| |
408 |
| |
409 void cx_array_sort_c_(CxArray *array, size_t elem_size, |
| |
410 cx_compare_func2 fn, void *context) { |
| |
411 cx_array_qsort_c(array->data, array->size, elem_size, fn, context); |
| 350 } |
412 } |
| 351 |
413 |
| 352 CxIterator cx_array_iterator_(CxArray *array, size_t elem_size) { |
414 CxIterator cx_array_iterator_(CxArray *array, size_t elem_size) { |
| 353 return cxIterator(array->data, elem_size, array->size); |
415 return cxIterator(array->data, elem_size, array->size); |
| 354 } |
416 } |
| 355 |
417 |
| 356 CxIterator cx_array_iterator_ptr_(CxArray *array) { |
418 CxIterator cx_array_iterator_ptr_(CxArray *array) { |
| 357 return cxIteratorPtr(array->data, array->size); |
419 return cxIteratorPtr(array->data, array->size); |
| |
420 } |
| |
421 |
| |
422 void cx_array_remove_(CxArray *array, size_t elem_size, size_t index, size_t n, bool fast) { |
| |
423 if (n == 0) return; |
| |
424 if (index >= array->size) return; |
| |
425 if (index + n >= array->size) { |
| |
426 // only tail elements are removed |
| |
427 array->size = index; |
| |
428 return; |
| |
429 } |
| |
430 array->size -= n; |
| |
431 size_t remaining = array->size - index; |
| |
432 char *dest = ((char*)array->data) + index * elem_size; |
| |
433 if (fast) { |
| |
434 char *src = dest + remaining * elem_size; |
| |
435 if (n == 1 && elem_size <= CX_WORDSIZE/8) { |
| |
436 // try to optimize int-sized values |
| |
437 // (from likely to unlikely) |
| |
438 if (elem_size == sizeof(int32_t)) { |
| |
439 *(int32_t*)dest = *(int32_t*)src; |
| |
440 return; |
| |
441 } |
| |
442 #if CX_WORDSIZE == 64 |
| |
443 if (elem_size == sizeof(int64_t)) { |
| |
444 *(int64_t*)dest = *(int64_t*)src; |
| |
445 return; |
| |
446 } |
| |
447 #endif |
| |
448 if (elem_size == sizeof(int8_t)) { |
| |
449 *(int8_t*)dest = *(int8_t*)src; |
| |
450 return; |
| |
451 } |
| |
452 if (elem_size == sizeof(int16_t)) { |
| |
453 *(int16_t*)dest = *(int16_t*)src; |
| |
454 return; |
| |
455 } |
| |
456 // note we cannot optimize the last branch, because |
| |
457 // the elem_size could be crazily misaligned |
| |
458 } |
| |
459 memcpy(dest, src, n * elem_size); |
| |
460 } else { |
| |
461 char *src = dest + n * elem_size; |
| |
462 memmove(dest, src, remaining * elem_size); |
| |
463 } |
| 358 } |
464 } |
| 359 |
465 |
| 360 void cx_array_free_(const CxAllocator *allocator, CxArray *array) { |
466 void cx_array_free_(const CxAllocator *allocator, CxArray *array) { |
| 361 cxFree(allocator, array->data); |
467 cxFree(allocator, array->data); |
| 362 array->data = NULL; |
468 array->data = NULL; |
| 499 size_t elem_size, |
605 size_t elem_size, |
| 500 const void *elem, |
606 const void *elem, |
| 501 cx_compare_func cmp_func |
607 cx_compare_func cmp_func |
| 502 ) { |
608 ) { |
| 503 cx_compare_func_wrapper wrapper = {cmp_func}; |
609 cx_compare_func_wrapper wrapper = {cmp_func}; |
| 504 return cx_array_binary_search_inf_c(arr, size, elem_size, elem, cx_acmp_wrap, &wrapper); |
610 return cx_array_binary_search_inf_c(arr, size, elem_size, elem, cx_cmp_wrap, &wrapper); |
| 505 } |
611 } |
| 506 |
612 |
| 507 size_t cx_array_binary_search( |
613 size_t cx_array_binary_search( |
| 508 const void *arr, |
614 const void *arr, |
| 509 size_t size, |
615 size_t size, |
| 510 size_t elem_size, |
616 size_t elem_size, |
| 511 const void *elem, |
617 const void *elem, |
| 512 cx_compare_func cmp_func |
618 cx_compare_func cmp_func |
| 513 ) { |
619 ) { |
| 514 cx_compare_func_wrapper wrapper = {cmp_func}; |
620 cx_compare_func_wrapper wrapper = {cmp_func}; |
| 515 return cx_array_binary_search_c(arr, size, elem_size, elem, cx_acmp_wrap, &wrapper); |
621 return cx_array_binary_search_c(arr, size, elem_size, elem, cx_cmp_wrap, &wrapper); |
| 516 } |
622 } |
| 517 |
623 |
| 518 size_t cx_array_binary_search_sup( |
624 size_t cx_array_binary_search_sup( |
| 519 const void *arr, |
625 const void *arr, |
| 520 size_t size, |
626 size_t size, |
| 521 size_t elem_size, |
627 size_t elem_size, |
| 522 const void *elem, |
628 const void *elem, |
| 523 cx_compare_func cmp_func |
629 cx_compare_func cmp_func |
| 524 ) { |
630 ) { |
| 525 cx_compare_func_wrapper wrapper = {cmp_func}; |
631 cx_compare_func_wrapper wrapper = {cmp_func}; |
| 526 return cx_array_binary_search_sup_c(arr, size, elem_size, elem, cx_acmp_wrap, &wrapper); |
632 return cx_array_binary_search_sup_c(arr, size, elem_size, elem, cx_cmp_wrap, &wrapper); |
| 527 } |
633 } |
| 528 |
634 |
| 529 #ifndef CX_ARRAY_SWAP_SBO_SIZE |
635 #ifndef CX_ARRAY_SWAP_SBO_SIZE |
| 530 #define CX_ARRAY_SWAP_SBO_SIZE 128 |
636 #define CX_ARRAY_SWAP_SBO_SIZE 128 |
| 531 #endif |
637 #endif |
| 847 cur += list->collection.elem_size; |
952 cur += list->collection.elem_size; |
| 848 } |
953 } |
| 849 return list->collection.size; |
954 return list->collection.size; |
| 850 } |
955 } |
| 851 |
956 |
| 852 // TODO: remove this hack once we have a solution for qsort() / qsort_s() |
|
| 853 static _Thread_local CxList *cx_hack_for_qsort_list; |
|
| 854 static int cx_hack_cmp_for_qsort(const void *l, const void *r) { |
|
| 855 return cx_list_compare_wrapper(l, r, cx_hack_for_qsort_list); |
|
| 856 } |
|
| 857 |
|
| 858 static void cx_arl_sort(struct cx_list_s *list) { |
957 static void cx_arl_sort(struct cx_list_s *list) { |
| 859 // TODO: think about if we can somehow use qsort()_s |
958 cx_array_qsort_c(((cx_array_list *) list)->data, |
| 860 cx_hack_for_qsort_list = list; |
|
| 861 qsort(((cx_array_list *) list)->data, |
|
| 862 list->collection.size, |
959 list->collection.size, |
| 863 list->collection.elem_size, |
960 list->collection.elem_size, |
| 864 cx_hack_cmp_for_qsort |
961 cx_list_compare_wrapper, |
| |
962 list |
| 865 ); |
963 ); |
| 866 } |
964 } |
| 867 |
965 |
| 868 static int cx_arl_compare( |
966 static int cx_arl_compare( |
| 869 const struct cx_list_s *list, |
967 const struct cx_list_s *list, |
| 993 allocator = cxDefaultAllocator; |
1091 allocator = cxDefaultAllocator; |
| 994 } |
1092 } |
| 995 |
1093 |
| 996 cx_array_list *list = cxCalloc(allocator, 1, sizeof(cx_array_list)); |
1094 cx_array_list *list = cxCalloc(allocator, 1, sizeof(cx_array_list)); |
| 997 if (list == NULL) return NULL; |
1095 if (list == NULL) return NULL; |
| 998 cx_list_init((CxList*)list, &cx_array_list_class, |
1096 cx_list_init((CxList*)list, &cx_array_list_class, allocator, elem_size); |
| 999 allocator, elem_size); |
|
| 1000 list->capacity = initial_capacity; |
1097 list->capacity = initial_capacity; |
| 1001 |
1098 |
| 1002 // allocate the array after the real elem_size is known |
1099 // allocate the array after the real elem_size is known |
| 1003 list->data = cxCalloc(allocator, initial_capacity, |
1100 list->data = cxCalloc(allocator, initial_capacity, |
| 1004 list->base.collection.elem_size); |
1101 list->base.collection.elem_size); |