
#endif


/* This function is called at compiler startup time; the pool is destroyed at shutdown. */
void gl_init_pool(void){
#if LISTUSESPOOL
	if (g_list_head_pool != NULL) {
		ASC_PANIC("ERROR: gl_init_pool called twice.\n");
	/* ... */
#endif
}

void gl_destroy_pool(void){
#if LISTUSESPOOL
	if (g_list_head_pool==NULL) return;
	gl_emptyrecycler(); /* deallocate data in recycled lists, zero RecycledContents[] */
	/* ... */
}


int gl_pool_initialized(void){
#if LISTUSESPOOL
	return (g_list_head_pool == NULL) ? FALSE : TRUE;
#else
	/* ... */
}


void gl_report_pool(FILE *f){
#if LISTUSESPOOL
	if (g_list_head_pool==NULL)
		FPRINTF(f,"ListHeadPool is empty\n");
	/* ... */
#endif
}


/*
 * 'capacity' is a guess at the capacity the list will need. If the
 * list capacity later needs to be expanded, it will be, but it is
 * cheaper if the guess is close.
 */
struct gl_list_t *gl_create(unsigned long int capacity){
	struct gl_list_t *new;
	unsigned long i;
	i = capacity = MAX((unsigned long)LOWCAPACITY,capacity);
	/* ... */
	}
}
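
/*
 * Illustrative sketch only (added comment, not part of the original file):
 * typical create/append/fetch/destroy usage of a gl_list_t holding
 * caller-owned pointers. 'item1' and 'item2' are hypothetical stand-ins.
 *
 *   struct gl_list_t *l = gl_create(10L);   // capacity is only a guess
 *   gl_append_ptr(l, (VOIDPTR)item1);       // list positions are 1-based
 *   gl_append_ptr(l, (VOIDPTR)item2);
 *   VOIDPTR p = gl_fetch(l, 1);             // fetches item1
 *   gl_destroy(l);                          // frees the list, not the items;
 *                                           // gl_free_and_destroy frees both
 */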

void gl_free_and_destroy(struct gl_list_t *list){
	unsigned long c;
	if (list == NULL) return;
#if LISTUSESPOOL
	/* ... */
			HighWaterMark[c] = RecycledContents[c];
		}
#endif
	}else{
#if LISTRECYCLERDEBUG
		if (c<=MAXRECYCLESIZE) {
			ListsDestroyed[c]++;
	/* ... */
	}
}

void gl_destroy(struct gl_list_t *list){
	unsigned long c;
	if (list == NULL) return;
#if LISTUSESPOOL
	/* ... */
	}
}


VOIDPTR gl_fetchF(CONST struct gl_list_t *list, unsigned long int pos){
	asc_assert(NULL != list);
	ASSERTRANGE(list,pos,"gl_fetchF");
	return list->data[pos-1];
}


#define SORTED_OFF(l) (l)->flags &= (~gsf_SORTED)
#define EXPAND_OFF(l) (l)->flags &= (~gsf_EXPANDABLE)


/* for internal use only, where we can verify that STUFF is correct
 * and it is SOMEONE else's job to maintain the integrity of
 * flags.sorted and list->length.
 */
/* ... */
#else
#define GL_STORE(list,pos,ptr) gl_store((list),(pos),(ptr))
#endif
void gl_store(struct gl_list_t *list, unsigned long int pos, VOIDPTR ptr){
	asc_assert(NULL != list);
	if (GL_LENGTH(list)>1) SORTED_OFF(list);
	ASSERTRANGE(list,pos,"gl_store");
	list->data[pos-1] = ptr;
}


#if REALLOCDEBUG
/* this we know to not leak if MOD_REALLOC is defined */
#define DATAREALLOC(l,incr) \
/* ...
 * was being left out of the cleanup process at shutdown.
 */


static void gl_expand_list(struct gl_list_t *list){
	VOIDPTR *tmp;
	unsigned long increment;
	increment = (list->capacity*50)/100; /* 10% is too small. try 50% baa */
	/* ... */
	}
}


/* changes only the capacity of the list, and possibly data pointer */
static void gl_expand_list_by(struct gl_list_t *list,unsigned long addlen)
{
	/* ... */
	asc_assert(list->data!=NULL);
}


void gl_append_ptr(struct gl_list_t *list, VOIDPTR ptr){
	asc_assert((NULL != list) && (0 != gl_expandable(list)));
	if (list->length > 0) SORTED_OFF(list);
	if (++(list->length) > list->capacity) /* expand list capacity */
		gl_expand_list(list);
	list->data[list->length-1] = ptr;
}


void gl_append_list(struct gl_list_t *extendlist, struct gl_list_t *list){
	register unsigned long c,len,oldlen,newlen;

	asc_assert((NULL != extendlist) &&
	/* ... */
	}
}


void gl_fast_append_ptr(struct gl_list_t *list, VOIDPTR ptr){
	asc_assert(NULL != list);
	SORTED_OFF(list);
	list->data[list->length] = ptr;
	++(list->length);
}
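
/*
 * Note (added comment, not in the original): gl_fast_append_ptr does no
 * capacity check and never expands the list, so it is only safe when the
 * caller has already reserved enough room. A minimal sketch, assuming a
 * hypothetical 'src' array of 'n' pointers:
 *
 *   unsigned long i;
 *   struct gl_list_t *l = gl_create(n);   // capacity >= n, so no expansion needed
 *   for (i = 0; i < n; i++) {
 *     gl_fast_append_ptr(l, src[i]);      // skips the checks gl_append_ptr makes
 *   }
 */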


unsigned long gl_safe_length(CONST struct gl_list_t *list){
	if (list !=NULL) {
		return list->length;
	} else {
		return 0L;
	}
}
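
/*
 * Illustrative contrast (added comment, not in the original):
 * gl_safe_length tolerates a NULL list and reports 0, while gl_length
 * (gl_lengthF in checked builds) requires a non-NULL list.
 *
 *   struct gl_list_t *maybe = NULL;
 *   unsigned long n = gl_safe_length(maybe);  // n == 0, no assertion
 *   n = gl_length(maybe);                     // would trip asc_assert instead
 */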


unsigned long gl_lengthF(CONST struct gl_list_t *list){
	asc_assert(NULL != list);
	return list->length;
}


unsigned long gl_capacity(CONST struct gl_list_t *list){
	if (list==NULL) return 0;
	return list->capacity;
}


int gl_sorted(CONST struct gl_list_t *list){
	asc_assert(NULL != list);
	return (int)(list->flags & gsf_SORTED);
}


#if LISTIMPLEMENTED
/* I believe this function is ok */
/**
	This function will choose a pivot. It can actually return any
	number between and including upper and lower. However, the goal
	is to return a pivot that will not cause Quick Sort to have
	O(n^2) behavior. This returns the value rather than the address
	of the pivot.

	The heuristic I am choosing is to pick the middle of three test
	sites which are upper, lower, and (upper+lower)/2.
*/
static void *gl_choose_ptrpivot(struct gl_list_t *list
	, unsigned long int lower, unsigned long int upper
){
	unsigned long middle;
	middle = (upper+lower)/2;
	if(gl_fetch(list,middle) > gl_fetch(list,lower)) {
		/* middle > lower */
		if(gl_fetch(list,upper) > gl_fetch(list,middle)) {
			/* middle > lower && upper > middle */
			return gl_fetch(list,middle);
		}else{
			/* middle > lower && middle >= upper */
			if(gl_fetch(list,upper) > gl_fetch(list,lower)) {
				/* middle > lower && middle >= upper && upper > lower */
				return gl_fetch(list,upper);
			}else{
				/* middle > lower && middle >= upper && lower >= upper */
				return gl_fetch(list,lower);
			}
		}
	}else{
		/* lower >= middle */
		if(gl_fetch(list,upper) > gl_fetch(list,lower)) {
			/* lower >= middle && upper > lower */
			return gl_fetch(list,lower);
		}else{
			/* lower >= middle && lower >= upper */
			if(gl_fetch(list,middle) > gl_fetch(list,upper)) {
				/* lower >= middle && lower >= upper && middle > upper */
				return gl_fetch(list,middle);
			}else{
				/* lower >= middle && lower >= upper && upper >= middle */
				return gl_fetch(list,upper);
			}
		}
	}
}
#endif

/**
	This function will choose a pivot. It can actually return any
	number between and including upper and lower. However, the goal
	is to return a pivot that will not cause Quick Sort to have
	O(n^2) behavior.

	The heuristic I am choosing is to pick the middle of three test
	sites which are upper, lower, and (upper+lower)/2.
*/
static unsigned long gl_choose_pivot(struct gl_list_t *list
	, unsigned long int lower, unsigned long int upper, CmpFunc func
){
	unsigned long middle;
	middle = (upper+lower)/2;
	if((*func)(gl_fetch(list,middle),gl_fetch(list,lower)) > 0) {
		/* middle > lower */
		if((*func)(gl_fetch(list,upper),gl_fetch(list,middle)) > 0) {
			/* middle > lower && upper > middle */
			return middle;
		}else{
			/* middle > lower && middle >= upper */
			if((*func)(gl_fetch(list,upper),gl_fetch(list,lower)) > 0) {
				/* middle > lower && middle >= upper && upper > lower */
				return upper;
			}else{
				/* middle > lower && middle >= upper && lower >= upper */
				return lower;
			}
		}
	}else{
		/* lower >= middle */
		if((*func)(gl_fetch(list,upper),gl_fetch(list,lower)) > 0) {
			/* lower >= middle && upper > lower */
			return lower;
		}else{
			/* lower >= middle && lower >= upper */
			if((*func)(gl_fetch(list,middle),gl_fetch(list,upper)) > 0) {
				/* lower >= middle && lower >= upper && middle > upper */
				return middle;
			}else{
				/* lower >= middle && lower >= upper && upper >= middle */
				return upper;
			}
		}
	}
}


/* do not export. makes rosy assumptions. callers of this function
 * are responsible for making sure list.sorted is set appropriately.
 */
static void gl_swap(struct gl_list_t *list
	, unsigned long int i, unsigned long int j
){
	VOIDPTR temp;
	temp = gl_fetch(list,i);
	GL_STORE(list,i,gl_fetch(list,j));
	GL_STORE(list,j,temp);
}


#if LISTIMPLEMENTED
/* this function is probably ok */
void gl_upsort(struct gl_list_t *list,
	/* ... */
#endif

static
void gl_qsort(struct gl_list_t *list, unsigned long int lower
	, unsigned long int upper, CmpFunc func
){
	unsigned long pivot,i,j;
	VOIDPTR pivot_element;
	j = upper;
	i = lower;
	pivot = gl_choose_pivot(list,lower,upper,func);
	pivot_element = gl_fetch(list,pivot);
	do{
		while((*func)(pivot_element,gl_fetch(list,i))>0) i += 1;
		while((*func)(gl_fetch(list,j),pivot_element)>0) j -= 1;
		if(i <= j){
			gl_swap(list,i++,j--);
		}
	}while(i <= j);
	if(lower < j) gl_qsort(list,lower,j,func);
	if(i<upper) gl_qsort(list,i,upper,func);
}


#if LISTIMPLEMENTED
/* ok once its children are ok */
void gl_ptr_sort(struct gl_list_t *list, int increasing)
	/* ... */
}
#endif


#if LISTIMPLEMENTED
/* broken */
void gl_insert_ptr_sorted(struct gl_list_t *,VOIDPTR,int){}
#endif


void gl_sort(struct gl_list_t *list, CmpFunc func){
	asc_assert((NULL != list) && (NULL != func));
	if (GL_LENGTH(list) > 1) {
		gl_qsort(list,1,GL_LENGTH(list),func);
	}
	list->flags |= gsf_SORTED;
}
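
/*
 * Illustrative sketch only (added; 'cmp_names' and 'struct demo_node' are
 * hypothetical, not part of this file, and the CmpFunc typedef is assumed
 * to take two CONST VOIDPTR arguments): a comparator returns <0, 0 or >0,
 * exactly like strcmp, so sorting a list of nodes by name looks like this:
 *
 *   static int cmp_names(CONST VOIDPTR a, CONST VOIDPTR b){
 *     return strcmp(((CONST struct demo_node *)a)->name,
 *                   ((CONST struct demo_node *)b)->name);
 *   }
 *
 *   gl_sort(l, cmp_names);   // afterwards gl_sorted(l) is true
 */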


void gl_insert_sorted(struct gl_list_t *list
	, VOIDPTR ptr, CmpFunc func
){
	int comparison;
	register unsigned long lower,upper,search=0L;

	asc_assert((NULL != list) &&
	           (0 != gl_expandable(list)) &&
	           (NULL != func));
	if(list->flags & gsf_SORTED) {
		if(GL_LENGTH(list)==0) {
			gl_append_ptr(list,ptr);
			list->flags |= gsf_SORTED;
			return;
		}
		if(++list->length > list->capacity) /* expand list capacity */
			gl_expand_list(list);
		lower = 1;
		upper = GL_LENGTH(list)-1;
		while (lower <= upper) {
			search = ( lower + upper) >> 1; /* divide by two */
			comparison = (*func)(gl_fetch(list,search),ptr);
			if (comparison==0) {
				/* make room */
				for(lower=GL_LENGTH(list);lower>search;lower--)
					GL_STORE(list,lower,gl_fetch(list,lower-1));
				/* store it */
				GL_STORE(list,search+1,ptr);
				list->flags |= gsf_SORTED;
				return;
			}
			if (comparison < 0) lower = search + 1;
			else upper = search - 1;
		}
		if ((*func)(gl_fetch(list,search),ptr) > 0) {
			for(lower=GL_LENGTH(list);lower>search;lower--) {
				GL_STORE(list,lower,gl_fetch(list,lower-1));
			}
			GL_STORE(list,search,ptr);
		}else{
			for(lower=GL_LENGTH(list);lower>(search+1);lower--) {
				GL_STORE(list,lower,gl_fetch(list,lower-1));
			}
			GL_STORE(list,search+1,ptr);
		}
		list->flags |= gsf_SORTED;
	}else{
		ERROR_REPORTER_HERE(ASC_PROG_WARNING,"gl_insert_sorted called on unsorted list -- sorting list now.");
		gl_append_ptr(list,ptr);
		gl_sort(list,func);
	}
}
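
/*
 * Illustrative sketch only (added; reuses the hypothetical 'cmp_names'
 * comparator from the note above): once a list has been sorted, new
 * entries can be placed by binary insertion instead of re-sorting.
 *
 *   gl_sort(l, cmp_names);
 *   gl_insert_sorted(l, (VOIDPTR)extra_node, cmp_names);  // list stays sorted
 */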


void gl_iterate(struct gl_list_t *list, void (*func) (VOIDPTR)){
#ifdef NDEBUG
	register unsigned long length,counter;
	length = GL_LENGTH(list);
	/* ... */
#endif
}
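
/*
 * Illustrative sketch only (added; 'print_name' and 'struct demo_node' are
 * hypothetical): gl_iterate calls the supplied function once for each
 * stored pointer, in list order.
 *
 *   static void print_name(VOIDPTR p){
 *     printf("%s\n", ((struct demo_node *)p)->name);
 *   }
 *
 *   gl_iterate(l, print_name);
 */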


/*
	this will probably need casts to compile or an intermediate
	void *array variable

	this function is on the critical path in the compiler, or
	should be, so it is highly tweaked here.
*/
unsigned long gl_ptr_search(CONST struct gl_list_t *list
	, CONST VOIDPTR match, int increasing
){
#define SHORTSEARCH 5
	/* if list is short, use linear instead of binary search.
	SHORTSEARCH is the minimum size for a binary search.
	SHORTSEARCH must be >=0 */
	register unsigned long lower,upper,search;

	asc_assert(NULL != list);
	if(!GL_LENGTH(list)) return 0;
	if((list->flags & gsf_SORTED)
#if (SHORTSEARCH > 0)
		&& (upper = GL_LENGTH(list)-1) > SHORTSEARCH
#endif
	){ /* use binary search */
		register long comparison;
		lower = 0;
#if (SHORTSEARCH <= 0)
		upper = GL_LENGTH(list)-1;
#endif
		if(increasing){
			while(lower <= upper) {
				search = (lower+upper) / 2;
				comparison =
					((char *)list->data[search]-(char *)match); /* pointer difference */
				if(comparison==0){
					return (search + 1);
				}else if(comparison < 0){
					lower = search + 1;
				}else if(search > 0){
					upper = search - 1;
				}else{
					return 0;
				}
			}
		}else{
			while(lower <= upper){
				search = (lower+upper) / 2;
				comparison =
					((char *)match-(char *)list->data[search]); /* pointer difference */
				if(comparison==0){
					return (search + 1);
				}else if(comparison < 0){
					lower = search + 1;
				}else if(search > 0){
					upper = search - 1;
				}else{
					return 0;
				}
			}
		}
	}else{ /* use linear search */
		upper = GL_LENGTH(list);
		if(increasing){
			for(search=0; search < upper; search++) {
				if (list->data[search]==match) return search+1;
			}
		}else{
			/* the test could be written 'search < upper' with unsigned assumption,
			but someday we may want to make list length signed and move on. */
			/* I removed the 'search >= 0 &&' part of the test because indeed it's
			tautological with the current unsigned integers -- JP */
			for(search=upper-1; /* search >= 0 && */ search < upper; search--){
				if (list->data[search]==match) return search+1;
			}
		}
	}
	return 0; /* search failed */
}
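
/*
 * Illustrative note (added; 'item2' is a hypothetical stand-in):
 * gl_ptr_search matches on pointer identity, not on pointed-to contents.
 * On a long sorted list it binary-searches by pointer value, so
 * 'increasing' must describe the order the pointers were sorted in;
 * on short or unsorted lists it falls back to a linear scan.
 *
 *   unsigned long pos = gl_ptr_search(l, (VOIDPTR)item2, 1);
 *   if (pos) {
 *     // item2 is stored at 1-based position 'pos'
 *   }
 */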

unsigned long gl_search(CONST struct gl_list_t *list
	,CONST VOIDPTR match, CmpFunc func
){
	register unsigned long lower,upper,search;
#ifdef __alpha
	long comparison;
	/* ... */
			if (comparison < 0) lower = search + 1;
			else upper = search -1;
		}
	}else{ /* use linear search */
		upper = GL_LENGTH(list);
		for(search=0; search < upper; search++) {
			if ((*func)(list->data[search],match)==0) return search+1;
	/* ... */
}


unsigned long gl_search_reverse(CONST struct gl_list_t *list
	,CONST VOIDPTR match, CmpFunc func
){
	register unsigned long search;

	asc_assert((NULL != list) && (NULL != func));
	if(list->flags & gsf_SORTED) { /* use binary search */
		return gl_search(list, match, func);
	}else{ /* use linear search */
		/* since search is unsigned, the FOR loop cannot go to
		 * zero, since 0-- is ULONG_MAX. Must do zero separately.
		 */
		for(search = (GL_LENGTH(list)-1); search > 0; search-- ) {
			if((*func)(list->data[search],match)==0) return search+1;
		}
		search = 0;
		if((*func)(list->data[search],match)==0) return search+1;
	}
	return 0; /* search failed */
}
|
|
921 |
|
|
922 |
int gl_unique_list(CONST struct gl_list_t *list) |
int gl_unique_list(CONST struct gl_list_t *list){ |
|
{ |
|
923 |
unsigned long i,j,len,lm1; |
unsigned long i,j,len,lm1; |
924 |
VOIDPTR e; |
VOIDPTR e; |
925 |
VOIDPTR *d; |
VOIDPTR *d; |
941 |
return 1; |
return 1; |
942 |
} |
} |
943 |
|
|
944 |
int gl_empty(CONST struct gl_list_t *list) |
|
945 |
{ |
int gl_empty(CONST struct gl_list_t *list){ |
946 |
asc_assert(NULL != list); |
asc_assert(NULL != list); |
947 |
return(GL_LENGTH(list)==0); |
return(GL_LENGTH(list)==0); |
948 |
} |
} |


void gl_delete(struct gl_list_t *list, unsigned long int pos, int dispose){
	unsigned long c,length;
	int sorted;
	VOIDPTR ptr;
	/* ... */
		list->length--;
		if (sorted) {
			list->flags |= gsf_SORTED;
		}else{
			SORTED_OFF(list);
		}
	}
}


void gl_reverse(struct gl_list_t *list){
	VOIDPTR *tmpdata;
	unsigned long c,len;

	/* ... */
	list->data = tmpdata;
}


void gl_reset(struct gl_list_t *list){
	asc_assert(NULL != list);
	list->flags = (unsigned int)(gsf_SORTED | gsf_EXPANDABLE);
	list->length = 0;
}


struct gl_list_t *gl_copy(CONST struct gl_list_t *list){
	struct gl_list_t *new;
	unsigned long counter,length;

	/* ... */
	return new;
}

struct gl_list_t *gl_concat(CONST struct gl_list_t *list1
	, CONST struct gl_list_t *list2
){
	struct gl_list_t *new;
	unsigned long counter,length1,length2;

	/* ... */
	length1 = GL_LENGTH(list1);
	length2 = GL_LENGTH(list2);
	new = gl_create(length1+length2);
	for(counter = 1;counter<= length1; counter++) {
		gl_append_ptr(new,gl_fetch(list1,counter));
	}
	for(counter = 1;counter<= length2; counter++) {
		gl_append_ptr(new,gl_fetch(list2,counter));
	}
	return new;
}

int gl_compare_ptrs(CONST struct gl_list_t *l1, CONST struct gl_list_t *l2){
	unsigned long len,c;
	register unsigned long i1,i2;
	if (l1==NULL || l2 == NULL) {
		ASC_PANIC("Called with NULL input");
	}
	len = MIN(GL_LENGTH(l1),GL_LENGTH(l2));
	for(c=0;c < len; c++) {
		i1 = (asc_intptr_t)(l1->data[c]);
		i2 = (asc_intptr_t)(l2->data[c]);
		if(i1<i2) {
			return -1;
		}
		if(i1>i2) {
			return 1;
		}
	}
	/* ... */
		(( GL_LENGTH(l2) > len) ? -1 : 1);
}

void gl_set_sorted(struct gl_list_t *list, int TRUE_or_FALSE){
	asc_assert(NULL != list);
	if (FALSE == TRUE_or_FALSE)
		list->flags &= ~gsf_SORTED;
	else
		list->flags |= gsf_SORTED;
}

int gl_expandable(struct gl_list_t *list){
	asc_assert(NULL != list);
	return (int)(list->flags & gsf_EXPANDABLE);
}

void gl_set_expandable(struct gl_list_t *list, int TRUE_or_FALSE){
	asc_assert(NULL != list);
	if (FALSE == TRUE_or_FALSE)
		list->flags &= ~gsf_EXPANDABLE;
	else
		list->flags |= gsf_EXPANDABLE;
}


VOIDPTR *gl_fetchaddr(CONST struct gl_list_t *list, unsigned long int pos){
	asc_assert(NULL != list);
	ASSERTRANGE(list,pos,"gl_fetchaddr");
	return (VOIDPTR *)&(list->data[pos-1]);
}


void gl_emptyrecycler(){
	int i;
#if LISTRECYCLERDEBUG
	unsigned long bytecount = 0;
	/* ... */
#endif
}

void gl_reportrecycler(FILE *fp){
	int i;
	unsigned long bytecount = 0;
#if !LISTRECYCLERDEBUG
	/* ... */
	FPRINTF(fp,"Total bytes:\t%lu\n",bytecount);
#endif
}