
Contents of /trunk/base/generic/general/list.c



Revision 122 - Mon Dec 19 06:12:40 2005 UTC by johnpye
File MIME type: text/x-csrc
File size: 34580 byte(s)
Refactoring all MAX, MIN, ABS calls to general/mathmacros.
Adding a GCC optimisation for these macros.
1 /*
2 * Implementation
3 * of List Module
4 * Tom Epperly
5 * Part of Ascend
6 * Version: $Revision: 1.3 $
7 * Version control file: $RCSfile: list.c,v $
8 * Date last modified: $Date: 1998/01/06 12:06:55 $
9 * Last modified by: $Author: ballan $
10 *
11 * This file is part of the Ascend Language Interpreter.
12 *
13 * Copyright (C) 1990, 1993, 1994 Thomas Guthrie Epperly
14 *
15 * The Ascend Language Interpreter is free software; you can redistribute
16 * it and/or modify it under the terms of the GNU General Public License
17 * as published by the Free Software Foundation; either version 2 of the
18 * License, or (at your option) any later version.
19 *
20 * The Ascend Language Interpreter is distributed in the hope that it will be
21 * useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
22 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
23 * General Public License for more details.
24 *
25 * You should have received a copy of the GNU General Public License
26 * along with the program; if not, write to the Free Software Foundation,
27 * Inc., 675 Mass Ave, Cambridge, MA 02139 USA. Check the file named
28 * COPYING.
29 *
30 */
31
32 #include <stdarg.h>
33 #include "utilities/ascConfig.h"
34 #include "utilities/ascPanic.h"
35 #include "utilities/ascMalloc.h"
36 #include "general/list.h"
37 #if LISTUSESPOOL
38 #include "general/pool.h"
39 #endif
40 #include "general/mathmacros.h"
41
42 #ifndef lint
43 static CONST char ListID[] = "$Id: list.c,v 1.3 1998/01/06 12:06:55 ballan Exp $";
44 #endif
45
46 #define address(a,b) (((unsigned long)(a) + ((b)*sizeof(VOIDPTR))-sizeof(VOIDPTR)))
47
48 #define GL_LENGTH(l) ((l)->length)
49 #define LOWCAPACITY 2
50 /* Shortest list we will allow. It must not be 0! */
51 static const unsigned long MIN_INCREMENT = 8;
52 /* Minimum number of elements to increase when expanding list. */
53
54 /*
55 * Be a bit more informative when dying on a list range error.
56 * No printf we know of returns -2, so the message always prints
57 * just before the assertion acts to handle the range error.
58 */
59 #define ASSERTRANGE(list,pos,n) \
60 asc_assert(((pos >= 1) && (pos <= GL_LENGTH(list))) || \
61 (PRINTF("%s called with illegal length %lu\n",n,pos) == -2));
62
63 #ifndef MOD_REALLOC
64 #define REALLOCDEBUG FALSE
65 #else
66 #define REALLOCDEBUG TRUE
67 #endif
68 /* if REALLOCDEBUG TRUE, then we use our own pricey version of realloc
69 * instead of the standard library version.
70 * This is to test for a bug in purify/os interactions.
71 */
72 /*
73 * We have modified gl_destroy to recycle lists of up to a certain size
74 * MAXRECYCLESIZE.
75 * The data structure for holding the recycled lists is a vector
76 * of pointers to gl_list_t. Item 1 in a recycled list is the pointer
77 * to the next recycled list, which may be NULL. This is a change
78 * from Tom's initial implementation which used a dense array to store
79 * pointers to lists.
80 *
81 * The number of recycled lists of a particular
82 * capacity i to be allowed is found in the AllowedContents[i].
83 *
84 * It is intended that RecycledContents[i] is the current count of
85 * lists of capacity i. It is a coincidence of implementation that
86 * when RecycledContents[i] <=0 RecycledList[i]==NULL.
87 *
88 * If LISTRECYCLERDEBUG,
89 * HighWaterMark[i] is the most lists of capacity i ever held in the bin at once.
90 * ListsDestroyed[i] is the number of lists of capacity i destroyed rather than
91 * recycled because the bin was already full.
91 */
92 #define LISTRECYCLERDEBUG 0
93 #define MAXRECYCLESIZE 500 /* the maximum list size to be recycled */
94 #define MAXRECYCLESMALLITEMS 300 /* the maximum number of each size < LARGEITEM */
95 #define MAXRECYCLELARGEITEMS 25 /* the maximum number of each large size */
96 #define MAXRECYCLESEARCH 2 /* how much larger than requested a recycled list we will search for */
97 /* for search performance, 0 and 2 are the best values, 2 slightly better. alphaosf2 */
98 #define LARGEITEM 21 /* smallest large item */
99 /* note: baa, 3/8/96 LARGEITEM and MAXRECYCLELARGEITEMS not in use yet */
100 static struct gl_list_t *RecycledList[MAXRECYCLESIZE+1];
101 static int RecycledContents[MAXRECYCLESIZE+1];
102 static int AllowedContents[MAXRECYCLESIZE+1];
103 /*
104 * It is assumed that these arrays are initialized to zero according to the
105 * rules given in "The C Programming Language", Brian W. Kernighan and Dennis
106 * M. Ritchie, second edition.
107 */
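/*
 * Illustrative note (editorial, not part of the original source): the recycle
 * bins are LIFO stacks threaded through data[0] of each parked list head.
 * The push and pop steps, as coded in gl_destroy and gl_create below, are:
 *
 *   push (gl_destroy):                    pop (gl_create):
 *     list->data[0] = RecycledList[c];      new = RecycledList[i];
 *     RecycledList[c] = list;               RecycledList[i] = (struct gl_list_t *)new->data[0];
 *     RecycledContents[c]++;                --RecycledContents[i];
 */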
108 #if LISTRECYCLERDEBUG
109 static int HighWaterMark[MAXRECYCLESIZE+1];
110 /* most ever in recycle list of size i */
111 static int ListsDestroyed[MAXRECYCLESIZE+1];
112 /* number of lists destroyed because Allowed was exceeded of size i */
113
114 static int ListsDestroyedTotal = 0;
115 /* number of lists destroyed because Allowed was exceeded all sizes */
116 static int NewListsUsed = 0;
117 /* calls to gl_create that required malloc */
118 static int RecycledListsUsed = 0;
119 /* calls to gl_create that used recycle */
120 #endif
121
122 /* If this function is not called, no recycling will take place
123 * because all the Alloweds will be at default of 0.
124 * This initializes the AllowedContents array, and nothing else.
125 * General limits are set based on MAXRECYCLE*ITEMS.
126 * Specific ones are done after and may be tuned.
127 */
128 void gl_init(void)
129 {
130 int i;
131 if (AllowedContents[0] &&
132 AllowedContents[0] != MAXRECYCLESMALLITEMS) {
133 PRINTF("gl_init recalled after data corrupted!\n");
134 return;
135 /* somebody called us twice, with intervening nonsense. punt */
136 }
137 for (i = LOWCAPACITY; i < LARGEITEM; i++) {
138 AllowedContents[i] = MAXRECYCLESMALLITEMS;
139 }
140 for (i = LARGEITEM; i <= MAXRECYCLESIZE; i++) {
141 AllowedContents[i] = MAXRECYCLELARGEITEMS;
142 }
143 #define LARGESYSTEM 1
144 #if LARGESYSTEM
145 AllowedContents[2] = 40000;
146 AllowedContents[3] = 20000;
147 AllowedContents[4] = 8000;
148 AllowedContents[5] = 2000;
149 AllowedContents[6] = 2000;
150 AllowedContents[7] = 5000;
151 AllowedContents[8] = 1000;
152 #else
153 AllowedContents[2] = 10000;
154 AllowedContents[3] = 5000;
155 AllowedContents[4] = 5000;
156 AllowedContents[7] = 5000;
157 #endif
158 AllowedContents[23] = 50;
159 }
160
161 #if LISTUSESPOOL
162 static pool_store_t g_list_head_pool = NULL;
163 /* global for our memory manager */
164 #define POOL_ALLOCHEAD (struct gl_list_t *)(pool_get_element(g_list_head_pool))
165 /* get a token. Token is the size of struct gl_list_t */
166 #define POOL_FREEHEAD(p) (pool_free_element(g_list_head_pool,((void *)p)))
167 /* return a struct gl_list_t */
168
169 #define GLP_LEN 10
170 #ifdef __alpha
171 #define GLP_WID 72
172 #else
173 #define GLP_WID 127
174 #endif
175 /* retune GLP_WID if the size of struct gl_list_t changes dramatically */
176 #define GLP_ELT_SIZE (sizeof(struct gl_list_t))
177 #define GLP_MORE_ELTS 10
178 /*
179 * Number of slots filled if more elements needed.
180 * So if the pool grows, it grows by GLP_MORE_ELTS*GLP_WID elements at a time.
181 */
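/*
 * Worked example (editorial note, not in the original): with GLP_WID 127 and
 * GLP_MORE_ELTS 10, each pool expansion adds 10*127 = 1270 list heads; on
 * __alpha (GLP_WID 72) it adds 720.
 */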
182 #define GLP_MORE_BARS 500
183 /* This is the number of pool bar slots to add during expansion.
184 not all the slots will be filled immediately. */
185 #else
186
187 /* we default back to malloc/free if LISTUSESPOOL == FALSE */
188 #define POOL_ALLOCHEAD ((struct gl_list_t*)ascmalloc(sizeof(struct gl_list_t)))
189 #define POOL_FREEHEAD(p) ascfree((char *)(p))
190
191 #endif
192
193 /* This function is called at compiler startup; gl_destroy_pool is called at shutdown. */
194 void gl_init_pool(void) {
195 #if LISTUSESPOOL
196 if (g_list_head_pool != NULL) {
197 Asc_Panic(2, NULL, "ERROR: gl_init_pool called twice.\n");
198 }
199 g_list_head_pool = pool_create_store(GLP_LEN, GLP_WID, GLP_ELT_SIZE,
200 GLP_MORE_ELTS, GLP_MORE_BARS);
201 if (g_list_head_pool == NULL) {
202 Asc_Panic(2, NULL, "ERROR: gl_init_pool unable to allocate pool.\n");
203 }
204 #else
205 PRINTF("list.[ch] built without pooling of overheads\n");
206 #endif
207 }
208
209 void gl_destroy_pool(void) {
210 #if LISTUSESPOOL
211 if (g_list_head_pool==NULL) return;
212 gl_emptyrecycler(); /* deallocate data in recycled lists, zero RecycledContents[] */
213 pool_clear_store(g_list_head_pool);
214 pool_destroy_store(g_list_head_pool);
215 g_list_head_pool = NULL;
216 #else
217 PRINTF("list.[ch] built without pooling of overheads\n");
218 #endif
219 }
220
221
222 int gl_pool_initialized(void)
223 {
224 #if LISTUSESPOOL
225 return (g_list_head_pool == NULL) ? FALSE : TRUE;
226 #else
227 PRINTF("list.[ch] built without pooling of overheads\n");
228 return TRUE;
229 #endif
230 }
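/*
 * Typical lifecycle of this module, as an illustrative sketch drawn from the
 * functions in this file (the client code itself is hypothetical):
 *
 *   gl_init_pool();     pool of list heads, only meaningful if LISTUSESPOOL
 *   gl_init();          enables recycling by filling AllowedContents[]
 *   ...                 use gl_create, gl_append_ptr, gl_destroy, etc.
 *   gl_destroy_pool();  empties the recycler and frees the pool
 */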
231
232
233 void gl_report_pool(FILE *f)
234 {
235 #if LISTUSESPOOL
236 if (g_list_head_pool==NULL)
237 FPRINTF(f,"ListHeadPool is empty\n");
238 FPRINTF(f,"ListHeadPool ");
239 pool_print_store(f,g_list_head_pool,0);
240 #else
241 FPRINTF(f,"list.[ch] built without pooling of overheads\n");
242 #endif
243 }
244
245 /*
246 * The argument is a guess at the capacity of the list. If the
247 * capacity later needs to be expanded, it will be. It is good
248 * for the guess to be close, though.
249 */
250 struct gl_list_t *gl_create(unsigned long int capacity)
251 {
252 struct gl_list_t *new;
253 unsigned long i;
254 i = capacity = MAX((unsigned long)LOWCAPACITY,capacity);
255 while(i <= capacity+MAXRECYCLESEARCH &&
256 i <= MAXRECYCLESIZE){
257 asc_assert(RecycledContents[i] >= 0 &&
258 RecycledContents[i] <= AllowedContents[i]);
259 if (RecycledContents[i] > 0){
260 #if LISTRECYCLERDEBUG
261 RecycledListsUsed++;
262 #endif
263 new = RecycledList[i]; /* fetch last recycled list */
264 asc_assert(new != NULL);
265 RecycledList[i] = (struct gl_list_t *)new->data[0]; /* point to next */
266 --RecycledContents[i]; /* reduce list count for this size */
267 new->length = 0;
268 new->flags = (unsigned int)(gsf_SORTED | gsf_EXPANDABLE);
269 #ifndef NDEBUG
270 if (i>0) new->data[0]=NULL;
271 #endif
272 return new;
273 }
274 i++;
275 }
276 /*FPRINTF(ASCERR,"ABOUT TO ALLOCHEAD\n");*/
277 new=POOL_ALLOCHEAD;
278 /*FPRINTF(ASCERR,"GL_CREATE: COULDN'T ALLOC SIZE %d, new = %p\n",sizeof(struct gl_list_t),POOL_ALLOCHEAD);*/
279 if (new != NULL) {
280 new->length = 0;
281 #if LISTRECYCLERDEBUG
282 NewListsUsed++;
283 #endif
284 #ifndef NDEBUG
285 new->data = (VOIDPTR *)asccalloc((unsigned)capacity,sizeof(VOIDPTR));
286 #else
287 new->data = (VOIDPTR *)ascmalloc((unsigned)capacity*sizeof(VOIDPTR));
288 #endif
289 new->capacity = capacity;
290 new->flags = (unsigned int)(gsf_SORTED | gsf_EXPANDABLE);
291 return new;
292 }
293 else {
294 FPRINTF(ASCERR,"UNABLE TO ALLOCATE MEMORY FOR LIST\n");
295 return NULL;
296 }
297 }
298
299 void gl_free_and_destroy(struct gl_list_t *list)
300 {
301 unsigned long c;
302 if (list == NULL) return;
303 #if LISTUSESPOOL
304 AssertMemory(list);
305 #else
306 AssertAllocatedMemory(list,sizeof(struct gl_list_t));
307 #endif
308 for(c=0;c<GL_LENGTH(list);c++) {
309 AssertContainedMemory(list->data[c],0);
310 ascfree(list->data[c]);
311 }
312 #ifndef NDEBUG
313 ascbzero(list->data,sizeof(VOIDPTR)*list->capacity);
314 #endif
315 c = list->capacity; /* gonna use it a lot */
316 if (c <= MAXRECYCLESIZE && RecycledContents[c] < AllowedContents[c]) {
317 asc_assert(RecycledContents[c] >= 0);
318 /* push list into LIFO list of lists, and bump the count. */
319 list->data[0] = RecycledList[c];
320 RecycledList[c] = list;
321 RecycledContents[c]++;
322 #if LISTRECYCLERDEBUG
323 if (RecycledContents[c] > HighWaterMark[c]) {
324 HighWaterMark[c] = RecycledContents[c];
325 }
326 #endif
327 } else{
328 #if LISTRECYCLERDEBUG
329 if (c<=MAXRECYCLESIZE) {
330 ListsDestroyed[c]++;
331 }
332 #endif
333 ascfree(list->data);
334 list->data = NULL;
335 list->capacity = list->length = 0;
336 POOL_FREEHEAD(list);
337 }
338 }
339
340 void gl_destroy(struct gl_list_t *list)
341 {
342 unsigned long c;
343 if (list == NULL) return;
344 #if LISTUSESPOOL
345 AssertMemory(list);
346 #else
347 AssertAllocatedMemory(list,sizeof(struct gl_list_t));
348 #endif
349 c = list->capacity; /* gonna use it a lot */
350 if (c <= MAXRECYCLESIZE && RecycledContents[c] < AllowedContents[c]) {
351 asc_assert(RecycledContents[c] >= 0);
352 /* push list into LIFO list of lists, and bump the count. */
353 list->data[0] = RecycledList[c];
354 RecycledList[c] = list;
355 RecycledContents[c]++;
356 #if LISTRECYCLERDEBUG
357 if (RecycledContents[c] > HighWaterMark[c]) {
358 HighWaterMark[c] = RecycledContents[c];
359 }
360 #endif
361 } else {
362 #if LISTRECYCLERDEBUG
363 if (c<=MAXRECYCLESIZE) {
364 ListsDestroyed[c]++;
365 }
366 #endif
367 #ifndef NDEBUG
368 if (list->capacity>0) list->data[0]=NULL;
369 #endif
370 ascfree(list->data);
371 list->data = NULL;
372 list->capacity = list->length = 0;
373 POOL_FREEHEAD(list);
374 }
375 }
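/*
 * Editorial note (not in the original): gl_free_and_destroy above calls
 * ascfree() on every stored pointer before recycling or freeing the list
 * head, whereas gl_destroy leaves the stored pointers untouched. Use the
 * former only for lists that own heap memory allocated with ascmalloc or
 * asccalloc.
 */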
376
377 VOIDPTR gl_fetchF(CONST struct gl_list_t *list, unsigned long int pos)
378 {
379 asc_assert(NULL != list);
380 ASSERTRANGE(list,pos,"gl_fetchF");
381 return list->data[pos-1];
382 }
383
384 #define SORTED_OFF(l) (l)->flags &= (~gsf_SORTED)
385 #define EXPAND_OFF(l) (l)->flags &= (~gsf_EXPANDABLE)
386
387 /* for internal use only, where we can verify that STUFF is correct
388 * and it is SOMEONE else's job to maintain the integrity of
389 * flags.sorted and list->length.
390 */
391 #ifdef NDEBUG
392 #define GL_STORE(list,pos,ptr) ((list)->data[(pos)-1] = (ptr))
393 #else
394 #define GL_STORE(list,pos,ptr) gl_store((list),(pos),(ptr))
395 #endif
396 void gl_store(struct gl_list_t *list, unsigned long int pos, VOIDPTR ptr)
397 {
398 asc_assert(NULL != list);
399 if (GL_LENGTH(list)>1) SORTED_OFF(list);
400 ASSERTRANGE(list,pos,"gl_store");
401 list->data[pos-1] = ptr;
402 }
403
404 #if REALLOCDEBUG
405 /* this we know to not leak if MOD_REALLOC is defined */
406 #define DATAREALLOC(l,incr) \
407 ascreallocPURE((char *)(l)->data, \
408 (size_t)(((l)->capacity-(incr))*sizeof(VOIDPTR)), \
409 (size_t)((l)->capacity*sizeof(VOIDPTR)))
410 #else
411 /* this we think might leak under hpux, sunos4. more like purify lies. */
412 #define DATAREALLOC(l,incr) \
413 ascrealloc((l)->data,(unsigned)((l)->capacity*sizeof(VOIDPTR)))
414 #endif
415 /* actually, when all is said and done, a static structure in solver/analyze.c
416 * was being left out of the cleanup process at shutdown.
417 */
418
419 static void gl_expand_list(struct gl_list_t *list)
420 {
421 VOIDPTR *tmp;
422 unsigned long increment;
423 increment = (list->capacity*50)/100; /* 10% is too small. try 50% baa */
424 increment = MAX(MIN_INCREMENT,increment);
425 list->capacity += increment;
426 tmp = (VOIDPTR *) DATAREALLOC(list,increment);
427
428 if (tmp==NULL) {
429 PRINTF("gl_expand_list: memory allocation failed\n");
430 list->capacity -= increment;
431 } else {
432 list->data = tmp;
433 }
434 }
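/*
 * Editorial note (not in the original): the growth policy above is "grow by
 * at least 50%, and never by less than MIN_INCREMENT (8)". Starting from
 * LOWCAPACITY, a capacity therefore grows roughly 2 -> 10 -> 18 -> 27 -> 40 ...
 */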
435
436 /* changes only the capacity of the list, and possibly the data pointer */
437 static void gl_expand_list_by(struct gl_list_t *list,unsigned long addlen)
438 {
439 if (!addlen) return;
440 addlen = MAX(MIN_INCREMENT,addlen); /* expand by at least 8 */
441 list->capacity += addlen;
442 list->data = (VOIDPTR *)DATAREALLOC(list,addlen);
443
444 if (list->data==NULL) PRINTF("gl_expand_list_by: memory allocation failed\n");
445 asc_assert(list->data!=NULL);
446 }
447
448 void gl_append_ptr(struct gl_list_t *list, VOIDPTR ptr)
449 {
450 asc_assert((NULL != list) && (0 != gl_expandable(list)));
451 if (list->length > 0) SORTED_OFF(list);
452 if (++(list->length) > list->capacity) /* expand list capacity*/
453 gl_expand_list(list);
454 list->data[list->length-1] = ptr;
455 }
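/*
 * Minimal usage sketch (illustrative only; the list 'names' and the strings
 * stored in it are hypothetical):
 *
 *   struct gl_list_t *names = gl_create(10L);
 *   gl_append_ptr(names, (VOIDPTR)"epperly");
 *   gl_append_ptr(names, (VOIDPTR)"westerberg");
 *   PRINTF("first = %s, length = %lu\n",
 *          (char *)gl_fetch(names,1), gl_length(names));
 *   gl_destroy(names);
 *
 * gl_destroy does not free the stored pointers; see gl_free_and_destroy.
 */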
456
457 void gl_append_list(struct gl_list_t *extendlist, struct gl_list_t *list)
458 {
459 register unsigned long c,len,oldlen,newlen;
460
461 asc_assert((NULL != extendlist) &&
462 (0 != gl_expandable(extendlist)) &&
463 (NULL != list));
464 if (extendlist->length >0 || list->length > 0) {
465 SORTED_OFF(extendlist);
466 }
467 newlen = extendlist->length + list->length;
468 if (newlen > extendlist->capacity) {
469 gl_expand_list_by(extendlist,newlen - extendlist->capacity);
470 }
471 len = list->length;
472 oldlen = extendlist->length;
473 extendlist->length += len;
474 for (c=0;c<len;c++) {
475 extendlist->data[oldlen] = list->data[c];
476 oldlen++;
477 }
478 }
479
480 void gl_fast_append_ptr(struct gl_list_t *list, VOIDPTR ptr)
481 {
482 asc_assert(NULL != list);
483 SORTED_OFF(list);
484 list->data[list->length] = ptr;
485 ++(list->length);
486 }
487
488 unsigned long gl_safe_length(CONST struct gl_list_t *list)
489 {
490 if (list !=NULL) {
491 return list->length;
492 } else {
493 return 0L;
494 }
495 }
496 unsigned long gl_lengthF(CONST struct gl_list_t *list)
497 {
498 asc_assert(NULL != list);
499 return list->length;
500 }
501
502 unsigned long gl_capacity(CONST struct gl_list_t *list)
503 {
504 if (list==NULL) return 0;
505 return list->capacity;
506 }
507
508
509 int gl_sorted(CONST struct gl_list_t *list)
510 {
511 asc_assert(NULL != list);
512 return (int)(list->flags & gsf_SORTED);
513 }
514
515 #if LISTIMPLEMENTED
516 /* I believe this function is ok */
517 static void *gl_choose_ptrpivot(struct gl_list_t *list,
518 unsigned long int lower,
519 unsigned long int upper)
520 /*********************************************************************\
521 * This function will choose a pivot. It can actually return any
522 * number between and including upper and lower. However, the goal
523 * is to return a pivot that will not cause Quick Sort to have
524 * O(n^2) behavior. This returns the value rather than the address
525 * of the pivot.
526 *
527 * The heuristic I am choosing is to pick the middle of three test
528 * sites which are upper,lower, and (upper+lower)/2.
529 \*********************************************************************/
530 {
531 unsigned long middle;
532 middle = (upper+lower)/2;
533 if (gl_fetch(list,middle) > gl_fetch(list,lower)) {
534 /* middle > lower */
535 if (gl_fetch(list,upper) > gl_fetch(list,middle)) {
536 /* middle > lower && upper > middle */
537 return gl_fetch(list,middle);
538 }
539 else {
540 /* middle > lower && middle >= upper */
541 if (gl_fetch(list,upper) > gl_fetch(list,lower)) {
542 /* middle > lower && middle >= upper && upper > lower */
543 return gl_fetch(list,upper);
544 }
545 else {
546 /* middle > lower && middle >= upper && lower >= upper */
547 return gl_fetch(list,lower);
548 }
549 }
550 }
551 else {
552 /* lower >= middle */
553 if (gl_fetch(list,upper) > gl_fetch(list,lower)) {
554 /* lower >= middle && upper > lower */
555 return gl_fetch(list,lower);
556 }
557 else {
558 /* lower >= middle && lower >= upper */
559 if (gl_fetch(list,middle) > gl_fetch(list,upper)) {
560 /* lower >= middle && lower >= upper && middle > upper */
561 return gl_fetch(list,middle);
562 }
563 else {
564 /* lower >= middle && lower >= upper && upper >= middle */
565 return gl_fetch(list,upper);
566 }
567 }
568 }
569 }
570 #endif
571
572 static
573 unsigned long gl_choose_pivot(struct gl_list_t *list,
574 unsigned long int lower,
575 unsigned long int upper,
576 CmpFunc func)
577 /*
578 * This function will choose a pivot. It can actually return any
579 * number between and including upper and lower. However, the goal
580 * is to return a pivot that will not cause Quick Sort to have
581 * O(n^2) behavior.
582 *
583 * The heuristic I am choosing is to pick the middle of three test
584 * sites which are upper,lower, and (upper+lower)/2.
585 */
586 {
587 unsigned long middle;
588 middle = (upper+lower)/2;
589 if ((*func)(gl_fetch(list,middle),gl_fetch(list,lower)) > 0) {
590 /* middle > lower */
591 if ((*func)(gl_fetch(list,upper),gl_fetch(list,middle)) > 0) {
592 /* middle > lower && upper > middle */
593 return middle;
594 }
595 else {
596 /* middle > lower && middle >= upper */
597 if ((*func)(gl_fetch(list,upper),gl_fetch(list,lower)) > 0) {
598 /* middle > lower && middle >= upper && upper > lower */
599 return upper;
600 }
601 else {
602 /* middle > lower && middle >= upper && lower >= upper */
603 return lower;
604 }
605 }
606 }
607 else {
608 /* lower >= middle */
609 if ((*func)(gl_fetch(list,upper),gl_fetch(list,lower)) > 0) {
610 /* lower >= middle && upper > lower */
611 return lower;
612 }
613 else {
614 /* lower >= middle && lower >= upper */
615 if ((*func)(gl_fetch(list,middle),gl_fetch(list,upper)) > 0) {
616 /* lower >= middle && lower >= upper && middle > upper */
617 return middle;
618 }
619 else {
620 /* lower >= middle && lower >= upper && upper >= middle */
621 return upper;
622 }
623 }
624 }
625 }
626
627 /* do not export. makes rosy assumptions. callers of this function
628 * are responsible for making sure list.sorted is set appropriately.
629 */
630 static void gl_swap(struct gl_list_t *list,
631 unsigned long int i, unsigned long int j)
632 {
633 VOIDPTR temp;
634 temp = gl_fetch(list,i);
635 GL_STORE(list,i,gl_fetch(list,j));
636 GL_STORE(list,j,temp);
637 }
638
639 #if LISTIMPLEMENTED
640 /* this function is probably ok */
641 void gl_upsort(struct gl_list_t *list,
642 unsigned long int lower,
643 unsigned long int upper)
644 {
645 register unsigned long i,j;
646 VOIDPTR pivot_element;
647 j = upper;
648 i = lower;
649 pivot_element = gl_choose_ptrpivot(list,lower,upper);
650 do {
651 while (pivot_element > gl_fetch(list,i)) i += 1;
652 while (gl_fetch(list,j) > pivot_element) j -= 1;
653 if (i <= j) {
654 register VOIDPTR temp;
655 temp = gl_fetch(list,i);
656 GL_STORE(list,i,gl_fetch(list,j));
657 GL_STORE(list,j,temp);
658 i++; j--;
659 }
660 } while(i <= j);
661 if (lower < j) gl_upsort(list,lower,j);
662 if (i < upper) gl_upsort(list,i,upper);
663 }
664
665 /* this function is not ok */
666 void gl_downsort(struct gl_list_t *list,
667 unsigned long int lower,
668 unsigned long int upper)
669 {
670 unsigned long pivot,i,j;
671 VOIDPTR pivot_element;
672 j = upper;
673 i = lower;
674 pivot_element = gl_choose_ptrpivot(list,lower,upper);
675 do {
676 while ((*func)(pivot_element,gl_fetch(list,i))>0) i += 1;
677 while ((*func)(gl_fetch(list,j),pivot_element)>0) j -= 1;
678 if (i <= j) {
679 gl_swap(list,i++,j--);
680 }
681 } while(i <= j);
682 if (lower < j) gl_downsort(list,lower,j);
683 if (i<upper) gl_downsort(list,i,upper);
684 }
685 #endif
686
687 static
688 void gl_qsort(struct gl_list_t *list,
689 unsigned long int lower,
690 unsigned long int upper,
691 CmpFunc func)
692 {
693 unsigned long pivot,i,j;
694 VOIDPTR pivot_element;
695 j = upper;
696 i = lower;
697 pivot = gl_choose_pivot(list,lower,upper,func);
698 pivot_element = gl_fetch(list,pivot);
699 do {
700 while ((*func)(pivot_element,gl_fetch(list,i))>0) i += 1;
701 while ((*func)(gl_fetch(list,j),pivot_element)>0) j -= 1;
702 if (i <= j) {
703 gl_swap(list,i++,j--);
704 }
705 } while(i <= j);
706 if (lower < j) gl_qsort(list,lower,j,func);
707 if (i<upper) gl_qsort(list,i,upper,func);
708 }
709
710 #if LISTIMPLEMENTED
711 /* ok once its children are ok */
712 void gl_ptr_sort(struct gl_list_t *list, int increasing)
713 {
714 if (GL_LENGTH(list) > 1) {
715 if (increasing) {
716 gl_upsort(list,1,GL_LENGTH(list));
717 } else {
718 gl_downsort(list,1,GL_LENGTH(list));
719 }
720 }
721 list->flags |= gsf_SORTED;
722 }
723 #endif
724
725 #if LISTIMPLEMENTED
726 /* broken */
727 void gl_insert_ptr_sorted(struct gl_list_t *,VOIDPTR,int)
728 {
729 }
730 #endif
731
732 void gl_sort(struct gl_list_t *list, CmpFunc func)
733 {
734 asc_assert((NULL != list) && (NULL != func));
735 if (GL_LENGTH(list) > 1) {
736 gl_qsort(list,1,GL_LENGTH(list),func);
737 }
738 list->flags |= gsf_SORTED;
739 }
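/*
 * Usage sketch for gl_sort and gl_search (illustrative; cmp_names is
 * hypothetical and the exact CmpFunc typedef lives in general/list.h).
 * The comparison function must return <0, 0 or >0, strcmp-style:
 *
 *   static int cmp_names(CONST VOIDPTR a, CONST VOIDPTR b)
 *   {
 *     return strcmp((CONST char *)a, (CONST char *)b);
 *   }
 *   ...
 *   gl_sort(names, (CmpFunc)cmp_names);
 *   pos = gl_search(names, (VOIDPTR)"epperly", (CmpFunc)cmp_names);
 */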
740
741 void gl_insert_sorted(struct gl_list_t *list,
742 VOIDPTR ptr, CmpFunc func)
743 {
744 int comparison;
745 register unsigned long lower,upper,search=0L;
746
747 asc_assert((NULL != list) &&
748 (0 != gl_expandable(list)) &&
749 (NULL != func));
750 if (list->flags & gsf_SORTED) {
751 if (GL_LENGTH(list)==0) {
752 gl_append_ptr(list,ptr);
753 list->flags |= gsf_SORTED;
754 return;
755 }
756 if (++list->length > list->capacity) /* expand list capacity */
757 gl_expand_list(list);
758 lower = 1;
759 upper = GL_LENGTH(list)-1;
760 while (lower<=upper) {
761 search = ( lower + upper) >> 1; /* divide by two */
762 comparison = (*func)(gl_fetch(list,search),ptr);
763 if (comparison==0) {
764 /* make room */
765 for(lower=GL_LENGTH(list);lower>search;lower--)
766 GL_STORE(list,lower,gl_fetch(list,lower-1));
767 /* store it */
768 GL_STORE(list,search+1,ptr);
769 list->flags |= gsf_SORTED;
770 return;
771 }
772 if (comparison < 0) lower = search + 1;
773 else upper = search - 1;
774 }
775 if ((*func)(gl_fetch(list,search),ptr) > 0) {
776 for(lower=GL_LENGTH(list);lower>search;lower--) {
777 GL_STORE(list,lower,gl_fetch(list,lower-1));
778 }
779 GL_STORE(list,search,ptr);
780 }
781 else {
782 for(lower=GL_LENGTH(list);lower>(search+1);lower--) {
783 GL_STORE(list,lower,gl_fetch(list,lower-1));
784 }
785 GL_STORE(list,search+1,ptr);
786 }
787 list->flags |= gsf_SORTED;
788 }
789 else {
790 PRINTF(
791 "Warning gl_insert_sorted called on unsorted list.\nSorting list.\n");
792 gl_append_ptr(list,ptr);
793 gl_sort(list,func);
794 }
795 }
796
797 void gl_iterate(struct gl_list_t *list, void (*func) (VOIDPTR))
798 {
799 #ifdef NDEBUG
800 register unsigned long length,counter;
801 length = GL_LENGTH(list);
802 for (counter=0;counter<length;counter++)
803 (*func)(list->data[counter]);
804 #else
805 unsigned long length,counter;
806 asc_assert(NULL != list);
807 asc_assert(NULL != func);
808 length = GL_LENGTH(list);
809 for (counter=1;counter<=length;counter++)
810 (*func)(gl_fetch(list,counter));
811 #endif
812 }
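/*
 * Usage sketch for gl_iterate (illustrative; print_name is hypothetical):
 *
 *   static void print_name(VOIDPTR p)
 *   {
 *     PRINTF("%s\n", (char *)p);
 *   }
 *   ...
 *   gl_iterate(names, print_name);
 */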
813
814 /* this will probably need casts to compile, or an intermediate
815 * void * array variable.
816 * This function is on the critical path in the compiler, or
817 * should be, so it is highly tweaked here.
818 */
819 unsigned long gl_ptr_search(CONST struct gl_list_t *list,
820 CONST VOIDPTR match, int increasing)
821 {
822 #define SHORTSEARCH 5
823 /* if list is short, use linear instead of binary search.
824 * SHORTSEARCH is the minimum size for a binary search.
825 * SHORTSEARCH must be >=0
826 */
827 register unsigned long lower,upper,search;
828
829 asc_assert(NULL != list);
830 if (!GL_LENGTH(list)) return 0;
831 if ( (list->flags & gsf_SORTED)
832 #if (SHORTSEARCH > 0)
833 && (upper = GL_LENGTH(list)-1) > SHORTSEARCH
834 #endif
835 ) { /* use binary search */
836 register long comparison;
837 lower = 0;
838 #if (SHORTSEARCH <= 0)
839 upper = GL_LENGTH(list)-1;
840 #endif
841 if (increasing) {
842 while (lower <= upper) {
843 search = (lower+upper) / 2;
844 comparison =
845 ((char *)list->data[search]-(char *)match); /* pointer difference */
846 if (comparison==0) {
847 return (search + 1);
848 }
849 else if (comparison < 0) {
850 lower = search + 1;
851 }
852 else if (search > 0) {
853 upper = search - 1;
854 }
855 else {
856 return 0;
857 }
858 }
859 } else {
860 while (lower <= upper) {
861 search = (lower+upper) / 2;
862 comparison =
863 ((char *)match-(char *)list->data[search]); /* pointer difference */
864 if (comparison==0) {
865 return (search + 1);
866 }
867 else if (comparison < 0) {
868 lower = search + 1;
869 }
870 else if (search > 0) {
871 upper = search - 1;
872 }
873 else {
874 return 0;
875 }
876 }
877 }
878 }
879 else { /* use linear search */
880 upper = GL_LENGTH(list);
881 for(search=0; search < upper; search++) {
882 if (list->data[search]==match) return search+1;
883 }
884 }
885 return 0; /* search failed */
886 }
887
888 unsigned long gl_search(CONST struct gl_list_t *list,
889 CONST VOIDPTR match, CmpFunc func)
890 {
891 register unsigned long lower,upper,search;
892 #ifdef __alpha
893 long comparison;
894 #else
895 int comparison;
896 #endif
897 asc_assert((NULL != list) && (NULL != func));
898 if (list->flags & gsf_SORTED) { /* use binary search */
899 lower = 1;
900 upper = GL_LENGTH(list);
901 while (lower <= upper) {
902 search = (lower+upper) / 2;
903 comparison = (*func)(gl_fetch(list,search),match);
904 if (comparison==0) return search;
905 if (comparison < 0) lower = search + 1;
906 else upper = search -1;
907 }
908 }
909 else { /* use linear search */
910 upper = GL_LENGTH(list);
911 for(search=0; search < upper; search++) {
912 if ((*func)(list->data[search],match)==0) return search+1;
913 }
914 }
915 return 0; /* search failed */
916 }
917
918
919 unsigned long gl_search_reverse(CONST struct gl_list_t *list,
920 CONST VOIDPTR match, CmpFunc func)
921 {
922 register unsigned long search;
923
924 asc_assert((NULL != list) && (NULL != func));
925 if (list->flags & gsf_SORTED) { /* use binary search */
926 return gl_search(list, match, func);
927 } else { /* use linear search */
928 /* since search is unsigned, the FOR loop cannot go to
929 * zero, since 0-- is ULONG_MAX. Must do zero separately.
930 */
931 for( search = (GL_LENGTH(list)-1); search > 0; search-- ) {
932 if ((*func)(list->data[search],match)==0) return search+1;
933 }
934 search = 0;
935 if ((*func)(list->data[search],match)==0) return search+1;
936 }
937 return 0; /* search failed */
938 }
939
940
941 int gl_unique_list(CONST struct gl_list_t *list)
942 {
943 unsigned long i,j,len,lm1;
944 VOIDPTR e;
945 VOIDPTR *d;
946
947 if (list==NULL || GL_LENGTH(list)<2) {
948 return 1;
949 }
950 len = GL_LENGTH(list);
951 lm1 = len-1;
952 d = list->data;
953 for (i=0; i<lm1; i++) {
954 e = d[i];
955 for (j=i+1; j < len; j++) {
956 if (d[j]==e) {
957 return 0;
958 }
959 }
960 }
961 return 1;
962 }
963
964 int gl_empty(CONST struct gl_list_t *list)
965 {
966 asc_assert(NULL != list);
967 return(GL_LENGTH(list)==0);
968 }
969
970 void gl_delete(struct gl_list_t *list, unsigned long int pos, int dispose)
971 {
972 unsigned long c,length;
973 int sorted;
974 VOIDPTR ptr;
975
976 asc_assert(NULL != list);
977 /* should be ASSERTRANGE(list,pos,"gl_delete");
978 The check for (pos > 0) below is suspicious, though.
979 It suggests this function may be called with pos == 0. */
980 asc_assert(pos <= gl_length(list));
981
982 if (pos > 0) {
983 sorted = (int)(list->flags & gsf_SORTED);
984 if (dispose) {
985 ptr = gl_fetch(list,pos);
986 ascfree(ptr);
987 }
988 length = GL_LENGTH(list);
989 #ifdef NDEBUG
990 /* copy left (shift down) the remainder of the array. easy-eye version */
991 for (c = pos; c < length; c++) {
992 list->data[c-1] = list->data[c];
993 }
994 #else
995 /* shift the remainder of the array using the macros */
996 for (c=pos+1;c<=length;c++) {
997 GL_STORE(list,c-1,gl_fetch(list,c));
998 }
999 #endif
1000 list->length--;
1001 if (sorted) {
1002 list->flags |= gsf_SORTED;
1003 } else {
1004 SORTED_OFF(list);
1005 }
1006 }
1007 }
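/*
 * Editorial note (not in the original): positions are 1-based, so
 * gl_delete(names, 1, 0) removes the first element without freeing the data
 * it points to, while a nonzero third argument also calls ascfree() on the
 * element before shifting the rest of the list down.
 */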
1008
1009 void gl_reverse(struct gl_list_t *list)
1010 {
1011 VOIDPTR *tmpdata;
1012 unsigned long c,len;
1013
1014 if (list==NULL || list->length <2L) return;
1015 tmpdata = (VOIDPTR *)ascmalloc(list->capacity*sizeof(VOIDPTR));
1016 if (tmpdata==NULL) {
1017 Asc_Panic(2, NULL, "gl_reverse out of memory. Bye!\n");
1018 }
1019 len = list->length;
1020 for (c=0;c < len;c++) {
1021 tmpdata[len - (c+1)] = list->data[c];
1022 }
1023 ascfree(list->data);
1024 list->data = tmpdata;
1025 }
1026
1027 void gl_reset(struct gl_list_t *list)
1028 {
1029 asc_assert(NULL != list);
1030 list->flags = (unsigned int)(gsf_SORTED | gsf_EXPANDABLE);
1031 list->length = 0;
1032 }
1033
1034
1035 struct gl_list_t *gl_copy(CONST struct gl_list_t *list)
1036 {
1037 struct gl_list_t *new;
1038 unsigned long counter,length;
1039
1040 asc_assert(NULL != list);
1041
1042 length = GL_LENGTH(list);
1043 new = gl_create(length);
1044 for (counter = 1;counter <= length;counter++)
1045 gl_append_ptr(new,gl_fetch(list,counter));
1046 new->flags = list->flags;
1047 return new;
1048 }
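/*
 * Editorial note (not in the original): gl_copy is a shallow copy; the new
 * list holds the same pointers as the original, so calling
 * gl_free_and_destroy on both lists would free each element twice.
 */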
1049
1050 struct gl_list_t *gl_concat(CONST struct gl_list_t *list1,
1051 CONST struct gl_list_t *list2)
1052 {
1053 struct gl_list_t *new;
1054 unsigned long counter,length1,length2;
1055
1056 asc_assert ((NULL != list1) && (NULL != list2));
1057
1058 length1 = GL_LENGTH(list1);
1059 length2 = GL_LENGTH(list2);
1060 new = gl_create(length1+length2);
1061 for (counter = 1;counter<= length1; counter++) {
1062 gl_append_ptr(new,gl_fetch(list1,counter));
1063 }
1064 for (counter = 1;counter<= length2; counter++) {
1065 gl_append_ptr(new,gl_fetch(list2,counter));
1066 }
1067 return new;
1068 }
1069
1070 int gl_compare_ptrs(CONST struct gl_list_t *l1, CONST struct gl_list_t *l2)
1071 {
1072 unsigned long len,c;
1073 register unsigned long i1,i2;
1074 if (l1==NULL || l2 == NULL) {
1075 Asc_Panic(2,"gl_compare_ptrs","Called with NULL input");
1076 }
1077 len = MIN(GL_LENGTH(l1),GL_LENGTH(l2));
1078 for (c=0;c < len; c++) {
1079 i1 = (unsigned long)(l1->data[c]);
1080 i2 = (unsigned long)(l2->data[c]);
1081 if (i1<i2) {
1082 return -1;
1083 }
1084 if (i1>i2) {
1085 return 1;
1086 }
1087 }
1088 return (GL_LENGTH(l1) == GL_LENGTH(l2)) ?
1089 0 :
1090 (( GL_LENGTH(l2) > len) ? -1 : 1);
1091 }
1092
1093 void gl_set_sorted(struct gl_list_t *list, int TRUE_or_FALSE)
1094 {
1095 asc_assert(NULL != list);
1096 if (FALSE == TRUE_or_FALSE)
1097 list->flags &= ~gsf_SORTED;
1098 else
1099 list->flags |= gsf_SORTED;
1100 }
1101
1102 int gl_expandable(struct gl_list_t *list)
1103 {
1104 asc_assert(NULL != list);
1105 return (int)(list->flags & gsf_EXPANDABLE);
1106 }
1107
1108 void gl_set_expandable(struct gl_list_t *list, int TRUE_or_FALSE)
1109 {
1110 asc_assert(NULL != list);
1111 if (FALSE == TRUE_or_FALSE)
1112 list->flags &= ~gsf_EXPANDABLE;
1113 else
1114 list->flags |= gsf_EXPANDABLE;
1115 }
1116
1117
1118 VOIDPTR *gl_fetchaddr(CONST struct gl_list_t *list,
1119 unsigned long int pos)
1120 {
1121 asc_assert(NULL != list);
1122 ASSERTRANGE(list,pos,"gl_fetchaddr");
1123 return (VOIDPTR *)&(list->data[pos-1]);
1124 }
1125
1126
1127 void gl_emptyrecycler()
1128 {
1129 int i;
1130 #if LISTRECYCLERDEBUG
1131 unsigned long bytecount = 0;
1132 #endif
1133 struct gl_list_t *list;
1134
1135 #if LISTRECYCLERDEBUG
1136 PRINTF("LIST RECYCLER SUMMARY\n");
1137 PRINTF("=====================\n");
1138 PRINTF("New lists created : %d\n", NewListsUsed);
1139 PRINTF("Recycled lists used: %d\n", RecycledListsUsed);
1140 ListsDestroyedTotal = 0;
1141 for (i=0;i <=MAXRECYCLESIZE; i++) {
1142 ListsDestroyedTotal += ListsDestroyed[i];
1143 }
1144 PRINTF("Lists destroyed : %d\n", ListsDestroyedTotal);
1145 PRINTF("Size\tAllowed\tCurr.\tMax\tDestroyed\n");
1146 #endif
1147 for(i = 0 ; i <= MAXRECYCLESIZE ; i++){
1148 #if LISTRECYCLERDEBUG
1149 if (HighWaterMark[i] >0) {
1150 PRINTF("%d\t%d\t%d\t%d\t%d\n",i,AllowedContents[i], RecycledContents[i],
1151 HighWaterMark[i], ListsDestroyed[i]);
1152 }
1153 bytecount += sizeof(struct gl_list_t)*RecycledContents[i];
1154 bytecount += sizeof(void *)*i*RecycledContents[i];
1155 #endif
1156 while (RecycledContents[i] > 0){
1157 /* pop a list off the LIFO list */
1158 list = RecycledList[i];
1159 RecycledList[i] = (struct gl_list_t *)list->data[0];
1160 #ifndef NDEBUG
1161 list->data[0] = NULL;
1162 #endif
1163 --RecycledContents[i];
1164 ascfree(list->data);
1165 #ifndef NDEBUG
1166 list->data = NULL;
1167 list->capacity = list->length = 0;
1168 #endif
1169 POOL_FREEHEAD(list);
1170 }
1171 asc_assert(RecycledList[i]==NULL); /* someone forgot to empty the last one */
1172 }
1173 #if LISTRECYCLERDEBUG
1174 bytecount += MAXRECYCLESIZE*sizeof(void *);
1175 PRINTF("Total bytes:\t%lu\n",bytecount);
1176 #endif
1177 }
1178
1179 void gl_reportrecycler(FILE *fp)
1180 {
1181 int i;
1182 unsigned long bytecount = 0;
1183 #if !LISTRECYCLERDEBUG
1184 (void) fp;
1185 #endif
1186
1187 #if LISTRECYCLERDEBUG
1188 asc_assert(NULL != fp);
1189 FPRINTF(fp,"LIST RECYCLER SUMMARY\n");
1190 FPRINTF(fp,"=====================\n");
1191 FPRINTF(fp,"New lists created : %d\n", NewListsUsed);
1192 FPRINTF(fp,"Recycled lists used: %d\n", RecycledListsUsed);
1193 for (i=0;i <=MAXRECYCLESIZE; i++) {
1194 ListsDestroyedTotal += ListsDestroyed[i];
1195 }
1196 FPRINTF(fp,"Lists destroyed : %d\n", ListsDestroyedTotal);
1197 FPRINTF(fp,"Size\tAllowed\tCurr.\tMax\tDestroyed\n");
1198 #endif
1199
1200 for(i = 0 ; i <= MAXRECYCLESIZE ; i++){
1201 #if LISTRECYCLERDEBUG
1202 if (HighWaterMark[i] >0) {
1203 FPRINTF(fp,"%d\t%d\t%d\t%d\t%d\n",i,AllowedContents[i],
1204 RecycledContents[i], HighWaterMark[i], ListsDestroyed[i]);
1205 bytecount += sizeof(struct gl_list_t)*RecycledContents[i];
1206 bytecount += sizeof(void *)*i*RecycledContents[i];
1207 }
1208 #else
1209 bytecount += sizeof(struct gl_list_t)*RecycledContents[i];
1210 bytecount += sizeof(void *)*RecycledContents[i]*i;
1211 #endif
1212 }
1213 #if LISTRECYCLERDEBUG
1214 bytecount += MAXRECYCLESIZE*sizeof(void *);
1215 FPRINTF(fp,"Total bytes:\t%lu\n",bytecount);
1216 #endif
1217 }
