/*
 *  mtx: Ascend Sparse Matrix Package Reordering
 *  by Ben Allan
 *  Created: 5/31/96
 *  Version: $Revision: 1.10 $
 *  Version control file: $RCSfile: mtx_reorder.c,v $
 *  Date last modified: $Date: 1997/07/28 20:53:09 $
 *  Last modified by: $Author: ballan $
 *
 *  This file is part of the SLV solver.
 *
 *  Copyright (C) 1996 Benjamin Andrew Allan
 *
 *  The SLV solver is free software; you can redistribute
 *  it and/or modify it under the terms of the GNU General Public License as
 *  published by the Free Software Foundation; either version 2 of the
 *  License, or (at your option) any later version.
 *
 *  The SLV solver is distributed in the hope that it will be
 *  useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 *  General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License along with
 *  the program; if not, write to the Free Software Foundation, Inc., 675
 *  Mass Ave, Cambridge, MA 02139 USA.  Check the file named COPYING.
 *  COPYING is found in ../compiler.
 */

#include <math.h>
#include "utilities/ascConfig.h"
#include "utilities/ascMalloc.h"
#include "utilities/mem.h"
#include "solver/mtx.h"
#define __MTX_C_SEEN__
#include "solver/mtx_use_only.h"
#define R_DEBUG FALSE

/*
 * All I know for sure is that reordering shouldn't be the
 * job of a linear factorization method.
 * Note: all of the following is still in the 'outside mtx'
 * idiom which makes some things terribly inefficient.
 * As a result, it doesn't need to include the mtx_use_only.h.
 * For efficiency, this needs to be converted over to mtx
 * internal idiom.
 * On top of all that, this stuff really belongs in mtx_perms.[ch]
 * and not out by itself, imho.
 * BAA 5/96.
 *
 * bugs:
 * spk1 and tspk1 use that stupid ptr backup game to get arrays that
 * look bigger than they actually are. This needs to be replaced with
 * use of mtx's reusable buffer space.
 */
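
/*
 * Illustrative sketch only (not compiled): the "ptr backup game" mentioned
 * above means offsetting an allocation so the array can be indexed directly
 * by permuted indices in [low..high] rather than [0..high-low].  The helper
 * name below is hypothetical; mtx_tspk1_reorder does this inline with
 * colcount_array_origin further down in this file.
 */
#if 0
static int32 *backed_up_int32_array(int32 low, int32 high)
{
  /* Allocate high-low+1 entries, then back the pointer up by low so that
   * arr[low]..arr[high] are the valid slots.  arr[0..low-1] must never be
   * touched, and the un-backed-up origin pointer is what gets ascfree()'d.
   */
  int32 *origin = (int32 *)ascmalloc( (high-low+1)*sizeof(int32) );
  return NOTNULL(origin) ? origin - low : NULL;
}
#endif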
/***************************************************************************\
  Reordering functions for SPK1, and possibly for other schemes to be
  implemented later.
  The stuff here is almost, but not quite, black magic. Don't tinker with it.
  Once it is translated into mtx idiom, it will be black magic.
\***************************************************************************/

/*********************************
  begin of spk1 stuff
*********************************/
struct reorder_list {            /* List of rows/columns and their counts. */
  int32 ndx;
  int32 count;
  struct reorder_list *next;
};

struct reorder_vars {
  mtx_matrix_t mtx;
  mtx_region_t reg;              /* Current active region */
  int32 colhigh;                 /* Original value of reg.col.high */
  struct reorder_list *tlist;    /* Array of (enough) list elements */
  int32 *rowcount;               /* rowcount[reg.row.low .. reg.row.high] */
};

static void adjust_row_count(struct reorder_vars *vars,int32 removed_col)
/**
 ***  Adjusts the row counts to account for the (to be) removed column.
 **/
{
  mtx_coord_t nz;
  nz.col = removed_col;
  nz.row = mtx_FIRST;
  while( mtx_next_in_col(vars->mtx,&nz,&(vars->reg.row)),
         nz.row != mtx_LAST )
    --(vars->rowcount[nz.row]);
}

static void assign_row_and_col(struct reorder_vars *vars,
                               int32 row,
                               int32 col)
/**
 ***  Assigns the given row to the given column, moving the row and column
 ***  to the beginning of the active region and removing them (readjusting
 ***  the region).  The row counts are NOT adjusted.  If col == mtx_NONE,
 ***  then no column is assigned and the row is moved to the end of the
 ***  active block instead.  Otherwise, it is assumed that the column
 ***  is active.
 **/
{
  if( col == mtx_NONE ) {
    mtx_swap_rows(vars->mtx,row,vars->reg.row.high);
    vars->rowcount[row] = vars->rowcount[vars->reg.row.high];
    --(vars->reg.row.high);
  } else {
    mtx_swap_rows(vars->mtx,row,vars->reg.row.low);
    vars->rowcount[row] = vars->rowcount[vars->reg.row.low];
    mtx_swap_cols(vars->mtx,col,vars->reg.col.low);
    ++(vars->reg.row.low);
    ++(vars->reg.col.low);
  }
}

static void push_column_on_stack(struct reorder_vars *vars,int32 col)
/**
 ***  Pushes the given column onto the stack.  It is assumed that the
 ***  column is active.  Row counts are adjusted.
 **/
{
  adjust_row_count(vars,col);
  mtx_swap_cols(vars->mtx,col,vars->reg.col.high);
  --(vars->reg.col.high);
}

static int32 pop_column_from_stack(struct reorder_vars *vars)
/**
 ***  Pops the column on the "top" of the stack off of the stack and
 ***  returns the column index, where it now lies in the active region.
 ***  If the stack is empty, mtx_NONE is returned.  Row counts are NOT
 ***  adjusted (this together with a subsequent assignment of this column
 ***  ==> no row count adjustment necessary).
 **/
{
  if( vars->reg.col.high < vars->colhigh )
    return(++(vars->reg.col.high));
  else
    return( mtx_NONE );
}

static void assign_null_rows(struct reorder_vars *vars)
/**
 ***  Assigns empty rows, moving them to the assigned region.  It is
 ***  assumed that row counts are correct.  Columns are assigned off the
 ***  stack.
 **/
{
  int32 row;

  for( row = vars->reg.row.low ; row <= vars->reg.row.high ; ++row )
    if( vars->rowcount[row] == 0 )
      assign_row_and_col(vars , row , pop_column_from_stack(vars));
}

static void forward_triangularize(struct reorder_vars *vars)
/**
 ***  Forward triangularizes the region, assigning singleton rows with their
 ***  one and only incident column until there are no more.  The row counts
 ***  must be correct, and they are updated.
 **/
{
  boolean change;
  mtx_coord_t nz;

  do {
    change = FALSE;
    for( nz.row = vars->reg.row.low ;
         nz.row <= vars->reg.row.high && vars->rowcount[nz.row] != 1;
         ++nz.row ) ;
    if( nz.row <= vars->reg.row.high ) {
      /* found singleton row */
      nz.col = mtx_FIRST;   /* this is somehow coming back with nz.col -1 */
      mtx_next_in_row(vars->mtx,&nz,&(vars->reg.col));
      adjust_row_count(vars,nz.col);
      assign_row_and_col(vars,nz.row,nz.col);
      change = TRUE;
    }
  } while( change );
}

static int32 select_row(struct reorder_vars *vars)
/**
 ***  Selects a row and returns its index.  It is assumed that there is a
 ***  row.  Row counts must be correct.  vars->tlist will be used.
 **/
{
  int32 min_row_count;
  int32 max_col_count;
  int32 row;
  int32 i, nties=-2;   /* # elements currently defined in vars->tlist */
  int32 sum;
  mtx_coord_t nz;
  mtx_matrix_t mtx;
  mtx_range_t *colrng, *rowrng;

  /* Set to something > any possible value */
  min_row_count = vars->reg.col.high-vars->reg.col.low+2;
  for( row = vars->reg.row.low ; row <= vars->reg.row.high ; ++row )
    if( vars->rowcount[row] <= min_row_count ) {
      if( vars->rowcount[row] < min_row_count ) {
        min_row_count = vars->rowcount[row];
        nties = 0;
      }
      vars->tlist[nties++].ndx = row;
    }
  /**
   ***  vars->tlist[0..nties-1] is a list of row numbers which tie for
   ***  minimum row count.
   **/

  max_col_count = -1;   /* < any possible value */
  i = 0;
  mtx = vars->mtx;
  colrng=&(vars->reg.col);
  rowrng=&(vars->reg.row);
  while( i < nties ) {

    sum = 0;
    nz.row = vars->tlist[i].ndx;
    nz.col = mtx_FIRST;
    while(mtx_next_in_row(mtx,&nz,colrng),
          nz.col != mtx_LAST )
      sum += mtx_nonzeros_in_col(mtx,nz.col,rowrng);
    if( sum > max_col_count ) {
      max_col_count = sum;
      row = nz.row;
    }
    i++;
  }
  /**
   ***  Now row contains the row with the minimum row count, which has the
   ***  greatest total column count of incident columns among all rows with
   ***  the same (minimum) row count.  Select it.
   **/
  return(row);
}
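
/*
 * Worked example (hypothetical numbers) of the tie-breaking rule above:
 * suppose rows 4 and 9 are tied at the minimum row count of 2.  select_row
 * sums, for each tied row, mtx_nonzeros_in_col over that row's incident
 * columns within the active region.  If row 4's two columns hold 5 and 3
 * active nonzeros (sum 8) while row 9's hold 4 and 2 (sum 6), row 4 wins.
 */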

static void mtx_spk1_reorder(struct reorder_vars *vars)
/**
 ***  Reorders the assigned matrix vars->mtx within the specified bounding
 ***  block region vars->reg.  The region is split into 6 subregions during
 ***  reordering:  the rows are divided in two, and the columns divided in
 ***  three.  Initially everything is in the active subregion.  Ultimately,
 ***  everything will be assigned.
 ***
 ***          <-- assigned -->|<-- active-->|<-- on stack -->|
 ***     ----+----------------+-------------+----------------+
 ***      a  |                |             |                |
 ***      s  |                |             |                |
 ***      s  |                |             |                |
 ***      i  |    (SQUARE)    |             |                |
 ***      g  |                |             |                |
 ***      n  |                |             |                |
 ***      e  |                |             |                |
 ***      d  |                |             |                |
 ***     ----+----------------+-------------+----------------+
 ***      a  |                |             |                |
 ***      c  |                |   ACTIVE    |                |
 ***      t  |                |   REGION    |                |
 ***      i  |                |             |                |
 ***      v  |                |             |                |
 ***      e  |                |             |                |
 ***     ----+----------------+-------------+----------------+
 ***
 ***  The algorithm is roughly as follows:
 ***    (1) select a row (by some criterion).
 ***    (2) push columns incident on that row onto the stack in decreasing
 ***        order of their length.
 ***    (3) pop first column off the stack and assign it to the selected
 ***        row.
 ***    (4) forward-triangularize (assign singleton rows with their one
 ***        and only incident column, until no longer possible).
 ***
 ***    (1)-(4) should be repeated until the active subregion becomes empty.
 ***
 ***  Everything above was written as though the entire matrix is
 ***  involved.  In reality, only the relevant square region is involved.
 **/
{
  int32 row, size;
  int32 *rowcount_array_origin;
  mtx_matrix_t mtx;

  size = MAX(vars->reg.row.high,vars->reg.col.high) + 1;
  vars->tlist = size > 0 ? (struct reorder_list *)
    ascmalloc( size*sizeof(struct reorder_list) ) : NULL;
  vars->rowcount = rowcount_array_origin = size > 0 ? (int32 *)
    ascmalloc( size*sizeof(int32) ) : NULL;
  mtx = vars->mtx;

  vars->colhigh = vars->reg.col.high;
  /* Establish row counts */
  for( row = vars->reg.row.low ; row <= vars->reg.row.high ; ++row )
    vars->rowcount[row] =
      mtx_nonzeros_in_row(mtx,row,&(vars->reg.col));

  while(TRUE) {
    struct reorder_list *head;
    int32 nelts;   /* # elements "allocated" from vars->tlist */
    mtx_coord_t nz;

    forward_triangularize(vars);
    assign_null_rows(vars);
    if( vars->reg.row.low>vars->reg.row.high ||
        vars->reg.col.low>vars->reg.col.high ) {
      /* Active region is now empty, done */
      if( NOTNULL(vars->tlist) )
        ascfree( vars->tlist );
      if( NOTNULL(rowcount_array_origin) )
        ascfree( rowcount_array_origin );
      return;
    }

    head = NULL;
    nelts = 0;
    nz.row = select_row(vars);
    nz.col = mtx_FIRST;
    while(mtx_next_in_row(mtx,&nz,&(vars->reg.col)),
          nz.col != mtx_LAST ) {
      struct reorder_list **q,*p;

      p = &(vars->tlist[nelts++]);
      p->ndx = mtx_col_to_org(mtx,nz.col);
      p->count = mtx_nonzeros_in_col(mtx,nz.col,&(vars->reg.row));
      for( q = &head; *q && (*q)->count > p->count; q = &((*q)->next) );
      p->next = *q;
      *q = p;
    }
    /**
     ***  We now have a list of columns which intersect the selected row.
     ***  The list is in order of decreasing column count.
     **/

    /* Push incident columns on stack */
    for( ; NOTNULL(head) ; head = head->next )
      push_column_on_stack(vars,mtx_org_to_col(mtx,head->ndx));

    /* Pop column off stack and assign to selected row */
    assign_row_and_col(vars , nz.row , pop_column_from_stack(vars));
  }
  /* Not reached. */
}

/*********************************
  end of spk1 stuff
*********************************/
/*********************************
  begin of tspk1 stuff
*********************************/
struct creorder_list {           /* List of columns/rows and their counts. */
  int32 ndx;
  int32 count;
  struct creorder_list *next;
};

struct creorder_vars {
  mtx_matrix_t mtx;
  mtx_region_t reg;              /* Current active region */
  int32 rowhigh;                 /* Original value of reg.row.high */
  struct creorder_list *tlist;   /* Array of (enough) list elements */
  int32 *colcount;               /* colcount[reg.col.low .. reg.col.high] */
};

static void adjust_col_count(struct creorder_vars *vars,int32 removed_row)
/**
 ***  Adjusts the column counts to account for the (to be) removed row.
 **/
{
  mtx_coord_t nz;
  nz.row = removed_row;
  nz.col = mtx_FIRST;
  while( mtx_next_in_row(vars->mtx,&nz,&(vars->reg.col)),
         nz.col != mtx_LAST )
    --(vars->colcount[nz.col]);
}

static void assign_col_and_row(struct creorder_vars *vars,
                               int32 col,
                               int32 row)
/**
 ***  Assigns the given row to the given column, moving the row and column
 ***  to the beginning of the active region and removing them (readjusting
 ***  the region).  The col counts are NOT adjusted.  If row == mtx_NONE,
 ***  then no row is assigned and the col is moved to the end of the
 ***  active block instead.  Otherwise, it is assumed that the row
 ***  is active.
 **/
{
  if( row == mtx_NONE ) {
    mtx_swap_cols(vars->mtx,col,vars->reg.col.high);
    vars->colcount[col] = vars->colcount[vars->reg.col.high];
    --(vars->reg.col.high);
  } else {
    mtx_swap_cols(vars->mtx,col,vars->reg.col.low);
    vars->colcount[col] = vars->colcount[vars->reg.col.low];
    mtx_swap_rows(vars->mtx,row,vars->reg.row.low);
    ++(vars->reg.col.low);
    ++(vars->reg.row.low);
  }
}

static void push_row_on_stack(struct creorder_vars *vars,int32 row)
/**
 ***  Pushes the given row onto the stack.  It is assumed that the
 ***  row is active.  Col counts are adjusted.
 **/
{
  adjust_col_count(vars,row);
  mtx_swap_rows(vars->mtx,row,vars->reg.row.high);
  --(vars->reg.row.high);
}

static int32 pop_row_from_stack(struct creorder_vars *vars)
/**
 ***  Pops the row on the "top" of the stack off of the stack and
 ***  returns the row index, where it now lies in the active region.
 ***  If the stack is empty, mtx_NONE is returned.  Col counts are NOT
 ***  adjusted (this together with a subsequent assignment of this row
 ***  ==> no col count adjustment necessary).
 **/
{
  if( vars->reg.row.high < vars->rowhigh )
    return(++(vars->reg.row.high));
  else
    return( mtx_NONE );
}

static void assign_null_cols(struct creorder_vars *vars)
/**
 ***  Assigns empty cols, moving them to the assigned region.  It is
 ***  assumed that col counts are correct.  Rows are assigned off the
 ***  stack.
 **/
{
  int32 col;

  for( col = vars->reg.col.low ; col <= vars->reg.col.high ; ++col )
    if( vars->colcount[col] == 0 )
      assign_col_and_row(vars , col , pop_row_from_stack(vars));
}

static void cforward_triangularize(struct creorder_vars *vars)
/**
 ***  Forward triangularizes the region, assigning singleton columns with their
 ***  one and only incident row until there are no more.  The column counts
 ***  must be correct, and they are updated.
 **/
{
  boolean change;

  do {
    mtx_coord_t nz;
    change = FALSE;
    for( nz.col = vars->reg.col.low ;
         nz.col <= vars->reg.col.high && vars->colcount[nz.col] != 1;
         ++nz.col ) ;
    if( nz.col <= vars->reg.col.high ) {
      /* found singleton col */
      nz.row = mtx_FIRST;
      mtx_next_in_col(vars->mtx,&nz,&(vars->reg.row));
      adjust_col_count(vars,nz.row);
      assign_col_and_row(vars,nz.col,nz.row);
      change = TRUE;
    }
  } while( change );
}

static int32 select_col(struct creorder_vars *vars)
/**
 ***  Selects a col and returns its index.  It is assumed that there is a
 ***  col.  Col counts must be correct.  vars->tlist will be used.
 **/
{
  int32 min_col_count;
  int32 max_row_count;
  int32 col;
  int32 i, nties=-2;   /* # elements currently defined in vars->tlist */
  int32 sum;
  mtx_coord_t nz;
  mtx_matrix_t mtx;
  mtx_range_t *colrng, *rowrng;

  /* Set to something > any possible value */
  min_col_count = vars->reg.row.high-vars->reg.row.low+2;
  for( col = vars->reg.col.low ; col <= vars->reg.col.high ; ++col )
    if( vars->colcount[col] <= min_col_count ) {
      if( vars->colcount[col] < min_col_count ) {
        min_col_count = vars->colcount[col];
        nties = 0;
      }
      vars->tlist[nties++].ndx = col;
    }
  /**
   ***  vars->tlist[0..nties-1] is a list of col numbers which tie for
   ***  minimum col count.
   **/

  max_row_count = -1;   /* < any possible value */
  i = 0;
  mtx = vars->mtx;
  rowrng=&(vars->reg.row);
  colrng=&(vars->reg.col);
  while( i < nties ) {

    sum = 0;
    nz.row = mtx_FIRST;
    nz.col = vars->tlist[i].ndx;
    while( mtx_next_in_col(mtx,&nz,rowrng),
           nz.row != mtx_LAST )
      sum += mtx_nonzeros_in_row(mtx,nz.row,colrng);
    if( sum > max_row_count ) {
      max_row_count = sum;
      col = nz.col;
    }
    i++;
  }
  /**
   ***  Now col contains the col with the minimum col count, which has the
   ***  greatest total row count of incident rows among all cols with
   ***  the same (minimum) col count.  Select it.
   **/
  return(col);
}

static void mtx_tspk1_reorder(struct creorder_vars *vars)
/**
 ***  Transpose the picture and explanation that follows:
 ***  Reorders the assigned matrix vars->mtx within the specified bounding
 ***  block region vars->reg.  The region is split into 6 subregions during
 ***  reordering:  the rows are divided in two, and the columns divided in
 ***  three.  Initially everything is in the active subregion.  Ultimately,
 ***  everything will be assigned.
 ***
 ***          <-- assigned -->|<-- active-->|<-- on stack -->|
 ***     ----+----------------+-------------+----------------+
 ***      a  |                |             |                |
 ***      s  |                |             |                |
 ***      s  |                |             |                |
 ***      i  |    (SQUARE)    |             |                |
 ***      g  |                |             |                |
 ***      n  |                |             |                |
 ***      e  |                |             |                |
 ***      d  |                |             |                |
 ***     ----+----------------+-------------+----------------+
 ***      a  |                |             |                |
 ***      c  |                |   ACTIVE    |                |
 ***      t  |                |   REGION    |                |
 ***      i  |                |             |                |
 ***      v  |                |             |                |
 ***      e  |                |             |                |
 ***     ----+----------------+-------------+----------------+
 ***
 ***  The algorithm is roughly as follows:
 ***    (1) select a row (by some criterion).
 ***    (2) push columns incident on that row onto the stack in decreasing
 ***        order of their length.
 ***    (3) pop first column off the stack and assign it to the selected
 ***        row.
 ***    (4) forward-triangularize (assign singleton rows with their one
 ***        and only incident column, until no longer possible).
 ***
 ***    (1)-(4) should be repeated until the active subregion becomes empty.
 ***
 ***  Everything above was written as though the entire matrix is
 ***  involved.  In reality, only the relevant square region is involved.
 **/
{
  int32 col, size;
  int32 *colcount_array_origin;
  mtx_matrix_t mtx;

  size = vars->reg.col.high - vars->reg.col.low + 1;
  size = MAX(size,vars->reg.row.high - vars->reg.row.low + 1);
  vars->tlist = size > 0 ? (struct creorder_list *)
    ascmalloc( size*sizeof(struct creorder_list) ) : NULL;
  colcount_array_origin = size > 0 ? (int32 *)
    ascmalloc( size*sizeof(int32) ) : NULL;
  vars->colcount =
    NOTNULL(colcount_array_origin) ?
      colcount_array_origin - vars->reg.col.low : NULL;
  mtx = vars->mtx;

  vars->rowhigh = vars->reg.row.high;
  /* Establish col counts */
  for( col = vars->reg.col.low ; col <= vars->reg.col.high ; ++col )
    vars->colcount[col] =
      mtx_nonzeros_in_col(mtx,col,&(vars->reg.row));

  while(TRUE) {
    struct creorder_list *head;
    int32 nelts;   /* # elements "allocated" from vars->tlist */
    mtx_coord_t nz;

    cforward_triangularize(vars);
    assign_null_cols(vars);
    if( vars->reg.col.low > vars->reg.col.high ||
        vars->reg.row.low > vars->reg.row.high ) {
      /* Active region is now empty, done */
      if( NOTNULL(vars->tlist) )
        ascfree( vars->tlist );
      if( NOTNULL(colcount_array_origin) )
        ascfree( colcount_array_origin );
      return;
    }

    head = NULL;
    nelts = 0;
    nz.row = mtx_FIRST;
    nz.col = select_col(vars);
    while( mtx_next_in_col(mtx,&nz,&(vars->reg.row)),
           nz.row != mtx_LAST ) {
      struct creorder_list **q,*p;

      p = &(vars->tlist[nelts++]);
      p->ndx = mtx_row_to_org(mtx,nz.row);
      p->count = mtx_nonzeros_in_row(mtx,nz.row,&(vars->reg.col));
      for( q = &head; *q && (*q)->count > p->count; q = &((*q)->next) );
      p->next = *q;
      *q = p;
    }
    /**
     ***  We now have a list of rows which intersect the selected column.
     ***  The list is in order of decreasing row count.
     **/

    /* Push incident rows on stack */
    for( ; NOTNULL(head) ; head = head->next )
      push_row_on_stack(vars,mtx_org_to_row(mtx,head->ndx));

    /* Pop row off stack and assign to selected col */
    assign_col_and_row(vars , nz.col , pop_row_from_stack(vars));
  }
  /* Not reached. */
}

/*********************************
  end of tspk1 stuff
*********************************/

static void square_region(mtx_region_t *region, mtx_range_t *rng)
/**
 ***  Get the largest square confined to the diagonal within the region given
 ***  and set rng accordingly.
 **/
{
  rng->low = MAX(region->row.low,region->col.low);
  rng->high = MIN(region->row.high,region->col.high);
}
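
/*
 * Worked example: for a region covering rows 3..10 and columns 5..12,
 * square_region yields rng->low = MAX(3,5) = 5 and rng->high = MIN(10,12) = 10,
 * i.e. the diagonal square 5..10.
 */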

static int ranki_reorder(mtx_matrix_t mtx,mtx_region_t *region)
/**
 ***  The region to reorder is first isolated by truncating the region
 ***  provided to the largest square region confined to the matrix diagonal.
 ***  It is presumed it will contain no empty rows or columns and will
 ***  provide the basis of candidate pivots when factoring.
 **/
{
  struct reorder_vars vars;
  mtx_range_t rng;

  square_region(region,&rng);
  vars.mtx = mtx;
  vars.reg.row.low = vars.reg.col.low = rng.low;
  vars.reg.row.high = vars.reg.col.high = rng.high;
  mtx_spk1_reorder(&vars);
  return 0;
}

static int tranki_reorder(mtx_matrix_t mtx,mtx_region_t *region)
/**
 ***  The region to reorder is first isolated by truncating the region
 ***  provided to the largest square region confined to the matrix diagonal.
 ***  It is presumed it will contain no empty rows or columns and will
 ***  provide the basis of candidate pivots when factoring.
 **/
{
  struct creorder_vars vars;
  mtx_range_t rng;

  square_region(region,&rng);
  vars.mtx = mtx;
  vars.reg.row.low = vars.reg.col.low = rng.low;
  vars.reg.row.high = vars.reg.col.high = rng.high;
  mtx_tspk1_reorder(&vars);
  return 0;
}

/***************************************************************************\
  End of reordering functions for SPK1.
\***************************************************************************/


int mtx_reorder(mtx_matrix_t mtx,mtx_region_t *region,
                enum mtx_reorder_method m)
{
  if (!mtx_check_matrix(mtx)) {
    FPRINTF(g_mtxerr,"mtx_reorder called with bad matrix\n");
    return 1;
  }
  if (region==NULL) {
    FPRINTF(g_mtxerr,"mtx_reorder called with mtx_ENTIRE_MATRIX\n");
    return 1;
  }
  switch (m) {
  case mtx_SPK1:
    return ranki_reorder(mtx,region);
  case mtx_TSPK1:
    return tranki_reorder(mtx,region);
  case mtx_NATURAL:
    return 0;
  default:
    FPRINTF(g_mtxerr,"mtx_reorder called with unknown reorder method\n");
    return 1;
  }
}
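
/*
 * Usage sketch (illustrative only, not compiled): a caller holding a square
 * diagonal block spanning low..high could request an SPK1 reordering of it
 * as below.  The function name and block bounds are hypothetical, not part
 * of the mtx API.
 */
#if 0
static void reorder_block_example(mtx_matrix_t mtx, int32 low, int32 high)
{
  mtx_region_t reg;
  reg.row.low = reg.col.low = low;
  reg.row.high = reg.col.high = high;
  if (mtx_reorder(mtx,&reg,mtx_SPK1)) {
    FPRINTF(g_mtxerr,"reorder_block_example: mtx_reorder failed\n");
  }
}
#endif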
#undef __MTX_C_SEEN__ |