Flow123d  last_with_con_2.0.0-4-g42e6930
linsys.cc
1 /*!
2  *
3  * Copyright (C) 2015 Technical University of Liberec. All rights reserved.
4  *
5  * This program is free software; you can redistribute it and/or modify it under
6  * the terms of the GNU General Public License version 3 as published by the
7  * Free Software Foundation. (http://www.gnu.org/licenses/gpl-3.0.en.html)
8  *
9  * This program is distributed in the hope that it will be useful, but WITHOUT
10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
11  * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
12  *
13  *
14  * @file linsys.cc
15  * @ingroup la
16  * @brief Wrappers for linear systems based on MPIAIJ and MATIS format.
17  * @author Jan Brezina
18  */
19 
20 #include <algorithm>
21 #include <limits>
22 #include <petscmat.h>
23 #include "system/system.hh"
24 #include "la/linsys.hh"
25 
26 
27 namespace it = Input::Type;
28 
29 it::Abstract & LinSys::get_input_type() {
30  return it::Abstract("LinSys", "Linear solver setting.")
31  .close();
32 }
33 
34 #if 0
35 
36 /**
37  * @brief Constructs a parallel system with given local size.
38  *
39  * By the parameter @p vec_lsize we initialize distribution of the rhs and solution vectors.
40  * For an MPIAIJ matrix this is also the distribution of its rows, but for an IS matrix it is only the size of
41  * the principal local part without the interface (a short usage sketch follows the constructor body below).
42  */
43 LinSys::LinSys(unsigned int vec_lsize, double *sol_array)
44 :type(MAT_MPIAIJ),
45  matrix(NULL),
46  vec_ds(vec_lsize, PETSC_COMM_WORLD),
47  symmetric(false),
48  positive_definite(false),
49  status(NONE)
50 {
51  // create PETSC vectors
52  v_rhs=(double *) xmalloc(sizeof(double) * (this->vec_lsize() + 1) );
53  VecCreateMPIWithArray(PETSC_COMM_WORLD,1, this->vec_lsize(), PETSC_DECIDE, v_rhs, &rhs);
54  VecZeroEntries(rhs);
55 
56  if (sol_array == NULL) v_solution=(double *)xmalloc(sizeof(double) * (this->vec_lsize() + 1));
57  else v_solution=sol_array;
58  VecCreateMPIWithArray(PETSC_COMM_WORLD,1, this->vec_lsize(), PETSC_DECIDE, v_solution, &solution);
59  own_solution=false;
60  //VecZeroEntries(solution);
61 }
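A minimal sketch of the layout rule described in the comment above, assuming PETSc is already initialized; the sizes and the buffer are illustrative, not taken from Flow123d. Each rank passes only its local size, and PETSC_DECIDE lets PETSc derive the global size, so the rhs/solution vectors and the rows of an MPIAIJ matrix share one distribution.

    PetscInt    lsize = 4;                          // illustrative local size on this rank
    PetscScalar buf[4] = {0.0, 0.0, 0.0, 0.0};      // user-owned storage, as v_rhs above
    Vec         rhs;
    VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, lsize, PETSC_DECIDE, buf, &rhs);
    VecZeroEntries(rhs);
    // ... fill, assemble and use the vector ...
    VecDestroy(&rhs);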
62 
63 
64 void LinSys::start_add_assembly()
65 {
66  switch (status) {
67  case ALLOCATE:
68  preallocate_matrix();
69  break;
70  case INSERT:
71  finalize(MAT_FLUSH_ASSEMBLY);
72  break;
73  case ADD:
74  case DONE:
75  break;
76  default:
77  OLD_ASSERT(0, "Can not set values. Matrix is not preallocated.\n");
78  }
79  status=ADD;
80 }
81 
82 void LinSys::start_insert_assembly()
83 {
84  switch (status) {
85  case ALLOCATE:
86  preallocate_matrix();
87  break;
88  case ADD:
89  finalize(MAT_FLUSH_ASSEMBLY);
90  break;
91  case INSERT:
92  case DONE:
93  break;
94  default:
95  OLD_ASSERT(0, "Can not set values. Matrix is not preallocated.\n");
96  }
97  status=INSERT;
98 }
99 
100 
101 void LinSys::finalize(MatAssemblyType assembly_type)
102 {
103  if (status == ALLOCATE) {
104  WarningOut() << "Finalizing linear system without setting values.\n";
105  preallocate_matrix();
106  }
107  MatAssemblyBegin(matrix, assembly_type);
108  VecAssemblyBegin(rhs);
109  if (assembly_type == MAT_FINAL_ASSEMBLY) status=DONE;
110  MatAssemblyEnd(matrix, assembly_type);
111  VecAssemblyEnd(rhs);
112 }
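Taken together, start_allocation(), start_add_assembly()/start_insert_assembly() and finalize() form a small state machine (ALLOCATE -> ADD/INSERT -> DONE). A hedged sketch of the intended two-pass use, where assemble_local_blocks() stands for any user routine calling the set-value methods and is illustrative, not part of this file:

    // ls: a LinSys_MPIAIJ built with the local vector size (construction elided)
    ls.start_allocation();               // status = ALLOCATE
    assemble_local_blocks(ls);           // pass 1: entries are only counted for preallocation
    ls.start_add_assembly();             // runs preallocate_matrix(), status = ADD
    assemble_local_blocks(ls);           // pass 2: the same calls now really add values
    ls.finalize(MAT_FINAL_ASSEMBLY);     // status = DONE; MAT_FLUSH_ASSEMBLY keeps the state open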
113 
114 void view(std::ostream &output_stream, int * output_mapping = NULL)
115 {
116 
117 }
118 
119 LinSys::~LinSys()
120 {
121 
122  if (matrix) MatDestroy(&matrix);
123  VecDestroy(&rhs);
124  VecDestroy(&solution);
125 
126  xfree(v_rhs);
127  if (own_solution) xfree(v_solution);
128 }
129 
130 
131 // ======================================================================================
132 // LSView - output assembled system in side,el,edge ordering
133 
134 void LSView(LinSystem *ls)
135 {
136  FILE *f;
137  int i, j, n;
138  PetscInt ncols;
139  const PetscInt *cols;
140  const PetscScalar *vals;
141  MatInfo info;
142 
143  // output matrix
144  MatGetInfo(ls->A,MAT_GLOBAL_SUM,&info);
145  MPI_Barrier(PETSC_COMM_WORLD);
146  if (ls->ds->myp() == 0) {
147  f=xfopen("matrix.dat","w");
148  xfprintf(f,"%d %d 0.0\n",ls->size, ls->size);
149  //xfprintf(f,"zzz=zeros(%d,3)\n",(int)(info.nz_used));
150  //xfprintf(f,"zzz=[\n");
151  xfclose(f);
152  }
153  for(n=0;n<ls->ds->np();n++) {
154  MPI_Barrier(PETSC_COMM_WORLD);
155  if (n==ls->ds->myp()) {
156  f=xfopen("matrix.dat","a");
157  for(i=ls->ds->begin(); i<ls->ds->end();i++) {
158  MatGetRow(ls->A,i,&ncols,&cols,&vals);
159  for(j=0;j<ncols;j++)
160  xfprintf(f,"%d %d %f\n",ls->old_4_new[i]+1,ls->old_4_new[cols[j]]+1,vals[j]);
161  MatRestoreRow(ls->A,i,&ncols,&cols,&vals);
162  }
163  xfclose(f);
164  }
165  }
166  MPI_Barrier(PETSC_COMM_WORLD);
167  if (ls->ds->myp() == 0) {
168  f=xfopen("rhs.m","w");
169  xfprintf(f,"yyy = zeros(%d,2)\n yyy=[\n",ls->size);
170  xfclose(f);
171  }
172  // output vec
173  for(n=0; n<ls->ds->np(); n++) {
174  MPI_Barrier(PETSC_COMM_WORLD);
175  if (n==ls->ds->myp()) {
176  f=xfopen("rhs.m","a");
177  for(i=ls->ds->begin(); i<ls->ds->end(); i++) {
178  xfprintf(f,"%d %f\n",ls->old_4_new[i]+1,ls->vb[i-ls->ds->begin()]);
179  }
180  xfclose(f);
181  }
182  }
183  MPI_Barrier(PETSC_COMM_WORLD);
184  if (ls->ds->myp() == 0) {
185  f=xfopen("rhs.m","a");
186  xfprintf(f,"];\n xxx=sortrows(yyy); Vec=xxx(:,2);\n");
187  xfclose(f);
188  }
189 }
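The matrix.dat file written above starts with a size line ("size size 0.0") followed by one "row column value" triplet per nonzero, with 1-based indices in the old ordering; an illustrative excerpt for a 3x3 system could look like:

    3 3 0.0
    1 1 2.000000
    1 3 -1.000000
    2 2 3.500000
    3 1 -1.000000
    3 3 4.000000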
190 
191 // ======================================================================================
192 // MyVecView - output a vector in the given ordering
193 
194 void MyVecView(Vec v,int *order,const char* fname) {
195 
196 FILE *f;
197 
198 
199 int i,n;
200 double *array;
201 
202 
203  Distribution ds(v); // get distribution of the vector
204 
205 
206  if (ds.myp() == 0) {
207  f=xfopen(fname,"w");
208  xfprintf(f,"yyy = zeros(%d,2)\n yyy=[\n",ds.size());
209  xfclose(f);
210  }
211  // output vec
212  for(n=0; n<ds.np(); n++) {
213  MPI_Barrier(PETSC_COMM_WORLD);
214  if (n==ds.myp()) {
215  VecGetArray(v,&array);
216  f=xfopen(fname,"a");
217  for(i=ds.begin(); i<ds.end(); i++) {
218  xfprintf(f,"%d %f\n",order[i]+1,array[i-ds.begin()]);
219  }
220  VecRestoreArray(v,&array);
221  xfclose(f);
222  }
223  }
224  MPI_Barrier(PETSC_COMM_WORLD);
225  if (ds.myp() == 0) {
226  f=xfopen(fname,"a");
227  xfprintf(f,"];\n xxx=sortrows(yyy); Vec=xxx(:,2);\n");
228  xfclose(f);
229  }
230 }
231 */
232 
233 //=========================================================================================
234 /*! @brief convert linear system to pure CSR format
235  *
236  * - allocate CSR arrays according to MatGetInfo
237  * - indexes are from 0
238  * - make the CSR format of the matrix (i.e. the I, J, A vectors) from the PETSc format (a small worked example follows the function below)
239  */
240 
241 void LSSetCSR( LinSystem *mtx )
242 {
243  int i,pos,row,nnz;
244  PetscInt nnz_loc;
245  const PetscInt *cols;
246  const PetscScalar *vals;
247  MatInfo info;
248 
249  MatGetInfo(mtx->A,MAT_LOCAL,&info);
250  nnz=(int)(info.nz_used)+1; // allocate one more to be sure
251  // allocate
252  if (mtx->i == NULL) mtx->i=(int *)xmalloc( (mtx->size+1)*sizeof(int) );
253  if (mtx->j == NULL) mtx->j=(int *)xmalloc( nnz*sizeof(int) );
254  if (mtx->a == NULL) mtx->a=(double *)xmalloc( nnz*sizeof(double) );
255  DebugOut().fmt("mtx size: {} nnz: {}\n", mtx->size, nnz);
256  // setup matrix from PETSC SeqAIJ format
257  pos=0;
258  for( row=0; row< mtx->size; row++ ) {
259  mtx->i[row]=pos;
260  MatGetRow(mtx->A,row,&nnz_loc,&cols,&vals);
261  DebugOut().fmt("CSR row: {} nnz_loc {}\n", row, nnz_loc);
262  for(i=0; i<nnz_loc; i++) {
263  OLD_ASSERT(pos<nnz,"More nonzeroes than allocated! row: %d entry: %d\n",row,i);
264  mtx->j[pos]=cols[i];
265  mtx->a[pos]=vals[i];
266  pos++;
267  }
268  MatRestoreRow(mtx->A,row,&nnz_loc,&cols,&vals);
269  }
270  mtx->i[row]=pos;
271 }
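A small hand-worked illustration of the target format, not produced by the code above: for the 3x3 matrix with rows (2 0 1), (0 3 0), (4 0 5), the zero-based CSR arrays are

    int    i[4] = {0, 2, 3, 5};                  // row r occupies positions i[r] .. i[r+1]-1
    int    j[5] = {0, 2, 1, 0, 2};               // column indices, counted from 0
    double a[5] = {2.0, 1.0, 3.0, 4.0, 5.0};     // corresponding values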
272 
273 void LSFreeCSR( LinSystem *mtx )
274 {
275  xfree(mtx->i);
276  xfree(mtx->j);
277  xfree(mtx->a);
278 }
279 
280 
281 //**********************************************************************************************
282 
283 
284 void LinSys_MPIAIJ::start_allocation()
285 {
286  if (status != NONE) {
287  // reinit linear system
288 
289  }
290  VecCreateMPI(PETSC_COMM_WORLD, vec_ds.lsize(), PETSC_DECIDE, &(on_vec));
291  VecDuplicate(on_vec,&(off_vec));
292  status=ALLOCATE;
293 
294 }
295 
296 void LinSys_MPIAIJ::preallocate_matrix()
297 {
298  OLD_ASSERT(status == ALLOCATE, "Linear system has to be in ALLOCATE status.");
299 
300  PetscScalar *on_array, *off_array;
301  int *on_nz, *off_nz;
302  int i;
303 
304  // assemble the counting vectors, read off their values, then destroy them
305  VecAssemblyBegin(on_vec);
306  VecAssemblyBegin(off_vec);
307 
308  on_nz=(int *)xmalloc( 2 * vec_ds.lsize() * sizeof(int));
309  off_nz=on_nz + vec_ds.lsize();
310 
311  VecAssemblyEnd(on_vec);
312  VecAssemblyEnd(off_vec);
313 
314  VecGetArray(on_vec,&on_array);
315  VecGetArray(off_vec,&off_array);
316 
317  for(i=0; i<vec_ds.lsize(); i++) {
318  on_nz[i]=min((unsigned int)(on_array[i]+0.1),vec_ds.lsize()); // small fraction to ensure correct rounding
319  off_nz[i]=(int)(off_array[i]+0.1);
320  }
321 
322  VecRestoreArray(on_vec,&on_array);
323  VecRestoreArray(off_vec,&off_array);
324  VecDestroy(&on_vec);
325  VecDestroy(&off_vec);
326 
327  // create PETSC matrix with preallocation
328  MatCreateAIJ(PETSC_COMM_WORLD, vec_ds.lsize(), vec_ds.lsize(), PETSC_DETERMINE, PETSC_DETERMINE,
329  0, on_nz, 0, off_nz, &matrix);
330 
331  if (symmetric) MatSetOption(matrix, MAT_SYMMETRIC, PETSC_TRUE);
332  MatSetOption(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
333 
334 
335  xfree(on_nz);
336 }
337 
338 void LinSys_MPIAIJ::preallocate_values(int nrow,int *rows,int ncol,int *cols)
339 {
340  int i,j,row,col;
341 
342  for (i=0; i<nrow; i++) {
343  row=rows[i];
344  for(j=0; j<ncol; j++) {
345  col=cols[j];
346  if (vec_ds.get_proc(row) == vec_ds.get_proc(col))
347  VecSetValue(on_vec,row,1.0,ADD_VALUES);
348  else
349  VecSetValue(off_vec,row,1.0,ADD_VALUES);
350  }
351  }
352 }
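What the counting above feeds into: the assembled on_vec/off_vec become the per-row d_nnz/o_nnz preallocation arrays of MatCreateAIJ. A hedged illustration for one rank owning rows 0-3, where e.g. row 2 couples to columns {1, 2, 5} and therefore gets two on-process and one off-process entries (all counts are hypothetical):

    PetscInt d_nnz[4] = {1, 2, 2, 1};    // nonzeros in columns owned by this rank (hypothetical)
    PetscInt o_nnz[4] = {0, 0, 1, 0};    // nonzeros in columns owned by other ranks (hypothetical)
    Mat A;
    MatCreateAIJ(PETSC_COMM_WORLD, 4, 4, PETSC_DETERMINE, PETSC_DETERMINE,
                 0, d_nnz, 0, o_nnz, &A);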
353 
354 void LinSys_MPIAIJ::view_local_matrix()
355 {
356  // print local subdomain matrix
357  MessageOut() << "Printing of local matrix is not supported yet for MPIAIJ matrix. \n";
358 
359 }
360 
361 
362 LinSys_MPIAIJ:: ~LinSys_MPIAIJ()
363 {
364  if (status == ALLOCATE) {
365  VecDestroy(&on_vec);
366  VecDestroy(&off_vec);
367  }
368 }
369 
370 //**********************************************************************************************
371 
372 LinSys_MATIS::LinSys_MATIS(boost::shared_ptr<LocalToGlobalMap> global_row_4_sub_row, double *sol_array)
373 : LinSys(global_row_4_sub_row->get_distr()->lsize(), sol_array), lg_map(global_row_4_sub_row)
374 {
375  PetscErrorCode err;
376 
377  //MessageOut().fmt("sub size {} \n", subdomain_size);
378 
379  // create the PETSc mapping from global_row_4_sub_row
380  // check possible index range of lg_map to fit into signed int type
381  if (lg_map->get_distr()->size() > numeric_limits<PetscInt>::max()) xprintf(Err,"Index range doesn't fit into signed int!");
382  err = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,
383  lg_map->size(),
384  (const PetscInt*)(&(lg_map->get_map_vector()[0])),
385  PETSC_COPY_VALUES, &map_local_to_global);
386 
387  OLD_ASSERT(err == 0,"Error in ISLocalToGlobalMappingCreate.");
388 
389  // initialize loc_rows array
390  loc_rows_size=100;
391  loc_rows = new int[loc_rows_size];
392 
393  type = MAT_IS;
394 };
395 
396 
397 void LinSys_MATIS::start_allocation()
398 {
399  OLD_ASSERT(0, "Not implemented");
400  /*
401  PetscErrorCode err;
402 
403  if (status != NONE) {
404  // reinit linear system
405 
406  }
407  err = MatCreateIS(PETSC_COMM_WORLD, 1, vec_ds.lsize(), vec_ds.lsize(), vec_ds.size(), vec_ds.size(),
408  map_local_to_global, &matrix);
409  OLD_ASSERT(err == 0,"Error in MatCreateIS.");
410 
411  err = MatISGetLocalMat(matrix, &local_matrix);
412  OLD_ASSERT(err == 0,"Error in MatISGetLocalMat.");
413 
414  // extract scatter
415  MatMyIS *mis = (MatMyIS*) matrix->data;
416  sub_scatter = mis->ctx;
417 
418  subdomain_nz= new int[subdomain_size]; // count local nonzeros for every row of the subdomain matrix
419  SET_ARRAY_ZERO(subdomain_nz,subdomain_size); // set zeros to the array
420 
421  status=ALLOCATE;
422 
423  DebugOut() << "allocation started\n";
424  */
425 }
426 
427 void LinSys_MATIS::preallocate_matrix()
428 {
429  OLD_ASSERT(status == ALLOCATE, "Linear system has to be in ALLOCATE status.");
430 
431  // printing subdomain_nz
432  //DBGPRINT_INT("subdomain_nz",subdomain_size,subdomain_nz);
433 
434 
435  // preallocation of local subdomain matrix
436  MatSeqAIJSetPreallocation(local_matrix, 0, subdomain_nz);
437 
438  if (symmetric) MatSetOption(matrix, MAT_SYMMETRIC, PETSC_TRUE);
439  MatSetOption(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);
440 
441  delete[] subdomain_nz;
442 }
443 
444 void LinSys_MATIS::preallocate_values(int nrow,int *rows,int ncol,int *cols)
445 {
446  int i,row, n_loc_rows;
447  PetscErrorCode err;
448 
449  if (loc_rows_size < nrow) {
450  delete[] loc_rows;
451  loc_rows_size=nrow;
452  loc_rows=new int[loc_rows_size];
453 
454  }
455 
456  // translate from global to local indexes
457  err = ISGlobalToLocalMappingApply(map_local_to_global, IS_GTOLM_DROP, nrow, rows, &n_loc_rows, loc_rows);
458  OLD_ASSERT(err == 0,"Error in ISGlobalToLocalMappingApply.");
459  OLD_ASSERT(nrow == n_loc_rows,"Not all global indices translated to local indices.");
460  // printing subdomain_embedding
461  //DBGPRINT_INT("embed_element_to",nrow,loc_rows);
462 
463 
464  /* We don't need columns for preallocation.
465 
466  if (loc_cols_size < ncol) {
467  delete loc_cols;
468  loc_cols_size=ncol;
469  loc_cols=new int[loc_cols_size];
470  }
471 
472  // translate from global to local indexes
473  ISGlobalToLocalMappingApply(local_to_global, IS_GTOLM_DROP, ncol, cols, &n_loc_cols, loc_cols);
474  */
475  for (i=0; i<n_loc_rows; i++) {
476  row=loc_rows[i];
477  subdomain_nz[row] = subdomain_nz[row] + ncol;
478  }
479 }
480 
481 void LinSys_MATIS::view_local_matrix()
482 {
483  PetscErrorCode err;
484 // PetscViewer lab;
485 // char numstring[6] = "00000";
486 // int myid;
487 // int ierr;
488 
489 // err = PetscViewerSetFormat(PETSC_VIEWER_STDOUT_SELF,PETSC_VIEWER_ASCII_DENSE);
490 // OLD_ASSERT(err == 0,"Error in PetscViewerSetFormat.");
491 
492  // print local subdomain matrix
493  err = MatView(local_matrix,PETSC_VIEWER_STDOUT_SELF);
494  OLD_ASSERT(err == 0,"Error in MatView.");
495 //
496 // ierr=MPI_Comm_rank(PETSC_COMM_WORLD, &(myid));
497 // OLD_ASSERT( ! ierr , "Can not get MPI rank.\n" );
498 //
499 // sprintf(numstring,"%5.5d",myid);
500 // printf("Numstring is >>>%s<<<\n",numstring);
501 //
502 // PetscViewerASCIIOpen(PETSC_COMM_SELF,numstring,&lab);
503 //
504 // OLD_ASSERT(!(loc_rows == NULL),"Local matrix is not assigned.");
505 // err = PetscViewerSetFormat(lab,PETSC_VIEWER_ASCII_DENSE);
506 // OLD_ASSERT(err == 0,"Error in PetscViewerSetFormat.");
507 //
508 // // print local subdomain matrix
509 // err = MatView(local_matrix,lab);
510 // OLD_ASSERT(err == 0,"Error in MatView.");
511 //
512 // PetscViewerDestroy(lab);
513 }
514 
515 LinSys_MATIS:: ~LinSys_MATIS()
516 {
517  PetscErrorCode err;
518 
519  // destroy mapping
520  err = ISLocalToGlobalMappingDestroy(&map_local_to_global);
521  OLD_ASSERT(err == 0,"Error in ISLocalToGlobalMappingDestroy.");
522  MessageOut().fmt("Error code {} \n", err);
523 
524 
525  delete[] loc_rows;
526  delete[] subdomain_indices;
527  if (status == ALLOCATE) {
528  delete[] subdomain_nz;
529  }
530 
531 }
532 
533 #endif
534 
535 
536 #ifdef HAVE_ATLAS_ONLY_LAPACK
537 /*
538  * This is a workaround to build Flow with PETSc 3.0 and ATLAS, since one particular and unimportant preconditioner (asa)
539  * needs this LAPACK function, which is not implemented in ATLAS.
540  */
541 extern "C" {
542 
543 void dgeqrf_(int m, int n, double **A, double *TAU, double *work, int lwork, int info)
544 {
545 }
546 
547 }
548 #endif
549 