Flow123d  JS_before_hm-1621-g63a12c7
linsys.cc
/*!
 *
 * Copyright (C) 2015 Technical University of Liberec. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify it under
 * the terms of the GNU General Public License version 3 as published by the
 * Free Software Foundation. (http://www.gnu.org/licenses/gpl-3.0.en.html)
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 *
 * @file linsys.cc
 * @ingroup la
 * @brief Wrappers for linear systems based on MPIAIJ and MATIS formats.
 * @author Jan Brezina
 */

#include <algorithm>
#include <limits>
#include <petscmat.h>
#include "system/system.hh"
#include "la/linsys.hh"


namespace it = Input::Type;

it::Abstract & LinSys::get_input_type() {
    return it::Abstract("LinSys", "Linear solver settings.")
            .allow_auto_conversion("Petsc")
            .close();
}
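
// Note: allow_auto_conversion("Petsc") supplies the default value of the TYPE
// key, so an input record that omits TYPE is interpreted as the Petsc solver
// variant; this only shortens the input, it does not restrict the available
// implementations.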

#if 0

/**
 * @brief Constructs a parallel system with given local size.
 *
 * By the parameter @p vec_lsize we initialize distribution of the rhs and solution vectors.
 * For MPIAIJ matrix this is also distribution of its rows, but for IS matrix this is only size of
 * principal local part without interface.
 */
LinSys::LinSys(unsigned int vec_lsize, double *sol_array)
:type(MAT_MPIAIJ),
 matrix(NULL),
 vec_ds(vec_lsize, PETSC_COMM_WORLD),
 symmetric(false),
 positive_definite(false),
 status(NONE)
{
    // create PETSC vectors
    v_rhs=(double *) xmalloc(sizeof(double) * (this->vec_lsize() + 1) );
    VecCreateMPIWithArray(PETSC_COMM_WORLD,1, this->vec_lsize(), PETSC_DECIDE, v_rhs, &rhs);
    VecZeroEntries(rhs);

    if (sol_array == NULL) {
        // allocated here, so the destructor has to free it
        v_solution=(double *)xmalloc(sizeof(double) * (this->vec_lsize() + 1));
        own_solution=true;
    } else {
        v_solution=sol_array;
        own_solution=false;
    }
    VecCreateMPIWithArray(PETSC_COMM_WORLD,1, this->vec_lsize(), PETSC_DECIDE, v_solution, &solution);
    //VecZeroEntries(solution);
}


void LinSys::start_add_assembly()
{
    switch (status) {
        case ALLOCATE:
            preallocate_matrix();
            break;
        case INSERT:
            finalize(MAT_FLUSH_ASSEMBLY);
            break;
        case ADD:
        case DONE:
            break;
        default:
            OLD_ASSERT(0, "Can not set values. Matrix is not preallocated.\n");
    }
    status=ADD;
}

void LinSys::start_insert_assembly()
{
    switch (status) {
        case ALLOCATE:
            preallocate_matrix();
            break;
        case ADD:
            finalize(MAT_FLUSH_ASSEMBLY);
            break;
        case INSERT:
        case DONE:
            break;
        default:
            OLD_ASSERT(0, "Can not set values. Matrix is not preallocated.\n");
    }
    status=INSERT;
}


void LinSys::finalize(MatAssemblyType assembly_type)
{
    if (status == ALLOCATE) {
        WarningOut() << "Finalizing linear system without setting values.\n";
        preallocate_matrix();
    }
    MatAssemblyBegin(matrix, assembly_type);
    VecAssemblyBegin(rhs);
    if (assembly_type == MAT_FINAL_ASSEMBLY) status=DONE;
    MatAssemblyEnd(matrix, assembly_type);
    VecAssemblyEnd(rhs);
}
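
// Assembly life cycle of this legacy implementation: the system starts in
// ALLOCATE (see start_allocation() in the concrete classes below), the first
// switch to ADD or INSERT triggers preallocate_matrix(), toggling between ADD
// and INSERT flushes pending entries with MAT_FLUSH_ASSEMBLY, and only
// MAT_FINAL_ASSEMBLY moves the status to DONE. A rough usage sketch
// (illustrative names, not taken from the code):
//
//     ls.start_allocation();            // entries are only counted
//     ... set matrix/rhs entries ...
//     ls.start_add_assembly();          // preallocates the matrix, status ADD
//     ... set matrix/rhs entries ...
//     ls.finalize(MAT_FINAL_ASSEMBLY);  // status DONE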

void view(std::ostream &output_stream, int * output_mapping = NULL)
{

}

LinSys::~LinSys()
{

    if (matrix) chkerr(MatDestroy(&matrix));
    VecDestroy(&rhs);
    VecDestroy(&solution);

    xfree(v_rhs);
    if (own_solution) xfree(v_solution);
}


// ======================================================================================
// LSView - output assembled system in side,el,edge ordering

void LSView(LinSystem *ls)
{
    FILE *f;
    int i, j, n;
    PetscInt ncols;
    const PetscInt *cols;
    const PetscScalar *vals;
    MatInfo info;

    // output matrix
    MatGetInfo(ls->A,MAT_GLOBAL_SUM,&info);
    MPI_Barrier(PETSC_COMM_WORLD);
    if (ls->ds->myp() == 0) {
        f=xfopen("matrix.dat","w");
        xfprintf(f,"%d %d 0.0\n",ls->size, ls->size);
        //xfprintf(f,"zzz=zeros(%d,3)\n",(int)(info.nz_used));
        //xfprintf(f,"zzz=[\n");
        xfclose(f);
    }
    for(n=0;n<ls->ds->np();n++) {
        MPI_Barrier(PETSC_COMM_WORLD);
        if (n==ls->ds->myp()) {
            f=xfopen("matrix.dat","a");
            for(i=ls->ds->begin(); i<ls->ds->end();i++) {
                MatGetRow(ls->A,i,&ncols,&cols,&vals);
                for(j=0;j<ncols;j++)
                    xfprintf(f,"%d %d %f\n",ls->old_4_new[i]+1,ls->old_4_new[cols[j]]+1,vals[j]);
                MatRestoreRow(ls->A,i,&ncols,&cols,&vals);
            }
            xfclose(f);
        }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
    if (ls->ds->myp() == 0) {
        f=xfopen("rhs.m","w");
        xfprintf(f,"yyy = zeros(%d,2)\n yyy=[\n",ls->size);
        xfclose(f);
    }
    // output vec
    for(n=0; n<ls->ds->np(); n++) {
        MPI_Barrier(PETSC_COMM_WORLD);
        if (n==ls->ds->myp()) {
            f=xfopen("rhs.m","a");
            for(i=ls->ds->begin(); i<ls->ds->end(); i++) {
                xfprintf(f,"%d %f\n",ls->old_4_new[i]+1,ls->vb[i-ls->ds->begin()]);
            }
            xfclose(f);
        }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
    if (ls->ds->myp() == 0) {
        f=xfopen("rhs.m","a");
        xfprintf(f,"];\n xxx=sortrows(yyy); Vec=xxx(:,2);\n");
        xfclose(f);
    }
}
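
// The files written above follow MATLAB/Octave conventions: matrix.dat is a
// list of "row col value" triplets with 1-based indices preceded by a
// "size size 0.0" header line, so it can presumably be loaded with
// A = spconvert(load('matrix.dat')); rhs.m is a script that fills yyy and
// sorts it into the vector Vec.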

// ======================================================================================
// MyVecView - output a vector in the given ordering

void MyVecView(Vec v,int *order,const char* fname) {

    FILE *f;
    int i,n;
    double *array;

    Distribution ds(v); // get distribution of the vector

    if (ds.myp() == 0) {
        f=xfopen(fname,"w");
        xfprintf(f,"yyy = zeros(%d,2)\n yyy=[\n",ds.size());
        xfclose(f);
    }
    // output vec
    for(n=0; n<ds.np(); n++) {
        MPI_Barrier(PETSC_COMM_WORLD);
        if (n==ds.myp()) {
            VecGetArray(v,&array);
            f=xfopen(fname,"a");
            for(i=ds.begin(); i<ds.end(); i++) {
                xfprintf(f,"%d %f\n",order[i]+1,array[i-ds.begin()]);
            }
            VecRestoreArray(v,&array);
            xfclose(f);
        }
    }
    MPI_Barrier(PETSC_COMM_WORLD);
    if (ds.myp() == 0) {
        f=xfopen(fname,"a");
        xfprintf(f,"];\n xxx=sortrows(yyy); Vec=xxx(:,2);\n");
        xfclose(f);
    }
}
*/

//=========================================================================================
/*! @brief convert linear system to pure CSR format
 *
 * - allocate CSR arrays according to MatGetInfo
 * - indexes are from 0
 * - make CSR format of the matrix (i.e. I,J,A vectors) from the PETSC format
 */

void LSSetCSR( LinSystem *mtx )
{
    int i,pos,row,nnz;
    PetscInt nnz_loc;
    const PetscInt *cols;
    const PetscScalar *vals;
    MatInfo info;

    MatGetInfo(mtx->A,MAT_LOCAL,&info);
    nnz=(int)(info.nz_used)+1; // allocate one more to be sure
    // allocate
    if (mtx->i == NULL) mtx->i=(int *)xmalloc( (mtx->size+1)*sizeof(int) );
    if (mtx->j == NULL) mtx->j=(int *)xmalloc( nnz*sizeof(int) );
    if (mtx->a == NULL) mtx->a=(double *)xmalloc( nnz*sizeof(double) );
    DebugOut().fmt("mtx size: {} nnz: {}\n", mtx->size, nnz);
    // setup matrix from PETSC SeqAIJ format
    pos=0;
    for( row=0; row< mtx->size; row++ ) {
        mtx->i[row]=pos;
        MatGetRow(mtx->A,row,&nnz_loc,&cols,&vals);
        DebugOut().fmt("CSR row: {} nnz_loc {}\n", row, nnz_loc);
        for(i=0; i<nnz_loc; i++) {
            OLD_ASSERT(pos<nnz,"More nonzeroes than allocated! row: %d entry: %d\n",row,i);
            mtx->j[pos]=cols[i];
            mtx->a[pos]=vals[i];
            pos++;
        }
        MatRestoreRow(mtx->A,row,&nnz_loc,&cols,&vals);
    }
    mtx->i[row]=pos;
}
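
// Illustrative CSR example for the arrays filled above (0-based indices,
// i holds one row offset per row plus the total count at the end):
//
//     [ 2 0 1 ]
//     [ 0 3 0 ]      ->   i = {0, 2, 3, 5}
//     [ 4 0 5 ]           j = {0, 2, 1, 0, 2}
//                         a = {2, 1, 3, 4, 5}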

void LSFreeCSR( LinSystem *mtx )
{
    xfree(mtx->i);
    xfree(mtx->j);
    xfree(mtx->a);
    // reset pointers so that a subsequent LSSetCSR() allocates them again
    mtx->i = NULL;
    mtx->j = NULL;
    mtx->a = NULL;
}


//**********************************************************************************************


void LinSys_MPIAIJ::start_allocation()
{
    if (status != NONE) {
        // reinit linear system

    }
    VecCreateMPI(PETSC_COMM_WORLD, vec_ds.lsize(), PETSC_DECIDE, &(on_vec));
    VecDuplicate(on_vec,&(off_vec));
    status=ALLOCATE;

}

void LinSys_MPIAIJ::preallocate_matrix()
{
    OLD_ASSERT(status == ALLOCATE, "Linear system has to be in ALLOCATE status.");

    PetscScalar *on_array, *off_array;
    int *on_nz, *off_nz;
    int i;

    // assembly and get values from counting vectors, destroy them
    VecAssemblyBegin(on_vec);
    VecAssemblyBegin(off_vec);

    on_nz=(int *)xmalloc( 2 * vec_ds.lsize() * sizeof(int));
    off_nz=on_nz + vec_ds.lsize();

    VecAssemblyEnd(on_vec);
    VecAssemblyEnd(off_vec);

    VecGetArray(on_vec,&on_array);
    VecGetArray(off_vec,&off_array);

    for(i=0; i<vec_ds.lsize(); i++) {
        on_nz[i]=min((unsigned int)(on_array[i]+0.1),vec_ds.lsize()); // small fraction to ensure correct rounding
        off_nz[i]=(int)(off_array[i]+0.1);
    }

    VecRestoreArray(on_vec,&on_array);
    VecRestoreArray(off_vec,&off_array);
    VecDestroy(&on_vec);
    VecDestroy(&off_vec);

    // create PETSC matrix with preallocation
    MatCreateAIJ(PETSC_COMM_WORLD, vec_ds.lsize(), vec_ds.lsize(), PETSC_DETERMINE, PETSC_DETERMINE,
            0, on_nz, 0, off_nz, &matrix);

    if (symmetric) MatSetOption(matrix, MAT_SYMMETRIC, PETSC_TRUE);
    MatSetOption(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);


    xfree(on_nz);
}

void LinSys_MPIAIJ::preallocate_values(int nrow,int *rows,int ncol,int *cols)
{
    int i,j,row,col;

    for (i=0; i<nrow; i++) {
        row=rows[i];
        for(j=0; j<ncol; j++) {
            col=cols[j];
            if (vec_ds.get_proc(row) == vec_ds.get_proc(col))
                VecSetValue(on_vec,row,1.0,ADD_VALUES);
            else
                VecSetValue(off_vec,row,1.0,ADD_VALUES);
        }
    }
}
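
// Preallocation of the MPIAIJ matrix is a two-pass scheme: while the system is
// in the ALLOCATE state, every prospective (row, col) entry adds 1.0 to on_vec
// when the column belongs to the same process as the row, and to off_vec
// otherwise. preallocate_matrix() then rounds these counts (the +0.1 guards
// against floating point drift), clamps the diagonal-block count to the local
// size, and hands both arrays to MatCreateAIJ() as per-row nonzero estimates;
// an underestimate would fail later because MAT_NEW_NONZERO_ALLOCATION_ERR is set.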

void LinSys_MPIAIJ::view_local_matrix()
{
    // print local subdomain matrix
    MessageOut() << "Printing of local matrix is not supported yet for MPIAIJ matrix. \n";

}


LinSys_MPIAIJ:: ~LinSys_MPIAIJ()
{
    if (status == ALLOCATE) {
        VecDestroy(&on_vec);
        VecDestroy(&off_vec);
    }
}

//**********************************************************************************************

LinSys_MATIS::LinSys_MATIS(std::shared_ptr<LocalToGlobalMap> global_row_4_sub_row, double *sol_array)
: LinSys(global_row_4_sub_row->get_distr()->lsize(), sol_array), lg_map(global_row_4_sub_row)
{
    PetscErrorCode err;

    //MessageOut().fmt("sub size {} \n", subdomain_size);

    // create the PETSc local-to-global mapping from global_row_4_sub_row
    // check possible index range of lg_map to fit into signed int type
    // ASSERT should be replaced with exception
    ASSERT_LE(lg_map->get_distr()->size(), numeric_limits<PetscInt>::max()).error("Index range doesn't fit into signed int!");
    err = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,
            lg_map->size(),
            (const PetscInt*)(&(lg_map->get_map_vector()[0])),
            PETSC_COPY_VALUES, &map_local_to_global);

    OLD_ASSERT(err == 0,"Error in ISLocalToGlobalMappingCreate.");

    // initialize loc_rows array
    loc_rows_size=100;
    loc_rows = new int[loc_rows_size];

    type = MAT_IS;
};
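
// Background: a PETSc MATIS matrix keeps an unassembled sequential matrix for
// the local subdomain on each process together with the local-to-global
// mapping created above; the action of the global operator is obtained by
// summing the overlapping local contributions through that mapping.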


void LinSys_MATIS::start_allocation()
{
    OLD_ASSERT(0, "Not implemented");
    /*
    PetscErrorCode err;

    if (status != NONE) {
        // reinit linear system

    }
    err = MatCreateIS(PETSC_COMM_WORLD, 1, vec_ds.lsize(), vec_ds.lsize(), vec_ds.size(), vec_ds.size(),
            map_local_to_global, &matrix);
    OLD_ASSERT(err == 0,"Error in MatCreateIS.");

    err = MatISGetLocalMat(matrix, &local_matrix);
    OLD_ASSERT(err == 0,"Error in MatISGetLocalMat.");

    // extract scatter
    MatMyIS *mis = (MatMyIS*) matrix->data;
    sub_scatter = mis->ctx;

    subdomain_nz= new int[subdomain_size]; // count local nonzeros for every row of subdomain matrix
    SET_ARRAY_ZERO(subdomain_nz,subdomain_size); // set zeros to the array

    status=ALLOCATE;

    DebugOut() << "allocation started\n";
    */
}

void LinSys_MATIS::preallocate_matrix()
{
    OLD_ASSERT(status == ALLOCATE, "Linear system has to be in ALLOCATE status.");

    // printing subdomain_nz
    //DBGPRINT_INT("subdomain_nz",subdomain_size,subdomain_nz);


    // preallocation of local subdomain matrix
    MatSeqAIJSetPreallocation(local_matrix, 0, subdomain_nz);

    if (symmetric) MatSetOption(matrix, MAT_SYMMETRIC, PETSC_TRUE);
    MatSetOption(matrix, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE);

    delete[] subdomain_nz;
}
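
// For the MATIS format only the sequential subdomain block needs preallocation,
// hence MatSeqAIJSetPreallocation() is applied to local_matrix using the
// per-row counts gathered by preallocate_values() below; column indices are
// not needed for this estimate.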

void LinSys_MATIS::preallocate_values(int nrow,int *rows,int ncol,int *cols)
{
    int i,row, n_loc_rows;
    PetscErrorCode err;

    if (loc_rows_size < nrow) {
        delete[] loc_rows;
        loc_rows_size=nrow;
        loc_rows=new int[loc_rows_size];
    }

    // translate from global to local indexes
    err = ISGlobalToLocalMappingApply(map_local_to_global, IS_GTOLM_DROP, nrow, rows, &n_loc_rows, loc_rows);
    OLD_ASSERT(err == 0,"Error in ISGlobalToLocalMappingApply.");
    OLD_ASSERT(nrow == n_loc_rows,"Not all global indices translated to local indices.");
    // printing subdomain_embedding
    //DBGPRINT_INT("embed_element_to",nrow,loc_rows);


    /* We don't need columns for preallocation.

    if (loc_cols_size < ncol) {
        delete loc_cols;
        loc_cols_size=ncol;
        loc_cols=new int[loc_cols_size];
    }

    // translate from global to local indexes
    ISGlobalToLocalMappingApply(local_to_global, IS_GTOLM_DROP, ncol, cols, &n_loc_cols, loc_cols);
    */
    for (i=0; i<n_loc_rows; i++) {
        row=loc_rows[i];
        subdomain_nz[row] = subdomain_nz[row] + ncol;
    }
}

void LinSys_MATIS::view_local_matrix()
{
    PetscErrorCode err;
//    PetscViewer lab;
//    char numstring[6] = "00000";
//    int myid;
//    int ierr;

//    err = PetscViewerSetFormat(PETSC_VIEWER_STDOUT_SELF,PETSC_VIEWER_ASCII_DENSE);
//    OLD_ASSERT(err == 0,"Error in PetscViewerSetFormat.");

    // print local subdomain matrix
    err = MatView(local_matrix,PETSC_VIEWER_STDOUT_SELF);
    OLD_ASSERT(err == 0,"Error in MatView.");
//
//    ierr=MPI_Comm_rank(PETSC_COMM_WORLD, &(myid));
//    OLD_ASSERT( ! ierr , "Can not get MPI rank.\n" );
//
//    sprintf(numstring,"%5.5d",myid);
//    printf("Numstring is >>>%s<<<\n",numstring);
//
//    PetscViewerASCIIOpen(PETSC_COMM_SELF,numstring,&lab);
//
//    OLD_ASSERT(!(loc_rows == NULL),"Local matrix is not assigned.");
//    err = PetscViewerSetFormat(lab,PETSC_VIEWER_ASCII_DENSE);
//    OLD_ASSERT(err == 0,"Error in PetscViewerSetFormat.");
//
//    // print local subdomain matrix
//    err = MatView(local_matrix,lab);
//    OLD_ASSERT(err == 0,"Error in MatView.");
//
//    PetscViewerDestroy(lab);
}

LinSys_MATIS:: ~LinSys_MATIS()
{
    PetscErrorCode err;

    // destroy mapping
    err = ISLocalToGlobalMappingDestroy(&map_local_to_global);
    OLD_ASSERT(err == 0,"Error in ISLocalToGlobalMappingDestroy.");
    MessageOut().fmt("Error code {} \n", err);


    delete[] loc_rows;
    delete[] subdomain_indices;
    if (status == ALLOCATE) {
        delete[] subdomain_nz;
    }

}

#endif


#ifdef HAVE_ATLAS_ONLY_LAPACK
/*
 * This is a workaround to build Flow with PETSC 3.0 and ATLAS, since one particular and unimportant preconditioner (asa)
 * needs this Lapack function, which is not implemented in ATLAS.
 */
extern "C" {

void dgeqrf_(int m, int n, double **A, double *TAU, double *work, int lwork, int info)
{
}

}
#endif