Flow123d  release_3.0.0-1210-g1d9e2bd
vector_mpi.hh
Go to the documentation of this file.
1 /*!
2  *
3  * Copyright (C) 2015 Technical University of Liberec. All rights reserved.
4  *
5  * This program is free software; you can redistribute it and/or modify it under
6  * the terms of the GNU General Public License version 3 as published by the
7  * Free Software Foundation. (http://www.gnu.org/licenses/gpl-3.0.en.html)
8  *
9  * This program is distributed in the hope that it will be useful, but WITHOUT
10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
11  * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
12  *
13  *
14  * @file vector_mpi.hh
15  * @brief
16  */
17 
18 #ifndef VECTOR_MPI_HH_
19 #define VECTOR_MPI_HH_
20 
21 #include <vector>
22 #include <memory>
23 #include "system/system.hh"
24 #include "system/global_defs.h"
25 #include "mesh/long_idx.hh"
26 
27 #include <petscvec.h>
28 
29 /**
30  * Auxiliary class for output elementwise concentration vectors
31  * in convection transport, sorptions, dual porosity etc.
32  *
33  * Stores data in two formats:
34  * - shared pointer to std::vector of double
35  * - pointer to PETSC vector that use same data
36  *
37  * Allows the following functionalities:
38  * - access to local part
39  * - return shared pointer to std::vector of double
40  * - return pointer to PETSC vector
41  */
42 class VectorMPI {
43 public:
44  typedef typename std::vector<double> VectorData;
45  typedef typename std::shared_ptr< VectorData > VectorDataPtr;
46 
47  VectorMPI(MPI_Comm comm = PETSC_COMM_SELF)
48  : communicator_(comm) {}
49 
50  /// Create shared pointer and PETSC vector with given size. COLLECTIVE.
51  VectorMPI(unsigned int local_size, MPI_Comm comm = PETSC_COMM_WORLD)
52  : communicator_(comm) {
53  resize(local_size);
54  }
55 
56  /// Create PETSc vector with ghost values whose indices are specified in @p ghost_idx.
57  VectorMPI(unsigned int local_size, std::vector<LongIdx> &ghost_idx)
58  : communicator_(PETSC_COMM_WORLD) {
59  resize(local_size, ghost_idx);
60  }
61 
62  /**
63  * Helper method creating VectorMPI of given size with serial Petsc communicator.
64  *
65  * Method is used for better readability of code.
66  */
67  static VectorMPI sequential(unsigned int size)
68  {
69  return VectorMPI(size, PETSC_COMM_SELF);
70  }
71 
72  /**
73  * Resize the vector to given local size. Operation is allowed only if this object is
74  * a unique vector object pointing to the actual data.
75  */
76  void resize(unsigned int local_size) {
77  if (data_ptr_.use_count() ==0) {
78  data_ptr_ = std::make_shared< std::vector<double> >(local_size);
79  } else {
80  ASSERT_DBG( data_ptr_.use_count() == 1 ) ( data_ptr_.use_count() ).error("Object referenced by other pointer. Can not resize.");
81  chkerr(VecDestroy(&data_petsc_));
82  data_ptr_->resize(local_size);
83  }
84  if (communicator_ == PETSC_COMM_SELF)
85  chkerr(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, local_size, &((*data_ptr_)[0]), &data_petsc_));
86  else
87  chkerr(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, local_size, PETSC_DECIDE, &((*data_ptr_)[0]), &data_petsc_));
88  chkerr(VecZeroEntries( data_petsc_ ));
89  }
90 
91  /**
92  * Resize the vector to given local size with ghost values. Indices of ghost values are in ghost_idx.
93  */
94  void resize(unsigned int local_size, std::vector<LongIdx> &ghost_idx) {
95  ASSERT_DBG(communicator_ == PETSC_COMM_WORLD).error("Cannot allocate ghost values in sequential vector.");
96  if (data_ptr_.use_count() ==0) {
97  data_ptr_ = std::make_shared< std::vector<double> >(local_size + ghost_idx.size());
98  } else {
99  ASSERT_DBG( data_ptr_.use_count() == 1 ) ( data_ptr_.use_count() ).error("Object referenced by other pointer. Can not resize.");
100  chkerr(VecDestroy(&data_petsc_));
101  data_ptr_->resize(local_size + ghost_idx.size());
102  }
103  chkerr(VecCreateGhostWithArray(PETSC_COMM_WORLD, local_size, PETSC_DECIDE, ghost_idx.size(), ghost_idx.data(), data_ptr_->data(), &data_petsc_));
104  chkerr(VecZeroEntries( data_petsc_ ));
105  }
106 
107  /// Return new vector with same parallel structure.
108  void duplicate(VectorMPI other) {
109  ASSERT_EQ(this->communicator_, other.communicator_);
110  this->resize(other.data().size());
111  }
112 
113  /// Getter for shared pointer of output data.
114  VectorDataPtr data_ptr()
115  {
116  return data_ptr_;
117  }
118 
119  /// Getter for PETSC vector of output data (e.g. can be used by scatters).
120  Vec &petsc_vec()
121  {
122  return data_petsc_;
123  }
124 
125  void zero_entries() {
126  chkerr(VecZeroEntries( data_petsc_ ));
127  }
128 
129  VectorData &data()
130  {
132  return *data_ptr_;
133  }
134 
135  const VectorData &data() const
136  {
138  return *data_ptr_;
139  }
140 
141  void swap(VectorMPI &other) {
142  ASSERT_EQ(this->communicator_, other.communicator_);
143  ASSERT_EQ(this->data_ptr_->size(), other.data_ptr_->size());
144  uint size = this->data_ptr_->size();
145  std::swap(this->data_ptr_, other.data_ptr_);
146  chkerr(VecDestroy(&data_petsc_));
147  chkerr(VecDestroy(&other.data_petsc_));
148  if (communicator_ == PETSC_COMM_SELF) {
149  chkerr(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, size, &((*data_ptr_)[0]), &data_petsc_));
150  chkerr(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, size, &((*other.data_ptr_)[0]), &other.data_petsc_));
151  } else {
152  chkerr(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, size, PETSC_DECIDE, &((*data_ptr_)[0]), &data_petsc_));
153  chkerr(VecCreateMPIWithArray(PETSC_COMM_WORLD, 1, size, PETSC_DECIDE, &((*other.data_ptr_)[0]), &other.data_petsc_));
154  }
155  }
156 
157 
158  void copy(VectorMPI &other) {
159  ASSERT_EQ(this->communicator_, other.communicator_);
160  ASSERT_EQ(this->data_ptr_->size(), other.data_ptr_->size());
161  chkerr(VecCopy(other.data_petsc_, data_petsc_));
162  }
163 
164 
165  /// local_to_ghost_{begin,end} updates the ghost values on neighbouring processors from local values
167  VecGhostUpdateBegin(data_petsc_, INSERT_VALUES, SCATTER_FORWARD);
168  }
169 
170  /// local_to_ghost_{begin,end} updates the ghost values on neighbouring processors from local values
172  VecGhostUpdateEnd(data_petsc_, INSERT_VALUES, SCATTER_FORWARD);
173  }
174 
175  /// ghost_to_local_{begin,end} updates the local values by adding ghost values from neighbouring processors
177  VecGhostUpdateBegin(data_petsc_, ADD_VALUES, SCATTER_REVERSE);
178  }
179 
180  /// ghost_to_local_{begin,end} updates the local values by adding ghost values from neighbouring processors
182  VecGhostUpdateEnd(data_petsc_, ADD_VALUES, SCATTER_REVERSE);
183  }
184 
185  /// Return size of output data.
186  unsigned int size() const
187  {
188  ASSERT_PTR(data_ptr_).error("Uninitialized data vector.\n");
189  return data_ptr_->size();
190  }
191 
192 
193  /// Destructor.
195  {
196  if (data_ptr_.use_count() == 1)
197  if (data_petsc_) chkerr(VecDestroy(&data_petsc_));
198  }
199 
200  /**
201  * Access to the vector element on local index @p idx.
202  */
203  inline double &operator[](unsigned int idx)
204  {
206  ASSERT_DBG(idx < data_ptr_->size()) (idx) (data_ptr_->size());
207  return (*data_ptr_)[idx];
208  }
209 
210  /**
211  * Access to the vector element on local index @p idx (const version).
212  */
213  inline double &operator[](unsigned int idx) const
214  {
216  ASSERT_DBG(idx < data_ptr_->size()) (idx) (data_ptr_->size());
217  return (*data_ptr_)[idx];
218  }
219 
220 private:
221 
222  /// shared pointer to vector of data
223  VectorDataPtr data_ptr_;
224  /// stored vector of data in PETSC format
226  /// communicator
228 };
229 
230 
231 #endif /* VECTOR_MPI_HH_ */
unsigned int size() const
Return size of output data.
Definition: vector_mpi.hh:186
MPI_Comm communicator_
communicator
Definition: vector_mpi.hh:227
void copy(VectorMPI &other)
Definition: vector_mpi.hh:158
unsigned int uint
int MPI_Comm
Definition: mpi.h:141
VectorMPI(unsigned int local_size, MPI_Comm comm=PETSC_COMM_WORLD)
Create shared pointer and PETSC vector with given size. COLLECTIVE.
Definition: vector_mpi.hh:51
void ghost_to_local_end()
ghost_to_local_{begin,end} updates the local values by adding ghost values from neighbouring processo...
Definition: vector_mpi.hh:181
void local_to_ghost_begin()
local_to_ghost_{begin,end} updates the ghost values on neighbouring processors from local values ...
Definition: vector_mpi.hh:166
void zero_entries()
Definition: vector_mpi.hh:125
void duplicate(VectorMPI other)
Return new vector with same parallel structure.
Definition: vector_mpi.hh:108
std::vector< double > VectorData
Definition: vector_mpi.hh:44
void chkerr(unsigned int ierr)
Replacement of new/delete operator in the spirit of xmalloc.
Definition: system.hh:147
VectorDataPtr data_ptr()
Getter for shared pointer of output data.
Definition: vector_mpi.hh:114
double & operator[](unsigned int idx)
Definition: vector_mpi.hh:203
double & operator[](unsigned int idx) const
Definition: vector_mpi.hh:213
~VectorMPI()
Destructor.
Definition: vector_mpi.hh:194
const VectorData & data() const
Definition: vector_mpi.hh:135
VectorMPI(MPI_Comm comm=PETSC_COMM_SELF)
Definition: vector_mpi.hh:47
Global macros to enhance readability and debugging, general constants.
VectorDataPtr data_ptr_
shared pointer to vector of data
Definition: vector_mpi.hh:223
Vec data_petsc_
stored vector of data in PETSC format
Definition: vector_mpi.hh:225
void resize(unsigned int local_size)
Definition: vector_mpi.hh:76
Vec & petsc_vec()
Getter for PETSC vector of output data (e.g. can be used by scatters).
Definition: vector_mpi.hh:120
void swap(nlohmann::json &j1, nlohmann::json &j2) noexcept(is_nothrow_move_constructible< nlohmann::json >::value and is_nothrow_move_assignable< nlohmann::json >::value)
exchanges the values of two JSON objects
Definition: json.hpp:8688
void local_to_ghost_end()
local_to_ghost_{begin,end} updates the ghost values on neighbouring processors from local values ...
Definition: vector_mpi.hh:171
void resize(unsigned int local_size, std::vector< LongIdx > &ghost_idx)
Definition: vector_mpi.hh:94
#define ASSERT_PTR(ptr)
Definition of assert macro checking non-null pointer (PTR)
Definition: asserts.hh:335
static VectorMPI sequential(unsigned int size)
Definition: vector_mpi.hh:67
void swap(VectorMPI &other)
Definition: vector_mpi.hh:141
#define ASSERT_DBG(expr)
Definition: asserts.hh:349
VectorMPI(unsigned int local_size, std::vector< LongIdx > &ghost_idx)
Create PETSc vector with ghost values whose indices are specified in ghost_idx.
Definition: vector_mpi.hh:57
void ghost_to_local_begin()
ghost_to_local_{begin,end} updates the local values by adding ghost values from neighbouring processo...
Definition: vector_mpi.hh:176
std::shared_ptr< VectorData > VectorDataPtr
Definition: vector_mpi.hh:45
VectorData & data()
Definition: vector_mpi.hh:129
#define ASSERT_EQ(a, b)
Definition of comparative assert macro (EQual)
Definition: asserts.hh:327