Flow123d  last_with_con_2.0.0-4-g42e6930
sparse_graph.hh
Go to the documentation of this file.
1 /*!
2  *
3  * Copyright (C) 2015 Technical University of Liberec. All rights reserved.
4  *
5  * This program is free software; you can redistribute it and/or modify it under
6  * the terms of the GNU General Public License version 3 as published by the
7  * Free Software Foundation. (http://www.gnu.org/licenses/gpl-3.0.en.html)
8  *
9  * This program is distributed in the hope that it will be useful, but WITHOUT
10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
11  * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
12  *
13  *
14  * @file sparse_graph.hh
15  * @brief Distributed sparse graphs, partitioning.
16  */
17 
18 #ifndef SPARSE_GRAPH_HH_
19 #define SPARSE_GRAPH_HH_
20 
21 
22 #include <vector>
23 #include <stack>
24 #include <ostream>
25 #include <petscmat.h>
26 
27 #include "la/distribution.hh"
28 using namespace std;
29 
30 
31 
32 
33 
34 /**
35  * @brief Virtual class for construction and partitioning of a distributed sparse graph.
36  *
37  * An empty graph is created in constructor (collective). You has to provide either Distribution of the vertices or
38  * local number of vertices and communicator. Than you can add edges with set_edge method. Added edges are
39  * stored in the buffer on calling processor. Than you have to call finalize method (collective) to make sparse graph.
40  * Graph partitioning is either directly through METIS or through PETSC (ParMETIS, and others). There is derived class for
41  * particular partitioning method.
42  *
 * Actual implementation is not fully scalable. Row sizes (vtx degrees)
 * are accumulated in global arrays, then communicated to all processors and finally
 * only the local part is used. There should be only the local part of the final array with an additional buffer for off-processor
 * values.
 *
 * Another issue is that we have a problem using ParMETIS, so actually only METIS can be used on
 * a parallel graph localized on the first processor.
50  *
51  * TODO: use Boost parallel graphs and write interface form them to METIS, Parmetis or PETSC
52  *
53  */
54 class SparseGraph {
55 public:
56 
57  /**
58  * Construct an empty graph form given Distribution of vertices.
59  */
60  SparseGraph(const Distribution &distr);
61 
62  /**
63  * Construct an empty graph for given number of local vertices of the graph.
64  * Make its own distribution object.
65  */
66  SparseGraph(int loc_size, MPI_Comm comm);
67 
68  /**
69  * Store an edge. We just store all inserted edges and count support arrays to
70  * communicate edges to the right processors
71  *
72  * @param[in] a - starting vertex of the edge
73  * @param[in] b - terminal vertex of the edge
74  * @param[in] weight - optional weight of the edge (integer)
75  *
76  */
77  void set_edge(const int a, const int b, int weight=1);
78 
79  /**
80  * Set position and possibly weight of a local vertex. Assume that vtx is an index of local vertex.
81  * Positions are used for initial distribution when using ParMETIS.
82  *
83  * @param[in] vtx - global vertex index (from zero)
84  * @param[in] xyz - coordinates of vetrex position
85  * @param[in] weight - optional weight of the vertex
86  *
87  */
88  void set_vtx_position(const int vtx, const float xyz[3], int weight=1);
89 
90  /**
91  * @brief Make sparse graph structures: rows, adj
92  *
93  * 1) send edges to the owner of .from vertex
94  * 2) sort local edges
95  * 3) fill rows, adj; remove duplicities
96  */
97  //Assume that vtx is an index of local vertex.
98  void finalize();
99 
100  /**
101  * Fills an array of integers by the partitioning of the vertices.
102  * The array size is number of local verices according to the distribution of the graph.
103  * see get_distr() method. The array has to be PREALLOCATED by user to the correct size.
104  *
105  */
106  virtual void partition(int *loc_part) = 0;
107 
108  /**
109  * Check if the subgraphs of the given partitioning are connected.
110  * Works only for fully localized graph (on one processor).
111  */
112  bool check_subgraph_connectivity(int *part);
113  void DFS(int vtx);
114 
115  /**
116  * Returns reference to the distribution of the graph.
117  */
118  Distribution get_distr() {return vtx_distr;}
119 
120  /**
121  * Simple graph output. Has to be already finalized.
122  */
123  void view();
124 
125  /**
126  * Check symmetry of the local part of the graph. Has to be already finalized.
127  */
128  bool is_symmetric();
129  virtual ~SparseGraph();
130 
131  //friend ostream & operator <<(ostream & out, const SparseGraph &sg);
132 protected:
133  // Auxiliary Edge type.
134  typedef struct {
135  int from, to, weight; ///< Edge weights for communication (optional).
136  } Edge;
137 
138 
139  Distribution vtx_distr; ///< distribution of vertexes
140  int *rows; ///< starts of vtx rows in adj array,
141  ///< vtx i have edge to vertexes adj[rows[i]] .. adj[rows[i+1]-1]
142  float * vtx_XYZ; ///< optional vertex coordinates (global array)
143  int *vtx_weights; ///< Vertex weights for computations (optional).
144  int *adj; ///< sparse adjency
146 
147  int *part_to_check; ///< created partitioning used through check of connectivity
148  int proc_to_check; ///< subgraph to check
149  std::vector<int> checked_vtx; ///< coloring of DFS algorithm
150 
151  // temporary arrays
152  vector< stack<Edge> > adj_of_proc; ///< storage for graph edges for individual processors
153 
154  /**
155  * Use virtual method for allocation since for PETSC interface we have to use PetscMalloc.
156  */
157  virtual void allocate_sparse_graph(int lsize_vtxs, int lsize_adj)=0;
158 
159  friend bool operator <(const Edge &a,const Edge &b);
160 
161 };
162 
163 ostream & operator <<(ostream & out, const SparseGraph &sg);
164 
165 
166 
167 /**
168  * Sparse Graph that use PETSC for partitioning.
169  */
171 {
172 
173 
174 public:
175  /**
176  * Construct empty graph only from number of vertices, use default initial distribution.
177  */
178  SparseGraphPETSC(int n_vtxs, MPI_Comm comm)
179  : SparseGraph(Distribution(DistributionBlock(), n_vtxs,comm)), petsc_adj_mat(0), petsc_part(0), part_IS(0) {}
180 
181  /**
182  * Construct empty graph with given distribution of vertices.
183  */
185  : SparseGraph(distr), petsc_adj_mat(0), petsc_part(0), part_IS(0) {}
186  /**
187  * Implementation of partitioning using PETSC interface to various partitioning software.
188  */
189  virtual void partition(int *loc_part);
190 
191 private:
193  MatPartitioning petsc_part;
195 
196  virtual void allocate_sparse_graph(int lsize_vtxs, int lsize_adj);
197  virtual ~SparseGraphPETSC();
198 
199 };
200 
201 
202 /**
203  * Sparse Graph that use METIS for partitioning.
204  */
206 {
207 public:
208  /**
209  * Construct empty graph only from number of vertices,
210  * use localized distribution in order to use sequential METIS library.
211  */
212  SparseGraphMETIS(int n_vtxs, MPI_Comm comm)
213  : SparseGraph(Distribution(DistributionLocalized(), n_vtxs,comm)) {}
214 
215  /**
216  * Construct empty graph with given distribution of vertices.
217  */
219  : SparseGraph(distr) {}
220 
221  /**
222  * Implementation of partitioning using METIS.
223  */
224  virtual void partition(int *loc_part);
225 
226 private:
227  virtual void allocate_sparse_graph(int lsize_vtxs, int lsize_adj);
228  virtual ~SparseGraphMETIS();
229 };
230 
231 
232 #endif /* SPARSE_GRAPH_HH_ */
MatPartitioning petsc_part
SparseGraphMETIS(int n_vtxs, MPI_Comm comm)
ostream & operator<<(ostream &out, const SparseGraph &sg)
Output a sparse graph.
int MPI_Comm
Definition: mpi.h:141
SparseGraphPETSC(int n_vtxs, MPI_Comm comm)
int * part_to_check
created partitioning used through check of connectivity
int weight
Edge weights for communication (optional).
Virtual class for construction and partitioning of a distributed sparse graph.
Definition: sparse_graph.hh:54
Distribution vtx_distr
distribution of vertexes
vector< stack< Edge > > adj_of_proc
storage for graph edges for individual processors
int proc_to_check
subgraph to check
std::vector< int > checked_vtx
coloring of DFS algorithm
Distribution get_distr()
Support classes for parallel programing.
float * vtx_XYZ
optional vertex coordinates (global array)
bool operator<(const SparseGraph::Edge &a, const SparseGraph::Edge &b)
Definition: sparse_graph.cc:89
SparseGraphMETIS(const Distribution &distr)
SparseGraphPETSC(const Distribution &distr)
int * adj_weights
int * vtx_weights
Vertex weights for computations (optional).
int * adj
sparse adjency