19 "Different algorithms to make the sparse graph with weighted edges\n"
20 "from the multidimensional mesh. Main difference is dealing with \n"
21 "neighborings of elements of different dimension.")
22 .
add_value(any_neighboring,
"any_neighboring",
"Add edge for any pair of neighboring elements.")
23 .
add_value(any_weight_lower_dim_cuts,
"any_wight_lower_dim_cuts",
"Same as before and assign higher weight to cuts of lower dimension in order to make them stick to one face.")
24 .
add_value(same_dimension_neighboring,
"same_dimension_neghboring",
"Add edge for any pair of neighboring elements of same dimension (bad for matrix multiply).")
28 =
IT::Selection(
"PartTool",
"Select the partitioning tool to use.")
29 .
add_value(PETSc,
"PETSc",
"Use PETSc interface to various partitioning tools.")
30 .
add_value(METIS,
"METIS",
"Use direct interface to Metis.")
34 =
IT::Record(
"Partition",
"Setting for various types of mesh partitioning." )
35 .
declare_key(
"tool", Partitioning::tool_sel,
IT::Default(
"METIS"),
"Software package used for partitioning. See corresponding selection.")
36 .
declare_key(
"graph_type", Partitioning::graph_type_sel,
IT::Default(
"any_neighboring"),
"Algorithm for generating graph and its weights from a multidimensional mesh.")
42 : mesh_(mesh), in_(in), graph_(NULL), loc_part_(NULL), init_el_ds_(NULL)
98 if (e_idx != ele.index()) {
107 for (i_neigh = 0; i_neigh < ele->n_neighs_vb; i_neigh++) {
108 n_s = ele->neigh_vb[i_neigh]->edge()->n_sides;
109 for (i_s = 0; i_s < n_s; i_s++) {
159 Distribution * &new_ds,
int * &id_4_loc,
int * &new_4_id) {
161 IS part, new_numbering;
162 unsigned int size = old_ds.
size();
163 int new_counts[old_ds.
np()];
169 ISCreateGeneral(PETSC_COMM_WORLD, old_ds.
lsize(), loc_part, PETSC_COPY_VALUES, &part);
170 ISPartitioningCount(part, old_ds.
np(), new_counts);
172 new_ds =
new Distribution((
unsigned int *) new_counts, PETSC_COMM_WORLD);
173 ISPartitioningToNumbering(part, &new_numbering);
175 old_4_new = (
int *)
xmalloc(size *
sizeof(
int));
176 id_4_loc = (
int *)
xmalloc(new_ds->lsize() *
sizeof(int));
177 new_4_id = (
int *)
xmalloc((n_ids + 1) *
sizeof(int));
180 AOCreateBasicIS(new_numbering, PETSC_NULL, &new_old_ao);
181 for (
unsigned int i = 0; i < size; i++)
183 AOApplicationToPetsc(new_old_ao, size, old_4_new);
184 AODestroy(&(new_old_ao));
189 for (
unsigned int i_new = new_ds->begin(); i_new < new_ds->end(); i_new++) {
190 id_4_loc[i_loc++] = id_4_old[old_4_new[i_new]];
193 for (i_loc = 0; i_loc <= n_ids; i_loc++)
194 new_4_id[i_loc] = -1;
195 for (
unsigned int i_new = 0; i_new < size; i_new++)
196 new_4_id[id_4_old[old_4_new[i_new]]] = i_new;
212 seq_part_ = make_shared< vector<int> >(seq_size);
unsigned int size() const
get global size
Mesh * mesh_
The input mesh.
~Partitioning()
Destructor.
shared_ptr< vector< int > > subdomain_id_field_data()
void id_maps(int n_ids, int *id_4_old, Distribution *&new_ds, int *&id_4_loc, int *&new_4_id)
void make_element_connection_graph()
#define FOR_EDGE_SIDES(i, j)
#define FOR_ELEMENTS(_mesh_, __i)
shared_ptr< vector< int > > seq_part_
Sequential partitioning for output.
#define ELEMENT_FULL_ITER(_mesh_, i)
#define MPI_Gatherv(sendbuf, sendcount, sendtype, recvbuf, recvcounts, displs, recvtype, root, comm)
#define FOR_ELEMENT_SIDES(i, j)
const unsigned int * get_lsizes_array()
get local sizes array
unsigned int n_elements() const
bool is_local(unsigned int idx) const
identify local index
Use PETSc interface to various partitioning tools.
Input::Record in_
Input Record accessor.
static Input::Type::Record input_type
const unsigned int * get_starts_array() const
get local starts array
MPI_Comm get_comm() const
void finalize()
Make sparse graph structures: rows, adj.
static Input::Type::Selection tool_sel
virtual void partition(int *loc_part)=0
Use direct interface to Metis.
static Input::Type::Selection graph_type_sel
Input specification objects.
unsigned int np() const
get num of processors
void * xmalloc(size_t size)
Memory allocation with checking.
Distribution * init_el_ds_
Original distribution of elements. Depends on type of partitioner.
unsigned int myp() const
get my processor
Support classes for parallel programming.
ElementFullIter element() const
MPI_Comm get_comm() const
Returns communicator.
Distributed sparse graphs, partitioning.
const Distribution * get_init_distr() const
const int * get_loc_part() const
SparseGraph * graph_
Graph used for partitioning the mesh.
Add edge for any pair of neighboring elements of same dimension (bad for matrix multiply) ...
Partitioning(Mesh *mesh, Input::Record in)
SideIter side(const unsigned int i) const
void set_edge(const int a, const int b, int weight=1)
int * loc_part_
Partition numbers for local elements in the original distribution of elements given by init_el_ds_...
ElementVector element
Vector of elements of the mesh.
unsigned int lsize(int proc) const
get local size