34 "Different algorithms to make the sparse graph with weighted edges\n" 35 "from the multidimensional mesh. Main difference is dealing with \n" 36 "neighboring of elements of different dimension.")
38 .
add_value(
any_weight_lower_dim_cuts,
"any_weight_lower_dim_cuts",
"Same as before and assign higher weight to cuts of lower dimension in order to make them stick to one face.")
    return IT::Selection("PartTool",
            "Select the partitioning tool to use.")
        .add_value(PETSc, "PETSc",
            "Use PETSc interface to various partitioning tools.")
    static IT::Record input_type =
        IT::Record("Partition",
            "Setting for various types of mesh partitioning.")
    // skip elements that are not local in the original distribution
    if ( !edistr.is_local( ele.idx() ) )
        continue;

    // connect the element with every element sharing one of its edges
    for (unsigned int si=0; si<ele->n_sides(); si++) {

        for (unsigned int li=0; li<edg->n_sides; li++) {

            // exclude the element itself
            if ( e_idx != ele.idx() ) {

    // add connections across neighborings of elements of different dimension
    for (i_neigh = 0; i_neigh < ele->n_neighs_vb(); i_neigh++) {
        n_s = ele->neigh_vb[i_neigh]->edge()->n_sides;
        for (i_s = 0; i_s < n_s; i_s++) {
            e_idx = ele->neigh_vb[i_neigh]->edge()->side(i_s)->element().idx();
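            // The loop body plausibly continues as sketched here: both directions
            // of the connection are inserted with a raised weight (set_edge is part
            // of the SparseGraph interface; the edge_weight value is an assumption),
            // so that cuts of lower dimension stick to one face.
            graph_->set_edge( ele.idx(), e_idx, edge_weight );
            graph_->set_edge( e_idx, ele.idx(), edge_weight );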
    // refuse to decompose a mesh with fewer elements than processors
    if (mesh_size < num_of_procs) {
        THROW( ExcDecomposeMesh() << EI_NElems( mesh_size ) << EI_NProcs( num_of_procs ) );
    }
    IS part, new_numbering;
    unsigned int size = old_ds.size();    // global size of the distributed array
    int new_counts[old_ds.np()];          // new local size for every processor

    // wrap the local partition assignment into a global PETSc index set
    // and count how many indices each processor receives
    ISCreateGeneral(PETSC_COMM_WORLD, old_ds.lsize(), loc_part, PETSC_COPY_VALUES, &part);
    ISPartitioningCount(part, old_ds.np(), new_counts);

    // build the new distribution and the new global numbering
    new_ds = new Distribution((unsigned int *) new_counts, PETSC_COMM_WORLD);
    ISPartitioningToNumbering(part, &new_numbering);
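    // Worked example (illustrative, not from the original source): with two
    // processors and the partition assignment loc_part = {0, 1, 0, 1},
    // ISPartitioningCount yields new_counts = {2, 2}, and
    // ISPartitioningToNumbering numbers the indices assigned to processor 0
    // first, producing the new numbering {0, 2, 1, 3}.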
    old_4_new = new int [size];
    id_4_loc = new LongIdx [ new_ds->lsize() ];
    new_4_id = new LongIdx [ n_ids + 1 ];

    // application ordering = new numbering, PETSc ordering = old numbering
    AOCreateBasicIS(new_numbering, PETSC_NULL, &new_old_ao);
    ISDestroy(&new_numbering);
    for (unsigned int i = 0; i < size; i++)
        old_4_new[i] = i;
    AOApplicationToPetsc(new_old_ao, size, old_4_new);
    AODestroy(&(new_old_ao));
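    // Why the identity array works (explanatory note, not an original comment):
    // new_old_ao translates application indices (new numbering) into PETSc
    // indices (old numbering), so applying it to the identity array leaves
    // old_4_new[i_new] holding the old index of the element numbered i_new,
    // i.e. the complete new->old permutation, available on every processor.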
    // IDs of the elements that are local in the new distribution
    for (unsigned int i_new = new_ds->begin(); i_new < new_ds->end(); i_new++) {
        id_4_loc[i_loc++] = id_4_old[old_4_new[i_new]];
    }
    // new global index for every ID; -1 marks unused IDs
    for (i_loc = 0; i_loc <= n_ids; i_loc++)
        new_4_id[i_loc] = -1;
    for (unsigned int i_new = 0; i_new < size; i_new++)
        new_4_id[id_4_old[old_4_new[i_new]]] = i_new;
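A usage sketch of the resulting maps, assuming the public id_maps interface of
Partitioning declared in the header (variable names are illustrative):

    Distribution *new_ds = nullptr;
    LongIdx *id_4_loc = nullptr, *new_4_id = nullptr;
    partitioning.id_maps(max_elem_id, id_4_old, new_ds, id_4_loc, new_4_id);
    // id_4_loc[i]  : ID of the i-th element local to this processor
    // new_4_id[id] : new global index of the element with that ID, -1 if unused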
    seq_part_ = make_shared< vector<int> >(seq_size);
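seq_part_ stores the sequential partitioning for output; a minimal sketch of how
the local partition arrays could be gathered into it, assuming the Distribution
accessors used above (the MPI_Allgatherv call and the int casts are assumptions,
the original may gather differently):

    std::vector<int> loc_part_int(loc_part_, loc_part_ + init_el_ds_->lsize());
    MPI_Allgatherv(loc_part_int.data(), (int) loc_part_int.size(), MPI_INT,
                   seq_part_->data(),
                   (const int *) init_el_ds_->get_lsizes_array(),
                   (const int *) init_el_ds_->get_starts_array(),
                   MPI_INT, init_el_ds_->get_comm());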