/*!\file: PartitionSets.cpp
 * \brief partition degree of freedom sets
 */

#include "stdio.h"
#include "../../shared/shared.h"
#include "../../include/include.h"

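/*PartitionSets: for the "b" and "c" subsets of the "a" degree-of-freedom set, build parallel
 * vectors listing the 1-based (MATLAB-style) positions that each b- or c-flagged dof occupies
 * within the "a" set. flags_a, flags_b and flags_c mark set membership per dof; b and c must
 * partition a exclusively. The gsize argument is not used in the body of this routine.*/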
void PartitionSets(Vec* ppartitionb,Vec* ppartitionc,Vec flags_a,Vec flags_b,Vec flags_c,int gsize){

    int i;

    /*output: */
    Vec partitionb=NULL;
    Vec partitionb_vec=NULL;
    double* partitionb_local=NULL;
    Vec partitionc=NULL;
    Vec partitionc_vec=NULL;
    double* partitionc_local=NULL;

    /*intermediary: */
    int local_size;
    int* idxm=NULL;
    double* flags_a_local=NULL;
    double* flags_b_local=NULL;
    double* flags_c_local=NULL;
    int lower_row,upper_row,range;
    int asize,bsize,csize;
    int acount,bcount,ccount,dummy;

    /*First recover flags locally on each cpu: */
    VecGetOwnershipRange(flags_a,&lower_row,&upper_row); upper_row--; range=upper_row-lower_row+1;
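    /*Note: PETSc's VecGetOwnershipRange returns upper_row as one past the last locally
     * owned row, hence the decrement above.*/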

    if(range){
        idxm=(int*)xmalloc(range*sizeof(int));
        flags_a_local=(double*)xmalloc(range*sizeof(double));
        flags_b_local=(double*)xmalloc(range*sizeof(double));
        flags_c_local=(double*)xmalloc(range*sizeof(double));
    }
    for(i=0;i<range;i++)idxm[i]=lower_row+i;

    VecGetValues(flags_a,range,idxm,flags_a_local);
    VecGetValues(flags_b,range,idxm,flags_b_local);
    VecGetValues(flags_c,range,idxm,flags_c_local);
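    /*VecGetValues can only access entries owned by the local process, which is why idxm
     * spans exactly this cpu's ownership range.*/
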
    /*Figure out local size of a, b and c sets on each cpu: */
    asize=0;
    bsize=0;
    csize=0;
    for(i=0;i<range;i++){
        if (flags_a_local[i]){
            if (flags_b_local[i])bsize++;
            if (flags_c_local[i])csize++;
            asize++;
        }
    }
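
    /*Note: a dof flagged in both b and c is counted in both bsize and csize here; such an
     * overlap is caught as an error in the plugging loop below.*/
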
    /*Figure out start of "a" set local cpu counter: */
    GetOwnershipBoundariesFromRange(&acount,&dummy,asize);
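    /*acount now holds the global (across all cpus) offset of this cpu's first "a"-set dof,
     * presumably computed by GetOwnershipBoundariesFromRange from each cpu's local asize.*/
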
    /*Allocate local partitioning vectors: */
    if(bsize)partitionb_local=(double*)xmalloc(bsize*sizeof(double));
    if(csize)partitionc_local=(double*)xmalloc(csize*sizeof(double));

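    /*Each entry of partitionb_local (resp. partitionc_local) records the 1-based position,
     * within the global "a" set, of the corresponding b-set (resp. c-set) dof: */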
    /*Loop again, and plug values: */
    bcount=0;
    ccount=0;
    for(i=0;i<range;i++){
        if (flags_a_local[i]){
            if (flags_b_local[i]){
                partitionb_local[bcount]=acount+1; //matlab indexing
                bcount++;
            }
            if (flags_c_local[i]){
                partitionc_local[ccount]=acount+1; //matlab indexing
                ccount++;
            }
            if (flags_b_local[i] && flags_c_local[i]) _error_("%s%i%s"," for dof ",i,": breach of exclusive partitioning between sets");

            acount++;
        }
    }

    /*Now, using the local partitions for b and c, create parallel vectors: */
    VecCreateMPIWithArray(MPI_COMM_WORLD,bsize,PETSC_DECIDE,partitionb_local,&partitionb_vec);
    VecCreateMPIWithArray(MPI_COMM_WORLD,csize,PETSC_DECIDE,partitionc_local,&partitionc_vec);

    VecAssemblyBegin(partitionb_vec);
    VecAssemblyEnd(partitionb_vec);

    VecAssemblyBegin(partitionc_vec);
    VecAssemblyEnd(partitionc_vec);

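    /*The assembly calls should be redundant here, since the vectors were created directly
     * on top of fully populated arrays; they are kept as cheap safety no-ops.*/
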
    /*Now we must duplicate these vectors, because PETSc specifies on
     * http://www.mcs.anl.gov/petsc/petsc-as/snapshots/petsc-current/docs/manualpages/Vec/VecCreateMPIWithArray.html
     * that "PETSc does NOT free the array when the vector is destroyed via VecDestroy().
     * The user should not free the array until the vector is destroyed."
     *
     * The only way we can keep the vector partitionb_vec while destroying the array partitionb_local
     * is to duplicate the vector, then destroy the initial vector as well as the array: */
    VecDuplicate(partitionb_vec,&partitionb); VecCopy(partitionb_vec,partitionb); VecFree(&partitionb_vec); xfree((void**)&partitionb_local);
    VecDuplicate(partitionc_vec,&partitionc); VecCopy(partitionc_vec,partitionc); VecFree(&partitionc_vec); xfree((void**)&partitionc_local);
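
    /*An alternative (not used here) would be to create the vectors with VecCreateMPI and fill
     * them through VecSetValues, letting PETSc own the storage and avoiding the duplicate/copy.*/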

    /*Free resources:*/
    xfree((void**)&idxm);
    xfree((void**)&flags_a_local);
    xfree((void**)&flags_b_local);
    xfree((void**)&flags_c_local);

    /*Assign output pointers: */
    *ppartitionb=partitionb;
    *ppartitionc=partitionc;
}