Changeset 13595
- Timestamp: 10/10/12 21:37:39 (13 years ago)
- Location: issm/trunk-jpl/src/c/toolkits/petsc
- Files: 8 edited
Legend:
- Unmodified
- Added
- Removed
-
issm/trunk-jpl/src/c/toolkits/petsc/objects/PetscMat.cpp
r12859 → r13595:
     PetscMat::PetscMat(int M,int N){
-       this->matrix=NewMat(M,N);
+       this->matrix=NewMat(M,N,IssmComm::GetComm());
     }
     /*}}}*/
     …
     _assert_(this->matrix);
     _assert_(X->vector);
-    MatMultPatch(this->matrix,X->vector,AX->vector);
+    MatMultPatch(this->matrix,X->vector,AX->vector,IssmComm::GetComm());
     …
     IssmDouble* output=NULL;
-    MatToSerial(&output,this->matrix);
+    MatToSerial(&output,this->matrix,IssmComm::GetComm());
     return output;
issm/trunk-jpl/src/c/toolkits/petsc/objects/PetscVec.cpp
r12863 → r13595:
     IssmDouble* vec_serial=NULL;
-    VecToMPISerial(&vec_serial, this->vector);
+    VecToMPISerial(&vec_serial, this->vector,IssmComm::GetComm());
     return vec_serial;
issm/trunk-jpl/src/c/toolkits/petsc/patches/GetOwnershipBoundariesFromRange.cpp
r12431 → r13595:
     #include "../../../shared/shared.h"

-    void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range){
+    void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range,COMM comm){

     /*externals :*/
     …
     /*Gather all range values into allranges, for all nodes*/
     allranges=xNew<int>(num_procs);
-    MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,MPI_COMM_WORLD);
+    MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);

     /*From all ranges, get lower row and upper row*/
issm/trunk-jpl/src/c/toolkits/petsc/patches/MatMultPatch.cpp
r12431 → r13595:
     /*Function prototypes: */
-    int MatMultCompatible(Mat A,Vec x);
+    int MatMultCompatible(Mat A,Vec x,COMM comm);
     void VecRelocalize(Vec* outvector,Vec vector,int m);

-    void MatMultPatch(Mat A,Vec X, Vec AX){ //same prototype as MatMult in Petsc
+    void MatMultPatch(Mat A,Vec X, Vec AX,COMM comm){ //same prototype as MatMult in Petsc

     int m,n;
     …
     _assert_(A); _assert_(X);
-    if (MatMultCompatible(A,X)){
+    if (MatMultCompatible(A,X,comm)){
         MatMult(A,X,AX);
     }
     …
-    int MatMultCompatible(Mat A,Vec x){
+    int MatMultCompatible(Mat A,Vec x,COMM comm){

     /*error management*/
     …
     /*synchronize result: */
-    MPI_Reduce (&result,&sumresult,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD);
-    MPI_Bcast(&sumresult,1,MPI_INT,0,MPI_COMM_WORLD);
+    MPI_Reduce (&result,&sumresult,1,MPI_INT,MPI_SUM,0,comm );
+    MPI_Bcast(&sumresult,1,MPI_INT,0,comm);
     if (sumresult!=num_procs){
         result=0;
issm/trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp
r12431 → r13595:
     #include "../../../shared/shared.h"

-    void MatToSerial(double** poutmatrix,Mat matrix){
+    void MatToSerial(double** poutmatrix,Mat matrix,COMM comm){

     int i;
     …
     buffer[1]=lower_row;
     buffer[2]=range;
-    MPI_Send(buffer,3,MPI_INT,0,1,MPI_COMM_WORLD);
-    if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,MPI_COMM_WORLD);
+    MPI_Send(buffer,3,MPI_INT,0,1,comm);
+    if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,comm);
     }
     if (my_rank==0){
-    MPI_Recv(buffer,3,MPI_INT,i,1,MPI_COMM_WORLD,&status);
-    if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,MPI_COMM_WORLD,&status);
+    MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
+    if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,comm,&status);
     }
issm/trunk-jpl/src/c/toolkits/petsc/patches/NewMat.cpp
r12729 → r13595:
     /*NewMat(int M,int N){{{*/
-    Mat NewMat(int M,int N){
+    Mat NewMat(int M,int N,COMM comm){

     /*output:*/
     …
     #if _PETSC_MAJOR_ == 3 && _PETSC_MINOR_ > 2
-    MatCreateAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
+    MatCreateAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     #else
-    MatCreateMPIAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
+    MatCreateMPIAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     #endif
     …
     /*}}}*/
     /*NewMat(int M,int N,double sparsity){{{*/
-    Mat NewMat(int M,int N,double sparsity){
+    Mat NewMat(int M,int N,double sparsity,COMM comm){

     /*output:*/
     …
     #if _PETSC_MAJOR_ == 3 && _PETSC_MINOR_ > 2
     if(sparsity==1){
-    MatCreateDense(MPI_COMM_WORLD,m,n,M,N,NULL,&outmatrix);
+    MatCreateDense(comm,m,n,M,N,NULL,&outmatrix);
     }
     else{
-    MatCreateAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
+    MatCreateAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     }
     #else
-    MatCreateMPIAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
+    MatCreateMPIAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     #endif
     …
     /*}}}*/
     /*NewMat(int M,int N,int connectivity,int numberofdofspernode){{{*/
-    Mat NewMat(int M,int N,int connectivity,int numberofdofspernode){
+    Mat NewMat(int M,int N,int connectivity,int numberofdofspernode,COMM comm){

     /*output:*/
     …
     o_nz=(int)connectivity*numberofdofspernode/2;

-    MatCreate(MPI_COMM_WORLD,&outmatrix);
+    MatCreate(comm,&outmatrix);
     MatSetSizes(outmatrix,m,n,M,N);
     MatSetFromOptions(outmatrix);
issm/trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp
r12431 → r13595:
     #include "../../../shared/shared.h"

-    int VecToMPISerial(double** pgathered_vector, Vec vector){
+    int VecToMPISerial(double** pgathered_vector, Vec vector,COMM comm){

     int i;
     …
     buffer[1]=lower_row;
     buffer[2]=range;
-    MPI_Send(buffer,3,MPI_INT,0,1,MPI_COMM_WORLD);
-    if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,MPI_COMM_WORLD);
+    MPI_Send(buffer,3,MPI_INT,0,1,comm);
+    if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,comm);
     }
     if (my_rank==0){
-    MPI_Recv(buffer,3,MPI_INT,i,1,MPI_COMM_WORLD,&status);
-    if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,MPI_COMM_WORLD,&status);
+    MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
+    if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,comm,&status);
     }
     …
     /*Now, broadcast gathered_vector from node 0 to other nodes: */
-    MPI_Bcast(gathered_vector,vector_size,MPI_DOUBLE,0,MPI_COMM_WORLD);
+    MPI_Bcast(gathered_vector,vector_size,MPI_DOUBLE,0,comm);

     /*Assign output pointers: */
issm/trunk-jpl/src/c/toolkits/petsc/patches/petscpatches.h
r12101 → r13595:
     #include "./SolverEnum.h"
     #include "../../toolkitsenums.h"
+    #include "../../../include/types.h"

     class Parameters;

     Vec NewVec(int size,bool fromlocalsize=false);
-    Mat NewMat(int M,int N);
-    Mat NewMat(int M,int N,double sparsity);
-    Mat NewMat(int M,int N,int connectivity,int numberofdofspernode);
+    Mat NewMat(int M,int N,COMM comm);
+    Mat NewMat(int M,int N,double sparsity,COMM comm);
+    Mat NewMat(int M,int N,int connectivity,int numberofdofspernode, COMM comm);

     int VecTranspose(Vec* tvector,Vec vector);
-    int VecToMPISerial(double** pgathered_vector, Vec vector);
-    void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range);
+    int VecToMPISerial(double** pgathered_vector, Vec vector,COMM comm);
+    void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range,COMM comm);
     void MatFree(Mat* pmat);
     void ISFree(IS* pis);
     …
     void PetscOptionsDetermineSolverType(int* psolver_type);
     void VecMerge(Vec A, Vec B, double* row_partition_vector,int row_partition_size);
-    void MatMultPatch(Mat A,Vec X, Vec AX);
-    void MatToSerial(double** poutmatrix,Mat matrix);
+    void MatMultPatch(Mat A,Vec X, Vec AX,COMM comm);
+    void MatToSerial(double** poutmatrix,Mat matrix,COMM comm);
     void VecDuplicatePatch(Vec* output, Vec input);
     Vec SerialToVec(double* vector,int vector_size);
Note: See TracChangeset for help on using the changeset viewer.