Changeset 13595


Timestamp:
10/10/12 21:37:39
Author:
Eric.Larour
Message:

CHG: more changes to switch from my_rank and num_procs to IssmComm::GetSize and IssmComm::GetRank
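
For context, the pattern applied across these files is the retirement of the my_rank/num_procs globals and the hard-wired MPI_COMM_WORLD in favor of a communicator owned by the IssmComm class. A minimal sketch of such a wrapper, assuming IssmComm is a static class holding the model communicator (GetComm() appears in the diffs below; GetRank()/GetSize() are named in the message above):

    #include <mpi.h>

    /* Hypothetical minimal IssmComm: a static wrapper around the model
     * communicator, so toolkit code no longer hard-codes MPI_COMM_WORLD. */
    class IssmComm{
        private:
            static MPI_Comm comm;             /* set once at startup */
        public:
            static void     SetComm(MPI_Comm c){comm=c;}
            static MPI_Comm GetComm(void){return comm;}
            static int GetRank(void){int rank; MPI_Comm_rank(comm,&rank); return rank;}
            static int GetSize(void){int size; MPI_Comm_size(comm,&size); return size;}
    };
    MPI_Comm IssmComm::comm=MPI_COMM_WORLD;   /* default until SetComm is called */

    /* Before: extern int my_rank,num_procs;   MPI_Bcast(buf,n,MPI_DOUBLE,0,MPI_COMM_WORLD);
     * After:  int my_rank=IssmComm::GetRank(); MPI_Bcast(buf,n,MPI_DOUBLE,0,IssmComm::GetComm()); */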

Location:
issm/trunk-jpl/src/c/toolkits/petsc
Files:
8 edited

  • issm/trunk-jpl/src/c/toolkits/petsc/objects/PetscMat.cpp (r12859 → r13595)

    @@ -31,5 +31,5 @@
     PetscMat::PetscMat(int M,int N){

    -	this->matrix=NewMat(M,N);
    +	this->matrix=NewMat(M,N,IssmComm::GetComm());
     }
     /*}}}*/

    @@ -128,5 +128,5 @@
     	_assert_(this->matrix);
     	_assert_(X->vector);
    -	MatMultPatch(this->matrix,X->vector,AX->vector);
    +	MatMultPatch(this->matrix,X->vector,AX->vector,IssmComm::GetComm());

     }

    @@ -150,5 +150,5 @@
     	IssmDouble* output=NULL;

    -	MatToSerial(&output,this->matrix);
    +	MatToSerial(&output,this->matrix,IssmComm::GetComm());
     	return output;

  • issm/trunk-jpl/src/c/toolkits/petsc/objects/PetscVec.cpp (r12863 → r13595)

    @@ -167,5 +167,5 @@

     	IssmDouble* vec_serial=NULL;
    -	VecToMPISerial(&vec_serial, this->vector);
    +	VecToMPISerial(&vec_serial, this->vector,IssmComm::GetComm());
     	return vec_serial;

  • issm/trunk-jpl/src/c/toolkits/petsc/patches/GetOwnershipBoundariesFromRange.cpp (r12431 → r13595)

    @@ -14,5 +14,5 @@
     #include "../../../shared/shared.h"

    -void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range){
    +void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range,COMM comm){

     	/*externals :*/

    @@ -29,5 +29,5 @@
     	/*Gather all range values into allranges, for all nodes*/
     	allranges=xNew<int>(num_procs);
    -	MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,MPI_COMM_WORLD);
    +	MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);

     	/*From all ranges, get lower row and upper row*/
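
    The gathered allranges array is all the function needs to derive each rank's ownership interval. A minimal sketch of the presumable computation, as an assumed reconstruction for illustration (the ISSM source keeps my_rank/num_procs in the elided externals block and allocates with xNew):

        #include <mpi.h>

        /* Assumed reconstruction, not the ISSM source: each rank contributes
         * its local row count (range); its [lower,upper) ownership interval
         * is the prefix sum of the counts on the ranks before it. */
        void GetOwnershipBoundariesFromRangeSketch(int* plower_row,int* pupper_row,int range,MPI_Comm comm){
            int my_rank,num_procs;
            MPI_Comm_rank(comm,&my_rank);
            MPI_Comm_size(comm,&num_procs);

            int* allranges=new int[num_procs];
            MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);

            int lower_row=0;
            for(int i=0;i<my_rank;i++)lower_row+=allranges[i];

            *plower_row=lower_row;
            *pupper_row=lower_row+range;
            delete [] allranges;
        }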
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/MatMultPatch.cpp (r12431 → r13595)

    @@ -17,8 +17,8 @@

     /*Function prototypes: */
    -int MatMultCompatible(Mat A,Vec x);
    +int MatMultCompatible(Mat A,Vec x,COMM comm);
     void VecRelocalize(Vec* outvector,Vec vector,int m);

    -void MatMultPatch(Mat A,Vec X, Vec AX){ //same prototype as MatMult in Petsc
    +void MatMultPatch(Mat A,Vec X, Vec AX,COMM comm){ //same prototype as MatMult in Petsc

     	int m,n;

    @@ -27,5 +27,5 @@
     	_assert_(A); _assert_(X);

    -	if (MatMultCompatible(A,X)){
    +	if (MatMultCompatible(A,X,comm)){
     		MatMult(A,X,AX);
     	}

    @@ -42,5 +42,5 @@
     }

    -int MatMultCompatible(Mat A,Vec x){
    +int MatMultCompatible(Mat A,Vec x,COMM comm){

     	/*error management*/

    @@ -58,6 +58,6 @@

     	/*synchronize result: */
    -	MPI_Reduce (&result,&sumresult,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    -	MPI_Bcast(&sumresult,1,MPI_INT,0,MPI_COMM_WORLD);
    +	MPI_Reduce (&result,&sumresult,1,MPI_INT,MPI_SUM,0,comm );
    +	MPI_Bcast(&sumresult,1,MPI_INT,0,comm);
     	if (sumresult!=num_procs){
     		result=0;
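
    The reduce-then-broadcast pair makes every rank agree on the compatibility verdict before branching, so all ranks either call MatMult or take the relocalization path together. On the same communicator the two calls are equivalent to a single MPI_Allreduce; an editorially equivalent sketch (result/sumresult are the diff's own names; the Allreduce form is not the source):

        int result=1;      /* this rank's verdict from the local layout checks */
        int sumresult=0;
        int num_procs;
        MPI_Comm_size(comm,&num_procs);
        MPI_Allreduce(&result,&sumresult,1,MPI_INT,MPI_SUM,comm);
        if(sumresult!=num_procs)result=0;   /* at least one rank saw an incompatible layout */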
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp (r12431 → r13595)

    @@ -13,5 +13,5 @@
     #include "../../../shared/shared.h"

    -void MatToSerial(double** poutmatrix,Mat matrix){
    +void MatToSerial(double** poutmatrix,Mat matrix,COMM comm){

     	int i;

    @@ -66,10 +66,10 @@
     			buffer[1]=lower_row;
     			buffer[2]=range;
    -			MPI_Send(buffer,3,MPI_INT,0,1,MPI_COMM_WORLD);
    -			if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,MPI_COMM_WORLD);
    +			MPI_Send(buffer,3,MPI_INT,0,1,comm);
    +			if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,comm);
     		}
     		if (my_rank==0){
    -			MPI_Recv(buffer,3,MPI_INT,i,1,MPI_COMM_WORLD,&status);
    -			if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,MPI_COMM_WORLD,&status);
    +			MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
    +			if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,comm,&status);
     		}
     	}
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/NewMat.cpp (r12729 → r13595)

    @@ -21,5 +21,5 @@

     /*NewMat(int M,int N){{{*/
    -Mat NewMat(int M,int N){
    +Mat NewMat(int M,int N,COMM comm){

     	/*output:*/

    @@ -40,7 +40,7 @@

     	#if _PETSC_MAJOR_ == 3 && _PETSC_MINOR_ > 2
    -	MatCreateAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
    +	MatCreateAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     	#else
    -	MatCreateMPIAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
    +	MatCreateMPIAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     	#endif

    @@ -49,5 +49,5 @@
     /*}}}*/
     /*NewMat(int M,int N,double sparsity){{{*/
    -Mat NewMat(int M,int N,double sparsity){
    +Mat NewMat(int M,int N,double sparsity,COMM comm){

     	/*output:*/

    @@ -69,11 +69,11 @@
     	#if _PETSC_MAJOR_ == 3 && _PETSC_MINOR_ > 2
     	if(sparsity==1){
    -		MatCreateDense(MPI_COMM_WORLD,m,n,M,N,NULL,&outmatrix);
    +		MatCreateDense(comm,m,n,M,N,NULL,&outmatrix);
     	}
     	else{
    -		MatCreateAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
    +		MatCreateAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     	}
     	#else
    -	MatCreateMPIAIJ(MPI_COMM_WORLD,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
    +	MatCreateMPIAIJ(comm,m,n,M,N,d_nz,NULL,o_nz,NULL,&outmatrix);
     	#endif

    @@ -82,5 +82,5 @@
     /*}}}*/
     /*NewMat(int M,int N,int connectivity,int numberofdofspernode){{{*/
    -Mat NewMat(int M,int N,int connectivity,int numberofdofspernode){
    +Mat NewMat(int M,int N,int connectivity,int numberofdofspernode,COMM comm){

     	/*output:*/

    @@ -106,5 +106,5 @@
     	o_nz=(int)connectivity*numberofdofspernode/2;

    -	MatCreate(MPI_COMM_WORLD,&outmatrix);
    +	MatCreate(comm,&outmatrix);
     	MatSetSizes(outmatrix,m,n,M,N);
     	MatSetFromOptions(outmatrix);
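
    The preprocessor guard reflects the PETSc 3.3 API rename of MatCreateMPIAIJ to MatCreateAIJ (and MatCreateMPIDense to MatCreateDense); both branches build the same parallel matrix. With the communicator threaded through, a hypothetical call site looks like (M and N assumed defined):

        /* Hypothetical call site: a global M-by-N parallel sparse matrix
         * created on the model communicator instead of MPI_COMM_WORLD. */
        Mat Kff=NewMat(M,N,IssmComm::GetComm());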
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp (r12431 → r13595)

    @@ -13,5 +13,5 @@
     #include "../../../shared/shared.h"

    -int VecToMPISerial(double** pgathered_vector, Vec vector){
    +int VecToMPISerial(double** pgathered_vector, Vec vector,COMM comm){

     	int i;

    @@ -66,10 +66,10 @@
     			buffer[1]=lower_row;
     			buffer[2]=range;
    -			MPI_Send(buffer,3,MPI_INT,0,1,MPI_COMM_WORLD);
    -			if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,MPI_COMM_WORLD);
    +			MPI_Send(buffer,3,MPI_INT,0,1,comm);
    +			if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,comm);
     		}
     		if (my_rank==0){
    -			MPI_Recv(buffer,3,MPI_INT,i,1,MPI_COMM_WORLD,&status);
    -			if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,MPI_COMM_WORLD,&status);
    +			MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
    +			if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,comm,&status);
     		}
     	}

    @@ -81,5 +81,5 @@

     	/*Now, broadcast gathered_vector from node 0 to other nodes: */
    -	MPI_Bcast(gathered_vector,vector_size,MPI_DOUBLE,0,MPI_COMM_WORLD);
    +	MPI_Bcast(gathered_vector,vector_size,MPI_DOUBLE,0,comm);

     	/*Assign output pointers: */
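
    MatToSerial and VecToMPISerial share the same gather protocol: every non-root rank sends a three-int header (carrying, among others, its row offset and chunk size) followed by its local chunk to rank 0, and VecToMPISerial finishes with a broadcast so every rank holds the full array. A hypothetical caller (the xDelete deallocator is assumed to pair with the allocation made inside):

        /* Hypothetical caller: gather a distributed PETSc Vec onto every rank. */
        double* gathered=NULL;
        VecToMPISerial(&gathered,vector,IssmComm::GetComm());
        /* every rank now sees the complete vector, e.g. gathered[0] */
        xDelete<double>(gathered);   /* assumed matching deallocator */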
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/petscpatches.h (r12101 → r13595)

    @@ -13,15 +13,16 @@
     #include "./SolverEnum.h"
     #include "../../toolkitsenums.h"
    +#include "../../../include/types.h"

     class Parameters;

     Vec NewVec(int size,bool fromlocalsize=false);
    -Mat NewMat(int M,int N);
    -Mat NewMat(int M,int N,double sparsity);
    -Mat NewMat(int M,int N,int connectivity,int numberofdofspernode);
    +Mat NewMat(int M,int N,COMM comm);
    +Mat NewMat(int M,int N,double sparsity,COMM comm);
    +Mat NewMat(int M,int N,int connectivity,int numberofdofspernode, COMM comm);

     int VecTranspose(Vec* tvector,Vec vector);
    -int VecToMPISerial(double** pgathered_vector, Vec vector);
    -void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range);
    +int VecToMPISerial(double** pgathered_vector, Vec vector,COMM comm);
    +void GetOwnershipBoundariesFromRange(int* plower_row,int* pupper_row,int range,COMM comm);
     void MatFree(Mat* pmat);
     void ISFree(IS* pis);

    @@ -35,6 +36,6 @@
     void PetscOptionsDetermineSolverType(int* psolver_type);
     void VecMerge(Vec A, Vec B, double* row_partition_vector,int row_partition_size);
    -void MatMultPatch(Mat A,Vec X, Vec AX);
    -void MatToSerial(double** poutmatrix,Mat matrix);
    +void MatMultPatch(Mat A,Vec X, Vec AX,COMM comm);
    +void MatToSerial(double** poutmatrix,Mat matrix,COMM comm);
     void VecDuplicatePatch(Vec* output, Vec input);
     Vec  SerialToVec(double* vector,int vector_size);
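
    Taken together, the header now advertises a fully communicator-parameterized toolkit. A hypothetical end-to-end sketch of the new signatures (M, N, X, and AX assumed set up elsewhere; COMM is the typedef pulled in through types.h):

        COMM comm=IssmComm::GetComm();

        Mat A=NewMat(M,N,comm);        /* parallel sparse matrix on comm       */
        MatMultPatch(A,X,AX,comm);     /* AX = A*X, patching layouts if needed */

        double* serial=NULL;
        MatToSerial(&serial,A,comm);   /* rank 0 assembles the full dense copy */
        MatFree(&A);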