Changeset 13590


Ignore:
Timestamp:
10/10/2012 20:56:09 (12 years ago)
Author:
Eric.Larour
Message:

CHG: replaced instances of MPI_COMM_WORLD in modules with IssmComm::GetComm()

Location:
issm/trunk-jpl/src/c/modules
Files:
38 edited

Legend:

Unmodified
Added
Removed
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/RiftConstraintsState.cpp

    r12515 r13590  
    3333
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);               
     35        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     36        MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
    3737        found=mpi_found;
    3838        #endif
     
    9696
    9797        #ifdef _HAVE_MPI_
    98         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    99         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);               
     98        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     99        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
    100100        num_unstable_constraints=sum_num_unstable_constraints;
    101101        #endif
     
    136136        /*Is there just one found? that would mean we have frozen! : */
    137137        #ifdef _HAVE_MPI_
    138         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
    139         MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);               
     138        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
     139        MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
    140140        found=mpi_found;
    141141        #endif
     
    196196
    197197        #ifdef _HAVE_MPI_
    198         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    199         MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);               
     198        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     199        MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
    200200        found=mpi_found;
    201201        #endif
     
    229229
    230230        #ifdef _HAVE_MPI_
    231         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    232         MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);               
     231        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     232        MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
    233233        found=mpi_found;
    234234        #endif
     
    290290
    291291        #ifdef _HAVE_MPI_
    292         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    293         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);               
     292        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     293        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
    294294        num_unstable_constraints=sum_num_unstable_constraints;
    295295        #endif
     
    330330
    331331        #ifdef _HAVE_MPI_
    332         MPI_Reduce (&max_penetration,&mpi_max_penetration,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    333         MPI_Bcast(&mpi_max_penetration,1,MPI_DOUBLE,0,MPI_COMM_WORLD);               
     332        MPI_Reduce (&max_penetration,&mpi_max_penetration,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     333        MPI_Bcast(&mpi_max_penetration,1,MPI_DOUBLE,0,IssmComm::GetComm());               
    334334        max_penetration=mpi_max_penetration;
    335335        #endif
     
    369369
    370370        #ifdef _HAVE_MPI_
    371         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    372         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);               
     371        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     372        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
    373373        num_unstable_constraints=sum_num_unstable_constraints;
    374374        #endif
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalConstraintsState.cpp

    r12102 r13590  
    3737
    3838        #ifdef _HAVE_MPI_
    39         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    40         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);               
     39        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     40        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
    4141        num_unstable_constraints=sum_num_unstable_constraints;
    4242        #endif
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalIsPresent.cpp

    r12102 r13590  
    2929       
    3030        #ifdef _HAVE_MPI_
    31         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
    32         MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);               
     31        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
     32        MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
    3333        found=mpi_found;
    3434        #endif
  • issm/trunk-jpl/src/c/modules/DragCoefficientAbsGradientx/DragCoefficientAbsGradientx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/ElementResponsex/ElementResponsex.cpp

    r13073 r13590  
    3838        /*Broadcast whether we found the element: */
    3939        #ifdef _HAVE_MPI_
    40         MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
     40        MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    4141        if(!sumfound)_error_("could not find material with id" << index << " to compute ElementResponse");
    4242        #endif
     
    4949        /*Broadcast and plug into response: */
    5050        #ifdef _HAVE_MPI_
    51         MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,MPI_COMM_WORLD);
    52         MPI_Bcast(&response,1,MPI_DOUBLE,cpu_found,MPI_COMM_WORLD);
     51        MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     52        MPI_Bcast(&response,1,MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    5353        #endif
    5454
  • issm/trunk-jpl/src/c/modules/GroundinglineMigrationx/GroundinglineMigrationx.cpp

    r13216 r13590  
    157157               
    158158                #ifdef _HAVE_MPI_
    159                 MPI_Allreduce(&local_nflipped,&nflipped,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
     159                MPI_Allreduce(&local_nflipped,&nflipped,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    160160                if(VerboseConvergence()) _pprintLine_("   Additional number of vertices allowed to unground: " << nflipped);
    161161                #else
  • issm/trunk-jpl/src/c/modules/IceVolumex/IceVolumex.cpp

    r13073 r13590  
    2020        }
    2121        #ifdef _HAVE_MPI_
    22         MPI_Reduce(&local_ice_volume,&total_ice_volume,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    23         MPI_Bcast(&total_ice_volume,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     22        MPI_Reduce(&local_ice_volume,&total_ice_volume,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     23        MPI_Bcast(&total_ice_volume,1,MPI_DOUBLE,0,IssmComm::GetComm());
    2424        #else
    2525        total_ice_volume=local_ice_volume;
  • issm/trunk-jpl/src/c/modules/InputConvergencex/InputConvergencex.cpp

    r12515 r13590  
    3131        /*In parallel, we need to gather the converged status: */
    3232        #ifdef _HAVE_MPI_
    33         MPI_Allreduce ( (void*)&num_notconverged,(void*)&total_notconverged,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
     33        MPI_Allreduce ( (void*)&num_notconverged,(void*)&total_notconverged,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    3434        num_notconverged=total_notconverged;
    3535        #endif
  • issm/trunk-jpl/src/c/modules/InputUpdateFromDakotax/InputUpdateFromDakotax.cpp

    r13056 r13590  
    6868
    6969                        #ifdef _DEBUG_
    70                                 PetscSynchronizedPrintf(MPI_COMM_WORLD,"Parameter matrix:");
    71                                 PetscSynchronizedFlush(MPI_COMM_WORLD);
     70                                PetscSynchronizedPrintf(IssmComm::GetComm(),"Parameter matrix:");
     71                                PetscSynchronizedFlush(IssmComm::GetComm());
    7272                                for(l=0;l<ncols;l++){
    73                                         PetscSynchronizedPrintf(MPI_COMM_WORLD," time %i\n",l);
    74                                         PetscSynchronizedFlush(MPI_COMM_WORLD);
     73                                        PetscSynchronizedPrintf(IssmComm::GetComm()," time %i\n",l);
     74                                        PetscSynchronizedFlush(IssmComm::GetComm());
    7575
    7676                                        for(k=0;k<numberofvertices;k++){
    77                                                 PetscSynchronizedPrintf(MPI_COMM_WORLD," node %i value %g\n",k+1,*(parameter+k*ncols+l));
    78                                                 PetscSynchronizedFlush(MPI_COMM_WORLD);
     77                                                PetscSynchronizedPrintf(IssmComm::GetComm()," node %i value %g\n",k+1,*(parameter+k*ncols+l));
     78                                                PetscSynchronizedFlush(IssmComm::GetComm());
    7979                                        }
    8080                                }
    81                                 PetscSynchronizedPrintf(MPI_COMM_WORLD," descriptor: %s root %s enum: %i\n",descriptor,root,StringToEnumx(root));
    82                                 PetscSynchronizedFlush(MPI_COMM_WORLD);
     81                                PetscSynchronizedPrintf(IssmComm::GetComm()," descriptor: %s root %s enum: %i\n",descriptor,root,StringToEnumx(root));
     82                                PetscSynchronizedFlush(IssmComm::GetComm());
    8383                        #endif
    8484                         
  • issm/trunk-jpl/src/c/modules/Krigingx/pKrigingx.cpp

    r13376 r13590  
    6464                double *sumpredictions =xNew<double>(n_interp);
    6565                double *sumerror       =xNew<double>(n_interp);
    66                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
    67                 MPI_Allreduce(error,sumerror,n_interp,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
     66                MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     67                MPI_Allreduce(error,sumerror,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
    6868                xDelete<double>(error); error=sumerror;
    6969                xDelete<double>(predictions); predictions=sumpredictions;
     
    8181#ifdef _HAVE_MPI_
    8282                double *sumpredictions =xNew<double>(n_interp);
    83                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
     83                MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
    8484                xDelete<double>(predictions); predictions=sumpredictions;
    8585#endif
     
    9898#ifdef _HAVE_MPI_
    9999                double *sumpredictions =xNew<double>(n_interp);
    100                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
     100                MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
    101101                xDelete<double>(predictions); predictions=sumpredictions;
    102102#endif
  • issm/trunk-jpl/src/c/modules/MassFluxx/MassFluxx.cpp

    r13483 r13590  
    6060
    6161        #ifdef _HAVE_MPI_
    62         MPI_Allreduce ( (void*)&mass_flux,(void*)&all_mass_flux,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
     62        MPI_Allreduce ( (void*)&mass_flux,(void*)&all_mass_flux,1,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
    6363        mass_flux=all_mass_flux;
    6464        #endif
  • issm/trunk-jpl/src/c/modules/MaxAbsVxx/MaxAbsVxx.cpp

    r13073 r13590  
    3333        /*Figure out maximum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&maxabsvx,&node_maxabsvx,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_maxabsvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&maxabsvx,&node_maxabsvx,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_maxabsvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        maxabsvx=node_maxabsvx;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MaxAbsVyx/MaxAbsVyx.cpp

    r13073 r13590  
    3434        /*Figure out maximum across the cluster: */
    3535        #ifdef _HAVE_MPI_
    36         MPI_Reduce (&maxabsvy,&node_maxabsvy,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    37         MPI_Bcast(&node_maxabsvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     36        MPI_Reduce (&maxabsvy,&node_maxabsvy,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     37        MPI_Bcast(&node_maxabsvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3838        maxabsvy=node_maxabsvy;
    3939        #endif
  • issm/trunk-jpl/src/c/modules/MaxAbsVzx/MaxAbsVzx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&maxabsvz,&node_maxabsvz,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_maxabsvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&maxabsvz,&node_maxabsvz,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_maxabsvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        maxabsvz=node_maxabsvz;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MaxVelx/MaxVelx.cpp

    r13073 r13590  
    3434        /*Figure out maximum across the cluster: */
    3535        #ifdef _HAVE_MPI_
    36         MPI_Reduce (&maxvel,&node_maxvel,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    37         MPI_Bcast(&node_maxvel,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     36        MPI_Reduce (&maxvel,&node_maxvel,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     37        MPI_Bcast(&node_maxvel,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3838        maxvel=node_maxvel;
    3939        #endif
  • issm/trunk-jpl/src/c/modules/MaxVxx/MaxVxx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&maxvx,&node_maxvx,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_maxvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&maxvx,&node_maxvx,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_maxvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        maxvx=node_maxvx;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MaxVyx/MaxVyx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&maxvy,&node_maxvy,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_maxvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&maxvy,&node_maxvy,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_maxvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        maxvy=node_maxvy;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MaxVzx/MaxVzx.cpp

    r13073 r13590  
    3434        /*Figure out minimum across the cluster: */
    3535        #ifdef _HAVE_MPI_
    36         MPI_Reduce (&maxvz,&node_maxvz,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
    37         MPI_Bcast(&node_maxvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     36        MPI_Reduce (&maxvz,&node_maxvz,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
     37        MPI_Bcast(&node_maxvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3838        maxvz=node_maxvz;
    3939        #endif
  • issm/trunk-jpl/src/c/modules/MinVelx/MinVelx.cpp

    r13073 r13590  
    3434        /*Figure out minimum across the cluster: */
    3535        #ifdef _HAVE_MPI_
    36         MPI_Reduce (&minvel,&node_minvel,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
    37         MPI_Bcast(&node_minvel,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     36        MPI_Reduce (&minvel,&node_minvel,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
     37        MPI_Bcast(&node_minvel,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3838        minvel=node_minvel;
    3939        #endif
  • issm/trunk-jpl/src/c/modules/MinVxx/MinVxx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&minvx,&node_minvx,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_minvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&minvx,&node_minvx,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_minvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        minvx=node_minvx;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MinVyx/MinVyx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&minvy,&node_minvy,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_minvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&minvy,&node_minvy,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_minvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        minvy=node_minvy;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/MinVzx/MinVzx.cpp

    r13073 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&minvz,&node_minvz,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_minvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);   
     35        MPI_Reduce (&minvz,&node_minvz,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_minvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
    3737        minvz=node_minvz;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/NodalValuex/NodalValuex.cpp

    r13056 r13590  
    3737        /*Broadcast whether we found the element: */
    3838        #ifdef _HAVE_MPI_
    39         MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
     39        MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    4040        if(!sumfound)_error_("could not find element with vertex with id" << index << " to compute nodal value " << EnumToStringx(natureofdataenum));
    4141        #endif
     
    4343        /*Broadcast and plug into response: */
    4444        #ifdef _HAVE_MPI_
    45         MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,MPI_COMM_WORLD);
    46         MPI_Bcast(&value,1,MPI_DOUBLE,cpu_found,MPI_COMM_WORLD);
     45        MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     46        MPI_Bcast(&value,1,MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    4747        #else
    4848        value=cpu_found;
  • issm/trunk-jpl/src/c/modules/ParsePetscOptionsx/ParsePetscOptionsx.cpp

    r13056 r13590  
    9696        /*Ok, broadcast to other cpus: */
    9797        #ifdef _HAVE_MPI_
    98         MPI_Bcast(&numanalyses,1,MPI_INT,0,MPI_COMM_WORLD);
     98        MPI_Bcast(&numanalyses,1,MPI_INT,0,IssmComm::GetComm());
    9999        if(my_rank!=0){
    100100                analyses=xNew<IssmPDouble>(numanalyses);
    101101                strings=xNew<char*>(numanalyses);
    102102        }
    103         MPI_Bcast(analyses,numanalyses,MPI_DOUBLE,0,MPI_COMM_WORLD);
     103        MPI_Bcast(analyses,numanalyses,MPI_DOUBLE,0,IssmComm::GetComm());
    104104        #endif
    105105        for(i=0;i<numanalyses;i++){
     
    110110                if(my_rank==0)stringlength=(strlen(string)+1)*sizeof(char);
    111111                #ifdef _HAVE_MPI_
    112                 MPI_Bcast(&stringlength,1,MPI_INT,0,MPI_COMM_WORLD);
     112                MPI_Bcast(&stringlength,1,MPI_INT,0,IssmComm::GetComm());
    113113                if(my_rank!=0)string=xNew<char>(stringlength);
    114                 MPI_Bcast(string,stringlength,MPI_CHAR,0,MPI_COMM_WORLD);
     114                MPI_Bcast(string,stringlength,MPI_CHAR,0,IssmComm::GetComm());
    115115                if(my_rank!=0)strings[i]=string;
    116116                #endif
  • issm/trunk-jpl/src/c/modules/RheologyBbarAbsGradientx/RheologyBbarAbsGradientx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/Solverx/SolverxPetsc.cpp

    r13056 r13590  
    102102
    103103        /*Prepare solver*/
    104         KSPCreate(MPI_COMM_WORLD,&ksp);
     104        KSPCreate(IssmComm::GetComm(),&ksp);
    105105        KSPSetOperators(ksp,Kff,Kff,DIFFERENT_NONZERO_PATTERN);
    106106        KSPSetFromOptions(ksp);
  • issm/trunk-jpl/src/c/modules/SurfaceAbsVelMisfitx/SurfaceAbsVelMisfitx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/SurfaceAreax/SurfaceAreax.cpp

    r12470 r13590  
    2929        /*Sum all J from all cpus of the cluster:*/
    3030        #ifdef _HAVE_MPI_
    31         MPI_Reduce (&S,&S_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    32         MPI_Bcast(&S_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     31        MPI_Reduce (&S,&S_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     32        MPI_Bcast(&S_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3333        S=S_sum;
    3434        #endif
  • issm/trunk-jpl/src/c/modules/SurfaceAverageVelMisfitx/SurfaceAverageVelMisfitx.cpp

    r13073 r13590  
    3232        /*Sum all J from all cpus of the cluster:*/
    3333        #ifdef _HAVE_MPI_
    34         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    35         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     34        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     35        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3636        J=J_sum;
    3737        #endif
  • issm/trunk-jpl/src/c/modules/SurfaceLogVelMisfitx/SurfaceLogVelMisfitx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/SurfaceLogVxVyMisfitx/SurfaceLogVxVyMisfitx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/SurfaceRelVelMisfitx/SurfaceRelVelMisfitx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/ThicknessAbsGradientx/ThicknessAbsGradientx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/ThicknessAbsMisfitx/ThicknessAbsMisfitx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/ThicknessAcrossGradientx/ThicknessAcrossGradientx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/ThicknessAlongGradientx/ThicknessAlongGradientx.cpp

    r13073 r13590  
    2828        /*Sum all J from all cpus of the cluster:*/
    2929        #ifdef _HAVE_MPI_
    30         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    31         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     30        MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     31        MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3232        J=J_sum;
    3333        #endif
  • issm/trunk-jpl/src/c/modules/TimeAdaptx/TimeAdaptx.cpp

    r12470 r13590  
    3333        /*Figure out minimum across the cluster: */
    3434        #ifdef _HAVE_MPI_
    35         MPI_Reduce (&min_dt,&node_min_dt,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
    36         MPI_Bcast(&node_min_dt,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     35        MPI_Reduce (&min_dt,&node_min_dt,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
     36        MPI_Bcast(&node_min_dt,1,MPI_DOUBLE,0,IssmComm::GetComm());
    3737        min_dt=node_min_dt;
    3838        #endif
  • issm/trunk-jpl/src/c/modules/TotalSmbx/TotalSmbx.cpp

    r13073 r13590  
    2020        }
    2121        #ifdef _HAVE_MPI_
    22         MPI_Reduce(&local_smb,&total_smb,1,MPI_DOUBLE,MPI_SUM,0,MPI_COMM_WORLD );
    23         MPI_Bcast(&total_smb,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
     22        MPI_Reduce(&local_smb,&total_smb,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
     23        MPI_Bcast(&total_smb,1,MPI_DOUBLE,0,IssmComm::GetComm());
    2424        #else
    2525        total_smb=local_smb;
Note: See TracChangeset for help on using the changeset viewer.