Changeset 15838


Timestamp: 08/19/13 15:15:59
Author: Eric.Larour
Message: CHG: initial conversion from MPI to the issmmpi layer. Starting validation of the new code changes.

Location: issm/trunk-jpl/src/c
Files: 1 deleted, 52 edited
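
Note on the pattern in this changeset: raw MPI_* calls wrapped in #ifdef _HAVE_MPI_ guards are replaced by unconditional ISSM_MPI_* calls, with the MPI/serial distinction presumably pushed down into the new toolkits/mpi/issmmpi layer. The sketch below only illustrates that idea and is not the contents of issmmpi.h; apart from the ISSM_MPI_Bcast, ISSM_MPI_INT, ISSM_MPI_DOUBLE and ISSM_MPI_COMM_WORLD names visible in the diffs, everything in it is an assumption.

    /* Hypothetical wrapper sketch (not the actual issmmpi.h): forward to MPI when
     * _HAVE_MPI_ is defined, otherwise supply serial semantics so that callers
     * no longer need their own #ifdef guards. */
    #ifdef _HAVE_MPI_
     #include <mpi.h>
     typedef MPI_Comm     ISSM_MPI_Comm;
     typedef MPI_Datatype ISSM_MPI_Datatype;
     #define ISSM_MPI_INT        MPI_INT
     #define ISSM_MPI_DOUBLE     MPI_DOUBLE
     #define ISSM_MPI_COMM_WORLD MPI_COMM_WORLD
     static inline int ISSM_MPI_Bcast(void* buffer,int count,ISSM_MPI_Datatype datatype,int root,ISSM_MPI_Comm comm){
        return MPI_Bcast(buffer,count,datatype,root,comm);  /* true broadcast */
     }
    #else
     typedef int ISSM_MPI_Comm;
     typedef int ISSM_MPI_Datatype;
     #define ISSM_MPI_INT        1
     #define ISSM_MPI_DOUBLE     2
     #define ISSM_MPI_COMM_WORLD 1
     static inline int ISSM_MPI_Bcast(void* buffer,int count,ISSM_MPI_Datatype datatype,int root,ISSM_MPI_Comm comm){
        return 0;  /* one process: the root's buffer is already everyone's buffer */
     }
    #endif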

  • issm/trunk-jpl/src/c/Makefile.am

    r15836 r15838  
    173173                                        ./shared/io/Print/Print.h\
    174174                                        ./shared/io/Comm/Comm.h\
    175                                         ./shared/io/Comm/CommDef.h\
    176175                                        ./shared/io/Comm/IssmComm.h\
    177176                                        ./shared/io/Comm/IssmComm.cpp\
     
    763762#Mpi sources  {{{
    764763mpi_sources= ./toolkits/mpi/issmmpi.h\
    765                                 ./toolkits/mpi/issmpi.cpp\
     764                                ./toolkits/mpi/issmmpi.cpp\
    766765                                ./toolkits/mpi/commops/commops.h\
    767766                                ./toolkits/mpi/commops/DetermineLocalSize.cpp\
  • issm/trunk-jpl/src/c/analyses/DakotaSpawnCore.cpp

    r15104 r15838  
    4949
    5050        /*If counter==-1 on cpu0, it means that the dakota runs are done. In which case, bail out and return 0: */
    51         #ifdef _HAVE_MPI_
    52         MPI_Bcast(&counter,1,MPI_INT,0,IssmComm::GetComm());
    53         #endif
     51        ISSM_MPI_Bcast(&counter,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    5452        if(counter==-1)return 0;
    5553
     
    116114
    117115        /*numvariables: */
    118         MPI_Bcast(&numvariables,1,MPI_INT,0,IssmComm::GetComm());
     116        ISSM_MPI_Bcast(&numvariables,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    119117
    120118        /*variables:*/
    121119        if(my_rank!=0)variables=xNew<double>(numvariables);
    122         MPI_Bcast(variables,numvariables,MPI_DOUBLE,0,IssmComm::GetComm());
     120        ISSM_MPI_Bcast(variables,numvariables,MPI_DOUBLE,0,IssmComm::GetComm());
    123121
    124122        /*variables_descriptors: */
     
    131129                        string_length=(strlen(string)+1)*sizeof(char);
    132130                }
    133                 MPI_Bcast(&string_length,1,MPI_INT,0,IssmComm::GetComm());
     131                ISSM_MPI_Bcast(&string_length,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    134132                if(my_rank!=0)string=xNew<char>(string_length);
    135                 MPI_Bcast(string,string_length,MPI_CHAR,0,IssmComm::GetComm());
     133                ISSM_MPI_Bcast(string,string_length,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    136134                if(my_rank!=0)variables_descriptors[i]=string;
    137135        }
    138136
    139137        /*numresponses: */
    140         MPI_Bcast(&numresponses,1,MPI_INT,0,IssmComm::GetComm());
     138        ISSM_MPI_Bcast(&numresponses,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    141139
    142140        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/analyses/EnvironmentFinalize.cpp

    r14917 r15838  
    1212void EnvironmentFinalize(void){
    1313
    14         #ifdef _HAVE_MPI_
     14        int my_rank;
    1515
    1616        /*Make sure we are all here*/
    17         MPI_Barrier(MPI_COMM_WORLD);
     17        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD);
    1818
    1919        /*Print closing statement*/
    20         int my_rank;
    21         MPI_Comm_rank(MPI_COMM_WORLD,&my_rank);
     20        ISSM_MPI_Comm_rank(ISSM_MPI_COMM_WORLD,&my_rank);
    2221        if(!my_rank) printf("closing MPI\n");
    2322
    2423        /*Finalize: */
    25         MPI_Finalize();
    26 
    27         #endif
     24        ISSM_MPI_Finalize();
    2825}
  • issm/trunk-jpl/src/c/analyses/EnvironmentInit.cpp

    r14917 r15838  
    1717        /*Initialize MPI environment: */
    1818        #if defined(_HAVE_MPI_)
    19         MPI_Init(&argc,&argv);
    20         comm = MPI_COMM_WORLD;
     19        ISSM_MPI_Init(&argc,&argv);
     20        comm = ISSM_MPI_COMM_WORLD;
    2121        #else
    2222        comm = 1; //bogus number for comm, which does not exist anyway.
     
    2525        /*Print Banner*/
    2626        int my_rank = 0;
    27         #ifdef _HAVE_MPI_
    28         MPI_Comm_rank(comm,&my_rank);
    29         #endif
     27        ISSM_MPI_Comm_rank(comm,&my_rank);
    3028        if(!my_rank) printf("\n");
    3129        if(!my_rank) printf("Ice Sheet System Model (%s) version  %s\n",PACKAGE_NAME,PACKAGE_VERSION);
  • issm/trunk-jpl/src/c/classes/Constraints/Constraints.cpp

    r15012 r15838  
    2929
    3030        /*figure out total number of constraints combining all the cpus (no clones here)*/
    31         #ifdef _HAVE_MPI_
    32                 MPI_Reduce(&localconstraints,&numberofconstraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    33                 MPI_Bcast(&numberofconstraints,1,MPI_INT,0,IssmComm::GetComm());
    34         #else
    35                 numberofconstraints=localconstraints;
    36         #endif
     31        ISSM_MPI_Reduce(&localconstraints,&numberofconstraints,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     32        ISSM_MPI_Bcast(&numberofconstraints,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    3733
    3834        return numberofconstraints;
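
In hunks like the Constraints.cpp one above, the old #else branch (numberofconstraints=localconstraints;) disappears entirely, which only works if the serial build of ISSM_MPI_Reduce copies the local value into the result buffer. A minimal, self-contained sketch of that assumed serial behaviour, for illustration only:

    /* Assumed serial fallback: with a single rank, a "reduction" is just a copy
     * of the local value into the receive buffer, whatever the operation. */
    #include <string.h>

    typedef int ISSM_MPI_Comm;       /* placeholder types, as in the sketch above */
    typedef int ISSM_MPI_Datatype;
    typedef int ISSM_MPI_Op;
    #define ISSM_MPI_INT    1
    #define ISSM_MPI_DOUBLE 2
    #define ISSM_MPI_SUM    0

    static inline int ISSM_MPI_Reduce(void* sendbuf,void* recvbuf,int count,
                                      ISSM_MPI_Datatype datatype,ISSM_MPI_Op op,
                                      int root,ISSM_MPI_Comm comm){
       (void)op; (void)root; (void)comm;
       size_t width=(datatype==ISSM_MPI_DOUBLE)? sizeof(double) : sizeof(int);
       memcpy(recvbuf,sendbuf,(size_t)count*width);  /* local value == global value */
       return 0;
    }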
  • issm/trunk-jpl/src/c/classes/Elements/Elements.cpp

    r15375 r15838  
    114114
    115115        /*Synchronize across cluster, so as to not end up with different sizes for each patch on each cpu: */
    116         #ifdef _HAVE_MPI_
    117         MPI_Reduce (&numvertices,&max_numvertices,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
    118         MPI_Bcast(&max_numvertices,1,MPI_INT,0,IssmComm::GetComm());
     116        ISSM_MPI_Reduce (&numvertices,&max_numvertices,1,ISSM_MPI_INT,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     117        ISSM_MPI_Bcast(&max_numvertices,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    119118        numvertices=max_numvertices;
    120119
    121         MPI_Reduce (&numnodes,&max_numnodes,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
    122         MPI_Bcast(&max_numnodes,1,MPI_INT,0,IssmComm::GetComm());
     120        ISSM_MPI_Reduce (&numnodes,&max_numnodes,1,ISSM_MPI_INT,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     121        ISSM_MPI_Bcast(&max_numnodes,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    123122        numnodes=max_numnodes;
    124         #endif
    125123
    126124        /*Ok, initialize Patch object: */
     
    186184
    187185                /*Get rank of first cpu that has results*/
    188                 #ifdef _HAVE_MPI_
    189186                if(this->Size()) rank=my_rank;
    190187                else rank=num_procs;
    191                 MPI_Allreduce (&rank,&minrank,1,MPI_INT,MPI_MIN,IssmComm::GetComm());
    192                 #else
    193                 minrank=my_rank;
    194                 #endif
     188                ISSM_MPI_Allreduce (&rank,&minrank,1,ISSM_MPI_INT,ISSM_MPI_MIN,IssmComm::GetComm());
    195189
    196190                /*see what the first element of this partition has in stock (this is common to all partitions)*/
     
    200194                        element->ListResultsInfo(&resultsenums,&resultssizes,&resultstimes,&resultssteps,&numberofresults);
    201195                }
    202                 #ifdef _HAVE_MPI_
    203                 MPI_Bcast(&numberofresults,1,MPI_DOUBLE,minrank,IssmComm::GetComm());
    204                 #endif
     196                ISSM_MPI_Bcast(&numberofresults,1,ISSM_MPI_DOUBLE,minrank,IssmComm::GetComm());
    205197
    206198                /*Get out if there is no results. Otherwise broadcast info*/
    207199                if(!numberofresults) return;
    208                 #ifdef _HAVE_MPI_
    209200                if(my_rank!=minrank){
    210201                        resultsenums=xNew<int>(numberofresults);
     
    213204                        resultssteps=xNew<int>(numberofresults);
    214205                }
    215                 MPI_Bcast(resultsenums,numberofresults,MPI_INT,minrank,IssmComm::GetComm());
    216                 MPI_Bcast(resultssizes,numberofresults,MPI_INT,minrank,IssmComm::GetComm());
    217                 MPI_Bcast(resultstimes,numberofresults,MPI_DOUBLE,minrank,IssmComm::GetComm());
    218                 MPI_Bcast(resultssteps,numberofresults,MPI_INT,minrank,IssmComm::GetComm());
    219                 #endif
     206                ISSM_MPI_Bcast(resultsenums,numberofresults,ISSM_MPI_INT,minrank,IssmComm::GetComm());
     207                ISSM_MPI_Bcast(resultssizes,numberofresults,ISSM_MPI_INT,minrank,IssmComm::GetComm());
     208                ISSM_MPI_Bcast(resultstimes,numberofresults,ISSM_MPI_DOUBLE,minrank,IssmComm::GetComm());
     209                ISSM_MPI_Bcast(resultssteps,numberofresults,ISSM_MPI_INT,minrank,IssmComm::GetComm());
    220210
    221211                /*Loop over all results and get nodal vector*/
     
    299289
    300290        /*Grab max of all cpus: */
    301         #ifdef _HAVE_MPI_
    302         MPI_Allreduce((void*)&max,(void*)&allmax,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     291        ISSM_MPI_Allreduce((void*)&max,(void*)&allmax,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    303292        max=allmax;
    304         #endif
    305293
    306294        return max;
     
    314302
    315303        local_nelem=this->Size();
    316         #ifdef _HAVE_MPI_
    317         MPI_Allreduce ( (void*)&local_nelem,(void*)&numberofelements,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    318         #else
    319         numberofelements=local_nelem;
    320         #endif
     304        ISSM_MPI_Allreduce ( (void*)&local_nelem,(void*)&numberofelements,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    321305
    322306        return numberofelements;
  • issm/trunk-jpl/src/c/classes/FemModel.cpp

    r15726 r15838  
    569569
    570570        /*sum over all cpus*/
    571 #ifdef _HAVE_MPI_
    572         MPI_Allreduce((void*)connectivity_clone,(void*)all_connectivity_clone,numnodes,MPI_INT,MPI_SUM,IssmComm::GetComm());
    573 #endif
     571        ISSM_MPI_Allreduce((void*)connectivity_clone,(void*)all_connectivity_clone,numnodes,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    574572        xDelete<int>(connectivity_clone);
    575573
     
    965963
    966964        /*Figure out minimum across the cluster: */
    967 #ifdef _HAVE_MPI_
    968         MPI_Reduce (&min_dt,&node_min_dt,1,MPI_DOUBLE,MPI_MIN,0,IssmComm::GetComm() );
    969         MPI_Bcast(&node_min_dt,1,MPI_DOUBLE,0,IssmComm::GetComm());
     965        ISSM_MPI_Reduce (&min_dt,&node_min_dt,1,ISSM_MPI_DOUBLE,ISSM_MPI_MIN,0,IssmComm::GetComm() );
     966        ISSM_MPI_Bcast(&node_min_dt,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    970967        min_dt=node_min_dt;
    971 #endif
    972968
    973969        /*Assign output pointers:*/
     
    10181014        }
    10191015
    1020 #ifdef _HAVE_MPI_
    1021         MPI_Allreduce ( (void*)&mass_flux,(void*)&all_mass_flux,1,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     1016        ISSM_MPI_Allreduce ( (void*)&mass_flux,(void*)&all_mass_flux,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
    10221017        mass_flux=all_mass_flux;
    1023 #endif
    10241018
    10251019        /*Free ressources:*/
     
    10521046
    10531047        /*Figure out maximum across the cluster: */
    1054 #ifdef _HAVE_MPI_
    1055         MPI_Reduce(&maxabsvx,&node_maxabsvx,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1056         MPI_Bcast(&node_maxabsvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1048        ISSM_MPI_Reduce(&maxabsvx,&node_maxabsvx,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1049        ISSM_MPI_Bcast(&node_maxabsvx,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    10571050        maxabsvx=node_maxabsvx;
    1058 #endif
    10591051
    10601052        /*Assign output pointers:*/
     
    10781070
    10791071        /*Figure out maximum across the cluster: */
    1080 #ifdef _HAVE_MPI_
    1081         MPI_Reduce(&maxabsvy,&node_maxabsvy,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1082         MPI_Bcast(&node_maxabsvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1072        ISSM_MPI_Reduce(&maxabsvy,&node_maxabsvy,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1073        ISSM_MPI_Bcast(&node_maxabsvy,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    10831074        maxabsvy=node_maxabsvy;
    1084 #endif
    10851075
    10861076        /*Assign output pointers:*/
     
    11041094
    11051095        /*Figure out maximum across the cluster: */
    1106 #ifdef _HAVE_MPI_
    1107         MPI_Reduce(&maxabsvz,&node_maxabsvz,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1108         MPI_Bcast(&node_maxabsvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1096        ISSM_MPI_Reduce(&maxabsvz,&node_maxabsvz,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1097        ISSM_MPI_Bcast(&node_maxabsvz,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    11091098        maxabsvz=node_maxabsvz;
    1110 #endif
    11111099
    11121100        /*Assign output pointers:*/
     
    11301118
    11311119        /*Figure out maximum across the cluster: */
    1132 #ifdef _HAVE_MPI_
    1133         MPI_Reduce(&maxvel,&node_maxvel,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1134         MPI_Bcast(&node_maxvel,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1120        ISSM_MPI_Reduce(&maxvel,&node_maxvel,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1121        ISSM_MPI_Bcast(&node_maxvel,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    11351122        maxvel=node_maxvel;
    1136 #endif
    11371123
    11381124        /*Assign output pointers:*/
     
    11561142
    11571143        /*Figure out maximum across the cluster: */
    1158 #ifdef _HAVE_MPI_
    1159         MPI_Reduce(&maxvx,&node_maxvx,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1160         MPI_Bcast(&node_maxvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1144        ISSM_MPI_Reduce(&maxvx,&node_maxvx,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1145        ISSM_MPI_Bcast(&node_maxvx,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    11611146        maxvx=node_maxvx;
    1162 #endif
    11631147
    11641148        /*Assign output pointers:*/
     
    11821166
    11831167        /*Figure out maximum across the cluster: */
    1184 #ifdef _HAVE_MPI_
    1185         MPI_Reduce(&maxvy,&node_maxvy,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1186         MPI_Bcast(&node_maxvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1168        ISSM_MPI_Reduce(&maxvy,&node_maxvy,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1169        ISSM_MPI_Bcast(&node_maxvy,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    11871170        maxvy=node_maxvy;
    1188 #endif
    11891171
    11901172        /*Assign output pointers:*/
     
    12081190
    12091191        /*Figure out maximum across the cluster: */
    1210 #ifdef _HAVE_MPI_
    1211         MPI_Reduce(&maxvz,&node_maxvz,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1212         MPI_Bcast(&node_maxvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1192        ISSM_MPI_Reduce(&maxvz,&node_maxvz,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1193        ISSM_MPI_Bcast(&node_maxvz,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    12131194        maxvz=node_maxvz;
    1214 #endif
    12151195
    12161196        /*Assign output pointers:*/
     
    12341214
    12351215        /*Figure out minimum across the cluster: */
    1236 #ifdef _HAVE_MPI_
    1237         MPI_Reduce(&minvel,&node_minvel,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1238         MPI_Bcast(&node_minvel,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1216        ISSM_MPI_Reduce(&minvel,&node_minvel,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1217        ISSM_MPI_Bcast(&node_minvel,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    12391218        minvel=node_minvel;
    1240 #endif
    12411219
    12421220        /*Assign output pointers:*/
     
    12601238
    12611239        /*Figure out minimum across the cluster: */
    1262 #ifdef _HAVE_MPI_
    1263         MPI_Reduce(&minvx,&node_minvx,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1264         MPI_Bcast(&node_minvx,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1240        ISSM_MPI_Reduce(&minvx,&node_minvx,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1241        ISSM_MPI_Bcast(&node_minvx,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    12651242        minvx=node_minvx;
    1266 #endif
    12671243
    12681244        /*Assign output pointers:*/
     
    12861262
    12871263        /*Figure out minimum across the cluster: */
    1288 #ifdef _HAVE_MPI_
    1289         MPI_Reduce(&minvy,&node_minvy,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1290         MPI_Bcast(&node_minvy,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1264        ISSM_MPI_Reduce(&minvy,&node_minvy,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1265        ISSM_MPI_Bcast(&node_minvy,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    12911266        minvy=node_minvy;
    1292 #endif
    12931267
    12941268        /*Assign output pointers:*/
     
    13121286
    13131287        /*Figure out minimum across the cluster: */
    1314 #ifdef _HAVE_MPI_
    1315         MPI_Reduce(&minvz,&node_minvz,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    1316         MPI_Bcast(&node_minvz,1,MPI_DOUBLE,0,IssmComm::GetComm());   
     1288        ISSM_MPI_Reduce(&minvz,&node_minvz,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     1289        ISSM_MPI_Bcast(&node_minvz,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());   
    13171290        minvz=node_minvz;
    1318 #endif
    13191291
    13201292        /*Assign output pointers:*/
     
    13311303                local_smb+=element->TotalSmb();
    13321304        }
    1333 #ifdef _HAVE_MPI_
    1334         MPI_Reduce(&local_smb,&total_smb,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    1335         MPI_Bcast(&total_smb,1,MPI_DOUBLE,0,IssmComm::GetComm());
    1336 #else
    1337         total_smb=local_smb;
    1338 #endif
     1305        ISSM_MPI_Reduce(&local_smb,&total_smb,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     1306        ISSM_MPI_Bcast(&total_smb,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    13391307
    13401308        /*Assign output pointers: */
     
    13511319                local_ice_volume+=element->IceVolume();
    13521320        }
    1353         #ifdef _HAVE_MPI_
    1354         MPI_Reduce(&local_ice_volume,&total_ice_volume,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    1355         MPI_Bcast(&total_ice_volume,1,MPI_DOUBLE,0,IssmComm::GetComm());
    1356         #else
    1357         total_ice_volume=local_ice_volume;
    1358         #endif
     1321        ISSM_MPI_Reduce(&local_ice_volume,&total_ice_volume,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     1322        ISSM_MPI_Bcast(&total_ice_volume,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    13591323
    13601324        /*Assign output pointers: */
     
    13861350
    13871351        /*Broadcast whether we found the element: */
    1388 #ifdef _HAVE_MPI_
    1389         MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
     1352        ISSM_MPI_Allreduce ( &found,&sumfound,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    13901353        if(!sumfound)_error_("could not find material with id" << index << " to compute ElementResponse");
    1391 #endif
    13921354
    13931355        /*Ok, we found the element, compute responseocity: */
     
    13971359
    13981360        /*Broadcast and plug into response: */
    1399 #ifdef _HAVE_MPI_
    1400         MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
    1401         MPI_Bcast(&response,1,MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    1402 #endif
     1361        ISSM_MPI_Allreduce ( &cpu_found,&cpu_found,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
     1362        ISSM_MPI_Bcast(&response,1,ISSM_MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    14031363
    14041364        /*Assign output pointers: */
     
    14171377                J+=element->BalancethicknessMisfit(weight_index);
    14181378        }
    1419         #ifdef _HAVE_MPI_
    1420         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    1421         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     1379        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     1380        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    14221381        J=J_sum;
    1423         #endif
    14241382
    14251383        /*Assign output pointers: */
     
    14441402
    14451403        /*Sum all J from all cpus of the cluster:*/
    1446         #ifdef _HAVE_MPI_
    1447         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    1448         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     1404        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     1405        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    14491406        J=J_sum;
    1450         #endif
    14511407
    14521408        /*Assign output pointers: */
     
    16531609        }
    16541610        xDelete<IssmDouble>(serial_active);
    1655         #ifdef _HAVE_MPI_
    16561611        int sum_counter;
    1657         MPI_Reduce(&counter,&sum_counter,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    1658         MPI_Bcast(&sum_counter,1,MPI_INT,0,IssmComm::GetComm());               
     1612        ISSM_MPI_Reduce(&counter,&sum_counter,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     1613        ISSM_MPI_Bcast(&sum_counter,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    16591614        counter=sum_counter;
    1660         #endif
    16611615        if(VerboseSolution()) _printf0_("   Number of active nodes in EPL layer: "<< counter <<"\n");
    16621616
  • issm/trunk-jpl/src/c/classes/IndependentObject.cpp

    r15643 r15838  
    111111                scalar<<=pscalar;
    112112
    113                 #ifdef _HAVE_MPI_
    114                 MPI_Bcast(&scalar,1,MPI_DOUBLE,0,IssmComm::GetComm());
    115                 #endif
     113                ISSM_MPI_Bcast(&scalar,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    116114
    117115                /*Ok, we are almost done. scalar is now an independent variable. We don't want this variable to be fetched again in the
     
    145143                        if(fread(&M,sizeof(int),1,fid)!=1) _error_("could not read number of rows for matrix ");
    146144                }
    147                 #ifdef _HAVE_MPI_
    148                 MPI_Bcast(&M,1,MPI_INT,0,IssmComm::GetComm());
    149                 #endif
     145                ISSM_MPI_Bcast(&M,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    150146
    151147                if(my_rank==0){ 
    152148                        if(fread(&N,sizeof(int),1,fid)!=1) _error_("could not read number of columns for matrix ");
    153149                }
    154                 #ifdef _HAVE_MPI_
    155                 MPI_Bcast(&N,1,MPI_INT,0,IssmComm::GetComm());
    156                 #endif
     150                ISSM_MPI_Bcast(&N,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    157151
    158152                /*Now allocate matrix: */
     
    168162                                for (int i=0;i<M*N;++i) matrix[i]<<=buffer[i];  /*we use the <<= ADOLC overloaded operator to declare the independency*/
    169163                        }
    170                         #ifdef _HAVE_MPI_
    171                         MPI_Bcast(matrix,M*N,MPI_DOUBLE,0,IssmComm::GetComm());
    172                         #endif
     164                        ISSM_MPI_Bcast(matrix,M*N,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    173165
    174166                        xDelete<IssmPDouble>(buffer);
  • issm/trunk-jpl/src/c/classes/IoModel.cpp

    r15749 r15838  
    343343                                /*Ok, we have reached the end of the file. break: */
    344344                                record_code=0; //0 means bailout
    345                                 #ifdef _HAVE_MPI_
    346                                 MPI_Bcast(&record_code,1,MPI_INT,0,IssmComm::GetComm());  /*tell others cpus we are bailing: */
    347                                 #endif
     345                                ISSM_MPI_Bcast(&record_code,1,ISSM_MPI_INT,0,IssmComm::GetComm());  /*tell others cpus we are bailing: */
    348346                                break;
    349347                        }
     
    354352                                if(fread(&record_code  ,sizeof(int),1,this->fid)!=1) _error_("Cound not read record_code");
    355353
    356                                 #ifdef _HAVE_MPI_
    357354                                /*Tell other cpus what we are doing: */
    358                                 MPI_Bcast(&record_code,1,MPI_INT,0,IssmComm::GetComm());  /*tell other cpus what we are going to do: */
     355                                ISSM_MPI_Bcast(&record_code,1,ISSM_MPI_INT,0,IssmComm::GetComm());  /*tell other cpus what we are going to do: */
    359356
    360357                                /*Tell other cpus the name of the data, then branch according to the data type: */
    361                                 MPI_Bcast(&record_enum,1,MPI_INT,0,IssmComm::GetComm()); 
    362                                 MPI_Bcast(&record_length,1,MPI_INT,0,IssmComm::GetComm()); 
    363                                 #endif
     358                                ISSM_MPI_Bcast(&record_enum,1,ISSM_MPI_INT,0,IssmComm::GetComm()); 
     359                                ISSM_MPI_Bcast(&record_length,1,ISSM_MPI_INT,0,IssmComm::GetComm()); 
    364360
    365361                                switch(record_code){
     
    367363                                                /*Read the boolean and broadcast it to other cpus:*/
    368364                                                if(fread(&booleanint,sizeof(int),1,this->fid)!=1) _error_("could not read boolean ");
    369                                                 #ifdef _HAVE_MPI_
    370                                                 MPI_Bcast(&booleanint,1,MPI_INT,0,IssmComm::GetComm());
    371                                                 #endif
     365                                                ISSM_MPI_Bcast(&booleanint,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    372366
    373367                                                /*create BoolParam: */
     
    378372                                                /*Read the integer and broadcast it to other cpus:*/
    379373                                                if(fread(&integer,sizeof(int),1,this->fid)!=1) _error_("could not read integer ");
    380                                                 #ifdef _HAVE_MPI_
    381                                                 MPI_Bcast(&integer,1,MPI_INT,0,IssmComm::GetComm());
    382                                                 #endif
     374                                                ISSM_MPI_Bcast(&integer,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    383375
    384376                                                /*create IntParam: */
     
    395387                                                else{
    396388                                                        if(fread(&pscalar,sizeof(IssmPDouble),1,this->fid)!=1) _error_("could not read scalar ");
    397                                                         #ifdef _HAVE_MPI_
    398                                                         MPI_Bcast(&pscalar,1,MPI_DOUBLE,0,IssmComm::GetComm());
    399                                                         #endif
     389                                                        ISSM_MPI_Bcast(&pscalar,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    400390                                                        scalar=reCast<IssmDouble>(pscalar);
    401391                                                }
     
    408398                                                /*We have to read a string from disk. First read the dimensions of the string, then the string: */
    409399                                                if(fread(&string_size,sizeof(int),1,this->fid)!=1) _error_("could not read length of string ");
    410                                                 #ifdef _HAVE_MPI_
    411                                                 MPI_Bcast(&string_size,1,MPI_INT,0,IssmComm::GetComm());
    412                                                 #endif
     400                                                ISSM_MPI_Bcast(&string_size,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    413401
    414402                                                if(string_size){
     
    418406                                                        /*Read string, then broadcast: */
    419407                                                        if(fread(string,string_size*sizeof(char),1,this->fid)!=1)_error_(" could not read string ");
    420                                                         #ifdef _HAVE_MPI_
    421                                                         MPI_Bcast(string,string_size,MPI_CHAR,0,IssmComm::GetComm());
    422                                                         #endif
     408                                                        ISSM_MPI_Bcast(string,string_size,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    423409                                                }
    424410                                                else{
     
    474460                }
    475461        } //}}}
    476         #ifdef _HAVE_MPI_
    477462        else{ //cpu ~0 {{{
    478463                for(;;){ //wait on cpu 0
    479                         MPI_Bcast(&record_code,1,MPI_INT,0,IssmComm::GetComm());  /*get from cpu 0 what we are going to do: */
     464                        ISSM_MPI_Bcast(&record_code,1,ISSM_MPI_INT,0,IssmComm::GetComm());  /*get from cpu 0 what we are going to do: */
    480465                        if(record_code==0){
    481466                                break; //we are done, break from the loop
    482467                        }
    483468                        else{
    484                                 MPI_Bcast(&record_enum,1,MPI_INT,0,IssmComm::GetComm());   //get from cpu 0 name of the data
    485                                 MPI_Bcast(&record_length,1,MPI_INT,0,IssmComm::GetComm()); 
     469                                ISSM_MPI_Bcast(&record_enum,1,ISSM_MPI_INT,0,IssmComm::GetComm());   //get from cpu 0 name of the data
     470                                ISSM_MPI_Bcast(&record_length,1,ISSM_MPI_INT,0,IssmComm::GetComm()); 
    486471                                switch(record_code){
    487472                                case 1:
    488473                                        /*boolean. get it from cpu 0 */
    489                                         MPI_Bcast(&booleanint,1,MPI_INT,0,IssmComm::GetComm());
     474                                        ISSM_MPI_Bcast(&booleanint,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    490475
    491476                                        /*create BoolParam: */
     
    495480                                case 2:
    496481                                        /*integer. get it from cpu 0 */
    497                                         MPI_Bcast(&integer,1,MPI_INT,0,IssmComm::GetComm());
     482                                        ISSM_MPI_Bcast(&integer,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    498483
    499484                                        /*create IntParam: */
     
    503488                                case 3:
    504489                                        /*scalar. get it from cpu 0 */
    505                                         MPI_Bcast(&scalar,1,MPI_DOUBLE,0,IssmComm::GetComm());
     490                                        ISSM_MPI_Bcast(&scalar,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    506491
    507492                                        /*create DoubleParam: */
     
    510495                                        break;
    511496                                case 4:
    512                                         MPI_Bcast(&string_size,1,MPI_INT,0,IssmComm::GetComm());
     497                                        ISSM_MPI_Bcast(&string_size,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    513498                                        if(string_size){
    514499                                                string=xNew<char>((string_size+1));
     
    516501
    517502                                                /*Read string from cpu 0: */
    518                                                 MPI_Bcast(string,string_size,MPI_CHAR,0,IssmComm::GetComm());
     503                                                ISSM_MPI_Bcast(string,string_size,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    519504                                        }
    520505                                        else{
     
    543528                }
    544529        } //}}}
    545         #endif
    546530}
    547531/*}}}*/
     
    567551                if(fread(&booleanint,sizeof(int),1,fid)!=1) _error_("could not read boolean ");
    568552        }
    569         #ifdef _HAVE_MPI_
    570         MPI_Bcast(&booleanint,1,MPI_INT,0,IssmComm::GetComm());
    571         #endif
     553        ISSM_MPI_Bcast(&booleanint,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    572554
    573555        /*cast to bool: */
     
    599581        }
    600582
    601         #ifdef _HAVE_MPI_
    602         MPI_Bcast(&integer,1,MPI_INT,0,IssmComm::GetComm());
    603         #endif
     583        ISSM_MPI_Bcast(&integer,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    604584
    605585        /*Assign output pointers: */
     
    628608                if(fread(&scalar,sizeof(IssmPDouble),1,fid)!=1)_error_("could not read scalar ");
    629609        }
    630         #ifdef _HAVE_MPI_
    631         MPI_Bcast(&scalar,1,MPI_DOUBLE,0,IssmComm::GetComm());
    632         #endif
     610        ISSM_MPI_Bcast(&scalar,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    633611
    634612        /*Assign output pointers: */
     
    662640        }
    663641
    664         #ifdef _HAVE_MPI_
    665         MPI_Bcast(&string_size,1,MPI_INT,0,IssmComm::GetComm());
    666         #endif
     642        ISSM_MPI_Bcast(&string_size,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    667643
    668644        /*Now allocate string: */
     
    675651                        if(fread(string,string_size*sizeof(char),1,fid)!=1)_error_(" could not read string ");
    676652                }
    677                 #ifdef _HAVE_MPI_
    678                 MPI_Bcast(string,string_size,MPI_CHAR,0,IssmComm::GetComm());
    679                 #endif
     653                ISSM_MPI_Bcast(string,string_size,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    680654        }
    681655        else{
     
    716690        }
    717691
    718         #ifdef _HAVE_MPI_
    719         MPI_Bcast(&M,1,MPI_INT,0,IssmComm::GetComm());
    720         #endif
     692        ISSM_MPI_Bcast(&M,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    721693
    722694        if(my_rank==0){ 
    723695                if(fread(&N,sizeof(int),1,fid)!=1) _error_("could not read number of columns for matrix ");
    724696        }
    725         #ifdef _HAVE_MPI_
    726         MPI_Bcast(&N,1,MPI_INT,0,IssmComm::GetComm());
    727         #endif
     697        ISSM_MPI_Bcast(&N,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    728698
    729699        /*Now allocate matrix: */
     
    736706                }
    737707
    738                 #ifdef _HAVE_MPI_
    739                 MPI_Bcast(matrix,M*N,MPI_DOUBLE,0,IssmComm::GetComm());
    740                 #endif
     708                ISSM_MPI_Bcast(matrix,M*N,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    741709        }
    742710
     
    787755                if(fread(&M,sizeof(int),1,fid)!=1) _error_("could not read number of rows for matrix ");
    788756        }
    789         #ifdef _HAVE_MPI_
    790         MPI_Bcast(&M,1,MPI_INT,0,IssmComm::GetComm());
    791         #endif
     757        ISSM_MPI_Bcast(&M,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    792758
    793759        if(my_rank==0){ 
    794760                if(fread(&N,sizeof(int),1,fid)!=1) _error_("could not read number of columns for matrix ");
    795761        }
    796         #ifdef _HAVE_MPI_
    797         MPI_Bcast(&N,1,MPI_INT,0,IssmComm::GetComm());
    798         #endif
     762        ISSM_MPI_Bcast(&N,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    799763
    800764        /*Now allocate matrix: */
     
    806770                        if(fread(matrix,M*N*sizeof(IssmPDouble),1,fid)!=1) _error_("could not read matrix ");
    807771                }
    808                 #ifdef _HAVE_MPI_
    809                 MPI_Bcast(matrix,M*N,MPI_DOUBLE,0,IssmComm::GetComm());
    810                 #endif
     772                ISSM_MPI_Bcast(matrix,M*N,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    811773
    812774                _assert_(this->independents);
     
    857819                if(fread(&numstrings,sizeof(int),1,fid)!=1) _error_("could not read length of string array");
    858820        }
    859         #ifdef _HAVE_MPI_
    860         MPI_Bcast(&numstrings,1,MPI_INT,0,IssmComm::GetComm());
    861         #endif
     821        ISSM_MPI_Bcast(&numstrings,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    862822
    863823        /*Now allocate string array: */
     
    872832                                if(fread(&string_size,sizeof(int),1,fid)!=1) _error_("could not read length of string ");
    873833                        }
    874                         #ifdef _HAVE_MPI_
    875                         MPI_Bcast(&string_size,1,MPI_INT,0,IssmComm::GetComm());
    876                         #endif
     834                        ISSM_MPI_Bcast(&string_size,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    877835                        if(string_size){
    878836                                string=xNew<char>((string_size+1));
     
    883841                                        if(fread(string,string_size*sizeof(char),1,fid)!=1)_error_(" could not read string ");
    884842                                }
    885                                 #ifdef _HAVE_MPI_
    886                                 MPI_Bcast(string,string_size,MPI_CHAR,0,IssmComm::GetComm());
    887                                 #endif
     843                                ISSM_MPI_Bcast(string,string_size,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    888844                        }
    889845                        else{
     
    930886                if(fread(&numrecords,sizeof(int),1,fid)!=1) _error_("could not read number of records in matrix array ");
    931887        }
    932         #ifdef _HAVE_MPI_
    933         MPI_Bcast(&numrecords,1,MPI_INT,0,IssmComm::GetComm());
    934         #endif
     888        ISSM_MPI_Bcast(&numrecords,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    935889
    936890        if(numrecords){
     
    953907                                if(fread(&M,sizeof(int),1,fid)!=1) _error_("could not read number of rows in " << i << "th matrix of matrix array");
    954908                        }
    955                         #ifdef _HAVE_MPI_
    956                         MPI_Bcast(&M,1,MPI_INT,0,IssmComm::GetComm());
    957                         #endif
     909                        ISSM_MPI_Bcast(&M,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    958910
    959911                        if(my_rank==0){ 
    960912                                if(fread(&N,sizeof(int),1,fid)!=1) _error_("could not read number of columns in " << i << "th matrix of matrix array");
    961913                        }
    962                         #ifdef _HAVE_MPI_
    963                         MPI_Bcast(&N,1,MPI_INT,0,IssmComm::GetComm());
    964                         #endif
     914                        ISSM_MPI_Bcast(&N,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    965915
    966916                        /*Now allocate matrix: */
     
    973923                                }
    974924
    975                                 #ifdef _HAVE_MPI_
    976                                 MPI_Bcast(matrix,M*N,MPI_DOUBLE,0,IssmComm::GetComm());
    977                                 #endif
     925                                ISSM_MPI_Bcast(matrix,M*N,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    978926                                matrices[i]=xNew<IssmDouble>(M*N);
    979927                                for (int j=0;j<M*N;++j) {matrices[i][j]=matrix[j];}
     
    13071255        }
    13081256        /*Broadcast code and vector type: */
    1309 #ifdef _HAVE_MPI_
    1310         MPI_Bcast(&lastindex,1,MPI_INT,0,IssmComm::GetComm());
    1311 #endif
     1257        ISSM_MPI_Bcast(&lastindex,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    13121258
    13131259        /*Assign output pointers:*/
     
    13651311                }
    13661312        }
    1367         #ifdef _HAVE_MPI_
    1368         MPI_Bcast(&found,1,MPI_INT,0,IssmComm::GetComm());
     1313        ISSM_MPI_Bcast(&found,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    13691314        if(!found)_error_("could not find data with name " << EnumToStringx(data_enum) << " in binary file");
    1370         #endif
    13711315
    13721316        /*Broadcast code and vector type: */
    1373         #ifdef _HAVE_MPI_
    1374         MPI_Bcast(&record_code,1,MPI_INT,0,IssmComm::GetComm());
    1375         MPI_Bcast(&vector_type,1,MPI_INT,0,IssmComm::GetComm());
    1376         if(record_code==5) MPI_Bcast(&vector_type,1,MPI_INT,0,IssmComm::GetComm());
    1377         #endif
     1317        ISSM_MPI_Bcast(&record_code,1,ISSM_MPI_INT,0,IssmComm::GetComm());
     1318        ISSM_MPI_Bcast(&vector_type,1,ISSM_MPI_INT,0,IssmComm::GetComm());
     1319        if(record_code==5) ISSM_MPI_Bcast(&vector_type,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    13781320
    13791321        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/classes/Loads/Loads.cpp

    r15012 r15838  
    6767
    6868        /*Grab sum of all cpus: */
    69 #ifdef _HAVE_MPI_
    70         MPI_Allreduce((void*)&ispenalty,(void*)&allispenalty,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
     69        ISSM_MPI_Allreduce((void*)&ispenalty,(void*)&allispenalty,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    7170        ispenalty=allispenalty;
    72 #endif
    7371
    7472        if(ispenalty)
     
    9694
    9795        /*Grab max of all cpus: */
    98 #ifdef _HAVE_MPI_
    99         MPI_Allreduce((void*)&max,(void*)&allmax,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     96        ISSM_MPI_Allreduce((void*)&max,(void*)&allmax,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    10097        max=allmax;
    101 #endif
    10298
    10399        return max;
     
    114110
    115111        /*figure out total number of loads combining all the cpus (no clones here)*/
    116         #ifdef _HAVE_MPI_
    117         MPI_Reduce(&localloads,&numberofloads,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    118         MPI_Bcast(&numberofloads,1,MPI_INT,0,IssmComm::GetComm());
    119         #else
    120         numberofloads=localloads;
    121         #endif
     112        ISSM_MPI_Reduce(&localloads,&numberofloads,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     113        ISSM_MPI_Bcast(&numberofloads,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    122114
    123115        return numberofloads;
     
    140132
    141133        /*figure out total number of loads combining all the cpus (no clones here)*/
    142 #ifdef _HAVE_MPI_
    143         MPI_Reduce(&localloads,&numberofloads,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    144         MPI_Bcast(&numberofloads,1,MPI_INT,0,IssmComm::GetComm());
    145 #else
    146         numberofloads=localloads;
    147 #endif
     134        ISSM_MPI_Reduce(&localloads,&numberofloads,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     135        ISSM_MPI_Bcast(&numberofloads,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    148136
    149137        return numberofloads;
  • issm/trunk-jpl/src/c/classes/Nodes.cpp

    r15639 r15838  
    6565         * First: get number of dofs for each cpu*/
    6666        alldofcount=xNew<int>(num_procs);
    67         #ifdef _HAVE_MPI_
    68         MPI_Gather(&dofcount,1,MPI_INT,alldofcount,1,MPI_INT,0,IssmComm::GetComm());
    69         MPI_Bcast(alldofcount,num_procs,MPI_INT,0,IssmComm::GetComm());
    70         #else
    71         alldofcount[0]=dofcount;
    72         #endif
     67        ISSM_MPI_Gather(&dofcount,1,ISSM_MPI_INT,alldofcount,1,ISSM_MPI_INT,0,IssmComm::GetComm());
     68        ISSM_MPI_Bcast(alldofcount,num_procs,ISSM_MPI_INT,0,IssmComm::GetComm());
    7369
    7470        /* Every cpu should start its own dof count at the end of the dofcount from cpu-1*/
     
    10298        }
    10399
    104         #ifdef _HAVE_MPI_
    105         MPI_Allreduce((void*)truedofs,(void*)alltruedofs,numnodes*maxdofspernode,MPI_INT,MPI_MAX,IssmComm::GetComm());
    106         #else
    107         for(i=0;i<numnodes*maxdofspernode;i++)alltruedofs[i]=truedofs[i];
    108         #endif
     100        ISSM_MPI_Allreduce((void*)truedofs,(void*)alltruedofs,numnodes*maxdofspernode,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    109101
    110102        /* Now every cpu knows the true dofs of everyone else that is not a clone*/
     
    147139         * dealt with by another cpu. We take the minimum because we are going to manage dof assignment in increasing
    148140         * order of cpu rank. This is also why we initialized this array to num_procs.*/
    149         #ifdef _HAVE_MPI_
    150         MPI_Allreduce((void*)ranks,(void*)minranks,numnodes,MPI_INT,MPI_MIN,IssmComm::GetComm());
    151         #else
    152         for(i=0;i<numnodes;i++)minranks[i]=ranks[i];
    153         #endif
     141        ISSM_MPI_Allreduce((void*)ranks,(void*)minranks,numnodes,ISSM_MPI_INT,ISSM_MPI_MIN,IssmComm::GetComm());
    154142
    155143        /*Now go through all objects, and use minranks to flag which objects are cloned: */
     
    192180
    193181        /*Grab max of all cpus: */
    194         #ifdef _HAVE_MPI_
    195         MPI_Allreduce((void*)&max,(void*)&allmax,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     182        ISSM_MPI_Allreduce((void*)&max,(void*)&allmax,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    196183        max=allmax;
    197         #endif
    198184
    199185        return max;
     
    225211
    226212        /*Grab max of all cpus: */
    227 #ifdef _HAVE_MPI_
    228         MPI_Allreduce((void*)&max,(void*)&allmax,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
     213        ISSM_MPI_Allreduce((void*)&max,(void*)&allmax,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    229214        max=allmax;
    230 #endif
    231215
    232216        return max;
     
    242226
    243227        /*Gather from all cpus: */
    244         #ifdef _HAVE_MPI_
    245         MPI_Allreduce ( (void*)&numdofs,(void*)&allnumdofs,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    246         #else
    247         allnumdofs=numdofs;
    248         #endif
     228        ISSM_MPI_Allreduce ( (void*)&numdofs,(void*)&allnumdofs,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    249229        return allnumdofs;
    250230}
     
    290270
    291271        /*Gather from all cpus: */
    292         #ifdef _HAVE_MPI_
    293         MPI_Allreduce ( (void*)&numnodes,(void*)&allnumnodes,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
    294         #else
    295         allnumnodes=numnodes;
    296         #endif
     272        ISSM_MPI_Allreduce ( (void*)&numnodes,(void*)&allnumnodes,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    297273
    298274        return allnumnodes;
     
    320296        }
    321297
    322         #ifdef _HAVE_MPI_
    323         MPI_Reduce (&max_sid,&node_max_sid,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
    324         MPI_Bcast(&node_max_sid,1,MPI_INT,0,IssmComm::GetComm());
     298        ISSM_MPI_Reduce (&max_sid,&node_max_sid,1,ISSM_MPI_INT,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     299        ISSM_MPI_Bcast(&node_max_sid,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    325300        max_sid=node_max_sid;
    326         #endif
    327301
    328302        /*sid starts at 0*/
  • issm/trunk-jpl/src/c/classes/Patch.cpp

    r14996 r15838  
    111111        int         node_numrows;
    112112        IssmDouble     *total_values  = NULL;
    113         #ifdef _HAVE_MPI_
    114         MPI_Status  status;
    115         #endif
     113        ISSM_MPI_Status  status;
    116114
    117115        /*recover my_rank:*/
     
    120118
    121119        /*First, figure out total number of rows combining all the cpus: */
    122         #ifdef _HAVE_MPI_
    123         MPI_Reduce(&this->numrows,&total_numrows,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    124         MPI_Bcast(&total_numrows,1,MPI_INT,0,IssmComm::GetComm());
    125         #else
    126         total_numrows=this->numrows;
    127         #endif
     120        ISSM_MPI_Reduce(&this->numrows,&total_numrows,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     121        ISSM_MPI_Bcast(&total_numrows,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    128122
    129123        /*return if patch empty*/
     
    141135
    142136        /*Now, ask other nodes to send their values: */
    143         #ifdef _HAVE_MPI_
    144137        for(int i=1;i<num_procs;i++){
    145138                if (my_rank==i){
    146                         MPI_Send(&this->numrows,1,MPI_INT,0,1,IssmComm::GetComm());   
    147                         if (this->numrows)MPI_Send(this->values,this->numrows*this->numcols,MPI_DOUBLE,0,1,IssmComm::GetComm());
     139                        ISSM_MPI_Send(&this->numrows,1,ISSM_MPI_INT,0,1,IssmComm::GetComm());   
     140                        if (this->numrows)ISSM_MPI_Send(this->values,this->numrows*this->numcols,ISSM_MPI_DOUBLE,0,1,IssmComm::GetComm());
    148141                }
    149142                if (my_rank==0){
    150                         MPI_Recv(&node_numrows,1,MPI_INT,i,1,IssmComm::GetComm(),&status);
    151                         if (node_numrows)MPI_Recv(total_values+count,node_numrows*this->numcols,MPI_DOUBLE,i,1,IssmComm::GetComm(),&status);
     143                        ISSM_MPI_Recv(&node_numrows,1,ISSM_MPI_INT,i,1,IssmComm::GetComm(),&status);
     144                        if (node_numrows)ISSM_MPI_Recv(total_values+count,node_numrows*this->numcols,ISSM_MPI_DOUBLE,i,1,IssmComm::GetComm(),&status);
    152145                        count+=node_numrows*this->numcols;
    153146                }
    154147        }       
    155         #endif
    156148
    157149        /*Now, node 0 has total_values, of size total_numrows*this->numcols. Update the fields in the patch, to reflect this new
     
    162154                this->values=total_values;
    163155        }
    164         #ifdef _HAVE_MPI_
    165156        else{
    166157                this->numrows=0;
    167158                xDelete<IssmDouble>(this->values);
    168159        }
    169         #endif
    170160}/*}}}*/
  • issm/trunk-jpl/src/c/classes/Profiler.cpp

    r15104 r15838  
    7373         *in the execution: */
    7474        if(!dontmpisync){
    75                 #ifdef _HAVE_MPI_
    76                 MPI_Barrier(IssmComm::GetComm());
    77                 #endif
     75                ISSM_MPI_Barrier(IssmComm::GetComm());
    7876        }
    7977
    8078        /*Capture time: */
    8179        #ifdef _HAVE_MPI_
    82         t=MPI_Wtime();
     80        t=ISSM_MPI_Wtime();
    8381        #else
    8482        t=(IssmPDouble)clock();
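
Profiler.cpp is one of the few places where an #ifdef _HAVE_MPI_ survives this conversion, because the serial build falls back to clock() rather than a wall-clock timer. If the wrapper were later to provide ISSM_MPI_Wtime in serial builds as well, a fallback along the same lines is the natural candidate; this is only a sketch of that assumption:

    #ifndef _HAVE_MPI_
    #include <time.h>

    /* Hypothetical serial ISSM_MPI_Wtime: seconds since an arbitrary origin,
     * mirroring MPI_Wtime's contract but based on CPU time via clock(), exactly
     * like the #else branch kept in Profiler.cpp above. */
    static inline double ISSM_MPI_Wtime(void){
       return (double)clock()/(double)CLOCKS_PER_SEC;
    }
    #endif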
  • issm/trunk-jpl/src/c/classes/Vertices.cpp

    r15012 r15838  
    6464         * First: get number of pids for each cpu*/
    6565        allpidcount=xNew<int>(num_procs);
    66         #ifdef _HAVE_MPI_
    67         MPI_Gather(&pidcount,1,MPI_INT,allpidcount,1,MPI_INT,0,IssmComm::GetComm());
    68         MPI_Bcast(allpidcount,num_procs,MPI_INT,0,IssmComm::GetComm());
    69         #else
    70         allpidcount[0]=pidcount;
    71         #endif
     66        ISSM_MPI_Gather(&pidcount,1,ISSM_MPI_INT,allpidcount,1,ISSM_MPI_INT,0,IssmComm::GetComm());
     67        ISSM_MPI_Bcast(allpidcount,num_procs,ISSM_MPI_INT,0,IssmComm::GetComm());
    7268
    7369        /* Every cpu should start its own pid count at the end of the pidcount from cpu-1*/
     
    9288                vertex->ShowTruePids(truepids);
    9389        }
    94         #ifdef _HAVE_MPI_
    95         MPI_Allreduce((void*)truepids,(void*)alltruepids,numberofobjects,MPI_INT,MPI_MAX,IssmComm::GetComm());
    96         #else
    97         for(i=0;i<numberofobjects;i++)alltruepids[i]=truepids[i];
    98         #endif
     90        ISSM_MPI_Allreduce((void*)truepids,(void*)alltruepids,numberofobjects,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
    9991
    10092        /* Now every cpu knows the true pids of everyone else that is not a clone*/
     
    135127         * dealt with by another cpu. We take the minimum because we are going to manage dof assignment in increasing
    136128         * order of cpu rank. This is also why we initialized this array to num_procs.*/
    137         #ifdef _HAVE_MPI_
    138         MPI_Allreduce ( (void*)ranks,(void*)minranks,numberofobjects,MPI_INT,MPI_MIN,IssmComm::GetComm());
    139         #else
    140         for(i=0;i<numberofobjects;i++)minranks[i]=ranks[i];
    141         #endif
     129        ISSM_MPI_Allreduce ( (void*)ranks,(void*)minranks,numberofobjects,ISSM_MPI_INT,ISSM_MPI_MIN,IssmComm::GetComm());
    142130
    143131        /*Now go through all objects, and use minranks to flag which objects are cloned: */
     
    167155        }
    168156
    169         #ifdef _HAVE_MPI_
    170         MPI_Reduce (&max_sid,&vertex_max_sid,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
    171         MPI_Bcast(&vertex_max_sid,1,MPI_INT,0,IssmComm::GetComm());
     157        ISSM_MPI_Reduce (&max_sid,&vertex_max_sid,1,ISSM_MPI_INT,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     158        ISSM_MPI_Bcast(&vertex_max_sid,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    172159        max_sid=vertex_max_sid;
    173         #endif
    174160
    175161        /*sid starts at 0*/
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/RiftConstraintsState.cpp

    r15104 r15838  
    2828        }
    2929
    30         #ifdef _HAVE_MPI_
    31         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    32         MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
     30        ISSM_MPI_Reduce (&found,&mpi_found,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     31        ISSM_MPI_Bcast(&mpi_found,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    3332        found=mpi_found;
    34         #endif
    3533
    3634        return found;
     
    9088        }
    9189
    92         #ifdef _HAVE_MPI_
    93         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    94         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
     90        ISSM_MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     91        ISSM_MPI_Bcast(&sum_num_unstable_constraints,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    9592        num_unstable_constraints=sum_num_unstable_constraints;
    96         #endif
    9793
    9894        /*Assign output pointers: */
     
    130126
    131127        /*Is there just one found? that would mean we have frozen! : */
    132         #ifdef _HAVE_MPI_
    133         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_MAX,0,IssmComm::GetComm() );
    134         MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
     128        ISSM_MPI_Reduce (&found,&mpi_found,1,ISSM_MPI_INT,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     129        ISSM_MPI_Bcast(&mpi_found,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    135130        found=mpi_found;
    136         #endif
    137131
    138132        return found;
     
    190184        }
    191185
    192         #ifdef _HAVE_MPI_
    193         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    194         MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
     186        ISSM_MPI_Reduce (&found,&mpi_found,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     187        ISSM_MPI_Bcast(&mpi_found,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    195188        found=mpi_found;
    196         #endif
    197189
    198190        return found;
     
    222214        }
    223215
    224         #ifdef _HAVE_MPI_
    225         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    226         MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
     216        ISSM_MPI_Reduce (&found,&mpi_found,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     217        ISSM_MPI_Bcast(&mpi_found,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    227218        found=mpi_found;
    228         #endif
    229219
    230220        if (found){
     
    282272        }
    283273
    284         #ifdef _HAVE_MPI_
    285         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    286         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
     274        ISSM_MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     275        ISSM_MPI_Bcast(&sum_num_unstable_constraints,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    287276        num_unstable_constraints=sum_num_unstable_constraints;
    288         #endif
    289277
    290278        /*Assign output pointers: */
     
    322310        }
    323311
    324         #ifdef _HAVE_MPI_
    325         MPI_Reduce (&max_penetration,&mpi_max_penetration,1,MPI_DOUBLE,MPI_MAX,0,IssmComm::GetComm() );
    326         MPI_Bcast(&mpi_max_penetration,1,MPI_DOUBLE,0,IssmComm::GetComm());               
     312        ISSM_MPI_Reduce (&max_penetration,&mpi_max_penetration,1,ISSM_MPI_DOUBLE,ISSM_MPI_MAX,0,IssmComm::GetComm() );
     313        ISSM_MPI_Bcast(&mpi_max_penetration,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());               
    327314        max_penetration=mpi_max_penetration;
    328         #endif
    329315
    330316        /*feed max_penetration to inputs: */
     
    361347        }
    362348
    363         #ifdef _HAVE_MPI_
    364         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    365         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
     349        ISSM_MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     350        ISSM_MPI_Bcast(&sum_num_unstable_constraints,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    366351        num_unstable_constraints=sum_num_unstable_constraints;
    367         #endif
    368352
    369353        return num_unstable_constraints;
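
The two-call idiom above, an ISSM_MPI_Reduce onto cpu 0 followed by an ISSM_MPI_Bcast of the result, is how this changeset turns a per-cpu counter or flag into a cluster-wide value; the same pattern reappears below in the thermal, misfit and gradient modules. A minimal sketch with a hypothetical helper name, using the call signatures exactly as they appear in the hunks (paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: sum an integer counter (e.g. a number of unstable
     * constraints) over all cpus and return the total on every rank.*/
    int SumOverCpus(int local_count){

        int total=0;

        /*reduce the partial counts onto cpu 0...*/
        ISSM_MPI_Reduce(&local_count,&total,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm());

        /*...then broadcast the total so every cpu holds the same value*/
        ISSM_MPI_Bcast(&total,1,ISSM_MPI_INT,0,IssmComm::GetComm());

        return total;
    }

Functionally this is the same as a single ISSM_MPI_Allreduce with ISSM_MPI_SUM, which is the form GroundinglineMigrationx.cpp uses below.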
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalConstraintsState.cpp

    r13590 r15838  
    3636        }
    3737
    38         #ifdef _HAVE_MPI_
    39         MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    40         MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,IssmComm::GetComm());               
     38        ISSM_MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     39        ISSM_MPI_Bcast(&sum_num_unstable_constraints,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    4140        num_unstable_constraints=sum_num_unstable_constraints;
    42         #endif
    4341
    4442        /*Have we converged? : */
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalIsPresent.cpp

    r13622 r15838  
    2828        }
    2929
    30         #ifdef _HAVE_MPI_
    31         MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,IssmComm::GetComm() );
    32         MPI_Bcast(&mpi_found,1,MPI_INT,0,IssmComm::GetComm());               
     30        ISSM_MPI_Reduce (&found,&mpi_found,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     31        ISSM_MPI_Bcast(&mpi_found,1,ISSM_MPI_INT,0,IssmComm::GetComm());               
    3332        found=mpi_found;
    34         #endif
    3533
    3634        return found;
  • issm/trunk-jpl/src/c/modules/DragCoefficientAbsGradientx/DragCoefficientAbsGradientx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/GroundinglineMigrationx/GroundinglineMigrationx.cpp

    r15104 r15838  
    160160                vec_nodes_on_floatingice->Assemble();
    161161
    162                 #ifdef _HAVE_MPI_
    163                 MPI_Allreduce(&local_nflipped,&nflipped,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
     162                ISSM_MPI_Allreduce(&local_nflipped,&nflipped,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    164163                if(VerboseConvergence()) _printf0_("   Additional number of vertices allowed to unground: " << nflipped << "\n");
    165                 #else
    166                 nflipped=local_nflipped;
    167                 #endif
    168164
    169165                /*Avoid leaks: */
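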
  • issm/trunk-jpl/src/c/modules/Krigingx/pKrigingx.cpp

    r15557 r15838  
    3737
    3838        /*Get some Options*/
    39         MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();
     39        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); start=ISSM_MPI_Wtime();
    4040        options->Get(&radius,"searchradius",0.);
    4141        options->Get(&mindata,"mindata",1);
     
    4343
    4444        /*Process observation dataset*/
    45         MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
     45        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); start_init=ISSM_MPI_Wtime();
    4646        observations=new Observations(obs_list,obs_x,obs_y,obs_length,options);
    47         MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();
     47        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); finish_init=ISSM_MPI_Wtime();
    4848
    4949        /*Allocate output*/
     
    5454        options->Get(&output,"output",(char*)"prediction");
    5555
    56         MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
     56        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); start_core=ISSM_MPI_Wtime( );
    5757        if(strcmp(output,"quadtree")==0){
    5858                observations->QuadtreeColoring(predictions,x_interp,y_interp,n_interp);
     
    7373                _printf0_("      interpolation progress: "<<fixed<<setw(6)<<setprecision(4)<<100.<<"%  \n");
    7474
    75 #ifdef _HAVE_MPI_
    7675                double *sumpredictions =xNew<double>(n_interp);
    7776                double *sumerror       =xNew<double>(n_interp);
    78                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
    79                 MPI_Allreduce(error,sumerror,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     77                ISSM_MPI_Allreduce(predictions,sumpredictions,n_interp,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
     78                ISSM_MPI_Allreduce(error,sumerror,n_interp,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
    8079                xDelete<double>(error); error=sumerror;
    8180                xDelete<double>(predictions); predictions=sumpredictions;
    82 #endif
    8381        }
    8482        else if(strcmp(output,"v4")==0){
     
    9189                _printf0_("      interpolation progress: "<<fixed<<setw(6)<<setprecision(4)<<100.<<"%  \n");
    9290
    93 #ifdef _HAVE_MPI_
    9491                double *sumpredictions =xNew<double>(n_interp);
    95                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     92                ISSM_MPI_Allreduce(predictions,sumpredictions,n_interp,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
    9693                xDelete<double>(predictions); predictions=sumpredictions;
    97 #endif
    9894        }
    9995        else if(strcmp(output,"nearestneighbor")==0){
     
    106102                _printf0_("      interpolation progress: "<<fixed<<setw(6)<<setprecision(4)<<100.<<"%  \n");
    107103
    108 #ifdef _HAVE_MPI_
    109104                double *sumpredictions =xNew<double>(n_interp);
    110                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     105                ISSM_MPI_Allreduce(predictions,sumpredictions,n_interp,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
    111106                xDelete<double>(predictions); predictions=sumpredictions;
    112 #endif
    113107        }
    114108        else if(strcmp(output,"idw")==0){
     
    123117                _printf0_("      interpolation progress: "<<fixed<<setw(6)<<setprecision(4)<<100.<<"%  \n");
    124118
    125 #ifdef _HAVE_MPI_
    126119                double *sumpredictions =xNew<double>(n_interp);
    127                 MPI_Allreduce(predictions,sumpredictions,n_interp,MPI_DOUBLE,MPI_SUM,IssmComm::GetComm());
     120                ISSM_MPI_Allreduce(predictions,sumpredictions,n_interp,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,IssmComm::GetComm());
    128121                xDelete<double>(predictions); predictions=sumpredictions;
    129 #endif
    130122        }
    131123        else{
    132124                _error_("output '" << output << "' not supported yet");
    133125        }
    134         MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
     126        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); finish_core=ISSM_MPI_Wtime( );
    135127
    136128        /*clean-up and Assign output pointer*/
     
    141133        *perror       = error;
    142134
    143         MPI_Barrier(MPI_COMM_WORLD); finish=MPI_Wtime( );
     135        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); finish=ISSM_MPI_Wtime( );
     144136        _printf0_("\n   " << setw(34) << left << "Observation filtering elapsed time: " << finish_init-start_init << " seconds  \n\n");
    145137        _printf0_("   " << setw(34) << left << "Kriging prediction elapsed time: " << finish_core-start_core << " seconds  \n\n");
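
pKrigingx.cpp combines two patterns: timing of a parallel phase (a barrier, then ISSM_MPI_Wtime on each side of the work) and an ISSM_MPI_Allreduce that sums the per-cpu prediction arrays, since each cpu only fills the interpolation points it owns. A minimal sketch of the timing part, assuming ISSM_MPI_Wtime returns seconds as a double like MPI_Wtime; the function name and message are illustrative only (paths relative to src/c):

    #include "./shared/io/io.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: time a phase of parallel work on ISSM_MPI_COMM_WORLD*/
    void TimedPhase(void){

        double start,finish;

        /*barrier first, so the timer measures the slowest cpu rather than the first one to arrive*/
        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); start=ISSM_MPI_Wtime();

        /* ... parallel work goes here ... */

        ISSM_MPI_Barrier(ISSM_MPI_COMM_WORLD); finish=ISSM_MPI_Wtime();
        _printf0_("   phase elapsed time: " << finish-start << " seconds\n");
    }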
  • issm/trunk-jpl/src/c/modules/NodalValuex/NodalValuex.cpp

    r15130 r15838  
    3737
    3838        /*Broadcast whether we found the element: */
    39         #ifdef _HAVE_MPI_
    40         MPI_Allreduce ( &found,&sumfound,1,MPI_INT,MPI_SUM,IssmComm::GetComm());
     39        ISSM_MPI_Allreduce ( &found,&sumfound,1,ISSM_MPI_INT,ISSM_MPI_SUM,IssmComm::GetComm());
    4140        if(!sumfound)_error_("could not find element with vertex with id" << index << " to compute nodal value " << EnumToStringx(natureofdataenum));
    42         #endif
    4341
    4442        /*Broadcast and plug into response: */
    45         #ifdef _HAVE_MPI_
    46         MPI_Allreduce ( &cpu_found,&cpu_found,1,MPI_INT,MPI_MAX,IssmComm::GetComm());
    47         MPI_Bcast(&value,1,MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    48         #else
    49         value=cpu_found;
    50         #endif
     43        ISSM_MPI_Allreduce ( &cpu_found,&cpu_found,1,ISSM_MPI_INT,ISSM_MPI_MAX,IssmComm::GetComm());
     44        ISSM_MPI_Bcast(&value,1,ISSM_MPI_DOUBLE,cpu_found,IssmComm::GetComm());
    5145
    5246        *pnodalvalue=value;
  • issm/trunk-jpl/src/c/modules/ParseToolkitsOptionsx/ParseToolkitsOptionsx.cpp

    r14999 r15838  
    9595
    9696        /*Ok, broadcast to other cpus: */
    97         #ifdef _HAVE_MPI_
    98         MPI_Bcast(&numanalyses,1,MPI_INT,0,IssmComm::GetComm());
     97        ISSM_MPI_Bcast(&numanalyses,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    9998        if(my_rank!=0){
    10099                analyses=xNew<IssmDouble>(numanalyses);
    101100                strings=xNew<char*>(numanalyses);
    102101        }
    103         MPI_Bcast(analyses,numanalyses,MPI_DOUBLE,0,IssmComm::GetComm());
    104         #endif
     102        ISSM_MPI_Bcast(analyses,numanalyses,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    105103        for(i=0;i<numanalyses;i++){
    106104                char* string=strings[i];
     
    109107                }
    110108                if(my_rank==0)stringlength=(strlen(string)+1)*sizeof(char);
    111                 #ifdef _HAVE_MPI_
    112                 MPI_Bcast(&stringlength,1,MPI_INT,0,IssmComm::GetComm());
     109                ISSM_MPI_Bcast(&stringlength,1,ISSM_MPI_INT,0,IssmComm::GetComm());
    113110                if(my_rank!=0)string=xNew<char>(stringlength);
    114                 MPI_Bcast(string,stringlength,MPI_CHAR,0,IssmComm::GetComm());
     111                ISSM_MPI_Bcast(string,stringlength,ISSM_MPI_CHAR,0,IssmComm::GetComm());
    115112                if(my_rank!=0)strings[i]=string;
    116                 #endif
    117113        }
    118114
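
ParseToolkitsOptionsx.cpp (like DakotaSpawnCore.cpp earlier in the changeset) broadcasts variable-length strings from the io cpu in two steps: broadcast the length, let the receivers allocate, then broadcast the characters. A minimal sketch with a hypothetical helper name, using the ISSM_MPI_Bcast signature shown in the hunks (paths relative to src/c):

    #include <cstring>
    #include "./shared/io/Comm/IssmComm.h"
    #include "./shared/MemOps/MemOps.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: cpu 0 passes its string, the other cpus pass NULL;
     * after the call every cpu owns a copy of cpu 0's string.*/
    char* BcastString(char* string){

        int my_rank=IssmComm::GetRank();
        int stringlength;

        /*cpu 0 knows the length (including the terminating '\0')*/
        if(my_rank==0) stringlength=(strlen(string)+1)*sizeof(char);
        ISSM_MPI_Bcast(&stringlength,1,ISSM_MPI_INT,0,IssmComm::GetComm());

        /*receivers allocate, then everybody joins the character broadcast*/
        if(my_rank!=0) string=xNew<char>(stringlength);
        ISSM_MPI_Bcast(string,stringlength,ISSM_MPI_CHAR,0,IssmComm::GetComm());

        return string;
    }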
  • issm/trunk-jpl/src/c/modules/RheologyBbarAbsGradientx/RheologyBbarAbsGradientx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/SurfaceAbsVelMisfitx/SurfaceAbsVelMisfitx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/SurfaceAreax/SurfaceAreax.cpp

    r14999 r15838  
    2626
    2727        /*Sum all J from all cpus of the cluster:*/
    28         #ifdef _HAVE_MPI_
    29         MPI_Reduce (&S,&S_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    30         MPI_Bcast(&S_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     28        ISSM_MPI_Reduce (&S,&S_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     29        ISSM_MPI_Bcast(&S_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3130        S=S_sum;
    32         #endif
    3331
    3432        /*add surface area to element inputs:*/
  • issm/trunk-jpl/src/c/modules/SurfaceAverageVelMisfitx/SurfaceAverageVelMisfitx.cpp

    r15130 r15838  
    2929
    3030        /*Sum all J from all cpus of the cluster:*/
    31         #ifdef _HAVE_MPI_
    32         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    33         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     31        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     32        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3433        J=J_sum;
    35         #endif
    3634
    3735        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/SurfaceLogVelMisfitx/SurfaceLogVelMisfitx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/SurfaceLogVxVyMisfitx/SurfaceLogVxVyMisfitx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/SurfaceRelVelMisfitx/SurfaceRelVelMisfitx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/ThicknessAbsMisfitx/ThicknessAbsMisfitx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/ThicknessAcrossGradientx/ThicknessAcrossGradientx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/modules/ThicknessAlongGradientx/ThicknessAlongGradientx.cpp

    r15130 r15838  
    2525
    2626        /*Sum all J from all cpus of the cluster:*/
    27         #ifdef _HAVE_MPI_
    28         MPI_Reduce (&J,&J_sum,1,MPI_DOUBLE,MPI_SUM,0,IssmComm::GetComm() );
    29         MPI_Bcast(&J_sum,1,MPI_DOUBLE,0,IssmComm::GetComm());
     27        ISSM_MPI_Reduce (&J,&J_sum,1,ISSM_MPI_DOUBLE,ISSM_MPI_SUM,0,IssmComm::GetComm() );
     28        ISSM_MPI_Bcast(&J_sum,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    3029        J=J_sum;
    31         #endif
    3230
    3331        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/shared/Numerics/types.h

    r14915 r15838  
    1717#if ISSM_USE_64BIT_INDICES == 1
    1818typedef long long IssmInt;
    19 //#define MPIU_INT MPI_LONG_LONG_INT already define in petsc
    2019#else
    2120typedef int IssmInt;
    22 //#define MPIU_INT MPI_INT already defined in petsc
    2321#endif 
    2422
  • issm/trunk-jpl/src/c/shared/io/Comm/Comm.h

    r14897 r15838  
    66#define _IO_COMM_H_
    77
    8 #include "./CommDef.h"
     8#include "../../../toolkits/mpi/issmmpi.h"
    99#include "./IssmComm.h"
    1010
  • issm/trunk-jpl/src/c/shared/io/Comm/IssmComm.cpp

    r14950 r15838  
    1313#include "../../Exceptions/exceptions.h"
    1414
    15 void IssmComm::SetComm(COMM incomm){ /*{{{*/
     15void IssmComm::SetComm(ISSM_MPI_COMM incomm){ /*{{{*/
    1616
    1717        /*A comm is provided, we are running in parallel (this is not a module)*/
     
    2828
    2929}/*}}}*/
    30 COMM IssmComm::GetComm(){  /*{{{*/
     30ISSM_MPI_COMM IssmComm::GetComm(){  /*{{{*/
    3131        if(!parallel) _error_("Cannot return comm in serial mode");
    3232        return comm;
     
    3939        if(!parallel) return my_rank;
    4040
    41         #ifdef _HAVE_MPI_
    42         MPI_Comm_rank(comm,&my_rank);
    43         #endif
     41        ISSM_MPI_Comm_rank(comm,&my_rank);
    4442
    4543        return my_rank;
     
    5351        if(!parallel) return size;
    5452
    55         #ifdef _HAVE_MPI_
    56         MPI_Comm_size(comm,&size);
    57         #endif
     53        ISSM_MPI_Comm_size(comm,&size);
    5854
    5955        return size;
  • issm/trunk-jpl/src/c/shared/io/Comm/IssmComm.h

    r14897 r15838  
    2121
    2222        private:
    23                 static COMM comm;
     23                static ISSM_MPI_Comm comm;
    2424                static bool parallel;
    2525
    2626        public:
    27                 static void SetComm(COMM incomm);
     27                static void SetComm(ISSM_MPI_COMM incomm);
    2828                static void SetComm(void);
    29                 static COMM GetComm(void);
     29                static ISSM_MPI_COMM GetComm(void);
    3030                static int GetRank(void);
    3131                static int GetSize(void);
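
With the comm member and accessors now typed on the issmmpi layer, modules never see raw MPI types; they only query IssmComm. A short usage sketch (the function is hypothetical; paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./shared/io/io.h"

    /*Hypothetical example: the communicator is set once at startup via IssmComm::SetComm,
     * after which any module can query rank and size without touching MPI directly.*/
    void ReportPartition(void){

        int my_rank  =IssmComm::GetRank();
        int num_procs=IssmComm::GetSize();

        _printf0_("running on " << num_procs << " cpus\n");
        if(my_rank==0){
            /*rank-0-only work, e.g. writing the output file, goes here*/
        }
    }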
  • issm/trunk-jpl/src/c/shared/io/Disk/pfopen.cpp

    r15559 r15838  
    3939        for(int i=0;i<num_proc;i++){
    4040                if(my_rank==i) fid = fopen(filename,format);
    41 #ifdef _HAVE_MPI_
    42                 MPI_Barrier(IssmComm::GetComm());
    43 #endif
     41                ISSM_MPI_Barrier(IssmComm::GetComm());
    4442        }
    4543        if(fid==NULL) _error_("could not open file " << filename << " for binary reading or writing");
  • issm/trunk-jpl/src/c/toolkits/issm/Bucket.h

    r15104 r15838  
    1313/*}}}*/
    1414
    15 /*how many MPI_Isend requests does it take to transfer the contents of a bucket to another cpu?*/
     15/*how many ISSM_MPI_Isend requests does it take to transfer the contents of a bucket to another cpu?*/
    1616#define MATRIXBUCKETSIZEOFREQUESTS 7
    1717#define VECTORBUCKETSIZEOFREQUESTS 5
  • issm/trunk-jpl/src/c/toolkits/issm/IssmMpiDenseMat.h

    r15104 r15838  
    153153                                        }
    154154                                }
    155                                 MPI_Barrier(IssmComm::GetComm());
     155                                ISSM_MPI_Barrier(IssmComm::GetComm());
    156156                        }
    157157
     
    187187                        /*some communicator info: */
    188188                        num_procs=IssmComm::GetSize();
    189                         MPI_Comm comm=IssmComm::GetComm();
     189                        ISSM_MPI_Comm comm=IssmComm::GetComm();
    190190
    191191                        /*First, make a vector of size M, which for each row between 0 and M-1, tells which cpu this row belongs to: */
     
    215215                        numvalues_fromcpu   = xNew<int>(num_procs);
    216216                        for(i=0;i<num_procs;i++){
    217                                 MPI_Scatter(numvalues_forcpu,1,MPI_INT,numvalues_fromcpu+i,1,MPI_INT,i,comm);
     217                                ISSM_MPI_Scatter(numvalues_forcpu,1,ISSM_MPI_INT,numvalues_fromcpu+i,1,ISSM_MPI_INT,i,comm);
    218218                        }
    219219
     
    241241                        /*Scatter values around: {{{*/
    242242                        /*Now, to scatter values across the cluster, we need sendcnts and displs. Our sendbufs have been built by BucketsBuildScatterBuffers, with a stride given
    243                          * by numvalues_forcpu. Get this ready to go before starting the scatter itslef. For reference, here is the MPI_Scatterv prototype:
    244                          * int MPI_Scatterv( void *sendbuf, int *sendcnts, int *displs, MPI_Datatype sendtype, void *recvbuf, int recvcnt, MPI_Datatype recvtype, int root, MPI_Comm comm) :*/
      243                         * by numvalues_forcpu. Get this ready to go before starting the scatter itself. For reference, here is the ISSM_MPI_Scatterv prototype:
     244                         * int ISSM_MPI_Scatterv( void *sendbuf, int *sendcnts, int *displs, ISSM_MPI_Datatype sendtype, void *recvbuf, int recvcnt, ISSM_MPI_Datatype recvtype, int root, ISSM_MPI_Comm comm) :*/
    245245                        sendcnts=xNew<int>(num_procs);
    246246                        displs=xNew<int>(num_procs);
     
    253253
    254254                        for(i=0;i<num_procs;i++){
    255                                 MPI_Scatterv( row_indices_forcpu, sendcnts, displs, MPI_INT, row_indices_fromcpu[i], numvalues_fromcpu[i], MPI_INT, i, comm);
    256                                 MPI_Scatterv( col_indices_forcpu, sendcnts, displs, MPI_INT, col_indices_fromcpu[i], numvalues_fromcpu[i], MPI_INT, i, comm);
    257                                 MPI_Scatterv( values_forcpu, sendcnts, displs, MPI_DOUBLE, values_fromcpu[i], numvalues_fromcpu[i], MPI_DOUBLE, i, comm);
    258                                 MPI_Scatterv( modes_forcpu, sendcnts, displs, MPI_INT, modes_fromcpu[i], numvalues_fromcpu[i], MPI_INT, i, comm);
     255                                ISSM_MPI_Scatterv( row_indices_forcpu, sendcnts, displs, ISSM_MPI_INT, row_indices_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_INT, i, comm);
     256                                ISSM_MPI_Scatterv( col_indices_forcpu, sendcnts, displs, ISSM_MPI_INT, col_indices_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_INT, i, comm);
     257                                ISSM_MPI_Scatterv( values_forcpu, sendcnts, displs, ISSM_MPI_DOUBLE, values_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_DOUBLE, i, comm);
     258                                ISSM_MPI_Scatterv( modes_forcpu, sendcnts, displs, ISSM_MPI_INT, modes_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_INT, i, comm);
    259259                        }
    260260                        /*}}}*/
     
    330330                                                local_norm=max(local_norm,absolute);
    331331                                        }
    332                                         MPI_Reduce(&local_norm, &norm, 1, MPI_DOUBLE, MPI_MAX, 0, IssmComm::GetComm());
    333                                         MPI_Bcast(&norm,1,MPI_DOUBLE,0,IssmComm::GetComm());
     332                                        ISSM_MPI_Reduce(&local_norm, &norm, 1, ISSM_MPI_DOUBLE, ISSM_MPI_MAX, 0, IssmComm::GetComm());
     333                                        ISSM_MPI_Bcast(&norm,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    334334                                        return norm;
    335335                                        break;
     
    341341                                                }
    342342                                        }
    343                                         MPI_Reduce(&local_norm, &norm, 1, MPI_DOUBLE, MPI_SUM, 0, IssmComm::GetComm());
    344                                         MPI_Bcast(&norm,1,MPI_DOUBLE,0,IssmComm::GetComm());
     343                                        ISSM_MPI_Reduce(&local_norm, &norm, 1, ISSM_MPI_DOUBLE, ISSM_MPI_SUM, 0, IssmComm::GetComm());
     344                                        ISSM_MPI_Bcast(&norm,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    345345                                        return sqrt(norm);
    346346                                        break;
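
The matrix assembly above scatters packed buckets to their owner cpus; the sendcnts/displs arrays required by ISSM_MPI_Scatterv are built directly from the per-cpu value counts. A reduced sketch of that bookkeeping for a single root, with hypothetical buffer names, following the Scatterv prototype quoted in the comment above (paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./shared/MemOps/MemOps.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: "root" holds values_forcpu, packed by destination cpu with
     * numvalues_forcpu[j] entries for cpu j; each cpu receives its own slice of size
     * numvalues_fromroot into values_fromroot.*/
    void ScatterPackedValues(double* values_forcpu,int* numvalues_forcpu,double* values_fromroot,int numvalues_fromroot,int root){

        int           num_procs=IssmComm::GetSize();
        ISSM_MPI_Comm comm     =IssmComm::GetComm();

        /*sendcnts: how many values go to each cpu; displs: where that cpu's slice starts in the packed buffer*/
        int* sendcnts=xNew<int>(num_procs);
        int* displs  =xNew<int>(num_procs);
        int  count=0;
        for(int i=0;i<num_procs;i++){
            sendcnts[i]=numvalues_forcpu[i];
            displs[i]  =count;
            count+=numvalues_forcpu[i];
        }

        ISSM_MPI_Scatterv(values_forcpu,sendcnts,displs,ISSM_MPI_DOUBLE,values_fromroot,numvalues_fromroot,ISSM_MPI_DOUBLE,root,comm);

        xDelete<int>(sendcnts);
        xDelete<int>(displs);
    }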
  • issm/trunk-jpl/src/c/toolkits/issm/IssmMpiVec.h

    r15365 r15838  
    2121#include "../../shared/MemOps/MemOps.h"
    2222#include "../../shared/io/io.h"
    23 #ifdef _HAVE_MPI_
    24 #include "../mpi/mpiincludes.h"
    25 #endif
     23#include "../mpi/issmmpi.h"
    2624#include <math.h>
    2725
     
    138136                                        _printf_("\n");
    139137                                }
    140                                 MPI_Barrier(IssmComm::GetComm());
     138                                ISSM_MPI_Barrier(IssmComm::GetComm());
    141139                        }
    142140                }
     
    171169                        /*some communicator info: */
    172170                        num_procs=IssmComm::GetSize();
    173                         MPI_Comm comm=IssmComm::GetComm();
     171                        ISSM_MPI_Comm comm=IssmComm::GetComm();
    174172
    175173                        /*First, make a vector of size M, which for each row between 0 and M-1, tells which cpu this row belongs to: */
     
    199197                        numvalues_fromcpu   = xNew<int>(num_procs);
    200198                        for(i=0;i<num_procs;i++){
    201                                 MPI_Scatter(numvalues_forcpu,1,MPI_INT,numvalues_fromcpu+i,1,MPI_INT,i,comm);
     199                                ISSM_MPI_Scatter(numvalues_forcpu,1,ISSM_MPI_INT,numvalues_fromcpu+i,1,ISSM_MPI_INT,i,comm);
    202200                        }
    203201
     
    222220                        /*Scatter values around: {{{*/
    223221                        /*Now, to scatter values across the cluster, we need sendcnts and displs. Our sendbufs have been built by BucketsBuildScatterBuffers, with a stride given
    224                          * by numvalues_forcpu. Get this ready to go before starting the scatter itslef. For reference, here is the MPI_Scatterv prototype:
    225                          * int MPI_Scatterv( void *sendbuf, int *sendcnts, int *displs, MPI_Datatype sendtype, void *recvbuf, int recvcnt, MPI_Datatype recvtype, int root, MPI_Comm comm) :*/
      222                         * by numvalues_forcpu. Get this ready to go before starting the scatter itself. For reference, here is the ISSM_MPI_Scatterv prototype:
     223                         * int ISSM_MPI_Scatterv( void *sendbuf, int *sendcnts, int *displs, ISSM_MPI_Datatype sendtype, void *recvbuf, int recvcnt, ISSM_MPI_Datatype recvtype, int root, ISSM_MPI_Comm comm) :*/
    226224                        sendcnts=xNew<int>(num_procs);
    227225                        displs=xNew<int>(num_procs);
     
    234232
    235233                        for(i=0;i<num_procs;i++){
    236                                 MPI_Scatterv( row_indices_forcpu, sendcnts, displs, MPI_INT, row_indices_fromcpu[i], numvalues_fromcpu[i], MPI_INT, i, comm);
    237                                 MPI_Scatterv( values_forcpu, sendcnts, displs, MPI_DOUBLE, values_fromcpu[i], numvalues_fromcpu[i], MPI_DOUBLE, i, comm);
    238                                 MPI_Scatterv( modes_forcpu, sendcnts, displs, MPI_INT, modes_fromcpu[i], numvalues_fromcpu[i], MPI_INT, i, comm);
     234                                ISSM_MPI_Scatterv( row_indices_forcpu, sendcnts, displs, ISSM_MPI_INT, row_indices_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_INT, i, comm);
     235                                ISSM_MPI_Scatterv( values_forcpu, sendcnts, displs, ISSM_MPI_DOUBLE, values_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_DOUBLE, i, comm);
     236                                ISSM_MPI_Scatterv( modes_forcpu, sendcnts, displs, ISSM_MPI_INT, modes_fromcpu[i], numvalues_fromcpu[i], ISSM_MPI_INT, i, comm);
    239237                        }
    240238                        /*}}}*/
     
    378376
    379377                        /*communicator info: */
    380                         MPI_Comm comm;
     378                        ISSM_MPI_Comm comm;
    381379                        int num_procs;
    382380
    383                         /*MPI_Allgatherv info: */
     381                        /*ISSM_MPI_Allgatherv info: */
    384382                        int  lower_row,upper_row;
    385383                        int* recvcounts=NULL;
     
    399397
    400398                        /*recvcounts:*/
    401                         MPI_Allgather(&this->m,1,MPI_INT,recvcounts,1,MPI_INT,comm);
     399                        ISSM_MPI_Allgather(&this->m,1,ISSM_MPI_INT,recvcounts,1,ISSM_MPI_INT,comm);
    402400
    403401                        /*get lower_row: */
     
    405403
    406404                        /*displs: */
    407                         MPI_Allgather(&lower_row,1,MPI_INT,displs,1,MPI_INT,comm);
     405                        ISSM_MPI_Allgather(&lower_row,1,ISSM_MPI_INT,displs,1,ISSM_MPI_INT,comm);
    408406
    409407                        /*All gather:*/
    410                         MPI_Allgatherv(this->vector, this->m, MPI_DOUBLE, buffer, recvcounts, displs, MPI_DOUBLE,comm);
     408                        ISSM_MPI_Allgatherv(this->vector, this->m, ISSM_MPI_DOUBLE, buffer, recvcounts, displs, ISSM_MPI_DOUBLE,comm);
    411409
    412410                        /*free ressources: */
     
    445443                                        //local_norm=0; for(i=0;i<this->m;i++)local_norm=max(local_norm,fabs(this->vector[i]));
    446444                                        local_norm=0; for(i=0;i<this->m;i++)local_norm=max(local_norm,this->vector[i]);
    447                                         MPI_Reduce(&local_norm, &norm, 1, MPI_DOUBLE, MPI_MAX, 0, IssmComm::GetComm());
    448                                         MPI_Bcast(&norm,1,MPI_DOUBLE,0,IssmComm::GetComm());
     445                                        ISSM_MPI_Reduce(&local_norm, &norm, 1, ISSM_MPI_DOUBLE, ISSM_MPI_MAX, 0, IssmComm::GetComm());
     446                                        ISSM_MPI_Bcast(&norm,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    449447                                        return norm;
    450448                                        break;
     
    452450                                        local_norm=0;
    453451                                        for(i=0;i<this->m;i++)local_norm+=pow(this->vector[i],2);
    454                                         MPI_Reduce(&local_norm, &norm, 1, MPI_DOUBLE, MPI_SUM, 0, IssmComm::GetComm());
    455                                         MPI_Bcast(&norm,1,MPI_DOUBLE,0,IssmComm::GetComm());
     452                                        ISSM_MPI_Reduce(&local_norm, &norm, 1, ISSM_MPI_DOUBLE, ISSM_MPI_SUM, 0, IssmComm::GetComm());
     453                                        ISSM_MPI_Bcast(&norm,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    456454                                        return sqrt(norm);
    457455                                        break;
     
    484482                        for(i=0;i<this->m;i++)local_dot+=this->vector[i]*input->vector[i];
    485483
    486                         #ifdef _HAVE_MPI_
    487                         /*MPI_SUM all the dots across the cluster: */
    488                         MPI_Reduce(&local_dot, &dot, 1, MPI_DOUBLE, MPI_SUM, 0, IssmComm::GetComm());
    489                         MPI_Bcast(&dot,1,MPI_DOUBLE,0,IssmComm::GetComm());
    490                         #endif
     484                        /*ISSM_MPI_SUM all the dots across the cluster: */
     485                        ISSM_MPI_Reduce(&local_dot, &dot, 1, ISSM_MPI_DOUBLE, ISSM_MPI_SUM, 0, IssmComm::GetComm());
     486                        ISSM_MPI_Bcast(&dot,1,ISSM_MPI_DOUBLE,0,IssmComm::GetComm());
    491487
    492488                        return dot;
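
IssmMpiVec's ToMPISerial path gathers the distributed vector onto every cpu: one ISSM_MPI_Allgather of the local sizes builds recvcounts, one of the lower rows builds displs, and an ISSM_MPI_Allgatherv then moves the data. A condensed sketch with a hypothetical helper name, assuming lower_row is the global index of this cpu's first entry (paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./shared/MemOps/MemOps.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: every cpu passes its local chunk (vector, m entries) and its
     * global offset lower_row, and gets back a newly allocated buffer of global size M
     * holding the whole vector.*/
    double* GatherFullVector(double* vector,int m,int lower_row,int M){

        int           num_procs=IssmComm::GetSize();
        ISSM_MPI_Comm comm     =IssmComm::GetComm();

        double* buffer    =xNew<double>(M);
        int*    recvcounts=xNew<int>(num_procs);
        int*    displs    =xNew<int>(num_procs);

        /*every cpu contributes its local size and offset, so all cpus can build the gather map*/
        ISSM_MPI_Allgather(&m,1,ISSM_MPI_INT,recvcounts,1,ISSM_MPI_INT,comm);
        ISSM_MPI_Allgather(&lower_row,1,ISSM_MPI_INT,displs,1,ISSM_MPI_INT,comm);

        /*gather the pieces into the full buffer on every cpu*/
        ISSM_MPI_Allgatherv(vector,m,ISSM_MPI_DOUBLE,buffer,recvcounts,displs,ISSM_MPI_DOUBLE,comm);

        xDelete<int>(recvcounts);
        xDelete<int>(displs);
        return buffer;
    }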
  • issm/trunk-jpl/src/c/toolkits/mpi/commops/DetermineGlobalSize.cpp

    r14915 r15838  
    1313        int  global_size;
    1414
    15         MPI_Reduce(&local_size, &global_size, 1, MPI_INT, MPI_SUM, 0, comm);
    16         MPI_Bcast(&global_size,1,MPI_INT,0,comm);
     15        ISSM_MPI_Reduce(&local_size, &global_size, 1, ISSM_MPI_INT, ISSM_MPI_SUM, 0, comm);
     16        ISSM_MPI_Bcast(&global_size,1,ISSM_MPI_INT,0,comm);
    1717
    1818        return global_size;
  • issm/trunk-jpl/src/c/toolkits/mpi/commops/DetermineLocalSize.cpp

    r14965 r15838  
    2222
    2323        /*recover my_rank*/
    24         MPI_Comm_rank(comm,&my_rank);
    25         MPI_Comm_size(comm,&num_procs);
     24        ISSM_MPI_Comm_rank(comm,&my_rank);
     25        ISSM_MPI_Comm_size(comm,&num_procs);
    2626
    2727        /* TODO replace the following with ->
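
DetermineLocalSize splits a global size across cpus; only its rank/size queries change in this revision, and the 'TODO replace the following with' comment hints at a simpler formulation. Purely as an assumption about what that simpler form would look like, an even split with the remainder given to the lowest ranks:

    #include "./toolkits/mpi/issmmpi.h"

    /*Illustrative only: give each cpu global_size/num_procs entries and hand the
     * remaining global_size%num_procs entries to the lowest-ranked cpus.*/
    int EvenLocalSize(int global_size,ISSM_MPI_Comm comm){

        int my_rank,num_procs;
        ISSM_MPI_Comm_rank(comm,&my_rank);
        ISSM_MPI_Comm_size(comm,&num_procs);

        int local_size=global_size/num_procs;
        if(my_rank<global_size%num_procs) local_size++;

        return local_size;
    }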
  • issm/trunk-jpl/src/c/toolkits/mpi/commops/DetermineRowRankFromLocalSize.cpp

    r14915 r15838  
    2121        int* RowRank=NULL;
    2222
    23         MPI_Comm_rank(comm,&my_rank);
    24         MPI_Comm_size(comm,&num_procs);
     23        ISSM_MPI_Comm_rank(comm,&my_rank);
     24        ISSM_MPI_Comm_size(comm,&num_procs);
    2525
    2626        /*allocate: */
     
    2929        /*Gather all local_size values into alllocalsizes, for all cpus*/
    3030        int* alllocalsizes=xNew<int>(num_procs);
    31         MPI_Allgather(&localsize,1,MPI_INT,alllocalsizes,1,MPI_INT,comm);
     31        ISSM_MPI_Allgather(&localsize,1,ISSM_MPI_INT,alllocalsizes,1,ISSM_MPI_INT,comm);
    3232
    3333        /*From all localsizes, get lower row and upper row*/
  • issm/trunk-jpl/src/c/toolkits/mpi/commops/GetOwnershipBoundariesFromRange.cpp

    r14950 r15838  
    2121
    2222        /*recover my_rank and num_procs:*/
    23         MPI_Comm_size(comm,&num_procs);
    24         MPI_Comm_rank(comm,&my_rank);
     23        ISSM_MPI_Comm_size(comm,&num_procs);
     24        ISSM_MPI_Comm_rank(comm,&my_rank);
    2525
    2626        /*output: */
     
    2929        /*Gather all range values into allranges, for all nodes*/
    3030        int* allranges=xNew<int>(num_procs);
    31         MPI_Allgather(&range,1,MPI_INT,allranges,1,MPI_INT,comm);
     31        ISSM_MPI_Allgather(&range,1,ISSM_MPI_INT,allranges,1,ISSM_MPI_INT,comm);
    3232
    3333        /*From all ranges, get lower row and upper row*/
  • issm/trunk-jpl/src/c/toolkits/mumps/MpiDenseMumpsSolve.cpp

    r14950 r15838  
    1414#include "../../shared/Exceptions/exceptions.h"
    1515#include "../../shared/io/Comm/Comm.h"
    16 #include "../mpi/patches/mpipatches.h"
     16#include "../mpi/issmmpi.h"
    1717
    1818/*Mumps header files: */
     
    2828        /*Variables: {{{*/
    2929
    30         MPI_Comm   comm;
     30        ISSM_MPI_Comm   comm;
    3131        int        my_rank;
    3232        int        num_procs;
     
    8484        }
    8585
    86         MPI_Reduce(&local_nnz,&nnz,1,MPI_INT,MPI_SUM,0,comm);
    87         MPI_Bcast(&nnz,1,MPI_INT,0,comm);
     86        ISSM_MPI_Reduce(&local_nnz,&nnz,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,comm);
     87        ISSM_MPI_Bcast(&nnz,1,ISSM_MPI_INT,0,comm);
    8888        id.nz=nnz;
    8989        id.nz_loc=local_nnz;
     
    113113        id.a_loc=a_loc;
    114114
    115         /*Deal with right hand side. We need to MPI_Gather it onto cpu 0: */
     115        /*Deal with right hand side. We need to ISSM_MPI_Gather it onto cpu 0: */
    116116        rhs=xNew<IssmPDouble>(pf_M);
    117117
     
    120120
    121121        /*recvcounts:*/
    122         MPI_Allgather(&pf_m,1,MPI_INT,recvcounts,1,MPI_INT,comm);
     122        ISSM_MPI_Allgather(&pf_m,1,ISSM_MPI_INT,recvcounts,1,ISSM_MPI_INT,comm);
    123123
    124124        /*displs: */
    125         MPI_Allgather(&lower_row,1,MPI_INT,displs,1,MPI_INT,comm);
     125        ISSM_MPI_Allgather(&lower_row,1,ISSM_MPI_INT,displs,1,ISSM_MPI_INT,comm);
    126126
    127127        /*Gather:*/
    128         MPI_Gatherv(pf, pf_m, MPI_DOUBLE, rhs, recvcounts, displs, MPI_DOUBLE,0,comm);
     128        ISSM_MPI_Gatherv(pf, pf_m, ISSM_MPI_DOUBLE, rhs, recvcounts, displs, ISSM_MPI_DOUBLE,0,comm);
    129129        id.rhs=rhs;
    130130        id.nrhs=1;
     
    137137        /*}}}*/
    138138        /*Now scatter from cpu 0 to all other cpus: {{{*/
    139         MPI_Scatterv( rhs, recvcounts, displs, MPI_DOUBLE, uf, uf_m, MPI_DOUBLE, 0, comm);
     139        ISSM_MPI_Scatterv( rhs, recvcounts, displs, ISSM_MPI_DOUBLE, uf, uf_m, ISSM_MPI_DOUBLE, 0, comm);
    140140
    141141        /*}}}*/
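
MpiDenseMumpsSolve.cpp gathers the distributed right-hand side onto cpu 0, where the dense rhs is handed to MUMPS, and scatters the solution back using the same recvcounts/displs map. A stripped-down sketch of that round trip with the solve elided and hypothetical argument names (paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./shared/MemOps/MemOps.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: pf is this cpu's pf_m entries of the rhs, lower_row its global
     * offset, pf_M the global size; uf receives this cpu's uf_m entries of the solution
     * (matching pf_m here, as in the hunk above).*/
    void GatherSolveScatter(double* pf,int pf_m,int lower_row,int pf_M,double* uf,int uf_m){

        int           num_procs=IssmComm::GetSize();
        ISSM_MPI_Comm comm     =IssmComm::GetComm();

        double* rhs       =xNew<double>(pf_M);
        int*    recvcounts=xNew<int>(num_procs);
        int*    displs    =xNew<int>(num_procs);

        ISSM_MPI_Allgather(&pf_m,1,ISSM_MPI_INT,recvcounts,1,ISSM_MPI_INT,comm);
        ISSM_MPI_Allgather(&lower_row,1,ISSM_MPI_INT,displs,1,ISSM_MPI_INT,comm);

        /*gather the rhs onto cpu 0*/
        ISSM_MPI_Gatherv(pf,pf_m,ISSM_MPI_DOUBLE,rhs,recvcounts,displs,ISSM_MPI_DOUBLE,0,comm);

        /* ... cpu 0 hands rhs to the solver, which overwrites it with the solution ... */

        /*scatter the solution back to its owners*/
        ISSM_MPI_Scatterv(rhs,recvcounts,displs,ISSM_MPI_DOUBLE,uf,uf_m,ISSM_MPI_DOUBLE,0,comm);

        xDelete<double>(rhs);
        xDelete<int>(recvcounts);
        xDelete<int>(displs);
    }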
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/MatMultPatch.cpp

    r15513 r15838  
    1414#include <petscksp.h>
    1515
    16 #include "../../mpi/mpiincludes.h"
     16#include "../../mpi/issmmpi.h"
    1717#include "../../../shared/shared.h"
    1818
     
    5454
    5555        /*recover num_procs:*/
    56         MPI_Comm_size(comm,&num_procs);
     56        ISSM_MPI_Comm_size(comm,&num_procs);
    5757
    5858        MatGetLocalSize(A,&local_m,&local_n);;
     
    6262
    6363        /*synchronize result: */
    64         MPI_Reduce (&result,&sumresult,1,MPI_INT,MPI_SUM,0,comm );
    65         MPI_Bcast(&sumresult,1,MPI_INT,0,comm);               
     64        ISSM_MPI_Reduce (&result,&sumresult,1,ISSM_MPI_INT,ISSM_MPI_SUM,0,comm );
     65        ISSM_MPI_Bcast(&sumresult,1,ISSM_MPI_INT,0,comm);               
    6666        if (sumresult!=num_procs){
    6767                result=0;
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/MatToSerial.cpp

    r13622 r15838  
    2222        int range;
    2323        int M,N; //size of matrix
    24         MPI_Status status;
     24        ISSM_MPI_Status status;
    2525        int* idxm=NULL;
    2626        int* idxn=NULL;
     
    2929
    3030        /*recover my_rank and num_procs:*/
    31         MPI_Comm_size(comm,&num_procs);
    32         MPI_Comm_rank(comm,&my_rank);
     31        ISSM_MPI_Comm_size(comm,&num_procs);
     32        ISSM_MPI_Comm_rank(comm,&my_rank);
    3333
    3434        /*Output*/
     
    6969                        buffer[1]=lower_row;
    7070                        buffer[2]=range;
    71                         MPI_Send(buffer,3,MPI_INT,0,1,comm);   
    72                         if (range)MPI_Send(local_matrix,N*range,MPI_DOUBLE,0,1,comm);
     71                        ISSM_MPI_Send(buffer,3,ISSM_MPI_INT,0,1,comm);   
     72                        if (range)ISSM_MPI_Send(local_matrix,N*range,ISSM_MPI_DOUBLE,0,1,comm);
    7373                }
    7474                if (my_rank==0){
    75                         MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
    76                         if (buffer[2])MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],MPI_DOUBLE,i,1,comm,&status);
     75                        ISSM_MPI_Recv(buffer,3,ISSM_MPI_INT,i,1,comm,&status);
     76                        if (buffer[2])ISSM_MPI_Recv(outmatrix+(buffer[1]*N),N*buffer[2],ISSM_MPI_DOUBLE,i,1,comm,&status);
    7777                }
    7878        }
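
MatToSerial.cpp uses point-to-point traffic instead of a collective: every cpu i sends a small header (its offset and range) followed by its block of data, and cpu 0 receives them in rank order into the serial buffer. The same loop structure is used in VecToMPISerial.cpp below, which additionally broadcasts the result to every cpu. A minimal sketch with hypothetical names; tag 1 is kept from the original code, and the serial buffer only needs to be allocated on cpu 0 (paths relative to src/c):

    #include "./shared/io/Comm/IssmComm.h"
    #include "./toolkits/mpi/issmmpi.h"

    /*Hypothetical helper: gather a distributed array onto cpu 0 with point-to-point
     * messages. Each cpu owns "range" entries starting at global index "lower_row";
     * on cpus other than 0 the "serial" argument is unused.*/
    void GatherOnCpu0(double* local,int lower_row,int range,double* serial){

        int             my_rank  =IssmComm::GetRank();
        int             num_procs=IssmComm::GetSize();
        ISSM_MPI_Comm   comm     =IssmComm::GetComm();
        ISSM_MPI_Status status;
        int             buffer[2];

        /*cpu 0 copies its own chunk directly*/
        if(my_rank==0) for(int j=0;j<range;j++) serial[lower_row+j]=local[j];

        for(int i=1;i<num_procs;i++){
            if(my_rank==i){
                buffer[0]=lower_row;
                buffer[1]=range;
                ISSM_MPI_Send(buffer,2,ISSM_MPI_INT,0,1,comm);
                if(range) ISSM_MPI_Send(local,range,ISSM_MPI_DOUBLE,0,1,comm);
            }
            if(my_rank==0){
                ISSM_MPI_Recv(buffer,2,ISSM_MPI_INT,i,1,comm,&status);
                if(buffer[1]) ISSM_MPI_Recv(serial+buffer[0],buffer[1],ISSM_MPI_DOUBLE,i,1,comm,&status);
            }
        }
    }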
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/NewMat.cpp

    r15029 r15838  
    1515
    1616#include "./petscpatches.h"
    17 
    1817#include "../../../shared/shared.h"
    19 #include "../../mpi/patches/mpipatches.h"
     18#include "../../mpi/issmmpi.h"
    2019
    2120/*NewMat(int M,int N){{{*/
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/NewVec.cpp

    r13760 r15838  
    1515
    1616#include "./petscpatches.h"
    17 #include "../../mpi/patches/mpipatches.h"
     17#include "../../mpi/issmmpi.h"
    1818
    1919Vec NewVec(int size,COMM comm,bool fromlocalsize){
  • issm/trunk-jpl/src/c/toolkits/petsc/patches/VecToMPISerial.cpp

    r13622 r15838  
    1919
    2020        /*Petsc*/
    21         MPI_Status status;
     21        ISSM_MPI_Status status;
    2222        PetscInt lower_row,upper_row;
    2323        int range;
     
    3535
    3636        /*recover my_rank and num_procs*/
    37         MPI_Comm_size(comm,&num_procs);
    38         MPI_Comm_rank(comm,&my_rank);
     37        ISSM_MPI_Comm_size(comm,&num_procs);
     38        ISSM_MPI_Comm_rank(comm,&my_rank);
    3939
    4040        VecGetSize(vector,&vector_size);
     
    6969                        buffer[1]=lower_row;
    7070                        buffer[2]=range;
    71                         MPI_Send(buffer,3,MPI_INT,0,1,comm); 
    72                         if (range)MPI_Send(local_vector,range,MPI_DOUBLE,0,1,comm);
     71                        ISSM_MPI_Send(buffer,3,ISSM_MPI_INT,0,1,comm); 
     72                        if (range)ISSM_MPI_Send(local_vector,range,ISSM_MPI_DOUBLE,0,1,comm);
    7373                }
    7474                if (my_rank==0){
    75                         MPI_Recv(buffer,3,MPI_INT,i,1,comm,&status);
    76                         if (buffer[2])MPI_Recv(gathered_vector+buffer[1],buffer[2],MPI_DOUBLE,i,1,comm,&status);
     75                        ISSM_MPI_Recv(buffer,3,ISSM_MPI_INT,i,1,comm,&status);
     76                        if (buffer[2])ISSM_MPI_Recv(gathered_vector+buffer[1],buffer[2],ISSM_MPI_DOUBLE,i,1,comm,&status);
    7777                }
    7878        }
     
    8484
    8585        /*Now, broadcast gathered_vector from node 0 to other nodes: */
    86         MPI_Bcast(gathered_vector,vector_size,MPI_DOUBLE,0,comm);
     86        ISSM_MPI_Bcast(gathered_vector,vector_size,ISSM_MPI_DOUBLE,0,comm);
    8787
    8888        /*Assign output pointers: */
  • issm/trunk-jpl/src/c/toolkits/plapack/patches/PlapackInvertMatrix.cpp

    r14917 r15838  
    1212#include "../../scalapack/FortranMapping.h"
    1313
    14 void PlapackInvertMatrixLocalCleanup(PLA_Obj* pa,PLA_Template* ptempl,double** parrayA,int** pidxnA,MPI_Comm* pcomm_2d);
     14void PlapackInvertMatrixLocalCleanup(PLA_Obj* pa,PLA_Template* ptempl,double** parrayA,int** pidxnA,ISSM_MPI_Comm* pcomm_2d);
    1515
    1616int PlapackInvertMatrix(Mat* A,Mat* inv_A,int status,int con,COMM comm){
     
    2727
    2828        /*Plapack: */
    29         MPI_Datatype   datatype;
    30         MPI_Comm       comm_2d;
     29        ISSM_MPI_Datatype   datatype;
     30        ISSM_MPI_Comm       comm_2d;
    3131        PLA_Obj a=NULL;
    3232        PLA_Template   templ;   
     
    8585
    8686        /* Set the datatype */
    87         datatype = MPI_DOUBLE;
     87        datatype = ISSM_MPI_DOUBLE;
    8888
    8989        /* Copy A into a*/
     
    123123        /*Finalize PLAPACK*/
    124124        PLA_Finalize();
    125         MPI_Comm_free(&comm_2d);
     125        ISSM_MPI_Comm_free(&comm_2d);
    126126}
  • issm/trunk-jpl/src/c/toolkits/toolkits.h

    r15053 r15838  
    1616#endif
    1717
    18 #ifdef _HAVE_MPI_
    19 #include "./mpi/mpiincludes.h"
    20 #endif
     18#include "./mpi/issmmpi.h"
    2119
    2220#ifdef _HAVE_METIS_
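
toolkits.h now includes issmmpi.h unconditionally, which is the point of the whole changeset: the _HAVE_MPI_ decision is concentrated inside the issmmpi layer instead of being repeated in every module. issmmpi.cpp itself is not part of this diff, so the following is only a guess at the shape of one wrapper, an MPI pass-through with a serial fallback behind the same signature; the real implementation may differ:

    /*Purely illustrative sketch of one wrapper, not the actual issmmpi.cpp*/
    #ifdef _HAVE_MPI_
    #include <mpi.h>
    typedef MPI_Comm     ISSM_MPI_Comm;
    typedef MPI_Datatype ISSM_MPI_Datatype;
    #define ISSM_MPI_DOUBLE     MPI_DOUBLE
    #define ISSM_MPI_COMM_WORLD MPI_COMM_WORLD
    #else
    typedef int ISSM_MPI_Comm;      /*dummy handles for the serial build*/
    typedef int ISSM_MPI_Datatype;
    #define ISSM_MPI_DOUBLE     0
    #define ISSM_MPI_COMM_WORLD 0
    #endif

    int ISSM_MPI_Bcast(void* buffer,int count,ISSM_MPI_Datatype datatype,int root,ISSM_MPI_Comm comm){
    #ifdef _HAVE_MPI_
        /*with MPI available, forward straight to the library*/
        return MPI_Bcast(buffer,count,datatype,root,comm);
    #else
        /*serial build: a single cpu already holds the data, nothing to do*/
        return 0;
    #endif
    }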