Changeset 12016


Timestamp:
04/16/12 19:19:56 (13 years ago)
Author:
Eric.Larour
Message:

Took out all parts of the code that were conditionally compiled for _PARALLEL_: the #ifdef _PARALLEL_ guards are gone, the MPI code paths are now built unconditionally, and the serial-only #else branches have been removed.

Location:
issm/trunk-jpl/src/c
Files:
30 edited

Legend:

  (unmarked)  unmodified
  +           added
  -           removed
  • issm/trunk-jpl/src/c/Container/Constraints.cpp

    r10522 → r12016

        /*figure out total number of constraints combining all the cpus (no clones here)*/
    -   #ifdef _PARALLEL_
        MPI_Reduce(&localconstraints,&numberofconstraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&numberofconstraints,1,MPI_INT,0,MPI_COMM_WORLD);
    -   #else
    -   numberofconstraints=localconstraints;
    -   #endif

        return numberofconstraints;
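
The reduce-then-broadcast idiom that this first hunk makes unconditional recurs in most of the files below: each cpu contributes a local value, rank 0 combines them (sum, max or min), and the result is broadcast back so every rank holds the same total. A minimal standalone sketch of that pattern follows; the variable names and the printf are illustrative only, not taken from ISSM. Because these collectives are well defined on a single rank, the pattern also works in a one-cpu run, which is why the serial #else branches could be dropped.

    /* Minimal sketch of the reduce+broadcast count pattern made unconditional
     * by this changeset. Illustrative names only; works on any number of
     * ranks, including 1.  Compile with: mpicc count.c */
    #include <mpi.h>
    #include <stdio.h>

    int main(int argc, char** argv){
        int my_rank, localcount, totalcount;

        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);

        localcount = my_rank + 1;  /* stand-in for a per-cpu count */

        /* sum on rank 0, then broadcast so every rank holds the total */
        MPI_Reduce(&localcount, &totalcount, 1, MPI_INT, MPI_SUM, 0, MPI_COMM_WORLD);
        MPI_Bcast(&totalcount, 1, MPI_INT, 0, MPI_COMM_WORLD);

        /* equivalent single call:
         * MPI_Allreduce(&localcount, &totalcount, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD); */

        if(my_rank == 0) printf("total: %d\n", totalcount);
        MPI_Finalize();
        return 0;
    }
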
  • issm/trunk-jpl/src/c/Container/Elements.cpp

    r11695 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Synchronize across cluster, so as to not end up with different sizes for each patch on each cpu: */
        MPI_Reduce (&numvertices,&max_numvertices,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
    …
        MPI_Bcast(&max_numnodes,1,MPI_INT,0,MPI_COMM_WORLD);
        numnodes=max_numnodes;
    -   #endif

        /*Ok, initialize Patch object: */
    …

            /*Gather onto master cpu 0, if needed: */
    -   #ifdef _PARALLEL_
            if(io_gather)patch->Gather();
    -   #endif

            /*create result object and add to results dataset:*/
    …
        int numberofelements;

    -   #ifdef _PARALLEL_
        local_nelem=this->Size();
        MPI_Allreduce ( (void*)&local_nelem,(void*)&numberofelements,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
    -   #else
    -   numberofelements=this->Size();
    -   #endif

        return numberofelements;
  • issm/trunk-jpl/src/c/Container/Loads.cpp

    r10522 → r12016

        /*figure out total number of loads combining all the cpus (no clones here)*/
    -   #ifdef _PARALLEL_
        MPI_Reduce(&localloads,&numberofloads,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&numberofloads,1,MPI_INT,0,MPI_COMM_WORLD);
    -   #else
    -   numberofloads=localloads;
    -   #endif

        return numberofloads;
  • issm/trunk-jpl/src/c/Container/Nodes.cpp

    r11298 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Grab max of all cpus: */
        MPI_Allreduce ( (void*)&max,(void*)&allmax,1,MPI_INT,MPI_MAX,MPI_COMM_WORLD);
        max=allmax;
    -   #endif

        return max;
    …
        }

    -   #ifdef _PARALLEL_
    -           MPI_Reduce (&max_sid,&node_max_sid,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
    -           MPI_Bcast(&node_max_sid,1,MPI_INT,0,MPI_COMM_WORLD);
    -           max_sid=node_max_sid;
    -   #endif
    +   MPI_Reduce (&max_sid,&node_max_sid,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
    +   MPI_Bcast(&node_max_sid,1,MPI_INT,0,MPI_COMM_WORLD);
    +   max_sid=node_max_sid;

        if(max_sid==1){
  • issm/trunk-jpl/src/c/Container/Vertices.cpp

    r11298 → r12016

        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&max_sid,&vertex_max_sid,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&vertex_max_sid,1,MPI_INT,0,MPI_COMM_WORLD);
        max_sid=vertex_max_sid;
    -   #endif

        /*sid starts at 0*/
  • issm/trunk-jpl/src/c/Makefile.am

    r12015 → r12016

        #}}}
        #Transient sources  {{{1
    -   transient_sources  = ./modules/ModelProcessorx/Transient/UpdateElementsTransient.cpp
    -   transient_psources = ./solutions/transient_core.cpp
    +   transient_sources  = ./modules/ModelProcessorx/Transient/UpdateElementsTransient.cpp \
    +                        ./solutions/transient_core.cpp
        #}}}
        #Steadystate sources  {{{1
    -   steadystate_psources = ./solutions/steadystate_core.cpp\
    -                          ./solutions/steadystateconvergence.cpp
    +   steadystate_sources = ./solutions/steadystate_core.cpp\
    +                         ./solutions/steadystateconvergence.cpp
        #}}}
        #Prognostic sources  {{{1
    …
                              ./modules/ModelProcessorx/Prognostic/CreateNodesPrognostic.cpp\
                              ./modules/ModelProcessorx/Prognostic/CreateConstraintsPrognostic.cpp\
    -                         ./modules/ModelProcessorx/Prognostic/CreateLoadsPrognostic.cpp
    -   prognostic_psources = ./solutions/prognostic_core.cpp
    +                         ./modules/ModelProcessorx/Prognostic/CreateLoadsPrognostic.cpp\
    +                         ./solutions/prognostic_core.cpp
        #}}}
        #Thermal sources  {{{1
    …
                              ./modules/ConstraintsStatex/ThermalConstraintsState.cpp\
                              ./modules/ConstraintsStatex/ThermalIsPresent.cpp\
    -                         ./modules/ResetConstraintsx/ThermalConstraintsReset.cpp
    -
    -   thermal_psources = ./solutions/thermal_core.cpp\
    -                      ./solutions/enthalpy_core.cpp\
    -                      ./solvers/solver_thermal_nonlinear.cpp
    +                         ./modules/ResetConstraintsx/ThermalConstraintsReset.cpp \
    +                         ./solutions/thermal_core.cpp\
    +                         ./solutions/enthalpy_core.cpp\
    +                         ./solvers/solver_thermal_nonlinear.cpp
        #}}}
        #Control sources  {{{1
    …
                              ./objects/Inputs/ControlInput.cpp\
                              ./shared/Numerics/BrentSearch.cpp\
    -                         ./shared/Numerics/OptimalSearch.cpp
    -
    -   control_psources=./solutions/control_core.cpp\
    +                         ./shared/Numerics/OptimalSearch.cpp \
    +                         ./solutions/control_core.cpp\
                              ./solutions/controltao_core.cpp\
                              ./solutions/controlrestart.cpp\
    …
                              ./modules/ModelProcessorx/Hydrology/CreateNodesHydrology.cpp\
                              ./modules/ModelProcessorx/Hydrology/CreateConstraintsHydrology.cpp\
    -                         ./modules/ModelProcessorx/Hydrology/CreateLoadsHydrology.cpp
    -
    -   hydrology_psources  = ./solutions/hydrology_core.cpp\
    -                         ./solutions/hydrology_core_step.cpp
    +                         ./modules/ModelProcessorx/Hydrology/CreateLoadsHydrology.cpp \
    +                         ./solutions/hydrology_core.cpp\
    +                         ./solutions/hydrology_core_step.cpp
        #}}}
        #Diagnostic sources  {{{1
    …
                              ./modules/ModelProcessorx/DiagnosticHutter/CreateConstraintsDiagnosticHutter.cpp \
                              ./modules/ModelProcessorx/DiagnosticHutter/CreateLoadsDiagnosticHutter.cpp \
    -                         ./shared/Elements/CoordinateSystemTransform.cpp\
    -                         ./shared/Elements/TransformLoadVectorCoord.cpp \
    -                         ./shared/Elements/TransformStiffnessMatrixCoord.cpp \
    -                         ./shared/Elements/TransformInvStiffnessMatrixCoord.cpp \
    -                         ./shared/Elements/TransformSolutionCoord.cpp
    -   diagnostic_psources =./solutions/diagnostic_core.cpp\
    -                        ./solvers/solver_stokescoupling_nonlinear.cpp
    +                         ./shared/Elements/CoordinateSystemTransform.cpp\
    +                         ./shared/Elements/TransformLoadVectorCoord.cpp \
    +                         ./shared/Elements/TransformStiffnessMatrixCoord.cpp \
    +                         ./shared/Elements/TransformInvStiffnessMatrixCoord.cpp \
    +                         ./shared/Elements/TransformSolutionCoord.cpp\
    +                         ./solutions/diagnostic_core.cpp\
    +                         ./solvers/solver_stokescoupling_nonlinear.cpp
        #}}}
        #Balanced sources  {{{1
    …
                              ./modules/ModelProcessorx/Balancethickness/CreateNodesBalancethickness.cpp\
                              ./modules/ModelProcessorx/Balancethickness/CreateConstraintsBalancethickness.cpp\
    -                         ./modules/ModelProcessorx/Balancethickness/CreateLoadsBalancethickness.cpp
    -   balanced_psources = ./solutions/balancethickness_core.cpp
    +                         ./modules/ModelProcessorx/Balancethickness/CreateLoadsBalancethickness.cpp\
    +                         ./solutions/balancethickness_core.cpp
        #}}}
        #Responses sources  {{{1
    …
                              ./modules/ModelProcessorx/SurfaceSlope/CreateNodesSurfaceSlope.cpp \
                              ./modules/ModelProcessorx/SurfaceSlope/CreateConstraintsSurfaceSlope.cpp\
    -                         ./modules/ModelProcessorx/SurfaceSlope/CreateLoadsSurfaceSlope.cpp
    -   slope_psources = ./solutions/surfaceslope_core.cpp\
    +                         ./modules/ModelProcessorx/SurfaceSlope/CreateLoadsSurfaceSlope.cpp\
    +                         ./solutions/surfaceslope_core.cpp\
                              ./solutions/bedslope_core.cpp
        #}}}
  • issm/trunk-jpl/src/c/matlab/io/PrintfFunction.cpp

    r12013 → r12016

        /*Ok, if we are running in parallel, get node 0 to print*/
    -   #if defined(_PARALLEL_)
        if(my_rank==0)printf(buffer);
    -   #else
    -   mexPrintf(buffer);
    -   #endif

        /*Clean up and return*/
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/RiftConstraintsState.cpp

    r9761 → r12016

        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);
        found=mpi_found;
    -   #endif

        return found;
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);
        num_unstable_constraints=sum_num_unstable_constraints;
    -   #endif

        /*Assign output pointers: */
    …
        /*Is there just one found? that would mean we have frozen! : */
    -   #ifdef _PARALLEL_
        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);
        found=mpi_found;
    -   #endif

        return found;
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);
        found=mpi_found;
    -   #endif

        return found;
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);
        found=mpi_found;
    -   #endif

        if (found){
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);
        num_unstable_constraints=sum_num_unstable_constraints;
    -   #endif

        /*Assign output pointers: */
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&max_penetration,&mpi_max_penetration,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_max_penetration,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        max_penetration=mpi_max_penetration;
    -   #endif

        /*feed max_penetration to inputs: */
    …
        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);
        num_unstable_constraints=sum_num_unstable_constraints;
    -   #endif

        return num_unstable_constraints;
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalConstraintsState.cpp

    r9883 → r12016

        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&num_unstable_constraints,&sum_num_unstable_constraints,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&sum_num_unstable_constraints,1,MPI_INT,0,MPI_COMM_WORLD);
        num_unstable_constraints=sum_num_unstable_constraints;
    -   #endif

        /*Have we converged? : */
  • issm/trunk-jpl/src/c/modules/ConstraintsStatex/ThermalIsPresent.cpp

    r9883 → r12016

        }

    -   #ifdef _PARALLEL_
        MPI_Reduce (&found,&mpi_found,1,MPI_INT,MPI_SUM,0,MPI_COMM_WORLD );
        MPI_Bcast(&mpi_found,1,MPI_INT,0,MPI_COMM_WORLD);
        found=mpi_found;
    -   #endif

        return found;
  • issm/trunk-jpl/src/c/modules/Dakotax/Dakotax.cpp

    r12011 → r12016

        parameters->FindParam(&dakota_error_file,QmuErrNameEnum);

    -   #ifdef _PARALLEL_
        if(my_rank==0){
    -   #endif

                // Instantiate/initialize the parallel library and problem description
    …
                selected_strategy.run_strategy();

    -           #ifdef _PARALLEL_
                //Warn other cpus that we are done running the dakota iterator, by setting the counter to -1:
                SpawnCore(NULL,0, NULL,NULL,0,femmodel,-1);
    -           #endif

    -   #ifdef _PARALLEL_
        }
        else{
    …
                }
        }
    -   #endif //#ifdef _PARALLEL_

        /*Free ressources:*/
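
The Dakotax.cpp hunks keep the structure in which rank 0 drives the Dakota strategy while the other cpus wait for work, and simply compile it unconditionally. Below is a generic, hedged sketch of that master/worker control flow in plain MPI; the -1 stop signal mirrors the SpawnCore(...,-1) call in the diff, but the function and variable names here are illustrative, not ISSM's API.

    /* Generic master/worker sketch: rank 0 runs the driver loop and broadcasts
     * a counter; a counter of -1 tells the other ranks to stop waiting.
     * Names are illustrative only.  Compile with: mpicc masterworker.c */
    #include <mpi.h>
    #include <stdio.h>

    static void do_one_evaluation(int counter, int my_rank){
        /* stand-in for the collective work all ranks perform per iteration */
        printf("rank %d working on evaluation %d\n", my_rank, counter);
    }

    int main(int argc, char** argv){
        int my_rank, counter;

        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);

        if(my_rank == 0){
            for(counter = 1; counter <= 3; counter++){   /* driver loop */
                MPI_Bcast(&counter, 1, MPI_INT, 0, MPI_COMM_WORLD);
                do_one_evaluation(counter, my_rank);
            }
            counter = -1;                                /* signal completion */
            MPI_Bcast(&counter, 1, MPI_INT, 0, MPI_COMM_WORLD);
        }
        else{
            for(;;){                                     /* workers wait for orders */
                MPI_Bcast(&counter, 1, MPI_INT, 0, MPI_COMM_WORLD);
                if(counter == -1) break;
                do_one_evaluation(counter, my_rank);
            }
        }

        MPI_Finalize();
        return 0;
    }
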
  • issm/trunk-jpl/src/c/modules/InputConvergencex/InputConvergencex.cpp

    r9761 → r12016

        /*In parallel, we need to gather the converged status: */
    -   #ifdef _PARALLEL_
        MPI_Allreduce ( (void*)&num_notconverged,(void*)&total_notconverged,1,MPI_INT,MPI_SUM,MPI_COMM_WORLD);
        num_notconverged=total_notconverged;
    -   #endif
        _printf_(VerboseConvergence(),"      #elements above convergence criterion = %i\n",num_notconverged);
  • issm/trunk-jpl/src/c/modules/MassFluxx/MassFluxx.cpp

    r8263 → r12016

        }

    -   #ifdef _PARALLEL_
        MPI_Allreduce ( (void*)&mass_flux,(void*)&all_mass_flux,1,MPI_DOUBLE,MPI_SUM,MPI_COMM_WORLD);
        mass_flux=all_mass_flux;
    -   #endif

        /*Free ressources:*/
  • issm/trunk-jpl/src/c/modules/MaxAbsVxx/MaxAbsVxx.cpp

    r5870 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out maximum across the cluster: */
        MPI_Reduce (&maxabsvx,&node_maxabsvx,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxabsvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxabsvx=node_maxabsvx;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxAbsVyx/MaxAbsVyx.cpp

    r5871 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out maximum across the cluster: */
        MPI_Reduce (&maxabsvy,&node_maxabsvy,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxabsvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxabsvy=node_maxabsvy;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxAbsVzx/MaxAbsVzx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&maxabsvz,&node_maxabsvz,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxabsvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxabsvz=node_maxabsvz;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxVelx/MaxVelx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out maximum across the cluster: */
        MPI_Reduce (&maxvel,&node_maxvel,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxvel,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxvel=node_maxvel;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxVxx/MaxVxx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&maxvx,&node_maxvx,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxvx=node_maxvx;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxVyx/MaxVyx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&maxvy,&node_maxvy,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxvy=node_maxvy;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MaxVzx/MaxVzx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&maxvz,&node_maxvz,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_maxvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        maxvz=node_maxvz;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MinVelx/MinVelx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&minvel,&node_minvel,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_minvel,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        minvel=node_minvel;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MinVxx/MinVxx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&minvx,&node_minvx,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_minvx,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        minvx=node_minvx;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MinVyx/MinVyx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&minvy,&node_minvy,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_minvy,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        minvy=node_minvy;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/MinVzx/MinVzx.cpp

    r5414 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&minvz,&node_minvz,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_minvz,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        minvz=node_minvz;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/modules/ModelProcessorx/ElementsAndVerticesPartitioning.cpp

    r9733 → r12016

        else elements_width=6; //penta elements

    -   #ifdef _PARALLEL_
        /*Determine parallel partitioning of elements: we use Metis for now. First load the data, then partition*/
        if(dim==2){
    …
        xfree((void**)&elements);
        xfree((void**)&elements2d);
    -
    -   #else
    -   /*In serial mode, epart is full of 0: all elements belong to cpu 0: */
    -   epart=(int*)xcalloc(numberofelements,sizeof(int));
    -   #endif

        /*Deal with rifts, they have to be included into one partition only, not several: */
  • issm/trunk-jpl/src/c/modules/ParsePetscOptionsx/ParsePetscOptionsx.cpp

    r11945 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Ok, broadcast to other cpus: */
        MPI_Bcast(&numanalyses,1,MPI_INT,0,MPI_COMM_WORLD);
    …
                if(my_rank!=0)strings[i]=string;
        }
    -   #endif

        /*Ok, out of strings and analyses and numanalyses, create parameters, and plug them into parameters container: */
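
The hunk above broadcasts numanalyses and then the option strings from rank 0 to the other cpus. MPI_Bcast needs a byte count that is known on every rank, so a variable-length string is typically broadcast as its length followed by its characters; a minimal sketch under that assumption (not ISSM's actual helper) is shown below.

    /* Broadcasting a variable-length C string from rank 0: send the length
     * first, then the characters. Minimal sketch, illustrative option string. */
    #include <mpi.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    int main(int argc, char** argv){
        int   my_rank, length;
        char* string = NULL;

        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &my_rank);

        if(my_rank == 0){
            string = strdup("-ksp_type gmres -pc_type bjacobi");
            length = (int)strlen(string) + 1;          /* include trailing '\0' */
        }

        MPI_Bcast(&length, 1, MPI_INT, 0, MPI_COMM_WORLD);
        if(my_rank != 0) string = (char*)malloc(length);
        MPI_Bcast(string, length, MPI_CHAR, 0, MPI_COMM_WORLD);

        printf("rank %d received: %s\n", my_rank, string);
        free(string);
        MPI_Finalize();
        return 0;
    }
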
  • issm/trunk-jpl/src/c/modules/Solverx/SolverxPetsc.cpp

    r11679 → r12016

        KSPSetFromOptions(ksp);

    -   #if defined(_SERIAL_) && _PETSC_MAJOR_==3
    +   #if _PETSC_MAJOR_==3
        /*Specific solver?: */
        KSPGetPC(ksp,&pc);
    …
                #endif
        }
    -   #endif

    -   #if defined(_PARALLEL_) && _PETSC_MAJOR_==3
        /*Stokes: */
        if (solver_type==StokesSolverEnum){
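
After this change the PETSc-3 preconditioner selection above is no longer tied to the _SERIAL_ or _PARALLEL_ build flavors. For context, here is a minimal, hedged sketch of the PETSc 3.x calls involved (KSPGetPC/PCSetType); it only configures a solver object and does not solve anything, and PCJACOBI stands in for whatever preconditioner the elided lines actually pick.

    /* Sketch: overriding the preconditioner on a KSP object in PETSc 3.x,
     * the same call sequence SolverxPetsc.cpp uses. Configuration only. */
    #include <petscksp.h>

    int main(int argc, char** argv){
        KSP ksp;
        PC  pc;

        PetscInitialize(&argc, &argv, NULL, NULL);

        KSPCreate(PETSC_COMM_WORLD, &ksp);
        KSPSetFromOptions(ksp);      /* honour -ksp_type / -pc_type options */
        KSPGetPC(ksp, &pc);          /* then override the preconditioner in code */
        PCSetType(pc, PCJACOBI);     /* stand-in choice for this sketch */

        KSPDestroy(&ksp);
        PetscFinalize();
        return 0;
    }
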
  • issm/trunk-jpl/src/c/modules/TimeAdaptx/TimeAdaptx.cpp

    r6130 → r12016

        }

    -   #ifdef _PARALLEL_
        /*Figure out minimum across the cluster: */
        MPI_Reduce (&min_dt,&node_min_dt,1,MPI_DOUBLE,MPI_MIN,0,MPI_COMM_WORLD );
        MPI_Bcast(&node_min_dt,1,MPI_DOUBLE,0,MPI_COMM_WORLD);
        min_dt=node_min_dt;
    -   #endif

        /*Assign output pointers:*/
  • issm/trunk-jpl/src/c/objects/FemModel.cpp

    r11695 → r12016

    /*FUNCTION FemModel::constructor {{{1*/
    FemModel::FemModel(char* inputfilename, char* outputfilename, const int in_solution_type,const int* analyses,const int nummodels){
    -   #ifdef _PARALLEL_

        /*intermediary*/
    …
        /*Add output file name to parameters: */
        this->parameters->AddObject(new StringParam(OutputfilenameEnum,outputfilename));
    -
    -   #endif

    }
  • issm/trunk-jpl/src/c/solutions/issm.cpp

    r11983 → r12016

        MODULEBOOT();
    -
    -   #ifndef _PARALLEL_
    -   _error_(" parallel executable was compiled without support of parallel libraries!");
    -   #endif

        /*Initialize environments: Petsc, MPI, etc...: */