Changeset 2492
Timestamp: 10/22/09 15:06:33
Location:  issm/trunk/src/c/parallel
Files:     5 edited
issm/trunk/src/c/parallel/diagnostic.cpp (r2489 → r2492)

    _printf_("initialize results:\n");
    results=new DataSet(ResultsEnum());
-
    MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();
issm/trunk/src/c/parallel/prognostic.cpp (r2397 → r2492)

    Param* param=NULL;

+   /*time*/
+   double start, finish;
+   double start_core, finish_core;
+   double start_init, finish_init;
+
    MODULEBOOT();

…

    #endif

+   /*Initialize Petsc and get start time*/
    PetscInitialize(&argc,&argv,(char *)0,"");
+   MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */

…

    /*Initialize model structure: */
+   MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
    model=new Model();

…

    _printf_("initialize results:\n");
    results=new DataSet(ResultsEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solutoin sequence, or a qmu wrapper around it? : */

…

       /*run prognostic analysis: */
       _printf_("call computational core:\n");
+      MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
       prognostic_core(results,model,inputs);
+      MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

    }

…

    #ifdef _HAVE_DAKOTA_
+   MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
    Qmux(model,inputs,PrognosticAnalysisEnum(),NoneAnalysisEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
    #else
    throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");

…

    delete inputs;

+   /*Get finish time and close*/
+   MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
+   _printf_("\n %-34s %f seconds \n","Model initialization elapsed time:",finish_init-start_init);
+   _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
+   _printf_(" %-34s %f seconds\n\n","Total elapsed time:",finish-start);
    _printf_("closing MPI and Petsc\n");
    PetscFinalize();
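The instrumentation added above (and repeated in the files below) always times a phase the same way: MPI_Barrier(MPI_COMM_WORLD) to line every rank up at the same point, immediately followed by MPI_Wtime() to read the wall clock, with the same pair closing the phase. The following is a minimal, self-contained sketch of that idiom, not ISSM code: the file name and the placeholder phases are hypothetical, and it calls MPI_Init directly where the drivers use PetscInitialize. It should compile against any MPI implementation.

/* timing_sketch.cpp -- hypothetical file, not part of this changeset.
 * Build: mpic++ timing_sketch.cpp -o timing_sketch
 * Run:   mpirun -np 4 ./timing_sketch                                  */
#include <cstdio>
#include <mpi.h>

int main(int argc,char** argv){

    double start, finish;
    double start_core, finish_core;

    MPI_Init(&argc,&argv);

    /*Barrier first so every rank reads the clock at the same point,
     *then take the wall-clock start time (same idiom as the drivers):*/
    MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*... initialization would go here ...*/

    /*Bracket the "core" phase with the same barrier/Wtime pair:*/
    MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime();
    /*... computational core would go here ...*/
    MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime();

    MPI_Barrier(MPI_COMM_WORLD); finish=MPI_Wtime();

    /*Report from rank 0 only, using the same %-34s column layout
     *the drivers use for their elapsed-time summary:*/
    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD,&rank);
    if(rank==0){
        printf(" %-34s %f seconds\n","Core solution elapsed time:",finish_core-start_core);
        printf(" %-34s %f seconds\n","Total elapsed time:",finish-start);
    }

    MPI_Finalize();
    return 0;
}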
issm/trunk/src/c/parallel/steadystate.cpp (r2397 → r2492)

    Param* param=NULL;

+   /*time*/
+   double start, finish;
+   double start_core, finish_core;
+   double start_init, finish_init;
+
    MODULEBOOT();

…

    #endif

+   /*Initialize Petsc and get start time*/
    PetscInitialize(&argc,&argv,(char *)0,"");
+   MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */

…

    /*Initialize model structure: */
+   MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
    model=new Model();

…

    _printf_("initialize results:\n");
    results=new DataSet(ResultsEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solution sequence, or a qmu wrapper around it? : */

…

       /*run diagnostic analysis: */
       _printf_("call computational core:\n");
+      MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
       steadystate_core(results,model,inputs);
+      MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

       /*Add analysis_type to results: */

…

       /*run control analysis: */
       _printf_("call computational core:\n");
+      MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
       control_core(results,model,inputs);
+      MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

       /*Add analysis_type and control_type to results: */

…

    #ifdef _HAVE_DAKOTA_
+   MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
    Qmux(model,inputs,SteadystateAnalysisEnum(),NoneAnalysisEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
    #else
    throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");

…

    delete inputs;

+   /*Get finish time and close*/
+   MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
+   _printf_("\n %-34s %f seconds \n","Model initialization elapsed time:",finish_init-start_init);
+   _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
+   _printf_(" %-34s %f seconds\n\n","Total elapsed time:",finish-start);
    _printf_("closing MPI and Petsc\n");
    PetscFinalize();
issm/trunk/src/c/parallel/thermal.cpp (r2397 → r2492)

    int qmu_analysis=0;
    int numberofnodes;
+   int waitonlock=0;

    /*Model: */

…

    double yts;

-   int waitonlock=0;
+   /*time*/
+   double start, finish;
+   double start_core, finish_core;
+   double start_init, finish_init;

    MODULEBOOT();

…

    #endif

+   /*Initialize Petsc and get start time*/
    PetscInitialize(&argc,&argv,(char *)0,"");
+   MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */

…

    /*Initialize model structure: */
+   MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
    model=new Model();

…

    _printf_("initialize results:\n");
    results=new DataSet(ResultsEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solutoin sequence, or a qmu wrapper around it? : */

…

       /*run thermal analysis: */
       _printf_("call computational core:\n");
+      MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
       thermal_core(results,model,inputs);
+      MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

       /*Add analysis_type to results: */

…

    #ifdef _HAVE_DAKOTA_
+   MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
    Qmux(model,inputs,ThermalAnalysisEnum(),NoneAnalysisEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
    #else
    throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");

…

    delete inputs;

+   /*Get finish time and close*/
+   MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
+   _printf_("\n %-34s %f seconds \n","Model initialization elapsed time:",finish_init-start_init);
+   _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
+   _printf_(" %-34s %f seconds\n\n","Total elapsed time:",finish-start);
    _printf_("closing MPI and Petsc\n");
    PetscFinalize();
issm/trunk/src/c/parallel/transient.cpp (r2397 → r2492)

    int numberofnodes;
    int qmu_analysis=0;
+   int waitonlock=0;

    /*Model: */

…

    ParameterInputs* inputs=NULL;
-   int waitonlock=0;

    /*inputs: */

…

    Param* param=NULL;

+   /*time*/
+   double start, finish;
+   double start_core, finish_core;
+   double start_init, finish_init;
+
    MODULEBOOT();

…

    #endif

+   /*Initialize Petsc and get start time*/
    PetscInitialize(&argc,&argv,(char *)0,"");
+   MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */

…

    /*Initialize model structure: */
+   MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
    model=new Model();

…

    _printf_("initialize results:\n");
    results=new DataSet(ResultsEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solution sequence, or a qmu wrapper around it? : */

…

       /*run diagnostic analysis: */
       _printf_("call computational core:\n");
+      MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
       transient_core(results,model,inputs);
+      MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

       /*Add analysis_type to results: */

…

    #ifdef _HAVE_DAKOTA_
+   MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
    Qmux(model,inputs,TransientAnalysisEnum(),NoneAnalysisEnum());
+   MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
    #else
    throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");

…

    xfree((void**)&m_g);
    xfree((void**)&a_g);

+   /*Get finish time and close*/
+   MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
+   _printf_("\n %-34s %f seconds \n","Model initialization elapsed time:",finish_init-start_init);
+   _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
+   _printf_(" %-34s %f seconds\n\n","Total elapsed time:",finish-start);
    _printf_("closing MPI and Petsc\n");
    PetscFinalize();
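A design note on the pattern used throughout this changeset: MPI_Wtime() reads a per-process clock, so without the leading barrier each rank would start its stopwatch whenever it happened to arrive, and the figures printed by rank 0 would describe only rank 0. The barriers make the reported times communicator-wide, at the cost of extra synchronization points. For comparison, a hypothetical alternative (not what this changeset does) is to time the local work on each rank and reduce with MPI_MAX, so rank 0 reports the slowest rank without adding barriers:

/* max_time_sketch.cpp -- hypothetical alternative, for comparison only. */
#include <cstdio>
#include <mpi.h>

int main(int argc,char** argv){
    MPI_Init(&argc,&argv);

    double local_start=MPI_Wtime();
    /*... per-rank work would go here ...*/
    double local_elapsed=MPI_Wtime()-local_start;

    /*Reduce to the slowest rank's time; no barrier is needed because
     *the root's MPI_Reduce returns only after every rank contributes:*/
    double max_elapsed=0.0;
    MPI_Reduce(&local_elapsed,&max_elapsed,1,MPI_DOUBLE,MPI_MAX,0,MPI_COMM_WORLD);

    int rank;
    MPI_Comm_rank(MPI_COMM_WORLD,&rank);
    if(rank==0) printf(" %-34s %f seconds\n","Slowest-rank elapsed time:",max_elapsed);

    MPI_Finalize();
    return 0;
}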