Changeset 667

- Timestamp: 06/01/09 11:06:10 (15 years ago)
- Location: issm/trunk/src/c
- Files: 8 edited
Legend:
- Unmodified lines are shown without a prefix
- Added lines are prefixed with +
- Removed lines are prefixed with -

issm/trunk/src/c/Makefile.am
r666 → r667

 ./parallel/SpawnCore.cpp\
 ./parallel/ProcessResults.cpp\
+./parallel/prognostic_core.cpp\
 ./parallel/qmu.cpp\
 ./parallel/DakotaResponses.cpp\
-./parallel/OutputResults.cpp
+./parallel/OutputResults.cpp\
+./parallel/OutputControl.cpp

 libpISSM_a_CXXFLAGS = -fPIC -D_PARALLEL_ -D_C_

issm/trunk/src/c/parallel/ProcessResults.cpp
r659 → r667

 DataSet* newresults=NULL;

-/*fem models: */
+/*fem diagnostic models: */
 FemModel* fem_dh=NULL;
 FemModel* fem_dv=NULL;
…
 FemModel* fem_ds=NULL;
 FemModel* fem_sl=NULL;
+
+/*fem prognostic models: */
+FemModel* fem_p=NULL;

 int ishutter;
…
 double yts;

+Vec h_g=NULL;
+double* h_g_serial=NULL;
+double* thickness=NULL;
+
 int numberofnodes;
…
 }

+if(analysis_type==PrognosticAnalysisEnum()){
+	fem_p=fems+0;
+}

…
 	xfree((void**)&partition);
 }
+else if(strcmp(result->GetFieldName(),"h_g")==0){
+	/*easy, h_g is of size numberofnodes, on 1 dof, just repartition: */
+	result->GetField(&h_g);
+	VecToMPISerial(&h_g_serial,h_g);
+	fem_p->parameters->FindParam((void*)&numberofnodes,"numberofnodes");
+	VecToMPISerial(&partition,fem_p->partition);
+
+	thickness=(double*)xmalloc(numberofnodes*sizeof(double));
+
+	for(i=0;i<numberofnodes;i++){
+		thickness[i]=h_g_serial[(int)partition[i]];
+	}
+
+	/*Ok, add thickness to newresults: */
+	newresult=new Result(newresults->Size()+1,result->GetTime(),result->GetStep(),"thickness",thickness,numberofnodes);
+	newresults->AddObject(newresult);
+
+	/*do some cleanup: */
+	xfree((void**)&h_g_serial);
+	xfree((void**)&partition);
+}
 }

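For context, the new "h_g" branch above reuses the repartitioning idiom already applied to the other fields in ProcessResults.cpp: serialize the distributed vector, then gather it back into node ordering through the model's partition table. Below is a minimal standalone sketch of that gather step only, using plain C++ containers and a hypothetical helper name rather than the ISSM/PETSc types.

#include <vector>
#include <cstddef>

/* Illustrative only: recover a node-ordered field from a serialized,
 * solver-ordered vector. partition[i] gives the solver-ordering index of
 * node i, mirroring the loop in the "h_g" branch above. Hypothetical
 * helper, not part of the ISSM API. */
std::vector<double> GatherNodeField(const std::vector<double>& field_serial,
                                    const std::vector<double>& partition){
	std::vector<double> node_field(partition.size());
	for(std::size_t i=0;i<partition.size();i++){
		node_field[i]=field_serial[static_cast<std::size_t>(partition[i])];
	}
	return node_field;
}
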
issm/trunk/src/c/parallel/control.cpp
r643 → r667

 inputs->Add("fit",fit[n]);
 diagnostic_core_nonlinear(&u_g,NULL,NULL,&femmodel,inputs,analysis_type,sub_analysis_type);
-//OutputControl(u_g,p_g,J,nsteps,femmodel.partition,outputfilename,femmodel.nodesets);
+OutputControl(u_g,p_g,J,nsteps,&femmodel,outputfilename);
 _printf_("%s\n"," done.");
 }
…

 _printf_("%s\n"," saving final results...");
-//OutputControl(u_g,p_g,J,nsteps,femmodel.partition,outputfilename,femmodel.nodesets);
+OutputControl(u_g,p_g,J,nsteps,&femmodel,outputfilename);
 _printf_("%s\n"," done.");
…

 _printf_("closing MPI and Petsc\n");
-MPI_Barrier(MPI_COMM_WORLD);
-
-/*Close MPI libraries: */
 PetscFinalize();

issm/trunk/src/c/parallel/diagnostic_core.cpp
r643 → r667

 #undef __FUNCT__
-#define __FUNCT__ "cielodiagnostic_core"
+#define __FUNCT__ "diagnostic_core"

 #include "../toolkits/toolkits.h"

issm/trunk/src/c/parallel/diagnostic_core_nonlinear.cpp
r586 → r667

 #undef __FUNCT__
-#define __FUNCT__ "cielodiagnostic_core_nonlinear"
+#define __FUNCT__ "diagnostic_core_nonlinear"

 #include "../toolkits/toolkits.h"
…
 }

-//more output might be needed, when running in cielocontrol.c
+//more output might be needed, when running in control.c
 if(pKff0){

issm/trunk/src/c/parallel/parallel.h
r659 → r667

 void diagnostic_core(DataSet* results,FemModel* fems, ParameterInputs* inputs);
+void prognostic_core(DataSet* results,FemModel* fems, ParameterInputs* inputs);

 void thermal_core(DataSet* results,FemModel* fems, ParameterInputs* inputs);
…
 //int ParameterUpdate(double* search_vector,int step, WorkspaceParams* workspaceparams,BatchParams* batchparams);
 void OutputResults(DataSet* results,char* filename);
+void OutputControl(Vec u_g,double* p_g,double* J,int nsteps,FemModel* fem,char* outputfilename);
 void WriteLockFile(char* filename);

issm/trunk/src/c/parallel/prognostic.cpp
r643 → r667

 char* outputfilename=NULL;
 char* lockname=NULL;
+char* qmuinname=NULL;
+char* qmuoutname=NULL;
+char* qmuerrname=NULL;
 int numberofnodes;
 int waitonlock=0;

 FemModel fem;
+
 Vec h_g=NULL;
 Vec u_g=NULL;
…
 double* accumulation_g=NULL;
 double dt;
+int qmu_analysis;
+
+/*Results: */
+DataSet* results=NULL;

…
 outputfilename=argv[3];
 lockname=argv[4];
+qmuinname=argv[5];
+qmuoutname=argv[6];
+qmuerrname=argv[7];

 /*Open handle to data on disk: */
…
 fem.parameters->FindParam((void*)&numberofnodes,"numberofnodes");

-_printf_("depth averaging velocity...");
-u_g=SerialToVec(u_g_serial,numberofnodes*3); xfree((void**)&u_g_serial);//vx,vy and vz should be present at this point.
-VelocityDepthAveragex( u_g, fem.elements,fem.nodes, fem.loads, fem.materials);
-
 _printf_("initialize inputs:\n");
 inputs=new ParameterInputs;
-inputs->Add("velocity_average",u_g,3,numberofnodes);
+inputs->Add("velocity",u_g_serial,3,numberofnodes);
 inputs->Add("thickness",h_g_initial,1,numberofnodes);
 inputs->Add("melting",melting_g,1,numberofnodes);
…
 inputs->Add("dt",dt);

-/*lighten up on parameters : to be done */
+/*are we running the solution sequence, or a qmu wrapper around it? : */
+fem.parameters->FindParam((void*)&qmu_analysis,"qmu_analysis");
+if(!qmu_analysis){

-_printf_("call computational core:\n");
-diagnostic_core_linear(&h_g,&fem,inputs,PrognosticAnalysisEnum(),NoneAnalysisEnum());
+	/*run prognostic analysis: */
+	_printf_("call computational core:\n");
+	prognostic_core(results,&fem,inputs);

-_printf_("extrude computed thickness on all layers:\n");
-ThicknessExtrudex( h_g, fem.elements,fem.nodes, fem.loads, fem.materials);
+}
+else{
+	/*run qmu analysis: */
+	_printf_("calling qmu analysis on prognostic core:\n");
+
+	#ifdef _HAVE_DAKOTA_
+	qmu(qmuinname,qmuoutname,qmuerrname,&fem,inputs,PrognosticAnalysisEnum(),NoneAnalysisEnum());
+	#else
+	throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");
+	#endif
+}

+_printf_("process results:\n");
+ProcessResults(&results,&fem,DiagnosticAnalysisEnum());
+
 _printf_("write results to disk:\n");
-//OutputPrognostic(h_g,&fem,outputfilename);
+OutputResults(results,outputfilename);

 _printf_("write lock file:\n");
…

 _printf_("closing MPI and Petsc\n");
-MPI_Barrier(MPI_COMM_WORLD);
-
-/*Close MPI libraries: */
 PetscFinalize();

 /*end module: */

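The control flow introduced here (and mirrored in thermal.cpp below) is a single dispatch on the qmu_analysis parameter: either run the solution core once, or hand the same core to the Dakota qmu wrapper, which is only compiled in when _HAVE_DAKOTA_ is defined. A compilable sketch of that pattern follows, with stub functions standing in for the ISSM cores; all names are hypothetical.

#include <cstdio>
#include <stdexcept>

/* Stand-ins for the real cores; illustrative only. */
static void core_stub(void){ std::printf("running solution core\n"); }
static void qmu_stub(void){ std::printf("Dakota driving repeated core runs\n"); }

void solve(bool qmu_analysis){
	if(!qmu_analysis){
		/* plain solution sequence: call the core once */
		core_stub();
	}
	else{
		/* qmu wrapper: only available when built against Dakota */
#ifdef _HAVE_DAKOTA_
		qmu_stub();
#else
		throw std::runtime_error("Dakota not present, cannot do qmu!");
#endif
	}
}
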
issm/trunk/src/c/parallel/thermal.cpp
r659 → r667

 char* outputfilename=NULL;
 char* lockname=NULL;
+char* qmuinname=NULL;
+char* qmuoutname=NULL;
+char* qmuerrname=NULL;
+int qmu_analysis=0;
 int numberofnodes;
…
 outputfilename=argv[3];
 lockname=argv[4];
+qmuinname=argv[5];
+qmuoutname=argv[6];
+qmuerrname=argv[7];

 /*Open handle to data on disk: */
…
 femmodels[1].parameters->DeleteObject((Object*)param);

+/*are we running the solution sequence, or a qmu wrapper around it? : */
+femmodels[0].parameters->FindParam((void*)&qmu_analysis,"qmu_analysis");
+if(!qmu_analysis){
+
+	/*run thermal analysis: */
+	_printf_("call computational core:\n");
+	thermal_core(results,femmodels,inputs);
+
+}
+else{
+	/*run qmu analysis: */
+	_printf_("calling qmu analysis on thermal core:\n");

-_printf_("call computational core:\n");
-thermal_core(results,femmodels,inputs);
+	#ifdef _HAVE_DAKOTA_
+	qmu(qmuinname,qmuoutname,qmuerrname,&femmodels[0],inputs,ThermalAnalysisEnum(),NoneAnalysisEnum());
+	#else
+	throw ErrorException(__FUNCT__," Dakota not present, cannot do qmu!");
+	#endif
+}

 _printf_("process results:\n");
-ProcessResults(&results,&femmodels[0],ThermalAnalysisEnum());
+ProcessResults(&results,&femmodels[0],DiagnosticAnalysisEnum());

 _printf_("write results to disk:\n");
 OutputResults(results,outputfilename);
…

 _printf_("closing MPI and Petsc\n");
-MPI_Barrier(MPI_COMM_WORLD);
-
-/*Close MPI libraries: */
 PetscFinalize();