source: issm/trunk/src/c/solutions/diagnostic.cpp@3938

Last change on this file was revision 3938, checked in by Eric.Larour, 15 years ago

New results API

File size: 4.1 KB
/*!\file: diagnostic.cpp
 * \brief: diagnostic solution
 */

#ifdef HAVE_CONFIG_H
    #include "config.h"
#else
#error "Cannot compile without HAVE_CONFIG_H symbol! run configure first!"
#endif

#include "../objects/objects.h"
#include "../shared/shared.h"
#include "../DataSet/DataSet.h"
#include "../EnumDefinitions/EnumDefinitions.h"
#include "../include/include.h"
#include "../modules/modules.h"
#include "./solutions.h"

int main(int argc,char** argv){

    /*I/O: */
    FILE* fid=NULL;
    char* inputfilename=NULL;
    char* outputfilename=NULL;
    char* lockname=NULL;
    bool qmu_analysis=false;
    bool control_analysis=false;

    /*Model: */
    Model* model=NULL;

    /*Results: */
    Results* results=NULL;

    bool waitonlock=false;

    /*time*/
    double start, finish;
    double start_core, finish_core;
    double start_init, finish_init;

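    /*MODULEBOOT() and MODULEEND() (see end of file) are ISSM macros that bracket the
     *executable; their definitions come from the shared headers included above. */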
    MODULEBOOT();

    #if !defined(_PARALLEL_) || (defined(_PARALLEL_) && !defined(_HAVE_PETSC_))
    ISSMERROR(" parallel executable was compiled without support of parallel libraries!");
    #endif

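    /*Note: PetscInitialize also initializes MPI (if it is not already initialized), so the
     *MPI calls below are valid from this point on. */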
    /*Initialize Petsc and get start time*/
    PetscInitialize(&argc,&argv,(char *)0,"");
    MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */
    MPI_Comm_rank(MPI_COMM_WORLD,&my_rank);
    MPI_Comm_size(MPI_COMM_WORLD,&num_procs);

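    /*Recover file names from the command line: argv[2] is used as the input file (opened in
     *binary mode below), argv[3] as the output file and argv[4] as the lock file. */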
56 _printf_("recover , input file name and output file name:\n");
57 inputfilename=argv[2];
58 outputfilename=argv[3];
59 lockname=argv[4];
60
[1826]61 /*Initialize model structure: */
[2489]62 MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
[1826]63 model=new Model();
64
[1]65 /*Open handle to data on disk: */
[472]66 fid=pfopen(inputfilename,"rb");
[358]67
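    /*Build the finite element model: each AddFormulation call below reads the data for one
     *analysis formulation (horizontal, vertical, Stokes and Hutter diagnostic analyses, plus
     *the surface and bed slope computation) from the input file and adds it to the model. */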
[1]68 _printf_("read and create finite element model:\n");
[458]69 _printf_("\n reading diagnostic horiz model data:\n");
[3567]70 model->AddFormulation(fid,DiagnosticAnalysisEnum,HorizAnalysisEnum);
[1881]71
[458]72 _printf_("\n reading diagnostic vert model data:\n");
[3567]73 model->AddFormulation(fid,DiagnosticAnalysisEnum,VertAnalysisEnum);
[1881]74
[458]75 _printf_("\n reading diagnostic stokes model data:\n");
[3567]76 model->AddFormulation(fid,DiagnosticAnalysisEnum,StokesAnalysisEnum);
[1881]77
[458]78 _printf_("\n reading diagnostic hutter model data:\n");
[3567]79 model->AddFormulation(fid,DiagnosticAnalysisEnum,HutterAnalysisEnum);
[1881]80
[458]81 _printf_("\n reading surface and bed slope computation model data:\n");
[3567]82 model->AddFormulation(fid,SlopecomputeAnalysisEnum);
[1942]83
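    /*The parameters recovered below select the execution path: qmu_analysis switches to the
     *Dakota (qmu) wrapper, control_analysis selects the control core instead of the plain
     *diagnostic core, and waitonlock decides whether a lock file is written at the end. */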
    /*get parameters: */
    model->FindParam(&qmu_analysis,QmuAnalysisEnum);
    model->FindParam(&control_analysis,ControlAnalysisEnum);
    model->FindParam(&waitonlock,WaitOnLockEnum);

    MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solution sequence, or a qmu wrapper around it? : */
    if(!qmu_analysis){
        if(!control_analysis){

            _printf_("call computational core:\n");
            MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
            results=diagnostic_core(model);
            MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

        }
        else{
            /*run control analysis: */
            _printf_("call computational core:\n");
            MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
            results=control_core(model);
            MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );

        }

        _printf_("write results to disk:\n");
        OutputResults(results,outputfilename);
    }
    else{
        /*run qmu analysis: */
        _printf_("calling qmu analysis on diagnostic core:\n");

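        /*Qmux wraps the diagnostic core in a qmu (Dakota) analysis; it is only available when
         *ISSM was configured with Dakota support (_HAVE_DAKOTA_). */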
        #ifdef _HAVE_DAKOTA_
        MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
        Qmux(model,DiagnosticAnalysisEnum,NoneAnalysisEnum);
        MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
        #else
        ISSMERROR(" Dakota not present, cannot do qmu!");
        #endif
    }

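    /*If requested, write a lock file so that the process which launched this executable
     *(and is presumably waiting on that file) knows the run has completed. */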
    if(waitonlock){
        _printf_("write lock file:\n");
        WriteLockFile(lockname);
    }

    /*Free resources */
    delete model;
    delete results;

    /*Get finish time and close*/
    MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
    _printf_("\n %-34s %f seconds \n","Model initialization elapsed time:",finish_init-start_init);
    _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
    _printf_("\n %s %i hrs %i min %i sec\n\n","Total elapsed time:",int((finish-start)/3600),int(int(finish-start)%3600/60),int(finish-start)%60);
    _printf_("closing MPI and Petsc\n");
    PetscFinalize();

    /*end module: */
    MODULEEND();

    return 0; //unix success return;
}