source: issm/trunk/src/c/solutions/bedslope.cpp@ 4030

Last change on this file since 4030 was 4030, checked in by Eric.Larour, 15 years ago

diagnostic_core_linear, diagnostic_core_nonlinear, and thermal_core_nonlinear have now been moved to the solvers directory.
we now have surfaceslope and bedslope solutions, with their corresponding cores.
simplified again diagnostic solution.

File size: 3.3 KB
Line 
1/*!\file: bedslope.cpp
2 * \brief: bedslope computation solution
3 */
4
5#ifdef HAVE_CONFIG_H
6 #include "config.h"
7#else
8#error "Cannot compile with HAVE_CONFIG_H symbol! run configure first!"
9#endif
10
11#include "../objects/objects.h"
12#include "../shared/shared.h"
13#include "../DataSet/DataSet.h"
14#include "../EnumDefinitions/EnumDefinitions.h"
15#include "../include/include.h"
16#include "../modules/modules.h"
17#include "./solutions.h"
18
19int main(int argc,char* *argv){
20
21 /*I/O: */
22 FILE* fid=NULL;
23 char* inputfilename=NULL;
24 char* outputfilename=NULL;
25 char* lockname=NULL;
26 bool qmu_analysis=false;
27
28 /*FemModel: */
29 FemModel* femmodel=NULL;
30
31 /*Results: */
32 bool waitonlock=false;
33
34 /*time*/
35 double start, finish;
36 double start_core, finish_core;
37 double start_init, finish_init;
38
39 int analyses[1]={BedSlopeComputeAnalysisEnum};
40 int solution_type=BedSlopeComputeSolutionEnum;
41
42 MODULEBOOT();
43
44 #if !defined(_PARALLEL_) || (defined(_PARALLEL_) && !defined(_HAVE_PETSC_))
45 ISSMERROR(" parallel executable was compiled without support of parallel libraries!");
46 #endif
47
48 /*Initialize Petsc and get start time*/
49 PetscInitialize(&argc,&argv,(char *)0,"");
50 MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();
51
52 /*Size and rank: */
53 MPI_Comm_rank(MPI_COMM_WORLD,&my_rank);
54 MPI_Comm_size(MPI_COMM_WORLD,&num_procs);
55
56 _printf_("recover , input file name and output file name:\n");
57 inputfilename=argv[2];
58 outputfilename=argv[3];
59 lockname=argv[4];
60
61 /*Initialize femmodel structure: */
62 MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();
63
64 /*Open handle to data on disk: */
65 fid=pfopen(inputfilename,"rb");
66
67 _printf_("create finite element model, using analyses types statically defined above:\n");
68 femmodel=new FemModel(fid,solution_type,analyses,1);
69
70 /*get parameters: */
71 femmodel->parameters->FindParam(&qmu_analysis,QmuAnalysisEnum);
72 femmodel->parameters->FindParam(&waitonlock,WaitOnLockEnum);
73
74 MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();
75
76 /*are we running the solution sequence, or a qmu wrapper around it? : */
77 if(!qmu_analysis){
78
79 _printf_("call computational core:\n");
80 MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
81 bedslope_core(femmodel);
82 MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
83
84 _printf_("write results to disk:\n");
85 OutputResults(femmodel,outputfilename,DiagnosticAnalysisEnum);
86 }
87 else{
88
89 /*run qmu analysis: */
90 _printf_("calling qmu analysis on bed slope core:\n");
91
92 #ifdef _HAVE_DAKOTA_
93 MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime( );
94 Qmux(femmodel,SlopeComputeAnalysisEnum,NoneAnalysisEnum);
95 MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime( );
96 #else
97 ISSMERROR(" Dakota not present, cannot do qmu!");
98 #endif
99 }
100
101 if (waitonlock>0){
102 _printf_("write lock file:\n");
103 WriteLockFile(lockname);
104 }
105
106 /*Free ressources */
107 delete femmodel;
108
109 /*Get finish time and close*/
110 MPI_Barrier(MPI_COMM_WORLD); finish = MPI_Wtime( );
111 _printf_("\n %-34s %f seconds \n","FemModel initialization elapsed time:",finish_init-start_init);
112 _printf_(" %-34s %f seconds \n","Core solution elapsed time:",finish_core-start_core);
113 _printf_("\n %s %i hrs %i min %i sec\n\n","Total elapsed time:",int((finish-start)/3600),int(int(finish-start)%3600/60),int(finish-start)%60);
114 _printf_("closing MPI and Petsc\n");
115 PetscFinalize();
116
117 /*end module: */
118 MODULEEND();
119
120 return 0; //unix success return;
121}
Note: See TracBrowser for help on using the repository browser.