/*!\file: diagnostic.cpp
 * \brief: diagnostic solution
 */

#ifdef HAVE_CONFIG_H
    #include "config.h"
#else
    #error "Cannot compile without HAVE_CONFIG_H symbol! Run configure first!"
#endif

#include "../objects/objects.h"
#include "../shared/shared.h"
#include "../DataSet/DataSet.h"
#include "../EnumDefinitions/EnumDefinitions.h"
#include "../include/include.h"
#include "../modules/modules.h"
#include "./solutions.h"

int main(int argc,char** argv){

    /*I/O: */
    FILE* fid=NULL;
    char* inputfilename=NULL;
    char* outputfilename=NULL;
    char* lockname=NULL;
    bool  qmu_analysis=false;
    bool  control_analysis=false;
    bool  waitonlock=false;

    /*FemModel: */
    FemModel* femmodel=NULL;

    /*time: */
    double start,finish;
    double start_core,finish_core;
    double start_init,finish_init;

    int analyses[5]={DiagnosticHorizAnalysisEnum,DiagnosticVertAnalysisEnum,DiagnosticStokesAnalysisEnum,DiagnosticHutterAnalysisEnum,SlopeComputeAnalysisEnum};
    int solution_type=DiagnosticAnalysisEnum;

    MODULEBOOT();

    #if !defined(_PARALLEL_) || (defined(_PARALLEL_) && !defined(_HAVE_PETSC_))
    ISSMERROR(" parallel executable was compiled without support of parallel libraries!");
    #endif

    /*Initialize Petsc and get start time: */
    PetscInitialize(&argc,&argv,(char*)0,"");
    MPI_Barrier(MPI_COMM_WORLD); start=MPI_Wtime();

    /*Size and rank: */
    MPI_Comm_rank(MPI_COMM_WORLD,&my_rank);
    MPI_Comm_size(MPI_COMM_WORLD,&num_procs);

    /*Recover file names from the command line: */
    _printf_("recover input file name, output file name and lock file name:\n");
    inputfilename=argv[2];
    outputfilename=argv[3];
    lockname=argv[4];

    /*Initialize femmodel structure: */
    MPI_Barrier(MPI_COMM_WORLD); start_init=MPI_Wtime();

    /*Open handle to data on disk: */
    fid=pfopen(inputfilename,"rb");

    _printf_("create finite element model:\n");
    femmodel=new FemModel(fid,solution_type,analyses,5);

    /*get parameters: */
    femmodel->parameters->FindParam(&qmu_analysis,QmuAnalysisEnum);
    femmodel->parameters->FindParam(&control_analysis,ControlAnalysisEnum);
    femmodel->parameters->FindParam(&waitonlock,WaitOnLockEnum);

    MPI_Barrier(MPI_COMM_WORLD); finish_init=MPI_Wtime();

    /*are we running the solution sequence, or a qmu wrapper around it? */
    if(!qmu_analysis){

        if(!control_analysis){
            /*run diagnostic solution sequence: */
            _printf_("call computational core:\n");
            MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime();
            diagnostic_core(femmodel);
            MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime();
        }
        else{
            /*run control analysis: */
            _printf_("call computational core:\n");
            MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime();
            control_core(femmodel);
            MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime();
        }

        _printf_("write results to disk:\n");
        OutputResults(femmodel,outputfilename,DiagnosticSolutionEnum);
    }
    else{
        /*run qmu analysis: */
        _printf_("calling qmu analysis on diagnostic core:\n");

        #ifdef _HAVE_DAKOTA_
        MPI_Barrier(MPI_COMM_WORLD); start_core=MPI_Wtime();
        Qmux(femmodel,DiagnosticAnalysisEnum,NoneAnalysisEnum);
        MPI_Barrier(MPI_COMM_WORLD); finish_core=MPI_Wtime();
        #else
        ISSMERROR(" Dakota not present, cannot do qmu!");
        #endif
    }

    if(waitonlock){
        _printf_("write lock file:\n");
        WriteLockFile(lockname);
    }

    /*Free resources: */
    delete femmodel;

    /*Get finish time and close: */
    MPI_Barrier(MPI_COMM_WORLD); finish=MPI_Wtime();
    _printf_("\n   %-34s %f seconds\n","FemModel initialization elapsed time:",finish_init-start_init);
    _printf_("   %-34s %f seconds\n","Core solution elapsed time:",finish_core-start_core);
    _printf_("\n   %s %i hrs %i min %i sec\n\n","Total elapsed time:",int((finish-start)/3600),int(int(finish-start)%3600/60),int(finish-start)%60);

    _printf_("closing MPI and Petsc\n");
    PetscFinalize();

    /*end module: */
    MODULEEND();

    return 0; //unix success return
}
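/*Invocation sketch (an assumption, not defined in this file): the executable is expected to run
 * under an MPI launcher, with the input binary, output file and lock file passed as the second,
 * third and fourth command-line arguments (argv[2..4] above); argv[1] is not read here, and the
 * executable name is only an illustration. For example:
 *
 *     mpiexec -np 4 ./diagnostic.exe <argv1> <model>.bin <model>.outbin <model>.lock
 */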