Changeset 22769
- Timestamp:
- 05/15/18 08:16:55 (7 years ago)
- Location:
- issm/trunk-jpl/src/m
- Files:
  - 1 added
  - 7 edited
Legend:
- Unmodified
- Added
- Removed
issm/trunk-jpl/src/m/classes/clusters/generic.m
r22666 r22769 111 111 else 112 112 %Add --gen-suppressions=all to get suppression lines 113 fprintf(fid,'LD_PRELOAD=%s \\\n',cluster.valgrindlib);113 %fprintf(fid,'LD_PRELOAD=%s \\\n',cluster.valgrindlib); 114 114 if ismac, 115 115 if IssmConfig('_HAVE_MPI_'), 116 fprintf(fid,'mpiexec -np %i %s --leak-check=full -- error-limit=no --dsymutil=yes --suppressions=%s %s/%s %s %s %s 2> %s.errlog >%s.outlog ',...116 fprintf(fid,'mpiexec -np %i %s --leak-check=full --gen-suppressions=all --error-limit=no --dsymutil=yes --suppressions=%s %s/%s %s %s %s 2> %s.errlog >%s.outlog ',... 117 117 cluster.np,cluster.valgrind,cluster.valgrindsup,cluster.codepath,executable,solution,[cluster.executionpath '/' dirname], modelname,modelname,modelname); 118 118 else … … 122 122 else 123 123 if IssmConfig('_HAVE_MPI_'), 124 fprintf(fid,'mpiexec -np %i %s --leak-check=full -- error-limit=no --suppressions=%s %s/%s %s %s %s 2> %s.errlog >%s.outlog ',...124 fprintf(fid,'mpiexec -np %i %s --leak-check=full --gen-suppressions=all --error-limit=no --suppressions=%s %s/%s %s %s %s 2> %s.errlog >%s.outlog ',... 125 125 cluster.np,cluster.valgrind,cluster.valgrindsup,cluster.codepath,executable,solution,[cluster.executionpath '/' dirname],modelname,modelname,modelname); 126 126 else -
issm/trunk-jpl/src/m/classes/clusters/pfe.m
r21914 r22769 11 11 name = 'pfe' 12 12 login = ''; 13 modules = {'comp-intel/201 6.2.181' 'mpi-sgi/mpt'};13 modules = {'comp-intel/2018.0.128' 'mpi-sgi/mpt'}; 14 14 numnodes = 20; 15 15 cpuspernode = 8; … … 163 163 fprintf(fid,'#PBS -e %s.errlog \n\n',[cluster.executionpath '/' dirname '/' modelname]); 164 164 fprintf(fid,'. /usr/share/modules/init/bash\n\n'); 165 fprintf(fid,'module load comp-intel/2016.2.181\n'); 166 fprintf(fid,'module load mpi-sgi/mpt\n'); 165 for i=1:numel(cluster.modules), 166 fprintf(fid,['module load ' cluster.modules{i} '\n']); 167 end 167 168 fprintf(fid,'export PATH="$PATH:."\n\n'); 168 169 fprintf(fid,'export MPI_GROUP_MAX=64\n\n'); 170 fprintf(fid,'export MKL_NUM_THREADS=2\n\n'); 169 171 fprintf(fid,'export ISSM_DIR="%s/../"\n',cluster.codepath); %FIXME 170 172 fprintf(fid,'source $ISSM_DIR/etc/environment.sh\n'); %FIXME … … 226 228 fprintf(fid,'#PBS -e %s.errlog \n\n',[cluster.executionpath '/' dirname '/' modelname]); 227 229 fprintf(fid,'. /usr/share/modules/init/bash\n\n'); 228 fprintf(fid,'module load comp-intel/2016.2.181\n'); 229 fprintf(fid,'module load mpi-sgi/mpt\n'); 230 for i=1:numel(cluster.modules), 231 fprintf(fid,['module load ' cluster.modules{i} '\n']); 232 end 230 233 fprintf(fid,'export PATH="$PATH:."\n\n'); 231 234 fprintf(fid,'export MPI_GROUP_MAX=64\n\n'); … … 353 356 fprintf(fid,'#PBS -e %s.errlog \n\n',modelname); 354 357 fprintf(fid,'. /usr/share/modules/init/bash\n\n'); 358 %for i=1:numel(cluster.modules), 359 % fprintf(fid,['module load ' cluster.modules{i} '\n']); 360 %end 355 361 fprintf(fid,'module load comp-intel/2016.2.181\n'); 356 362 fprintf(fid,'module load netcdf/4.4.1.1_mpt\n'); -
issm/trunk-jpl/src/m/inversions/marshallcostfunctions.m
r21049 r22769 15 15 pos=find(cost_functions==510); data(pos) = {'ThicknessPositive'}; 16 16 pos=find(cost_functions==601); data(pos) = {'SurfaceAbsMisfit'}; 17 pos=find(cost_functions==602); data(pos) = {'OmegaAbsGradient'}; 18 pos=find(cost_functions==603); data(pos) = {'EtaDiff'}; -
issm/trunk-jpl/src/m/inversions/supportedcontrols.m
r19001 r22769 2 2 3 3 list = {... 4 'BalancethicknessSpcthickness',... 4 5 'BalancethicknessThickeningRate',... 5 6 'FrictionCoefficient',... -
issm/trunk-jpl/src/m/mesh/planet/gmsh/gmshplanet.m
r19288 r22769 102 102 end 103 103 104 %Find path to gmsh 105 paths = {[issmdir() '/bin/gmsh'],[issmdir() '/externalpackages/gmsh/install/gmsh']}; 106 gmshpath = ''; 107 for i=paths 108 if exist(i{1},'file'), 109 gmshpath = i{1} 110 end 111 end 112 if isempty(gmshpath), 113 error('gmt not found, make sure it is properly installed'); 114 end 115 104 116 %call gmsh 105 117 if exist(options,'refine'), 106 eval(['!gmsh-tol 1e-8 -2 sphere.geo -bgm sphere.pos']);118 system([gmshpath ' -tol 1e-8 -2 sphere.geo -bgm sphere.pos']); 107 119 else 108 %call gmsh 109 eval(['!gmsh -tol 1e-8 -2 sphere.geo']); 120 system([gmshpath ' -tol 1e-8 -2 sphere.geo']); 110 121 end 111 122 … … 163 174 164 175 %erase files: 165 eval(['!rm -rf sphere.geo sphere.msh sphere.pos']);176 system('rm -rf sphere.geo sphere.msh sphere.pos'); 166 177 167 178 %return mesh: -
issm/trunk-jpl/src/m/plot/subplotmodel.m
r14400 r22769 30 30 if(((i-1)*ncols+j)==num) 31 31 ha = axes('Units','normalized', ... 32 'Position',[xmin ymin width height] ,'XTickLabel','','YTickLabel','','Visible','off');32 'Position',[xmin ymin width height]);%,'XTickLabel','','YTickLabel','','Visible','off'); 33 33 return 34 34 end -
issm/trunk-jpl/src/m/solvers/mumpsoptions.m
r22562 r22769 27 27 mumps.pc_factor_mat_solver_package=getfieldvalue(options,'pc_factor_mat_solver_package','mumps'); 28 28 mumps.mat_mumps_icntl_14=getfieldvalue(options,'mat_mumps_icntl_14',120); 29 30 %Seems like this one is not needed anymore 29 31 mumps.pc_factor_shift_positive_definite=getfieldvalue(options,'pc_factor_shift_positive_definite','true'); 30 32 31 33 %These 2 lines make raijin break (ptwgts error during solver with PETSc 3.3) 32 mumps.mat_mumps_icntl_28=2; %1=serial, 2=parallel33 mumps.mat_mumps_icntl_29=2; %parallel ordering 1 = ptscotch, 2 = parmetis34 %mumps.mat_mumps_icntl_28=2; %1=serial, 2=parallel 35 %mumps.mat_mumps_icntl_29=2; %parallel ordering 1 = ptscotch, 2 = parmetis 34 36 end
Note: See TracChangeset for help on using the changeset viewer.