Changeset 24269
- Timestamp: 10/23/19 23:06:43 (5 years ago)
- Location:
- issm/trunk-jpl/src/m
- Files: 12 edited
Legend:
- Unmodified
- Added
- Removed
-
issm/trunk-jpl/src/m/classes/clusters/cyclone.py
r24213 r24269 73 73 #write queuing script 74 74 fid = open(modelname + '.queue', 'w') 75 fid.write('export ISSM_DIR ="%s/../ "\n' % self.codepath)75 fid.write('export ISSM_DIR="%s/../ "\n' % self.codepath) 76 76 fid.write('source $ISSM_DIR/etc/environment.sh\n') 77 77 fid.write('INTELLIBS = "/opt/intel/intelcompiler-12.04/composerxe-2011.4.191/compiler/lib/intel64"\n') -
issm/trunk-jpl/src/m/classes/clusters/hexagon.py
r24213 r24269 107 107 fid.write('#PBS -o %s/%s/%s.outlog \n' % (self.executionpath, dirname, modelname)) 108 108 fid.write('#PBS -e %s/%s/%s.errlog \n\n' % (self.executionpath, dirname, modelname)) 109 fid.write('export ISSM_DIR ="%s/../"\n' % self.codepath)110 fid.write('export CRAY_ROOTFS =DSL\n')109 fid.write('export ISSM_DIR="%s/../"\n' % self.codepath) 110 fid.write('export CRAY_ROOTFS=DSL\n') 111 111 fid.write('module swap PrgEnv-cray / 5.2.40 PrgEnv - gnu\n') 112 112 fid.write('module load cray-petsc\n') -
issm/trunk-jpl/src/m/classes/clusters/pfe.py
r24213 r24269 150 150 fid.write('module load comp-intel/2015.0.090\n') 151 151 fid.write('module load mpi-sgi/mpt.2.11r13\n') 152 fid.write('export PATH ="$PATH:."\n\n')153 fid.write('export MPI_GROUP_MAX =64\n\n')154 fid.write('export ISSM_DIR ="%s/../ "\n' % self.codepath)152 fid.write('export PATH="$PATH:."\n\n') 153 fid.write('export MPI_GROUP_MAX=64\n\n') 154 fid.write('export ISSM_DIR="%s/../ "\n' % self.codepath) 155 155 fid.write('source $ISSM_DIR/etc/environment.sh\n') 156 156 fid.write('cd %s/%s/ \n\n' % (self.executionpath, dirname)) -
issm/trunk-jpl/src/m/classes/clusters/stallo.py
r24240 r24269 124 124 fid.write('#SBATCH --error %s/%s/%s.errlog \n\n' % (self.executionpath, dirname, modelname)) 125 125 126 fid.write('export ISSM_DIR ="%s/../"\n' % self.codepath)126 fid.write('export ISSM_DIR="%s/../"\n' % self.codepath) 127 127 fid.write('module purge\n') 128 128 fid.write('module load CMake/3.8.0-GCCcore-6.3.0\n') -
issm/trunk-jpl/src/m/classes/clusters/vilje.py
r24213 r24269 108 108 fid.write('#PBS -o %s/%s/%s.outlog \n' % (self.executionpath, dirname, modelname)) 109 109 fid.write('#PBS -e %s/%s/%s.errlog \n\n' % (self.executionpath, dirname, modelname)) 110 fid.write('export ISSM_DIR ="%s/../ "\n' % self.codepath)110 fid.write('export ISSM_DIR="%s/../ "\n' % self.codepath) 111 111 fid.write('module load intelcomp/17.0.0\n') 112 112 fid.write('module load mpt/2.14\n') -
issm/trunk-jpl/src/m/classes/pairoptions.py
r24213 r24269 1 1 from collections import OrderedDict 2 3 2 4 3 class pairoptions(object): … … 31 30 if self.list: 32 31 s += " list: ({}x{}) \n\n".format(len(self.list), 2) 33 for item in list(self.list.items()): 34 s += " field: {} value: '{}'\n".format((item[0], item[1])) 32 for item in self.list.items(): 33 s += " field: {} value: '{}'\n".format(item[0], item[1]) 34 print(s) 35 35 else: 36 36 s += " list: empty\n" -
issm/trunk-jpl/src/m/classes/plotoptions.py
r24213 r24269 26 26 s += " list: (%ix%i)\n" % (len(self.list), 2) 27 27 for item in list(self.list.items()): 28 #s += " options of plot number %i\n" % item 29 if isinstance(item[1], str): 30 s += " field: % - 10s value: '%s'\n" % (item[0], item[1]) 31 elif isinstance(item[1], (bool, int, float)): 32 s += " field: % - 10s value: '%g'\n" % (item[0], item[1]) 33 else: 34 s += " field: % - 10s value: '%s'\n" % (item[0], item[1]) 28 s += " field: {:10} value: '{}'\n".format(item[0], item[1]) 29 print(s) 35 30 else: 36 31 s += " list: empty\n" -
issm/trunk-jpl/src/m/contrib/defleurian/paraview/exportVTK.py
r24261 r24269 108 108 num_of_points = every_nodes 109 109 if dim == 2: 110 points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.surface)) 110 mesh_alti = input('''This is a 2D model, what should be the 3rd dimension of the mesh : 111 1 : md.geometry.surface 112 2 : md.geometry.base 113 3 : md.geometry.bed 114 4 : 0\n''') 115 if mesh_alti == 1: 116 points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.surface)) 117 elif mesh_alti == 2: 118 points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.base)) 119 elif mesh_alti == 3: 120 points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.bed)) 121 elif mesh_alti == 4: 122 points = np.column_stack((md.mesh.x, md.mesh.y, 0. * md.mesh.x)) 123 else: 124 points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.surface)) 111 125 elif dim == 3: 126 mesh_alti = 1 112 127 points = np.column_stack((md.mesh.x, md.mesh.y, md.mesh.z)) 113 128 else: … … 127 142 fid.write('POINTS {:d} float\n'.format(num_of_points)) 128 143 #updating z for mesh evolution 129 if moving_mesh :144 if moving_mesh and mesh_alti in [1, 2]: 130 145 base = np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Base'][enveloppe_index]) 131 146 thick_change_ratio = (np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Thickness'][enveloppe_index]) / md.geometry.thickness[enveloppe_index]) … … 242 257 fid.write('{:f} {:f} {:f}\n'.format(0, 0, 0)) 243 258 else: 244 if (np.size(spe_res_struct.__dict__[field]) == 1): 259 if np.size(spe_res_struct.__dict__[field]) == 1: 260 if field == 'time': 261 current_time = spe_res_struct.__dict__[field] 245 262 #skipping integers 246 263 continue 247 elif ((np.size(spe_res_struct.__dict__[field])) == every_nodes):264 elif np.size(spe_res_struct.__dict__[field]) == every_nodes: 248 265 fid.write('SCALARS {} float 1 \n'.format(field)) 249 266 fid.write('LOOKUP_TABLE default\n') … … 251 268 outval = 
cleanOutliers(np.squeeze(spe_res_struct.__dict__[field][enveloppe_index[node]])) 252 269 fid.write('{:f}\n'.format(outval)) 253 elif (np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_cells):270 elif np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_cells: 254 271 saved_cells[field] = np.squeeze(spe_res_struct.__dict__[field]) 255 272 else: … … 262 279 othernames = (dict.keys(other_struct.__dict__)) 263 280 for field in othernames: 264 if (np.size(other_struct.__dict__[field]) == 1):281 if np.size(other_struct.__dict__[field]) == 1: 265 282 #skipping integers 266 283 continue 267 elif (np.size(other_struct.__dict__[field]) == every_nodes):284 elif np.size(other_struct.__dict__[field]) == every_nodes: 268 285 fid.write('SCALARS {} float 1 \n'.format(field)) 269 286 fid.write('LOOKUP_TABLE default\n') … … 271 288 outval = cleanOutliers(other_struct.__dict__[field][enveloppe_index[node]]) 272 289 fid.write('{:f}\n'.format(outval)) 273 elif (np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_cells): 290 elif np.shape(other_struct.__dict__[field])[0] == every_nodes + 1: 291 #we are dealing with a forcing of some kind. 
292 forcing_time = other_struct.__dict__[field][-1, :] 293 if any(forcing_time == current_time): 294 forcing_index = np.where(forcing_time == current_time) 295 forcing_val = other_struct.__dict__[field][:, forcing_index] 296 elif forcing_time[0] > current_time: 297 forcing_val = other_struct.__dict__[field][:, 0] 298 elif forcing_time[-1] < current_time: 299 forcing_val = other_struct.__dict__[field][:, -1] 300 else: 301 forcing_index = np.where(forcing_time < current_time)[-1][-1] 302 delta_time = forcing_time[forcing_index + 1] - forcing_time[forcing_index] #compute forcing Dt 303 delta_current = current_time - forcing_time[forcing_index] # time since last forcing 304 ratio = delta_current / delta_time #compute weighting factor for preceding forcing vallue 305 forcing_evol = (other_struct.__dict__[field][:, forcing_index + 1] - other_struct.__dict__[field][:, forcing_index]) * ratio 306 forcing_val = other_struct.__dict__[field][:, forcing_index] + forcing_evol 307 # and now write it down 308 fid.write('SCALARS {}_{} float 1 \n'.format(other, field)) 309 fid.write('LOOKUP_TABLE default\n') 310 for node in range(0, num_of_points): 311 outval = cleanOutliers(forcing_val[enveloppe_index[node]]) 312 fid.write('{:f}\n'.format(outval)) 313 elif np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_cells: 274 314 saved_cells[field] = other_struct.__dict__[field] 275 315 else: -
issm/trunk-jpl/src/m/plot/plot_BC.py
r24213 r24269 51 51 slicesize = len(x) 52 52 fulldata = md.__dict__[str(spc_dict[str(key)][0])].__dict__[str(key)] 53 print(key)54 53 data = fulldata[(plotlayer - 1) * slicesize:plotlayer * slicesize] 55 print(np.shape(data))56 54 mark = spc_dict[str(key)][1] 57 55 color = spc_dict[str(key)][2] -
issm/trunk-jpl/src/m/plot/plot_manager.py
r24213 r24269 35 35 See also: PLOTMODEL, PLOT_UNIT 36 36 ''' 37 38 37 #parse options and get a structure of options 39 38 options = checkplotoptions(md, options) -
issm/trunk-jpl/src/m/plot/plot_unit.py
r24213 r24269 22 22 """ 23 23 #if we are plotting 3d replace the current axis 24 print(is2d)25 24 if not is2d: 26 25 axgrid[gridindex].axis('off') 27 26 ax = inset_locator.inset_axes(axgrid[gridindex], width='100%', height='100%', loc=3, borderpad=0, axes_class=Axes3D) 28 ax.set_axis_bgcolor((0.7, 0.7, 0.7))29 27 else: 30 28 ax = axgrid[gridindex] -
issm/trunk-jpl/src/m/plot/plotmodel.py
r24213 r24269 17 17 #First process options 18 18 options = plotoptions(*args) 19 20 19 #get number of subplots 21 20 subplotwidth = ceil(sqrt(options.numberofplots)) … … 97 96 for ax in axgrid.cbar_axes: 98 97 fig._axstack.remove(ax) 99 100 98 for i, ax in enumerate(axgrid.axes_all): 101 plot_manager(options.list[i].getfieldvalue('model', md), options.list[i], fig, axgrid, i) 99 try: 100 plot_manager(options.list[i].getfieldvalue('model', md), options.list[i], fig, axgrid, i) 101 except KeyError: 102 print("Too many axes present, we delete the overflow") 103 fig.delaxes(axgrid[i]) 102 104 fig.show() 103 105 else:
Note: See TracChangeset for help on using the changeset viewer.