Changeset 24255
- Timestamp:
- 10/18/19 06:22:34 (6 years ago)
- Location:
- issm/trunk-jpl
- Files:
-
- 1 deleted
- 13 edited
Legend:
- Unmodified
- Added
- Removed
-
issm/trunk-jpl/src/m/boundaryconditions/SetIceShelfBC.py
r24213 r24255 52 52 raise NameError('mesh type not supported yet') 53 53 if any(md.mask.ice_levelset <= 0): 54 values = md.mask.ice_levelset[md.mesh.segments[:, 0: -1] - 1]54 values = md.mask.ice_levelset[md.mesh.segments[:, 0:-1] - 1] 55 55 segmentsfront = 1 - values 56 56 np.sum(segmentsfront, axis=1) != numbernodesfront 57 57 segments = np.nonzero(np.sum(segmentsfront, axis=1) != numbernodesfront)[0] 58 58 #Find all nodes for these segments and spc them 59 pos = md.mesh.segments[segments, 0: -1] - 159 pos = md.mesh.segments[segments, 0:-1] - 1 60 60 else: 61 61 pos = np.nonzero(md.mesh.vertexonboundary)[0] -
issm/trunk-jpl/src/m/boundaryconditions/SetMarineIceSheetBC.py
r24213 r24255 64 64 segments = np.nonzero(np.sum(segmentsfront, axis=1) != numbernodesfront)[0] 65 65 #Find all nodes for these segments and spc them 66 pos = md.mesh.segments[segments, 0: -1] - 166 pos = md.mesh.segments[segments, 0:-1] - 1 67 67 else: 68 68 pos = np.nonzero(md.mesh.vertexonboundary)[0] -
issm/trunk-jpl/src/m/classes/esa.py
r24213 r24255 32 32 string = "%s\n%s" % (string, fielddisplay(self, 'love_h', 'load Love number for radial displacement')) 33 33 string = "%s\n%s" % (string, fielddisplay(self, 'love_l', 'load Love number for horizontal displaements')) 34 string = "%s\n%s" % (string, fielddisplay(self, 'hemisphere', 'North - south, East - west components of 2 - D horiz displacement vector: -1 south, 1 north'))34 string = "%s\n%s" % (string, fielddisplay(self, 'hemisphere', 'North-south, East-west components of 2-D horiz displacement vector:-1 south, 1 north')) 35 35 string = "%s\n%s" % (string, fielddisplay(self, 'degacc', 'accuracy (default .01 deg) for numerical discretization of the Green''s functions')) 36 36 string = "%s\n%s" % (string, fielddisplay(self, 'transitions', 'indices into parts of the mesh that will be icecaps')) -
issm/trunk-jpl/src/m/classes/thermal.py
r24213 r24255 107 107 md = checkfield(md, 'fieldname', 'thermal.watercolumn_upperlimit', '>=', 0) 108 108 109 TEMP = md.thermal.spctemperature[: -1].flatten(- 1)109 TEMP = md.thermal.spctemperature[:-1].flatten(- 1) 110 110 pos = np.where(~np.isnan(TEMP)) 111 111 try: -
issm/trunk-jpl/src/m/consistency/checkfield.py
r24213 r24255 167 167 minval = np.nanmin(field) 168 168 if options.getfieldvalue('timeseries', 0): 169 minval = np.nanmin(field[: -1])169 minval = np.nanmin(field[:-1]) 170 170 elif options.getfieldvalue('singletimeseries', 0): 171 171 if np.size(field) == 1: #some singletimeseries are just one value … … 187 187 minval = np.nanmin(field) 188 188 if options.getfieldvalue('timeseries', 0): 189 minval = np.nanmin(field[: -1])189 minval = np.nanmin(field[:-1]) 190 190 elif options.getfieldvalue('singletimeseries', 0): 191 191 if np.size(field) == 1: #some singletimeseries are just one value … … 208 208 maxval = np.nanmax(field) 209 209 if options.getfieldvalue('timeseries', 0): 210 maxval = np.nanmax(field[: -1])210 maxval = np.nanmax(field[:-1]) 211 211 elif options.getfieldvalue('singletimeseries', 0): 212 212 if np.size(field) == 1: #some singletimeseries are just one value … … 230 230 maxval = np.nanmax(field) 231 231 if options.getfieldvalue('timeseries', 0): 232 maxval = np.nanmax(field[: -1])232 maxval = np.nanmax(field[:-1]) 233 233 elif options.getfieldvalue('singletimeseries', 0): 234 234 if np.size(field) == 1: #some singletimeseries are just one value -
issm/trunk-jpl/src/m/contrib/defleurian/paraview/exportVTK.py
r24241 r24255 242 242 fid.write('{:f} {:f} {:f}\n'.format(0, 0, 0)) 243 243 else: 244 if ((np.size(spe_res_struct.__dict__[field])) == every_nodes): 244 if (np.size(spe_res_struct.__dict__[field]) == 1): 245 #skipping integers 246 continue 247 elif ((np.size(spe_res_struct.__dict__[field])) == every_nodes): 245 248 fid.write('SCALARS {} float 1 \n'.format(field)) 246 249 fid.write('LOOKUP_TABLE default\n') … … 248 251 outval = cleanOutliers(np.squeeze(spe_res_struct.__dict__[field][enveloppe_index[node]])) 249 252 fid.write('{:f}\n'.format(outval)) 250 elif ( (np.size(spe_res_struct.__dict__[field])) == every_cells):253 elif (np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_cells): 251 254 saved_cells[field] = np.squeeze(spe_res_struct.__dict__[field]) 255 else: 256 print("format for field {}.{} is not supported, field is skipped".format(sol, field)) 257 continue 252 258 # }}} 253 259 # loop on arguments, if something other than result is asked, do it now {{{ … … 256 262 othernames = (dict.keys(other_struct.__dict__)) 257 263 for field in othernames: 258 if (np.size(other_struct.__dict__[field]) == every_nodes): 264 if (np.size(other_struct.__dict__[field]) == 1): 265 #skipping integers 266 continue 267 elif (np.size(other_struct.__dict__[field]) == every_nodes): 259 268 fid.write('SCALARS {} float 1 \n'.format(field)) 260 269 fid.write('LOOKUP_TABLE default\n') … … 262 271 outval = cleanOutliers(other_struct.__dict__[field][enveloppe_index[node]]) 263 272 fid.write('{:f}\n'.format(outval)) 264 elif (np.s ize(other_struct.__dict__[field]) == every_cells):273 elif (np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_cells): 265 274 saved_cells[field] = other_struct.__dict__[field] 275 else: 276 print("format for field {}.{} is not supported, field is skipped".format(other, field)) 277 continue 266 278 # }}} 267 279 # Now writing cell variables {{{ -
issm/trunk-jpl/src/m/extrusion/project3d.py
r24213 r24255 53 53 projected_vector = (paddingvalue * np.ones((md.mesh.numberofvertices + 1))).astype(vector2d.dtype) 54 54 projected_vector[-1] = vector2d[-1] 55 vector2d = vector2d[: -1]55 vector2d = vector2d[:-1] 56 56 else: 57 57 raise TypeError("vector length not supported") … … 68 68 projected_vector = (paddingvalue * np.ones((md.mesh.numberofvertices + 1, np.size(vector2d, axis=1)))).astype(vector2d.dtype) 69 69 projected_vector[-1, :] = vector2d[-1, :] 70 vector2d = vector2d[: -1, :]70 vector2d = vector2d[:-1, :] 71 71 else: 72 72 raise TypeError("vector length not supported") … … 86 86 projected_vector = (paddingvalue * np.ones((md.mesh.numberofelements + 1))).astype(vector2d.dtype) 87 87 projected_vector[-1] = vector2d[-1] 88 vector2d = vector2d[: -1]88 vector2d = vector2d[:-1] 89 89 else: 90 90 raise TypeError("vector length not supported") … … 101 101 projected_vector = (paddingvalue * np.ones((md.mesh.numberofelements + 1, np.size(vector2d, axis=1)))).astype(vector2d.dtype) 102 102 projected_vector[-1, :] = vector2d[-1, :] 103 vector2d = vector2d[: -1, :]103 vector2d = vector2d[:-1, :] 104 104 else: 105 105 raise TypeError("vector length not supported") -
issm/trunk-jpl/src/m/miscellaneous/fielddisplay.py
r24213 r24255 106 106 string = "%s%dx1%s" % (sbeg, len(field), send) 107 107 else: 108 string = string[: -1] + send108 string = string[:-1] + send 109 109 110 110 #call displayunit -
issm/trunk-jpl/src/m/plot/processdata.py
r24213 r24255 120 120 print('multiple-column spc field; specify column to plot using option "spccol"') 121 121 print(('column ', spccol, ' plotted for time: ', procdata[-1, spccol])) 122 procdata = procdata[0: -1, spccol]122 procdata = procdata[0:-1, spccol] 123 123 124 124 #mask? -
issm/trunk-jpl/src/m/solve/WriteData.py
r24213 r24255 44 44 scale = options.getfieldvalue('scale') 45 45 if np.size(data) > 1 and np.ndim(data) > 1 and np.size(data, 0) == timeserieslength: 46 data[0: - 1, :] = scale * data[0: -1, :]46 data[0:-1, :] = scale * data[0:-1, :] 47 47 else: 48 48 data = scale * data -
issm/trunk-jpl/src/m/solve/parseresultsfromdisk.py
r24240 r24255 149 149 try: 150 150 length = struct.unpack('i', fid.read(struct.calcsize('i')))[0] 151 fieldname = struct.unpack('{}s'.format(length), fid.read(length))[0][: -1]151 fieldname = struct.unpack('{}s'.format(length), fid.read(length))[0][:-1] 152 152 fieldname = fieldname.decode() #strings are binaries when stored so need to be converted back 153 153 time = struct.unpack('d', fid.read(struct.calcsize('d')))[0] … … 159 159 160 160 elif datatype == 2: 161 field = struct.unpack('{}s'.format(M), fid.read(M))[0][: -1]161 field = struct.unpack('{}s'.format(M), fid.read(M))[0][:-1] 162 162 field = field.decode() 163 163 … … 282 282 try: 283 283 length = struct.unpack('i', fid.read(struct.calcsize('i')))[0] 284 fieldname = struct.unpack('{}s'.format(length), fid.read(length))[0][: -1]284 fieldname = struct.unpack('{}s'.format(length), fid.read(length))[0][:-1] 285 285 time = struct.unpack('d', fid.read(struct.calcsize('d')))[0] 286 286 step = struct.unpack('i', fid.read(struct.calcsize('i')))[0] -
issm/trunk-jpl/test/NightlyRun/IdToName.py
r24226 r24255 13 13 14 14 string = '#Test Name:' 15 name = file_text[len(string) + 1: -1]15 name = file_text[len(string) + 1:-1] 16 16 return name -
issm/trunk-jpl/test/NightlyRun/test244.py
r24214 r24255 75 75 md.qmu.variables.surface_mass_balanceC = normal_uncertain.normal_uncertain('scaled_SmbC', 1, 0.5) 76 76 Tmin = 273. 77 telms = np.atleast_2d(np.min(md.smb.Ta[0: -1, :], 1))77 telms = np.atleast_2d(np.min(md.smb.Ta[0:-1, :], 1)) 78 78 mint_on_partition = telms.flatten() 79 79 for pa in range(np.size(mint_on_partition)):
Note:
See TracChangeset
for help on using the changeset viewer.