Changeset 27143
- Timestamp: 07/04/22 01:28:35 (3 years ago)
- Location: issm/trunk-jpl/src/m/contrib/defleurian/paraview
- Files: 1 added, 1 edited
Legend:
- Unmodified lines are unprefixed
- Added lines are prefixed with +
- Removed lines are prefixed with -
issm/trunk-jpl/src/m/contrib/defleurian/paraview/exportVTK.py
r26572 → r27143

@@ -29 +29 @@
     Basile de Fleurian:
     '''
+    #verbosity of the code, 0 is no messages, 5 is chatty
+    verbose = 0
+
+    print("""
+=========================================
+# A #
+# / \ exportVTK is now obsolete #
+# / | \ You should use export VTU #
+# / | \ faster, smaller files #
+# / o \ and more capacities #
+# --------- #
+#========================================
+""")
+
 
     for key in kwargs.keys():
         if key not in ['clipping', 'coarsetime', 'singletime']:
-            raise BadOption('Provided option "{}" is not supported possibilities are : {}'.format(key, ['cliping', 'coarsetime' ]))
+            raise BadOption('Provided option "{}" is not supported possibilities are : {}'.format(key, ['cliping', 'coarsetime', 'singletime']))
 
     if 'coarsetime' in kwargs.keys() and 'singletime' in kwargs.keys():

@@ -56 +70 @@
 
     # this is the result structure {{{
-    print('Getting accessorie variables')
+    if verbose > 3:
+        print('Getting accessorie variables')
     res_struct = md.results
     moving_mesh = False

@@ -80 +95 @@
 
     # get the element related variables {{{
-    print('Now treating the mesh')
+    if verbose > 3:
+        print('Now treating the mesh')
     #first get the general things
     dim = int(md.mesh.domaintype()[0])
     every_nodes = md.mesh.numberofvertices
     every_cells = md.mesh.numberofelements
+    try:
+        every_edges = md.mesh.numberofedges
+    except AttributeError:
+        #3D meshes do not have edges
+        every_edges = 0
 
     if np.shape(md.mesh.elements)[1] == 3 or enveloppe:

@@ -103 +124 @@
            convert_index = np.nan * np.ones(np.shape(md.mesh.x))
            convert_index = np.asarray([[i, np.where(enveloppe_index == i)[0][0]] for i, val in enumerate(convert_index) if any(enveloppe_index == i)])
+
+            num_of_points = np.size(enveloppe_index)
             points = np.column_stack((md.mesh.x[enveloppe_index],
                                       md.mesh.y[enveloppe_index],

@@ -114 +137 @@
                 connect[elt, 2] = convert_index[np.where(convert_index == connect[elt, 2])[0], 1][0]
 
-            num_of_points = np.size(enveloppe_index)
+            num_of_edges = every_edges #looks like edges is only defined on the 2d mesh
+            if num_of_edges > 0:
+                edges = md.mesh.edges[:, 0:2].reshape(int(num_of_edges), 2) - 1
+
         else:
             raise BadDimension("exportVTK can't get an enveloppe for dimension {}".format(dim))

@@ -122 +148 @@
         num_of_elt = every_cells
         connect = md.mesh.elements - 1
+        num_of_edges = every_edges
+        if num_of_edges > 0:
+            edges = md.mesh.edges[:, 0:2].reshape(int(num_of_edges), 2) - 1
         enveloppe_index = np.arange(0, np.size(md.mesh.x))
         num_of_points = every_nodes

@@ -129 +158 @@
 2 : md.geometry.base
 3 : md.geometry.bed
-4 : 0\n''')
+4 : 0
+5 : Custom\n''')
         if mesh_alti == '1':
             points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.surface))

@@ -138 +168 @@
         elif mesh_alti == '4':
             points = np.column_stack((md.mesh.x, md.mesh.y, 0. * md.mesh.x))
+        elif mesh_alti == '5':
+            alti_field = input("Which field should be used as 3rd dimension: ")
+            alti_var = eval(alti_field)
+            if np.shape(np.squeeze(alti_var)) == np.shape(md.mesh.x):
+                points = np.column_stack((md.mesh.x, md.mesh.y, np.squeeze(alti_var)))
+            else:
+                raise BadDimension('field given for 3rd dimension should be defined on vertices {} is not.'.format(alti_field))
         else:
             points = np.column_stack((md.mesh.x, md.mesh.y, md.geometry.surface))

@@ -166 +203 @@
         clip_convert_index = np.nan * np.ones(np.shape(points)[0])
 
+        #define the vertices that are within clipping window
         Inclipping = np.intersect1d(InX, InY)
         Isinside[Inclipping] = True

@@ -171 +209 @@
         num_of_points = np.shape(points)[0]
 
+        #go thorough the elements and keep those for which one node is in the clipped arrea
        clipconnect = np.asarray([], dtype=int)
         for elt in connect:

@@ -176 +215 @@
                 clipconnect = np.append(clipconnect, elt, axis=0)
 
+        #reshape
         num_of_elt = int(np.size(clipconnect) / 3)
         connect = clipconnect.reshape(num_of_elt, 3)

@@ -181 +221 @@
         clip_convert_index = np.asarray([[i, np.where(Inclipping == i)[0][0]] for i, val in enumerate(clip_convert_index) if any(Inclipping == i)])
         enveloppe_index = enveloppe_index[clip_convert_index[:, 0]]
+
+        #convert indexing and exclude elements that are partly outside of the region
         for elt in range(0, num_of_elt):
             try:

@@ -198 +240 @@
         num_of_elt = np.shape(connect)[0]
 
+        if num_of_edges > 0:
+            clipedges = np.asarray([], dtype=int)
+            for edge in edges:
+                if set(edge).issubset(Inclipping):
+                    clipedges = np.append(clipedges, edge, axis=0)
+
+            num_of_edges = int(np.size(clipedges) / 2)
+            edges = clipedges.reshape(num_of_edges, 2)
+
+            for edge in range(0, num_of_edges):
+                try:
+                    edges[edge, 0] = clip_convert_index[np.where(clip_convert_index == edges[edge, 0])[0], 1][0]
+                except IndexError:
+                    edges[edge, 0] = -1
+                try:
+                    edges[edge, 1] = clip_convert_index[np.where(clip_convert_index == edges[edge, 1])[0], 1][0]
+                except IndexError:
+                    edges[edge, 1] = -1
+            edges = edges[np.where(edges != -1)[0], :]
+            num_of_edges = np.shape(edges)[0]
+
     # }}}
+
     # write header and mesh {{{
-    print('Now starting to write stuff')
+    if verbose > 3:
+        print('Now starting to write stuff')
 
     if 'coarsetime' in kwargs.keys():

@@ -210 +275 @@
 
     for step in steplist:
-        print('Writing for step {}'.format(step))
+        if verbose > 2:
+            print('Writing for step {}'.format(step))
         saved_cells = {}
+        saved_edges = {}
         timestep = step
-        fid = open((filename + '/Timestep.vtk' + str(timestep) + '.vtk'), 'w+')
-        fid.write('# vtk DataFile Version 3.0 \n')
-        fid.write('Data for run {} \n'.format(md.miscellaneous.name))
-        fid.write('ASCII \n')
-        fid.write('DATASET UNSTRUCTURED_GRID \n')
-        fid.write('POINTS {:d} float\n'.format(num_of_points))
-        #updating z for mesh evolution
-        if moving_mesh and mesh_alti in ['1', '2']:
-            base = np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Base'][enveloppe_index])
-            thick_change_ratio = (np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Thickness'][enveloppe_index]) / md.geometry.thickness[enveloppe_index])
-            above_bed = points[:, 2] - md.geometry.base[enveloppe_index]
-            altitude = base + thick_change_ratio * above_bed
-        else:
-            altitude = points[:, 2]
-        for index, point in enumerate(points):
-            fid.write('{:f} {:f} {:f} \n'.format(point[0], point[1], altitude[index]))
-
-        fid.write('CELLS {:d} {:d}\n'.format(num_of_elt, num_of_elt * (point_per_elt + 1)))
-
-        for elt in range(0, num_of_elt):
-            if celltype == 5:
-                fid.write('3 {:d} {:d} {:d}\n'.format(connect[elt, 0],
-                                                      connect[elt, 1],
-                                                      connect[elt, 2]))
-            elif celltype == 13:
-                fid.write('6 {:d} {:d} {:d} {:d} {:d} {:d}\n'.format(connect[elt, 0],
-                                                                     connect[elt, 1],
-                                                                     connect[elt, 2],
-                                                                     connect[elt, 3],
-                                                                     connect[elt, 4],
-                                                                     connect[elt, 5]))
-
-        fid.write('CELL_TYPES {:d}\n'.format(num_of_elt))
-        for elt in range(0, num_of_elt):
-            fid.write('{:d}\n'.format(celltype))
-
-        fid.write('POINT_DATA {:s} \n'.format(str(num_of_points)))
+        with open((filename + '/Timestep.vtk' + str(timestep) + '.vtk'), 'w+') as fid:
+            fid.write('# vtk DataFile Version 3.0 \n')
+            fid.write('Data for run {} \n'.format(md.miscellaneous.name))
+            fid.write('ASCII \n')
+            fid.write('DATASET UNSTRUCTURED_GRID \n')
+            fid.write('POINTS {:d} float\n'.format(num_of_points))
+            #updating z for mesh evolution
+            if moving_mesh and mesh_alti in ['1', '2']:
+                base = np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Base'][enveloppe_index])
+                thick_change_ratio = (np.squeeze(res_struct.__dict__['TransientSolution'][step].__dict__['Thickness'][enveloppe_index]) / md.geometry.thickness[enveloppe_index])
+                above_bed = points[:, 2] - md.geometry.base[enveloppe_index]
+                altitude = base + thick_change_ratio * above_bed
+            else:
+                altitude = points[:, 2]
+            for index, point in enumerate(points):
+                fid.write('{:f} {:f} {:f} \n'.format(point[0], point[1], altitude[index]))
+
+            fid.write('CELLS {:d} {:d}\n'.format((num_of_elt + num_of_edges), num_of_elt * (point_per_elt + 1) + num_of_edges * 3))
+
+            for elt in range(0, num_of_elt):
+                if celltype == 5:
+                    fid.write('3 {:d} {:d} {:d}\n'.format(connect[elt, 0],
+                                                          connect[elt, 1],
+                                                          connect[elt, 2]))
+                elif celltype == 13:
+                    fid.write('6 {:d} {:d} {:d} {:d} {:d} {:d}\n'.format(connect[elt, 0],
+                                                                         connect[elt, 1],
+                                                                         connect[elt, 2],
+                                                                         connect[elt, 3],
+                                                                         connect[elt, 4],
+                                                                         connect[elt, 5]))
+            for edge in range(0, num_of_edges):
+                fid.write('2 {:d} {:d}\n'.format(edges[edge, 0],
+                                                 edges[edge, 1]))
+
+            fid.write('CELL_TYPES {:d}\n'.format(num_of_elt + num_of_edges))
+            for elt in range(0, num_of_elt):
+                fid.write('{:d}\n'.format(celltype))
+            for edge in range(0, num_of_edges):
+                fid.write('3\n') #3 is for lines
+
+            fid.write('POINT_DATA {:s} \n'.format(str(num_of_points)))
+            # }}}
+            # {{{loop over the different solution structures
+            # first check if there are solutions to grab
+            if 'solnames' in locals():
+                for sol in solnames:
+                    treated_res = []
+                    #dealing with results on different timesteps
+                    try:
+                        if(len(res_struct.__dict__[sol]) > timestep):
+                            timestep = step
+                        else:
+                            timestep = np.size(res_struct.__dict__[sol])
+                    except TypeError:
+                        #result as no len() so no timesteps
+                        timestep = 1
+
+                    #getting the fields in the solution
+                    if(type(res_struct.__dict__[sol]).__name__ == 'solution'):
+                        spe_res_struct = res_struct.__dict__[sol].__getitem__(timestep)
+                        fieldnames = list(dict.keys(spe_res_struct.__dict__))
+                    elif(type(res_struct.__dict__[sol]).__name__ == 'solutionstep'):
+                        spe_res_struct = res_struct.__dict__[sol]
+                        fieldnames = list(dict.keys(spe_res_struct.__dict__))
+                    elif(type(res_struct.__dict__[sol]).__name__ == 'results'): #this is a result without steps
+                        spe_res_struct = res_struct.__dict__[sol]
+                        fieldnames = list(dict.keys(spe_res_struct.__dict__))
+                    else:
+                        print("WARNING, solution type '{}' is not recognise, exported results might be wrong".format(type(res_struct.__dict__[sol])))
+                        spe_res_struct = res_struct.__dict__[sol]
+                        fieldnames = list(dict.keys(spe_res_struct.__dict__))
+
+                    #Sorting scalars, vectors and tensors
+                    tensors = [field for field in fieldnames if field[-2:] in ['xx', 'yy', 'xy', 'zz', 'xz', 'yz']]
+                    non_tensor = [field for field in fieldnames if field not in tensors]
+                    vectors = [field for field in non_tensor if field[-1] in ['x', 'y', 'z'] and field[-4:] not in ['Flux']]
+                    #check which field is a real result and print
+                    for field in fieldnames:
+                        if verbose > 2:
+                            print("Treating {}".format(field))
+                        if field in treated_res:
+                            if verbose > 2:
+                                print("{} is already done".format(field))
+                            continue
+                        elif field in vectors:
+                            if verbose > 2:
+                                print("{} is a vector".format(field))
+                            try:
+                                Vxstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'x'])
+                                Vystruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'y'])
+                                treated_res += [field[:-1] + 'x', field[:-1] + 'y']
+                                if dim == 3 and field[:-1] + 'z' in fieldnames:
+                                    #some fields like adjoint or always 2D
+                                    Vzstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'z'])
+                                    treated_res += [field[:-1] + 'z']
+
+                            except KeyError:
+                                fieldnames += field
+                                vectors.remove(field)
+
+                            fid.write('VECTORS {} float \n'.format(field[:-1]))
+                            for node in range(0, num_of_points):
+                                Vx = cleanOutliers(Vxstruct[enveloppe_index[node]])
+                                Vy = cleanOutliers(Vystruct[enveloppe_index[node]])
+                                if dim == 3 and field[:-1] + 'z' in fieldnames:
+                                    Vz = cleanOutliers(Vzstruct[enveloppe_index[node]])
+                                    fid.write('{:f} {:f} {:f}\n'.format(Vx, Vy, Vz))
+                                else:
+                                    fid.write('{:f} {:f} {:f}\n'.format(Vx, Vy, 0))
+
+                        elif field in tensors:
+                            if verbose > 2:
+                                print("{} is a tensor".format(field))
+                            try:
+                                Txxstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xx'])
+                                Txystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xy'])
+                                Tyystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yy'])
+                                treated_res += [field[:-2] + 'xx', field[:-2] + 'xy', field[:-2] + 'yy']
+                                if dim == 3:
+                                    Tzzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'zz'])
+                                    Txzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xz'])
+                                    Tyzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yz'])
+                                    treated_res += [field[:-2] + 'zz', field[:-2] + 'xz', field[:-2] + 'yz']
+
+                            except KeyError:
+                                fieldnames += field
+                                tensors.remove(field)
+
+                            fid.write('TENSORS {} float \n'.format(field[:-2]))
+                            for node in range(0, num_of_points):
+                                Txx = cleanOutliers(Txxstruct[enveloppe_index[node]])
+                                Tyy = cleanOutliers(Tyystruct[enveloppe_index[node]])
+                                Txy = cleanOutliers(Txystruct[enveloppe_index[node]])
+                                if dim == 3:
+                                    Tzz = cleanOutliers(Tzzstruct[enveloppe_index[node]])
+                                    Txz = cleanOutliers(Txzstruct[enveloppe_index[node]])
+                                    Tyz = cleanOutliers(Tyzstruct[enveloppe_index[node]])
+                                    fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, Txz))
+                                    fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, Tyz))
+                                    fid.write('{:f} {:f} {:f}\n'.format(Txz, Tyz, Tzz))
+                                elif dim == 2:
+                                    fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, 0))
+                                    fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, 0))
+                                    fid.write('{:f} {:f} {:f}\n'.format(0, 0, 0))
+                        else:
+                            if np.size(spe_res_struct.__dict__[field]) == 1:
+                                if field == 'time':
+                                    current_time = spe_res_struct.__dict__[field]
+                                #skipping integers
+                                continue
+                            elif np.size(spe_res_struct.__dict__[field]) == every_nodes:
+                                fid.write('SCALARS {} float 1 \n'.format(field))
+                                fid.write('LOOKUP_TABLE default\n')
+                                for node in range(0, num_of_points):
+                                    outval = cleanOutliers(np.squeeze(spe_res_struct.__dict__[field][enveloppe_index[node]]))
+                                    fid.write('{:f}\n'.format(outval))
+                            elif np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_cells:
+                                saved_cells[field] = np.squeeze(spe_res_struct.__dict__[field])
+                            elif np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_edges:
+                                saved_edges[field] = np.squeeze(spe_res_struct.__dict__[field])
+                            else:
+                                print("format for field {}.{} is not suported, field is skipped".format(sol, field))
+            # }}}
+            # loop on arguments, if something other than result is asked, do it now {{{
+            for other in args:
+                other_struct = md.__dict__[other]
+                othernames = (dict.keys(other_struct.__dict__))
+                for field in othernames:
+                    if np.size(other_struct.__dict__[field]) == 1:
+                        #skipping integers
+                        continue
+                    elif np.size(other_struct.__dict__[field]) == every_nodes:
+                        fid.write('SCALARS {} float 1 \n'.format(field))
+                        fid.write('LOOKUP_TABLE default\n')
+                        for node in range(0, num_of_points):
+                            outval = cleanOutliers(other_struct.__dict__[field][enveloppe_index[node]])
+                            fid.write('{:f}\n'.format(outval))
+                    elif np.shape(other_struct.__dict__[field])[0] == every_nodes + 1:
+                        #we are dealing with a forcing of some kind.
+                        forcing_time = other_struct.__dict__[field][-1, :]
+                        if any(forcing_time == current_time):
+                            forcing_index = np.where(forcing_time == current_time)
+                            forcing_val = other_struct.__dict__[field][:, forcing_index]
+                        elif forcing_time[0] > current_time:
+                            forcing_val = other_struct.__dict__[field][:, 0]
+                        elif forcing_time[-1] < current_time:
+                            forcing_val = other_struct.__dict__[field][:, -1]
+                        else:
+                            forcing_index = np.where(forcing_time < current_time)[-1][-1]
+                            delta_time = forcing_time[forcing_index + 1] - forcing_time[forcing_index] #compute forcing Dt
+                            delta_current = current_time - forcing_time[forcing_index] # time since last forcing
+                            ratio = delta_current / delta_time #compute weighting factor for preceding forcing vallue
+                            forcing_evol = (other_struct.__dict__[field][:, forcing_index + 1] - other_struct.__dict__[field][:, forcing_index]) * ratio
+                            forcing_val = other_struct.__dict__[field][:, forcing_index] + forcing_evol
+                        # and now write it down
+                        fid.write('SCALARS {}_{} float 1 \n'.format(other, field))
+                        fid.write('LOOKUP_TABLE default\n')
+                        for node in range(0, num_of_points):
+                            outval = cleanOutliers(forcing_val[enveloppe_index[node]])
+                            fid.write('{:f}\n'.format(outval))
+                    elif np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_cells:
+                        saved_cells[field] = other_struct.__dict__[field]
+                    elif np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_edges:
+                        saved_edges[field] = other_struct.__dict__[field]
+                    else:
+                        print("format for field {}.{} is not suported, field is skipped".format(other, field))
+                        continue
+            # }}}
+            # Now writting cell variables {{{
+            if np.size(list(saved_cells.keys())) > 0:
+                fid.write('CELL_DATA {:d} \n'.format(num_of_elt + num_of_edges))
+                for key in list(saved_cells.keys()):
+                    fid.write('SCALARS {} float 1 \n'.format(key))
+                    fid.write('LOOKUP_TABLE default\n')
+                    for cell in range(0, num_of_elt):
+                        outval = cleanOutliers(saved_cells[key][cell])
+                        fid.write('{:f}\n'.format(outval))
+                    for edge in range(0, num_of_edges):
+                        fid.write('{:f}\n'.format(-9999.999))
+            # }}}
+            # Now writting edge variables {{{
+            if np.size(list(saved_edges.keys())) > 0:
+                for key in list(saved_edges.keys()):
+                    fid.write('SCALARS {} float 1 \n'.format(key))
+                    fid.write('LOOKUP_TABLE default\n')
+                    for cell in range(0, num_of_elt):
+                        fid.write('{:f}\n'.format(-9999.999))
+                    for edge in range(0, num_of_edges):
+                        outval = cleanOutliers(saved_edges[key][edge])
+                        fid.write('{:f}\n'.format(outval))
     # }}}
-        # {{{loop over the different solution structures
-        # first check if there are solutions to grab
-        if 'solnames' in locals():
-            for sol in solnames:
-                treated_res = []
-                #dealing with results on different timesteps
-                try:
-                    if(len(res_struct.__dict__[sol]) > timestep):
-                        timestep = step
-                    else:
-                        timestep = np.size(res_struct.__dict__[sol])
-                except TypeError:
-                    #result as no len() so no timesteps
-                    timestep = 1
-
-                #getting the fields in the solution
-                if(type(res_struct.__dict__[sol]).__name__ == 'solution'):
-                    spe_res_struct = res_struct.__dict__[sol].__getitem__(timestep)
-                    fieldnames = dict.keys(spe_res_struct.__dict__)
-                elif(type(res_struct.__dict__[sol]).__name__ == 'solutionstep'):
-                    spe_res_struct = res_struct.__dict__[sol]
-                    fieldnames = dict.keys(spe_res_struct.__dict__)
-                elif(type(res_struct.__dict__[sol]).__name__ == 'results'): #this is a result without steps
-                    spe_res_struct = res_struct.__dict__[sol]
-                    fieldnames = dict.keys(spe_res_struct.__dict__)
-                else:
-                    print("WARNING, solution type '{}' is not recognise, exported results might be wrong".format(type(res_struct.__dict__[sol])))
-                    spe_res_struct = res_struct.__dict__[sol]
-                    fieldnames = dict.keys(spe_res_struct.__dict__)
-
-                #Sorting scalars, vectors and tensors
-                tensors = [field for field in fieldnames if field[-2:] in ['xx', 'yy', 'xy', 'zz', 'xz', 'yz']]
-                non_tensor = [field for field in fieldnames if field not in tensors]
-                vectors = [field for field in non_tensor if field[-1] in ['x', 'y', 'z'] and field[-4:] not in ['Flux']]
-
-                #check which field is a real result and print
-                for field in fieldnames:
-                    print("Treating {}".format(field))
-                    if field in treated_res:
-                        print("{} is already done".format(field))
-                        continue
-                    elif field in vectors:
-                        print("{} is a vector".format(field))
-                        try:
-                            Vxstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'x'])
-                            Vystruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'y'])
-                            treated_res += [field[:-1] + 'x', field[:-1] + 'y']
-                            if dim == 3:
-                                Vzstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'z'])
-                                treated_res += field[:-1] + 'z'
-
-                        except KeyError:
-                            fieldnames += field
-                            vectors.remove(field)
-
-                        fid.write('VECTORS {} float \n'.format(field[:-1]))
-                        for node in range(0, num_of_points):
-                            Vx = cleanOutliers(Vxstruct[enveloppe_index[node]])
-                            Vy = cleanOutliers(Vystruct[enveloppe_index[node]])
-                            if dim == 3:
-                                Vz = cleanOutliers(Vzstruct[enveloppe_index[node]])
-                                fid.write('{:f} {:f} {:f}\n'.format(Vx, Vy, Vz))
-                            elif dim == 2:
-                                fid.write('{:f} {:f} {:f}\n'.format(Vx, Vy, 0))
-
-                    elif field in tensors:
-                        print("{} is a tensor".format(field))
-                        try:
-                            Txxstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xx'])
-                            Txystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xy'])
-                            Tyystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yy'])
-                            treated_res += [field[:-2] + 'xx', field[:-2] + 'xy', field[:-2] + 'yy']
-                            if dim == 3:
-                                Tzzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'zz'])
-                                Txzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xz'])
-                                Tyzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yz'])
-                                treated_res += [field[:-2] + 'zz', field[:-2] + 'xz', field[:-2] + 'yz']
-
-                        except KeyError:
-                            fieldnames += field
-                            tensors.remove(field)
-
-                        fid.write('TENSORS {} float \n'.format(field[:-2]))
-                        for node in range(0, num_of_points):
-                            Txx = cleanOutliers(Txxstruct[enveloppe_index[node]])
-                            Tyy = cleanOutliers(Tyystruct[enveloppe_index[node]])
-                            Txy = cleanOutliers(Txystruct[enveloppe_index[node]])
-                            if dim == 3:
-                                Tzz = cleanOutliers(Tzzstruct[enveloppe_index[node]])
-                                Txz = cleanOutliers(Txzstruct[enveloppe_index[node]])
-                                Tyz = cleanOutliers(Tyzstruct[enveloppe_index[node]])
-                                fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, Txz))
-                                fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, Tyz))
-                                fid.write('{:f} {:f} {:f}\n'.format(Txz, Tyz, Tzz))
-                            elif dim == 2:
-                                fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, 0))
-                                fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, 0))
-                                fid.write('{:f} {:f} {:f}\n'.format(0, 0, 0))
-                    else:
-                        if np.size(spe_res_struct.__dict__[field]) == 1:
-                            if field == 'time':
-                                current_time = spe_res_struct.__dict__[field]
-                            #skipping integers
-                            continue
-                        elif np.size(spe_res_struct.__dict__[field]) == every_nodes:
-                            fid.write('SCALARS {} float 1 \n'.format(field))
-                            fid.write('LOOKUP_TABLE default\n')
-                            for node in range(0, num_of_points):
-                                outval = cleanOutliers(np.squeeze(spe_res_struct.__dict__[field][enveloppe_index[node]]))
-                                fid.write('{:f}\n'.format(outval))
-                        elif np.shape(spe_res_struct.__dict__[field])[0] == np.size(spe_res_struct.__dict__[field]) == every_cells:
-                            saved_cells[field] = np.squeeze(spe_res_struct.__dict__[field])
-                        else:
-                            print("format for field {}.{} is not suported, field is skipped".format(sol, field))
-        # }}}
-        # loop on arguments, if something other than result is asked, do it now {{{
-        for other in args:
-            other_struct = md.__dict__[other]
-            othernames = (dict.keys(other_struct.__dict__))
-            for field in othernames:
-                if np.size(other_struct.__dict__[field]) == 1:
-                    #skipping integers
-                    continue
-                elif np.size(other_struct.__dict__[field]) == every_nodes:
-                    fid.write('SCALARS {} float 1 \n'.format(field))
-                    fid.write('LOOKUP_TABLE default\n')
-                    for node in range(0, num_of_points):
-                        outval = cleanOutliers(other_struct.__dict__[field][enveloppe_index[node]])
-                        fid.write('{:f}\n'.format(outval))
-                elif np.shape(other_struct.__dict__[field])[0] == every_nodes + 1:
-                    #we are dealing with a forcing of some kind.
-                    forcing_time = other_struct.__dict__[field][-1, :]
-                    if any(forcing_time == current_time):
-                        forcing_index = np.where(forcing_time == current_time)
-                        forcing_val = other_struct.__dict__[field][:, forcing_index]
-                    elif forcing_time[0] > current_time:
-                        forcing_val = other_struct.__dict__[field][:, 0]
-                    elif forcing_time[-1] < current_time:
-                        forcing_val = other_struct.__dict__[field][:, -1]
-                    else:
-                        forcing_index = np.where(forcing_time < current_time)[-1][-1]
-                        delta_time = forcing_time[forcing_index + 1] - forcing_time[forcing_index] #compute forcing Dt
-                        delta_current = current_time - forcing_time[forcing_index] # time since last forcing
-                        ratio = delta_current / delta_time #compute weighting factor for preceding forcing vallue
-                        forcing_evol = (other_struct.__dict__[field][:, forcing_index + 1] - other_struct.__dict__[field][:, forcing_index]) * ratio
-                        forcing_val = other_struct.__dict__[field][:, forcing_index] + forcing_evol
-                    # and now write it down
-                    fid.write('SCALARS {}_{} float 1 \n'.format(other, field))
-                    fid.write('LOOKUP_TABLE default\n')
-                    for node in range(0, num_of_points):
-                        outval = cleanOutliers(forcing_val[enveloppe_index[node]])
-                        fid.write('{:f}\n'.format(outval))
-                elif np.shape(other_struct.__dict__[field])[0] == np.size(other_struct.__dict__[field]) == every_cells:
-                    saved_cells[field] = other_struct.__dict__[field]
-                else:
-                    print("format for field {}.{} is not suported, field is skipped".format(other, field))
-                    continue
-        # }}}
-        # Now writting cell variables {{{
-        if np.size(list(saved_cells.keys())) > 0:
-            fid.write('CELL_DATA {:d} \n'.format(num_of_elt))
-            for key in list(saved_cells.keys()):
-                fid.write('SCALARS {} float 1 \n'.format(key))
-                fid.write('LOOKUP_TABLE default\n')
-                for cell in range(0, num_of_elt):
-                    outval = cleanOutliers(saved_cells[key][cell])
-                    fid.write('{:f}\n'.format(outval))
-        # }}}
-        fid.close()
 
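Below is a hypothetical usage sketch of the updated exporter. The function signature itself is outside the hunks shown above, so exportVTK(filename, md, *args, **kwargs) is assumed from the way filename, md, args and kwargs are used in the code; the import paths, the loadmodel helper and the singletime value are likewise assumptions and not confirmed by this changeset.

    # Hypothetical sketch only -- names below that do not appear in the diff
    # (loadmodel, the import paths, the singletime value) are assumptions.
    from loadmodel import loadmodel    # assumed ISSM helper to reload a saved model
    from exportVTK import exportVTK    # assumed import path for this contrib module

    md = loadmodel('Models/transient_run.nc')    # hypothetical saved ISSM model

    # 'VTKOutput' acts as a directory: each step is written to
    # VTKOutput/Timestep.vtk<step>.vtk (see the with open(...) line above).
    # Extra positional names ('geometry' here) are md attributes exported as
    # additional point or cell data.
    exportVTK('VTKOutput', md, 'geometry')

    # The option check in the first hunk now accepts 'singletime' alongside
    # 'clipping' and 'coarsetime'; the value passed here is illustrative only.
    exportVTK('VTKOutput', md, 'geometry', singletime=-1)

Apart from the new options, the main functional change visible in the diff is that results defined on mesh edges are now written as VTK line cells (cell type 3): the CELLS and CELL_TYPES sections count num_of_elt + num_of_edges entries, cell data are padded with -9999.999 placeholders on edges, and edge data are padded the same way on elements.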