Changeset 25163
- Timestamp: 06/26/20 11:03:14 (5 years ago)
- Location: issm/trunk-jpl
- Files: 1 added, 12 edited
issm/trunk-jpl/src/m/classes/slr.py
Changes (r25125 → r25163):
- The class docstring now uses single-quote (''') delimiters instead of double-quote (""") delimiters.
- __init__: deltathickness, sealevel, and spcthickness are initialized with np.nan instead of float('NaN').
- __init__: the Ngia and Ugia fields are removed.
- setdefaultparameters: abstol becomes np.nan instead of float('NaN') (1 mm of sea level rise) and reltol gains a "#default" comment; the steric_rate default (with its "#steric:" comment) is removed and a "#hydro" comment is added above hydro_rate.
- checkconsistency: the checkfield calls for slr.steric_rate, slr.Ngia, and slr.Ugia are removed.
- marshall: the WriteData calls for steric_rate, Ngia, and Ugia are removed.
issm/trunk-jpl/src/m/coordsystems/epsg2proj.py
Changes (r25100 → r25163):
- The two gdalsrsinfo command strings (the GDAL version query and the EPSG-to-PROJ.4 query) are now passed through shlex.split() before being handed to subprocess.Popen(..., shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).
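A side note on this pattern (the sketch below is not part of the changeset): shlex.split() produces an argument list, which subprocess normally expects with shell=False, whereas a pipeline such as the awk/cut chain used here needs shell=True with the command left as a single string. Purely as a point of comparison, the GDAL major version could also be parsed in Python without a shell pipeline; this assumes gdalsrsinfo --version prints a string of the form "GDAL 3.4.1, released ...", which may vary between GDAL releases.

    # Hypothetical sketch, not from the changeset: query the GDAL version and keep
    # only the major number, replacing the awk/cut pipeline with Python parsing.
    import subprocess

    def gdal_major_version():
        outs = subprocess.check_output(['gdalsrsinfo', '--version']).decode()
        # second whitespace-separated field is the version string, e.g. "3.4.1," -> 3
        return int(outs.split()[1].split('.')[0])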
issm/trunk-jpl/src/m/coordsystems/gdaltransform.py
Changes (r25125 → r25163):
- TODO updated: the item about removing shlex is dropped; what remains is to follow the subprocess pattern implemented in src/m/coordsystems/epsg2proj.py.
- The local variable args is renamed to subproc_args in the shlex.split()/subprocess.check_call() lines (check_call raises CalledProcessError if the return code is not 0).
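One caveat worth noting (a reviewer-style observation, not something the changeset addresses): with the argument-list form, the '<' and '>' redirection tokens in the command string are handed to gdaltransform as literal arguments rather than interpreted by a shell. A minimal sketch of one way to keep check_call while wiring the files directly, assuming gdaltransform reads coordinate pairs on stdin and writes results to stdout:

    # Hypothetical sketch, not from the changeset: feed the input file to
    # gdaltransform on stdin and capture its output in the target file, avoiding
    # shell redirection entirely.
    import subprocess

    def run_gdaltransform(proj_in, proj_out, filename_in, filename_out):
        with open(filename_in, 'r') as fin, open(filename_out, 'w') as fout:
            subprocess.check_call(
                ['gdaltransform', '-s_srs', proj_in, '-t_srs', proj_out],
                stdin=fin, stdout=fout)  # raises CalledProcessError on non-zero exit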
issm/trunk-jpl/src/m/partition/AreaAverageOntoPartition.py
Changes (r25110 → r25163):
- Import order cleanup only: import copy is moved to the top of the import block (before import numpy as np) instead of sitting between the local adjacency and project2d imports.
issm/trunk-jpl/src/m/plot/processmesh.m
Changes (r25058 → r25163):
- Comment typo fixed: "special case for mes g2dvertical" becomes "special case for mesh 2dvertical".
- The is2d test is collapsed into a single condition: is2d=1 when md.mesh.dimension()==2 or when the 'layer' option is >=1, replacing the previous nested if/else.
- Trailing period removed from the comment about temporarily reverting to the 2d mesh from which the 3d mesh was extruded.
issm/trunk-jpl/src/m/plot/processmesh.py
Changes (r24213 → r25163):
- A commented-out "#import matplotlib.delaunay as delaunay" line is added at the top of the file.
- The docstring now uses ''' delimiters, is reworded ("process mesh to be plotted"), and gains a TODO: test the application of matplotlib.delaunay and check that its output matches MATLAB's delaunay (see src/m/plot/processmesh.m).
- The body is restructured to follow processmesh.m:
  - the empty-mesh and numberofvertices==numberofelements checks are kept;
  - a special case for 2Dvertical domains recursively calls processmesh on md.mesh and returns early;
  - a special case for 3Dsurface domains (lat/long coordinates triangulated with delaunay) is added but left commented out;
  - an 'amr' option reads x, y, and elements from md.results.TransientSolution[step] (MeshX, MeshY, MeshElements); otherwise they come from md.mesh, with x2d, y2d, and elements2d kept aside when present;
  - z defaults to md.mesh.z (or zeros), can be overridden by the 'z' option, and may be given as a field name string resolved with getattr;
  - the layer projection is handled after the 2D test: it raises "cannot work with 3D meshes for now" when combined with lat/lon coordinates, and otherwise temporarily switches to the 2d mesh from which the 3d mesh was extruded (x2d, y2d, zeros, elements2d - 1);
  - the unit scaling of x, y, and z is unchanged;
  - the isplanet workaround (md.__class__.__name__ != 'model') gains a TODO to change this once a planet class is defined (see src/m/plot/processmesh.m).
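On the matplotlib.delaunay TODO above: that module has been removed from current matplotlib releases, so the commented-out 3Dsurface branch would eventually need another triangulation source. A minimal sketch of one possible substitute using scipy.spatial (an assumption on my part; the changeset itself leaves the branch disabled):

    # Hypothetical replacement for the commented-out delaunay(x, y) call; the
    # .simplices array plays the role of the (ntriangles, 3) element connectivity.
    import numpy as np
    from scipy.spatial import Delaunay

    def delaunay_elements(x, y):
        points = np.column_stack((x, y))
        return Delaunay(points).simplices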
issm/trunk-jpl/src/m/qmu/dakota_in_data.py
Changes (r24213 → r25163):
- Docstring cleanup only: a "DAKOTA_IN_DATA -" header and a Usage block are added, the input descriptions are kept, and the explanatory paragraphs (defaults used when params is empty, optional args passed through to the QmuUpdateFunctions brancher, why the function exists and its independence from the analysis package, typical generation by a MATLAB script) are rewrapped and capitalized.
issm/trunk-jpl/src/m/qmu/postqmu.m
Changes (r25057 → r25163):
- The header comment is rewritten as "%POSTQMU - Deal with Dakota output results in files" with a Usage block, replacing the old "%INPUT function md=postqmu(md)" form.
- The warning about Dakota returning an error in the qmu err file is joined onto one line ("... qmu directory retained.").
- One of the two blank lines before "%put dakotaresults in their right location." is removed.
issm/trunk-jpl/src/m/qmu/postqmu.py
Changes (r24541 → r25163):
- Imports: shlex and loadresultsfromdisk are added, and "from subprocess import Popen" becomes "from subprocess import call".
- The docstring is rewritten in the "POSTQMU - ..." / Usage form, with a TODO to run a Dakota test checking that the 6/26 updates work and to add checks to Popen.
- New block: when md.qmu.output is set and the method is 'nond_sampling', per-sample results are collected by deep-copying md, setting qmu.isdakota = 0, calling loadresultsfromdisk on each '{name}.outbin.{i}' file for i in range(md.qmu.method.params.samples), and appending each md2.results to dakotaresults.modelresults.
- The zip steps that archive the individual function evaluations now build their argument lists with shlex.split('zip -mq ...') (fixing the stray spaces in the old command strings, e.g. 'zip - mq params.in.zip params.in.[1 - 9] * ') and run them with call(subproc_args, shell=True) instead of Popen.
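A note on the zip step: the wildcard patterns (params.in.[1-9]*, etc.) are normally expanded by the shell, which is why the command and shell=True go together. Purely as a point of comparison (not what the changeset does), the same archiving could be handled with the standard library, which sidesteps the shell; a minimal sketch under that assumption:

    # Hypothetical alternative to the external 'zip -mq' calls: glob the
    # per-evaluation files, add them to an archive, and remove the originals.
    import glob
    import os
    import zipfile

    def zip_and_remove(pattern, archive_name):
        files = glob.glob(pattern)  # e.g. 'params.in.[1-9]*'
        if not files:
            return
        with zipfile.ZipFile(archive_name, 'a') as archive:
            for f in files:
                archive.write(f)
                os.remove(f)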
issm/trunk-jpl/src/m/shp/shpwrite.py
Changes (r25065 → r25163):
- Whitespace only: a second blank line is added between the shapefile import guard and the shpwrite function definition.
issm/trunk-jpl/test/NightlyRun/test2010.m
Changes (r25147 → r25163):
- Formatting only: the combined "pos=find(...); icemask(...)=-1;" statements in the mask section are split onto separate lines, an extra blank line before the "%eustatic + rigid + elastic run:" comment is removed, and the md.solidearth.settings.rigid/elastic/rotation assignments are split across two lines.
issm/trunk-jpl/test/NightlyRun/test2010.py
Changes (r25158 → r25163):
- Surface load: md.solidearth.deltathickness becomes md.solidearth.surfaceload.icethicknesschange, both where it is initialized (np.zeros(md.mesh.numberofelements)) and where the loaded elements are selected with np.nonzero.
- Array allocations drop the single-element tuple shape, e.g. np.zeros((md.mesh.numberofvertices, )) becomes np.zeros(md.mesh.numberofvertices); md.dsl.global_average_thermosteric_sea_level_change changes from np.zeros((2, )) to np.zeros((2, 1)), and the other dsl fields follow the plain-integer form.
- Mask section: the element sum uses np.sum instead of the built-in sum, the md.mask.land_levelset assignment is dropped, and the grounding/ice-mask fixes around the loaded elements are restructured (with a commented-out question about whether the np.nonzero lookup needs to be done twice).
- Solution parameters and run flags move under md.solidearth.settings (reltol, abstol, geodetic, rigid, elastic, rotation), with reltol set to np.nan.
- The "New stuff" block (spcthickness, Ngia, Ugia, hydro_rate) is removed.
- The result variable solidearth is renamed to slr, and 'solidearth' becomes 'slr' in field_names; the field_values list is left unchanged.
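A small clarification on the array-shape cleanup in this test: for one-dimensional arrays, passing an integer or a one-element tuple to np.zeros/np.ones yields the same result, so those edits are cosmetic; the tuple form only matters for genuinely multi-dimensional shapes such as the (2, 1) used for the dsl field. A quick sketch:

    # Both calls allocate the same 1-D array of length n; only multi-dimensional
    # shapes such as (2, 1) require the tuple form.
    import numpy as np

    n = 5
    a = np.zeros((n, ))
    b = np.zeros(n)
    assert a.shape == b.shape == (n,)
    assert np.array_equal(a, b)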