Changeset 24313 for issm/trunk/test/NightlyRun/test244.py
Timestamp: 11/01/19 12:01:57 (5 years ago)
Location: issm/trunk
Files: 3 edited
Legend: unchanged lines are shown with a single leading space; lines removed in r24313 are prefixed with '-'; lines added in r24313 are prefixed with '+'.
Edited:
    issm/trunk
    issm/trunk/test  (property svn:mergeinfo changed)
    issm/trunk/test/NightlyRun/test244.py  (see diff below)
issm/trunk/test/NightlyRun/test244.py (r23189 → r24313)

 from dmeth_params_set import *
 
-md = triangle(model(), '../Exp/Square.exp',200000.)
-md = setmask(md, 'all','')
-md = parameterize(md, '../Par/SquareShelf.py')
-md = setflowequation(md, 'SSA','all')
+md = triangle(model(), '../Exp/Square.exp', 300000.)
+md = setmask(md, 'all', '')
+md = parameterize(md, '../Par/SquareShelf.py')
+md = setflowequation(md, 'SSA', 'all')
 md.materials.rho_ice = 910
-md.cluster = generic('name', gethostname(),'np',3)
+md.cluster = generic('name', gethostname(), 'np', 3)
 md.geometry.bed = md.geometry.base
 
 # Use of Gemb method for SMB computation
 md.smb = SMBgemb()
-md.smb.setdefaultparameters(md.mesh,md.geometry)
+md.smb.setdefaultparameters(md.mesh, md.geometry)
+md.smb.dsnowIdx = 0
 
 #load hourly surface forcing date from 1979 to 2009:
-inputs = spio.loadmat('../Data/gemb_input.mat', squeeze_me =True)
+inputs = spio.loadmat('../Data/gemb_input.mat', squeeze_me=True)
 
 #setup the inputs:
-md.smb.Ta = np.append(np.tile(np.conjugate(inputs['Ta0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.V = np.append(np.tile(np.conjugate(inputs['V0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.dswrf = np.append(np.tile(np.conjugate(inputs['dsw0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.dlwrf = np.append(np.tile(np.conjugate(inputs['dlw0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.P = np.append(np.tile(np.conjugate(inputs['P0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.eAir = np.append(np.tile(np.conjugate(inputs['eAir0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.pAir = np.append(np.tile(np.conjugate(inputs['pAir0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.pAir = np.append(np.tile(np.conjugate(inputs['pAir0']), (md.mesh.numberofelements,1)),np.conjugate([inputs['dateN']]),axis=0)
-md.smb.Vz = np.tile(np.conjugate(inputs['LP']['Vz']), (md.mesh.numberofelements,1)).flatten()
-md.smb.Tz = np.tile(np.conjugate(inputs['LP']['Tz']), (md.mesh.numberofelements,1)).flatten()
-md.smb.Tmean = np.tile(np.conjugate(inputs['LP']['Tmean']), (md.mesh.numberofelements,1)).flatten()
-md.smb.C = np.tile(np.conjugate(inputs['LP']['C']), (md.mesh.numberofelements,1)).flatten()
+md.smb.Ta = np.append(np.tile(np.conjugate(inputs['Ta0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.V = np.append(np.tile(np.conjugate(inputs['V0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.dswrf = np.append(np.tile(np.conjugate(inputs['dsw0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.dlwrf = np.append(np.tile(np.conjugate(inputs['dlw0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.P = np.append(np.tile(np.conjugate(inputs['P0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.eAir = np.append(np.tile(np.conjugate(inputs['eAir0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.pAir = np.append(np.tile(np.conjugate(inputs['pAir0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.pAir = np.append(np.tile(np.conjugate(inputs['pAir0']), (md.mesh.numberofelements, 1)), np.conjugate([inputs['dateN']]), axis=0)
+md.smb.Vz = np.tile(np.conjugate(inputs['LP']['Vz']), (md.mesh.numberofelements, 1)).flatten()
+md.smb.Tz = np.tile(np.conjugate(inputs['LP']['Tz']), (md.mesh.numberofelements, 1)).flatten()
+md.smb.Tmean = np.tile(np.conjugate(inputs['LP']['Tmean']), (md.mesh.numberofelements, 1)).flatten()
+md.smb.C = np.tile(np.conjugate(inputs['LP']['C']), (md.mesh.numberofelements, 1)).flatten()
 
 #smb settings
-md.smb.requested_outputs = ['SmbDz', 'SmbT','SmbD','SmbRe','SmbGdn','SmbGsp','SmbEC','SmbA','SmbMassBalance']
+md.smb.requested_outputs = ['SmbDz', 'SmbT', 'SmbD', 'SmbRe', 'SmbGdn', 'SmbGsp', 'SmbEC', 'SmbA', 'SmbMassBalance']
 
 #only run smb core:
 md.transient.isstressbalance = 0
 md.transient.ismasstransport = 1
 md.transient.isthermal = 0
 
 #time stepping:
 md.timestepping.start_time = 1965.
 md.timestepping.final_time = 1965.75
-md.timestepping.time_step = 1. /365.0
+md.timestepping.time_step = 1. / 365.0
 md.timestepping.interp_forcings = 0.
 
…
 #partitioning
 md.qmu.numberofpartitions = md.mesh.numberofelements
-md = partitioner(md, 'package','linear')
-md.qmu.partition = (md.qmu.partition - 1).reshape(-1,1).T
+md = partitioner(md, 'package', 'linear', 'type', 'element')
+md.qmu.epartition = (md.qmu.epartition - 1)
 
 #variables
-md.qmu.variables.surface_mass_balanceC = normal_uncertain.normal_uncertain('scaled_SmbC', 1,0.5)
+md.qmu.variables.surface_mass_balanceC = normal_uncertain.normal_uncertain('scaled_SmbC', 1, 0.5)
 Tmin = 273.
-telms = np.atleast_2d(np.min(md.smb.Ta[0:-1, :],1))
+telms = np.atleast_2d(np.min(md.smb.Ta[0:-1, :], 1))
 mint_on_partition = telms.flatten()
 for pa in range(np.size(mint_on_partition)):
-    vi = np.where(md.qmu.partition == pa)
-    mint = telms[vi]*1.05
-    pos = np.where(mint < Tmin)
-    mint[pos] = Tmin
-    mint_on_partition[pa] = max(mint/telms[vi])
+    vi = np.where(md.qmu.epartition == pa)
+    mint = telms[0, vi] * 1.05
+    pos = np.where(mint < Tmin)
+    mint[pos] = Tmin
+    mint_on_partition[pa] = max(mint / telms[0, vi])
 
 mint_on_partition[np.where(np.isnan(mint_on_partition))] = 10**-10
-md.qmu.variables.surface_mass_balanceTa = uniform_uncertain.uniform_uncertain('scaled_SmbTa', 1,0.05)
+md.qmu.variables.surface_mass_balanceTa = uniform_uncertain.uniform_uncertain('scaled_SmbTa', 1, 0.05)
 md.qmu.variables.surface_mass_balanceTa[0].lower = 0.95
-md.qmu.variables.surface_mass_balanceTa[0].upper = np.maximum(np.minimum(np.maximum(1.05, mint_on_partition),0.9999),0.0001)
+md.qmu.variables.surface_mass_balanceTa[0].upper = np.maximum(np.minimum(np.maximum(1.05, mint_on_partition), 0.9999), 0.0001)
 
 #responses
-md.qmu.responses.IceVolume = response_function.response_function('IceVolume', [],[0.0001,0.001,0.01,0.25,0.5,0.75,0.99,0.999,0.9999])
-md.qmu.responses.IceMass = response_function.response_function('IceMass', [],[0.0001,0.001,0.01,0.25,0.5,0.75,0.99,0.999,0.9999])
-md.qmu.responses.TotalSmb = response_function.response_function('TotalSmb', [],[0.0001,0.001,0.01,0.25,0.5,0.75,0.99,0.999,0.9999])
+md.qmu.responses.IceVolume = response_function.response_function('IceVolume', [], [0.0001, 0.001, 0.01, 0.25, 0.5, 0.75, 0.99, 0.999, 0.9999])
+md.qmu.responses.IceMass = response_function.response_function('IceMass', [], [0.0001, 0.001, 0.01, 0.25, 0.5, 0.75, 0.99, 0.999, 0.9999])
+md.qmu.responses.TotalSmb = response_function.response_function('TotalSmb', [], [0.0001, 0.001, 0.01, 0.25, 0.5, 0.75, 0.99, 0.999, 0.9999])
 
 # nond_sampling study
 md.qmu.method = dakota_method.dakota_method('nond_samp')
-md.qmu.method = dmeth_params_set(md.qmu.method, 'seed',1234,'samples',3,'sample_type','lhs')
+md.qmu.method = dmeth_params_set(md.qmu.method, 'seed', 1234, 'samples', 3, 'sample_type', 'lhs')
 dver = str(version)
-if ((int(dver[0]) == 4 and int(dver[2]) >2) or int(dver[0])>4):
-    md.qmu.method = dmeth_params_set(md.qmu.method,'rng','rnum2')
+if ((int(dver[0]) == 4 and int(dver[2]) > 2) or int(dver[0]) > 4):
+    md.qmu.method = dmeth_params_set(md.qmu.method, 'rng', 'rnum2')
 
 #parameters
…
 
 if version >= 6:
     md.qmu.params.analysis_driver = 'matlab'
     md.qmu.params.evaluation_scheduling = 'master'
     md.qmu.params.processors_per_evaluation = 2
 else:
     md.qmu.params.analysis_driver = 'stressbalance'
     md.qmu.params.evaluation_concurrency = 1
 
-md.stressbalance.reltol = 10**-5 #tighten for qmu analyses
-md.transient.requested_outputs = ['IceVolume', 'TotalSmb','IceMass']
+md.stressbalance.reltol = 10**-5  #tighten for qmu analyses
+md.transient.requested_outputs = ['IceVolume', 'TotalSmb', 'IceMass']
 
 #solve
-md.verbose = verbose('000000000')
-md = solve(md, 'Transient','overwrite','y')
+md.verbose = verbose('000000000')  # this line is recommended
+md = solve(md, 'Transient', 'overwrite', 'y')
 md.qmu.results = md.results.dakota
 
…
 md.results.dakota.moments = []
 for i in range(3):
     md.results.dakota.moments.append(md.results.dakota.dresp_out[i].mean)
 
 for i in range(3):
     md.results.dakota.moments.append(md.results.dakota.dresp_out[i].stddev)
 
 field_names = ['moments']
 field_tolerances = [1e-11]
 field_values = [md.results.dakota.moments]
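For readers skimming the diff: the reworked SMB setup builds, for each forcing field, a 2-D array whose rows repeat a single input time series over every mesh element and whose last row carries the forcing times. The sketch below is illustrative only and is not part of the changeset; the sizes and the names n_elements, ta0 and date_n are made up, and the np.conjugate calls from the script are omitted since they leave real-valued data unchanged.

import numpy as np

n_elements = 4                                   # stands in for md.mesh.numberofelements
ta0 = np.array([268.0, 270.5, 271.2])            # one air-temperature series (K), one value per step
date_n = np.array([1965.00, 1965.25, 1965.50])   # time stamps for each forcing step

# Repeat the series over every element, then append the time stamps as a
# final row, mirroring the np.tile/np.append pattern used for md.smb.Ta above.
ta = np.append(np.tile(ta0, (n_elements, 1)), [date_n], axis=0)

print(ta.shape)   # (5, 3): one row per element plus the appended time row
print(ta[-1])     # the time stamps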