Changeset 25010
- Timestamp: 06/11/20 10:32:01 (5 years ago)
- Location: issm/trunk-jpl
- Files: 15 edited
Legend:
- Unmodified
- Added
- Removed
issm/trunk-jpl/src/m/classes/qmu.py
r24593 r25010 27 27 self.params = OrderedStruct() 28 28 self.results = OrderedDict() 29 self.vpartition = float('NaN')30 self.epartition = float('NaN')31 29 self.numberofpartitions = 0 32 30 self.numberofresponses = 0 … … 78 76 s += " method : '%s'\n" % (method.method) 79 77 80 # params could behave a number of forms (mainly 1 struct or many)78 # params could have a number of forms (mainly 1 struct or many) 81 79 if type(self.params) == OrderedStruct: 82 80 params = [self.params] … … 184 182 WriteData(fid, prefix, 'data', False, 'name', 'md.qmu.mass_flux_segments_present', 'format', 'Boolean') 185 183 return 186 WriteData(fid, prefix, 'object', self, 'fieldname', 'vpartition', 'format', 'DoubleMat', 'mattype', 2)187 WriteData(fid, prefix, 'object', self, 'fieldname', 'epartition', 'format', 'DoubleMat', 'mattype', 2)188 184 WriteData(fid, prefix, 'object', self, 'fieldname', 'numberofpartitions', 'format', 'Integer') 189 185 WriteData(fid, prefix, 'object', self, 'fieldname', 'numberofresponses', 'format', 'Integer') -
issm/trunk-jpl/test/NightlyRun/test218.py
r24870 r25010 72 72 73 73 #partitioning 74 md.qmu.numberofpartitions = md.mesh.numberofvertices 75 md = partitioner(md, 'package', 'linear', 'npart', md.qmu.numberofpartitions) 76 md.qmu.vpartition = md.qmu.vpartition - 1 74 npart = md.mesh.numberofvertices 75 partition = partitioner(md, 'package', 'linear', 'npart', npart) - 1 77 76 78 77 #Dakota options … … 88 87 'mean', np.ones(md.mesh.numberofvertices), 89 88 'stddev', .05 * np.ones(md.mesh.numberofvertices), 90 'partition', md.qmu.vpartition89 'partition', partition 91 90 ) 92 91 … … 121 120 #Fields and tolerances to track changes 122 121 md.qmu.results = md.results.dakota 123 md.results.dakota.importancefactors = importancefactors(md, 'scaled_MaterialsRheologyB', 'MaxVel' ).reshape(-1, 1)122 md.results.dakota.importancefactors = importancefactors(md, 'scaled_MaterialsRheologyB', 'MaxVel', partition).reshape(-1, 1) 124 123 field_names = ['importancefactors'] 125 124 field_tolerances = [1e-10] -
issm/trunk-jpl/test/NightlyRun/test234.py
r24870 r25010 38 38 39 39 #partitioning 40 md.qmu.numberofpartitions = 20 41 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 42 md.qmu.vpartition = md.qmu.vpartition - 1 40 npart = 20 41 partition = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 43 42 44 43 #variables 45 44 md.qmu.variables.surface_mass_balance = normal_uncertain.normal_uncertain( 46 45 'descriptor', 'scaled_SmbMassBalance', 47 'mean', np.ones( md.qmu.numberofpartitions),48 'stddev', .1 * np.ones( md.qmu.numberofpartitions),49 'partition', md.qmu.vpartition46 'mean', np.ones(npart), 47 'stddev', .1 * np.ones(npart), 48 'partition', partition 50 49 ) 51 50 -
issm/trunk-jpl/test/NightlyRun/test250.py
r24870 r25010 37 37 #partitioning 38 38 md.qmu.numberofpartitions = md.mesh.numberofvertices 39 md = partitioner(md, 'package', 'linear') 39 partition = partitioner(md, 'package', 'linear', 'npart', md.mesh.numberofvertices) - 1 40 40 md.qmu.vpartition = md.qmu.vpartition - 1 41 41 … … 43 43 md.qmu.variables.surface_mass_balance = normal_uncertain.normal_uncertain( 44 44 'descriptor', 'scaled_SmbMassBalance', 45 'mean', np.ones(md. qmu.numberofpartitions),46 'stddev', .1 * np.ones(md. qmu.numberofpartitions),47 'partition', md.qmu.vpartition45 'mean', np.ones(md.mesh.numberofvertices), 46 'stddev', .1 * np.ones(md.mesh.numberofvertices), 47 'partition', partition 48 48 ) 49 49 -
issm/trunk-jpl/test/NightlyRun/test251.py
r24870 r25010 37 37 38 38 #partitioning 39 md.qmu.numberofpartitions = md.mesh.numberofvertices 40 md = partitioner(md, 'package', 'linear') 41 md.qmu.vpartition = md.qmu.vpartition - 1 39 partition = partitioner(md, 'package', 'linear', 'npart', md.mesh.numberofvertices) - 1 42 40 43 41 #variables … … 46 44 'mean', np.ones(md.qmu.numberofpartitions), 47 45 'stddev', 100 * np.ones(md.qmu.numberofpartitions), 48 'partition', md.qmu.vpartition46 'partition', partition 49 47 ) 50 48 -
issm/trunk-jpl/test/NightlyRun/test412.py
r24870 r25010 19 19 20 20 #partitioning 21 md.qmu.numberofpartitions = md.mesh.numberofvertices 22 md = partitioner(md, 'package', 'linear', 'npart', md.qmu.numberofpartitions) 23 md.qmu.vpartition = md.qmu.vpartition - 1 21 md = partitioner(md, 'package', 'linear', 'npart', md.mesh.numberofvertices) - 1 24 22 md.qmu.isdakota = 1 25 23 … … 40 38 'mean', np.ones(md.mesh.numberofvertices), 41 39 'stddev', .01 * np.ones(md.mesh.numberofvertices), 42 'partition', md.qmu.vpartition40 'partition', partition 43 41 ) 44 42 … … 70 68 #Fields and tolerances to track changes 71 69 md.qmu.results = md.results.dakota 72 md.results.dakota.importancefactors = importancefactors(md, 'scaled_FrictionCoefficient', 'MaxVel' ).T70 md.results.dakota.importancefactors = importancefactors(md, 'scaled_FrictionCoefficient', 'MaxVel', partition).T 73 71 field_names = ['importancefactors'] 74 72 field_tolerances = [1e-10] -
issm/trunk-jpl/test/NightlyRun/test413.py
r24872 r25010 24 24 #partitioning 25 25 md.qmu.numberofpartitions = 20 26 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 27 md.qmu.vpartition = md.qmu.vpartition - 1 26 partition = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 28 27 29 28 #variables … … 35 34 md.qmu.variables.drag_coefficient = normal_uncertain.normal_uncertain( 36 35 'descriptor', 'scaled_FrictionCoefficient', 37 'mean', np.ones( md.qmu.numberofpartitions),38 'stddev', .01 * np.ones( md.qmu.numberofpartitions),39 'partition', md.qmu.vpartition36 'mean', np.ones(npart), 37 'stddev', .01 * np.ones(npart), 38 'partition', partition 40 39 ) 41 40 … … 69 68 #Fields and tolerances to track changes 70 69 md.qmu.results = md.results.dakota 71 md.results.dakota.importancefactors = importancefactors(md, 'scaled_FrictionCoefficient', 'MaxVel' ).T70 md.results.dakota.importancefactors = importancefactors(md, 'scaled_FrictionCoefficient', 'MaxVel', partition).T 72 71 field_names = ['importancefactors'] 73 72 field_tolerances = [1e-10] -
issm/trunk-jpl/test/NightlyRun/test414.py
r24870 r25010 32 32 33 33 #partitioning 34 md.qmu.numberofpartitions = 20 35 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 36 md.qmu.vpartition = md.qmu.vpartition - 1 34 npart = 20 35 partition = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 37 36 38 37 #variables 39 38 md.qmu.variables.drag_coefficient = normal_uncertain.normal_uncertain( 40 39 'descriptor', 'scaled_FrictionCoefficient', 41 'mean', np.ones( md.qmu.numberofpartitions),42 'stddev', .01 * np.ones( md.qmu.numberofpartitions),43 'partition', md.qmu.vpartition40 'mean', np.ones(npart), 41 'stddev', .01 * np.ones(npart), 42 'partition', partition 44 43 ) 45 44 -
issm/trunk-jpl/test/NightlyRun/test417.py
r24870 r25010 32 32 33 33 #partitioning 34 md.qmu.numberofpartitions= 2035 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 36 md.qmu. vpartition = md.qmu.vpartition -134 npart = 20 35 partition = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 36 md.qmu.isdakota = 1 37 37 38 38 #variables 39 39 md.qmu.variables.drag_coefficient = normal_uncertain.normal_uncertain( 40 40 'descriptor', 'scaled_FrictionCoefficient', 41 'mean', np.ones( md.qmu.numberofpartitions),42 'stddev', .01 * np.ones( md.qmu.numberofpartitions),43 'partition', md.qmu.vpartition41 'mean', np.ones(npart), 42 'stddev', .01 * np.ones(npart), 43 'partition', partition 44 44 ) 45 45 … … 59 59 md.qmu.mass_flux_profile_directory = getcwd() 60 60 61 # method61 # nond_sampling study 62 62 md.qmu.method = dakota_method.dakota_method('nond_samp') 63 63 md.qmu.method = dmeth_params_set(md.qmu.method, 'seed', 1234, 'samples', 20, 'sample_type', 'lhs') 64 64 65 # parameters65 # parameters 66 66 md.qmu.params.interval_type = 'forward' 67 67 md.qmu.params.direct = True … … 75 75 md.qmu.params.analysis_driver = 'stressbalance' 76 76 md.qmu.params.evaluation_concurrency = 1 77 78 #partitioning79 md.qmu.numberofpartitions = 2080 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on')81 md.qmu.vpartition = md.qmu.vpartition - 182 md.qmu.isdakota = 183 77 84 78 md.stressbalance.reltol = 10**-5 #tighten for qmu analyses -
issm/trunk-jpl/test/NightlyRun/test418.py
r24834 r25010 22 22 23 23 #partitioning 24 md.qmu.numberofpartitions= 10024 npart = 100 25 25 26 # Partitioner seam d to generate the following message,26 # Partitioner seamed to generate the following message, 27 27 # 28 28 # corrupted size vs. prev_size … … 36 36 # - Run valgrind and fix the above 37 37 # 38 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions) 39 md.qmu.vpartition = md.qmu.vpartition - 1 38 partition = partitioner(md, 'package', 'chaco', 'npart', npart) - 1 40 39 41 40 vector = np.arange(1, 1 + md.mesh.numberofvertices, 1).reshape(-1, 1) 42 41 # double check this before committing: 43 vector_on_partition = AreaAverageOntoPartition(md, vector )44 vector_on_nodes = vector_on_partition[ md.qmu.vpartition]42 vector_on_partition = AreaAverageOntoPartition(md, vector, partition) 43 vector_on_nodes = vector_on_partition[partition + 1] 45 44 46 45 field_names = ['vector_on_nodes'] -
issm/trunk-jpl/test/NightlyRun/test420.m
r25005 r25010 48 48 49 49 %test on thickness 50 h=zeros( part,1);50 h=zeros(npart,1); 51 51 for i=1:npart, 52 52 h(i)=md.qmu.results.dresp_out(i).mean; -
issm/trunk-jpl/test/NightlyRun/test420.py
r24870 r25010 17 17 18 18 #partitioning 19 md.qmu.numberofpartitions = 10 20 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions) 21 md.qmu.vpartition = md.qmu.vpartition - 1 19 npart = 10 20 partition = partitioner(md, 'package', 'chaco', 'npart', npart) - 1 22 21 md.qmu.isdakota = 1 23 22 … … 38 37 md.qmu.responses.MaxVel = response_function.response_function( 39 38 'descriptor', 'scaled_Thickness', 40 'partition', md.qmu.vpartition39 'partition', partition 41 40 ) 42 41 … … 65 64 66 65 #test on thickness 67 h = np.zeros( (md.qmu.numberofpartitions, ))68 for i in range( md.qmu.numberofpartitions):66 h = np.zeros(npart) 67 for i in range(npart): 69 68 h[i] = md.qmu.results.dresp_out[i].mean 70 69 71 70 #project onto grid 72 thickness = h[(md.qmu.vpartition ).flatten()]71 thickness = h[(md.qmu.vpartition + 1).flatten()] 73 72 74 73 #Fields and tolerances to track changes -
issm/trunk-jpl/test/NightlyRun/test440.py
r24870 r25010 18 18 19 19 #partitioning 20 md.qmu.numberofpartitions = md.mesh.numberofvertices 21 md = partitioner(md, 'package', 'linear') 22 md.qmu.vpartition = md.qmu.vpartition - 1 20 npart = md.mesh.numberofvertices 21 partition = partitioner(md, 'package', 'linear', 'npart', npart) - 1 23 22 md.qmu.isdakota = 1 24 23 … … 39 38 md.qmu.responses.MaxVel = response_function.response_function( 40 39 'descriptor', 'scaled_Thickness', 41 'partition', md.qmu.vpartition40 'partition', partition 42 41 ) 43 42 … … 66 65 67 66 #test on thickness 68 h = np.zeros( md.qmu.numberofpartitions)69 for i in range( md.qmu.numberofpartitions):67 h = np.zeros(npart) 68 for i in range(npart): 70 69 h[i] = md.qmu.results.dresp_out[i].mean 71 70 72 71 #project onto grid 73 thickness = h[ md.qmu.vpartition]72 thickness = h[partition] 74 73 75 74 #Fields and tolerances to track changes -
issm/trunk-jpl/test/NightlyRun/test444.py
r24870 r25010 66 66 67 67 #partitioning 68 md.qmu.numberofpartitions = 10 69 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 70 md.qmu.vpartition = md.qmu.vpartition - 1 68 npart = 10 69 partition = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 71 70 md.qmu.isdakota = 1 72 71 … … 74 73 md.qmu.variables.drag_coefficient = normal_uncertain.normal_uncertain( 75 74 'descriptor', 'scaled_BasalforcingsFloatingiceMeltingRate', 76 'mean', np.ones( md.qmu.numberofpartitions),77 'stddev', .1 * np.ones( md.qmu.numberofpartitions),78 'partition', md.qmu.vpartition75 'mean', np.ones(npart), 76 'stddev', .1 * np.ones(npart), 77 'partition', partition 79 78 ) 80 79 … … 110 109 md.qmu.params.evaluation_concurrency = 1 111 110 112 #partitioning113 md.qmu.numberofpartitions = 10114 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on')115 md.qmu.vpartition = md.qmu.vpartition - 1116 md.qmu.isdakota = 1117 118 111 md.stressbalance.reltol = 10**-5 #tighten for qmu analyses 119 112 -
issm/trunk-jpl/test/NightlyRun/test445.py
r24870 r25010 35 35 36 36 #partitioning 37 md.qmu.numberofpartitions = 10 38 md = partitioner(md, 'package', 'chaco', 'npart', md.qmu.numberofpartitions, 'weighting', 'on') 39 md.qmu.vpartition = md.qmu.vpartition - 1 37 npart = 10 38 partitioner = partitioner(md, 'package', 'chaco', 'npart', npart, 'weighting', 'on') - 1 40 39 md.qmu.isdakota = 1 41 40 … … 43 42 md.qmu.variables.neff = normal_uncertain.normal_uncertain( 44 43 'descriptor', 'scaled_FrictionEffectivePressure', 45 'mean', np.ones( md.qmu.numberofpartitions),46 'stddev', .05 * np .ones(md.qmu.numberofpartitions),47 'partition', md.qmu.vpartition44 'mean', np.ones(npart), 45 'stddev', .05 * npart), 46 'partition', partition 48 47 ) 49 48 md.qmu.variables.geoflux = normal_uncertain.normal_uncertain( 50 49 'descriptor', 'scaled_BasalforcingsGeothermalflux', 51 'mean', np.ones( md.qmu.numberofpartitions),52 'stddev', .05 * np.ones( md.qmu.numberofpartitions),53 'partition', md.qmu.vpartition50 'mean', np.ones(npart), 51 'stddev', .05 * np.ones(npart), 52 'partition', partition 54 53 ) 55 54
Note: See TracChangeset for help on using the changeset viewer.