source: issm/oecreview/Archive/24684-25833/ISSM-25089-25090.diff@28275

Last change on this file since 28275 was 25834, checked in 4 years ago
File size: 28.3 KB
../trunk-jpl/src/m/classes/qmu.m

 			end
 		end
 		if strcmpi(classlist{i},'uniform_uncertain')
-			if
+			if (h~=0),
 				error('uniform_uncertain variables should be declared before histogram_bin uncertain variables');
 			else
 				u=1;
../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py (new file)

import numpy as np


class histogram_bin_uncertain(object):
    '''
    HISTOGRAM_BIN_UNCERTAIN class definition

    Usage:
        [hbu] = histogram_bin_uncertain(
            'descriptor', descriptor,
            'pairs_per_variable', pairs_per_variable,
            'abscissas', abscissas,
            'counts', counts
        )

    where the required args are:
        descriptor (char, description, '')
        pairs_per_variable (double list, [])
        abscissas (double list, [])
        counts (int list, [])

    NOTE: A call to the constructor with zero arguments will return a default
    instance; one argument of the class copies the instance; three or more
    arguments constructs a new instance from the arguments.
    '''

    def __init__(self): #{{{
        self.descriptor = ''
        self.pairs_per_variable = []
        self.abscissas = []
        self.counts = []
    #}}}

    @staticmethod
    def histogram_bin_uncertain(*args): #{{{
        nargin = len(args)

        # create a default object
        if nargin == 0:
            return histogram_bin_uncertain()

        # copy the object
        elif nargin == 1:
            if isinstance(args[0], histogram_bin_uncertain):
                hbu = args[0]
            else:
                raise Exception("Object {} is a {} class object, not 'histogram_bin_uncertain'.".format(str(args[0]), str(type(args[0]))))

        elif nargin == 2 or nargin == 3:
            raise Exception("Construction of 'histogram_bin_uncertain' class object requires at least {} inputs.".format(4))

        # create the object from the input
        elif nargin == 4:
            hbu = histogram_bin_uncertain()

            #recover options:
            options = pairoptions(*args)

            #initialize fields:
            hbu.descriptor = options.getfieldvalue('descriptor')
            hbu.pairs_per_variable = options.getfieldvalue('pairs_per_variable')
            hbu.abscissas = options.getfieldvalue('abscissas')
            hbu.counts = options.getfieldvalue('counts')

        else:
            raise Exception("Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists")
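The pairs_per_variable, abscissas, and counts fields appear to mirror Dakota's bin-histogram specification. A small, illustrative sketch of what those lists hold for a single three-bin variable (not part of the changeset; the trailing zero count follows what is, as far as I know, the usual Dakota convention and should be treated as an assumption):

# Illustrative only: field layout for one histogram_bin_uncertain variable.
abscissas = [0.0, 1.0, 2.0, 3.0]       # bin edges for this variable
counts = [10, 30, 20, 0]               # relative counts per bin; trailing 0 closes the histogram (assumed Dakota convention)
pairs_per_variable = [len(abscissas)]  # number of (abscissa, count) pairs contributed by this variable

# a sanity check a consumer of the class would expect to hold
assert len(abscissas) == len(counts) == pairs_per_variable[0]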
../trunk-jpl/src/m/classes/qmu/normal_uncertain.py

     NORMAL_UNCERTAIN class definition

     Usage:
-        nuv = normal_uncertain('descriptor',descriptor,'mean',mean,'stddev',stddev,'partition',partition)
-        where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self
-        explanatory. partition is the partition vector for distributed variables. Can be a partition
-        vector over elements or vertices.
+        [nuv] = normal_uncertain(
+            'descriptor', descriptor,
+            'mean', mean,
+            'stddev', stddev,
+            'partition', partition
+        )

+    where nuv is the normal_uncertain object returned by the constructor,
+    mean and stddev are self explanatory, and partition is the partition
+    vector for distributed variables. Can be a partition vector over
+    elements or vertices.
+
     Example:
         md.qmu.variables.rheology=normal_uncertain(
             'descriptor','RheologyBBar',
…
             'partition',vpartition
         )
     '''
-    def __init__(self):
+    def __init__(self): #{{{
         self.descriptor = ''
         self.mean = np.NaN
         self.stddev = np.NaN
         self.partition = []
         self.nsteps = 0
+    #}}}

     @staticmethod
     def normal_uncertain(*args): #{{{
…
             if isinstance(args[0], normal_uncertain):
                 nuv = args[0]
             else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')
+                raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')

         # create the object from the input
         else:
-            # lines differ here in other classes / tests; see asizec problem in notes
             nuv = normal_uncertain()

             #recover options:
…
             nuv.mean = options.getfieldvalue('mean')
             nuv.stddev = options.getfieldvalue('stddev')

-            #if the variable is scaled, a partition vector should have been supplied, and
-            # that partition vector should have as many partitions as the mean and stddev
-            # vectors:
+            #if the variable is scaled, a partition vector should have been
+            #supplied, and that partition vector should have as many partitions
+            #as the mean and stddev vectors:
             if nuv.isscaled():
                 nuv.partition = options.getfieldvalue('partition')
                 nuv.nsteps = options.getfieldvalue('nsteps', 1)
                 npart = qmupart2npart(nuv.partition)
                 if npart != nuv.mean.shape[0]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor)
                 if npart != nuv.stddev.shape[0]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor)
                 if nuv.nsteps != nuv.mean.shape[1]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor)
                 if nuv.nsteps != nuv.stddev.shape[1]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor)

         return [nuv] # Always return a list, so we have something akin to a MATLAB single row matrix
     #}}}
…

     def checkconsistency(self, md, solution, analyses): #{{{
         md = checkfield(md, 'field', self.mean, 'fieldname', 'normal_uncertain.mean', 'NaN', 1, 'Inf', 1, '>=', 0)
-        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0, 'numel', len(self.mean))
+        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0)
         if self.isscaled():
             if self.partition == []:
-                raise RuntimeError("normal_uncertain is a scaled variable, but it's missing a partition vector")
+                raise Exception("normal_uncertain is a scaled variable, but it's missing a partition vector")
             #better have a partition vector that has as many partitions as stddev's size:
             if self.stddev.shape[0] != partition_npart(self.partititon):
-                raise RuntimeError("normal_uncertain error message: row size of stddev and partition size should be identical")
+                raise Exception("normal_uncertain error message: row size of stddev and partition size should be identical")
             if self.mean.shape[0] != partition_npart(self.partition):
-                raise RuntimeError("normal_uncertain error message: row size of mean and partition size should be identical")
+                raise Exception("normal_uncertain error message: row size of mean and partition size should be identical")
             #we need as many steps in stddev and mean as there are in time steps
             if self.stddev.shape[1] != self.nsteps:
-                raise RuntimeError("normal_uncertain error message: col size of stddev and partition size should be identical")
+                raise Exception("normal_uncertain error message: col size of stddev and partition size should be identical")
             if self.mean.shape[1] != self.nsteps:
-                raise RuntimeError("normal_uncertain error message: col size of mean and partition size should be identical")
+                raise Exception("normal_uncertain error message: col size of mean and partition size should be identical")
             md = checkfield(md, 'field', self.partition, 'fieldname', 'normal_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
             if self.partition.shape[1] > 1:
-                raise RuntimeError("normal_uncertain error message: partition should be a column vector")
+                raise Exception("normal_uncertain error message: partition should be a column vector")
             partcheck = np.unique(self.partition)
             partmin = min(partcheck)
             partmax = max(partcheck)
             if partmax < -1:
-                raise RuntimeError("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+                raise Exception("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
             nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
             if partmax > nmax:
-                raise RuntimeError("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+                raise Exception("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
     #}}}

     #virtual functions needed by qmu processing algorithms
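The constructor and checkconsistency checks above boil down to an array-layout contract for scaled variables: one row per partition, one column per time step. A minimal, standalone sketch of that layout (illustrative only; the names and values are assumptions, not part of the changeset):

import numpy as np

# Illustrative only: a scaled variable over 3 partitions and 2 time steps must
# carry mean/stddev arrays shaped (npart, nsteps), which is exactly what the
# new shape checks verify.
npart, nsteps = 3, 2
mean = np.full((npart, nsteps), 1.8e8)     # one row per partition, one column per step
stddev = np.full((npart, nsteps), 0.1e8)   # same layout as mean

assert mean.shape == (npart, nsteps) and stddev.shape == (npart, nsteps)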
../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m

-% % definition for the histogram_bin_uncertain class.
+%HISTOGRAM BIN UNCERTAIN class definition
 %
-%
+%   [hbu]=histogram_bin_uncertain(varargin)
 %
-%
-%   descriptor(char, description, '')
-%   pairs_per_variable(double vector, [])
-%   abscissas(double vector, [])
-%   counts(int vector, [])
+%   where the required varargin are:
+%      descriptor (char, description, '')
+%      pairs_per_variable (double vector, [])
+%      abscissas (double vector, [])
+%      counts (int vector, [])
 %
-%   note that zero arguments constructs a default instance; one
-%   argument of the class copies the instance; and three or more
-%
+%   NOTE: A call to the constructor with zero arguments will return a default
+%   instance; one argument of the class copies the instance; three or more
+%   arguments constructs a new instance from the arguments.
 %
 classdef histogram_bin_uncertain
 	properties
 		descriptor='';
 		pairs_per_variable=[];
 		abscissas = [];
 		counts = [];
 	end
-
 	methods
 		function [hbu]=histogram_bin_uncertain(varargin) % {{{
-
 			switch nargin
 				case 0 % create a default object
 				case 1 % copy the object
 					if isa(varargin{1},'histogram_bin_uncertain')
 						hbu=varargin{1};
 					else
 						error('Object ''%s'' is a ''%s'' class object, not ''%s''.',...
 							inputname(1),class(varargin{1}),'histogram_bin_uncertain');
 					end
 				case {2,3} % not enough arguments
-					error('Construction of ''%s'' class object requires at least %d inputs.',...
-						'histogram_bin_uncertain',4)
+					error('Construction of ''histogram_bin_uncertain'' class object requires at least %d inputs.',4)
 				case 4 %
 					% create the object from the input
 					hbu = histogram_bin_uncertain;
 					hbu.descriptor=varargin{1};
…
 					hbu.abscissas=varargin{3};
 					hbu.counts=varargin{4};

 				otherwise
-					error('Construction of histogram_bin_uncertain class object requires three arguments, descriptor, abscissas and counts');
+					error('Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists');
 			end
-
 		end % }}}
 		function md=checkconsistency(self,md,solution,analyses) % {{{
 		end % }}}
 		function []=disp(hbu) % {{{
-
 			% display the object
-
 			disp(sprintf('\n'));
 			for i=1:numel(hbu)
 				disp(sprintf('class ''%s'' object ''%s%s'' = \n',...
 					class(hbu),inputname(1),string_dim(hbu,i)));
 				disp(sprintf(' descriptor: ''%s''' ,hbu(i).descriptor));
…
 				disp(sprintf(' abscissas: %g' ,hbu(i).abscissas));
 				disp(sprintf(' counts: %g' ,hbu(i).counts));
 			end
-
 		end % }}}
-		function [desc]
+		function [desc]=prop_desc(hbu,dstr) % {{{
 			desc=cell(1,numel(hbu));
 			for i=1:numel(hbu)
 				if ~isempty(hbu(i).descriptor)
…
 		function [initpt]=prop_initpt(hbu) % {{{
 			initpt=[];
 		end % }}}
-		function [lower]
+		function [lower]=prop_lower(hbu) % {{{
 			lower=[];
 		end % }}}
-		function [upper]
+		function [upper]=prop_upper(hbu) % {{{
 			upper=[];
 		end % }}}
-		function [mean]
+		function [mean]=prop_mean(hbu) % {{{
 			mean=[];
 		end % }}}
 		function [stddev]=prop_stddev(hbu) % {{{
…
 		function [initst]=prop_initst(hbu) % {{{
 			initst=[];
 		end % }}}
-		function [stype]
+		function [stype]=prop_stype(hbu) % {{{
 			stype={};
 		end % }}}
-		function [scale]
+		function [scale]=prop_scale(hbu) % {{{
 			scale=[];
 		end % }}}
-		function [abscissas] =prop_abscissas(hbu) % {{{
+		function [abscissas]=prop_abscissas(hbu) % {{{
 			abscissas=[];
 			for i=1:numel(hbu)
 				abscissas=[abscissas hbu(i).abscissas];
 			end
 			abscissas=allequal(abscissas,-Inf);
-
 		end % }}}
 		function [pairs_per_variable] =prop_pairs_per_variable(hbu) % {{{
 			pairs_per_variable=zeros(1,numel(hbu));
 			for i=1:numel(hbu)
…
 			pairs_per_variable=allequal(pairs_per_variable,-Inf);
 		end % }}}
 		function [counts] =prop_counts(hbu) % {{{
 			counts=[];
 			for i=1:numel(hbu)
 				counts=[counts hbu(i).counts];
 			end
 			counts=allequal(counts,-Inf);
-
 		end % }}}
 		function scaled=isscaled(self) % {{{
 			if strncmp(self.descriptor,'scaled_',7),
…
 		end
 	end
 	methods (Static)
 		function []=dakota_write(fidi,dvar) % {{{
+			% collect only the variables of the appropriate class
+			hbu=struc_class(dvar,'histogram_bin_uncertain');

-			% collect only the variables of the appropriate class
-
-			hbu=struc_class(dvar,'histogram_bin_uncertain');
-
 			% write variables
 			vlist_write(fidi,'histogram_bin_uncertain','hbu',hbu);
 		end % }}}
 	end
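The prop_abscissas, prop_pairs_per_variable, and prop_counts accessors above concatenate each object's lists into the flat arrays that get written for Dakota. A rough Python rendering of that aggregation step (illustrative only; the allequal post-processing and the actual Dakota write path are omitted, and the sample values are made up):

# Illustrative only: flatten per-variable histogram data across several
# variables, mirroring what the prop_abscissas / prop_counts accessors assemble.
variables = [
    {'abscissas': [0.0, 1.0, 2.0], 'counts': [5, 10, 0]},
    {'abscissas': [2.0, 4.0],      'counts': [7, 0]},
]
abscissas = [a for v in variables for a in v['abscissas']]
counts = [c for v in variables for c in v['counts']]
pairs_per_variable = [len(v['abscissas']) for v in variables]

print(pairs_per_variable)  # [3, 2]
print(abscissas)           # [0.0, 1.0, 2.0, 2.0, 4.0]
print(counts)              # [5, 10, 0, 7, 0]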
../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py

     UNIFORM_UNCERTAIN class definition

     Usage:
-        uuv= uniform_uncertain(
+        [uuv] = uniform_uncertain(
             'descriptor', descriptor,
             'lower', lower,
             'upper', upper,
…
             'partition', vpartition
         )
     '''
-    def __init__(self):
+    def __init__(self): #{{{
         self.descriptor = ''
         self.lower = -np.Inf
         self.upper = np.Inf
         self.partition = []
         self.nsteps = 0
+    #}}}

     @staticmethod
-    def uniform_uncertain(*args):
+    def uniform_uncertain(*args): #{{{
         nargin = len(args)

         # create a default object
…
             if isinstance(args[0], uniform_uncertain):
                 uuv = args[0]
             else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".')
+                raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".')

         # create the object from the input
         else:
…
             uuv.upper = options.getfieldvalue('upper')

             #if the variable is scaled, a partition vector should have been
-            #supplied, and that partition vector should have as many partitions as
-            # the lower and upper vectors:
+            #supplied, and that partition vector should have as many
+            #partitions as the lower and upper vectors:
             if uuv.isscaled():
                 uuv.partition = options.getfieldvalue('partition')
                 uuv.nsteps = options.getfieldvalue('nsteps', 1)
                 npart = qmupart2npart(uuv.partition)
                 if npart != uuv.upper.shape[0]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
                 if npart != uuv.lower.shape[0]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
                 if uuv.nsteps != uuv.upper.shape[1]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor)
                 if uuv.nsteps != uuv.lower.shape[1]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor)

         return [uuv] # Always return a list, so we have something akin to a MATLAB single row matrix
+    #}}}

     def __repr__(self): #{{{
         string = ' uniform uncertain variable: '
…
         md = checkfield(md, 'field', self.lower, 'fieldname', 'uniform_uncertain.upper', 'NaN', 1, 'Inf', 1, '<', self.upper, 'numel', len(self.upper))
         if self.isscaled():
             if self.partition == []:
-                raise RuntimeError("uniform_uncertain is a scaled variable, but it's missing a partition vector")
+                raise Exception("uniform_uncertain is a scaled variable, but it's missing a partition vector")
             #better have a partition vector that has as many partitions as
             #upper and lower's size:
             if self.upper.shape[0] != partition_npart(self.partititon):
-                raise RuntimeError("uniform_uncertain error message: row size of upper and partition size should be identical")
+                raise Exception("uniform_uncertain error message: row size of upper and partition size should be identical")
             if self.lower.shape[0] != partition_npart(self.partition):
-                raise RuntimeError("uniform_uncertain error message: row size of lower and partition size should be identical")
+                raise Exception("uniform_uncertain error message: row size of lower and partition size should be identical")
             #we need as steps in upper and lower as there are time steps
             if self.stddev.shape[1] != self.nsteps:
-                raise RuntimeError("uniform_uncertain error message: col size of upper and partition size should be identical")
+                raise Exception("uniform_uncertain error message: col size of upper and partition size should be identical")
             if self.mean.shape[1] != self.nsteps:
-                raise RuntimeError("uniform_uncertain error message: col size of lower and partition size should be identical")
+                raise Exception("uniform_uncertain error message: col size of lower and partition size should be identical")
             md = checkfield(md, 'field', self.partition, 'fieldname', 'uniform_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
             if self.partition.shape[1] > 1:
-                raise RuntimeError("uniform_uncertain error message: partition should be a column vector")
+                raise Exception("uniform_uncertain error message: partition should be a column vector")
             partcheck = np.unique(self.partition)
             partmin = min(partcheck)
             partmax = max(partcheck)
             if partmax < -1:
-                raise RuntimeError("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+                raise Exception("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
             nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
             if partmax > nmax:
-                raise RuntimeError("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+                raise Exception("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
     #}}}

     #virtual functions needed by qmu processing algorithms:
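For reference, a sketch of how a non-scaled uniform variable might be declared, following the usage block in the docstring above. The import path, descriptor, and bounds are hypothetical, and an ISSM Python environment (pairoptions and friends on the path) is assumed; this is not part of the changeset:

import numpy as np
from uniform_uncertain import uniform_uncertain  # assumed import path within an ISSM install

# Hypothetical, non-scaled variable: a single scalar bounded between 0.9 and 1.1.
# The keyword-style argument pairs follow the documented usage.
[uuv] = uniform_uncertain.uniform_uncertain(
    'descriptor', 'FrictionCoefficient',
    'lower', 0.9,
    'upper', 1.1
)
print(uuv.descriptor, uuv.lower, uuv.upper)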
../trunk-jpl/src/m/classes/qmu.py

         self.isdakota = 0
         self.output = 0
         self.variables = OrderedStruct()
+        self.correlation_matrix = []
         self.responses = OrderedStruct()
         self.method = OrderedDict()
         self.params = OrderedStruct()
…
             md.checkmessage('in parallel library mode, Dakota needs to run at least one slave on one cpu (md.qmu.params.processors_per_evaluation >= 1)!')

         if np.mod(md.cluster.np - 1, self.params.processors_per_evaluation):
-            md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. Modify md.cluster.np accordingly')
+            #md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. Modify md.cluster.np accordingly')

         # Go through variables and check for consistency
         fv = fieldnames(self.variables)
…
             if hasattr(variable, 'checkconsistency'):
                 variable.checkconsistency(md, solution, analyses)

+        # Go through variables and check that we have normal uncertains first,
+        # then uniform uncertains and finally histogram_bin_uncertain. Indeed,
+        # Dakota will order them this waym, and when we send partitions for
+        # scaled variables, they better show up in the order Dakota is feeding
+        # them to us in InputUpdateFromDakotax!
+        fv = fieldnames(self.variables)
+        classlist = []
+        for i in range(len(fv)):
+            classlist.append(self.variables[fv[i]].__class__.__name__)
+        n = 0
+        u = 0
+        h = 0
+        for i in range(len(classlist)):
+            if classlist[i] == 'normal_uncertain':
+                if u != 0 or h != 0:
+                    raise Exception('normal uncertain variables should be declared before uniform and hhistogram_bin uncertain variables')
+                else:
+                    n = 1
+            if classlist[i] == 'uniform_uncertain':
+                if h != 0:
+                    raise Exception('uniform_uncertain variables should be declared before histogram_bin uncertain variables')
+                else:
+                    u = 1
+            if classlist[i] == 'histogram_bin_uncertain':
+                h = 1
+
         return md
     # }}}
     def marshall(self, prefix, md, fid):  # {{{
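The ordering rule added to checkconsistency can be read as a small state machine over the declared variable classes: normal_uncertain first, then uniform_uncertain, then histogram_bin_uncertain, matching the order in which Dakota feeds variables back. A standalone restatement of the same logic (illustrative only, using plain strings rather than ISSM variable objects; the helper name is hypothetical):

# Illustrative only: the declaration-order rule enforced by the new block above.
def check_declaration_order(classlist):
    rank = {'normal_uncertain': 0, 'uniform_uncertain': 1, 'histogram_bin_uncertain': 2}
    last = -1
    for name in classlist:
        if rank[name] < last:
            raise Exception('{} variables should be declared before any later uncertain class'.format(name))
        last = rank[name]

check_declaration_order(['normal_uncertain', 'uniform_uncertain', 'histogram_bin_uncertain'])  # passes
# check_declaration_order(['uniform_uncertain', 'normal_uncertain'])  # would raise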