[25834]

Index: ../trunk-jpl/src/m/classes/qmu.m
===================================================================
--- ../trunk-jpl/src/m/classes/qmu.m (revision 25089)
+++ ../trunk-jpl/src/m/classes/qmu.m (revision 25090)
@@ -122,7 +122,7 @@
 				end
 			end
 			if strcmpi(classlist{i},'uniform_uncertain')
-				if (h~=0),
+				if (h~=0),
 					error('uniform_uncertain variables should be declared before histogram_bin uncertain variables');
 				else
 					u=1;
Index: ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py
===================================================================
--- ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py (nonexistent)
+++ ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py (revision 25090)
@@ -0,0 +1,66 @@
+import numpy as np
+
+
+class histogram_bin_uncertain(object):
+    '''
+    HISTOGRAM_BIN_UNCERTAIN class definition
+
+    Usage:
+        [hbu] = histogram_bin_uncertain(
+            'descriptor', descriptor,
+            'pairs_per_variable', pairs_per_variable,
+            'abscissas', abscissas,
+            'counts', counts
+        )
+
+    where the required args are:
+        descriptor          (char, description, '')
+        pairs_per_variable  (double list, [])
+        abscissas           (double list, [])
+        counts              (int list, [])
+
+    NOTE: A call to the constructor with zero arguments will return a default
+    instance; one argument of the class copies the instance; three or more
+    arguments constructs a new instance from the arguments.
+    '''
+
+    def __init__(self):  #{{{
+        self.descriptor = ''
+        self.pairs_per_variable = []
+        self.abscissas = []
+        self.counts = []
+    #}}}
+
+    @staticmethod
+    def histogram_bin_uncertain(*args):  #{{{
+        nargin = len(args)
+
+        # create a default object
+        if nargin == 0:
+            return histogram_bin_uncertain()
+
+        # copy the object
+        elif nargin == 1:
+            if isinstance(args[0], histogram_bin_uncertain):
+                hbu = args[0]
+            else:
+                raise Exception("Object {} is a {} class object, not 'histogram_bin_uncertain'.".format(str(args[0]), str(type(args[0]))))
+
+        elif nargin == 2 or nargin == 3:
+            raise Exception("Construction of 'histogram_bin_uncertain' class object requires at least {} inputs.".format(4))
+
+        # create the object from the input
+        elif nargin == 4:
+            hbu = histogram_bin_uncertain()
+
+            #recover options:
+            options = pairoptions(*args)
+
+            #initialize fields:
+            hbu.descriptor = options.getfieldvalue('descriptor')
+            hbu.pairs_per_variable = options.getfieldvalue('pairs_per_variable')
+            hbu.abscissas = options.getfieldvalue('abscissas')
+            hbu.counts = options.getfieldvalue('counts')
+
+        else:
+            raise Exception("Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists")
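
[Editor's note] The new Python class mirrors Dakota's histogram_bin_uncertain inputs: the (abscissa, count) pairs for all variables appear to be flattened into single abscissas/counts lists, with pairs_per_variable recording how many pairs belong to each variable. A minimal, self-contained sketch of that layout (the number of variables and the bin values below are invented for illustration):

    # hypothetical flattened inputs for two uncertain variables
    pairs_per_variable = [3, 2]
    abscissas = [0.0, 0.5, 1.0, 10.0, 20.0]
    counts = [5, 10, 0, 7, 0]

    # recover each variable's (abscissa, count) pairs
    start = 0
    for npairs in pairs_per_variable:
        print(list(zip(abscissas[start:start + npairs], counts[start:start + npairs])))
        start += npairs
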
Index: ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py
===================================================================
--- ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py (revision 25089)
+++ ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py (revision 25090)
@@ -13,11 +13,18 @@
     NORMAL_UNCERTAIN class definition

     Usage:
-        nuv = normal_uncertain('descriptor',descriptor,'mean',mean,'stddev',stddev,'partition',partition)
-        where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self
-        explanatory. partition is the partition vector for distributed variables. Can be a partition
-        vector over elements or vertices.
+        [nuv] = normal_uncertain(
+            'descriptor', descriptor,
+            'mean', mean,
+            'stddev', stddev,
+            'partition', partition
+        )

+        where nuv is the normal_uncertain object returned by the constructor,
+        mean and stddev are self explanatory, and partition is the partition
+        vector for distributed variables. Can be a partition vector over
+        elements or vertices.
+
     Example:
         md.qmu.variables.rheology=normal_uncertain(
             'descriptor','RheologyBBar',
@@ -31,12 +38,13 @@
             'partition',vpartition
         )
     '''
-    def __init__(self):
+    def __init__(self):  #{{{
         self.descriptor = ''
         self.mean = np.NaN
         self.stddev = np.NaN
         self.partition = []
         self.nsteps = 0
+    #}}}

     @staticmethod
     def normal_uncertain(*args):  #{{{
@@ -51,11 +59,10 @@
             if isinstance(args[0], normal_uncertain):
                 nuv = args[0]
             else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')
+                raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')

         # create the object from the input
         else:
-            # lines differ here in other classes / tests; see asizec problem in notes
             nuv = normal_uncertain()

             #recover options:
@@ -66,21 +73,21 @@
             nuv.mean = options.getfieldvalue('mean')
             nuv.stddev = options.getfieldvalue('stddev')

-            #if the variable is scaled, a partition vector should have been supplied, and
-            #that partition vector should have as many partitions as the mean and stddev
-            #vectors:
+            #if the variable is scaled, a partition vector should have been
+            #supplied, and that partition vector should have as many partitions
+            #as the mean and stddev vectors:
             if nuv.isscaled():
                 nuv.partition = options.getfieldvalue('partition')
                 nuv.nsteps = options.getfieldvalue('nsteps', 1)
                 npart = qmupart2npart(nuv.partition)
                 if npart != nuv.mean.shape[0]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor)
                 if npart != nuv.stddev.shape[0]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor)
                 if nuv.nsteps != nuv.mean.shape[1]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor)
                 if nuv.nsteps != nuv.stddev.shape[1]:
-                    raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor)
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor)

         return [nuv]  # Always return a list, so we have something akin to a MATLAB single row matrix
     #}}}
@@ -106,31 +113,31 @@

     def checkconsistency(self, md, solution, analyses):  #{{{
         md = checkfield(md, 'field', self.mean, 'fieldname', 'normal_uncertain.mean', 'NaN', 1, 'Inf', 1, '>=', 0)
-        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0, 'numel', len(self.mean))
+        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0)
         if self.isscaled():
             if self.partition == []:
-                raise RuntimeError("normal_uncertain is a scaled variable, but it's missing a partition vector")
+                raise Exception("normal_uncertain is a scaled variable, but it's missing a partition vector")
             #better have a partition vector that has as many partitions as stddev's size:
             if self.stddev.shape[0] != partition_npart(self.partititon):
-                raise RuntimeError("normal_uncertain error message: row size of stddev and partition size should be identical")
+                raise Exception("normal_uncertain error message: row size of stddev and partition size should be identical")
             if self.mean.shape[0] != partition_npart(self.partition):
-                raise RuntimeError("normal_uncertain error message: row size of mean and partition size should be identical")
+                raise Exception("normal_uncertain error message: row size of mean and partition size should be identical")
             #we need as many steps in stddev and mean as there are in time steps
             if self.stddev.shape[1] != self.nsteps:
-                raise RuntimeError("normal_uncertain error message: col size of stddev and partition size should be identical")
+                raise Exception("normal_uncertain error message: col size of stddev and partition size should be identical")
             if self.mean.shape[1] != self.nsteps:
-                raise RuntimeError("normal_uncertain error message: col size of mean and partition size should be identical")
+                raise Exception("normal_uncertain error message: col size of mean and partition size should be identical")
             md = checkfield(md, 'field', self.partition, 'fieldname', 'normal_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
             if self.partition.shape[1] > 1:
-                raise RuntimeError("normal_uncertain error message: partition should be a column vector")
+                raise Exception("normal_uncertain error message: partition should be a column vector")
             partcheck = np.unique(self.partition)
             partmin = min(partcheck)
             partmax = max(partcheck)
             if partmax < -1:
-                raise RuntimeError("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+                raise Exception("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
             nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
             if partmax > nmax:
-                raise RuntimeError("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+                raise Exception("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
     #}}}

     #virtual functions needed by qmu processing algorithms
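
[Editor's note] The reworded constructor errors above spell out the shape convention for scaled variables: qmupart2npart(partition) gives the number of partitions, and mean/stddev must then be 2-D arrays with one row per partition and one column per time step (nsteps). A small standalone sketch of arrays that would pass those checks (sizes and values are illustrative only):

    import numpy as np

    npart = 3   # partitions described by the partition vector
    nsteps = 2  # time steps passed through the 'nsteps' option

    mean = np.full((npart, nsteps), 1.8e8)  # one row per partition, one column per step
    stddev = 0.1 * mean

    assert mean.shape == (npart, nsteps)
    assert stddev.shape == mean.shape
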
Index: ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m
===================================================================
--- ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m (revision 25089)
+++ ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m (revision 25090)
@@ -1,41 +1,38 @@
-% % definition for the histogram_bin_uncertain class.
+%HISTOGRAM BIN UNCERTAIN class definition
 %
-%   [hbu]=histogram_bin_uncertain(varargin)
+%   [hbu]=histogram_bin_uncertain(varargin)
 %
-%   where the required varargin are:
-%      descriptor            (char, description, '')
-%      pairs_per_variable    (double vector, [])
-%      abscissas             (double vector, [])
-%      counts                (int vector, [])
+%   where the required varargin are:
+%      descriptor            (char, description, '')
+%      pairs_per_variable    (double vector, [])
+%      abscissas             (double vector, [])
+%      counts                (int vector, [])
 %
-%   note that zero arguments constructs a default instance; one
-%   argument of the class copies the instance; and three or more
-%   arguments constructs a new instance from the arguments.
+%   NOTE: A call to the constructor with zero arguments will return a default
+%   instance; one argument of the class copies the instance; three or more
+%   arguments constructs a new instance from the arguments.
 %
 classdef histogram_bin_uncertain
-    properties
-        descriptor='';
+    properties
+        descriptor='';
         pairs_per_variable=[];
-        abscissas = [];
-        counts = [];
-    end
-
-    methods
-        function [hbu]=histogram_bin_uncertain(varargin) % {{{
-
-            switch nargin
-                case 0 % create a default object
-                case 1 % copy the object
-                    if isa(varargin{1},'histogram_bin_uncertain')
-                        hbu=varargin{1};
-                    else
-                        error('Object ''%s'' is a ''%s'' class object, not ''%s''.',...
-                            inputname(1),class(varargin{1}),'histogram_bin_uncertain');
-                    end
-                case {2,3} % not enough arguments
-                    error('Construction of ''%s'' class object requires at least %d inputs.',...
-                        'histogram_bin_uncertain',4)
-                case 4 %
+        abscissas = [];
+        counts = [];
+    end
+    methods
+        function [hbu]=histogram_bin_uncertain(varargin) % {{{
+            switch nargin
+                case 0 % create a default object
+                case 1 % copy the object
+                    if isa(varargin{1},'histogram_bin_uncertain')
+                        hbu=varargin{1};
+                    else
+                        error('Object ''%s'' is a ''%s'' class object, not ''%s''.',...
+                            inputname(1),class(varargin{1}),'histogram_bin_uncertain');
+                    end
+                case {2,3} % not enough arguments
+                    error('Construction of ''histogram_bin_uncertain'' class object requires at least %d inputs.',4)
+                case 4 %
                     % create the object from the input
                     hbu = histogram_bin_uncertain;
                     hbu.descriptor=varargin{1};
@@ -43,19 +40,16 @@
                     hbu.abscissas=varargin{3};
                     hbu.counts=varargin{4};

-                otherwise
-                    error('Construction of histogram_bin_uncertain class object requires three arguments, descriptor, abscissas and counts');
-            end
-
-        end % }}}
+                otherwise
+                    error('Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists');
+            end
+        end % }}}
         function md=checkconsistency(self,md,solution,analyses) % {{{
         end % }}}
-        function []=disp(hbu) % {{{
-
-% display the object
-
-            disp(sprintf('\n'));
-            for i=1:numel(hbu)
+        function []=disp(hbu) % {{{
+            % display the object
+            disp(sprintf('\n'));
+            for i=1:numel(hbu)
                 disp(sprintf('class ''%s'' object ''%s%s'' = \n',...
                     class(hbu),inputname(1),string_dim(hbu,i)));
                 disp(sprintf('    descriptor: ''%s''' ,hbu(i).descriptor));
@@ -63,9 +57,8 @@
                 disp(sprintf('     abscissas: %g' ,hbu(i).abscissas));
                 disp(sprintf('        counts: %g' ,hbu(i).counts));
             end
-
         end % }}}
-        function [desc] =prop_desc(hbu,dstr) % {{{
+        function [desc]=prop_desc(hbu,dstr) % {{{
            desc=cell(1,numel(hbu));
            for i=1:numel(hbu)
                if ~isempty(hbu(i).descriptor)
@@ -83,13 +76,13 @@
        function [initpt]=prop_initpt(hbu) % {{{
            initpt=[];
        end % }}}
-        function [lower] =prop_lower(hbu) % {{{
+        function [lower]=prop_lower(hbu) % {{{
            lower=[];
        end % }}}
-        function [upper] =prop_upper(hbu) % {{{
+        function [upper]=prop_upper(hbu) % {{{
            upper=[];
        end % }}}
-        function [mean] =prop_mean(hbu) % {{{
+        function [mean]=prop_mean(hbu) % {{{
            mean=[];
        end % }}}
        function [stddev]=prop_stddev(hbu) % {{{
@@ -98,20 +91,19 @@
        function [initst]=prop_initst(hbu) % {{{
            initst=[];
        end % }}}
-        function [stype] =prop_stype(hbu) % {{{
+        function [stype]=prop_stype(hbu) % {{{
            stype={};
        end % }}}
-        function [scale] =prop_scale(hbu) % {{{
+        function [scale]=prop_scale(hbu) % {{{
            scale=[];
        end % }}}
-        function [abscissas] =prop_abscissas(hbu) % {{{
-            abscissas=[];
-            for i=1:numel(hbu)
-                abscissas=[abscissas hbu(i).abscissas];
-            end
-            abscissas=allequal(abscissas,-Inf);
-
-        end % }}}
+        function [abscissas]=prop_abscissas(hbu) % {{{
+            abscissas=[];
+            for i=1:numel(hbu)
+                abscissas=[abscissas hbu(i).abscissas];
+            end
+            abscissas=allequal(abscissas,-Inf);
+        end % }}}
        function [pairs_per_variable] =prop_pairs_per_variable(hbu) % {{{
            pairs_per_variable=zeros(1,numel(hbu));
            for i=1:numel(hbu)
@@ -120,12 +112,11 @@
            pairs_per_variable=allequal(pairs_per_variable,-Inf);
        end % }}}
        function [counts] =prop_counts(hbu) % {{{
-            counts=[];
-            for i=1:numel(hbu)
-                counts=[counts hbu(i).counts];
-            end
-            counts=allequal(counts,-Inf);
-
+            counts=[];
+            for i=1:numel(hbu)
+                counts=[counts hbu(i).counts];
+            end
+            counts=allequal(counts,-Inf);
        end % }}}
        function scaled=isscaled(self) % {{{
            if strncmp(self.descriptor,'scaled_',7),
@@ -137,13 +128,10 @@
    end
    methods (Static)
        function []=dakota_write(fidi,dvar) % {{{
+            % collect only the variables of the appropriate class
+            hbu=struc_class(dvar,'histogram_bin_uncertain');

-% collect only the variables of the appropriate class
-
-            hbu=struc_class(dvar,'histogram_bin_uncertain');
-
-% write variables
-
+            % write variables
            vlist_write(fidi,'histogram_bin_uncertain','hbu',hbu);
        end % }}}
    end
Index: ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py
===================================================================
--- ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py (revision 25089)
+++ ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py (revision 25090)
@@ -13,7 +13,7 @@
     UNIFORM_UNCERTAIN class definition

     Usage:
-        uuv = uniform_uncertain(
+        [uuv] = uniform_uncertain(
             'descriptor', descriptor,
             'lower', lower,
             'upper', upper,
@@ -38,15 +38,16 @@
             'partition', vpartition
         )
     '''
-    def __init__(self):
+    def __init__(self):  #{{{
         self.descriptor = ''
         self.lower = -np.Inf
         self.upper = np.Inf
         self.partition = []
         self.nsteps = 0
+    #}}}

     @staticmethod
-    def uniform_uncertain(*args):
+    def uniform_uncertain(*args):  #{{{
         nargin = len(args)

         # create a default object
@@ -58,7 +59,7 @@
             if isinstance(args[0], uniform_uncertain):
                 uuv = args[0]
             else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".')
+                raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".')

         # create the object from the input
         else:
@@ -73,22 +74,23 @@
             uuv.upper = options.getfieldvalue('upper')

             #if the variable is scaled, a partition vector should have been
-            #supplied, and that partition vector should have as many partitions as
-            #the lower and upper vectors:
+            #supplied, and that partition vector should have as many
+            #partitions as the lower and upper vectors:
             if uuv.isscaled():
                 uuv.partition = options.getfieldvalue('partition')
                 uuv.nsteps = options.getfieldvalue('nsteps', 1)
                 npart = qmupart2npart(uuv.partition)
                 if npart != uuv.upper.shape[0]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
                 if npart != uuv.lower.shape[0]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor)
                 if uuv.nsteps != uuv.upper.shape[1]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor)
                 if uuv.nsteps != uuv.lower.shape[1]:
-                    raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor)
+                    raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor)

         return [uuv]  # Always return a list, so we have something akin to a MATLAB single row matrix
+    #}}}

     def __repr__(self):  #{{{
         string = ' uniform uncertain variable: '
@@ -114,29 +116,29 @@
         md = checkfield(md, 'field', self.lower, 'fieldname', 'uniform_uncertain.upper', 'NaN', 1, 'Inf', 1, '<', self.upper, 'numel', len(self.upper))
         if self.isscaled():
             if self.partition == []:
-                raise RuntimeError("uniform_uncertain is a scaled variable, but it's missing a partition vector")
+                raise Exception("uniform_uncertain is a scaled variable, but it's missing a partition vector")
             #better have a partition vector that has as many partitions as
             #upper and lower's size:
             if self.upper.shape[0] != partition_npart(self.partititon):
-                raise RuntimeError("uniform_uncertain error message: row size of upper and partition size should be identical")
+                raise Exception("uniform_uncertain error message: row size of upper and partition size should be identical")
             if self.lower.shape[0] != partition_npart(self.partition):
-                raise RuntimeError("uniform_uncertain error message: row size of lower and partition size should be identical")
+                raise Exception("uniform_uncertain error message: row size of lower and partition size should be identical")
             #we need as steps in upper and lower as there are time steps
             if self.stddev.shape[1] != self.nsteps:
-                raise RuntimeError("uniform_uncertain error message: col size of upper and partition size should be identical")
+                raise Exception("uniform_uncertain error message: col size of upper and partition size should be identical")
             if self.mean.shape[1] != self.nsteps:
-                raise RuntimeError("uniform_uncertain error message: col size of lower and partition size should be identical")
+                raise Exception("uniform_uncertain error message: col size of lower and partition size should be identical")
             md = checkfield(md, 'field', self.partition, 'fieldname', 'uniform_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
             if self.partition.shape[1] > 1:
-                raise RuntimeError("uniform_uncertain error message: partition should be a column vector")
+                raise Exception("uniform_uncertain error message: partition should be a column vector")
             partcheck = np.unique(self.partition)
             partmin = min(partcheck)
             partmax = max(partcheck)
             if partmax < -1:
-                raise RuntimeError("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+                raise Exception("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
             nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
             if partmax > nmax:
-                raise RuntimeError("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+                raise Exception("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
     #}}}

     #virtual functions needed by qmu processing algorithms:
Index: ../trunk-jpl/src/m/classes/qmu.py
===================================================================
--- ../trunk-jpl/src/m/classes/qmu.py (revision 25089)
+++ ../trunk-jpl/src/m/classes/qmu.py (revision 25090)
@@ -22,6 +22,7 @@
         self.isdakota = 0
         self.output = 0
         self.variables = OrderedStruct()
+        self.correlation_matrix = []
         self.responses = OrderedStruct()
         self.method = OrderedDict()
         self.params = OrderedStruct()
@@ -154,7 +155,7 @@
             md.checkmessage('in parallel library mode, Dakota needs to run at least one slave on one cpu (md.qmu.params.processors_per_evaluation >= 1)!')

         if np.mod(md.cluster.np - 1, self.params.processors_per_evaluation):
-            md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. Modify md.cluster.np accordingly')
+            #md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. Modify md.cluster.np accordingly')

         # Go through variables and check for consistency
         fv = fieldnames(self.variables)
@@ -163,6 +164,32 @@
             if hasattr(variable, 'checkconsistency'):
                 variable.checkconsistency(md, solution, analyses)

+        # Go through variables and check that we have normal uncertains first,
+        # then uniform uncertains and finally histogram_bin_uncertain. Indeed,
+        # Dakota will order them this way, and when we send partitions for
+        # scaled variables, they better show up in the order Dakota is feeding
+        # them to us in InputUpdateFromDakotax!
+        fv = fieldnames(self.variables)
+        classlist = []
+        for i in range(len(fv)):
+            classlist.append(self.variables[fv[i]].__class__.__name__)
+        n = 0
+        u = 0
+        h = 0
+        for i in range(len(classlist)):
+            if classlist[i] == 'normal_uncertain':
+                if u != 0 or h != 0:
+                    raise Exception('normal uncertain variables should be declared before uniform and histogram_bin uncertain variables')
+                else:
+                    n = 1
+            if classlist[i] == 'uniform_uncertain':
+                if h != 0:
+                    raise Exception('uniform_uncertain variables should be declared before histogram_bin uncertain variables')
+                else:
+                    u = 1
+            if classlist[i] == 'histogram_bin_uncertain':
+                h = 1
+
         return md
     # }}}
     def marshall(self, prefix, md, fid):  # {{{
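
[Editor's note] The new block in qmu.py (and the matching check added to qmu.m) enforces a declaration order for qmu variables: normal_uncertain first, then uniform_uncertain, then histogram_bin_uncertain, so that partitions for scaled variables line up with the order Dakota reports variables back through InputUpdateFromDakotax. A standalone restatement of that rule, written against a plain list of class names rather than an md object (not the committed implementation):

    def check_variable_order(classlist):
        # expected declaration order of the constrained qmu variable classes
        rank = {'normal_uncertain': 0, 'uniform_uncertain': 1, 'histogram_bin_uncertain': 2}
        last = -1
        for name in classlist:
            r = rank.get(name, -1)
            if r < 0:
                continue  # other variable classes are not constrained by this check
            if r < last:
                raise Exception('qmu variables must be declared as normal_uncertain, '
                                'then uniform_uncertain, then histogram_bin_uncertain')
            last = r

    # passes: classes appear in the required order
    check_variable_order(['normal_uncertain', 'uniform_uncertain', 'histogram_bin_uncertain'])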