Index: ../trunk-jpl/src/m/classes/qmu.m =================================================================== --- ../trunk-jpl/src/m/classes/qmu.m (revision 25089) +++ ../trunk-jpl/src/m/classes/qmu.m (revision 25090) @@ -122,7 +122,7 @@ end end if strcmpi(classlist{i},'uniform_uncertain') - if (h~=0), + if (h~=0), error('uniform_uncertain variables should be declared before histogram_bin uncertain variables'); else u=1; Index: ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py =================================================================== --- ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py (nonexistent) +++ ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.py (revision 25090) @@ -0,0 +1,66 @@ +import numpy as np + + +class histogram_bin_uncertain(object): + ''' + HISTOGRAM_BIN_UNCERTAIN class definition + + Usage: + [hbu] = histogram_bin_uncertain( + 'descriptor', descriptor, + 'pairs_per_variable', pairs_per_variable, + 'abscissas', abscissas, + 'counts', counts + ) + + where the required args are: + descriptor (char, description, '') + pairs_per_variable (double list, []) + abscissas (double list, []) + counts (int list, []) + + NOTE: A call to the constructor with zero arguments will return a default + instance; one argument of the class copies the instance; three or more + arguments constructs a new instance from the arguments. 
+ ''' + + def __init__(self): #{{{ + self.descriptor = '' + self.pairs_per_variable = [] + self.abscissas = [] + self.counts = [] + #}}} + + @staticmethod + def histogram_bin_uncertain(*args): #{{{ + nargin = len(args) + + # create a default object + if nargin == 0: + return histogram_bin_uncertain() + + # copy the object + elif nargin == 1: + if isinstance(args[0], histogram_bin_uncertain): + hbu = args[0] + else: + raise Exception("Object {} is a {} class object, not 'histogram_bin_uncertain'.".format(str(args[0]), str(type(args[0])))) + + elif nargin == 2 or nargin == 3: + raise Exception("Construction of 'histogram_bin_uncertain' class object requires at least {} inputs.".format(4)) + + # create the object from the input + elif nargin == 4: + hbu = histogram_bin_uncertain() + + #recover options: + options = pairoptions(*args) + + #initialize fields: + hbu.descriptor = options.getfieldvalue('descriptor') + hbu.pairs_per_variable = options.getfieldvalue('pairs_per_variable') + hbu.abscissas = options.getfieldvalue('abscissas') + hbu.counts = options.getfieldvalue('counts') + + else: + raise Exception("Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists") Index: ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py =================================================================== --- ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py (revision 25089) +++ ../trunk-jpl/src/m/classes/qmu/normal_uncertain.py (revision 25090) @@ -13,11 +13,18 @@ NORMAL_UNCERTAIN class definition Usage: - nuv = normal_uncertain('descriptor',descriptor,'mean',mean,'stddev',stddev,'partition',partition) - where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self - explanatory. partition is the partition vector for distributed variables. Can be a partition - vector over elements or vertices. 
+ [nuv] = normal_uncertain( + 'descriptor', descriptor, + 'mean', mean, + 'stddev', stddev, + 'partition', partition + ) + where nuv is the normal_uncertain object returned by the constructor, + mean and stddev are self explanatory, and partition is the partition + vector for distributed variables. Can be a partition vector over + elements or vertices. + Example: md.qmu.variables.rheology=normal_uncertain( 'descriptor','RheologyBBar', @@ -31,12 +38,13 @@ 'partition',vpartition ) ''' - def __init__(self): + def __init__(self): #{{{ self.descriptor = '' self.mean = np.NaN self.stddev = np.NaN self.partition = [] self.nsteps = 0 + #}}} @staticmethod def normal_uncertain(*args): #{{{ @@ -51,11 +59,10 @@ if isinstance(args[0], normal_uncertain): nuv = args[0] else: - raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".') + raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".') # create the object from the input else: - # lines differ here in other classes / tests; see asizec problem in notes nuv = normal_uncertain() #recover options: @@ -66,21 +73,21 @@ nuv.mean = options.getfieldvalue('mean') nuv.stddev = options.getfieldvalue('stddev') - #if the variable is scaled, a partition vector should have been supplied, and - #that partition vector should have as many partitions as the mean and stddev - #vectors: + #if the variable is scaled, a partition vector should have been + #supplied, and that partition vector should have as many partitions + #as the mean and stddev vectors: if nuv.isscaled(): nuv.partition = options.getfieldvalue('partition') nuv.nsteps = options.getfieldvalue('nsteps', 1) npart = qmupart2npart(nuv.partition) if npart != nuv.mean.shape[0]: - raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor) + raise 
Exception("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor) if npart != nuv.stddev.shape[0]: - raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor) + raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor) if nuv.nsteps != nuv.mean.shape[1]: - raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor) + raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor) if nuv.nsteps != nuv.stddev.shape[1]: - raise RuntimeError("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor) + raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor) return [nuv] # Always return a list, so we have something akin to a MATLAB single row matrix #}}} @@ -106,31 +113,31 @@ def checkconsistency(self, md, solution, analyses): #{{{ md = checkfield(md, 'field', self.mean, 'fieldname', 'normal_uncertain.mean', 'NaN', 1, 'Inf', 1, '>=', 0) - md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0, 'numel', len(self.mean)) + md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0) if self.isscaled(): if self.partition == []: - raise RuntimeError("normal_uncertain is a scaled variable, but it's missing a 
partition vector") + raise Exception("normal_uncertain is a scaled variable, but it's missing a partition vector") #better have a partition vector that has as many partitions as stddev's size: if self.stddev.shape[0] != partition_npart(self.partititon): - raise RuntimeError("normal_uncertain error message: row size of stddev and partition size should be identical") + raise Exception("normal_uncertain error message: row size of stddev and partition size should be identical") if self.mean.shape[0] != partition_npart(self.partition): - raise RuntimeError("normal_uncertain error message: row size of mean and partition size should be identical") + raise Exception("normal_uncertain error message: row size of mean and partition size should be identical") #we need as many steps in stddev and mean as there are in time steps if self.stddev.shape[1] != self.nsteps: - raise RuntimeError("normal_uncertain error message: col size of stddev and partition size should be identical") + raise Exception("normal_uncertain error message: col size of stddev and partition size should be identical") if self.mean.shape[1] != self.nsteps: - raise RuntimeError("normal_uncertain error message: col size of mean and partition size should be identical") + raise Exception("normal_uncertain error message: col size of mean and partition size should be identical") md = checkfield(md, 'field', self.partition, 'fieldname', 'normal_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices]) if self.partition.shape[1] > 1: - raise RuntimeError("normal_uncertain error message: partition should be a column vector") + raise Exception("normal_uncertain error message: partition should be a column vector") partcheck = np.unique(self.partition) partmin = min(partcheck) partmax = max(partcheck) if partmax < -1: - raise RuntimeError("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0") + raise 
Exception("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0") nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices) if partmax > nmax: - raise RuntimeError("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements") + raise Exception("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements") #}}} #virtual functions needed by qmu processing algorithms Index: ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m =================================================================== --- ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m (revision 25089) +++ ../trunk-jpl/src/m/classes/qmu/histogram_bin_uncertain.m (revision 25090) @@ -1,41 +1,38 @@ -% % definition for the histogram_bin_uncertain class. +%HISTOGRAM BIN UNCERTAIN class definition % -% [hbu]=histogram_bin_uncertain(varargin) +% [hbu]=histogram_bin_uncertain(varargin) % -% where the required varargin are: -% descriptor (char, description, '') -% pairs_per_variable (double vector, []) -% abscissas (double vector, []) -% counts (int vector, []) +% where the required varargin are: +% descriptor (char, description, '') +% pairs_per_variable (double vector, []) +% abscissas (double vector, []) +% counts (int vector, []) % -% note that zero arguments constructs a default instance; one -% argument of the class copies the instance; and three or more -% arguments constructs a new instance from the arguments. +% NOTE: A call to the constructor with zero arguments will return a default +% instance; one argument of the class copies the instance; three or more +% arguments constructs a new instance from the arguments. 
% classdef histogram_bin_uncertain - properties - descriptor=''; + properties + descriptor=''; pairs_per_variable=[]; - abscissas = []; - counts = []; - end - - methods - function [hbu]=histogram_bin_uncertain(varargin) % {{{ - - switch nargin - case 0 % create a default object - case 1 % copy the object - if isa(varargin{1},'histogram_bin_uncertain') - hbu=varargin{1}; - else - error('Object ''%s'' is a ''%s'' class object, not ''%s''.',... - inputname(1),class(varargin{1}),'histogram_bin_uncertain'); - end - case {2,3} % not enough arguments - error('Construction of ''%s'' class object requires at least %d inputs.',... - 'histogram_bin_uncertain',4) - case 4 % + abscissas = []; + counts = []; + end + methods + function [hbu]=histogram_bin_uncertain(varargin) % {{{ + switch nargin + case 0 % create a default object + case 1 % copy the object + if isa(varargin{1},'histogram_bin_uncertain') + hbu=varargin{1}; + else + error('Object ''%s'' is a ''%s'' class object, not ''%s''.',... + inputname(1),class(varargin{1}),'histogram_bin_uncertain'); + end + case {2,3} % not enough arguments + error('Construction of ''histogram_bin_uncertain'' class object requires at least %d inputs.',4) + case 4 % % create the object from the input hbu = histogram_bin_uncertain; hbu.descriptor=varargin{1}; @@ -43,19 +40,16 @@ hbu.abscissas=varargin{3}; hbu.counts=varargin{4}; - otherwise - error('Construction of histogram_bin_uncertain class object requires three arguments, descriptor, abscissas and counts'); - end - - end % }}} + otherwise + error('Construction of histogram_bin_uncertain class object requires either (1) no arguments, (2) a histogram_bin_uncertain instance to copy from, or (3) a descriptor and pairs per variable, abscissas, and counts lists'); + end + end % }}} function md=checkconsistency(self,md,solution,analyses) % {{{ end % }}} - function []=disp(hbu) % {{{ - -% display the object - - disp(sprintf('\n')); - for i=1:numel(hbu) + function []=disp(hbu) % {{{ + % display 
the object + disp(sprintf('\n')); + for i=1:numel(hbu) disp(sprintf('class ''%s'' object ''%s%s'' = \n',... class(hbu),inputname(1),string_dim(hbu,i))); disp(sprintf(' descriptor: ''%s''' ,hbu(i).descriptor)); @@ -63,9 +57,8 @@ disp(sprintf(' abscissas: %g' ,hbu(i).abscissas)); disp(sprintf(' counts: %g' ,hbu(i).counts)); end - end % }}} - function [desc] =prop_desc(hbu,dstr) % {{{ + function [desc]=prop_desc(hbu,dstr) % {{{ desc=cell(1,numel(hbu)); for i=1:numel(hbu) if ~isempty(hbu(i).descriptor) @@ -83,13 +76,13 @@ function [initpt]=prop_initpt(hbu) % {{{ initpt=[]; end % }}} - function [lower] =prop_lower(hbu) % {{{ + function [lower]=prop_lower(hbu) % {{{ lower=[]; end % }}} - function [upper] =prop_upper(hbu) % {{{ + function [upper]=prop_upper(hbu) % {{{ upper=[]; end % }}} - function [mean] =prop_mean(hbu) % {{{ + function [mean]=prop_mean(hbu) % {{{ mean=[]; end % }}} function [stddev]=prop_stddev(hbu) % {{{ @@ -98,20 +91,19 @@ function [initst]=prop_initst(hbu) % {{{ initst=[]; end % }}} - function [stype] =prop_stype(hbu) % {{{ + function [stype]=prop_stype(hbu) % {{{ stype={}; end % }}} - function [scale] =prop_scale(hbu) % {{{ + function [scale]=prop_scale(hbu) % {{{ scale=[]; end % }}} - function [abscissas] =prop_abscissas(hbu) % {{{ - abscissas=[]; - for i=1:numel(hbu) - abscissas=[abscissas hbu(i).abscissas]; - end - abscissas=allequal(abscissas,-Inf); - - end % }}} + function [abscissas]=prop_abscissas(hbu) % {{{ + abscissas=[]; + for i=1:numel(hbu) + abscissas=[abscissas hbu(i).abscissas]; + end + abscissas=allequal(abscissas,-Inf); + end % }}} function [pairs_per_variable] =prop_pairs_per_variable(hbu) % {{{ pairs_per_variable=zeros(1,numel(hbu)); for i=1:numel(hbu) @@ -120,12 +112,11 @@ pairs_per_variable=allequal(pairs_per_variable,-Inf); end % }}} function [counts] =prop_counts(hbu) % {{{ - counts=[]; - for i=1:numel(hbu) - counts=[counts hbu(i).counts]; - end - counts=allequal(counts,-Inf); - + counts=[]; + for i=1:numel(hbu) + 
counts=[counts hbu(i).counts]; + end + counts=allequal(counts,-Inf); end % }}} function scaled=isscaled(self) % {{{ if strncmp(self.descriptor,'scaled_',7), @@ -137,13 +128,10 @@ end methods (Static) function []=dakota_write(fidi,dvar) % {{{ + % collect only the variables of the appropriate class + hbu=struc_class(dvar,'histogram_bin_uncertain'); -% collect only the variables of the appropriate class - - hbu=struc_class(dvar,'histogram_bin_uncertain'); - -% write variables - + % write variables vlist_write(fidi,'histogram_bin_uncertain','hbu',hbu); end % }}} end Index: ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py =================================================================== --- ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py (revision 25089) +++ ../trunk-jpl/src/m/classes/qmu/uniform_uncertain.py (revision 25090) @@ -13,7 +13,7 @@ UNIFORM_UNCERTAIN class definition Usage: - uuv = uniform_uncertain( + [uuv] = uniform_uncertain( 'descriptor', descriptor, 'lower', lower, 'upper', upper, @@ -38,15 +38,16 @@ 'partition', vpartition ) ''' - def __init__(self): + def __init__(self): #{{{ self.descriptor = '' self.lower = -np.Inf self.upper = np.Inf self.partition = [] self.nsteps = 0 + #}}} @staticmethod - def uniform_uncertain(*args): + def uniform_uncertain(*args): #{{{ nargin = len(args) # create a default object @@ -58,7 +59,7 @@ if isinstance(args[0], uniform_uncertain): uuv = args[0] else: - raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".') + raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "uniform_uncertain".') # create the object from the input else: @@ -73,22 +74,23 @@ uuv.upper = options.getfieldvalue('upper') #if the variable is scaled, a partition vector should have been - #supplied, and that partition vector should have as many partitions as - #the lower and upper vectors: + #supplied, and that partition vector should have as 
many + #partitions as the lower and upper vectors: if uuv.isscaled(): uuv.partition = options.getfieldvalue('partition') uuv.nsteps = options.getfieldvalue('nsteps', 1) npart = qmupart2npart(uuv.partition) if npart != uuv.upper.shape[0]: - raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor) + raise Exception("uniform_uncertain constructor: for the scaled variable %s the upper field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor) if npart != uuv.lower.shape[0]: - raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor) + raise Exception("uniform_uncertain constructor: for the scaled variable %s the lower field is not currently a vector of values for all the partitions described in the partition vector" % uuv.descriptor) if uuv.nsteps != uuv.upper.shape[1]: - raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor) + raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the upper field should be identical to the number of time steps" % uuv.descriptor) if uuv.nsteps != uuv.lower.shape[1]: - raise RuntimeError("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor) + raise Exception("uniform_uncertain constructor: for the scaled variable %s the col size of the lower field should be identical to the number of time steps" % uuv.descriptor) return [uuv] # Always return a list, so we have something akin to a MATLAB single row matrix + #}}} def __repr__(self): 
#{{{ string = ' uniform uncertain variable: ' @@ -114,29 +116,29 @@ md = checkfield(md, 'field', self.lower, 'fieldname', 'uniform_uncertain.upper', 'NaN', 1, 'Inf', 1, '<', self.upper, 'numel', len(self.upper)) if self.isscaled(): if self.partition == []: - raise RuntimeError("uniform_uncertain is a scaled variable, but it's missing a partition vector") + raise Exception("uniform_uncertain is a scaled variable, but it's missing a partition vector") #better have a partition vector that has as many partitions as #upper and lower's size: if self.upper.shape[0] != partition_npart(self.partititon): - raise RuntimeError("uniform_uncertain error message: row size of upper and partition size should be identical") + raise Exception("uniform_uncertain error message: row size of upper and partition size should be identical") if self.lower.shape[0] != partition_npart(self.partition): - raise RuntimeError("uniform_uncertain error message: row size of lower and partition size should be identical") + raise Exception("uniform_uncertain error message: row size of lower and partition size should be identical") #we need as steps in upper and lower as there are time steps if self.stddev.shape[1] != self.nsteps: - raise RuntimeError("uniform_uncertain error message: col size of upper and partition size should be identical") + raise Exception("uniform_uncertain error message: col size of upper and partition size should be identical") if self.mean.shape[1] != self.nsteps: - raise RuntimeError("uniform_uncertain error message: col size of lower and partition size should be identical") + raise Exception("uniform_uncertain error message: col size of lower and partition size should be identical") md = checkfield(md, 'field', self.partition, 'fieldname', 'uniform_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices]) if self.partition.shape[1] > 1: - raise RuntimeError("uniform_uncertain error message: partition should be a column 
vector") + raise Exception("uniform_uncertain error message: partition should be a column vector") partcheck = np.unique(self.partition) partmin = min(partcheck) partmax = max(partcheck) if partmax < -1: - raise RuntimeError("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0") + raise Exception("uniform_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0") nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices) if partmax > nmax: - raise RuntimeError("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements") + raise Exception("uniform_uncertain error message: partition vector's values cannot go over the number of vertices or elements") #}}} #virtual functions needed by qmu processing algorithms: Index: ../trunk-jpl/src/m/classes/qmu.py =================================================================== --- ../trunk-jpl/src/m/classes/qmu.py (revision 25089) +++ ../trunk-jpl/src/m/classes/qmu.py (revision 25090) @@ -22,6 +22,7 @@ self.isdakota = 0 self.output = 0 self.variables = OrderedStruct() + self.correlation_matrix = [] self.responses = OrderedStruct() self.method = OrderedDict() self.params = OrderedStruct() @@ -154,7 +155,7 @@ md.checkmessage('in parallel library mode, Dakota needs to run at least one slave on one cpu (md.qmu.params.processors_per_evaluation >= 1)!') if np.mod(md.cluster.np - 1, self.params.processors_per_evaluation): - md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. Modify md.cluster.np accordingly') + #md.checkmessage('in parallel library mode, the requirement is for md.cluster.np = md.qmu.params.processors_per_evaluation * number_of_slaves, where number_of_slaves will automatically be determined by Dakota. 
Modify md.cluster.np accordingly') # Go through variables and check for consistency fv = fieldnames(self.variables) @@ -163,6 +164,32 @@ if hasattr(variable, 'checkconsistency'): variable.checkconsistency(md, solution, analyses) + # Go through variables and check that we have normal uncertains first, + # then uniform uncertains and finally histogram_bin_uncertain. Indeed, + # Dakota will order them this way, and when we send partitions for + # scaled variables, they better show up in the order Dakota is feeding + # them to us in InputUpdateFromDakotax! + fv = fieldnames(self.variables) + classlist = [] + for i in range(len(fv)): + classlist.append(self.variables[fv[i]].__class__.__name__) + n = 0 + u = 0 + h = 0 + for i in range(len(classlist)): + if classlist[i] == 'normal_uncertain': + if u != 0 or h != 0: + raise Exception('normal uncertain variables should be declared before uniform and histogram_bin uncertain variables') + else: + n = 1 + if classlist[i] == 'uniform_uncertain': + if h != 0: + raise Exception('uniform_uncertain variables should be declared before histogram_bin uncertain variables') + else: + u = 1 + if classlist[i] == 'histogram_bin_uncertain': + h = 1 + return md # }}} def marshall(self, prefix, md, fid): # {{{