Timestamp:
12/08/20 08:45:53 (4 years ago)
Author:
Mathieu Morlighem
Message:

merged trunk-jpl and trunk for revision 25834

Location:
issm/trunk
Files:
3 edited

Legend: unchanged lines are shown with a leading space, added lines with '+', removed lines with '-'.
  • issm/trunk

  • issm/trunk/src

  • issm/trunk/src/m/classes/qmu/normal_uncertain.py

--- issm/trunk/src/m/classes/qmu/normal_uncertain.py (r24313)
+++ issm/trunk/src/m/classes/qmu/normal_uncertain.py (r25836)
@@ -1,34 +1,52 @@
 import numpy as np
+
 from MatlabArray import *
+from MatlabFuncs import *
+from fielddisplay import fielddisplay
+from pairoptions import pairoptions
+from partition_npart import *
+from qmupart2npart import qmupart2npart
 
 
 class normal_uncertain(object):
-    '''
-  definition for the normal_uncertain class.
-
-  [nuv] = normal_uncertain.normal_uncertain(args)
-   nuv = normal_uncertain()
-
-  where the required args are:
-    descriptor    (str, description, '')
-    mean          (float, mean, float('NaN'))
-    stddev        (float, standard deviation, float('NaN'))
-  and the optional args and defaults are:
-    lower         (float, lower bound, -np.Inf)
-    upper         (float, upper bound, np.Inf)
-
-  note that zero arguments constructs a default instance, one
-  argument of the class copies the instance, and three or more
-  arguments constructs a new instance from the arguments.
-'''
-    def __init__(self):
+    """NORMAL_UNCERTAIN class definition
+
+    Usage:
+        [nuv] = normal_uncertain(
+            'descriptor', descriptor,
+            'mean', mean,
+            'stddev', stddev,
+            'partition', partition
+            )
+
+        where nuv is the normal_uncertain object returned by the constructor,
+        mean and stddev are self explanatory, and partition is the partition
+        vector for distributed variables. Can be a partition vector over
+        elements or vertices.
+
+    Example:
+        md.qmu.variables.rheology=normal_uncertain(
+            'descriptor','RheologyBBar',
+            'mean',1,
+            'stddev',.05
+            )
+        md.qmu.variables.rheology=normal_uncertain(
+            'descriptor','scaled_RheologyBBar',
+            'mean',1,
+            'stddev',.05,
+            'partition',vpartition
+            )
+    """
+
+    def __init__(self): #{{{
         self.descriptor = ''
-        self.mean = float('NaN')
-        self.stddev = float('NaN')
-        self.lower = -np.Inf
-        self.upper = np.Inf
-
-    @staticmethod
-    def normal_uncertain(*args):
+        self.mean       = np.nan
+        self.stddev     = np.nan
+        self.partition  = []
+        self.nsteps     = 0
+    #}}}
+
+    @staticmethod
+    def normal_uncertain(*args): #{{{
         nargin = len(args)
 
@@ -42,51 +60,90 @@
                 nuv = args[0]
             else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')
-
-        # not enough arguments
-        elif nargin == 2:
-            raise RuntimeError('Construction of "normal_uncertain" class object requires at least 3 inputs.')
-
-    # create the object from the input
-        else:
-            # lines differ here in other classes / tests; see asizec problem in notes
+                raise Exception('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')
+
+        # create the object from the input
+        else:
             nuv = normal_uncertain()
-            nuv.descriptor = str(args[0])
-            nuv.mean = args[1]
-            nuv.stddev = args[2]
-            if nargin >= 4:
-                nuv.lower = args[3]
-            if nargin >= 5:
-                nuv.upper = args[4]
-            if nargin > 5:
-                print('WARNING: normal_uncertain:extra_arg: Extra arguments for object of class ' + str(type(nuv)) + '.')
-
-        return [nuv]
-
-    def __repr__(self):
-        # display an individual object
-        string = '\n'
-        string += 'class "normal_uncertain" object = \n'
-        string += '    descriptor: ' + str(self.descriptor) + '\n'
-        string += '          mean: ' + str(self.mean) + '\n'
-        string += '        stddev: ' + str(self.stddev) + '\n'
-        string += '         lower: ' + str(self.lower) + '\n'
-        string += '         upper: ' + str(self.upper) + '\n'
+
+            #recover options:
+            options = pairoptions(*args)
+
+            #initialize fields:
+            nuv.descriptor = options.getfieldvalue('descriptor')
+            nuv.mean       = options.getfieldvalue('mean')
+            nuv.stddev     = options.getfieldvalue('stddev')
+
+            #if the variable is scaled, a partition vector should have been
+            #supplied, and that partition vector should have as many partitions
+            #as the mean and stddev vectors:
+            if nuv.isscaled():
+                nuv.partition = options.getfieldvalue('partition')
+                nuv.nsteps = options.getfieldvalue('nsteps', 1)
+                npart = qmupart2npart(nuv.partition)
+                if npart != nuv.mean.shape[0]:
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the mean field should be identical to the number of partitions" % nuv.descriptor)
+                if npart != nuv.stddev.shape[0]:
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the row size of the stddev field should be identical to the number of partitions" % nuv.descriptor)
+                if nuv.nsteps != nuv.mean.shape[1]:
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the mean field should be identical to the number of time steps" % nuv.descriptor)
+                if nuv.nsteps != nuv.stddev.shape[1]:
+                    raise Exception("normal_uncertain constructor: for the scaled variable %s the col size of the stddev field should be identical to the number of time steps" % nuv.descriptor)
+
+        return [nuv] # Always return a list, so we have something akin to a MATLAB single row matrix
+    #}}}
+
+    def __repr__(self): #{{{
+        string = '   normal uncertain variable: '
+        string = "%s\n%s" % (string, fielddisplay(self, 'descriptor', 'name tag'))
+        string = "%s\n%s" % (string, fielddisplay(self, 'mean', 'pdf mean'))
+        string = "%s\n%s" % (string, fielddisplay(self, 'stddev', 'pdf standard deviation'))
+        if self.partition != []:
+            string = "%s\n%s" % (string, fielddisplay(self, 'partition', 'partition vector defining where sampling will occur'))
+        string = "%s\n%s" % (string, fielddisplay(self, 'nsteps', 'number of time steps'))
 
         return string
-
-    # from here on, nuv is either a single, or a 1d vector of, normal_uncertain
-
-    @staticmethod
-    def prop_desc(nuv, dstr):
-        if type(nuv) not in [list, np.ndarray]:
-            if nuv.descriptor != '' or type(nuv.descriptor) != str:
-                desc = str(nuv.descriptor)
-            elif dstr != '':
-                desc = str(dstr)
-            else:
-                desc = 'nuv'
-            return desc
-
+    #}}}
+
+    def __len__(self): #{{{
+        if type(self.mean) in [list, np.ndarray]:
+            return len(self.mean)
+        else:
+            return 1
+    #}}}
+
+    def checkconsistency(self, md, solution, analyses): #{{{
+        md = checkfield(md, 'field', self.mean, 'fieldname', 'normal_uncertain.mean', 'NaN', 1, 'Inf', 1, '>=', 0)
+        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0)
+        if self.isscaled():
+            if self.partition == []:
+                raise Exception("normal_uncertain is a scaled variable, but it's missing a partition vector")
+            #better have a partition vector that has as many partitions as stddev's size:
+            if self.stddev.shape[0] != partition_npart(self.partititon):
+                raise Exception("normal_uncertain error message: row size of stddev and partition size should be identical")
+            if self.mean.shape[0] != partition_npart(self.partition):
+                raise Exception("normal_uncertain error message: row size of mean and partition size should be identical")
+            #we need as many steps in stddev and mean as there are in time steps
+            if self.stddev.shape[1] != self.nsteps:
+                raise Exception("normal_uncertain error message: col size of stddev and partition size should be identical")
+            if self.mean.shape[1] != self.nsteps:
+                raise Exception("normal_uncertain error message: col size of mean and partition size should be identical")
+            md = checkfield(md, 'field', self.partition, 'fieldname', 'normal_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
+            if self.partition.shape[1] > 1:
+                raise Exception("normal_uncertain error message: partition should be a column vector")
+            partcheck = np.unique(self.partition)
+            partmin = min(partcheck)
+            partmax = max(partcheck)
+            if partmax < -1:
+                raise Exception("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+            nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
+            if partmax > nmax:
+                raise Exception("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+    #}}}
+
+    #virtual functions needed by qmu processing algorithms
+    #implemented:
+
+    @staticmethod
+    def prop_desc(nuv, dstr): #{{{
         desc = ['' for i in range(np.size(nuv))]
         for i in range(np.size(nuv)):
@@ -101,79 +158,103 @@
 
         return desc
-
-    @staticmethod
-    def prop_initpt(nuv):
-        initpt = []
-        return initpt
-
-    @staticmethod
-    def prop_lower(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.lower
-
-        lower = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            lower[i] = nuv[i].lower
-
-        lower = allequal(lower, -np.inf)
-
-        return lower
-
-    @staticmethod
-    def prop_upper(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.upper
-
-        upper = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            upper[i] = nuv[i].upper
-
-        upper = allequal(upper, -np.inf)
-        return upper
-
-    @staticmethod
-    def prop_mean(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.mean
-
+    #}}}
+
+    @staticmethod
+    def prop_mean(nuv): #{{{
         mean = np.zeros(np.size(nuv))
         for i in range(np.size(nuv)):
             mean[i] = nuv[i].mean
-
         return mean
-
-    @staticmethod
-    def prop_stddev(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.stddev
-
+    #}}}
+
+    @staticmethod
+    def prop_stddev(nuv): #{{{
         stddev = np.zeros(np.size(nuv))
         for i in range(np.size(nuv)):
             stddev[i] = nuv[i].stddev
-
         return stddev
-
-    @staticmethod
-    def prop_initst(nuv):
-        initst = []
-        return initst
-
-    @staticmethod
-    def prop_stype(nuv):
+    #}}}
+
+    @staticmethod
+    def prop_lower(nuv): #{{{
+        lower = []
+        return lower
+    #}}}
+
+    @staticmethod
+    def prop_upper(nuv): #{{{
+        upper = []
+        return upper
+    #}}}
+
+    #default
+    @staticmethod
+    def prop_abscissas(hbu): #{{{
+        abscissas = []
+        return abscissas
+    #}}}
+
+    @staticmethod
+    def prop_pairs_per_variable(hbu): #{{{
+        pairs_per_variable = []
+        return pairs_per_variable
+    #}}}
+
+    @staticmethod
+    def prop_counts(hbu): #{{{
+        counts = []
+        return counts
+    #}}}
+    @staticmethod
+    def prop_initpt(nuv): #{{{
+        initpt = []
+        return initpt
+    #}}}
+
+    @staticmethod
+    def prop_initst(nuv): #{{{
+        inist = []
+        return inist
+    #}}}
+
+    @staticmethod
+    def prop_stype(nuv): #{{{
         stype = []
         return stype
-
-    @staticmethod
-    def prop_scale(nuv):
+    #}}}
+
+    @staticmethod
+    def prop_scale(nuv): #{{{
         scale = []
         return scale
+    #}}}
+
+    #new methods:
+    def isdistributed(self): #{{{
+        if strncmp(self.descriptor, 'distributed_', 12):
+            return True
+        else:
+            return False
+    #}}}
+
+    def isscaled(self): #{{{
+        if strncmp(self.descriptor, 'scaled_', 7):
+            return True
+        else:
+            return False
+    #}}}
 
     @staticmethod
     def dakota_write(fidi, dvar):
+        # possible namespace pollution, the above import seems not to work
+        from vlist_write import vlist_write
         # collect only the variables of the appropriate class
-        nuv = [struc_class(i, 'normal_uncertain', 'nuv') for i in dvar]
-
-    # possible namespace pollution, the above import seems not to work
-        from vlist_write import vlist_write
-    # write variables
-        vlist_write(fidi, 'normal_uncertain', 'nuv', nuv)
+        nuv = deepcopy(dvar)
+        fields = fieldnames(nuv)
+        for field in fields:
+            if getattr(nuv, field)[0].__class__.__name__ != 'normal_uncertain':
+                delattr(nuv, field)
+        if len(nuv) > 0:
+            vlist_write(fidi, 'normal_uncertain', 'nuv', nuv)
+    #}}}
+
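For readers tracking the API change recorded above: the old positional constructor (descriptor, mean, stddev, and optional lower/upper bounds) is replaced by a pairoptions-based one, and any variable whose descriptor starts with 'scaled_' must now supply a partition vector, with mean and stddev shaped as one row per partition and one column per time step. The sketch below is not part of the changeset; it is a minimal illustration, assuming the ISSM Python modules shown in the diff are importable, and the values of npart, nsteps and vpartition are made up for illustration (a real partition vector would come from ISSM's partitioning tools over the mesh vertices or elements).

import numpy as np
from normal_uncertain import normal_uncertain

# Plain (non-scaled) variable: scalar mean/stddev, no partition required.
# The static constructor returns a one-element list, as noted in the diff.
[nuv] = normal_uncertain.normal_uncertain(
    'descriptor', 'RheologyBBar',
    'mean', 1,
    'stddev', 0.05)

# Scaled variable: the 'scaled_' prefix triggers the new partition checks,
# so mean and stddev must be (npart x nsteps) arrays. The partition vector
# below is a placeholder column vector of 0-based partition ids (assumed
# layout); in practice it is built by ISSM's partitioning tools.
npart, nsteps = 10, 1
vpartition = np.tile(np.arange(npart), 5).reshape(-1, 1)
[nuv_scaled] = normal_uncertain.normal_uncertain(
    'descriptor', 'scaled_RheologyBBar',
    'mean', np.ones((npart, nsteps)),
    'stddev', 0.05 * np.ones((npart, nsteps)),
    'partition', vpartition,
    'nsteps', nsteps)

Returning [nuv] rather than a bare object keeps the Python interface consistent with the MATLAB single-row-matrix behavior called out in the new return statement.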