Changeset 24847


Timestamp: 05/11/20 20:02:58 (5 years ago)
Author: jdquinn
Message: CHG: Translation of changes to MATLAB->Python.
Location: issm/trunk-jpl/src/m/classes/qmu
Files: 2 edited

Legend: unchanged context lines are unmarked; lines removed in this changeset are prefixed with '-', lines added with '+'; '@@ -m +n @@' gives the starting line of each hunk in the old and new revision.
  • issm/trunk-jpl/src/m/classes/qmu/normal_uncertain.m

    r24837 → r24847

@@ -3 +3 @@
 %   Usage:
 %      nuv=normal_uncertain('descriptor',descriptor,'mean',mean,'stddev',stddev,'partition',partition);
-%      where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self 
-%      explanatory.  partition is the partition vector for distributed variables. Can be a partition 
+%      where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self
+%      explanatory.  partition is the partition vector for distributed variables. Can be a partition
 %      vector over elements or vertices.
-% 
+%
 %   Example:
 %      md.qmu.variables.rheology=normal_uncertain('descriptor','RheologyBBar','mean',1,'stddev',.05);
 %      md.qmu.variables.rheology=normal_uncertain('descriptor','scaled_RheologyBBar','mean',1,'stddev',.05,'partition',vpartition);
-% 
+%

 classdef normal_uncertain

@@ -22 +22 @@
                 function self=normal_uncertain(varargin) %constructor {{{

-                        %recover options: 
+                        %recover options:
                         options = pairoptions(varargin{:});

-                        %initialize fields: 
+                        %initialize fields:
                         self.descriptor=getfieldvalue(options,'descriptor');
                         self.mean=getfieldvalue(options,'mean');
                         self.stddev=getfieldvalue(options,'stddev');
-                        
-                        %if the variable is scaled,  a partition vector should have been supplied, and 
-                        %that partition vector should have as many partitions as the mean and stddev 
+
+                        %if the variable is scaled,  a partition vector should have been supplied, and
+                        %that partition vector should have as many partitions as the mean and stddev
                         %vectors:
                         if self.isscaled(),

@@ -40 +40 @@
                                 end
                                 if npart~=length(self.stddev),
-                                        error(['normal_uncertain constructor: for the scaled variable' self.described ' the stddev field is not currently a vector of values for all the partitions described in the partition vector']);
+                                        error(['normal_uncertain constructor: for the scaled variable' self.descriptor ' the stddev field is not currently a vector of values for all the partitions described in the partition vector']);
                                 end
                         end

@@ -55 +55 @@
                         fielddisplay(self,'partition','partition vector defining where sampling will occur');
                 end
-        end 
+        end
         %}}}
         function md=checkconsistency(self,md,solution,analyses) % {{{

@@ -65 +65 @@
                                 error('normal_uncertain is a scaled variable, but it''s missing a partition vector');
                         end
-                        %better have a partition vector that has as many partitions as stddev's size: 
-                        if  length(self.stddev)~=partition_npart(self.partition),
+                        %better have a partition vector that has as many partitions as stddev's size:
+                        if length(self.stddev)~=partition_npart(self.partition),
                                 error('normal_uncertain error message: stddev and partition should be vectors of identical size');
                         end
-                        if  length(self.mean)~=partition_npart(self.partition),
+                        if length(self.mean)~=partition_npart(self.partition),
                                 error('normal_uncertain error message: mean and partition should be vectors of identical size');
                         end

@@ -77 +77 @@
                                 error('normal_uncertain error message: partition should be a column vector');
                         end
-                        partcheck=unique(self.partition); 
-                        partmin=min(partcheck); 
+                        partcheck=unique(self.partition);
+                        partmin=min(partcheck);
                         partmax=max(partcheck);
                         if partmax<-1,

@@ -120 +120 @@
         %default
         function [abscissas] =prop_abscissas(hbu) % {{{
-                abscissas=[]; 
+                abscissas=[];
         end % }}}
         function [counts] =prop_counts(hbu) % {{{
-                counts=[]; 
+                counts=[];
         end % }}}
         function [pairs_per_variable] =prop_pairs_per_variable(hbu) % {{{

@@ -146 +146 @@
                 scale=[];
         end % }}}
-        %new methods: 
+        %new methods:
         function scaled =isscaled(self) % {{{
                 if strncmp(self.descriptor,'scaled_',7),
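The substantive fix in the MATLAB hunks above is the change from self.described to self.descriptor in the scaled-variable error message; the remaining hunks only strip trailing whitespace and doubled spaces. For readers following the MATLAB-to-Python translation, here is a minimal, self-contained Python sketch of the scaled-descriptor/partition-length check that both versions perform. The helper names and the partition-counting rule below are simplified stand-ins for ISSM's pairoptions/partition_npart machinery, not the ISSM implementation itself:

    # Standalone illustration of the check: a 'scaled_' descriptor requires a
    # partition vector, and mean/stddev must carry one value per partition.
    def is_scaled(descriptor):
        return descriptor.startswith('scaled_')

    def check_scaled_variable(descriptor, mean, stddev, partition):
        if not is_scaled(descriptor):
            return
        npart = int(max(partition)) + 1  # stand-in for partition_npart()
        if npart != len(mean):
            raise ValueError('scaled variable %s: mean needs one value per partition' % descriptor)
        if npart != len(stddev):
            raise ValueError('scaled variable %s: stddev needs one value per partition' % descriptor)

    # two partitions (ids 0 and 1), so mean and stddev need two entries each
    check_scaled_variable('scaled_RheologyBBar', [1.0, 1.0], [0.05, 0.05], [0, 1, 1, 0])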
  • issm/trunk-jpl/src/m/classes/qmu/normal_uncertain.py

    r24261 → r24847

@@ -1 +1 @@
 import numpy as np
-from MatlabArray import *
+from pairoptions import pairoptions


 class normal_uncertain(object):
     '''
-  definition for the normal_uncertain class.
+    NORMAL_UNCERTAIN class definition

-  [nuv] = normal_uncertain.normal_uncertain(args)
-    nuv = normal_uncertain()
+        Usage:
+            nuv = normal_uncertain('descriptor',descriptor,'mean',mean,'stddev',stddev,'partition',partition)
+            where nuv is the normal_uncertain object returned by the constructor, mean and stddev are self
+            explanatory.  partition is the partition vector for distributed variables. Can be a partition
+            vector over elements or vertices.

-  where the required args are:
-    descriptor    (str, description, '')
-    mean          (float, mean, float('NaN'))
-    stddev        (float, standard deviation, float('NaN'))
-  and the optional args and defaults are:
-    lower         (float, lower bound, -np.Inf)
-    upper         (float, upper bound, np.Inf)
-
-  note that zero arguments constructs a default instance, one
-  argument of the class copies the instance, and three or more
-  arguments constructs a new instance from the arguments.
-'''
-    def __init__(self):
+        Example:
+            md.qmu.variables.rheology=normal_uncertain('descriptor','RheologyBBar','mean',1,'stddev',.05);
+            md.qmu.variables.rheology=normal_uncertain('descriptor','scaled_RheologyBBar','mean',1,'stddev',.05,'partition',vpartition);
+    '''
+    def __init__(self, *args): #{{{
         self.descriptor = ''
         self.mean = float('NaN')
         self.stddev = float('NaN')
-        self.lower = -np.Inf
-        self.upper = np.Inf
+        self.partition = []
+
+        #recover options:
+        options = pairoptions(*args)
+
+        #initialize fields:
+        self.descriptor = getfieldvalue(options, 'descriptor')
+        self.mean = getfieldvalue(options, 'mean')
+        self.stddev = getfieldvalue(options, 'stddev')
+
+        #if the variable is scaled, a partition vector should have been supplied, and
+        #that partition vector should have as many partitions as the mean and stddev
+        #vectors:
+        if self.isscaled():
+            self.partition = getfieldvalue(options, 'partition')
+            npart = partition_npart(self.partition)
+            if npart != len(self.mean):
+                error("normal_uncertain constructor: for the scaled variable %s the mean field is not currently a vector of values for all the partitions described in the partition vector" % self.descriptor)
+            if npart != len(self.stddev):
+                error("normal_uncertain constructor: for the scaled variable %s the stddev field is not currently a vector of values for all the partitions described in the partition vector" % self.descriptor)
+    #}}}
+
+    def __repr__(self):
+        string = '\n'
+        string += 'normal uncertain variable: '
+        string += "%s\n%s" % (string, fielddisplay(self, 'descriptor', 'name tag'))
+        string += "%s\n%s" % (string, fielddisplay(self, 'mean', 'pdf mean'))
+        string += "%s\n%s" % (string, fielddisplay(self, 'stddev', 'pdf standard deviation'))
+        if self.partition:
+            string += "%s\n%s" % (string, fielddisplay(self, 'partition', 'partition vector defining where sampling will occur'))
+        return string
+    #}}}
+
+    def checkconsistency(self, md, solution, analyses): #{{{
+        md = checkfield(md, 'field', self.mean, 'fieldname', 'normal_uncertain.mean', 'NaN', 1, 'Inf', 1, '>=', 0)
+        md = checkfield(md, 'field', self.stddev, 'fieldname', 'normal_uncertain.stddev', 'NaN', 1, 'Inf', 1, '>=', 0, 'numel', len(self.mean))
+        if self.isscaled():
+            if not self.partition:
+                error("normal_uncertain is a scaled variable, but it's missing a partition vector")
+            #better have a partition vector that has as many partitions as stddev's size:
+            if len(self.stddev) != partition_npart(self.partition):
+                error("normal_uncertain error message: stddev and partition should be vectors of identical size")
+            if len(self.mean) != partition_npart(self.partition):
+                error("normal_uncertain error message: mean and partition should be vectors of identical size")
+            md = checkfield(md, 'field', self.partition, 'fieldname', 'normal_uncertain.partition', 'NaN', 1, 'Inf', 1, '>=', -1, 'numel', [md.mesh.numberofvertices, md.mesh.numberofvertices])
+            if self.partition.shape[1] > 1:
+                error("normal_uncertain error message: partition should be a column vector")
+            partcheck = np.unique(self.partition)
+            partmin = min(partcheck)
+            partmax = max(partcheck)
+            if partmax < -1:
+                error("normal_uncertain error message: partition vector's min value should be -1 (for no partition), or start at 0")
+            nmax = max(md.mesh.numberofelements, md.mesh.numberofvertices)
+            if partmax > nmax:
+                error("normal_uncertain error message: partition vector's values cannot go over the number of vertices or elements")
+    #}}}
+
+    #virtual functions needed by qmu processing algorithms
+    #implemented:

     @staticmethod
-    def normal_uncertain(*args):
-        nargin = len(args)
-
-        # create a default object
-        if nargin == 0:
-            return normal_uncertain()
-
-        # copy the object
-        elif nargin == 1:
-            if isinstance(args[0], normal_uncertain):
-                nuv = args[0]
-            else:
-                raise RuntimeError('Object ' + str(args[0]) + ' is a ' + str(type(args[0])) + ' class object, not "normal_uncertain".')
-
-        # not enough arguments
-        elif nargin == 2:
-            raise RuntimeError('Construction of "normal_uncertain" class object requires at least 3 inputs.')
-
-    # create the object from the input
-        else:
-            # lines differ here in other classes / tests; see asizec problem in notes
-            nuv = normal_uncertain()
-            nuv.descriptor = str(args[0])
-            nuv.mean = args[1]
-            nuv.stddev = args[2]
-            if nargin >= 4:
-                nuv.lower = args[3]
-            if nargin >= 5:
-                nuv.upper = args[4]
-            if nargin > 5:
-                print('WARNING: normal_uncertain:extra_arg: Extra arguments for object of class ' + str(type(nuv)) + '.')
-
-        return [nuv]
-
-    def __repr__(self):
-        # display an individual object
-        string = '\n'
-        string += 'class "normal_uncertain" object = \n'
-        string += '    descriptor: ' + str(self.descriptor) + '\n'
-        string += '          mean: ' + str(self.mean) + '\n'
-        string += '        stddev: ' + str(self.stddev) + '\n'
-        string += '         lower: ' + str(self.lower) + '\n'
-        string += '         upper: ' + str(self.upper) + '\n'
-
-        return string
-
-    # from here on, nuv is either a single, or a 1d vector of, normal_uncertain
-
-    @staticmethod
-    def prop_desc(nuv, dstr):
-        if type(nuv) not in [list, np.ndarray]:
-            if nuv.descriptor != '' or type(nuv.descriptor) != str:
-                desc = str(nuv.descriptor)
-            elif dstr != '':
-                desc = str(dstr)
-            else:
-                desc = 'nuv'
-            return desc
-
+    def prop_desc(nuv, dstr): #{{{
         desc = ['' for i in range(np.size(nuv))]
         for i in range(np.size(nuv)):
-            if nuv[i].descriptor != '' or type(nuv[i].descriptor) != str:
+            if nuv[i].descriptor:
                 desc[i] = str(nuv[i].descriptor)
-            elif dstr != '':
+            elif dstr:
                 desc[i] = str(dstr) + str(string_dim(nuv, i, 'vector'))
             else:
                 desc[i] = 'nuv' + str(string_dim(nuv, i, 'vector'))
-
         desc = allempty(desc)

         return desc
+    #}}}

     @staticmethod
-    def prop_initpt(nuv):
+    def prop_mean(nuv): #{{{
+        mean = np.zeros(np.size(nuv))
+        for i in range(np.size(nuv)):
+            mean[i] = nuv[i].mean
+        return mean
+    #}}}
+
+    @staticmethod
+    def prop_stddev(nuv): #{{{
+        stddev = np.zeros(np.size(nuv))
+        for i in range(np.size(nuv)):
+            stddev[i] = nuv[i].stddev
+        return stddev
+    #}}}
+
+    #default
+    @staticmethod
+    def prop_abscissas(nbu): #{{{
+        abscissas = []
+        return abscissas
+    #}}}
+
+    @staticmethod
+    def prop_counts(nbu): #{{{
+        counts = []
+        return counts
+    #}}}
+
+    @staticmethod
+    def prop_pairs_per_variable(nbu): #{{{
+        pairs_per_variable = []
+        return pairs_per_variable
+    #}}}
+
+    @staticmethod
+    def prop_initpt(nuv): #{{{
         initpt = []
         return initpt
+    #}}}

     @staticmethod
-    def prop_lower(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.lower
-
-        lower = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            lower[i] = nuv[i].lower
-
-        lower = allequal(lower, -np.inf)
-
+    def prop_lower(nuv): #{{{
+        lower = []
         return lower
+    #}}}

     @staticmethod
     def prop_upper(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.upper
-
-        upper = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            upper[i] = nuv[i].upper
-
-        upper = allequal(upper, -np.inf)
+        upper = []
         return upper
-
-    @staticmethod
-    def prop_mean(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.mean
-
-        mean = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            mean[i] = nuv[i].mean
-
-        return mean
-
-    @staticmethod
-    def prop_stddev(nuv):
-        if type(nuv) not in [list, np.ndarray]:
-            return nuv.stddev
-
-        stddev = np.zeros(np.size(nuv))
-        for i in range(np.size(nuv)):
-            stddev[i] = nuv[i].stddev
-
-        return stddev
+    #}}}

     @staticmethod
     def prop_initst(nuv):
-        initst = []
-        return initst
+        inist = []
+        return inist
+    #}}}

     @staticmethod

@@ -163 +159 @@
         stype = []
         return stype
+    #}}}

     @staticmethod

@@ -168 +165 @@
         scale = []
         return scale
+    #}}}
+
+    #new methods:
+    def isscaled(self): #{{{
+        if self.descriptor[:7] == 'scaled_':
+            return 1
+        else:
+            return 0
+    #}}}

     @staticmethod
-    def dakota_write(fidi, dvar):
+    def dakota_write(fidi, dvar): #{{{
         # collect only the variables of the appropriate class
         nuv = [struc_class(i, 'normal_uncertain', 'nuv') for i in dvar]

-    # possible namespace pollution, the above import seems not to work
-        from vlist_write import vlist_write
-    # write variables
+        # # possible namespace pollution, the above import seems not to work
+        # from vlist_write import vlist_write
+
+        # write variables
         vlist_write(fidi, 'normal_uncertain', 'nuv', nuv)
+    #}}}
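With this changeset the Python constructor takes its arguments as name/value pairs through pairoptions, mirroring the MATLAB class. The sketch below restates the docstring Example in that style; it assumes an ISSM Python session where the src/m class directories are on the Python path and where a model md and a partition vector vpartition already exist (both come from the Example and are not defined here). The list-valued mean and stddev in the scaled call follow the constructor's one-value-per-partition check and are illustrative, not part of the changeset:

    from normal_uncertain import normal_uncertain

    # plain variable: scalar mean and stddev, no partition vector required
    md.qmu.variables.rheology = normal_uncertain('descriptor', 'RheologyBBar',
                                                 'mean', 1, 'stddev', 0.05)

    # scaled variable: the 'scaled_' descriptor prefix makes isscaled() true, so a
    # partition vector is required and mean/stddev need one entry per partition
    # (two partitions assumed here for vpartition)
    md.qmu.variables.rheology = normal_uncertain('descriptor', 'scaled_RheologyBBar',
                                                 'mean', [1, 1], 'stddev', [0.05, 0.05],
                                                 'partition', vpartition)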