Index: /issm/trunk-jpl/src/m/classes/fourierlove.m
===================================================================
--- /issm/trunk-jpl/src/m/classes/fourierlove.m	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/fourierlove.m	(revision 26840)
@@ -6,25 +6,25 @@
 classdef fourierlove
 	properties (SetAccess=public) 
-		nfreq                       = 0;
-		frequencies                 = 0;
-		sh_nmax                     = 0;
-		sh_nmin                     = 0;
-		g0                          = 0;
-		r0                          = 0;
-		mu0                         = 0;
-		Gravitational_Constant      = 0;
-		chandler_wobble		    = 0;
-		allow_layer_deletion        = 0;
-		underflow_tol               = 0;
-		pw_threshold                = 0;
-		integration_steps_per_layer = 0;
-		istemporal                  = 0;
-		n_temporal_iterations       = 0;
-		time                        = 0;
-		love_kernels                = 0;
-		forcing_type                = 0;
-		inner_core_boundary         = 0;
-		core_mantle_boundary        = 0;
-		complex_computation         = 0;
+		nfreq						= 0;
+		frequencies					= 0;
+		sh_nmax						= 0;
+		sh_nmin						= 0;
+		g0							= 0;
+		r0							= 0;
+		mu0							= 0;
+		Gravitational_Constant		= 0;
+		chandler_wobble				= 0;
+		allow_layer_deletion		= 0;
+		underflow_tol				= 0;
+		pw_threshold				= 0;
+		integration_steps_per_layer	= 0;
+		istemporal					= 0;
+		n_temporal_iterations		= 0;
+		time						= 0;
+		love_kernels				= 0;
+		forcing_type				= 0;
+		inner_core_boundary			= 0;
+		core_mantle_boundary		= 0;
+		complex_computation			= 0;
 
 	end
@@ -180,5 +180,5 @@
 					if self.time(i)==0
 						self.frequencies((i-1)*2*self.n_temporal_iterations +j) =0; % convention to avoid marshalling infinite numbers
-					else						
+					else
 						self.frequencies((i-1)*2*self.n_temporal_iterations +j) = j*log(2)/self.time(i)/2/pi;
 					end
Index: /issm/trunk-jpl/src/m/classes/fourierlove.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/fourierlove.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/fourierlove.py	(revision 26840)
@@ -22,6 +22,8 @@
         self.mu0 = 0
         self.Gravitational_Constant = 0
+        self.chandler_wobble = 0
         self.allow_layer_deletion = 0
         self.underflow_tol = 0
+        self.pw_threshold = 0
         self.integration_steps_per_layer = 0
         self.istemporal = 0
@@ -51,6 +53,8 @@
         s += '{}\n'.format(fielddisplay(self, 'allow_layer_deletion', 'allow for migration of the integration boundary with increasing spherical harmonics degree (default: 1)'))
         s += '{}\n'.format(fielddisplay(self, 'Gravitational_Constant', 'Newtonian constant of gravitation (default: 6.67259e-11 [m^3 kg^-1 s^-2])'))
+        s += '{}\n'.format(fielddisplay(self, 'chandler_wobble', 'includes the inertial terms for the chandler wobble in the rotational feedback love numbers, only for forcing_type=11 (default: 0) (/!\\ 1 has not been validated yet)'))
         s += '{}\n'.format(fielddisplay(self, 'allow_layer_deletion', 'allow for migration of the integration boundary with increasing spherical harmonics degree (default: 1)'))
-        s += '{}\n'.format(fielddisplay(self, 'underflow_tol', 'threshold of deep to surface love number ratio to trigger the deletion of layers (default: 1e-16)'))
+        s += '{}\n'.format(fielddisplay(self, 'pw_threshold', 'if relative variation across frequencies is smaller than this ratio, the post-widder transform for time-dependent love numbers is bypassed (default: 1e-3)'))
+        s += '{}\n'.format(fielddisplay(self, 'chandler_wobble', 'includes the inertial terms for the chandler wobble in the rotational feedback love numbers, only for forcing_type=11 (default: 0) (/!\\ 1 is untested)'))
         s += '{}\n'.format(fielddisplay(self, 'integration_steps_per_layer', 'number of radial steps to propagate the yi system from the bottom to the top of each layer (default: 100)'))
         s += '{}\n'.format(fielddisplay(self, 'istemporal', ['1 for time-dependent love numbers, 0 for frequency-dependent or elastic love numbers (default: 0)', 'If 1: use fourierlove function build_frequencies_from_time to meet consistency']))
@@ -88,6 +92,8 @@
         self.mu0 = 1e11 # Pa
         self.Gravitational_Constant = 6.67259e-11 # m^3 kg^-1 s^-2
+        self.chandler_wobble = 0
         self.allow_layer_deletion = 1
         self.underflow_tol = 1e-16 # Threshold of deep to surface love number ratio to trigger the deletion of layer
+        self.pw_threshold = 1e-3 # If relative variation across frequencies is smaller than this ratio, the post-widder transform for time-dependent love numbers is bypassed 
         self.integration_steps_per_layer = 100
         self.istemporal = 0
@@ -113,6 +119,8 @@
         md = checkfield(md, 'fieldname', 'love.mu0', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0)
         md = checkfield(md, 'fieldname', 'love.Gravitational_Constant', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0)
+        md = checkfield(md, 'fieldname', 'love.chandler_wobble', 'values', [0, 1])
         md = checkfield(md, 'fieldname', 'love.allow_layer_deletion', 'values', [0, 1])
         md = checkfield(md, 'fieldname', 'love.underflow_tol', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0)
+        md = checkfield(md, 'fieldname', 'love.pw_threshold', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0)
         md = checkfield(md, 'fieldname', 'love.integration_steps_per_layer', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0)
         md = checkfield(md, 'fieldname', 'love.love_kernels', 'values', [0, 1])
@@ -127,9 +135,12 @@
             raise RuntimeError('Degree 1 not supported for forcing type {}. Use sh_min >= 2 for this kind of calculation.'.format(md.love.forcing_type))
 
+        if md.love.chandler_wobble  == 1:
+            print('Warning: Chandler wobble in Love number calculator has not been validated yet')
+
         # Need 'litho' material
-        if not hasattr(md.materials, 'materials') or 'litho' not in md.materials.nature:
+        if md.materials.__class__.__name__ != 'materials' or 'litho' not in md.materials.nature:
             raise RuntimeError('Need a \'litho\' material to run a Fourier Love number analysis')
 
-        mat = np.where(np.array(nature) == 'litho')[0]
+        mat = np.where(np.array(md.materials.nature) == 'litho')[0]
         if md.love.forcing_type <= 4:
             md = checkfield(md, 'fieldname', 'love.inner_core_boundary', 'NaN', 1, 'Inf', 1, 'numel', 1, '>', 0, '<=', md.materials[mat].numlayers)
@@ -149,6 +160,8 @@
         WriteData(fid, prefix, 'object', self, 'fieldname', 'mu0', 'format', 'Double')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'Gravitational_Constant', 'format', 'Double')
+        WriteData(fid, prefix, 'object', self, 'fieldname', 'chandler_wobble', 'format', 'Boolean')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'allow_layer_deletion', 'format', 'Boolean')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'underflow_tol', 'format', 'Double')
+        WriteData(fid, prefix, 'object', self, 'fieldname', 'pw_threshold', 'format', 'Double')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'integration_steps_per_layer', 'format', 'Integer')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'istemporal', 'format', 'Boolean')
@@ -171,7 +184,11 @@
         print('Temporal love numbers: Overriding md.love.nfreq and md.love.frequencies')
         self.nfreq = len(self.time) * 2 * self.n_temporal_iterations
-        self.frequencies = np.zeros((self.nfreq, 1))
+        self.frequencies = np.zeros((self.nfreq,))
         for i in range(len(self.time)):
             for j in range(2 * self.n_temporal_iterations):
-                self.frequencies[(i - 1) * 2 * self.n_temporal_iterations + j] = j * np.log(2) / self.time[i] / 2 / np.pi
+                if self.time[i] == 0:
+                    self.frequencies[i * 2 * self.n_temporal_iterations + j] = 0 # Convention to avoid marshalling infinite numbers
+                else:
+                    self.frequencies[i * 2 * self.n_temporal_iterations + j] = j * np.log(2) / self.time[i] / 2 / np.pi
+        return self
     #}}}
Index: /issm/trunk-jpl/src/m/classes/geometry.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/geometry.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/geometry.py	(revision 26840)
@@ -61,5 +61,5 @@
 
     def marshall(self, prefix, md, fid):  # {{{
-        length_thickness = len(self.thickness)
+        length_thickness = 1 if np.isnan(self.thickness) else len(self.thickness)
         if (length_thickness == md.mesh.numberofvertices) or (length_thickness == md.mesh.numberofvertices + 1):
             WriteData(fid, prefix, 'object', self, 'fieldname', 'thickness', 'format', 'DoubleMat', 'mattype', 1, 'timeserieslength', md.mesh.numberofvertices + 1, 'yts', md.constants.yts)
Index: /issm/trunk-jpl/src/m/classes/hydrologytws.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/hydrologytws.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/hydrologytws.py	(revision 26840)
@@ -51,5 +51,5 @@
 
     def marshall(self, prefix, md, fid):  # {{{
-        WriteData(fid, prefix, 'name', 'md.hydrology.model', 'data', 2, 'format', 'Integer')
+        WriteData(fid, prefix, 'name', 'md.hydrology.model', 'data', 6, 'format', 'Integer')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'spcwatercolumn', 'format', 'DoubleMat', 'mattype', 1, 'timeserieslength', md.mesh.numberofvertices + 1, 'yts', md.constants.yts)
         outputs = self.requested_outputs
Index: /issm/trunk-jpl/src/m/classes/lovenumbers.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/lovenumbers.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/lovenumbers.py	(revision 26840)
@@ -6,5 +6,6 @@
 from WriteData import WriteData
 
-class lovenumbers(object):
+
+class lovenumbers(object):  #{{{
     """LOVENUMBERS class definition
 
@@ -18,8 +19,8 @@
 
     def __init__(self, *args):  #{{{
-        # Regular love numbers
-        self.h = []   # Provided by PREM model
-        self.k = []   # idem
-        self.l = []   # idem
+        # Loading love numbers
+        self.h = [] # Provided by PREM model
+        self.k = [] # idem
+        self.l = [] # idem
 
         # Tidal love numbers for computing rotational feedback
@@ -27,5 +28,7 @@
         self.tk = []
         self.tl = []
-        self.tk2secular = 0  # deg 2 secular number
+        self.tk2secular = 0 # deg 2 secular number
+        self.pmtf_colinear = []
+        self.pmtf_ortho = []
         pmtf_colinear   = []
         pmtf_ortho      = []
@@ -40,4 +43,5 @@
         self.setdefaultparameters(maxdeg, referenceframe)
     #}}}
+
     def __repr__(self):  #{{{
         s = '   lovenumbers parameters:\n'
@@ -53,6 +57,9 @@
         s += '{}\n'.format(fielddisplay(self, 'istime', 'time (default: 1) or frequency love numbers (0)'))
         s += '{}\n'.format(fielddisplay(self, 'timefreq', 'time/frequency vector (yr or 1/yr)'))
+        s += '{}\n'.format(fielddisplay(self, 'pmtf_colinear', 'Colinear component of the Polar Motion Transfer Function (e.g. x-motion due to x-component perturbation of the inertia tensor)'))
+        s += '{}\n'.format(fielddisplay(self, 'pmtf_ortho', 'Orthogonal component of the Polar Motion Transfer Function (couples x and y components, only used for Chandler Wobble)'))
         return s
     #}}}
+
     def setdefaultparameters(self, maxdeg, referenceframe):  #{{{
         # Initialize love numbers
@@ -72,4 +79,9 @@
             self.pmtf_ortho= 0.0
 
+        self.pmtf_colinear = np.array([0.0]).reshape(-1, 1)
+        self.pmtf_ortho = np.array([0.0]).reshape(-1, 1)
+        if maxdeg >= 2:
+            self.pmtf_colinear = ((1.0 + self.k[2, :]) / (1.0 - self.tk[2, :] / self.tk2secular)).reshape(-1, 1) # Valid only for elastic regime, not viscous. Also neglects chandler wobble.
+            self.pmtf_ortho = np.array([0.0]).reshape(-1, 1)
         # Time
         self.istime = 1 # Temporal love numbers by default
@@ -77,4 +89,5 @@
         return self
     #}}}
+
     def checkconsistency(self, md, solution, analyses):  #{{{
         if ('SealevelchangeAnalysis' not in analyses) or (solution == 'TransientSolution' and not md.transient.isslc):
@@ -98,12 +111,16 @@
 
         ntf = len(self.timefreq)
-        if (np.shape(self.h)[1] != ntf or np.shape(self.k)[1] != ntf or np.shape(self.l)[1] != ntf or np.shape(self.th)[1] != ntf or np.shape(self.tk)[1] != ntf or np.shape(self.tl)[1] != ntf):
+        if (np.shape(self.h)[1] != ntf or np.shape(self.k)[1] != ntf or np.shape(self.l)[1] != ntf or np.shape(self.th)[1] != ntf or np.shape(self.tk)[1] != ntf or np.shape(self.tl)[1] != ntf or np.shape(self.pmtf_colinear)[1] != ntf or np.shape(self.pmtf_ortho)[1] != ntf):
             raise ValueError('lovenumbers error message: love numbers should have as many time/frequency steps as the time/frequency vector')
 
+        if self.istime and self.timefreq[0] != 0:
+            raise ValueError('temporal love numbers must start with elastic response, i.e. timefreq[0] = 0')
         return md
     #}}}
+
     def defaultoutputs(self, md):  #{{{
         return[]
     #}}}
+
     def marshall(self, prefix, md, fid):  #{{{
         WriteData(fid, prefix, 'object', self, 'fieldname', 'h', 'name', 'md.solidearth.lovenumbers.h', 'format', 'DoubleMat', 'mattype', 1)
@@ -114,4 +131,6 @@
         WriteData(fid, prefix, 'object', self, 'fieldname', 'tk', 'name', 'md.solidearth.lovenumbers.tk', 'format', 'DoubleMat', 'mattype', 1)
         WriteData(fid, prefix, 'object', self, 'fieldname', 'tl', 'name', 'md.solidearth.lovenumbers.tl', 'format', 'DoubleMat', 'mattype', 1)
+        WriteData(fid, prefix, 'object', self, 'fieldname', 'pmtf_colinear', 'name', 'md.solidearth.lovenumbers.pmtf_colinear', 'format', 'DoubleMat', 'mattype', 1)
+        WriteData(fid, prefix, 'object', self, 'fieldname', 'pmtf_ortho', 'name', 'md.solidearth.lovenumbers.pmtf_ortho', 'format', 'DoubleMat', 'mattype', 1)
         WriteData(fid, prefix, 'object', self, 'data', self.tk2secular, 'fieldname', 'lovenumbers.tk2secular', 'format', 'Double')
         WriteData(fid, prefix, 'object', self, 'fieldname', 'pmtf_colinear','name','md.solidearth.lovenumbers.pmtf_colinear','format','DoubleMat','mattype',1);
@@ -125,4 +144,5 @@
         WriteData(fid, prefix, 'object', self, 'fieldname', 'timefreq', 'name', 'md.solidearth.lovenumbers.timefreq', 'format', 'DoubleMat', 'mattype', 1, 'scale', scale);
     #}}}
+ 
     def extrude(self, md):  #{{{
         return
Index: /issm/trunk-jpl/src/m/classes/materials.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/materials.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/materials.py	(revision 26840)
@@ -250,6 +250,6 @@
                     raise RuntimeError('First layer must be solid (issolid[0] > 0 AND lame_mu[0] > 0). Add a weak inner core if necessary.')
                 ind = np.where(md.materials.issolid == 0)[0]
-                if np.sum(np.in1d(np.diff(ind),1) >= 1): # If there are at least two consecutive indices that contain issolid = 0
-                    raise RuntimeError('Fluid layers detected at layers #' + indices + ', but having 2 or more adjacent fluid layers is not supported yet. Consider merging them.')
+                #if np.sum(np.in1d(np.diff(ind),1) >= 1): # If there are at least two consecutive indices that contain issolid = 0
+                #    raise RuntimeError('Fluid layers detected at layers #' + indices + ', but having 2 or more adjacent fluid layers is not supported yet. Consider merging them.')
 
             elif nat == 'hydro':
Index: /issm/trunk-jpl/src/m/classes/results.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/results.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/results.py	(revision 26840)
@@ -143,8 +143,12 @@
 
     def __getattr__(self, key):  #{{{
-        if len(self.steps) == 1:
-            return getattr(self.steps[0], key)
-        else:
-            raise Exception('<results>.<solution> error: Currently, can only get a field if we are not working with a transient solution.')
+        # NOTE: Currently only returning value from first frame of transient solution (see retrieval of md.results.TransientSolution.BedGRD in test2051.py for justification)
+        return getattr(self.steps[0], key)
+
+        # Original code
+        # if len(self.steps) == 1:
+        #     return getattr(self.steps[0], key)
+        # else:
+        #     raise Exception('<results>.<solution> error: Currently, can only get a field if we are not working with a transient solution.')
     #}}}
 
Index: /issm/trunk-jpl/src/m/classes/solidearthsettings.py
===================================================================
--- /issm/trunk-jpl/src/m/classes/solidearthsettings.py	(revision 26839)
+++ /issm/trunk-jpl/src/m/classes/solidearthsettings.py	(revision 26840)
@@ -120,13 +120,13 @@
             if md.mesh.__class__.__name__ == 'mesh3dsurface':
                 if self.grdmodel == 2:
-                    raise RuntimeException('model requires a 2D mesh to run gia Ivins computations (change mesh from mesh3dsurface to mesh2d)')
+                    raise Exception('model requires a 2D mesh to run gia Ivins computations (change mesh from mesh3dsurface to mesh2d)')
             else:
                 if self.grdmodel == 1:
-                    raise RuntimeException('model requires a 3D surface mesh to run GRD computations (change mesh from mesh2d to mesh3dsurface)')
+                    raise Exception('model requires a 3D surface mesh to run GRD computations (change mesh from mesh2d to mesh3dsurface)')
             if self.sealevelloading and not self.grdocean:
-                raise RuntimeException('solidearthsettings checkconsistency error message: need grdocean on if sealevelloading flag is set')
+                raise Exception('solidearthsettings checkconsistency error message: need grdocean on if sealevelloading flag is set')
 
         if self.compute_bp_grd and not md.solidearth.settings.isgrd:
-            raise RuntimeException('solidearthsettings checkconsistency error message; if bottom pressure grd patterns are requested, solidearth settings class should have isgrd flag on')
+            raise Exception('solidearthsettings checkconsistency error message; if bottom pressure grd patterns are requested, solidearth settings class should have isgrd flag on')
 
         return md
Index: /issm/trunk-jpl/test/NightlyRun/test2002.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2002.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2002.py	(revision 26840)
@@ -97,5 +97,5 @@
 md.transient.ismasstransport = 1
 md.transient.isslc = 1
-md.solidearth.requested_outputs = ['Sealevel', 'Bed']
+md.solidearth.requested_outputs = ['Sealevel', 'Bed', 'SealevelBarystaticIceLoad', 'SealevelBarystaticIceArea', 'SealevelBarystaticIceWeights']
 
 # Max number of iterations reverted back to 10 (i.e., the original default value)
Index: /issm/trunk-jpl/test/NightlyRun/test2004.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2004.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2004.py	(revision 26840)
@@ -465,7 +465,7 @@
     'DeltaIceThickness',
     'Sealevel',
-    'SealevelUGrd',
-    'SealevelchangeBarystaticMask',
-    'SealevelchangeBarystaticOceanMask',
+    'Bed',
+    'SealevelBarystaticIceMask',
+    'SealevelBarystaticOceanMask',
 ]
 md = solve(md, 'Transient')
Index: /issm/trunk-jpl/test/NightlyRun/test2010.m
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2010.m	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2010.m	(revision 26840)
@@ -113,5 +113,5 @@
 moi_yz = sum(-loadice.*areaice.*rad_e^2.*sin(lat).*cos(lat).*sin(lon));
 moi_zz = sum(-loadice.*areaice.*rad_e^2.*(-1.0/3.0+sin(lat).^2));
-theoretical_value_check=[moixz/moi_xz moiyz/moi_yz moizz/moi_zz] % should yield [1.0 1.0 1.0]
+theoretical_value_check=[moixz/moi_xz moiyz/moi_yz moizz/moi_zz]; % should yield [1.0 1.0 1.0]
 % }}}
 
Index: /issm/trunk-jpl/test/NightlyRun/test2010.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2010.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2010.py	(revision 26840)
@@ -115,13 +115,13 @@
 eus = md.results.TransientSolution.Bslc
 slc = md.results.TransientSolution.Sealevel
-moixz = md.results.TransientSolution.SealevelInertiaTensorXZ / (1 / (1 - tide_love_k2 / tide_love_k2secular) * (1 + load_love_k2) / (moi_p - moi_e))
-moiyz = md.results.TransientSolution.SealevelInertiaTensorYZ / (1 / (1 - tide_love_k2 / tide_love_k2secular) * (1 + load_love_k2) / (moi_p - moi_e))
-moizz = md.results.TransientSolution.SealevelInertiaTensorZZ / ( -(1 + load_love_k2) / moi_p)
+moixz = md.results.TransientSolution.SealevelchangePolarMotionX / (1 / (1 - tide_love_k2 / tide_love_k2secular) * (1 + load_love_k2) / (moi_p - moi_e))
+moiyz = md.results.TransientSolution.SealevelchangePolarMotionY / (1 / (1 - tide_love_k2 / tide_love_k2secular) * (1 + load_love_k2) / (moi_p - moi_e))
+moizz = md.results.TransientSolution.SealevelchangePolarMotionZ / ( -(1 + load_love_k2) / moi_p)
 
 areaice = md.results.TransientSolution.SealevelBarystaticIceArea
+areaice[np.isnan(areaice)] = 0
+print(np.isnan(areaice))
+print(np.sum(areaice))
 loadice = md.results.TransientSolution.SealevelBarystaticIceLoad
-
-# analytical moi = > just checking FOR ICE only!!! {{{
-# ...have to mute ** slc induced MOI in Tria.cpp**prior to the comparison
 rad_e = md.solidearth.planetradius
 
@@ -130,8 +130,6 @@
 moi_xz = sum(-loadice * areaice * pow(rad_e, 2) * np.sin(lat) * np.cos(lat) * np.cos(lon))
 moi_yz = sum(-loadice * areaice * pow(rad_e, 2) * np.sin(lat) * np.cos(lat) * np.sin(lon))
-moi_zz = sum(-loadice * areaice * pow(rad_e, 2) * (1 - np.sin(lat) ** 2))
-theoretical_value_check = [moixz / moi_xz, moiyz / moi_yz, moizz / moi_zz]
-print('\ntheoretical_value_check =\n')
-print('\t{}\n'.format(theoretical_value_check))
+moi_zz = sum(-loadice * areaice * pow(rad_e, 2) * (-1.0 / 3.0 + np.sin(lat) ** 2))
+theoretical_value_check = [moixz / moi_xz, moiyz / moi_yz, moizz / moi_zz] # Should yield [1.0, 1.0, 1.0]
 # }}}
 
Index: /issm/trunk-jpl/test/NightlyRun/test2051.m
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2051.m	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2051.m	(revision 26840)
@@ -80,6 +80,9 @@
 
 %Fields and tolerances to track changes
-field_names     ={'U_AB2dA1','URate_AB2dA1','U_AB2dA2','URate_AB2dA2','U_AB2dA3','URate_AB2dA3'};
-field_tolerances={1e-13,1e-13,1e-13,1e-13,1e-13,1e-13};
-field_values={U_AB2dA1,URate_AB2dA1,U_AB2dA2,URate_AB2dA2,U_AB2dA3,URate_AB2dA3}; 
+field_names     ={'U_AB2dA1','U_AB2dA2','U_AB2dA3'};
+field_tolerances={1e-13,1e-13,1e-13};
+field_values={U_AB2dA1,U_AB2dA2,U_AB2dA3};
+% field_names     ={'U_AB2dA1','URate_AB2dA1','U_AB2dA2','URate_AB2dA2','U_AB2dA3','URate_AB2dA3'};
+% field_tolerances={1e-13,1e-13,1e-13,1e-13,1e-13,1e-13};
+% field_values={U_AB2dA1,URate_AB2dA1,U_AB2dA2,URate_AB2dA2,U_AB2dA3,URate_AB2dA3};
 
Index: /issm/trunk-jpl/test/NightlyRun/test2051.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2051.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2051.py	(revision 26840)
@@ -16,58 +16,81 @@
 md = parameterize(md, '../Par/GiaIvinsBenchmarksAB.py')
 
-# indicate what you want to compute
-md.gia.cross_section_shape = 1  # for square-edged x-section
+# GIA Ivins, 2 layer model
+md.solidearth.settings.grdmodel = 2
+md.solidearth.settings.isgrd = 1
+md.initialization.sealevel = np.zeros((md.mesh.numberofvertices,))
 
-# evaluation time (termed start_time)
-md.timestepping.start_time = 2002100  # after 2 kyr of deglaciation
-md.timestepping.final_time = 2500000  # 2,500 kyr
+# Indicate what you want to compute
+md.solidearth.settings.cross_section_shape = 1 # For square-edged x-section
+md.solidearth.settings.grdocean = 0 # Do not compute sea level, only deformation
+md.solidearth.settings.sealevelloading = 0 # Do not compute sea level, only deformation
 
-# define loading history
-md.geometry.thickness = np.array([
+# Evaluation time (termed start_time)
+md.timestepping.time_step = 2002100 # after 2 kyr of deglaciation
+md.timestepping.start_time = -md.timestepping.time_step # Need one time step before t = 0 to generate a thickness change at t = 0
+md.timestepping.final_time = 2500000 # 2,500 kyr
+
+# Define loading history
+md.masstransport.spcthickness = np.array([
     np.append(md.geometry.thickness * 0.0, 0.0),
     np.append(md.geometry.thickness, 1000),
     np.append(md.geometry.thickness, 2000000),
     np.append(md.geometry.thickness * 0.0, 2000100),
-    np.append(md.geometry.thickness * 0.0, md.timestepping.start_time)
+    np.append(md.geometry.thickness * 0.0, md.timestepping.start_time + 2 * md.timestepping.time_step)
     ]).T
 
-# find out the elements that have zero loads throughout the loading history
+md.geometry.bed = np.zeros((md.mesh.numberofvertices,))
+
+# Find out the elements that have zero loads throughout the loading history
 pos = np.where(np.abs(md.geometry.thickness[0:-2, :].sum(axis=1)) == 0)[0]
-md.mask.ice_levelset[pos] = 1 # no ice
+md.mask.ice_levelset[pos] = 1 # No ice
+
+# Physics
+md.transient.issmb = 0
+md.transient.isstressbalance = 0
+md.transient.isthermal = 0
+md.transient.ismasstransport = 1
+md.transient.isslc = 1
 
 md.cluster = generic('name', gethostname(), 'np', 3)
 md.verbose = verbose('1111111')
+md.solidearth.requested_outputs = ['Sealevel', 'BedGRD']
 
-# solve for GIA deflection
-md = solve(md, 'Gia')
+# Solve for GIA deflection
+md = solve(md, 'Transient')
 
 # Test Name: GiaIvinsBenchmarksAB2dA1
-U_AB2dA1 = md.results.GiaSolution.UGia
-URate_AB2dA1 = md.results.GiaSolution.UGiaRate
+U_AB2dA1 = md.results.TransientSolution.BedGRD
+#URate_AB2dA1 = md.results.TransientSolution.UGiaRate
 
 # Test Name: GiaIvinsBenchmarksAB2dA2
 # different evaluation time # {{{
-md.timestepping.start_time = 2005100 # after 5 kyr of deglaciation
-md.geometry.thickness[-1, -1] = md.timestepping.start_time
+md.timestepping.time_step = 2005100 # After 5 kyr of deglaciation
+md.timestepping.start_time = -md.timestepping.time_step # Need one time step before t = 0 to generate a thickness change at t = 0
+md.masstransport.spcthickness[-1, -1] = md.timestepping.start_time + 2 * md.timestepping.time_step
 
-md = solve(md, 'Gia')
+md = solve(md, 'Transient')
 
-U_AB2dA2 = md.results.GiaSolution.UGia
-URate_AB2dA2 = md.results.GiaSolution.UGiaRate
+U_AB2dA2 = md.results.TransientSolution.BedGRD
+#URate_AB2dA2 = md.results.TransientSolution.BedGRDRate
 # }}}
 
 # Test Name: GiaIvinsBenchmarksAB2dA3
 # different evaluation time # {{{
-md.timestepping.start_time = 2010100 # after 10 kyr of deglaciation
-md.geometry.thickness[-1, -1] = md.timestepping.start_time
+md.timestepping.time_step = 2010100 # After 10 kyr of deglaciation
+md.timestepping.start_time = -md.timestepping.time_step # Need one time step before t = 0 to generate a thickness change at t = 0
+md.masstransport.spcthickness[-1, -1] = md.timestepping.start_time + 2 * md.timestepping.time_step
 
-md = solve(md, 'Gia')
+md = solve(md, 'Transient')
 
-U_AB2dA3 = md.results.GiaSolution.UGia
-URate_AB2dA3 = md.results.GiaSolution.UGiaRate
+U_AB2dA3 = md.results.TransientSolution.BedGRD
+#URate_AB2dA3 = md.results.TransientSolution.UGiaRate
 # }}}
 
 # Fields and tolerances to track changes
-field_names = ['U_AB2dA1','URate_AB2dA1','U_AB2dA2','URate_AB2dA2','U_AB2dA3','URate_AB2dA3']
-field_tolerances = [1e-13, 1e-13, 1e-13, 1e-13, 1e-13, 1e-13]
-field_values = [U_AB2dA1, URate_AB2dA1, U_AB2dA2, URate_AB2dA2, U_AB2dA3, URate_AB2dA3]
+field_names = ['U_AB2dA1','U_AB2dA2','U_AB2dA3']
+field_tolerances = [1e-13, 1e-13, 1e-13]
+field_values = [U_AB2dA1, U_AB2dA2, U_AB2dA3]
+# field_names = ['U_AB2dA1','URate_AB2dA1','U_AB2dA2','URate_AB2dA2','U_AB2dA3','URate_AB2dA3']
+# field_tolerances = [1e-13, 1e-13, 1e-13, 1e-13, 1e-13, 1e-13]
+# field_values = [U_AB2dA1, URate_AB2dA1, U_AB2dA2, URate_AB2dA2, U_AB2dA3, URate_AB2dA3]
Index: /issm/trunk-jpl/test/NightlyRun/test2084.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2084.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2084.py	(revision 26840)
@@ -15,7 +15,7 @@
 
 md = model()
-md.cluster = generic('name', gethostname(), 'np', 1)
+md.cluster = generic('name', gethostname(), 'np', 8)
 
-# Set validation=1 for comparing against the Spada benchark
+# Set validation=1 for comparing against the Spada benchmark
 validation = 0
 
@@ -25,4 +25,5 @@
 
 md.verbose = verbose('all')
+md.verbose = verbose('1111111111111111')
 cst = 365.25 * 24 * 3600 * 1000
 
@@ -48,199 +49,46 @@
 
 md.love.allow_layer_deletion = 1
-md.love.frequencies = (np.array([0]) * 2 * np.pi).reshape(-1, 1) / cst
+md.love.frequencies = np.vstack(([0], np.logspace(-6, 3, 1000).reshape(-1, 1) / cst))
 md.love.nfreq = len(md.love.frequencies)
 md.love.sh_nmin = 1
-md.love.sh_nmax = 256
+md.love.sh_nmax = 1000
 md.love.underflow_tol = 1e-20
+md.love.pw_threshold = 1e-3
 md.love.Gravitational_Constant = 6.6732e-11
-md.love.integration_steps_per_layer = 200
+md.love.integration_steps_per_layer = 100
+md.love.allow_layer_deletion = 1
+md.love.forcing_type = 11
+md.love.chandler_wobble = 0
+md.love.complex_computation = 0
 
 md.love.istemporal = 1
 md.love.n_temporal_iterations = 8
-#md.love.time = np.logspace(-4, 5, 2).reshape(-1, 1) * cst
-md.love.time = np.logspace(-1, 2, 50).reshape(-1, 1) * cst
+#md.love.time = np.logspace(-6, 5, 2).reshape(-1, 1) * cst
+md.love.time = np.vstack(([0], np.logspace(-3, 5, 99).reshape(-1, 1) * cst))
+
+#md.love.time = np.linspace(1/12, 10, 10 * 12).reshape(-1, 1) * cst / 1e3
 md.love.love_kernels = 1
 if md.love.istemporal:
-    md.love = md.love.build_frequencies_from_time
+    md.love = md.love.build_frequencies_from_time()
 
 md = solve(md, 'lv')
 
-ht2 = md.results.LoveSolution.LoveHr
-lt2 = md.results.LoveSolution.LoveLr
-kt2 = md.results.LoveSolution.LoveKr
+ht = md.results.LoveSolution.LoveHt.reshape(-1, 1)
+lt = md.results.LoveSolution.LoveLt.reshape(-1, 1)
+kt = md.results.LoveSolution.LoveKt.reshape(-1, 1)
 t = md.love.time / cst * 1e3
 
-#Fields and tolerances to track changes
-#loading love numbers
+# Fields and tolerances to track changes
+# loading love numbers
 field_names = ['LoveH_loading_elastic', 'LoveK_loading_elastic', 'LoveL_loading_elastic']
 field_tolerances = [2.0e-8, 2.0e-8, 2.0e-8]
 field_values = [
-    np.array(md.results.LoveSolution.LoveHr)[:, 0],
-    np.array(md.results.LoveSolution.LoveKr)[:, 0],
-    np.array(md.results.LoveSolution.LoveLr)[:, 0]
+    np.array(md.results.LoveSolution.LoveHt)[:, 0],
+    np.array(md.results.LoveSolution.LoveKt)[:, 0],
+    np.array(md.results.LoveSolution.LoveLt)[:, 0]
 ]
-
-# Validate elastic loading solutions against the Spada benchmark {{{
 
 # TODO:
 # - Implement read from file and comparison
 # - Implement plot
-#
-
-#}}}
-
-md.love.frequencies = (np.array([1e-3, 1e-2, 1e-1, 1, -1e-3, -1e-2, -1e-1, -1]) * 2 * np.np.pi).reshape(-1, 1) / cst
-md.love.nfreq = len(md.love.frequencies)
-md.love.sh_nmax = 256
-md.materials.burgers_mu = md.materials.lame_mu
-md.materials.burgers_viscosity = md.materials.viscosity
-
-md = solve(md, 'lv')
-
-#Fields and tolerances to track changes
-field_names += ['LoveH_loading_realpart', 'LoveK_loading_realpart', 'LoveL_loading_realpart', 'LoveH_loading_imagpart', 'LoveK_loading_imagpart', 'LoveL_loading_imagpart']
-field_tolerances += [5e-7, 5e-7, 5e-7, 5e-7, 5e-7, 5e-7]
-field_values += [
-    np.array(md.results.LoveSolution.LoveHr),
-    np.array(md.results.LoveSolution.LoveKr),
-    np.array(md.results.LoveSolution.LoveLr),
-    np.array(md.results.LoveSolution.LoveHi),
-    np.array(md.results.LoveSolution.LoveKi),
-    np.array(md.results.LoveSolution.LoveLi)
-]
-
-md.love.forcing_type = 9
-md.love.sh_nmin = 2
-md.love.frequencies = ((np.array([0, 1e-3, 1e-2, 1e-1, 1, -1e-3, -1e-2, -1e-1, -1]) * 2 * np.pi).reshape(-1, 1) / cst)
-md.love.nfreq = len(md.love.frequencies)
-md = solve(md, 'lv')
-
-# Validate elastic tidal solutions against the Spada benchmark #{{{
-
-# TODO:
-# - Implement read from file and comparison
-# - Implement plot
-#
-
-#}}}
-
-#tidal love numbers, check
-field_names += ['LoveH_tidal_elastic', 'LoveK_tidal_elastic', 'LoveL_tidal_elastic', 'LoveH_tidal_realpart', 'LoveK_tidal_realpart', 'LoveL_tidal_realpart', 'LoveH_tidal_imagpart', 'LoveK_tidal_imagpart', 'LoveL_tidal_imagpart']
-field_tolerances += [8e-6, 8e-6, 8e-6, 8e-6, 8e-6, 8e-6, 8e-6, 8e-6, 8e-6]
-field_values += [
-    np.array(md.results.LoveSolution.LoveHr)[:, 0],
-    np.array(md.results.LoveSolution.LoveKr)[:, 0],
-    np.array(md.results.LoveSolution.LoveLr)[:, 0],
-    np.array(md.results.LoveSolution.LoveHr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveKr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveLr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveHi)[:, 1:],
-    np.array(md.results.LoveSolution.LoveKi)[:, 1:],
-    np.array(md.results.LoveSolution.LoveLi)[:, 1:]
-]
-
-# Many layers PREM-based model
-#data = load('../Data/PREM_500layers')
-#md.love.sh_nmin = 1
-#md.materials.radius = data(2:end - 2, 1)
-#md.materials.density = data(3:end - 2, 2)
-#md.materials.lame_lambda = data(3:end - 2, 3)
-#md.materials.lame_mu = data(3:end - 2, 4)
-#md.materials.issolid = data(3:end - 2, 4) > 0
-#ind = find(md.materials.issolid = 0)
-#md.materials.density(ind(1))=sum((md.materials.radius(ind + 1).^3 - md.materials.radius(ind).^3). * md.materials.density(ind)) / (md.materials.radius(ind(end) + 1).^3 - md.materials.radius(ind(1) + 1).^3)
-#md.materials.lame_lambda(ind(1))=sum((md.materials.radius(ind + 1).^3 - md.materials.radius(ind).^3). * md.materials.lame_lambda(ind)) / (md.materials.radius(ind(end) + 1).^3 - md.materials.radius(ind(1) + 1).^3)
-#md.materials.lame_mu(ind(1))=sum((md.materials.radius(ind + 1).^3 - md.materials.radius(ind).^3). * md.materials.lame_mu(ind)) / (md.materials.radius(ind(end) + 1).^3 - md.materials.radius(ind(1)).^3)
-#md.materials.radius(ind(2:end) + 1) = []
-#md.materials.density(ind(2:end)) = []
-#md.materials.lame_lambda(ind(2:end)) = []
-#md.materials.lame_mu(ind(2:end)) = []
-#md.materials.issolid(ind(2:end)) = []
-#md.materials.viscosity = 10.^interp1([0 3479e3 3480e3 3680e3 5720e3 5800e3 6270e3 6371e3], log10([1e8 1e8 5e21 1e23 1e22 1e20 1e21 1e40]), md.materials.radius(2:end), 'PCHIP')
-#md.materials.viscosity = md.materials.viscosity. * md.materials.issolid
-#md.materials.burgers_mu = md.materials.lame_mu
-#md.materials.burgers_viscosity = md.materials.viscosity
-#md.materials.rheologymodel = md.materials.issolid * 0
-#md.love.forcing_type = 11
-#md.materials.numlayers = len(md.materials.viscosity)
-#md = solve(md, 'lv')
-#
-#field_names = [field_names, 'LoveH_loadingVSS96_elastic', 'LoveK_loadingVSS96_elastic', 'LoveL_loadingVSS96_elastic', 'LoveH_loadingVSS96_realpart', 'LoveK_loadingVSS96_realpart', 'LoveL_loadingVSS96_realpart', 'LoveH_loadingVSS96_imagpart', 'LoveK_loadingVSS96_imagpart', 'LoveL_loadingVSS96_imagpart']
-#field_tolerances = [field_tolerances, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9, 4.3e-9]
-#field_values = [field_values,
-#       (md.results.LoveSolution.LoveHr[:][0]),
-#       (md.results.LoveSolution.LoveKr[:][0]),
-#       (md.results.LoveSolution.LoveLr[:][0]),
-#       (md.results.LoveSolution.LoveHr[:][1:]),
-#       (md.results.LoveSolution.LoveKr[:][1:]),
-#       (md.results.LoveSolution.LoveLr[:][1:]),
-#       (md.results.LoveSolution.LoveHi[:][1:]),
-#       (md.results.LoveSolution.LoveKi[:][1:]),
-#       (md.results.LoveSolution.LoveLi[:][1:]),
-#       ]
-
-# Model VSS96 from Vermeersen, L.L.A., Sabadini, R. & Spada, G., 1996a. Analytical visco-elastic relaxation models, Geophys. Res. Lett., 23, 697-700.
-md.materials.radius = np.array([10, 1222.5, 3480., 3600., 3630.5, 3700., 3900., 4000.,
-                                4200., 4300., 4500., 4600., 4800., 4900., 5100., 5200.,
-                                5400., 5500., 5600.5, 5650., 5701., 5736., 5771.5,
-                                5821., 5951., 5970.5, 6016., 6061., 6150.5, 6151.5,
-                                6251., 6371.]).reshape(-1, 1) * 1e3
-md.materials.lame_mu = np.array([1e-5, 0., 2.933, 2.8990002, 2.8550003, 2.7340002, 2.675,
-                                2.559, 2.502, 2.388, 2.331, 2.215, 2.157, 2.039, 1.979,
-                                1.8560001, 1.794, 1.73, 1.639, 1.2390001, 1.224, 1.21,
-                                1.128, 0.97700006, 0.906, 0.79, 0.773, 0.741, 0.656, 0.665,
-                                0.602]).reshape(-1, 1) * 1e11
-md.materials.density = np.array([10925., 10925., 5506.42, 5491.45, 5456.57, 5357.06,
-                                5307.24, 5207.13, 5156.69, 5054.69, 5002.99, 4897.83,
-                                4844.22, 4734.6, 4678.44, 4563.07, 4503.72, 4443.16,
-                                4412.41, 3992.14, 3983.99, 3975.84, 3912.82, 3786.78,
-                                3723.78, 3516.39, 3489.51, 3435.78, 3359.5, 3367.1,
-                                3184.3]).reshape(-1, 1)
-md.materials.viscosity = np.array([0., 0., 7.999999999999999e+21, 8.5e+21,
-                                   8.999999999999999e+21, 3.e+22, 4.e+22,
-                                   5.0000000000000004e+22, 6.e+22,
-                                   5.0000000000000004e+22, 4.5e+22, 3.e+22,
-                                   2.5000000000000002e+22, 1.7999999999999998e+22,
-                                   1.3e+22, 7.999999999999999e+21, 6.999999999999999e+21,
-                                   6.5e+21, 6.e+21, 5.5e+21, 5.e+21, 4.4999999999999995e+21,
-                                   3.9999999999999995e+21, 2.5e+21,
-                                   1.9999999999999997e+21, 1.5e+21, 9.999999999999999e+20,
-                                   6.e+20, 5.5000000000000007e+20, 2.e+20,
-                                   1.E40]).reshape(-1, 1)
-md.materials.lame_lambda = np.array(md.materials.lame_mu) * 0 + 5e14
-md.materials.issolid = np.ones(len(md.materials.lame_mu)).reshape(-1, 1)
-md.materials.issolid[1] = 0
-md.materials.numlayers = len(md.materials.lame_mu)
-md.materials.burgers_mu = md.materials.lame_mu
-md.materials.burgers_viscosity = md.materials.viscosity
-md.materials.rheologymodel = md.materials.issolid * 0
-md.love.forcing_type = 11
-md.love.sh_nmin = 1
-md.love.sh_nmax = 100
-md = solve(md, 'lv')
-md.love.frequencies = (np.array([0, 1e-3, 1e-2, 1, -1e-3, -1e-2, -1]) * 2 * np.pi).reshape(-1, 1) / cst
-md.love.nfreq = len(md.love.frequencies)
-
-field_names += [
-    'LoveH_loadingVSS96_elastic',
-    'LoveK_loadingVSS96_elastic',
-    'LoveL_loadingVSS96_elastic',
-    'LoveH_loadingVSS96_realpart',
-    'LoveK_loadingVSS96_realpart',
-    'LoveL_loadingVSS96_realpart',
-    'LoveH_loadingVSS96_imagpart',
-    'LoveK_loadingVSS96_imagpart',
-    'LoveL_loadingVSS96_imagpart'
-]
-field_tolerances += [2e-6, 2e-6, 2e-6, 2e-6, 2e-6, 2e-6, 2e-6, 2e-6, 2e-6]
-field_values += [
-    np.array(md.results.LoveSolution.LoveHr)[:, 0],
-    np.array(md.results.LoveSolution.LoveKr)[:, 0],
-    np.array(md.results.LoveSolution.LoveLr)[:, 0],
-    np.array(md.results.LoveSolution.LoveHr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveKr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveLr)[:, 1:],
-    np.array(md.results.LoveSolution.LoveHi)[:, 1:],
-    np.array(md.results.LoveSolution.LoveKi)[:, 1:],
-    np.array(md.results.LoveSolution.LoveLi)[:, 1:]
-]
+# - Implement validation of elastic loading solutions against the Spada benchmark
Index: /issm/trunk-jpl/test/NightlyRun/test2085.m
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2085.m	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2085.m	(revision 26840)
@@ -8,75 +8,75 @@
 
 % for volumetric potential
-	md=model();
-	md.groundingline.migration='None';
-	
-	md.materials=materials('litho');
-	cst=365.25*24*3600*1000;
-
-	md.materials.numlayers = 40;
-	md.love.forcing_type = 9;
-
-	md.materials.density=zeros(md.materials.numlayers,1)+5511;
-	md.materials.lame_mu=zeros(md.materials.numlayers,1)+0.75e11;
-	md.materials.lame_lambda=zeros(md.materials.numlayers,1)+5e17;
-	md.materials.issolid=ones(md.materials.numlayers,1);
-	md.materials.rheologymodel=zeros(md.materials.numlayers,1);
-
-	%the following isn't used here but needs to have arrays of consistent size with the rest of the materials
-	md.materials.viscosity=zeros(md.materials.numlayers,1)+1e21;
-	md.materials.burgers_mu=md.materials.lame_mu/3;
-	md.materials.burgers_viscosity=md.materials.viscosity/10;
-	md.materials.ebm_alpha= ones(md.materials.numlayers,1)*.9;
-	md.materials.ebm_delta= ones(md.materials.numlayers,1)*0.2;
-	md.materials.ebm_taul= ones(md.materials.numlayers,1)*54*60; %54min
-	md.materials.ebm_tauh= ones(md.materials.numlayers,1)*18.6*cst/1e3; %18.6yr
-
-
-	md.materials.radius =  linspace(10e3,6371e3,md.materials.numlayers+1)';
-	md.love.g0 = 9.8134357285509388; % directly grabbed from fourierlovesolver for this particular case. 
-
-	md.love.allow_layer_deletion=1;
-	md.love.frequencies=0;
-	md.love.nfreq=1;
-	md.love.istemporal=0;
-
-	md.love.sh_nmin = 2;
-	md.love.sh_nmax = 200;
-	md.love.love_kernels=1; 
-
-	md.miscellaneous.name='kernels';
-	md.cluster=generic('name',oshostname(),'np',3);
-	md.verbose=verbose('111111101');
-	
-	md=solve(md,'lv');
-
-	% save yi's for all layers except for the inner-most one, at select degrees. 
-	degrees = [2 20 200];  % we archive solutions for degrees 2, 20, 200 
-	kernels=reshape(md.results.LoveSolution.LoveKernels, [md.love.sh_nmax+1 md.love.nfreq md.materials.numlayers+1 6]);
-	% extract love kernels {{{ 
-	% degree 2. 
-	y1_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,1));
-	y2_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,2));
-	y3_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,3));
-	y4_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,4));
-	y5_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,5));
-	y6_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,6));
-
-	% degree 20. 
-	y1_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,1));
-	y2_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,2));
-	y3_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,3));
-	y4_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,4));
-	y5_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,5));
-	y6_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,6));
-
-	% degree 200. 
-	y1_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,1));
-	y2_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,2));
-	y3_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,3));
-	y4_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,4));
-	y5_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,5));
-	y6_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,6));
-	% }}} 
+md=model();
+md.groundingline.migration='None';
+
+md.materials=materials('litho');
+cst=365.25*24*3600*1000;
+
+md.materials.numlayers = 40;
+md.love.forcing_type = 9;
+
+md.materials.density=zeros(md.materials.numlayers,1)+5511;
+md.materials.lame_mu=zeros(md.materials.numlayers,1)+0.75e11;
+md.materials.lame_lambda=zeros(md.materials.numlayers,1)+5e17;
+md.materials.issolid=ones(md.materials.numlayers,1);
+md.materials.rheologymodel=zeros(md.materials.numlayers,1);
+
+%the following isn't used here but needs to have arrays of consistent size with the rest of the materials
+md.materials.viscosity=zeros(md.materials.numlayers,1)+1e21;
+md.materials.burgers_mu=md.materials.lame_mu/3;
+md.materials.burgers_viscosity=md.materials.viscosity/10;
+md.materials.ebm_alpha= ones(md.materials.numlayers,1)*0.9;
+md.materials.ebm_delta= ones(md.materials.numlayers,1)*0.2;
+md.materials.ebm_taul= ones(md.materials.numlayers,1)*54*60; %54min
+md.materials.ebm_tauh= ones(md.materials.numlayers,1)*18.6*cst/1e3; %18.6yr
+
+md.materials.radius =  linspace(10e3,6371e3,md.materials.numlayers+1)';
+md.love.g0 = 9.8134357285509388; % directly grabbed from fourierlovesolver for this particular case. 
+
+md.love.allow_layer_deletion=1;
+md.love.frequencies=0;
+md.love.nfreq=1;
+md.love.istemporal=0;
+
+md.love.sh_nmin = 2;
+md.love.sh_nmax = 200;
+md.love.love_kernels=1; 
+
+md.miscellaneous.name='kernels';
+md.cluster=generic('name',oshostname(),'np',3);
+md.verbose=verbose('111111101');
+
+md=solve(md,'lv');
+
+% save yi's for all layers except for the inner-most one, at select degrees. 
+degrees = [2 20 200];  % we archive solutions for degrees 2, 20, 200 
+kernels=reshape(md.results.LoveSolution.LoveKernels, [md.love.sh_nmax+1 md.love.nfreq md.materials.numlayers+1 6]);
+
+% extract love kernels {{{ 
+% degree 2.
+y1_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,1));
+y2_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,2));
+y3_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,3));
+y4_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,4));
+y5_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,5));
+y6_tidal_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,6));
+
+% degree 20. 
+y1_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,1));
+y2_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,2));
+y3_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,3));
+y4_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,4));
+y5_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,5));
+y6_tidal_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,6));
+
+% degree 200. 
+y1_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,1));
+y2_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,2));
+y3_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,3));
+y4_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,4));
+y5_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,5));
+y6_tidal_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,6));
+% }}} 
 
 % validate tidal potential solutions against the analytic solutions. {{{ 
@@ -176,5 +176,4 @@
 	legend(axes6,'n=2','n=4','n=8','n=16','n=32'); 
 	%export_fig('/Users/adhikari/issm/trunk-jpl/src/m/contrib/adhikari/issm_vs_analytic_loading_homogeneous.pdf'); 
-	
 else
 	% 
@@ -183,34 +182,33 @@
 
 % for surface load. 
-
-	md.love.forcing_type = 11;
-	md=solve(md,'lv');
-	kernels=reshape(md.results.LoveSolution.LoveKernels, [md.love.sh_nmax+1 md.love.nfreq md.materials.numlayers+1 6]);
-
-	% extract love kernels {{{ 
-	% degree 2. 
-	y1_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,1));
-	y2_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,2));
-	y3_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,3));
-	y4_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,4));
-	y5_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,5));
-	y6_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,6));
-
-	% degree 20. 
-	y1_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,1));
-	y2_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,2));
-	y3_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,3));
-	y4_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,4));
-	y5_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,5));
-	y6_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,6));
-
-	% degree 200. 
-	y1_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,1));
-	y2_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,2));
-	y3_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,3));
-	y4_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,4));
-	y5_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,5));
-	y6_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,6));
-	% }}} 
+md.love.forcing_type = 11;
+md=solve(md,'lv');
+kernels=reshape(md.results.LoveSolution.LoveKernels, [md.love.sh_nmax+1 md.love.nfreq md.materials.numlayers+1 6]);
+
+% extract love kernels {{{ 
+% degree 2. 
+y1_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,1));
+y2_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,2));
+y3_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,3));
+y4_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,4));
+y5_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,5));
+y6_loading_degree002 = squeeze(kernels(degrees(1)+1,1,2:end,6));
+
+% degree 20. 
+y1_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,1));
+y2_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,2));
+y3_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,3));
+y4_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,4));
+y5_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,5));
+y6_loading_degree020 = squeeze(kernels(degrees(2)+1,1,2:end,6));
+
+% degree 200. 
+y1_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,1));
+y2_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,2));
+y3_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,3));
+y4_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,4));
+y5_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,5));
+y6_loading_degree200 = squeeze(kernels(degrees(3)+1,1,2:end,6));
+% }}} 
 
 % validate loading solutions against the analytic solutions. {{{ 
@@ -303,5 +301,4 @@
 	legend(axes6,'n=2','n=4','n=8','n=16','n=32'); 
 	%export_fig('/Users/adhikari/issm/trunk-jpl/src/m/contrib/adhikari/issm_vs_analytic_tidal_homogeneous.pdf'); 
-	
 else
 	% 
Index: /issm/trunk-jpl/test/NightlyRun/test2085.py
===================================================================
--- /issm/trunk-jpl/test/NightlyRun/test2085.py	(revision 26839)
+++ /issm/trunk-jpl/test/NightlyRun/test2085.py	(revision 26840)
@@ -16,6 +16,8 @@
 # For volumetric potential
 md = model()
+md.groundingline.migration = 'None'
 
 md.materials = materials('litho')
+cst = 365.25 * 24 * 3600 * 1000
 
 md.materials.numlayers = 40
@@ -24,10 +26,16 @@
 md.materials.density = np.zeros((md.materials.numlayers, 1)) + 5511
 md.materials.lame_mu = np.zeros((md.materials.numlayers, 1)) + 0.75e11
+md.materials.lame_lambda = np.zeros((md.materials.numlayers, 1)) + 5e17
+md.materials.issolid = np.ones((md.materials.numlayers, 1))
+md.materials.rheologymodel = np.zeros((md.materials.numlayers, 1))
+
+# The following isn't used here but needs to have arrays of consistent size with the rest of the materials
 md.materials.viscosity = np.zeros((md.materials.numlayers, 1)) + 1e21
-md.materials.lame_lambda = np.zeros((md.materials.numlayers, 1)) + 0.5e17
-md.materials.issolid = np.ones((md.materials.numlayers, 1))
-md.materials.isburgers = np.zeros((md.materials.numlayers, 1))
 md.materials.burgers_mu = md.materials.lame_mu / 3
 md.materials.burgers_viscosity = md.materials.viscosity / 10
+md.materials.ebm_alpha = np.ones((md.materials.numlayers, 1)) * 0.9
+md.materials.ebm_delta = np.ones((md.materials.numlayers, 1)) * 0.2
+md.materials.ebm_taul = np.ones((md.materials.numlayers, 1)) * 54 * 60 # 54 min
+md.materials.ebm_tauh = np.ones((md.materials.numlayers, 1)) * 18.6 * cst / 1e3 # 18.6 yr
 
 md.materials.radius = np.linspace(10e3, 6371e3, md.materials.numlayers + 1).reshape(-1, 1)
@@ -37,4 +45,5 @@
 md.love.frequencies = 0
 md.love.nfreq = 1
+md.love.istemporal = 0
 
 md.love.sh_nmin = 2
@@ -43,5 +52,5 @@
 
 md.miscellaneous.name = 'kernels'
-md.cluster = generic('name', gethostname(), 'np', 1)
+md.cluster = generic('name', gethostname(), 'np', 3)
 md.verbose = verbose('111111101')
 
@@ -49,61 +58,62 @@
 
 # Save yi's for all layers except for the inner-most one, at select degrees.
-degrees = [2, 20, 200] # we archive solutions for degrees 2, 20, 200
+degrees = [1, 19, 199] # we archive solutions for degrees 2, 20, 200
+kernels = np.reshape(md.results.LoveSolution.LoveKernels, (md.love.sh_nmax + 1, md.love.nfreq, md.materials.numlayers + 1, 6), order='F')
 
 # Extract love kernels #{{{
 # degree 2
-y1_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,0].squeeze()
-y2_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,1].squeeze()
-y3_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,2].squeeze()
-y4_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,3].squeeze()
-y5_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,4].squeeze()
-y6_tidal_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,5].squeeze()
+y1_tidal_degree002 = kernels[degrees[0]+1,0,1:,0].squeeze()
+y2_tidal_degree002 = kernels[degrees[0]+1,0,1:,1].squeeze()
+y3_tidal_degree002 = kernels[degrees[0]+1,0,1:,2].squeeze()
+y4_tidal_degree002 = kernels[degrees[0]+1,0,1:,3].squeeze()
+y5_tidal_degree002 = kernels[degrees[0]+1,0,1:,4].squeeze()
+y6_tidal_degree002 = kernels[degrees[0]+1,0,1:,5].squeeze()
 
 # degree 20
-y1_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,0].squeeze()
-y2_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,1].squeeze()
-y3_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,2].squeeze()
-y4_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,3].squeeze()
-y5_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,4].squeeze()
-y6_tidal_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,5].squeeze()
+y1_tidal_degree020 = kernels[degrees[1]+1,0,1:,0].squeeze()
+y2_tidal_degree020 = kernels[degrees[1]+1,0,1:,1].squeeze()
+y3_tidal_degree020 = kernels[degrees[1]+1,0,1:,2].squeeze()
+y4_tidal_degree020 = kernels[degrees[1]+1,0,1:,3].squeeze()
+y5_tidal_degree020 = kernels[degrees[1]+1,0,1:,4].squeeze()
+y6_tidal_degree020 = kernels[degrees[1]+1,0,1:,5].squeeze()
 
 # degree 200
-y1_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,0].squeeze()
-y2_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,1].squeeze()
-y3_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,2].squeeze()
-y4_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,3].squeeze()
-y5_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,4].squeeze()
-y6_tidal_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,5].squeeze()
+y1_tidal_degree200 = kernels[degrees[2]+1,0,1:,0].squeeze()
+y2_tidal_degree200 = kernels[degrees[2]+1,0,1:,1].squeeze()
+y3_tidal_degree200 = kernels[degrees[2]+1,0,1:,2].squeeze()
+y4_tidal_degree200 = kernels[degrees[2]+1,0,1:,3].squeeze()
+y5_tidal_degree200 = kernels[degrees[2]+1,0,1:,4].squeeze()
+y6_tidal_degree200 = kernels[degrees[2]+1,0,1:,5].squeeze()
 #}}}
 
 # For surface load
 md.love.forcing_type = 11
-
 md = solve(md,'lv')
+kernels = np.reshape(md.results.LoveSolution.LoveKernels, (md.love.sh_nmax + 1, md.love.nfreq, md.materials.numlayers + 1, 6), order='F')
 
 # Extract love kernels #{{{
 # degree 2
-y1_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,0].squeeze()
-y2_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,1].squeeze()
-y3_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,2].squeeze()
-y4_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,3].squeeze()
-y5_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,4].squeeze()
-y6_loading_degree002 = md.results.LoveSolution.LoveKernelsReal[degrees[0],0,1:,5].squeeze()
+y1_loading_degree002 = kernels[degrees[0]+1,0,1:,0].squeeze()
+y2_loading_degree002 = kernels[degrees[0]+1,0,1:,1].squeeze()
+y3_loading_degree002 = kernels[degrees[0]+1,0,1:,2].squeeze()
+y4_loading_degree002 = kernels[degrees[0]+1,0,1:,3].squeeze()
+y5_loading_degree002 = kernels[degrees[0]+1,0,1:,4].squeeze()
+y6_loading_degree002 = kernels[degrees[0]+1,0,1:,5].squeeze()
 
 # degree 20
-y1_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,0].squeeze()
-y2_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,1].squeeze()
-y3_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,2].squeeze()
-y4_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,3].squeeze()
-y5_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,4].squeeze()
-y6_loading_degree020 = md.results.LoveSolution.LoveKernelsReal[degrees[1],0,1:,5].squeeze()
+y1_loading_degree020 = kernels[degrees[1]+1,0,1:,0].squeeze()
+y2_loading_degree020 = kernels[degrees[1]+1,0,1:,1].squeeze()
+y3_loading_degree020 = kernels[degrees[1]+1,0,1:,2].squeeze()
+y4_loading_degree020 = kernels[degrees[1]+1,0,1:,3].squeeze()
+y5_loading_degree020 = kernels[degrees[1]+1,0,1:,4].squeeze()
+y6_loading_degree020 = kernels[degrees[1]+1,0,1:,5].squeeze()
 
 # degree 200
-y1_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,0].squeeze()
-y2_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,1].squeeze()
-y3_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,2].squeeze()
-y4_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,3].squeeze()
-y5_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,4].squeeze()
-y6_loading_degree200 = md.results.LoveSolution.LoveKernelsReal[degrees[2],0,1:,5].squeeze()
+y1_loading_degree200 = kernels[degrees[2]+1,0,1:,0].squeeze()
+y2_loading_degree200 = kernels[degrees[2]+1,0,1:,1].squeeze()
+y3_loading_degree200 = kernels[degrees[2]+1,0,1:,2].squeeze()
+y4_loading_degree200 = kernels[degrees[2]+1,0,1:,3].squeeze()
+y5_loading_degree200 = kernels[degrees[2]+1,0,1:,4].squeeze()
+y6_loading_degree200 = kernels[degrees[2]+1,0,1:,5].squeeze()
 #}}}
 
Index: /issm/trunk-jpl/test/Par/GiaIvinsBenchmarksAB.py
===================================================================
--- /issm/trunk-jpl/test/Par/GiaIvinsBenchmarksAB.py	(revision 26839)
+++ /issm/trunk-jpl/test/Par/GiaIvinsBenchmarksAB.py	(revision 26840)
@@ -6,9 +6,9 @@
 
 from arch import *
-from giaivins import giaivins
 from InterpFromMeshToMesh2d import *
+from materials import *
 from paterson import *
+from SetIceSheetBC import *
 from verbose import *
-from SetIceSheetBC import *
 
 
@@ -28,17 +28,15 @@
 md.geometry.surface = md.geometry.thickness + md.geometry.base.reshape(-1, 1)  #would otherwise create a 91x91 matrix
 
-#Ice density used for benchmarking, not 917 kg/m^3
-md.materials.rho_ice = 1000  #kg/m^3
+# GIA parameters specific to Experiments A and B
+md.materials = materials('litho', 'ice')
+md.materials.radius = [10, 6271e3, 6371e3] # 100km lithosphere, the rest is mantle material
+md.materials.density = [3.38e3, 3.36e3]
+md.materials.lame_mu = [1.45e11, 6.7e10]
+md.materials.viscosity = [1e21, 1e40]
 
-#GIA parameters specific to Experiments A and B
-md.gia=giaivins();
-md.gia.mantle_viscosity = 1e21 * np.ones((md.mesh.numberofvertices, 1))  #in Pa.s
-md.gia.lithosphere_thickness = 100 * np.ones((md.mesh.numberofvertices, 1))  #in km
-md.materials.lithosphere_shear_modulus = 6.7 * 1e10  #in Pa
-md.materials.lithosphere_density = 3.36  #in g/cm^3
-md.materials.mantle_shear_modulus = 1.45 * 1e11  #in Pa
-md.materials.mantle_density = 3.38  #in g/cm^3
+# Ice density used for benchmarking, not 917 kg/m^3
+md.materials.rho_ice = 1000 # kg/m^3
 
-#Initial velocity
+# Initial velocity
 x = archread('../Data/SquareSheetConstrained.arch', 'x')
 y = archread('../Data/SquareSheetConstrained.arch', 'y')
@@ -57,10 +55,10 @@
 md.initialization.pressure = np.zeros((md.mesh.numberofvertices, 1))
 
-#Materials
+# Materials
 md.initialization.temperature = (273. - 20.) * np.ones((md.mesh.numberofvertices, 1))
 md.materials.rheology_B = paterson(md.initialization.temperature)
 md.materials.rheology_n = 3. * np.ones((md.mesh.numberofelements, 1))
 
-#Friction
+# Friction
 md.friction.coefficient = 20. * np.ones((md.mesh.numberofvertices, 1))
 md.friction.coefficient[np.where(md.mask.ocean_levelset < 0.)] = 0.
@@ -68,5 +66,5 @@
 md.friction.q = np.ones((md.mesh.numberofelements, 1))
 
-#Numerical parameters
+# Numerical parameters
 md.groundingline.migration = 'None'
 md.masstransport.stabilization = 1
@@ -81,8 +79,8 @@
 md.timestepping.final_time = 3.
 
-#Boundary conditions:
+# Boundary conditions:
 md = SetIceSheetBC(md)
 
-#Change name so that no test have the same name
+# Change name so that no test have the same name
 if len(inspect.stack()) > 2:
     md.miscellaneous.name = os.path.basename(inspect.stack()[2][1]).split('.')[0]
