Changeset 24115
- Timestamp:
- 07/30/19 07:52:21 (6 years ago)
- Location:
- issm/trunk-jpl/src/m
- Files:
-
- 15 edited
Legend:
- Unmodified
- Added
- Removed
-
issm/trunk-jpl/src/m/classes/clusters/generic.py
r23716 r24115 12 12 13 13 class generic(object): 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 #write queuing script 82 83 84 85 86 87 if self.interactive: 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 else: 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 #write queuing script 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 14 """ 15 GENERIC cluster class definition 16 17 Usage: 18 cluster=generic('name','astrid','np',3); 19 cluster=generic('name',gethostname(),'np',3,'login','username'); 20 """ 21 22 def __init__(self,*args): # {{{ 23 24 self.name='' 25 self.login='' 26 self.np=1 27 self.port=0 28 self.interactive=1 29 self.codepath=IssmConfig('ISSM_PREFIX')[0]+'/bin' 30 self.executionpath=issmdir()+'/execution' 31 self.valgrind=issmdir()+'/externalpackages/valgrind/install/bin/valgrind' 32 self.valgrindlib=issmdir()+'/externalpackages/valgrind/install/lib/libmpidebug.so' 33 self.valgrindsup=issmdir()+'/externalpackages/valgrind/issm.supp' 34 35 #use provided options to change fields 36 options=pairoptions(*args) 37 38 #get name 39 self.name=socket.gethostname() 40 41 #initialize cluster using user settings if provided 42 if os.path.exists(self.name+'_settings.py'): 43 exec(compile(open(self.name+'_settings.py').read(), self.name+'_settings.py', 'exec'),globals()) 44 45 #OK get other fields 46 self=options.AssignObjectFields(self) 47 # }}} 48 def __repr__(self): # {{{ 49 # display the object 50 s ="class '%s' object '%s' = \n" % (type(self),'self') 51 s+=" name: %s\n" % self.name 
52 s+=" login: %s\n" % self.login 53 s+=" np: %i\n" % self.np 54 s+=" port: %i\n" % self.port 55 s+=" codepath: %s\n" % self.codepath 56 s+=" executionpath: %s\n" % self.executionpath 57 s+=" valgrind: %s\n" % self.valgrind 58 s+=" valgrindlib: %s\n" % self.valgrindlib 59 s+=" valgrindsup: %s\n" % self.valgrindsup 60 return s 61 # }}} 62 def checkconsistency(self,md,solution,analyses): # {{{ 63 if self.np<1: 64 md = checkmessage(md,'number of processors should be at least 1') 65 if math.isnan(self.np): 66 md = checkmessage(md,'number of processors should not be NaN!') 67 68 return md 69 # }}} 70 def BuildQueueScript(self,dirname,modelname,solution,io_gather,isvalgrind,isgprof,isdakota,isoceancoupling): # {{{ 71 72 executable='issm.exe'; 73 if isdakota: 74 version=IssmConfig('_DAKOTA_VERSION_') 75 version=float(version[0]) 76 if version>=6: 77 executable='issm_dakota.exe' 78 if isoceancoupling: 79 executable='issm_ocean.exe' 80 81 #write queuing script 82 if not m.ispc(): 83 84 fid=open(modelname+'.queue','w') 85 fid.write('#!/bin/sh\n') 86 if not isvalgrind: 87 if self.interactive: 88 if IssmConfig('_HAVE_MPI_')[0]: 89 fid.write('mpiexec -np %i %s/%s %s %s/%s %s ' % (self.np,self.codepath,executable,solution,self.executionpath,dirname,modelname)) 90 else: 91 fid.write('%s/%s %s %s/%s %s ' % (self.codepath,executable,solution,self.executionpath,dirname,modelname)) 92 else: 93 if IssmConfig('_HAVE_MPI_')[0]: 94 fid.write('mpiexec -np %i %s/%s %s %s/%s %s 2> %s.errlog >%s.outlog ' % (self.np,self.codepath,executable,solution,self.executionpath,dirname,modelname,modelname,modelname)) 95 else: 96 fid.write('%s/%s %s %s/%s %s 2> %s.errlog >%s.outlog ' % (self.codepath,executable,solution,self.executionpath,dirname,modelname,modelname,modelname)) 97 elif isgprof: 98 fid.write('\n gprof %s/%s gmon.out > %s.performance' % (self.codepath,executable,modelname)) 99 else: 100 #Add --gen-suppressions=all to get suppression lines 101 fid.write('LD_PRELOAD=%s \\\n' % 
self.valgrindlib) 102 if IssmConfig('_HAVE_MPI_')[0]: 103 fid.write('mpiexec -np %i %s --leak-check=full --suppressions=%s %s/%s %s %s/%s %s 2> %s.errlog >%s.outlog ' % \ 104 (self.np,self.valgrind,self.valgrindsup,self.codepath,executable,solution,self.executionpath,dirname,modelname,modelname,modelname)) 105 else: 106 fid.write('%s --leak-check=full --suppressions=%s %s/%s %s %s/%s %s 2> %s.errlog >%s.outlog ' % \ 107 (self.valgrind,self.valgrindsup,self.codepath,executable,solution,self.executionpath,dirname,modelname,modelname,modelname)) 108 109 if not io_gather: #concatenate the output files: 110 fid.write('\ncat %s.outbin.* > %s.outbin' % (modelname,modelname)) 111 fid.close() 112 113 else: # Windows 114 115 fid=open(modelname+'.bat','w') 116 fid.write('@echo off\n') 117 if self.interactive: 118 fid.write('"%s/%s" %s "%s/%s" %s ' % (self.codepath,executable,solution,self.executionpath,dirname,modelname)) 119 else: 120 fid.write('"%s/%s" %s "%s/%s" %s 2> %s.errlog >%s.outlog' % \ 121 (self.codepath,executable,solution,self.executionpath,dirname,modelname,modelname,modelname)) 122 fid.close() 123 124 #in interactive mode, create a run file, and errlog and outlog file 125 if self.interactive: 126 fid=open(modelname+'.errlog','w') 127 fid.close() 128 fid=open(modelname+'.outlog','w') 129 fid.close() 130 # }}} 131 def BuildKrigingQueueScript(self,modelname,solution,io_gather,isvalgrind,isgprof): # {{{ 132 133 #write queuing script 134 if not m.ispc(): 135 136 fid=open(modelname+'.queue','w') 137 fid.write('#!/bin/sh\n') 138 if not isvalgrind: 139 if self.interactive: 140 fid.write('mpiexec -np %i %s/kriging.exe %s/%s %s ' % (self.np,self.codepath,self.executionpath,modelname,modelname)) 141 else: 142 fid.write('mpiexec -np %i %s/kriging.exe %s/%s %s 2> %s.errlog >%s.outlog ' % (self.np,self.codepath,self.executionpath,modelname,modelname,modelname,modelname)) 143 elif isgprof: 144 fid.write('\n gprof %s/kriging.exe gmon.out > %s.performance' & 
(self.codepath,modelname)) 145 else: 146 #Add --gen-suppressions=all to get suppression lines 147 fid.write('LD_PRELOAD=%s \\\n' % self.valgrindlib) 148 fid.write('mpiexec -np %i %s --leak-check=full --suppressions=%s %s/kriging.exe %s/%s %s 2> %s.errlog >%s.outlog ' % \ 149 (self.np,self.valgrind,self.valgrindsup,self.codepath,self.executionpath,modelname,modelname,modelname,modelname)) 150 if not io_gather: #concatenate the output files: 151 fid.write('\ncat %s.outbin.* > %s.outbin' % (modelname,modelname)) 152 fid.close() 153 154 else: # Windows 155 156 fid=open(modelname+'.bat','w') 157 fid.write('@echo off\n') 158 if self.interactive: 159 fid.write('"%s/issm.exe" %s "%s/%s" %s ' % (self.codepath,solution,self.executionpath,modelname,modelname)) 160 else: 161 fid.write('"%s/issm.exe" %s "%s/%s" %s 2> %s.errlog >%s.outlog' % \ 162 (self.codepath,solution,self.executionpath,modelname,modelname,modelname,modelname)) 163 fid.close() 164 165 #in interactive mode, create a run file, and errlog and outlog file 166 if self.interactive: 167 fid=open(modelname+'.errlog','w') 168 fid.close() 169 fid=open(modelname+'.outlog','w') 170 fid.close() 171 # }}} 172 def UploadQueueJob(self,modelname,dirname,filelist): # {{{ 173 174 #compress the files into one zip. 
175 compressstring='tar -zcf %s.tar.gz ' % dirname 176 for file in filelist: 177 compressstring += ' %s' % file 178 if self.interactive: 179 compressstring += ' %s.errlog %s.outlog ' % (modelname,modelname) 180 subprocess.call(compressstring,shell=True) 181 182 print('uploading input file and queueing script') 183 issmscpout(self.name,self.executionpath,self.login,self.port,[dirname+'.tar.gz']) 184 185 # }}} 186 def LaunchQueueJob(self,modelname,dirname,filelist,restart,batch): # {{{ 187 188 print('launching solution sequence on remote cluster') 189 if restart: 190 launchcommand='cd %s && cd %s chmod 777 %s.queue && ./%s.queue' % (self.executionpath,dirname,modelname,modelname) 191 else: 192 if batch: 193 launchcommand='cd %s && rm -rf ./%s && mkdir %s && cd %s && mv ../%s.tar.gz ./ && tar -zxf %s.tar.gz' % \ 194 (self.executionpath,dirname,dirname,dirname,dirname,dirname) 195 else: 196 launchcommand='cd %s && rm -rf ./%s && mkdir %s && cd %s && mv ../%s.tar.gz ./ && tar -zxf %s.tar.gz && chmod 777 %s.queue && ./%s.queue' % \ 197 (self.executionpath,dirname,dirname,dirname,dirname,dirname,modelname,modelname) 198 issmssh(self.name,self.login,self.port,launchcommand) 199 # }}} 200 def Download(self,dirname,filelist): # {{{ 201 202 if m.ispc(): 203 #do nothing 204 return 205 206 #copy files from cluster to current directory 207 directory='%s/%s/' % (self.executionpath,dirname) 208 issmscpin(self.name,self.login,self.port,directory,filelist) 209 # }}} -
issm/trunk-jpl/src/m/classes/levelset.py
r24039 r24115 4 4 from WriteData import WriteData 5 5 6 6 7 class levelset(object): 7 8 8 """ 9 LEVELSET class definition 9 10 10 11 12 11 Usage: 12 levelset=levelset(); 13 """ 13 14 14 def __init__(self):# {{{15 def __init__(self): # {{{ 15 16 16 self.stabilization= 017 self.spclevelset= float('NaN')18 19 self.kill_icebergs= 020 self.calving_max= 0.21 self.fe= 'P1'17 self.stabilization = 0 18 self.spclevelset = float('NaN') 19 self.reinit_frequency = 0 20 self.kill_icebergs = 0 21 self.calving_max = 0. 22 self.fe = 'P1' 22 23 23 24 24 #set defaults 25 self.setdefaultparameters() 25 26 26 27 def __repr__(self):# {{{28 string=' Level-set parameters:'29 string="%s\n%s"%(string,fielddisplay(self,'stabilization','0: no, 1: artificial_diffusivity, 2: streamline upwinding'))30 string="%s\n%s"%(string,fielddisplay(self,'spclevelset','levelset constraints (NaN means no constraint)'))31 string="%s\n%s"%(string,fielddisplay(self,'reinit_frequency','Amount of time steps after which the levelset function in re-initialized'))32 string="%s\n%s"%(string,fielddisplay(self,'kill_icebergs','remove floating icebergs to prevent rigid body motions (1: true, 0: false)'))33 string="%s\n%s"%(string,fielddisplay(self,'calving_max','maximum allowed calving rate (m/a)'))34 string="%s\n%s"%(string,fielddisplay(self,'fe','Finite Element type: ''P1'' (default), or ''P2'''))27 #}}} 28 def __repr__(self): # {{{ 29 string = ' Level-set parameters:' 30 string = "%s\n%s" % (string, fielddisplay(self, 'stabilization', '0: no, 1: artificial_diffusivity, 2: streamline upwinding')) 31 string = "%s\n%s" % (string, fielddisplay(self, 'spclevelset', 'levelset constraints (NaN means no constraint)')) 32 string = "%s\n%s" % (string, fielddisplay(self, 'reinit_frequency', 'Amount of time steps after which the levelset function in re-initialized')) 33 string = "%s\n%s" % (string, fielddisplay(self, 'kill_icebergs', 'remove floating icebergs to prevent rigid body motions (1: true, 0: false)')) 34 string = 
"%s\n%s" % (string, fielddisplay(self, 'calving_max', 'maximum allowed calving rate (m/a)')) 35 string = "%s\n%s" % (string, fielddisplay(self, 'fe', 'Finite Element type: ''P1'' (default), or ''P2''')) 35 36 36 return string 37 #}}} 38 def extrude(self,md): # {{{ 39 self.spclevelset=project3d(md,'vector',self.spclevelset,'type','node') 40 return self 41 #}}} 42 def setdefaultparameters(self): # {{{ 37 return string 38 #}}} 43 39 44 #stabilization = 1 by default 45 self.stabilization = 1 46 self.reinit_frequency = 5 47 self.kill_icebergs = 1 48 self.calving_max = 3000. 40 def extrude(self, md): # {{{ 41 self.spclevelset = project3d(md, 'vector', self.spclevelset, 'type', 'node') 42 return self 43 #}}} 49 44 50 #Linear elements by default 51 self.fe='P1' 45 def setdefaultparameters(self): # {{{ 52 46 53 return self 54 #}}} 55 def checkconsistency(self,md,solution,analyses): # {{{ 47 #stabilization = 1 by default 48 self.stabilization = 1 49 self.reinit_frequency = 5 50 self.kill_icebergs = 1 51 self.calving_max = 3000. 
56 52 57 #Early return 58 if (solution!='TransientSolution') or (not md.transient.ismovingfront): 59 return md 53 #Linear elements by default 54 self.fe = 'P1' 60 55 61 md = checkfield(md,'fieldname','levelset.spclevelset','Inf',1,'timeseries',1) 62 md = checkfield(md,'fieldname','levelset.stabilization','numel',[1],'values',[0,1,2]); 63 md = checkfield(md,'fieldname','levelset.kill_icebergs','numel',[1],'values',[0,1]); 64 md = checkfield(md,'fieldname','levelset.calving_max','numel',[1],'NaN',1,'Inf',1,'>',0); 65 md = checkfield(md,'fieldname','levelset.fe','values',['P1','P2']); 56 return self 57 #}}} 58 def checkconsistency(self, md, solution, analyses): # {{{ 66 59 67 return md 68 # }}} 69 def marshall(self,prefix,md,fid): # {{{ 60 #Early return 61 if (solution != 'TransientSolution') or (not md.transient.ismovingfront): 62 return md 70 63 71 yts=md.constants.yts; 64 md = checkfield(md, 'fieldname', 'levelset.spclevelset', 'Inf', 1, 'timeseries', 1) 65 md = checkfield(md, 'fieldname', 'levelset.stabilization', 'numel', [1], 'values', [0, 1, 2]) 66 md = checkfield(md, 'fieldname', 'levelset.kill_icebergs', 'numel', [1], 'values', [0, 1]) 67 md = checkfield(md, 'fieldname', 'levelset.calving_max', 'numel', [1], 'NaN', 1, 'Inf', 1, '>', 0) 68 md = checkfield(md, 'fieldname', 'levelset.fe', 'values', ['P1', 'P2']) 72 69 73 WriteData(fid,prefix,'object',self,'fieldname','stabilization','format','Integer'); 74 WriteData(fid,prefix,'object',self,'fieldname','spclevelset','format','DoubleMat','mattype',1,'timeserieslength',md.mesh.numberofvertices+1,'yts',md.constants.yts); 75 WriteData(fid,prefix,'object',self,'fieldname','reinit_frequency','format','Integer'); 76 WriteData(fid,prefix,'object',self,'fieldname','kill_icebergs','format','Boolean'); 77 WriteData(fid,prefix,'object',self,'fieldname','calving_max','format','Double','scale',1./yts); 78 WriteData(fid,prefix,'object',self,'fieldname','fe','format','String'); 79 # }}} 70 return md 71 # }}} 72 73 def 
marshall(self, prefix, md, fid): # {{{ 74 75 yts = md.constants.yts 76 77 WriteData(fid, prefix, 'object', self, 'fieldname', 'stabilization', 'format', 'Integer') 78 WriteData(fid, prefix, 'object', self, 'fieldname', 'spclevelset', 'format', 'DoubleMat', 'mattype', 1, 'timeserieslength', md.mesh.numberofvertices + 1, 'yts', yts) 79 WriteData(fid, prefix, 'object', self, 'fieldname', 'reinit_frequency', 'format', 'Integer') 80 WriteData(fid, prefix, 'object', self, 'fieldname', 'kill_icebergs', 'format', 'Boolean') 81 WriteData(fid, prefix, 'object', self, 'fieldname', 'calving_max', 'format', 'Double', 'scale', 1. / yts) 82 WriteData(fid, prefix, 'object', self, 'fieldname', 'fe', 'format', 'String') 83 # }}} -
issm/trunk-jpl/src/m/classes/toolkits.py
r24082 r24115 3 3 from iluasmoptions import iluasmoptions 4 4 from fielddisplay import fielddisplay 5 from checkfield import checkfield6 5 from issmgslsolver import issmgslsolver 7 6 from issmmumpssolver import issmmumpssolver 8 7 8 9 9 class toolkits(object): 10 11 10 """ 11 TOOLKITS class definition 12 12 13 14 15 13 Usage: 14 self=toolkits(); 15 """ 16 16 17 18 19 20 21 22 self.DefaultAnalysis= mumpsoptions()23 24 self.DefaultAnalysis= iluasmoptions()25 26 27 self.DefaultAnalysis= issmmumpssolver()28 29 self.DefaultAnalysis= issmgslsolver()30 31 17 def __init__(self): # {{{ 18 #default toolkits 19 if IssmConfig('_HAVE_PETSC_')[0]: 20 #MUMPS is the default toolkits 21 if IssmConfig('_HAVE_MUMPS_')[0]: 22 self.DefaultAnalysis = mumpsoptions() 23 else: 24 self.DefaultAnalysis = iluasmoptions() 25 else: 26 if IssmConfig('_HAVE_MUMPS_')[0]: 27 self.DefaultAnalysis = issmmumpssolver() 28 elif IssmConfig('_HAVE_GSL_')[0]: 29 self.DefaultAnalysis = issmgslsolver() 30 else: 31 raise IOError("ToolkitsFile error: need at least Mumps or Gsl to define issm solver type") 32 32 33 34 33 #Use same solver for Recovery mode 34 self.RecoveryAnalysis = self.DefaultAnalysis 35 35 36 37 38 39 s ="List of toolkits options per analysis:\n\n"40 41 s+="%s\n" % fielddisplay(self,analysis,'')36 #The other properties are dynamic 37 # }}} 38 def __repr__(self): # {{{ 39 s = "List of toolkits options per analysis:\n\n" 40 for analysis in list(vars(self).keys()): 41 s += "%s\n" % fielddisplay(self, analysis, '') 42 42 43 return s 44 # }}} 45 def addoptions(self,analysis,*args): # {{{ 46 # Usage example: 47 # md.toolkits=addoptions(md.toolkits,'StressbalanceAnalysis',FSoptions()); 48 # md.toolkits=addoptions(md.toolkits,'StressbalanceAnalysis'); 43 return s 44 # }}} 49 45 50 #Create dynamic property if property does not exist yet 51 if not hasattr(self,analysis): 52 setattr(self,analysis,None) 46 def addoptions(self, analysis, *args): # {{{ 47 # Usage example: 48 # 
md.toolkits=addoptions(md.toolkits,'StressbalanceAnalysis',FSoptions()); 49 # md.toolkits=addoptions(md.toolkits,'StressbalanceAnalysis'); 53 50 54 #Add toolkits options to analysis 55 if len(args)==1:56 setattr(self,analysis,args[0])51 #Create dynamic property if property does not exist yet 52 if not hasattr(self, analysis): 53 setattr(self, analysis, None) 57 54 58 return self 59 # }}} 60 def checkconsistency(self,md,solution,analyses): # {{{ 61 for analysis in list(vars(self).keys()): 62 if not getattr(self,analysis): 63 md.checkmessage("md.toolkits.%s is empty" % analysis) 55 #Add toolkits options to analysis 56 if len(args) == 1: 57 setattr(self, analysis, args[0]) 64 58 65 return md 66 # }}} 67 def ToolkitsFile(self,filename): # {{{ 68 """ 69 TOOLKITSFILE- build toolkits file 59 return self 60 # }}} 70 61 71 Build a Petsc compatible options file, from the toolkits model field + return options string 72 This file will also be used when the toolkit used is 'issm' instead of 'petsc' 62 def checkconsistency(self, md, solution, analyses): # {{{ 63 for analysis in list(vars(self).keys()): 64 if not getattr(self, analysis): 65 md.checkmessage("md.toolkits.%s is empty" % analysis) 66 67 return md 68 # }}} 69 70 def ToolkitsFile(self, filename): # {{{ 71 """ 72 TOOLKITSFILE- build toolkits file 73 74 Build a Petsc compatible options file, from the toolkits model field + return options string 75 This file will also be used when the toolkit used is 'issm' instead of 'petsc' 73 76 74 77 75 76 78 Usage: ToolkitsFile(toolkits,filename); 79 """ 77 80 78 79 80 fid=open(filename,'w')81 82 81 #open file for writing 82 try: 83 fid = open(filename, 'w') 84 except IOError as e: 85 raise IOError("ToolkitsFile error: could not open '%s' for writing." 
% filename) 83 86 84 85 fid.write("%s%s%s\n" % ('%Toolkits options file: ',filename,' written from Python toolkits array'))87 #write header 88 fid.write("%s%s%s\n" % ('%Toolkits options file: ', filename, ' written from Python toolkits array')) 86 89 87 88 89 options=getattr(self,analysis)90 #start writing options 91 for analysis in list(vars(self).keys()): 92 options = getattr(self, analysis) 90 93 91 92 93 94 for optionname,optionvalue in list(options.items()):94 #first write analysis: 95 fid.write("\n+%s\n" % analysis) #append a + to recognize it's an analysis enum 96 #now, write options 97 for optionname, optionvalue in list(options.items()): 95 98 96 97 98 99 100 101 if isinstance(optionvalue,(bool,int,float)):102 fid.write("-%s %g\n" % (optionname,optionvalue))103 elif isinstance(optionvalue,str):104 fid.write("-%s %s\n" % (optionname,optionvalue))105 106 99 if not optionvalue: 100 #this option has only one argument 101 fid.write("-%s\n" % optionname) 102 else: 103 #option with value. value can be string or scalar 104 if isinstance(optionvalue, (bool, int, float)): 105 fid.write("-%s %g\n" % (optionname, optionvalue)) 106 elif isinstance(optionvalue, str): 107 fid.write("-%s %s\n" % (optionname, optionvalue)) 108 else: 109 raise TypeError("ToolkitsFile error: option '%s' is not well formatted." % optionname) 107 110 108 109 111 fid.close() 112 # }}} -
issm/trunk-jpl/src/m/contrib/defleurian/paraview/exportVTK.py
r23870 r24115 21 21 22 22 TODO: - make time easily accessible 23 - make evolving geometry24 23 25 24 Basile de Fleurian: … … 191 190 Vxstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'x']) 192 191 Vystruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'y']) 192 treated_res += [field[:-1] + 'x', field[:-1] + 'y'] 193 193 if dim == 3: 194 194 Vzstruct = np.squeeze(spe_res_struct.__dict__[field[:-1] + 'z']) 195 treated_res += [field[:-1] + 'x', field[:-1] + 'y', field[:-1] + 'z'] 196 elif dim == 2: 197 treated_res += [field[:-1] + 'x', field[:-1] + 'y'] 195 treated_res += field[:-1] + 'z' 196 198 197 except KeyError: 199 198 fieldnames += field … … 211 210 212 211 elif field in tensors: 213 print("nothing") 212 try: 213 Txxstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xx']) 214 Txystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xy']) 215 Tyystruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yy']) 216 treated_res += [field[:-2] + 'xx', field[:-2] + 'xy', field[:-2] + 'yy'] 217 if dim == 3: 218 Tzzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'zz']) 219 Txzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'xz']) 220 Tyzstruct = np.squeeze(spe_res_struct.__dict__[field[:-2] + 'yz']) 221 treated_res += [field[:-2] + 'zz', field[:-2] + 'xz', field[:-2] + 'yz'] 222 223 except KeyError: 224 fieldnames += field 225 tensors.remove(field) 226 227 fid.write('TENSORS {} float \n'.format(field[:-2])) 228 for node in range(0, num_of_points): 229 Txx = cleanOutliers(Txxstruct[enveloppe_index[node]]) 230 Tyy = cleanOutliers(Tyystruct[enveloppe_index[node]]) 231 Txy = cleanOutliers(Txystruct[enveloppe_index[node]]) 232 if dim == 3: 233 Tzz = cleanOutliers(Tzzstruct[enveloppe_index[node]]) 234 Txz = cleanOutliers(Txzstruct[enveloppe_index[node]]) 235 Tyz = cleanOutliers(Tyzstruct[enveloppe_index[node]]) 236 fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, Txz)) 237 fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, Tyz)) 238 
fid.write('{:f} {:f} {:f}\n'.format(Txz, Tyz, Tzz)) 239 elif dim == 2: 240 fid.write('{:f} {:f} {:f}\n'.format(Txx, Txy, 0)) 241 fid.write('{:f} {:f} {:f}\n'.format(Txy, Tyy, 0)) 242 fid.write('{:f} {:f} {:f}\n'.format(0, 0, 0)) 214 243 else: 215 244 if ((np.size(spe_res_struct.__dict__[field])) == every_nodes): -
issm/trunk-jpl/src/m/geometry/slope.py
r22173 r24115 1 import numpy as np 2 from GetNodalFunctionsCoeff import GetNodalFunctionsCoeff 1 import numpy as np 2 from GetNodalFunctionsCoeff import GetNodalFunctionsCoeff 3 from project3d import project3d 3 4 4 def slope(md,*args):5 """6 SLOPE - compute the surface slope7 5 8 Usage: 9 sx,sy,s=slope(md) 10 sx,sy,s=slope(md,md.results.TransientSolution(1).Surface) 11 """ 6 def slope(md, *args): 7 """ 8 SLOPE - compute the surface slope 12 9 13 #load some variables (it is much faster if the variables are loaded from md once for all) 14 if md.mesh.dimension()==2: 15 numberofelements=md.mesh.numberofelements 16 numberofnodes=md.mesh.numberofvertices 17 index=md.mesh.elements 18 x=md.mesh.x ; y=md.mesh.y 19 else: 20 numberofelements=md.mesh.numberofelements2d 21 numberofnodes=md.mesh.numberofvertices2d 22 index=md.mesh.elements2d 23 x=md.mesh.x2d; y=md.mesh.y2d 10 Usage: 11 sx,sy,s=slope(md) 12 sx,sy,s=slope(md,md.results.TransientSolution(1).Surface) 13 """ 24 14 25 if len(args)==0: 26 surf=md.geometry.surface 27 elif len(args)==1: 28 surf=args[0] 29 else: 30 raise RuntimeError("slope.py usage error") 15 #load some variables (it is much faster if the variables are loaded from md once for all) 16 if md.mesh.dimension() == 2: 17 index = md.mesh.elements 18 x = md.mesh.x 19 y = md.mesh.y 20 else: 21 index = md.mesh.elements2d 22 x = md.mesh.x2d 23 y = md.mesh.y2d 31 24 32 #%compute nodal functions coefficients N(x,y)=alpha x + beta y + gamma 33 alpha,beta=GetNodalFunctionsCoeff(index,x,y)[0:2] 25 if len(args) == 0: 26 surf = md.geometry.surface 27 elif len(args) == 1: 28 surf = args[0] 29 else: 30 raise RuntimeError("slope.py usage error") 34 31 35 summation=np.array([[1],[1],[1]]) 36 sx=np.dot(surf[index-1,0]*alpha,summation).reshape(-1,) 37 sy=np.dot(surf[index-1,0]*beta,summation).reshape(-1,) 32 #%compute nodal functions coefficients N(x,y)=alpha x + beta y + gamma 33 alpha, beta = GetNodalFunctionsCoeff(index, x, y)[0:2] 38 34 39 s=np.sqrt(sx**2+sy**2) 35 
summation = np.array([[1], [1], [1]]) 36 sx = np.dot(surf[index - 1, 0] * alpha, summation).reshape(-1,) 37 sy = np.dot(surf[index - 1, 0] * beta, summation).reshape(-1,) 40 38 41 if md.mesh.dimension()==3: 42 sx=project3d(md,'vector',sx,'type','element') 43 sy=project3d(md,'vector',sy,'type','element') 44 s=np.sqrt(sx**2+sy**2) 39 s = np.sqrt(sx**2 + sy**2) 45 40 46 return (sx,sy,s) 41 if md.mesh.dimension() == 3: 42 sx = project3d(md, 'vector', sx, 'type', 'element') 43 sy = project3d(md, 'vector', sy, 'type', 'element') 44 s = np.sqrt(sx**2 + sy**2) 45 46 return (sx, sy, s) -
issm/trunk-jpl/src/m/interp/averaging.py
r23716 r24115 1 import numpy as 1 import numpy as np 2 2 from GetAreas import GetAreas 3 import MatlabFuncs as m4 3 try: 5 4 from scipy.sparse import csc_matrix 6 5 except ImportError: 7 6 print("could not import scipy, no averaging capabilities enabled") 8 7 9 def averaging(md,data,iterations,layer=0):10 '''11 AVERAGING - smooths the input over the mesh12 13 This routine takes a list over the elements or the nodes in input14 and return a list over the nodes.15 For each iterations it computes the average over each element (average16 of the vertices values) and then computes the average over each node17 by taking the average of the element around a node weighted by the18 elements volume19 For 3d mesh, a last argument can be added to specify the layer to be averaged on.20 21 Usage:22 smoothdata=averaging(md,data,iterations)23 smoothdata=averaging(md,data,iterations,layer)24 25 Examples:26 velsmoothed=averaging(md,md.initialization.vel,4)27 pressure=averaging(md,md.initialization.pressure,0)28 temperature=averaging(md,md.initialization.temperature,1,1)29 '''30 8 31 if len(data)!=md.mesh.numberofelements and len(data)!=md.mesh.numberofvertices: 32 raise Exception('averaging error message: data not supported yet') 33 if md.mesh.dimension()==3 and layer!=0: 34 if layer<=0 or layer>md.mesh.numberoflayers: 35 raise ValueError('layer should be between 1 and md.mesh.numberoflayers') 36 else: 37 layer=0 38 39 #initialization 40 if layer==0: 41 weights=np.zeros(md.mesh.numberofvertices,) 42 data=data.flatten(1) 43 else: 44 weights=np.zeros(md.mesh.numberofvertices2d,) 45 data=data[(layer-1)*md.mesh.numberofvertices2d+1:layer*md.mesh.numberofvertices2d,:] 46 47 #load some variables (it is much faster if the variabes are loaded from md once for all) 48 if layer==0: 49 index=md.mesh.elements 50 numberofnodes=md.mesh.numberofvertices 51 numberofelements=md.mesh.numberofelements 52 else: 53 index=md.mesh.elements2d 54 numberofnodes=md.mesh.numberofvertices2d 55 
numberofelements=md.mesh.numberofelements2d 9 def averaging(md, data, iterations, layer=0): 10 ''' 11 AVERAGING - smooths the input over the mesh 56 12 57 58 #build some variables 59 if md.mesh.dimension()==3 and layer==0: 60 rep=6 61 areas=GetAreas(index,md.mesh.x,md.mesh.y,md.mesh.z) 62 elif md.mesh.dimension()==2: 63 rep=3 64 areas=GetAreas(index,md.mesh.x,md.mesh.y) 65 else: 66 rep=3 67 areas=GetAreas(index,md.mesh.x2d,md.mesh.y2d) 13 This routine takes a list over the elements or the nodes in input 14 and return a list over the nodes. 15 For each iterations it computes the average over each element (average 16 of the vertices values) and then computes the average over each node 17 by taking the average of the element around a node weighted by the 18 elements volume 19 For 3d mesh, a last argument can be added to specify the layer to be averaged on. 68 20 69 index=index-1 # since python indexes starting from zero 70 line=index.flatten(1) 71 areas=np.vstack(areas).reshape(-1,) 72 summation=1./rep*np.ones(rep,) 73 linesize=rep*numberofelements 74 75 #update weights that holds the volume of all the element holding the node i 76 weights=csc_matrix( (np.tile(areas,(rep,1)).reshape(-1,),(line,np.zeros(linesize,))), shape=(numberofnodes,1)) 77 78 #initialization 79 if len(data)==numberofelements: 80 average_node=csc_matrix( (np.tile(areas*data,(rep,1)).reshape(-1,),(line,np.zeros(linesize,))), shape=(numberofnodes,1)) 81 average_node=average_node/weights 82 average_node = csc_matrix(average_node) 83 else: 84 average_node=csc_matrix(data.reshape(-1,1)) 21 Usage: 22 smoothdata=averaging(md, data, iterations) 23 smoothdata=averaging(md, data, iterations, layer) 85 24 86 #loop over iteration 87 for i in np.arange(1,iterations+1): 88 average_el=np.asarray(np.dot(average_node.todense()[index].reshape(numberofelements,rep),np.vstack(summation))).reshape(-1,) 89 average_node=csc_matrix( (np.tile(areas*average_el.reshape(-1),(rep,1)).reshape(-1,),(line,np.zeros(linesize,))), 
shape=(numberofnodes,1)) 90 average_node=average_node/weights 91 average_node=csc_matrix(average_node) 92 93 #return output as a full matrix (C code do not like sparse matrices) 94 average=np.asarray(average_node.todense()).reshape(-1,) 25 Examples: 26 velsmoothed=averaging(md, md.initialization.vel, 4) 27 pressure=averaging(md, md.initialization.pressure, 0) 28 temperature=averaging(md, md.initialization.temperature, 1, 1) 29 ''' 95 30 96 return average 31 if len(data) != md.mesh.numberofelements and len(data) != md.mesh.numberofvertices: 32 raise Exception('averaging error message: data not supported yet') 33 if md.mesh.dimension() == 3 and layer != 0: 34 if layer <= 0 or layer > md.mesh.numberoflayers: 35 raise ValueError('layer should be between 1 and md.mesh.numberoflayers') 36 else: 37 layer = 0 38 39 #initialization 40 if layer == 0: 41 weights = np.zeros(md.mesh.numberofvertices,) 42 data = data.flatten(1) 43 else: 44 weights = np.zeros(md.mesh.numberofvertices2d,) 45 data = data[(layer - 1) * md.mesh.numberofvertices2d + 1:layer * md.mesh.numberofvertices2d, :] 46 47 #load some variables (it is much faster if the variabes are loaded from md once for all) 48 if layer == 0: 49 index = md.mesh.elements 50 numberofnodes = md.mesh.numberofvertices 51 numberofelements = md.mesh.numberofelements 52 else: 53 index = md.mesh.elements2d 54 numberofnodes = md.mesh.numberofvertices2d 55 numberofelements = md.mesh.numberofelements2d 56 57 #build some variables 58 if md.mesh.dimension() == 3 and layer == 0: 59 rep = 6 60 areas = GetAreas(index, md.mesh.x, md.mesh.y, md.mesh.z) 61 elif md.mesh.dimension() == 2: 62 rep = 3 63 areas = GetAreas(index, md.mesh.x, md.mesh.y) 64 else: 65 rep = 3 66 areas = GetAreas(index, md.mesh.x2d, md.mesh.y2d) 67 68 index = index - 1 # since python indexes starting from zero 69 line = index.flatten(1) 70 areas = np.vstack(areas).reshape(-1,) 71 summation = 1. 
/ rep * np.ones(rep,) 72 linesize = rep * numberofelements 73 74 #update weights that holds the volume of all the element holding the node i 75 weights = csc_matrix((np.tile(areas, (rep, 1)).reshape(-1,), (line, np.zeros(linesize,))), shape=(numberofnodes, 1)) 76 77 #initialization 78 if len(data) == numberofelements: 79 average_node = csc_matrix((np.tile(areas * data, (rep, 1)).reshape(-1,), (line, np.zeros(linesize,))), shape=(numberofnodes, 1)) 80 average_node = average_node / weights 81 average_node = csc_matrix(average_node) 82 else: 83 average_node = csc_matrix(data.reshape(-1, 1)) 84 85 #loop over iteration 86 for i in np.arange(1, iterations + 1): 87 average_el = np.asarray(np.dot(average_node.todense()[index].reshape(numberofelements, rep), np.vstack(summation))).reshape(-1,) 88 average_node = csc_matrix((np.tile(areas * average_el.reshape(-1), (rep, 1)).reshape(-1,), (line, np.zeros(linesize, ))), shape=(numberofnodes, 1)) 89 average_node = average_node / weights 90 average_node = csc_matrix(average_node) 91 92 #return output as a full matrix (C code do not like sparse matrices) 93 average = np.asarray(average_node.todense()).reshape(-1,) 94 95 return average -
issm/trunk-jpl/src/m/mesh/GetNodalFunctionsCoeff.py
r21303 r24115 1 1 import numpy as np 2 2 3 def GetNodalFunctionsCoeff(index,x,y):4 """5 GETNODELFUNCTIONSCOEFF - compute nodal functions coefficients6 3 7 Compute the coefficients alpha beta and optionaly gamma of 8 2d triangular elements. For each element, the nodal function 9 is defined as: 10 N(x,y)=sum(i=1:3) alpha_i * x + beta_i * y + gamma_i 4 def GetNodalFunctionsCoeff(index, x, y): 5 """ 6 GETNODELFUNCTIONSCOEFF - compute nodal functions coefficients 11 7 12 Usage: 13 [alpha beta]=GetNodalFunctionsCoeff(index,x,y); 14 [alpha beta gamma]=GetNodalFunctionsCoeff(index,x,y); 8 Compute the coefficients alpha beta and optionaly gamma of 9 2d triangular elements. For each element, the nodal function 10 is defined as: 11 N(x, y)=sum(i=1:3) alpha_i * x + beta_i * y + gamma_i 15 12 16 Example:17 [alpha beta gamma]=GetNodalFunctionsCoeff(md.mesh.elements,md.mesh.x,md.mesh.y);18 """ 13 Usage: 14 [alpha beta]=GetNodalFunctionsCoeff(index, x, y); 15 [alpha beta gamma]=GetNodalFunctionsCoeff(index, x, y); 19 16 20 #make columns out of x and y 21 x=x.reshape(-1) 22 y=y.reshape(-1) 17 Example: 18 [alpha beta gamma]=GetNodalFunctionsCoeff(md.mesh.elements, md.mesh.x, md.mesh.y); 19 """ 23 20 24 #get nels and nods 25 nels=np.size(index,axis=0)26 nods=np.size(x)21 #make columns out of x and y 22 x = x.reshape(-1) 23 y = y.reshape(-1) 27 24 28 #some checks 29 if np.size(y)!=nods: 30 raise TypeError("GetNodalFunctionsCoeff error message: x and y do not have the same length.") 31 if np.max(index)>nods: 32 raise TypeError("GetNodalFunctionsCoeff error message: index should not have values above %d." % nods) 33 if np.size(index,axis=1)!=3: 34 raise TypeError("GetNodalFunctionsCoeff error message: only 2d meshes supported. 
index should have 3 columns.") 25 #get nels and nods 26 nels = np.size(index, axis=0) 27 nods = np.size(x) 35 28 36 #initialize output 37 alpha=np.zeros((nels,3)) 38 beta=np.zeros((nels,3)) 29 #some checks 30 if np.size(y) != nods: 31 raise TypeError("GetNodalFunctionsCoeff error message: x and y do not have the same length.") 32 if np.max(index) > nods: 33 raise TypeError("GetNodalFunctionsCoeff error message: index should not have values above {}.".format(nods)) 34 if np.size(index, axis=1) != 3: 35 raise TypeError("GetNodalFunctionsCoeff error message: only 2d meshes supported. index should have 3 columns.") 39 36 40 #compute nodal functions coefficients N(x,y)=alpha x + beta y +gamma 41 x1=x[index[:,0]-1] 42 x2=x[index[:,1]-1] 43 x3=x[index[:,2]-1] 44 y1=y[index[:,0]-1] 45 y2=y[index[:,1]-1] 46 y3=y[index[:,2]-1] 47 invdet=1./(x1*(y2-y3)-x2*(y1-y3)+x3*(y1-y2)) 37 #initialize output 38 alpha = np.zeros((nels, 3)) 39 beta = np.zeros((nels, 3)) 48 40 49 #get alpha and beta 50 alpha=np.vstack(((invdet*(y2-y3)).reshape(-1,),(invdet*(y3-y1)).reshape(-1,),(invdet*(y1-y2)).reshape(-1,))).T 51 beta =np.vstack(((invdet*(x3-x2)).reshape(-1,),(invdet*(x1-x3)).reshape(-1,),(invdet*(x2-x1)).reshape(-1,))).T 41 #compute nodal functions coefficients N(x, y) = alpha x + beta y +gamma 42 x1 = x[index[:, 0] - 1] 43 x2 = x[index[:, 1] - 1] 44 x3 = x[index[:, 2] - 1] 45 y1 = y[index[:, 0] - 1] 46 y2 = y[index[:, 1] - 1] 47 y3 = y[index[:, 2] - 1] 48 invdet = 1. 
/ (x1 * (y2 - y3) - x2 * (y1 - y3) + x3 * (y1 - y2)) 52 49 53 #get gamma if requested 54 gamma=np.zeros((nels,3)) 55 gamma=np.vstack(((invdet*(x2*y3-x3*y2)).reshape(-1,),(invdet*(y1*x3-y3*x1)).reshape(-1,),(invdet*(x1*y2-x2*y1)).reshape(-1,))).T50 #get alpha and beta 51 alpha = np.vstack(((invdet * (y2 - y3)).reshape(-1,), (invdet * (y3 - y1)).reshape(-1,), (invdet * (y1 - y2)).reshape(-1,))).T 52 beta = np.vstack(((invdet * (x3 - x2)).reshape(-1,), (invdet * (x1 - x3)).reshape(-1,), (invdet * (x2 - x1)).reshape(-1,))).T 56 53 57 return alpha,beta,gamma 54 #get gamma if requested 55 gamma = np.zeros((nels, 3)) 56 gamma = np.vstack(((invdet * (x2 * y3 - x3 * y2)).reshape(-1,), (invdet * (y1 * x3 - y3 * x1)).reshape(-1,), (invdet * (x1 * y2 - x2 * y1)).reshape(-1,))).T 58 57 58 return alpha, beta, gamma -
issm/trunk-jpl/src/m/mesh/bamg.py
r23716 r24115 1 1 import os.path 2 import numpy as 2 import numpy as np 3 3 from mesh2d import * 4 4 from mesh2dvertical import * … … 15 15 from ContourToNodes import ContourToNodes 16 16 17 def bamg(md,*args): 18 """ 19 BAMG - mesh generation 20 21 Available options (for more details see ISSM website http://issm.jpl.nasa.gov/): 22 23 - domain : followed by an ARGUS file that prescribes the domain outline 24 - holes : followed by an ARGUS file that prescribes the holes 25 - subdomains : followed by an ARGUS file that prescribes the list of 26 subdomains (that need to be inside domain) 27 28 - hmin : minimum edge length (default is 10^-100) 29 - hmax : maximum edge length (default is 10^100) 30 - hVertices : imposed edge length for each vertex (geometry or mesh) 31 - hminVertices : minimum edge length for each vertex (mesh) 32 - hmaxVertices : maximum edge length for each vertex (mesh) 33 34 - anisomax : maximum ratio between the smallest and largest edges (default is 10^30) 35 - coeff : coefficient applied to the metric (2-> twice as many elements, default is 1) 36 - cutoff : scalar used to compute the metric when metric type 2 or 3 are applied 37 - err : error used to generate the metric from a field 38 - errg : geometric error (default is 0.1) 39 - field : field of the model that will be used to compute the metric 40 to apply several fields, use one column per field 41 - gradation : maximum ratio between two adjacent edges 42 - Hessiantype : 0 -> use double P2 projection (default) 43 1 -> use Green formula 44 - KeepVertices : try to keep initial vertices when adaptation is done on an existing mesh (default 1) 45 - maxnbv : maximum number of vertices used to allocate memory (default is 10^6) 46 - maxsubdiv : maximum subdivision of exisiting elements (default is 10) 47 - metric : matrix (numberofnodes x 3) used as a metric 48 - Metrictype : 1 -> absolute error c/(err coeff^2) * Abs(H) (default) 49 2 -> relative error c/(err coeff^2) * Abs(H)/max(s,cutoff*max(s)) 50 
3 -> rescaled absolute error c/(err coeff^2) * Abs(H)/(smax-smin) 51 - nbjacoby : correction used by Hessiantype=1 (default is 1) 52 - nbsmooth : number of metric smoothing procedure (default is 3) 53 - omega : relaxation parameter of the smoothing procedure (default is 1.8) 54 - power : power applied to the metric (default is 1) 55 - splitcorners : split triangles whuch have 3 vertices on the outline (default is 1) 56 - verbose : level of verbosity (default is 1) 57 58 - rifts : followed by an ARGUS file that prescribes the rifts 59 - toltip : tolerance to move tip on an existing point of the domain outline 60 - tracks : followed by an ARGUS file that prescribes the tracks that the mesh will stick to 61 - RequiredVertices : mesh vertices that are required. [x,y,ref]; ref is optional 62 - tol : if the distance between 2 points of the domain outline is less than tol, they 63 will be merged 64 65 Examples: 66 md=bamg(md,'domain','DomainOutline.exp','hmax',3000); 67 md=bamg(md,'field',[md.inversion.vel_obs md.geometry.thickness],'hmax',20000,'hmin',1000); 68 md=bamg(md,'metric',A,'hmin',1000,'hmax',20000,'gradation',3,'anisomax',1); 69 """ 70 71 #process options 72 options=pairoptions(*args) 73 # options=deleteduplicates(options,1); 74 75 #initialize the structures required as input of Bamg 76 bamg_options=OrderedDict() 77 bamg_geometry=bamggeom() 78 bamg_mesh=bamgmesh() 79 80 subdomain_ref = 1 81 hole_ref = 1 82 83 # Bamg Geometry parameters {{{ 84 if options.exist('domain'): 85 #Check that file exists 86 domainfile=options.getfieldvalue('domain') 87 if type(domainfile) == str: 88 if not os.path.exists(domainfile): 89 raise IOError("bamg error message: file '%s' not found" % domainfile) 90 domain=expread(domainfile) 91 else: 92 domain=domainfile 93 94 #Build geometry 95 count=0 96 for i,domaini in enumerate(domain): 97 #Check that the domain is closed 98 if (domaini['x'][0]!=domaini['x'][-1] or domaini['y'][0]!=domaini['y'][-1]): 99 raise RuntimeError("bamg error 
message: all contours provided in ''domain'' should be closed") 100 101 #Checks that all holes are INSIDE the principle domain outline princial domain should be index 0 102 if i: 103 flags=ContourToNodes(domaini['x'],domaini['y'],domainfile,0)[0] 104 if np.any(np.logical_not(flags)): 105 raise RuntimeError("bamg error message: All holes should be strictly inside the principal domain") 106 107 #Check orientation 108 nods=domaini['nods']-1#the domain are closed 1=end; 109 110 test = np.sum((domaini['x'][1:nods+1] - domaini['x'][0:nods])*(domaini['y'][1:nods+1] + domaini['y'][0:nods])) 111 if (i==0 and test>0) or (i>0 and test<0): 112 print('At least one contour was not correctly oriented and has been re-oriented') 113 domaini['x'] = np.flipud(domaini['x']) 114 domaini['y'] = np.flipud(domaini['y']) 115 116 117 #Add all points to bamg_geometry 118 nods=domaini['nods']-1 #the domain are closed 0=end 119 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.vstack((domaini['x'][0:nods],domaini['y'][0:nods],np.ones((nods)))).T)) 120 bamg_geometry.Edges =np.vstack((bamg_geometry.Edges,np.vstack((np.arange(count+1,count+nods+1),np.hstack((np.arange(count+2,count+nods+1),count+1)),1.*np.ones((nods)))).T)) 121 if i: 122 bamg_geometry.SubDomains=np.vstack((bamg_geometry.SubDomains,[2,count+1,1,-subdomain_ref])) 123 subdomain_ref = subdomain_ref+1; 124 else: 125 bamg_geometry.SubDomains=np.vstack((bamg_geometry.SubDomains,[2,count+1,1,0])) 126 127 #update counter 128 count+=nods 129 130 #Deal with domain holes 131 if options.exist('holes'): 132 holesfile=options.getfieldvalue('holes') 133 if type(holesfile) == str: 134 if not os.path.exists(holesfile): 135 raise IOError("bamg error message: file '%s' not found" % holesfile) 136 holes=expread(holesfile) 137 else: 138 holes=holesfile 139 140 #Build geometry 141 for i,holei in enumerate(holes): 142 #Check that the hole is closed 143 if (holei['x'][0]!=holei['x'][-1] or holei['y'][0]!=holei['y'][-1]): 144 raise 
RuntimeError("bamg error message: all contours provided in ''hole'' should be closed") 145 146 #Checks that all holes are INSIDE the principle domain outline princial domain should be index 0 147 flags=ContourToNodes(holei['x'],holei['y'],domainfile,0)[0] 148 if np.any(np.logical_not(flags)): 149 raise RuntimeError("bamg error message: All holes should be strictly inside the principal domain") 150 151 #Check orientation 152 nods=holei['nods']-1#the hole are closed 1=end; 153 test = np.sum((holei['x'][1:nods+1] - holei['x'][0:nods])*(holei['y'][1:nods+1] + holei['y'][0:nods])) 154 if test<0: 155 print('At least one hole was not correctly oriented and has been re-oriented') 156 holei['x'] = np.flipud(holei['x']) 157 holei['y'] = np.flipud(holei['y']) 158 159 #Add all points to bamg_geometry 160 nods=holei['nods']-1 #the hole are closed 0=end 161 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.vstack((holei['x'][0:nods],holei['y'][0:nods],np.ones((nods)))).T)) 162 bamg_geometry.Edges =np.vstack((bamg_geometry.Edges,np.vstack((np.arange(count+1,count+nods+1),np.hstack((np.arange(count+2,count+nods+1),count+1)),1.*np.ones((nods)))).T)) 163 #update counter 164 count+=nods 165 166 #And subdomains 167 if options.exist('subdomains'): 168 subdomainfile=options.getfieldvalue('subdomains') 169 if type(subdomainfile) == str: 170 if not os.path.exists(subdomainfile): 171 raise IOError("bamg error message: file '%s' not found" % subdomainfile) 172 subdomains=expread(subdomainfile) 173 else: 174 subdomains=subdomainfile 175 176 #Build geometry 177 for i,subdomaini in enumerate(subdomains): 178 #Check that the subdomain is closed 179 if (subdomaini['x'][0]!=subdomaini['x'][-1] or subdomaini['y'][0]!=subdomaini['y'][-1]): 180 raise RuntimeError("bamg error message: all contours provided in ''subdomain'' should be closed") 181 182 #Checks that all subdomains are INSIDE the principle subdomain outline princial domain should be index 0 183 if i: 184 
flags=ContourToNodes(subdomaini['x'],subdomaini['y'],domainfile,0)[0] 185 if np.any(np.logical_not(flags)): 186 raise RuntimeError("bamg error message: All subdomains should be strictly inside the principal subdomain") 187 188 #Check orientation 189 nods=subdomaini['nods']-1#the subdomain are closed 1=end; 190 191 test = np.sum((subdomaini['x'][1:nods+1] - subdomaini['x'][0:nods])*(subdomaini['y'][1:nods+1] + subdomaini['y'][0:nods])) 192 if test>0: 193 print('At least one subcontour was not correctly oriented and has been re-oriented') 194 subdomaini['x'] = np.flipud(subdomaini['x']) 195 subdomaini['y'] = np.flipud(subdomaini['y']) 196 197 #Add all points to bamg_geometry 198 nods=subdomaini['nods']-1 #the subdomain are closed 0=end 199 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.vstack((subdomaini['x'][0:nods],subdomaini['y'][0:nods],np.ones((nods)))).T)) 200 bamg_geometry.Edges =np.vstack((bamg_geometry.Edges,np.vstack((np.arange(count+1,count+nods+1),np.hstack((np.arange(count+2,count+nods+1),count+1)),1.*np.ones((nods)))).T)) 201 #update counter 202 count+=nods 203 204 if options.getfieldvalue('vertical',0): 205 if np.size(options.getfieldvalue('Markers',[]))!=np.size(bamg_geometry.Edges,0): 206 raise RuntimeError('for 2d vertical mesh, ''Markers'' option is required, and should be of size ' + str(np.size(bamg_geometry.Edges,0))) 207 if np.size(options.getfieldvalue('Markers',[]))==np.size(bamg_geometry.Edges,0): 208 bamg_geometry.Edges[:,2]=options.getfieldvalue('Markers') 209 210 #take care of rifts 211 if options.exist('rifts'): 212 213 #Check that file exists 214 riftfile=options.getfieldvalue('rifts') 215 if not os.path.exists(riftfile): 216 raise IOError("bamg error message: file '%s' not found" % riftfile) 217 rift=expread(riftfile) 218 219 for i,rifti in enumerate(rift): 220 221 #detect whether all points of the rift are inside the domain 222 flags=ContourToNodes(rifti['x'],rifti['y'],domain[0],0)[0] 223 if 
np.all(np.logical_not(flags)): 224 raise RuntimeError("one rift has all its points outside of the domain outline") 225 226 elif np.any(np.logical_not(flags)): 227 #We LOTS of work to do 228 print("Rift tip outside of or on the domain has been detected and is being processed...") 229 230 #check that only one point is outside (for now) 231 if np.sum(np.logical_not(flags).astype(int))!=1: 232 raise RuntimeError("bamg error message: only one point outside of the domain is supported yet") 233 234 #Move tip outside to the first position 235 if not flags[0]: 236 #OK, first point is outside (do nothing), 237 pass 238 elif not flags[-1]: 239 rifti['x']=np.flipud(rifti['x']) 240 rifti['y']=np.flipud(rifti['y']) 241 else: 242 raise RuntimeError("bamg error message: only a rift tip can be outside of the domain") 243 244 #Get cordinate of intersection point 245 x1=rifti['x'][0] 246 y1=rifti['y'][0] 247 x2=rifti['x'][1] 248 y2=rifti['y'][1] 249 for j in range(0,np.size(domain[0]['x'])-1): 250 if SegIntersect(np.array([[x1,y1],[x2,y2]]),np.array([[domain[0]['x'][j],domain[0]['y'][j]],[domain[0]['x'][j+1],domain[0]['y'][j+1]]])): 251 252 #Get position of the two nodes of the edge in domain 253 i1=j 254 i2=j+1 255 256 #rift is crossing edge [i1 i2] of the domain 257 #Get coordinate of intersection point (http://mathworld.wolfram.com/Line-LineIntersection.html) 258 x3=domain[0]['x'][i1] 259 y3=domain[0]['y'][i1] 260 x4=domain[0]['x'][i2] 261 y4=domain[0]['y'][i2] 262 # x=det([det([x1 y1; x2 y2]) x1-x2;det([x3 y3; x4 y4]) x3-x4])/det([x1-x2 y1-y2;x3-x4 y3-y4]); 263 # y=det([det([x1 y1; x2 y2]) y1-y2;det([x3 y3; x4 y4]) y3-y4])/det([x1-x2 y1-y2;x3-x4 y3-y4]); 264 x=np.linalg.det(np.array([[np.linalg.det(np.array([[x1,y1],[x2,y2]])),x1-x2],[np.linalg.det(np.array([[x3,y3],[x4,y4]])),x3-x4]]))/np.linalg.det(np.array([[x1-x2,y1-y2],[x3-x4,y3-y4]])) 265 
y=np.linalg.det(np.array([[np.linalg.det(np.array([[x1,y1],[x2,y2]])),y1-y2],[np.linalg.det(np.array([[x3,y3],[x4,y4]])),y3-y4]]))/np.linalg.det(np.array([[x1-x2,y1-y2],[x3-x4,y3-y4]])) 266 267 segdis= sqrt((x4-x3)**2+(y4-y3)**2) 268 tipdis=np.array([sqrt((x-x3)**2+(y-y3)**2),sqrt((x-x4)**2+(y-y4)**2)]) 269 270 if np.min(tipdis)/segdis < options.getfieldvalue('toltip',0): 271 print("moving tip-domain intersection point") 272 273 #Get position of the closer point 274 if tipdis[0]>tipdis[1]: 275 pos=i2 276 else: 277 pos=i1 278 279 #This point is only in Vertices (number pos). 280 #OK, now we can add our own rift 281 nods=rifti['nods']-1 282 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.hstack((rifti['x'][1:].reshape(-1,),rifti['y'][1:].reshape(-1,),np.ones((nods,1)))))) 283 bamg_geometry.Edges=np.vstack((bamg_geometry.Edges,\ 284 np.array([[pos,count+1,(1+i)]]),\ 285 np.hstack((np.arange(count+1,count+nods).reshape(-1,),np.arange(count+2,count+nods+1).reshape(-1,),(1+i)*np.ones((nods-1,1)))))) 286 count+=nods 287 288 break 289 290 else: 291 #Add intersection point to Vertices 292 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.array([[x,y,1]]))) 293 count+=1 294 295 #Decompose the crossing edge into 2 subedges 296 pos=np.nonzero(np.logical_and(bamg_geometry.Edges[:,0]==i1,bamg_geometry.Edges[:,1]==i2))[0] 297 if not pos: 298 raise RuntimeError("bamg error message: a problem occurred...") 299 bamg_geometry.Edges=np.vstack((bamg_geometry.Edges[0:pos-1,:],\ 300 np.array([[bamg_geometry.Edges[pos,0],count ,bamg_geometry.Edges[pos,2]]]),\ 301 np.array([[count ,bamg_geometry.Edges[pos,1],bamg_geometry.Edges[pos,2]]]),\ 302 bamg_geometry.Edges[pos+1:,:])) 303 304 #OK, now we can add our own rift 305 nods=rifti['nods']-1 306 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,np.hstack((rifti['x'][1:].reshape(-1,),rifti['y'][1:].reshape(-1,),np.ones((nods,1)))))) 307 bamg_geometry.Edges=np.vstack((bamg_geometry.Edges,\ 308 
np.array([[count,count+1,2]]),\ 309 np.hstack((np.arange(count+1,count+nods).reshape(-1,),np.arange(count+2,count+nods+1).reshape(-1,),(1+i)*np.ones((nods-1,1)))))) 310 count+=nods 311 312 break 313 314 else: 315 nods=rifti['nods']-1 316 bamg_geometry.Vertices=np.vstack(bamg_geometry.Vertices, np.hstack(rifti['x'][:],rifti['y'][:],np.ones((nods+1,1)))) 317 bamg_geometry.Edges =np.vstack(bamg_geometry.Edges, np.hstack(np.arange(count+1,count+nods).reshape(-1,),np.arange(count+2,count+nods+1).reshape(-1,),i*np.ones((nods,1)))) 318 count=+nods+1 319 320 #Deal with tracks 321 if options.exist('tracks'): 322 323 #read tracks 324 track=options.getfieldvalue('tracks') 325 if all(isinstance(track,str)): 326 A=expread(track) 327 track=np.hstack((A.x.reshape(-1,),A.y.reshape(-1,))) 328 else: 329 track=float(track) #for some reason, it is of class "single" 330 if np.size(track,axis=1)==2: 331 track=np.hstack((track,3.*np.ones((size(track,axis=0),1)))) 332 333 #only keep those inside 334 flags=ContourToNodes(track[:,0],track[:,1],domainfile,0)[0] 335 track=track[np.nonzero(flags),:] 336 337 #Add all points to bamg_geometry 338 nods=np.size(track,axis=0) 339 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,track)) 340 bamg_geometry.Edges =np.vstack((bamg_geometry.Edges,np.hstack((np.arange(count+1,count+nods).reshape(-1,),np.arange(count+2,count+nods+1).reshape(-1,),3.*np.ones((nods-1,1)))))) 341 342 #update counter 343 count+=nods 344 345 #Deal with vertices that need to be kept by mesher 346 if options.exist('RequiredVertices'): 347 348 #recover RequiredVertices 349 requiredvertices=options.getfieldvalue('RequiredVertices') #for some reason, it is of class "single" 350 if np.size(requiredvertices,axis=1)==2: 351 requiredvertices=np.hstack((requiredvertices,4.*np.ones((np.size(requiredvertices,axis=0),1)))) 352 353 #only keep those inside 354 flags=ContourToNodes(requiredvertices[:,0],requiredvertices[:,1],domainfile,0)[0] 355 
requiredvertices=requiredvertices[np.nonzero(flags)[0],:] 356 #Add all points to bamg_geometry 357 nods=np.size(requiredvertices,axis=0) 358 bamg_geometry.Vertices=np.vstack((bamg_geometry.Vertices,requiredvertices)) 359 360 #update counter 361 count+=nods 362 363 #process geom 364 #bamg_geometry=processgeometry(bamg_geometry,options.getfieldvalue('tol',float(nan)),domain[0]) 365 366 elif isinstance(md.private.bamg,dict) and 'geometry' in md.private.bamg: 367 bamg_geometry=bamggeom(md.private.bamg['geometry'].__dict__) 368 else: 369 #do nothing... 370 pass 371 #}}} 372 # Bamg Mesh parameters {{{ 373 if not options.exist('domain') and md.mesh.numberofvertices and m.strcmp(md.mesh.elementtype(),'Tria'): 374 375 if isinstance(md.private.bamg,dict) and 'mesh' in md.private.bamg: 376 bamg_mesh=bamgmesh(md.private.bamg['mesh'].__dict__) 377 else: 378 bamg_mesh.Vertices=np.vstack((md.mesh.x,md.mesh.y,np.ones((md.mesh.numberofvertices)))).T 379 #bamg_mesh.Vertices=np.hstack((md.mesh.x.reshape(-1,),md.mesh.y.reshape(-1,),np.ones((md.mesh.numberofvertices,1)))) 380 bamg_mesh.Triangles=np.hstack((md.mesh.elements,np.ones((md.mesh.numberofelements,1)))) 381 382 if isinstance(md.rifts.riftstruct,dict): 383 raise TypeError("bamg error message: rifts not supported yet. Do meshprocessrift AFTER bamg") 384 #}}} 385 # Bamg Options {{{ 386 bamg_options['Crack']=options.getfieldvalue('Crack',0) 387 bamg_options['anisomax']=options.getfieldvalue('anisomax',10.**30) 388 bamg_options['coeff']=options.getfieldvalue('coeff',1.) 
389 bamg_options['cutoff']=options.getfieldvalue('cutoff',10.**-5) 390 bamg_options['err']=options.getfieldvalue('err',np.array([[0.01]])) 391 bamg_options['errg']=options.getfieldvalue('errg',0.1) 392 bamg_options['field']=options.getfieldvalue('field',np.empty((0,1))) 393 bamg_options['gradation']=options.getfieldvalue('gradation',1.5) 394 bamg_options['Hessiantype']=options.getfieldvalue('Hessiantype',0) 395 bamg_options['hmin']=options.getfieldvalue('hmin',10.**-100) 396 bamg_options['hmax']=options.getfieldvalue('hmax',10.**100) 397 bamg_options['hminVertices']=options.getfieldvalue('hminVertices',np.empty((0,1))) 398 bamg_options['hmaxVertices']=options.getfieldvalue('hmaxVertices',np.empty((0,1))) 399 bamg_options['hVertices']=options.getfieldvalue('hVertices',np.empty((0,1))) 400 bamg_options['KeepVertices']=options.getfieldvalue('KeepVertices',1) 401 bamg_options['maxnbv']=options.getfieldvalue('maxnbv',10**6) 402 bamg_options['maxsubdiv']=options.getfieldvalue('maxsubdiv',10.) 403 bamg_options['metric']=options.getfieldvalue('metric',np.empty((0,1))) 404 bamg_options['Metrictype']=options.getfieldvalue('Metrictype',0) 405 bamg_options['nbjacobi']=options.getfieldvalue('nbjacobi',1) 406 bamg_options['nbsmooth']=options.getfieldvalue('nbsmooth',3) 407 bamg_options['omega']=options.getfieldvalue('omega',1.8) 408 bamg_options['power']=options.getfieldvalue('power',1.) 
409 bamg_options['splitcorners']=options.getfieldvalue('splitcorners',1) 410 bamg_options['verbose']=options.getfieldvalue('verbose',1) 411 #}}} 412 413 #call Bamg 414 bamgmesh_out,bamggeom_out=BamgMesher(bamg_mesh.__dict__,bamg_geometry.__dict__,bamg_options) 415 416 # plug results onto model 417 if options.getfieldvalue('vertical',0): 418 md.mesh=mesh2dvertical() 419 md.mesh.x=bamgmesh_out['Vertices'][:,0].copy() 420 md.mesh.y=bamgmesh_out['Vertices'][:,1].copy() 421 md.mesh.elements=bamgmesh_out['Triangles'][:,0:3].astype(int) 422 md.mesh.edges=bamgmesh_out['IssmEdges'].astype(int) 423 md.mesh.segments=bamgmesh_out['IssmSegments'][:,0:3].astype(int) 424 md.mesh.segmentmarkers=bamgmesh_out['IssmSegments'][:,3].astype(int) 425 426 #Fill in rest of fields: 427 md.mesh.numberofelements=np.size(md.mesh.elements,axis=0) 428 md.mesh.numberofvertices=np.size(md.mesh.x) 429 md.mesh.numberofedges=np.size(md.mesh.edges,axis=0) 430 md.mesh.vertexonboundary=np.zeros(md.mesh.numberofvertices,bool) 431 md.mesh.vertexonboundary[md.mesh.segments[:,0:2]-1]=True 432 433 #Now, build the connectivity tables for this mesh. 
Doubled in matlab for some reason 434 md.mesh.vertexonboundary=np.zeros(md.mesh.numberofvertices,) 435 md.mesh.vertexonboundary[md.mesh.segments[:,0:2]-1]=1 436 437 elif options.getfieldvalue('3dsurface',0): 438 md.mesh=mesh3dsurface() 439 md.mesh.x=bamgmesh_out['Vertices'][:,0].copy() 440 md.mesh.y=bamgmesh_out['Vertices'][:,1].copy() 441 md.mesh.z=md.mesh.x 442 md.mesh.z[:]=0 443 md.mesh.elements=bamgmesh_out['Triangles'][:,0:3].astype(int) 444 md.mesh.edges=bamgmesh_out['IssmEdges'].astype(int) 445 md.mesh.segments=bamgmesh_out['IssmSegments'][:,0:3].astype(int) 446 md.mesh.segmentmarkers=bamgmesh_out['IssmSegments'][:,3].astype(int) 447 448 #Fill in rest of fields: 449 md.mesh.numberofelements=np.size(md.mesh.elements,axis=0) 450 md.mesh.numberofvertices=np.size(md.mesh.x) 451 md.mesh.numberofedges=np.size(md.mesh.edges,axis=0) 452 md.mesh.vertexonboundary=np.zeros(md.mesh.numberofvertices,bool) 453 md.mesh.vertexonboundary[md.mesh.segments[:,0:2]-1]=True 454 455 else: 456 md.mesh=mesh2d() 457 md.mesh.x=bamgmesh_out['Vertices'][:,0].copy() 458 md.mesh.y=bamgmesh_out['Vertices'][:,1].copy() 459 md.mesh.elements=bamgmesh_out['Triangles'][:,0:3].astype(int) 460 md.mesh.edges=bamgmesh_out['IssmEdges'].astype(int) 461 md.mesh.segments=bamgmesh_out['IssmSegments'][:,0:3].astype(int) 462 md.mesh.segmentmarkers=bamgmesh_out['IssmSegments'][:,3].astype(int) 463 464 #Fill in rest of fields: 465 md.mesh.numberofelements=np.size(md.mesh.elements,axis=0) 466 md.mesh.numberofvertices=np.size(md.mesh.x) 467 md.mesh.numberofedges=np.size(md.mesh.edges,axis=0) 468 md.mesh.vertexonboundary=np.zeros(md.mesh.numberofvertices,bool) 469 md.mesh.vertexonboundary[md.mesh.segments[:,0:2]-1]=True 470 471 #Bamg private fields 472 md.private.bamg=OrderedDict() 473 md.private.bamg['mesh']=bamgmesh(bamgmesh_out) 474 md.private.bamg['geometry']=bamggeom(bamggeom_out) 475 md.mesh.elementconnectivity=md.private.bamg['mesh'].ElementConnectivity 476 
md.mesh.elementconnectivity[np.nonzero(np.isnan(md.mesh.elementconnectivity))]=0 477 md.mesh.elementconnectivity=md.mesh.elementconnectivity.astype(int) 478 479 #Check for orphan 480 if np.any(np.logical_not(np.in1d(np.arange(1,md.mesh.numberofvertices+1),md.mesh.elements.flat))): 481 raise RuntimeError("Output mesh has orphans. Check your Domain and/or RequiredVertices") 482 483 return md 484 485 def processgeometry(geom,tol,outline): # {{{ 486 487 raise RuntimeError("bamg.py/processgeometry is not complete.") 488 #Deal with edges 489 print("Checking Edge crossing...") 490 i=0 491 while (i<np.size(geom.Edges,axis=0)): 492 493 #edge counter 494 i+=1 495 496 #Get coordinates 497 x1=geom.Vertices[geom.Edges[i,0],0] 498 y1=geom.Vertices[geom.Edges[i,0],1] 499 x2=geom.Vertices[geom.Edges[i,1],0] 500 y2=geom.Vertices[geom.Edges[i,1],1] 501 color1=geom.Edges[i,2] 502 503 j=i #test edges located AFTER i only 504 while (j<np.size(geom.Edges,axis=0)): 505 506 #edge counter 507 j+=1 508 509 #Skip if the two edges already have a vertex in common 510 if any(m.ismember(geom.Edges[i,0:2],geom.Edges[j,0:2])): 511 continue 512 513 #Get coordinates 514 x3=geom.Vertices[geom.Edges[j,0],0] 515 y3=geom.Vertices[geom.Edges[j,0],1] 516 x4=geom.Vertices[geom.Edges[j,1],0] 517 y4=geom.Vertices[geom.Edges[j,1],1] 518 color2=geom.Edges[j,2] 519 520 #Check if the two edges are crossing one another 521 if SegIntersect(np.array([[x1,y1],[x2,y2]]),np.array([[x3,y3],[x4,y4]])): 522 523 #Get coordinate of intersection point (http://mathworld.wolfram.com/Line-LineIntersection.html) 524 x=np.linalg.det(np.array([np.linalg.det(np.array([[x1,y1],[x2,y2]])),x1-x2],[np.linalg.det(np.array([[x3,y3],[x4,y4]])),x3-x4])/np.linalg.det(np.array([[x1-x2,y1-y2],[x3-x4,y3-y4]]))) 525 y=np.linalg.det(np.array([np.linalg.det(np.array([[x1,y1],[x2,y2]])),y1-y2],[np.linalg.det(np.array([[x3,y3],[x4,y4]])),y3-y4])/np.linalg.det(np.array([[x1-x2,y1-y2],[x3-x4,y3-y4]]))) 526 527 #Add vertex to the list of vertices 528 
geom.Vertices=np.vstack((geom.Vertices,[x,y,min(color1,color2)])) 529 id=np.size(geom.Vertices,axis=0) 530 531 #Update edges i and j 532 edgei=geom.Edges[i,:].copy() 533 edgej=geom.Edges[j,:].copy() 534 geom.Edges[i,:] =[edgei(0),id ,edgei(2)] 535 geom.Edges=np.vstack((geom.Edges,[id ,edgei(1),edgei(2)])) 536 geom.Edges[j,:] =[edgej(0),id ,edgej(2)] 537 geom.Edges=np.vstack((geom.Edges,[id ,edgej(1),edgej(2)])) 538 539 #update current edge second tip 540 x2=x 541 y2=y 542 543 #Check point outside 544 print("Checking for points outside the domain...") 545 i=0 546 num=0 547 while (i<np.size(geom.Vertices,axis=0)): 548 549 #vertex counter 550 i+=1 551 552 #Get coordinates 553 x=geom.Vertices[i,0] 554 y=geom.Vertices[i,1] 555 color=geom.Vertices[i,2] 556 557 #Check that the point is inside the domain 558 if color!=1 and not ContourToNodes(x,y,outline[0],1): 559 560 #Remove points from list of Vertices 561 num+=1 562 geom.Vertices[i,:]=[] 563 564 #update edges 565 posedges=np.nonzero(geom.Edges==i) 566 geom.Edges[posedges[0],:]=[] 567 posedges=np.nonzero(geom.Edges>i) 568 geom.Edges[posedges]=geom.Edges[posedges]-1 569 570 #update counter 571 i-=1 572 573 if num: 574 print(("WARNING: %d points outside the domain outline have been removed" % num)) 575 576 """ 577 %Check point spacing 578 if ~isnan(tol), 579 disp('Checking point spacing...'); 580 i=0; 581 while (i<size(geom.Vertices,1)), 582 583 %vertex counter 584 i=i+1; 585 586 %Get coordinates 587 x1=geom.Vertices(i,1); 588 y1=geom.Vertices(i,2); 589 590 j=i; %test edges located AFTER i only 591 while (j<size(geom.Vertices,1)), 592 593 %vertex counter 594 j=j+1; 595 596 %Get coordinates 597 x2=geom.Vertices(j,1); 598 y2=geom.Vertices(j,2); 599 600 %Check whether the two vertices are too close 601 if ((x2-x1)**2+(y2-y1)**2<tol**2) 602 603 %Remove points from list of Vertices 604 geom.Vertices(j,:)=[]; 605 606 %update edges 607 posedges=find(m.ismember(geom.Edges,j)); 608 geom.Edges(posedges)=i; 609 
def bamg(md, *args):
    """
    BAMG - mesh generation

    Build a 2d triangular mesh for model md with the Bamg mesher.

    Available options (for more details see ISSM website http://issm.jpl.nasa.gov/):

    - domain : followed by an ARGUS file that prescribes the domain outline
    - holes : followed by an ARGUS file that prescribes the holes
    - subdomains : followed by an ARGUS file that prescribes the list of
          subdomains (that need to be inside domain)

    - hmin : minimum edge length (default is 10^-100)
    - hmax : maximum edge length (default is 10^100)
    - hVertices : imposed edge length for each vertex (geometry or mesh)
    - hminVertices : minimum edge length for each vertex (mesh)
    - hmaxVertices : maximum edge length for each vertex (mesh)

    - anisomax : maximum ratio between the smallest and largest edges (code default is 10^18)
    - coeff : coefficient applied to the metric (2 -> twice as many elements, default is 1)
    - cutoff : scalar used to compute the metric when metric type 2 or 3 are applied
    - err : error used to generate the metric from a field
    - errg : geometric error (default is 0.1)
    - field : field of the model that will be used to compute the metric
          (to apply several fields, use one column per field)
    - gradation : maximum ratio between two adjacent edges
    - Hessiantype : 0 -> use double P2 projection (default)
                    1 -> use Green formula
    - KeepVertices : try to keep initial vertices when adaptation is done on an existing mesh (default 1)
    - maxnbv : maximum number of vertices used to allocate memory (default is 10^6)
    - maxsubdiv : maximum subdivision of existing elements (default is 10)
    - metric : matrix (numberofnodes x 3) used as a metric
    - Metrictype : 1 -> absolute error c/(err coeff^2) * Abs(H) (default)
                   2 -> relative error c/(err coeff^2) * Abs(H)/max(s, cutoff*max(s))
                   3 -> rescaled absolute error c/(err coeff^2) * Abs(H)/(smax - smin)
    - nbjacobi : correction used by Hessiantype = 1 (default is 1)
    - nbsmooth : number of metric smoothing procedure (default is 3)
    - omega : relaxation parameter of the smoothing procedure (default is 1.8)
    - power : power applied to the metric (default is 1)
    - splitcorners : split triangles which have 3 vertices on the outline (default is 1)
    - verbose : level of verbosity (default is 1)

    - rifts : followed by an ARGUS file that prescribes the rifts
    - toltip : tolerance to move tip on an existing point of the domain outline
    - tracks : followed by an ARGUS file that prescribes the tracks that the mesh will stick to
    - RequiredVertices : mesh vertices that are required. [x, y, ref]; ref is optional
    - tol : if the distance between 2 points of the domain outline is less than tol, they
          will be merged

    Examples:
        md = bamg(md, 'domain', 'DomainOutline.exp', 'hmax', 3000)
        md = bamg(md, 'field', [md.inversion.vel_obs md.geometry.thickness], 'hmax', 20000, 'hmin', 1000)
        md = bamg(md, 'metric', A, 'hmin', 1000, 'hmax', 20000, 'gradation', 3, 'anisomax', 1)
    """

    #process options
    options = pairoptions(*args)
    #options = deleteduplicates(options, 1)

    #initialize the structures required as input of Bamg
    bamg_options = OrderedDict()
    bamg_geometry = bamggeom()
    bamg_mesh = bamgmesh()

    subdomain_ref = 1
    hole_ref = 1

    # Bamg Geometry parameters {{{
    if options.exist('domain'):
        #Check that file exists
        domainfile = options.getfieldvalue('domain')
        if type(domainfile) == str:
            if not os.path.exists(domainfile):
                raise IOError("bamg error message: file '%s' not found" % domainfile)
            domain = expread(domainfile)
        else:
            domain = domainfile

        #Build geometry
        count = 0
        for i, domaini in enumerate(domain):
            #Check that the domain is closed (first point must equal last point)
            if (domaini['x'][0] != domaini['x'][-1] or domaini['y'][0] != domaini['y'][-1]):
                raise RuntimeError("bamg error message: all contours provided in ''domain'' should be closed")

            #Checks that all holes are INSIDE the principal domain outline (principal domain should be index 0)
            if i:
                flags = ContourToNodes(domaini['x'], domaini['y'], domainfile, 0)[0]
                if np.any(np.logical_not(flags)):
                    raise RuntimeError("bamg error message: All holes should be strictly inside the principal domain")

            #Check orientation (signed-area test: outline counter-clockwise, holes clockwise)
            nods = domaini['nods'] - 1  #the domain is closed (last point repeats the first)
            test = np.sum((domaini['x'][1:nods + 1] - domaini['x'][0:nods]) * (domaini['y'][1:nods + 1] + domaini['y'][0:nods]))
            if (i == 0 and test > 0) or (i > 0 and test < 0):
                print('At least one contour was not correctly oriented and has been re - oriented')
                domaini['x'] = np.flipud(domaini['x'])
                domaini['y'] = np.flipud(domaini['y'])

            #Add all points to bamg_geometry (drop the duplicated closing point)
            nods = domaini['nods'] - 1
            bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((domaini['x'][0:nods], domaini['y'][0:nods], np.ones((nods)))).T))
            bamg_geometry.Edges = np.vstack((bamg_geometry.Edges, np.vstack((np.arange(count + 1, count + nods + 1), np.hstack((np.arange(count + 2, count + nods + 1), count + 1)), 1. * np.ones((nods)))).T))
            if i:
                bamg_geometry.SubDomains = np.vstack((bamg_geometry.SubDomains, [2, count + 1, 1, -subdomain_ref]))
                subdomain_ref = subdomain_ref + 1
            else:
                bamg_geometry.SubDomains = np.vstack((bamg_geometry.SubDomains, [2, count + 1, 1, 0]))

            #update counter
            count += nods

        #Deal with domain holes
        if options.exist('holes'):
            holesfile = options.getfieldvalue('holes')
            if type(holesfile) == str:
                if not os.path.exists(holesfile):
                    raise IOError("bamg error message: file '%s' not found" % holesfile)
                holes = expread(holesfile)
            else:
                holes = holesfile

            #Build geometry
            for i, holei in enumerate(holes):
                #Check that the hole is closed
                if (holei['x'][0] != holei['x'][-1] or holei['y'][0] != holei['y'][-1]):
                    raise RuntimeError("bamg error message: all contours provided in ''hole'' should be closed")

                #Checks that all holes are INSIDE the principal domain outline
                flags = ContourToNodes(holei['x'], holei['y'], domainfile, 0)[0]
                if np.any(np.logical_not(flags)):
                    raise RuntimeError("bamg error message: All holes should be strictly inside the principal domain")

                #Check orientation
                nods = holei['nods'] - 1  #the hole is closed
                test = np.sum((holei['x'][1:nods + 1] - holei['x'][0:nods]) * (holei['y'][1:nods + 1] + holei['y'][0:nods]))
                if test < 0:
                    print('At least one hole was not correctly oriented and has been re - oriented')
                    holei['x'] = np.flipud(holei['x'])
                    holei['y'] = np.flipud(holei['y'])

                #Add all points to bamg_geometry
                nods = holei['nods'] - 1
                bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((holei['x'][0:nods], holei['y'][0:nods], np.ones((nods)))).T))
                bamg_geometry.Edges = np.vstack((bamg_geometry.Edges, np.vstack((np.arange(count + 1, count + nods + 1), np.hstack((np.arange(count + 2, count + nods + 1), count + 1)), 1. * np.ones((nods)))).T))
                #update counter
                count += nods

        #And subdomains
        if options.exist('subdomains'):
            subdomainfile = options.getfieldvalue('subdomains')
            if type(subdomainfile) == str:
                if not os.path.exists(subdomainfile):
                    raise IOError("bamg error message: file '{}' not found".format(subdomainfile))
                subdomains = expread(subdomainfile)
            else:
                subdomains = subdomainfile

            #Build geometry
            for i, subdomaini in enumerate(subdomains):
                #Check that the subdomain is closed
                if (subdomaini['x'][0] != subdomaini['x'][-1] or subdomaini['y'][0] != subdomaini['y'][-1]):
                    raise RuntimeError("bamg error message: all contours provided in ''subdomain'' should be closed")

                #Checks that all subdomains are INSIDE the principal domain outline
                if i:
                    flags = ContourToNodes(subdomaini['x'], subdomaini['y'], domainfile, 0)[0]
                    if np.any(np.logical_not(flags)):
                        raise RuntimeError("bamg error message: All subdomains should be strictly inside the principal subdomain")

                #Check orientation
                nods = subdomaini['nods'] - 1
                test = np.sum((subdomaini['x'][1:nods + 1] - subdomaini['x'][0:nods]) * (subdomaini['y'][1:nods + 1] + subdomaini['y'][0:nods]))
                if test > 0:
                    print('At least one subcontour was not correctly oriented and has been re - oriented')
                    subdomaini['x'] = np.flipud(subdomaini['x'])
                    subdomaini['y'] = np.flipud(subdomaini['y'])

                #Add all points to bamg_geometry
                nods = subdomaini['nods'] - 1
                bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((subdomaini['x'][0:nods], subdomaini['y'][0:nods], np.ones((nods)))).T))
                bamg_geometry.Edges = np.vstack((bamg_geometry.Edges, np.vstack((np.arange(count + 1, count + nods + 1), np.hstack((np.arange(count + 2, count + nods + 1), count + 1)), 1. * np.ones((nods)))).T))
                #update counter
                count += nods

        if options.getfieldvalue('vertical', 0):
            if np.size(options.getfieldvalue('Markers', [])) != np.size(bamg_geometry.Edges, 0):
                raise RuntimeError('for 2d vertical mesh, ''Markers'' option is required, and should be of size ' + str(np.size(bamg_geometry.Edges, 0)))
            if np.size(options.getfieldvalue('Markers', [])) == np.size(bamg_geometry.Edges, 0):
                bamg_geometry.Edges[:, 2] = options.getfieldvalue('Markers')

        #take care of rifts
        if options.exist('rifts'):

            #Check that file exists
            riftfile = options.getfieldvalue('rifts')
            if not os.path.exists(riftfile):
                raise IOError("bamg error message: file '%s' not found" % riftfile)
            rift = expread(riftfile)

            for i, rifti in enumerate(rift):

                #detect whether all points of the rift are inside the domain
                flags = ContourToNodes(rifti['x'], rifti['y'], domain[0], 0)[0]
                if np.all(np.logical_not(flags)):
                    raise RuntimeError("one rift has all its points outside of the domain outline")

                elif np.any(np.logical_not(flags)):
                    #We LOTS of work to do
                    print("Rift tip outside of or on the domain has been detected and is being processed...")

                    #check that only one point is outside (for now)
                    if np.sum(np.logical_not(flags).astype(int)) != 1:
                        raise RuntimeError("bamg error message: only one point outside of the domain is supported yet")

                    #Move tip outside to the first position
                    if not flags[0]:
                        #OK, first point is outside (do nothing),
                        pass
                    elif not flags[-1]:
                        rifti['x'] = np.flipud(rifti['x'])
                        rifti['y'] = np.flipud(rifti['y'])
                    else:
                        raise RuntimeError("bamg error message: only a rift tip can be outside of the domain")

                    #Get cordinate of intersection point
                    x1 = rifti['x'][0]
                    y1 = rifti['y'][0]
                    x2 = rifti['x'][1]
                    y2 = rifti['y'][1]
                    for j in range(0, np.size(domain[0]['x']) - 1):
                        if SegIntersect(np.array([[x1, y1], [x2, y2]]), np.array([[domain[0]['x'][j], domain[0]['y'][j]], [domain[0]['x'][j + 1], domain[0]['y'][j + 1]]])):

                            #Get position of the two nodes of the edge in domain
                            i1 = j
                            i2 = j + 1

                            #rift is crossing edge [i1 i2] of the domain
                            #Get coordinate of intersection point (http://mathworld.wolfram.com/Line-LineIntersection.html)
                            x3 = domain[0]['x'][i1]
                            y3 = domain[0]['y'][i1]
                            x4 = domain[0]['x'][i2]
                            y4 = domain[0]['y'][i2]
                            x = np.linalg.det(np.array([[np.linalg.det(np.array([[x1, y1], [x2, y2]])), x1 - x2], [np.linalg.det(np.array([[x3, y3], [x4, y4]])), x3 - x4]])) / np.linalg.det(np.array([[x1 - x2, y1 - y2], [x3 - x4, y3 - y4]]))
                            y = np.linalg.det(np.array([[np.linalg.det(np.array([[x1, y1], [x2, y2]])), y1 - y2], [np.linalg.det(np.array([[x3, y3], [x4, y4]])), y3 - y4]])) / np.linalg.det(np.array([[x1 - x2, y1 - y2], [x3 - x4, y3 - y4]]))

                            #FIX: was bare sqrt() with no visible import; np.sqrt works on scalars too
                            segdis = np.sqrt((x4 - x3)**2 + (y4 - y3)**2)
                            tipdis = np.array([np.sqrt((x - x3)**2 + (y - y3)**2), np.sqrt((x - x4)**2 + (y - y4)**2)])

                            if np.min(tipdis) / segdis < options.getfieldvalue('toltip', 0):
                                print("moving tip - domain intersection point")

                                #Get position of the closer point
                                if tipdis[0] > tipdis[1]:
                                    pos = i2
                                else:
                                    pos = i1

                                #This point is already in Vertices (number pos).
                                #OK, now we can add our own rift
                                #FIX: previous hstack mixed 1-D and (n,1) arrays (ValueError);
                                #build the [x y ref] rows with the same vstack(...).T pattern as the domain code
                                nods = rifti['nods'] - 1
                                bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((rifti['x'][1:], rifti['y'][1:], np.ones(nods))).T))
                                bamg_geometry.Edges = np.vstack((bamg_geometry.Edges,
                                                                 np.array([[pos, count + 1, (1 + i)]]),
                                                                 np.vstack((np.arange(count + 1, count + nods), np.arange(count + 2, count + nods + 1), (1 + i) * np.ones(nods - 1))).T))
                                count += nods

                                break

                            else:
                                #Add intersection point to Vertices
                                bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.array([[x, y, 1]])))
                                count += 1

                                #Decompose the crossing edge into 2 subedges
                                pos = np.nonzero(np.logical_and(bamg_geometry.Edges[:, 0] == i1, bamg_geometry.Edges[:, 1] == i2))[0]
                                #FIX: 'if not pos' on an ndarray is ambiguous for size > 1; test emptiness explicitly
                                if np.size(pos) == 0:
                                    raise RuntimeError("bamg error message: a problem occurred...")
                                pos = pos[0]  #work with a scalar row index
                                #FIX: Matlab Edges(1:pos-1,:) translates to [0:pos] with 0-based pos (was [0:pos-1], dropping one row)
                                bamg_geometry.Edges = np.vstack((bamg_geometry.Edges[0:pos, :],
                                                                 np.array([[bamg_geometry.Edges[pos, 0], count, bamg_geometry.Edges[pos, 2]]]),
                                                                 np.array([[count, bamg_geometry.Edges[pos, 1], bamg_geometry.Edges[pos, 2]]]),
                                                                 bamg_geometry.Edges[pos + 1:, :]))

                                #OK, now we can add our own rift
                                nods = rifti['nods'] - 1
                                bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((rifti['x'][1:], rifti['y'][1:], np.ones(nods))).T))
                                bamg_geometry.Edges = np.vstack((bamg_geometry.Edges,
                                                                 np.array([[count, count + 1, 2]]),
                                                                 np.vstack((np.arange(count + 1, count + nods), np.arange(count + 2, count + nods + 1), (1 + i) * np.ones(nods - 1))).T))
                                count += nods

                                break

                else:
                    #whole rift strictly inside: add all its points and nods consecutive edges
                    #FIX: np.vstack/np.hstack were called with several positional arrays instead of
                    #one tuple (TypeError); edge ranges also matched the Matlab source (nods edges, ref 1+i)
                    nods = rifti['nods'] - 1
                    bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, np.vstack((rifti['x'][:], rifti['y'][:], np.ones(nods + 1))).T))
                    bamg_geometry.Edges = np.vstack((bamg_geometry.Edges, np.vstack((np.arange(count + 1, count + nods + 1), np.arange(count + 2, count + nods + 2), (1 + i) * np.ones(nods))).T))
                    count += nods + 1

        #Deal with tracks
        if options.exist('tracks'):

            #read tracks
            track = options.getfieldvalue('tracks')
            #FIX: was all(isinstance(track, str)) -> TypeError (bool is not iterable)
            if isinstance(track, str):
                A = expread(track)
                #expread returns a list of dicts (see domain handling above); build (n,2) columns
                track = np.vstack((A[0]['x'], A[0]['y'])).T
            else:
                #FIX: float(track) fails on arrays; the Matlab source does double(track)
                track = np.asarray(track, dtype=float)
            if np.size(track, axis=1) == 2:
                track = np.hstack((track, 3. * np.ones((np.size(track, axis=0), 1))))  #FIX: bare size() was a NameError

            #only keep those inside
            flags = ContourToNodes(track[:, 0], track[:, 1], domainfile, 0)[0]
            track = track[np.nonzero(flags)[0], :]  #FIX: index with the array, not the nonzero tuple

            #Add all points to bamg_geometry
            nods = np.size(track, axis=0)
            bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, track))
            bamg_geometry.Edges = np.vstack((bamg_geometry.Edges, np.vstack((np.arange(count + 1, count + nods), np.arange(count + 2, count + nods + 1), 3. * np.ones(nods - 1))).T))

            #update counter
            count += nods

        #Deal with vertices that need to be kept by mesher
        if options.exist('RequiredVertices'):

            #recover RequiredVertices
            requiredvertices = options.getfieldvalue('RequiredVertices')  #for some reason, it is of class "single"
            if np.size(requiredvertices, axis=1) == 2:
                requiredvertices = np.hstack((requiredvertices, 4. * np.ones((np.size(requiredvertices, axis=0), 1))))

            #only keep those inside
            flags = ContourToNodes(requiredvertices[:, 0], requiredvertices[:, 1], domainfile, 0)[0]
            requiredvertices = requiredvertices[np.nonzero(flags)[0], :]
            #Add all points to bamg_geometry
            nods = np.size(requiredvertices, axis=0)
            bamg_geometry.Vertices = np.vstack((bamg_geometry.Vertices, requiredvertices))

            #update counter
            count += nods

        #process geom
        #bamg_geometry = processgeometry(bamg_geometry, options.getfieldvalue('tol', float(nan)), domain[0])

    elif isinstance(md.private.bamg, dict) and 'geometry' in md.private.bamg:
        bamg_geometry = bamggeom(md.private.bamg['geometry'].__dict__)
    else:
        #do nothing...
        pass
    #}}}
    # Bamg Mesh parameters {{{
    if not options.exist('domain') and md.mesh.numberofvertices and m.strcmp(md.mesh.elementtype(), 'Tria'):
        if isinstance(md.private.bamg, dict) and 'mesh' in md.private.bamg:
            bamg_mesh = bamgmesh(md.private.bamg['mesh'].__dict__)
        else:
            bamg_mesh.Vertices = np.vstack((md.mesh.x, md.mesh.y, np.ones((md.mesh.numberofvertices)))).T
            bamg_mesh.Triangles = np.hstack((md.mesh.elements, np.ones((md.mesh.numberofelements, 1))))

    if isinstance(md.rifts.riftstruct, dict):
        raise TypeError("bamg error message: rifts not supported yet. Do meshprocessrift AFTER bamg")
    #}}}
    # Bamg Options {{{
    bamg_options['Crack'] = options.getfieldvalue('Crack', 0)
    bamg_options['anisomax'] = options.getfieldvalue('anisomax', 10.**18)
    bamg_options['coeff'] = options.getfieldvalue('coeff', 1.)
    bamg_options['cutoff'] = options.getfieldvalue('cutoff', 10.**-5)
    bamg_options['err'] = options.getfieldvalue('err', np.array([[0.01]]))
    bamg_options['errg'] = options.getfieldvalue('errg', 0.1)
    bamg_options['field'] = options.getfieldvalue('field', np.empty((0, 1)))
    bamg_options['gradation'] = options.getfieldvalue('gradation', 1.5)
    bamg_options['Hessiantype'] = options.getfieldvalue('Hessiantype', 0)
    bamg_options['hmin'] = options.getfieldvalue('hmin', 10.**-100)
    bamg_options['hmax'] = options.getfieldvalue('hmax', 10.**100)
    bamg_options['hminVertices'] = options.getfieldvalue('hminVertices', np.empty((0, 1)))
    bamg_options['hmaxVertices'] = options.getfieldvalue('hmaxVertices', np.empty((0, 1)))
    bamg_options['hVertices'] = options.getfieldvalue('hVertices', np.empty((0, 1)))
    bamg_options['KeepVertices'] = options.getfieldvalue('KeepVertices', 1)
    bamg_options['maxnbv'] = options.getfieldvalue('maxnbv', 10**6)
    bamg_options['maxsubdiv'] = options.getfieldvalue('maxsubdiv', 10.)
    bamg_options['metric'] = options.getfieldvalue('metric', np.empty((0, 1)))
    bamg_options['Metrictype'] = options.getfieldvalue('Metrictype', 0)
    bamg_options['nbjacobi'] = options.getfieldvalue('nbjacobi', 1)
    bamg_options['nbsmooth'] = options.getfieldvalue('nbsmooth', 3)
    bamg_options['omega'] = options.getfieldvalue('omega', 1.8)
    bamg_options['power'] = options.getfieldvalue('power', 1.)
    bamg_options['splitcorners'] = options.getfieldvalue('splitcorners', 1)
    bamg_options['verbose'] = options.getfieldvalue('verbose', 1)
    #}}}

    #call Bamg
    bamgmesh_out, bamggeom_out = BamgMesher(bamg_mesh.__dict__, bamg_geometry.__dict__, bamg_options)

    # plug results onto model
    if options.getfieldvalue('vertical', 0):
        md.mesh = mesh2dvertical()
        md.mesh.x = bamgmesh_out['Vertices'][:, 0].copy()
        md.mesh.y = bamgmesh_out['Vertices'][:, 1].copy()
        md.mesh.elements = bamgmesh_out['Triangles'][:, 0:3].astype(int)
        md.mesh.edges = bamgmesh_out['IssmEdges'].astype(int)
        md.mesh.segments = bamgmesh_out['IssmSegments'][:, 0:3].astype(int)
        md.mesh.segmentmarkers = bamgmesh_out['IssmSegments'][:, 3].astype(int)

        #Fill in rest of fields:
        md.mesh.numberofelements = np.size(md.mesh.elements, axis=0)
        md.mesh.numberofvertices = np.size(md.mesh.x)
        md.mesh.numberofedges = np.size(md.mesh.edges, axis=0)
        md.mesh.vertexonboundary = np.zeros(md.mesh.numberofvertices, bool)
        md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = True

        #Now, build the connectivity tables for this mesh. Doubled in matlab for some reason
        md.mesh.vertexonboundary = np.zeros(md.mesh.numberofvertices, )
        md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = 1

    elif options.getfieldvalue('3dsurface', 0):
        md.mesh = mesh3dsurface()
        md.mesh.x = bamgmesh_out['Vertices'][:, 0].copy()
        md.mesh.y = bamgmesh_out['Vertices'][:, 1].copy()
        md.mesh.z = md.mesh.x
        md.mesh.z[:] = 0
        md.mesh.elements = bamgmesh_out['Triangles'][:, 0:3].astype(int)
        md.mesh.edges = bamgmesh_out['IssmEdges'].astype(int)
        md.mesh.segments = bamgmesh_out['IssmSegments'][:, 0:3].astype(int)
        md.mesh.segmentmarkers = bamgmesh_out['IssmSegments'][:, 3].astype(int)

        #Fill in rest of fields:
        md.mesh.numberofelements = np.size(md.mesh.elements, axis=0)
        md.mesh.numberofvertices = np.size(md.mesh.x)
        md.mesh.numberofedges = np.size(md.mesh.edges, axis=0)
        md.mesh.vertexonboundary = np.zeros(md.mesh.numberofvertices, bool)
        md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = True

    else:
        md.mesh = mesh2d()
        md.mesh.x = bamgmesh_out['Vertices'][:, 0].copy()
        md.mesh.y = bamgmesh_out['Vertices'][:, 1].copy()
        md.mesh.elements = bamgmesh_out['Triangles'][:, 0:3].astype(int)
        md.mesh.edges = bamgmesh_out['IssmEdges'].astype(int)
        md.mesh.segments = bamgmesh_out['IssmSegments'][:, 0:3].astype(int)
        md.mesh.segmentmarkers = bamgmesh_out['IssmSegments'][:, 3].astype(int)

        #Fill in rest of fields:
        md.mesh.numberofelements = np.size(md.mesh.elements, axis=0)
        md.mesh.numberofvertices = np.size(md.mesh.x)
        md.mesh.numberofedges = np.size(md.mesh.edges, axis=0)
        md.mesh.vertexonboundary = np.zeros(md.mesh.numberofvertices, bool)
        md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = True

    #Bamg private fields
    md.private.bamg = OrderedDict()
    md.private.bamg['mesh'] = bamgmesh(bamgmesh_out)
    md.private.bamg['geometry'] = bamggeom(bamggeom_out)
    md.mesh.elementconnectivity = md.private.bamg['mesh'].ElementConnectivity
    md.mesh.elementconnectivity[np.nonzero(np.isnan(md.mesh.elementconnectivity))] = 0
    md.mesh.elementconnectivity = md.mesh.elementconnectivity.astype(int)

    #Check for orphan
    if np.any(np.logical_not(np.in1d(np.arange(1, md.mesh.numberofvertices + 1), md.mesh.elements.flat))):
        raise RuntimeError("Output mesh has orphans. Check your Domain and / or RequiredVertices")

    return md


def processgeometry(geom, tol, outline):  # {{{
    """
    Clean up a bamg geometry: split crossing edges, remove vertices outside
    the outline and merge vertices closer than tol.

    NOTE: this helper is UNFINISHED and unconditionally raises RuntimeError.
    Everything below the raise is dead code kept as a porting reference from
    the Matlab implementation; it still contains Matlab idioms (paren calls,
    empty-assignment deletions) and would not run as-is.
    """
    raise RuntimeError("bamg.py / processgeometry is not complete.")
    #Deal with edges
    print("Checking Edge crossing...")
    i = 0
    while (i < np.size(geom.Edges, axis=0)):
        #edge counter
        i += 1

        #Get coordinates
        x1 = geom.Vertices[geom.Edges[i, 0], 0]
        y1 = geom.Vertices[geom.Edges[i, 0], 1]
        x2 = geom.Vertices[geom.Edges[i, 1], 0]
        y2 = geom.Vertices[geom.Edges[i, 1], 1]
        color1 = geom.Edges[i, 2]

        j = i  #test edges located AFTER i only
        while (j < np.size(geom.Edges, axis=0)):
            #edge counter
            j += 1

            #Skip if the two edges already have a vertex in common
            if any(m.ismember(geom.Edges[i, 0:2], geom.Edges[j, 0:2])):
                continue

            #Get coordinates
            x3 = geom.Vertices[geom.Edges[j, 0], 0]
            y3 = geom.Vertices[geom.Edges[j, 0], 1]
            x4 = geom.Vertices[geom.Edges[j, 1], 0]
            y4 = geom.Vertices[geom.Edges[j, 1], 1]
            color2 = geom.Edges[j, 2]

            #Check if the two edges are crossing one another
            if SegIntersect(np.array([[x1, y1], [x2, y2]]), np.array([[x3, y3], [x4, y4]])):
                #Get coordinate of intersection point (http://mathworld.wolfram.com/Line-LineIntersection.html)
                x = np.linalg.det(np.array([np.linalg.det(np.array([[x1, y1], [x2, y2]])), x1 - x2], [np.linalg.det(np.array([[x3, y3], [x4, y4]])), x3 - x4]) / np.linalg.det(np.array([[x1 - x2, y1 - y2], [x3 - x4, y3 - y4]])))
                y = np.linalg.det(np.array([np.linalg.det(np.array([[x1, y1], [x2, y2]])), y1 - y2], [np.linalg.det(np.array([[x3, y3], [x4, y4]])), y3 - y4]) / np.linalg.det(np.array([[x1 - x2, y1 - y2], [x3 - x4, y3 - y4]])))

                #Add vertex to the list of vertices
                geom.Vertices = np.vstack((geom.Vertices, [x, y, min(color1, color2)]))
                id = np.size(geom.Vertices, axis=0)

                #Update edges i and j
                edgei = geom.Edges[i, :].copy()
                edgej = geom.Edges[j, :].copy()
                geom.Edges[i, :] = [edgei(0), id, edgei(2)]
                geom.Edges = np.vstack((geom.Edges, [id, edgei(1), edgei(2)]))
                geom.Edges[j, :] = [edgej(0), id, edgej(2)]
                geom.Edges = np.vstack((geom.Edges, [id, edgej(1), edgej(2)]))

                #update current edge second tip
                x2 = x
                y2 = y

    #Check point outside
    print("Checking for points outside the domain...")
    i = 0
    num = 0
    while (i < np.size(geom.Vertices, axis=0)):
        #vertex counter
        i += 1

        #Get coordinates
        x = geom.Vertices[i, 0]
        y = geom.Vertices[i, 1]
        color = geom.Vertices[i, 2]

        #Check that the point is inside the domain
        if color != 1 and not ContourToNodes(x, y, outline[0], 1):
            #Remove points from list of Vertices
            num += 1
            geom.Vertices[i, :] = []

            #update edges
            posedges = np.nonzero(geom.Edges == i)
            geom.Edges[posedges[0], :] = []
            posedges = np.nonzero(geom.Edges > i)
            geom.Edges[posedges] = geom.Edges[posedges] - 1

            #update counter
            i -= 1

    if num:
        print(("WARNING: %d points outside the domain outline have been removed" % num))

    """
    %Check point spacing
    if ~isnan(tol),
        disp('Checking point spacing...')
        i = 0
        while (i < size(geom.Vertices, 1)),

            %vertex counter
            i = i + 1

            %Get coordinates
            x1 = geom.Vertices(i, 1)
            y1 = geom.Vertices(i, 2)

            j = i; %test vertices located AFTER i only
            while (j < size(geom.Vertices, 1)),

                %vertex counter
                j = j + 1

                %Get coordinates
                x2 = geom.Vertices(j, 1)
                y2 = geom.Vertices(j, 2)

                %Check whether the two vertices are too close
                if ((x2 - x1)**2 + (y2 - y1)**2 < tol**2)

                    %Remove points from list of Vertices
                    geom.Vertices(j, :)=[]

                    %update edges
                    posedges = find(m.ismember(geom.Edges, j))
                    geom.Edges(posedges)=i
                    posedges = find(geom.Edges > j)
                    geom.Edges(posedges)=geom.Edges(posedges) - 1

                    %update counter
                    j = j - 1

                end
            end
        end
    end
    %remove empty edges
    geom.Edges(find(geom.Edges(:, 1) == geom.Edges(:, 2)), :)=[]
    """
    return geom
    # }}}
issm/trunk-jpl/src/m/mesh/roundmesh.py
def roundmesh(md, radius, resolution):
    """
    ROUNDMESH - create an unstructured round mesh

    This script will generate an unstructured round mesh
    - radius : specifies the radius of the circle in meters
    - resolution : specifies the resolution in meters

    Usage:
        md = roundmesh(md, radius, resolution)
    """

    #First we have to create the domain outline

    #Get number of points on the circle (+1 to close the outline).
    #FIX: np.floor returns a float but np.linspace requires an integer
    #sample count -> cast explicitly.
    pointsonedge = int(np.floor((2. * np.pi * radius) / resolution) + 1)

    #Calculate the cartesian coordinates of the points
    theta = np.linspace(0., 2. * np.pi, pointsonedge)
    x_list = roundsigfig(radius * np.cos(theta), 12)
    y_list = roundsigfig(radius * np.sin(theta), 12)
    A = OrderedDict()
    A['x'] = [x_list]
    A['y'] = [y_list]
    A['density'] = 1.
    print('now writing mesh')
    expwrite(A, 'RoundDomainOutline.exp')

    #Call mesher
    print('now meshing')
    md = triangle(md, 'RoundDomainOutline.exp', resolution)
    #md = bamg(md, 'domain', 'RoundDomainOutline.exp', 'hmin', resolution)

    #move the closest node to the center
    pos = np.argmin(md.mesh.x**2 + md.mesh.y**2)
    md.mesh.x[pos] = 0.
    md.mesh.y[pos] = 0.

    #delete domain file now that the mesh is built
    os.remove('RoundDomainOutline.exp')

    return md


def roundsigfig(x, n):
    """
    Round the nonzero entries of array x to n decimals of their normalized
    mantissa (i.e. roughly n significant figures), leaving zeros untouched
    so no log10(0) warning is emitted.

    NOTE: operates in place on x and also returns it.
    """
    nonzeros = np.where(x != 0)
    digits = np.ceil(np.log10(np.abs(x[nonzeros])))
    x[nonzeros] = x[nonzeros] / 10.**digits
    x[nonzeros] = np.round(x[nonzeros], decimals=n)
    x[nonzeros] = x[nonzeros] * 10.**digits
    return x
issm/trunk-jpl/src/m/mesh/triangle.py
def triangle(md, domainname, *args):
    """
    TRIANGLE - create model mesh using the triangle package

    This routine creates a model mesh using Triangle and a domain outline, to within a certain resolution
    where md is a @model object, domainname is the name of an Argus domain outline file,
    and resolution is a characteristic length for the mesh (same unit as the domain outline
    unit). Riftname is an optional argument (Argus domain outline) describing rifts.

    Usage:
        md = triangle(md, domainname, resolution)
        or md = triangle(md, domainname, resolution, riftname)

    Examples:
        md = triangle(md, 'DomainOutline.exp', 1000)
        md = triangle(md, 'DomainOutline.exp', 1000, 'Rifts.exp')
    """

    #Figure out a characteristic area. Resolution is a node oriented concept (ex a 1000m resolution node would
    #be made of 1000*1000 area squares).

    if len(args) == 1:
        resolution = args[0]
        riftname = ''
    if len(args) == 2:
        riftname = args[0]
        resolution = args[1]

    #Check that mesh was not already run, and warn user:
    if md.mesh.numberofelements:
        #FIX: the answer was previously passed through eval(), which raised
        #NameError for a plain 'y'/'n' and evaluated arbitrary user input;
        #read the reply as a plain string instead.
        choice = input('This model already has a mesh. Are you sure you want to go ahead? (y/n)')
        if choice not in ['y', 'n']:
            print("bad answer try you should use 'y' or 'n' ... exiting")
            return None
        if choice == 'n':
            print('no meshing done ... exiting')
            return None

    area = resolution**2

    #Check that file exist (this is a very very common mistake)
    if not os.path.exists(domainname):
        raise IOError("file '%s' not found" % domainname)

    #Mesh using Triangle
    md.mesh = mesh2d()
    md.mesh.elements, md.mesh.x, md.mesh.y, md.mesh.segments, md.mesh.segmentmarkers = Triangle_python(domainname, riftname, area)
    md.mesh.elements = md.mesh.elements.astype(int)
    md.mesh.segments = md.mesh.segments.astype(int)
    md.mesh.segmentmarkers = md.mesh.segmentmarkers.astype(int)

    #Fill in rest of fields:
    md.mesh.numberofelements = np.size(md.mesh.elements, axis=0)
    md.mesh.numberofvertices = np.size(md.mesh.x)
    md.mesh.vertexonboundary = np.zeros(md.mesh.numberofvertices, bool)
    md.mesh.vertexonboundary[md.mesh.segments[:, 0:2] - 1] = True

    #Now, build the connectivity tables for this mesh.
    md.mesh.vertexconnectivity = NodeConnectivity(md.mesh.elements, md.mesh.numberofvertices)[0]
    md.mesh.elementconnectivity = ElementConnectivity(md.mesh.elements, md.mesh.vertexconnectivity)[0]

    return md
issm/trunk-jpl/src/m/modules/BamgMesher.py
r20909 r24115 1 1 from BamgMesher_python import BamgMesher_python 2 2 3 def BamgMesher(bamgmesh,bamggeom,bamgoptions):4 """5 BAMGMESHER6 3 7 Usage: 8 bamgmesh,bamggeom = BamgMesher(bamgmesh,bamggeom,bamgoptions); 4 def BamgMesher(bamgmesh, bamggeom, bamgoptions): 5 """ 6 BAMGMESHER 9 7 10 bamgmesh: input bamg mesh 11 bamggeom: input bamg geometry for the mesh 12 bamgoptions: options for the bamg mesh 13 """ 14 15 #Call mex module 16 bamgmesh, bamggeom = BamgMesher_python(bamgmesh, bamggeom, bamgoptions) 8 Usage: 9 bamgmesh,bamggeom = BamgMesher(bamgmesh,bamggeom,bamgoptions); 17 10 18 #return 19 return bamgmesh, bamggeom 11 bamgmesh: input bamg mesh 12 bamggeom: input bamg geometry for the mesh 13 bamgoptions: options for the bamg mesh 14 """ 15 16 #Call mex module 17 bamgmesh, bamggeom = BamgMesher_python(bamgmesh, bamggeom, bamgoptions) 18 19 #return 20 return bamgmesh, bamggeom -
issm/trunk-jpl/src/m/plot/plot_unit.py
r23920 r24115 1 from cmaptools import getcolormap, truncate_colormap1 from cmaptools import getcolormap, truncate_colormap 2 2 from plot_quiver import plot_quiver 3 from scipy.interpolate import griddata 4 import numpy as np 3 import numpy as np 5 4 try: 6 5 import matplotlib as mpl … … 10 9 from mpl_toolkits.mplot3d.art3d import Poly3DCollection 11 10 except ImportError: 12 print("could not import pylab, matplotlib has not been installed, no plotting capabilities enabled") 13 14 def plot_unit(x,y,z,elements,data,is2d,isplanet,datatype,options,fig,axgrid,gridindex): 11 print("could not import pylab, matplotlib has not been installed, no plotting capabilities enabled") 12 13 14 def plot_unit(x, y, z, elements, data, is2d, isplanet, datatype, options, fig, axgrid, gridindex): 15 15 """ 16 PLOT_UNIT - unit plot, display data16 PLOT_UNIT - unit plot, display data 17 17 18 18 Usage: 19 plot_unit(x, y,z,elements,data,is2d,isplanet,datatype,options)20 21 See also: PLOTMODEL, PLOT_MANAGER19 plot_unit(x, y, z, elements, data, is2d, isplanet, datatype, options) 20 21 See also: PLOTMODEL, PLOT_MANAGER 22 22 """ 23 23 #if we are plotting 3d replace the current axis 24 24 if not is2d: 25 25 axgrid[gridindex].axis('off') 26 ax =inset_locator.inset_axes(axgrid[gridindex],width='100%',height='100%',loc=3,borderpad=0,axes_class=Axes3D)27 ax.set_axis_bgcolor((0.7, 0.7,0.7))28 else: 29 ax =axgrid[gridindex]26 ax = inset_locator.inset_axes(axgrid[gridindex], width='100%', height='100%', loc=3, borderpad=0, axes_class=Axes3D) 27 ax.set_axis_bgcolor((0.7, 0.7, 0.7)) 28 else: 29 ax = axgrid[gridindex] 30 30 31 31 #edgecolor 32 edgecolor =options.getfieldvalue('edgecolor','None')32 edgecolor = options.getfieldvalue('edgecolor', 'None') 33 33 34 34 # colormap 35 35 # {{{ give number of colorlevels and transparency 36 colorlevels =options.getfieldvalue('colorlevels',128)37 alpha =options.getfieldvalue('alpha',1)38 if alpha <1:39 antialiased =True40 else: 41 antialiased =False36 colorlevels 
= options.getfieldvalue('colorlevels', 128) 37 alpha = options.getfieldvalue('alpha', 1) 38 if alpha < 1: 39 antialiased = True 40 else: 41 antialiased = False 42 42 # }}} 43 43 # {{{ define wich colormap to use 44 44 try: 45 defaultmap =plt.cm.get_cmap('viridis',colorlevels)45 defaultmap = plt.cm.get_cmap('viridis', colorlevels) 46 46 except AttributeError: 47 47 print("Viridis can't be found (probably too old Matplotlib) reverting to gnuplot colormap") 48 defaultmap =truncate_colormap('gnuplot2',0.1,0.9,colorlevels)48 defaultmap = truncate_colormap('gnuplot2', 0.1, 0.9, colorlevels) 49 49 if not options.exist('colormap'): 50 cmap =defaultmap51 else: 52 cmap =getcolormap(options)50 cmap = defaultmap 51 else: 52 cmap = getcolormap(options) 53 53 if options.exist('cmap_set_over'): 54 over =options.getfieldvalue('cmap_set_over','0.5')54 over = options.getfieldvalue('cmap_set_over', '0.5') 55 55 cmap.set_over(over) 56 56 if options.exist('cmap_set_under'): 57 under =options.getfieldvalue('cmap_set_under','0.5')57 under = options.getfieldvalue('cmap_set_under', '0.5') 58 58 cmap.set_under(under) 59 options.addfield('colormap', cmap)60 # }}} 61 # {{{ if plotting only one of several layers reduce dataset, same for surface62 if options.getfieldvalue('layer', 0)>=1:63 plotlayer =options.getfieldvalue('layer',0)64 if datatype ==1:65 slicesize =np.shape(elements)[0]66 elif datatype in [2, 3]:67 slicesize =len(x)68 data =data[(plotlayer-1)*slicesize:plotlayer*slicesize]59 options.addfield('colormap', cmap) 60 # }}} 61 # {{{ if plotting only one of several layers reduce dataset, same for surface 62 if options.getfieldvalue('layer', 0) >= 1: 63 plotlayer = options.getfieldvalue('layer', 0) 64 if datatype == 1: 65 slicesize = np.shape(elements)[0] 66 elif datatype in [2, 3]: 67 slicesize = len(x) 68 data = data[(plotlayer - 1) * slicesize:plotlayer * slicesize] 69 69 # }}} 70 70 # {{{ Get the colormap limits 71 71 if options.exist('clim'): 72 lims 
=options.getfieldvalue('clim',[np.amin(data),np.amax(data)])72 lims = options.getfieldvalue('clim', [np.amin(data), np.amax(data)]) 73 73 elif options.exist('caxis'): 74 lims =options.getfieldvalue('caxis',[np.amin(data),np.amax(data)])75 else: 76 if np.amin(data) ==np.amax(data):77 lims =[np.amin(data)-0.5,np.amax(data)+0.5]78 else: 79 lims =[np.amin(data),np.amax(data)]74 lims = options.getfieldvalue('caxis', [np.amin(data), np.amax(data)]) 75 else: 76 if np.amin(data) == np.amax(data): 77 lims = [np.amin(data) * 0.9, np.amax(data) * 1.1] 78 else: 79 lims = [np.amin(data), np.amax(data)] 80 80 # }}} 81 81 # {{{ Set the spread of the colormap (default is normal … … 88 88 else: 89 89 norm = mpl.colors.Normalize(vmin=lims[0], vmax=lims[1]) 90 options.addfield('colornorm', norm)90 options.addfield('colornorm', norm) 91 91 # }}} 92 92 … … 94 94 # {{{ data are on elements 95 95 96 if datatype ==1:96 if datatype == 1: 97 97 if is2d: 98 98 if options.exist('mask'): 99 triangles =mpl.tri.Triangulation(x,y,elements,data.mask)99 triangles = mpl.tri.Triangulation(x, y, elements, data.mask) 100 100 else: 101 triangles =mpl.tri.Triangulation(x,y,elements)102 tri =ax.tripcolor(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=alpha,edgecolors=edgecolor)101 triangles = mpl.tri.Triangulation(x, y, elements) 102 tri = ax.tripcolor(triangles, data, colorlevels, cmap=cmap, norm=norm, alpha=alpha, edgecolors=edgecolor) 103 103 else: 104 104 #first deal with colormap 105 105 loccmap = plt.cm.ScalarMappable(cmap=cmap) 106 loccmap.set_array([min(data), max(data)])107 loccmap.set_clim(vmin=min(data), vmax=max(data))106 loccmap.set_array([min(data), max(data)]) 107 loccmap.set_clim(vmin=min(data), vmax=max(data)) 108 108 109 109 #dealing with prism sides 110 recface =np.vstack((elements[:,0],elements[:,1],elements[:,4],elements[:,3])).T111 eltind =np.arange(0,np.shape(elements)[0])112 recface 
=np.vstack((recface,np.vstack((elements[:,1],elements[:,2],elements[:,5],elements[:,4])).T))113 eltind =np.hstack((eltind,np.arange(0,np.shape(elements)[0])))114 recface =np.vstack((recface,np.vstack((elements[:,2],elements[:,0],elements[:,3],elements[:,5])).T))115 eltind =np.hstack((eltind,np.arange(0,np.shape(elements)[0])))110 recface = np.vstack((elements[:, 0], elements[:, 1], elements[:, 4], elements[:, 3])).T 111 eltind = np.arange(0, np.shape(elements)[0]) 112 recface = np.vstack((recface, np.vstack((elements[:, 1], elements[:, 2], elements[:, 5], elements[:, 4])).T)) 113 eltind = np.hstack((eltind, np.arange(0, np.shape(elements)[0]))) 114 recface = np.vstack((recface, np.vstack((elements[:, 2], elements[:, 0], elements[:, 3], elements[:, 5])).T)) 115 eltind = np.hstack((eltind, np.arange(0, np.shape(elements)[0]))) 116 116 tmp = np.ascontiguousarray(np.sort(recface)).view(np.dtype((np.void, recface.dtype.itemsize * recface.shape[1]))) 117 117 _, idx, recur = np.unique(tmp, return_index=True, return_counts=True) 118 recel = recface[idx[np.where(recur==1)]]119 recindex =eltind[idx[np.where(recur==1)]]120 for i, rectangle in enumerate(recel):121 rec =list(zip(x[rectangle],y[rectangle],z[rectangle]))122 pl3 =Poly3DCollection([rec])123 color =loccmap.to_rgba(data[recindex[i]])118 recel = recface[idx[np.where(recur == 1)]] 119 recindex = eltind[idx[np.where(recur == 1)]] 120 for i, rectangle in enumerate(recel): 121 rec = list(zip(x[rectangle], y[rectangle], z[rectangle])) 122 pl3 = Poly3DCollection([rec]) 123 color = loccmap.to_rgba(data[recindex[i]]) 124 124 pl3.set_edgecolor(color) 125 125 pl3.set_color(color) … … 127 127 128 128 #dealing with prism bases 129 triface =np.vstack((elements[:,0:3],elements[:,3:6]))130 eltind =np.arange(0,np.shape(elements)[0])131 eltind =np.hstack((eltind,np.arange(0,np.shape(elements)[0])))129 triface = np.vstack((elements[:, 0:3], elements[:, 3:6])) 130 eltind = np.arange(0, np.shape(elements)[0]) 131 eltind = 
np.hstack((eltind, np.arange(0, np.shape(elements)[0]))) 132 132 tmp = np.ascontiguousarray(triface).view(np.dtype((np.void, triface.dtype.itemsize * triface.shape[1]))) 133 _, idx, recur = np.unique(tmp, return_index=True,return_counts=True)133 _, idx, recur = np.unique(tmp, return_index=True, return_counts=True) 134 134 #we keep only top and bottom elements 135 triel = triface[idx[np.where(recur==1)]]136 triindex =eltind[idx[np.where(recur==1)]]137 for i, triangle in enumerate(triel):138 tri =list(zip(x[triangle],y[triangle],z[triangle]))139 pl3 =Poly3DCollection([tri])140 color =loccmap.to_rgba(data[triindex[i]])141 pl3.set_edgecolor(color) 142 pl3.set_color(color) 143 ax.add_collection3d(pl3) 144 145 ax.set_xlim([min(x), max(x)])146 ax.set_ylim([min(y), max(y)])147 ax.set_zlim([min(z), max(z)])135 triel = triface[idx[np.where(recur == 1)]] 136 triindex = eltind[idx[np.where(recur == 1)]] 137 for i, triangle in enumerate(triel): 138 tri = list(zip(x[triangle], y[triangle], z[triangle])) 139 pl3 = Poly3DCollection([tri]) 140 color = loccmap.to_rgba(data[triindex[i]]) 141 pl3.set_edgecolor(color) 142 pl3.set_color(color) 143 ax.add_collection3d(pl3) 144 145 ax.set_xlim([min(x), max(x)]) 146 ax.set_ylim([min(y), max(y)]) 147 ax.set_zlim([min(z), max(z)]) 148 148 149 149 #raise ValueError('plot_unit error: 3D element plot not supported yet') 150 return 150 return 151 151 152 152 # }}} 153 153 # {{{ data are on nodes 154 154 155 elif datatype ==2:155 elif datatype == 2: 156 156 if is2d: 157 157 if np.ma.is_masked(data): 158 EltMask =np.asarray([np.any(np.in1d(index,np.where(data.mask))) for index in elements])159 triangles =mpl.tri.Triangulation(x,y,elements,EltMask)158 EltMask = np.asarray([np.any(np.in1d(index, np.where(data.mask))) for index in elements]) 159 triangles = mpl.tri.Triangulation(x, y, elements, EltMask) 160 160 else: 161 triangles =mpl.tri.Triangulation(x,y,elements)162 #tri 
=ax.tricontourf(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=alpha)161 triangles = mpl.tri.Triangulation(x, y, elements) 162 #tri = ax.tricontourf(triangles, data, colorlevels, cmap = cmap, norm = norm, alpha = alpha) 163 163 if options.exist('log'): 164 if alpha <1:#help with antialiasing165 tri =ax.tricontour(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=0.1,antialiased=antialiased)166 tri =ax.tricontourf(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=alpha,antialiased=antialiased)164 if alpha < 1: #help with antialiasing 165 tri = ax.tricontour(triangles, data, colorlevels, cmap=cmap, norm=norm, alpha=0.1, antialiased=antialiased) 166 tri = ax.tricontourf(triangles, data, colorlevels, cmap=cmap, norm=norm, alpha=alpha, antialiased=antialiased) 167 167 else: 168 if alpha <1:#help with antialiasing169 tri =ax.tricontour(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=0.1,antialiased=antialiased)170 tri =ax.tricontourf(triangles,data,colorlevels,cmap=cmap,norm=norm,alpha=alpha,extend='both',antialiased=antialiased)168 if alpha < 1: #help with antialiasing 169 tri = ax.tricontour(triangles, data, colorlevels, cmap=cmap, norm=norm, alpha=0.1, antialiased=antialiased) 170 tri = ax.tricontourf(triangles, data, colorlevels, cmap=cmap, norm=norm, alpha=alpha, extend='both', antialiased=antialiased) 171 171 if edgecolor != 'None': 172 ax.triplot(x, y,elements,color=edgecolor)172 ax.triplot(x, y, elements, color=edgecolor) 173 173 else: 174 174 #first deal with the colormap 175 175 loccmap = plt.cm.ScalarMappable(cmap=cmap) 176 loccmap.set_array([min(data), max(data)])177 loccmap.set_clim(vmin=min(data), vmax=max(data))176 loccmap.set_array([min(data), max(data)]) 177 loccmap.set_clim(vmin=min(data), vmax=max(data)) 178 178 179 179 #deal with prism sides 180 recface =np.vstack((elements[:,0],elements[:,1],elements[:,4],elements[:,3])).T181 recface =np.vstack((recface,np.vstack((elements[:,1],elements[:,2],elements[:,5],elements[:,4])).T))182 
recface =np.vstack((recface,np.vstack((elements[:,2],elements[:,0],elements[:,3],elements[:,5])).T))180 recface = np.vstack((elements[:, 0], elements[:, 1], elements[:, 4], elements[:, 3])).T 181 recface = np.vstack((recface, np.vstack((elements[:, 1], elements[:, 2], elements[:, 5], elements[:, 4])).T)) 182 recface = np.vstack((recface, np.vstack((elements[:, 2], elements[:, 0], elements[:, 3], elements[:, 5])).T)) 183 183 tmp = np.ascontiguousarray(np.sort(recface)).view(np.dtype((np.void, recface.dtype.itemsize * recface.shape[1]))) 184 184 _, idx, recur = np.unique(tmp, return_index=True, return_counts=True) 185 recel = recface[idx[np.where(recur==1)]]185 recel = recface[idx[np.where(recur == 1)]] 186 186 for rectangle in recel: 187 rec =list(zip(x[rectangle],y[rectangle],z[rectangle]))188 pl3 =Poly3DCollection([rec])189 color =loccmap.to_rgba(np.mean(data[rectangle]))187 rec = list(zip(x[rectangle], y[rectangle], z[rectangle])) 188 pl3 = Poly3DCollection([rec]) 189 color = loccmap.to_rgba(np.mean(data[rectangle])) 190 190 pl3.set_edgecolor(color) 191 191 pl3.set_color(color) … … 193 193 194 194 #deal with prism faces 195 triface =np.vstack((elements[:,0:3],elements[:,3:6]))195 triface = np.vstack((elements[:, 0:3], elements[:, 3:6])) 196 196 tmp = np.ascontiguousarray(triface).view(np.dtype((np.void, triface.dtype.itemsize * triface.shape[1]))) 197 _, idx, recur = np.unique(tmp, return_index=True,return_counts=True)197 _, idx, recur = np.unique(tmp, return_index=True, return_counts=True) 198 198 #we keep only top and bottom elements 199 triel = triface[idx[np.where(recur==1)]]199 triel = triface[idx[np.where(recur == 1)]] 200 200 for triangle in triel: 201 tri =list(zip(x[triangle],y[triangle],z[triangle]))202 pl3 =Poly3DCollection([tri])203 color =loccmap.to_rgba(np.mean(data[triangle]))204 pl3.set_edgecolor(color) 205 pl3.set_color(color) 206 ax.add_collection3d(pl3) 207 208 ax.set_xlim([min(x), max(x)])209 ax.set_ylim([min(y), max(y)])210 
ax.set_zlim([min(z), max(z)])201 tri = list(zip(x[triangle], y[triangle], z[triangle])) 202 pl3 = Poly3DCollection([tri]) 203 color = loccmap.to_rgba(np.mean(data[triangle])) 204 pl3.set_edgecolor(color) 205 pl3.set_color(color) 206 ax.add_collection3d(pl3) 207 208 ax.set_xlim([min(x), max(x)]) 209 ax.set_ylim([min(y), max(y)]) 210 ax.set_zlim([min(z), max(z)]) 211 211 #raise ValueError('plot_unit error: 3D element plot not supported yet') 212 212 return … … 214 214 # }}} 215 215 # {{{ plotting quiver 216 elif datatype ==3:216 elif datatype == 3: 217 217 if is2d: 218 Q =plot_quiver(x,y,data,options,ax)218 Q = plot_quiver(x, y, data, options, ax) 219 219 else: 220 220 raise ValueError('plot_unit error: 3D node plot not supported yet') 221 221 return 222 222 223 223 # }}} 224 224 # {{{ plotting P1 Patch (TODO) 225 225 226 elif datatype ==4:226 elif datatype == 4: 227 227 print('plot_unit message: P1 patch plot not implemented yet') 228 228 return … … 231 231 # {{{ plotting P0 Patch (TODO) 232 232 233 elif datatype ==5:233 elif datatype == 5: 234 234 print('plot_unit message: P0 patch plot not implemented yet') 235 235 return … … 237 237 # }}} 238 238 else: 239 raise ValueError('datatype =%d not supported' % datatype)239 raise ValueError('datatype = %d not supported' % datatype) -
issm/trunk-jpl/src/m/solve/loadresultsfromcluster.py
r23716 r24115 7 7 8 8 def loadresultsfromcluster(md,runtimename=False): 9 """ 10 LOADRESULTSFROMCLUSTER - load results of solution sequence from cluster 11 12 Usage: 13 md=loadresultsfromcluster(md,runtimename); 14 """ 9 """ 10 LOADRESULTSFROMCLUSTER - load results of solution sequence from cluster 15 11 16 #retrieve cluster, to be able to call its methods 17 cluster=md.cluster 12 Usage: 13 md=loadresultsfromcluster(md,runtimename); 14 """ 18 15 19 if runtimename: 20 md.private.runtimename=runtimename 16 #retrieve cluster, to be able to call its methods 17 cluster=md.cluster 21 18 22 #Download outputs from the cluster 23 filelist=[md.miscellaneous.name+'.outlog',md.miscellaneous.name+'.errlog'] 24 if md.qmu.isdakota: 25 filelist.append(md.miscellaneous.name+'.qmu.err') 26 filelist.append(md.miscellaneous.name+'.qmu.out') 27 if 'tabular_graphics_data' in fieldnames(md.qmu.params): 28 if md.qmu.params.tabular_graphics_data: 29 filelist.append('dakota_tabular.dat') 30 else: 31 filelist.append(md.miscellaneous.name+'.outbin') 32 cluster.Download(md.private.runtimename,filelist) 19 if runtimename: 20 md.private.runtimename=runtimename 33 21 34 #If we are here, no errors in the solution sequence, call loadresultsfromdisk. 
35 if os.path.exists(md.miscellaneous.name+'.outbin'): 36 if os.path.getsize(md.miscellaneous.name+'.outbin')>0: 37 md=loadresultsfromdisk(md,md.miscellaneous.name+'.outbin') 38 else: 39 print(('WARNING, outbin file is empty for run '+md.miscellaneous.name)) 40 elif not md.qmu.isdakota: 41 print(('WARNING, outbin file does not exist '+md.miscellaneous.name)) 42 43 #erase the log and output files 44 if md.qmu.isdakota: 45 #filename=os.path.join('qmu'+str(os.getpid()),md.miscellaneous.name) 46 filename = md.miscellaneous.name 22 #Download outputs from the cluster 23 filelist=[md.miscellaneous.name+'.outlog',md.miscellaneous.name+'.errlog'] 24 if md.qmu.isdakota: 25 filelist.append(md.miscellaneous.name+'.qmu.err') 26 filelist.append(md.miscellaneous.name+'.qmu.out') 27 if 'tabular_graphics_data' in fieldnames(md.qmu.params): 28 if md.qmu.params.tabular_graphics_data: 29 filelist.append('dakota_tabular.dat') 30 else: 31 filelist.append(md.miscellaneous.name+'.outbin') 32 cluster.Download(md.private.runtimename,filelist) 47 33 48 # this will not work like normal as dakota doesn't generate outbin files, 49 # instead calls postqmu to store results directly in the model 50 # at md.results.dakota via dakota_out_parse 51 md=loadresultsfromdisk(md,md.miscellaneous.name+'.outbin') 52 else: 53 filename=md.miscellaneous.name 54 TryRem('.outbin',filename) 55 if not platform.system()=='Windows': 56 TryRem('.tar.gz',md.private.runtimename) 34 #If we are here, no errors in the solution sequence, call loadresultsfromdisk. 
35 if os.path.exists(md.miscellaneous.name+'.outbin'): 36 if os.path.getsize(md.miscellaneous.name+'.outbin')>0: 37 md=loadresultsfromdisk(md,md.miscellaneous.name+'.outbin') 38 else: 39 print(('WARNING, outbin file is empty for run '+md.miscellaneous.name)) 40 elif not md.qmu.isdakota: 41 print(('WARNING, outbin file does not exist '+md.miscellaneous.name)) 57 42 58 TryRem('.errlog',filename) 59 TryRem('.outlog',filename) 60 61 #erase input file if run was carried out on same platform. 62 hostname=socket.gethostname() 63 if hostname==cluster.name: 64 if md.qmu.isdakota: 65 #filename=os.path.join('qmu'+str(os.getpid()),md.miscellaneous.name) 66 filename = md.miscellaneous.name 67 TryRem('.queue',filename) 68 else: 69 filename=md.miscellaneous.name 70 TryRem('.toolkits',filename) 71 if not platform.system()=='Windows': 72 TryRem('.queue',filename) 73 else: 74 TryRem('.bat',filename) 43 #erase the log and output files 44 if md.qmu.isdakota: 45 #filename=os.path.join('qmu'+str(os.getpid()),md.miscellaneous.name) 46 filename = md.miscellaneous.name 75 47 76 # remove this for bin file debugging 77 TryRem('.bin',filename) 48 # this will not work like normal as dakota doesn't generate outbin files, 49 # instead calls postqmu to store results directly in the model 50 # at md.results.dakota via dakota_out_parse 51 md=loadresultsfromdisk(md,md.miscellaneous.name+'.outbin') 52 else: 53 filename=md.miscellaneous.name 54 TryRem('.outbin',filename) 55 if not platform.system()=='Windows': 56 TryRem('.tar.gz',md.private.runtimename) 78 57 79 #cwd = os.getcwd().split('/')[-1] 80 if md.qmu.isdakota: 81 os.chdir('..') 82 #TryRem('',cwd) 58 TryRem('.errlog',filename) 59 TryRem('.outlog',filename) 83 60 84 return md 61 #erase input file if run was carried out on same platform. 
62 hostname=socket.gethostname() 63 if hostname==cluster.name: 64 if md.qmu.isdakota: 65 #filename=os.path.join('qmu'+str(os.getpid()),md.miscellaneous.name) 66 filename = md.miscellaneous.name 67 TryRem('.queue',filename) 68 else: 69 filename=md.miscellaneous.name 70 TryRem('.toolkits',filename) 71 if not platform.system()=='Windows': 72 TryRem('.queue',filename) 73 else: 74 TryRem('.bat',filename) 75 76 # remove this for bin file debugging 77 TryRem('.bin',filename) 78 79 #cwd = os.getcwd().split('/')[-1] 80 if md.qmu.isdakota: 81 os.chdir('..') 82 #TryRem('',cwd) 83 84 return md 85 85 86 86 def TryRem(extension,filename): 87 88 89 90 87 try: 88 os.remove(filename+extension) 89 except OSError: 90 print(('WARNING, no '+extension+' is present for run '+filename)) -
issm/trunk-jpl/src/m/solve/loadresultsfromdisk.py
r23716 r24115 4 4 from postqmu import postqmu 5 5 6 6 7 def loadresultsfromdisk(md,filename): 7 8 8 """ 9 LOADRESULTSFROMDISK - load results of solution sequence from disk file "filename" 9 10 10 11 12 11 Usage: 12 md=loadresultsfromdisk(md=False,filename=False); 13 """ 13 14 14 15 16 15 #check number of inputs/outputs 16 if not md or not filename: 17 raise ValueError("loadresultsfromdisk: error message.") 17 18 18 19 if not md.qmu.isdakota: 19 20 20 21 22 21 #Check that file exists 22 if not os.path.exists(filename): 23 raise OSError("binary file '{}' not found.".format(filename)) 23 24 24 25 26 25 #initialize md.results if not a structure yet 26 if not isinstance(md.results,results): 27 md.results=results() 27 28 28 29 30 31 29 #load results onto model 30 structure=parseresultsfromdisk(md,filename,not md.settings.io_gather) 31 if not len(structure): 32 raise RuntimeError("No result found in binary file '{}'. Check for solution crash.".format(filename)) 32 33 33 34 setattr(md.results,structure[0].SolutionType,structure) 34 35 35 36 36 #recover solution_type from results 37 md.private.solution=structure[0].SolutionType 37 38 38 39 40 41 42 43 39 #read log files onto fields 40 if os.path.exists(md.miscellaneous.name+'.errlog'): 41 with open(md.miscellaneous.name+'.errlog','r') as f: 42 setattr(getattr(md.results,structure[0].SolutionType)[0],'errlog',[line[:-1] for line in f]) 43 else: 44 setattr(getattr(md.results,structure[0].SolutionType)[0],'errlog',[]) 44 45 45 46 47 48 49 46 if os.path.exists(md.miscellaneous.name+'.outlog'): 47 with open(md.miscellaneous.name+'.outlog','r') as f: 48 setattr(getattr(md.results,structure[0].SolutionType)[0],'outlog',[line[:-1] for line in f]) 49 else: 50 setattr(getattr(md.results,structure[0].SolutionType)[0],'outlog',[]) 50 51 51 52 52 if len(getattr(md.results,structure[0].SolutionType)[0].errlog): 53 print ("loadresultsfromcluster info message: error during solution. 
Check your errlog and outlog model fields.") 53 54 54 55 56 55 #if only one solution, extract it from list for user friendliness 56 if len(structure) == 1 and not structure[0].SolutionType=='TransientSolution': 57 setattr(md.results,structure[0].SolutionType,structure[0]) 57 58 58 59 60 59 #post processes qmu results if necessary 60 else: 61 md=postqmu(md,filename) 61 62 62 63 return md -
issm/trunk-jpl/src/m/solve/marshall.py
r23716 r24115 1 1 from WriteData import WriteData 2 2 3 3 4 def marshall(md): 4 5 5 """ 6 MARSHALL - outputs a compatible binary file from @model md, for certain solution type. 6 7 7 8 8 The routine creates a compatible binary file from @model md 9 This binary file will be used for parallel runs in JPL-package 9 10 10 11 12 13 14 11 Usage: 12 marshall(md) 13 """ 14 if md.verbose.solution: 15 print(("marshalling file '%s.bin'." % md.miscellaneous.name)) 15 16 16 17 18 fid=open(md.miscellaneous.name+'.bin','wb')19 20 17 #open file for binary writing 18 try: 19 fid = open(md.miscellaneous.name + '.bin', 'wb') 20 except IOError as e: 21 raise IOError("marshall error message: could not open '%s.bin' file for binary writing." % md.miscellaneous.name) 21 22 22 23 for field in md.properties(): 23 24 24 25 if field in ['results','radaroverlay','toolkits','cluster','private']:26 25 #Some properties do not need to be marshalled 26 if field in ['results', 'radaroverlay', 'toolkits', 'cluster', 'private']: 27 continue 27 28 28 29 if not hasattr(getattr(md,field),'marshall'):30 raise TypeError("field '%s' is not an object." % field)29 #Check that current field is an object 30 if not hasattr(getattr(md, field), 'marshall'): 31 raise TypeError("field '{}' is not an object.".format(field)) 31 32 32 33 34 exec("md.{}.marshall('md.{}',md,fid)".format(field,field))33 #Marshall current object 34 #print "marshalling %s ..." % field 35 exec("md.{}.marshall('md.{}',md,fid)".format(field, field)) 35 36 36 37 WriteData(fid,'XXX','name','md.EOF','data',True,'format','Boolean'); 37 #Last, write "md.EOF" to make sure that the binary file is not corrupt 38 WriteData(fid, 'XXX', 'name', 'md.EOF', 'data', True, 'format', 'Boolean') 38 39 39 #close file 40 try: 41 fid.close() 42 except IOError as e: 43 raise IOError("marshall error message: could not close file '%s.bin'." 
% md.miscellaneous.name) 44 40 #close file 41 try: 42 fid.close() 43 except IOError as e: 44 raise IOError("marshall error message: could not close file '%s.bin'." % md.miscellaneous.name)
Note:
See TracChangeset
for help on using the changeset viewer.