source: issm/oecreview/Archive/18296-19100/ISSM-18339-18340.diff@19102
Last change on this file since 19102 was 19102, checked in 10 years ago
File size: 11.9 KB
../trunk-jpl/externalpackages/dakota/configs/5.3.1/CMakeLists.txt.pfe.patch
156c153,155
< # TODO: Can't this be integrated into the following logic?
---
> # TODO: Can't this be integrated into the following logic?
> set(BLAS_LIBS "-L/nasa/intel/mkl/10.0.011/lib/em64t/ -lmkl -lmkl_lapack -liomp5 -lpthread")
> set(LAPACK_LIBS "-L/nasa/intel/mkl/10.0.011/lib/em64t/ -lmkl -lmkl_lapack -liomp5 -lpthread")
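This hunk hardcodes the Pleiades MKL 10.0.011 location into BLAS_LIBS and LAPACK_LIBS. A quick check that the baked-in path still exists before building can save a failed link step; a minimal sketch, assuming the math/intel_mkl_64_10.0.011 module (the one loaded in the PETSc queue script further down) is still available:

module load math/intel_mkl_64_10.0.011
#if this directory is gone, the library paths in CMakeLists.txt.pfe.patch need updating
ls -d /nasa/intel/mkl/10.0.011/lib/em64t/ || echo "update MKL paths in CMakeLists.txt.pfe.patch"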
../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh
#!/bin/bash
set -eu

#Some cleanup
rm -rf Dakota
rm -rf src
rm -rf build
rm -rf install
mkdir src build install

#Download from ISSM server
$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/dakota-5.3.1-public-src.tar.gz' 'dakota-5.3.1-public-src.tar.gz'

#Untar
tar -zxvf dakota-5.3.1-public-src.tar.gz

#Move Dakota to src directory
mv dakota-5.3.1.src/* src
rm -rf dakota-5.3.1.src

#Set up Dakota cmake variables and config
export DAK_SRC=$ISSM_DIR/externalpackages/dakota/src
export DAK_BUILD=$ISSM_DIR/externalpackages/dakota/build
cp $DAK_SRC/cmake/BuildDakotaTemplate.cmake $DAK_SRC/cmake/BuildDakotaCustom.cmake
patch $DAK_SRC/cmake/BuildDakotaCustom.cmake configs/5.3.1/BuildDakotaCustom.cmake.patch
patch $DAK_SRC/cmake/DakotaDev.cmake configs/5.3.1/DakotaDev.cmake.patch
patch $DAK_SRC/CMakeLists.txt configs/5.3.1/CMakeLists.txt.pfe.patch

#Apply patches
patch src/src/ParallelLibrary.cpp configs/5.3.1/ParallelLibrary.cpp.patch
patch src/src/ParallelLibrary.hpp configs/5.3.1/ParallelLibrary.hpp.patch
patch src/src/NonDSampling.cpp configs/5.3.1/NonDSampling.cpp.patch
patch src/src/NonDLocalReliability.cpp configs/5.3.1/NonDLocalReliability.cpp.patch
patch src/packages/pecos/src/pecos_global_defs.hpp configs/5.3.1/pecos_global_defs.hpp.patch

#Configure dakota
cd $DAK_BUILD
cmake -C $DAK_SRC/cmake/BuildDakotaCustom.cmake -C $DAK_SRC/cmake/DakotaDev.cmake $DAK_SRC
cd ..

#Compile and install dakota
cd $DAK_BUILD
if [ $# -eq 0 ];
then
    make
    make install
else
    make -j $1
    make -j $1 install
fi
cd ..
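The trailing if-block switches between a serial and a parallel build depending on whether a job count is passed. A usage sketch, assuming the script is invoked from its own directory under externalpackages/dakota:

cd $ISSM_DIR/externalpackages/dakota
./install-5.3.1-pleiades.sh        #serial: make, then make install
./install-5.3.1-pleiades.sh 8      #parallel: make -j 8, then make -j 8 install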
Property changes on: ../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property

../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16

#!/bin/bash

#Some cleanup
rm -rf install petsc-3.4.2 src
mkdir install src

#Download from ISSM server
$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.4.2.tar.gz' 'petsc-3.4.2.tar.gz'

#Untar and move petsc to install directory
tar -zxvf petsc-3.4.2.tar.gz
mv petsc-3.4.2/* src/
rm -rf petsc-3.4.2

#configure
cd src
./config/configure.py \
    --prefix="$ISSM_DIR/externalpackages/petsc/install" \
    --PETSC_ARCH="$ISSM_ARCH" \
    --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \
    --with-batch=1 \
    --with-debugging=0 \
    --with-shared-libraries=1 \
    --known-mpi-shared-libraries=1 \
    --with-mpi=1 \
    --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \
    --with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \
    --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \
    --download-scalapack=yes \
    --download-blacs=yes \
    --download-mumps=yes \
    --download-metis=yes \
    --download-parmetis=yes \
    --download-trilinos=yes \
    --download-euclid=yes \
    --download-hypre=yes \
    --COPTFLAGS="-lmpi -O3" \
    --FOPTFLAGS="-lmpi -O3" \
    --CXXOPTFLAGS="-lmpi -O3"

#prepare script to reconfigure petsc
cat > script.queue << EOF
#PBS -S /bin/bash
#PBS -l select=1:ncpus=1:model=wes
#PBS -l walltime=200
#PBS -W group_list=s1010
#PBS -m e

. /usr/share/modules/init/bash
module load comp-intel/2012.0.032
module load mpi-sgi/mpt.2.06rp16

export PATH="$PATH:."
export MPI_GROUP_MAX=64
mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py
EOF

#print instructions
echo "== Now: cd src/ "
echo "== qsub -q devel script.queue "
echo "== Then run reconfigure script generated by PETSc and follow instructions"
../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh
#!/bin/bash

#Some cleanup
rm -rf install petsc-3.4.2 src
mkdir install src

#Download from ISSM server
$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.4.2.tar.gz' 'petsc-3.4.2.tar.gz'

#Untar and move petsc to install directory
tar -zxvf petsc-3.4.2.tar.gz
mv petsc-3.4.2/* src/
rm -rf petsc-3.4.2

#configure
cd src
./config/configure.py \
    --prefix="$ISSM_DIR/externalpackages/petsc/install" \
    --PETSC_ARCH="$ISSM_ARCH" \
    --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \
    --with-batch=1 \
    --with-debugging=0 \
    --with-shared-libraries=1 \
    --known-mpi-shared-libraries=1 \
    --with-mpi=1 \
    --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \
    --with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \
    --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \
    --download-scalapack=yes \
    --download-blacs=yes \
    --download-mumps=yes \
    --download-metis=yes \
    --download-parmetis=yes \
    --download-trilinos=yes \
    --download-euclid=yes \
    --download-hypre=yes \
    --COPTFLAGS="-lmpi -O3" \
    --FOPTFLAGS="-lmpi -O3" \
    --CXXOPTFLAGS="-lmpi -O3"

#prepare script to reconfigure petsc
cat > script.queue << EOF
#PBS -S /bin/bash
#PBS -l select=1:ncpus=1:model=wes
#PBS -l walltime=200
#PBS -W group_list=s1010
#PBS -m e

. /usr/share/modules/init/bash
module load comp-intel/2012.0.032
module load mpi-sgi/mpt.2.06rp16

export PATH="$PATH:."
export MPI_GROUP_MAX=64
mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py
EOF

#print instructions
echo "== Now: cd src/ "
echo "== qsub -q devel script.queue "
echo "== Then run reconfigure script generated by PETSc and follow instructions"
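Because the script configures with --with-batch=1, PETSc only finishes configuring after the conftest job has run on a compute node. A hedged sketch of the follow-up the echo lines describe; the reconfigure script name below assumes PETSc's usual reconfigure-<PETSC_ARCH>.py pattern with PETSC_ARCH set to $ISSM_ARCH, so use whatever name configure actually reports:

cd $ISSM_DIR/externalpackages/petsc/src
qsub -q devel script.queue
#once the PBS job completes, run the reconfigure script PETSc generated
python reconfigure-$ISSM_ARCH.py
#then follow the make/make install instructions it prints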
Property changes on: ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property

../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh

#!/bin/bash
set -eu

#Some cleanup
rm -rf install petsc-3.5.1 src
mkdir install src

#Download from ISSM server
$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.5.1.tar.gz' 'petsc-3.5.1.tar.gz'

#Untar and move petsc to install directory
tar -zxvf petsc-3.5.1.tar.gz
mv petsc-3.5.1/* src/
rm -rf petsc-3.5.1

#configure
cd src
./config/configure.py \
    --prefix="$ISSM_DIR/externalpackages/petsc/install" \
    --PETSC_ARCH="$ISSM_ARCH" \
    --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \
    --with-batch=1 \
    --with-debugging=0 \
    --with-shared-libraries=1 \
    --known-mpi-shared-libraries=1 \
    --with-mpi=1 \
    --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \
    --with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \
    --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \
    --download-scalapack=yes \
    --download-mumps=yes \
    --download-metis=yes \
    --download-parmetis=yes \
    --download-trilinos=yes \
    --download-euclid=yes \
    --download-hypre=yes \
    --COPTFLAGS="-lmpi -O3" \
    --FOPTFLAGS="-lmpi -O3" \
    --CXXOPTFLAGS="-lmpi -O3"

#prepare script to reconfigure petsc
cat > script.queue << EOF
#PBS -S /bin/bash
#PBS -l select=1:ncpus=1:model=wes
#PBS -l walltime=200
#PBS -W group_list=s1010
#PBS -m e

. /usr/share/modules/init/bash
module load comp-intel/2012.0.032
module load math/intel_mkl_64_10.0.011
module load mpi-sgi/mpt.2.06rp16

export PATH="$PATH:."
export MPI_GROUP_MAX=64
mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py
EOF

#print instructions
echo "== Now: cd src/ "
echo "== qsub -q devel script.queue "
echo "== Then run reconfigure script generated by PETSc and follow instructions"
Property changes on: ../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh
___________________________________________________________________
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property

../trunk-jpl/externalpackages/boost/install-1.55-pleiades.sh

#!/bin/bash
set -eu

#Note of caution: stop after the bootstrap phase, and run
#bjam --debug-configuration, to figure out which paths boost is using to include
#python. Make sure every one of these paths is covered by python. If not, just make
#symlinks in externalpackages/python to what boost is expecting. There is NO WAY
#to get the boost library to include python support without doing that.

#Some cleanup
rm -rf install boost_1_55_0 src
mkdir install src

#Download from ISSM server
$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/boost_1_55_0.tar.gz' 'boost_1_55_0.tar.gz'

#Untar
tar -zxvf boost_1_55_0.tar.gz

#Move boost into src directory
mv boost_1_55_0/* src
rm -rf boost_1_55_0

#patch src/boost/atomic/detail/cas128strong.hpp ./configs/1.55/cas128strong.hpp.patch
#patch src/boost/atomic/detail/gcc-atomic.hpp ./configs/1.55/gcc-atomic.hpp.patch
#patch src/tools/build/v2/user-config.jam ./configs/1.55/user-config.jam.patch
#patch src/tools/build/v2/tools/darwin.jam ./configs/1.55/darwin.jam.patch
#patch src/tools/build/v2/tools/darwin.py ./configs/1.55/darwin.py.patch

#Configure and compile
cd src
./bootstrap.sh \
    --prefix="$ISSM_DIR/externalpackages/boost/install" \
    --with-python=python2.7 \
    --with-python-root="$ISSM_DIR/externalpackages/python/install" \
    --with-toolset=intel-linux

#Compile boost
# Need gcc with iconv installed in a location that has been added to your path
./b2 toolset=intel-linux cxxflags=-static-libstdc++ linkflags=-static-libstdc++ threading=multi install
#./bjam install

#./b2 toolset=clang cxxflags=-stdlib=libstdc++ linkflags=-stdlib=libstdc++ -j2 variant=release link=static threading=multi install

#put bjam into install also:
mkdir ../install/bin
cp bjam ../install/bin
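For the python-path check described in the header comment of the boost script, a minimal sketch; the grep filter and the symlink target below are illustrative assumptions, and the real target must be taken from the --debug-configuration output:

cd $ISSM_DIR/externalpackages/boost/src
./bjam --debug-configuration 2>&1 | grep -i python
#if a reported include path does not exist under externalpackages/python,
#symlink it to the real headers, e.g. (hypothetical path):
ln -s $ISSM_DIR/externalpackages/python/install/include/python2.7 \
      $ISSM_DIR/externalpackages/python/install/include/python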