Index: ../trunk-jpl/externalpackages/dakota/configs/5.3.1/CMakeLists.txt.pfe.patch =================================================================== --- ../trunk-jpl/externalpackages/dakota/configs/5.3.1/CMakeLists.txt.pfe.patch (revision 0) +++ ../trunk-jpl/externalpackages/dakota/configs/5.3.1/CMakeLists.txt.pfe.patch (revision 18340) @@ -0,0 +1,6 @@ +156c153,155 +< # TODO: Can't this be integrated into the following logic? +--- +> # TODO: Can't this be integrated into the following logic? +> set(BLAS_LIBS "-L/nasa/intel/mkl/10.0.011/lib/em64t/ -lmkl -lmkl_lapack -liomp5 -lpthread") +> set(LAPACK_LIBS "-L/nasa/intel/mkl/10.0.011/lib/em64t/ -lmkl -lmkl_lapack -liomp5 -lpthread") Index: ../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh =================================================================== --- ../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh (revision 0) +++ ../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh (revision 18340) @@ -0,0 +1,51 @@ +#!/bin/bash +set -eu + +#Some cleanup +rm -rf Dakota +rm -rf src +rm -rf build +rm -rf install +mkdir src build install + +#Download from ISSM server +$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/dakota-5.3.1-public-src.tar.gz' 'dakota-5.3.1-public-src.tar.gz' + +#Untar +tar -zxvf dakota-5.3.1-public-src.tar.gz + +#Move Dakota to src directory +mv dakota-5.3.1.src/* src +rm -rf dakota-5.3.1.src + +#Set up Dakota cmake variables and config +export DAK_SRC=$ISSM_DIR/externalpackages/dakota/src +export DAK_BUILD=$ISSM_DIR/externalpackages/dakota/build +cp $DAK_SRC/cmake/BuildDakotaTemplate.cmake $DAK_SRC/cmake/BuildDakotaCustom.cmake +patch $DAK_SRC/cmake/BuildDakotaCustom.cmake configs/5.3.1/BuildDakotaCustom.cmake.patch +patch $DAK_SRC/cmake/DakotaDev.cmake configs/5.3.1/DakotaDev.cmake.patch +patch $DAK_SRC/CMakeLists.txt configs/5.3.1/CMakeLists.txt.pfe.patch + +#Apply patches +patch src/src/ParallelLibrary.cpp 
configs/5.3.1/ParallelLibrary.cpp.patch +patch src/src/ParallelLibrary.hpp configs/5.3.1/ParallelLibrary.hpp.patch +patch src/src/NonDSampling.cpp configs/5.3.1/NonDSampling.cpp.patch +patch src/src/NonDLocalReliability.cpp configs/5.3.1/NonDLocalReliability.cpp.patch +patch src/packages/pecos/src/pecos_global_defs.hpp configs/5.3.1/pecos_global_defs.hpp.patch + +#Configure dakota +cd $DAK_BUILD +cmake -C $DAK_SRC/cmake/BuildDakotaCustom.cmake -C $DAK_SRC/cmake/DakotaDev.cmake $DAK_SRC +cd .. + +#Compile and install dakota +cd $DAK_BUILD +if [ $# -eq 0 ]; +then + make + make install +else + make -j $1 + make -j $1 install +fi +cd .. Property changes on: ../trunk-jpl/externalpackages/dakota/install-5.3.1-pleiades.sh ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property Index: ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16 =================================================================== --- ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16 (revision 18339) +++ ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16 (revision 18340) @@ -1,61 +0,0 @@ -#!/bin/bash - -#Some cleanup -rm -rf install petsc-3.4.2 src -mkdir install src - -#Download from ISSM server -$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.4.2.tar.gz' 'petsc-3.4.2.tar.gz' - -#Untar and move petsc to install directory -tar -zxvf petsc-3.4.2.tar.gz -mv petsc-3.4.2/* src/ -rm -rf petsc-3.4.2 - -#configure -cd src -./config/configure.py \ - --prefix="$ISSM_DIR/externalpackages/petsc/install" \ - --PETSC_ARCH="$ISSM_ARCH" \ - --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \ - --with-batch=1 \ - --with-debugging=0 \ - --with-shared-libraries=1 \ - --known-mpi-shared-libraries=1 \ - --with-mpi=1 \ - --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \ - 
--with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \ - --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \ - --download-scalapack=yes \ - --download-blacs=yes \ - --download-mumps=yes \ - --download-metis=yes \ - --download-parmetis=yes \ - --download-trilinos=yes \ - --download-euclid=yes \ - --download-hypre=yes \ - --COPTFLAGS="-lmpi -O3" \ - --FOPTFLAGS="-lmpi -O3" \ - --CXXOPTFLAGS="-lmpi -O3" - -#prepare script to reconfigure petsc -cat > script.queue << EOF -#PBS -S /bin/bash -#PBS -l select=1:ncpus=1:model=wes -#PBS -l walltime=200 -#PBS -W group_list=s1010 -#PBS -m e - -. /usr/share/modules/init/bash -module load comp-intel/2012.0.032 -module load mpi-sgi/mpt.2.06rp16 - -export PATH="$PATH:." -export MPI_GROUP_MAX=64 -mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py -EOF - -#print instructions -echo "== Now: cd src/ " -echo "== qsub -q devel script.queue " -echo "== Then run reconfigure script generated by PETSc and follow instructions" Index: ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh =================================================================== --- ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh (revision 0) +++ ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh (revision 18340) @@ -0,0 +1,61 @@ +#!/bin/bash + +#Some cleanup +rm -rf install petsc-3.4.2 src +mkdir install src + +#Download from ISSM server +$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.4.2.tar.gz' 'petsc-3.4.2.tar.gz' + +#Untar and move petsc to install directory +tar -zxvf petsc-3.4.2.tar.gz +mv petsc-3.4.2/* src/ +rm -rf petsc-3.4.2 + +#configure +cd src +./config/configure.py \ + --prefix="$ISSM_DIR/externalpackages/petsc/install" \ + --PETSC_ARCH="$ISSM_ARCH" \ + --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \ + --with-batch=1 \ + 
--with-debugging=0 \ + --with-shared-libraries=1 \ + --known-mpi-shared-libraries=1 \ + --with-mpi=1 \ + --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \ + --with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \ + --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \ + --download-scalapack=yes \ + --download-blacs=yes \ + --download-mumps=yes \ + --download-metis=yes \ + --download-parmetis=yes \ + --download-trilinos=yes \ + --download-euclid=yes \ + --download-hypre=yes \ + --COPTFLAGS="-lmpi -O3" \ + --FOPTFLAGS="-lmpi -O3" \ + --CXXOPTFLAGS="-lmpi -O3" + +#prepare script to reconfigure petsc +cat > script.queue << EOF +#PBS -S /bin/bash +#PBS -l select=1:ncpus=1:model=wes +#PBS -l walltime=200 +#PBS -W group_list=s1010 +#PBS -m e + +. /usr/share/modules/init/bash +module load comp-intel/2012.0.032 +module load mpi-sgi/mpt.2.06rp16 + +export PATH="$PATH:." +export MPI_GROUP_MAX=64 +mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py +EOF + +#print instructions +echo "== Now: cd src/ " +echo "== qsub -q devel script.queue " +echo "== Then run reconfigure script generated by PETSc and follow instructions" Property changes on: ../trunk-jpl/externalpackages/petsc/install-3.4-pleiades-intel2012.0.032-mpt.2.06rp16.sh ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property Index: ../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh =================================================================== --- ../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh (revision 0) +++ ../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh (revision 18340) @@ -0,0 +1,62 @@ +#!/bin/bash +set -eu + +#Some cleanup +rm -rf install petsc-3.5.1 src +mkdir install src + +#Download from ISSM server +$ISSM_DIR/scripts/DownloadExternalPackage.py 
'http://issm.jpl.nasa.gov/files/externalpackages/petsc-lite-3.5.1.tar.gz' 'petsc-3.5.1.tar.gz' + +#Untar and move petsc to install directory +tar -zxvf petsc-3.5.1.tar.gz +mv petsc-3.5.1/* src/ +rm -rf petsc-3.5.1 + +#configure +cd src +./config/configure.py \ + --prefix="$ISSM_DIR/externalpackages/petsc/install" \ + --PETSC_ARCH="$ISSM_ARCH" \ + --PETSC_DIR="$ISSM_DIR/externalpackages/petsc/src" \ + --with-batch=1 \ + --with-debugging=0 \ + --with-shared-libraries=1 \ + --known-mpi-shared-libraries=1 \ + --with-mpi=1 \ + --with-mpi-lib="/nasa/sgi/mpt/2.06rp16/lib/libmpi.so" \ + --with-mpi-include="/nasa/sgi/mpt/2.06rp16/include" \ + --with-blas-lapack-dir="/nasa/intel/Compiler/2012.0.032/composer_xe_2011_sp1/mkl/lib/intel64" \ + --download-scalapack=yes \ + --download-mumps=yes \ + --download-metis=yes \ + --download-parmetis=yes \ + --download-trilinos=yes \ + --download-euclid=yes \ + --download-hypre=yes \ + --COPTFLAGS="-lmpi -O3" \ + --FOPTFLAGS="-lmpi -O3" \ + --CXXOPTFLAGS="-lmpi -O3" + +#prepare script to reconfigure petsc +cat > script.queue << EOF +#PBS -S /bin/bash +#PBS -l select=1:ncpus=1:model=wes +#PBS -l walltime=200 +#PBS -W group_list=s1010 +#PBS -m e + +. /usr/share/modules/init/bash +module load comp-intel/2012.0.032 +module load math/intel_mkl_64_10.0.011 +module load mpi-sgi/mpt.2.06rp16 + +export PATH="$PATH:." 
+export MPI_GROUP_MAX=64 +mpiexec -np 1 ./conftest-linux-gnu-ia64-intel.py +EOF + +#print instructions +echo "== Now: cd src/ " +echo "== qsub -q devel script.queue " +echo "== Then run reconfigure script generated by PETSc and follow instructions" Property changes on: ../trunk-jpl/externalpackages/petsc/install-3.5-pleiades-intel2012.0.032-mpt.2.06rp16.sh ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property Index: ../trunk-jpl/externalpackages/boost/install-1.55-pleiades.sh =================================================================== --- ../trunk-jpl/externalpackages/boost/install-1.55-pleiades.sh (revision 0) +++ ../trunk-jpl/externalpackages/boost/install-1.55-pleiades.sh (revision 18340) @@ -0,0 +1,47 @@ +#!/bin/bash +set -eu + +#Note of caution: stop after bootstrap phase, and run +#bjam --debug-configuration, to figure out which paths boost is using to include +#python. Make sure every one of these paths is covered by python. If not, just make +#symlinks in externalpackages/python to what boost is expecting. There is NO WAY +#to get the boost library to include python support without doing that. 
+ +#Some cleanup +rm -rf install boost_1_55_0 src +mkdir install src + +#Download from ISSM server +$ISSM_DIR/scripts/DownloadExternalPackage.py 'http://issm.jpl.nasa.gov/files/externalpackages/boost_1_55_0.tar.gz' 'boost_1_55_0.tar.gz' + +#Untar +tar -zxvf boost_1_55_0.tar.gz + +#Move boost into src directory +mv boost_1_55_0/* src +rm -rf boost_1_55_0 + +#patch src/boost/atomic/detail/cas128strong.hpp ./configs/1.55/cas128strong.hpp.patch +#patch src/boost/atomic/detail/gcc-atomic.hpp ./configs/1.55/gcc-atomic.hpp.patch +#patch src/tools/build/v2/user-config.jam ./configs/1.55/user-config.jam.patch +#patch src/tools/build/v2/tools/darwin.jam ./configs/1.55/darwin.jam.patch +#patch src/tools/build/v2/tools/darwin.py ./configs/1.55/darwin.py.patch + +#Configure and compile +cd src +./bootstrap.sh \ + --prefix="$ISSM_DIR/externalpackages/boost/install" \ + --with-python=python2.7 \ + --with-python-root="$ISSM_DIR/externalpackages/python/install" \ + --with-toolset=intel-linux + +#Compile boost +# Need gcc with iconv installed in a location that has been added to your path +./b2 toolset=intel-linux cxxflags=-static-libstdc++ linkflags=-static-libstdc++ threading=multi install +#./bjam install + +#./b2 toolset=clang cxxflags=-stdlib=libstdc++ linkflags=-stdlib=libstdc++ -j2 variant=release link=static threading=multi install + +#put bjam into install also: +mkdir ../install/bin +cp bjam ../install/bin Property changes on: ../trunk-jpl/externalpackages/boost/install-1.55-pleiades.sh ___________________________________________________________________ Added: svn:executable ## -0,0 +1 ## +* \ No newline at end of property