module load comp-intel/2016.2.181
module load mpi-hpe/mpt
}}}

And replace the value of `ISSM_DIR` with the path to your own ISSM trunk. ''Log out and log back in'' to apply this change.

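For reference, the `ISSM_DIR` line in your `~/.bashrc` might look like the following sketch; the path is hypothetical, so point it at your own checkout:

{{{
#!sh
# Hypothetical path -- replace with the location of your own ISSM checkout
export ISSM_DIR=/home1/username/trunk-jpl
}}}
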
== Installing ISSM on Pleiades ==

'''Do NOT install mpich'''; use the MPI provided by NAS. Pleiades will ''only'' be used to run the code: pre- and post-processing are done on your local machine, and you will never use Pleiades' MATLAB. Check out ISSM and install the following external packages (see the sketch after this list):
 - m1qn3
 - PETSc (use `install-3.15-pleiades.sh` or newer)
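
The build steps are sketched below, assuming the usual ISSM `externalpackages` layout; the name of the m1qn3 install script is an assumption, so check the package directory for the exact name:

{{{
#!sh
# Build m1qn3 first, then PETSc with the Pleiades-specific script
cd $ISSM_DIR/externalpackages/m1qn3
./install.sh

cd $ISSM_DIR/externalpackages/petsc
./install-3.15-pleiades.sh
}}}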

For Pleiades documentation, see http://www.nas.nasa.gov/hecc/support/kb/

You will need to run the following command before configuring ISSM:
{{{
#!sh
cd $ISSM_DIR
autoreconf -ivf
}}}

Use the following configuration script for ISSM (adapt to your needs):

{{{
#!sh
./configure \
 --prefix=$ISSM_DIR \
 --with-wrappers=no \
 --with-petsc-dir="$ISSM_DIR/externalpackages/petsc/install" \
 --with-m1qn3-dir="$ISSM_DIR/externalpackages/m1qn3/install" \
 --with-mpi-include=" " \
 --with-mpi-libflags=" -lmpi" \
 --with-mkl-libflags="-L/nasa/intel/Compiler/2016.2.181/mkl/lib/intel64/ -mkl=cluster " \
 --with-metis-dir="$ISSM_DIR/externalpackages/petsc/install" \
 --with-mumps-dir="$ISSM_DIR/externalpackages/petsc/install" \
 --with-scalapack-dir="$ISSM_DIR/externalpackages/petsc/install" \
 --with-cxxoptflags="-g -O3 -axCORE-AVX2,AVX -xSSE4.2 -ipo -no-inline-min-size -inline-max-size=345 -no-inline-max-total-size -no-inline-max-per-routine -no-inline-max-per-compile " \
 --with-fortran-lib="-L/nasa/intel/Compiler/2016.2.181/compilers_and_libraries_2016.2.181/linux/compiler/lib/intel64/ -lifcore -lifport" \
 --with-vendor="intel-pleiades" \
 --enable-development
}}}
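
After configuring, build and install ISSM with the usual autotools steps; a minimal sketch (the job count is arbitrary):

{{{
#!sh
cd $ISSM_DIR
make -j 8       # adjust the number of parallel jobs to taste
make install
}}}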

== Installing ISSM on Pleiades with Dakota ==

For Dakota to run, you will still need to build PETSc and m1qn3, but make sure that you are using the Intel MPI and that the external packages are built with the MPI compilers.

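Once the module changes below are in place, you can sanity-check that the MPI compiler wrappers are the ones in your PATH; a quick sketch:

{{{
#!sh
# Show which MPI compiler wrapper is active and what it invokes
which mpicc
mpicc -show
}}}
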
In your `~/.bashrc`, add the following lines:

{{{
#!sh
module load comp-intel/2018.3.222
module load mpi-intel/2018.3.222
}}}

And change your loaded packages to remove `pkgsrc`: the boost that pkgsrc loads is incompatible and must be removed before Dakota will build.
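
The resulting module list should look something like the following sketch, inferred from the module requirements listed at the end of this section:

{{{
#!sh
module load scicon/app-tools
module load comp-intel/2018.3.222
module load mpi-intel/2018.3.222
}}}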

Use the following configuration script for ISSM with Dakota (adapt to your needs):

{{{
#!sh
./configure \
 --prefix=$ISSM_DIR \
 --enable-standalone-libraries \
 --with-wrappers=no \
 --without-Love \
 --without-Sealevelchange \
 --without-kriging \
 --with-m1qn3-dir=$ISSM_DIR/externalpackages/m1qn3/install \
 --with-metis-dir=$ISSM_DIR/externalpackages/petsc/install \
 --with-petsc-dir=$ISSM_DIR/externalpackages/petsc/install \
 --with-scalapack-lib="-L/nasa/intel/Compiler/2018.3.222/compilers_and_libraries_2018.3.222/linux/mkl/lib/intel64/libmkl_scalapack_lp64.so" \
 --with-mpi-include=" " \
 --with-mpi-libflags=" -lmpi" \
 --with-mkl-libflags="-L/nasa/intel/Compiler/2018.3.222/compilers_and_libraries_2018.3.222/linux/mkl/lib/intel64 -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -lpthread -lm" \
 --with-mumps-dir=$ISSM_DIR/externalpackages/petsc/install \
 --with-fortran-lib="-L/nasa/intel/Compiler/2018.3.222/compilers_and_libraries_2018.3.222/linux/compiler/lib/intel64_lin/ -lifcore -lifport -lgfortran" \
 --with-cxxoptflags="-g -O3 -diag-disable=2196 " \
 --with-vendor="intel-pleiades-mpi" \
 --with-gsl-dir="$ISSM_DIR/externalpackages/gsl/install" \
 --with-codipack-dir="$ISSM_DIR/externalpackages/codipack/install" \
 --with-medipack-dir="$ISSM_DIR/externalpackages/medipack/install" \
 --enable-tape-alloc \
 --enable-development

#NB: The following is the old flag list, which does not work with the mpi compiler:
#--with-cxxoptflags="-g -O3 -diag-disable=2196 -axCORE-AVX2,AVX -xSSE4.2 " \
}}}

Without Dakota, make sure your module list includes scicon/app-tools, comp-intel/2016.2.181, and mpi-hpe/mpt.
With Dakota, make sure your module list includes scicon/app-tools, comp-intel/2018.3.222, and mpi-intel/2018.3.222. You can specify your own list of modules by adding them to `pfe_settings.m`, for example: