Changes between Version 4 and Version 5 of lonestar


Ignore:
Timestamp:
01/27/16 20:26:01 (9 years ago)
Author:
seroussi
Comment:

--

Legend:

Unmodified
Added
Removed
Modified
  • lonestar

    v4 v5  
    77{{{
    88#!sh
    9 Host lonestar lonestar.tacc.utexas.edu
    10    HostName lonestar.tacc.utexas.edu
     9Host lonestar ls5.tacc.utexas.edu
     10   HostName ls5.tacc.utexas.edu
    1111  User YOURUSERNAME
    12   HostKeyAlias lonestar.tacc.utexas.edu
     12  HostKeyAlias ls5.tacc.utexas.edu
    1313  HostbasedAuthentication no
    1414}}}
    15 and replace `YOURUSERNAME` by your lonestar username.
     15and replace `YOURUSERNAME` by your lonestar5 username.
    1616
    17 Once this is done, you can ssh lonestar by simply doing:
     17Once this is done, you can ssh lonestar5 by simply doing:
    1818
    1919{{{
     
    5959export ISSM_DIR=PATHTOTRUNK
    6060source $ISSM_DIR/etc/environment.sh
    61 module load cmake/2.8.7
    62 module load mkl/10.3
     61module load cmake/3.4.1
    6362}}}
    6463
     
    8180   --with-kml=no \
    8281   --with-bamg=no \
    83    --with-metis-dir=$ISSM_DIR/externalpackages/metis/install \
     82   --with-metis-dir="$ISSM_DIR/externalpackages/petsc/install" \
    8483   --with-petsc-dir=$ISSM_DIR/externalpackages/petsc/install \
    8584   --with-m1qn3-dir=$ISSM_DIR/externalpackages/m1qn3/install \
    86    --with-mpi-include="/opt/apps/intel11_1/mvapich2/1.6/include/" \
    87    --with-mpi-libflags="-L/opt/apps/intel11_1/mvapich2/1.6/lib/ -lmpich" \
    88    --with-mkl-dir="$TACC_MKL_LIB" \
     85   --with-mpi-include="/opt/cray/mpt/default/gni/mpich-intel/14.0/include/" \
     86   --with-mpi-libflags="-L/opt/cray/mpt/default/gni/mpich-intel/14.0/lib/ -lmpich" \
     87   --with-mkl-dir="/opt/apps/intel/16.0.1.150/compilers_and_libraries_2016.1.150/linux/mkl/lib/intel64" \
    8988   --with-mumps-dir=$ISSM_DIR/externalpackages/petsc/install/ \
    9089   --with-scalapack-dir=$ISSM_DIR/externalpackages/petsc/install/ \
     
    103102cluster.login='seroussi';
    104103cluster.codepath='/home1/03729/seroussi/trunk-jpl/bin/';
    105 cluster.executionpath='/home1/03729/seroussi/trunk-jpl/execution/';
     104cluster.executionpath='/work/03729/seroussi/trunk-jpl/execution/';
    106105}}}
    107106
     
    120119to have a job of 2 nodes, 12 cpus for nodes, so a total of 24 cores.
    121120
     121To submit a job on lonestar, do:
     122
     123 {{{
     124#!m
     125sbatch job.queue
     126}}}
     127
    122128Now, if you want to check the status of your job and the queue you are using, type the following in your bash session on lonestar:
    123129