Hi, I have some transient runs I would like to plot. They are running fine on the cluster and I can load them onto my local machine. I thought it might be better to plot them on the cluster, rather than loading the output bin onto my local machine (I have MATLAB on the cluster too). Loading them onto my local machine takes so much time and disk space. Thus, I am about to create a plot script on the cluster (using the loadresultsfromdisk function) that I can run there. I opened ISSM on my cluster to test my commands; I have to load a model for that, but I get this error message:
`Warning: While loading an object of class 'love':
Unrecognized method, property, or field 'int_steps_per_layers' for class 'love'.
Is something installed incorrectly? If I add MATLAB to the installation, would it make my transient runs slower?
Some modules are pre-installed on the cluster and I just load them. This is my installation script for the cluster:
module purge
2 module load openmpi/4.1.3
3 module load netcdf/4.8.0p
4 module load hdf5/1.10.7p
5
6 export CC=mpicc
7 export CXX=mpicxx
8 export FC=mpif90
9
10 #export ISSM_DIR=$PWD
11 export ISSM_PETSC_VERSION="3.16"
12
13 source $ISSM_DIR/etc/environment.sh
14
15 make distclean
16
17 #-----------------------------
18 # install external packages
19 #-----------------------------
20
21 export ISSM_SOURCE_DIR=$ISSM_DIR
22 cd $ISSM_SOURCE_DIR/externalpackages
23
24 #echo "--------------------------------------------------------"
25 #echo " compile cmake"
26 #echo "--------------------------------------------------------"
27 #cd cmake
28 #./install.sh
29 #cd ..
30 #export LIBCMAKE_ROOT=$ISSM_DIR/externalpackages/cmake/install
31 #source $ISSM_DIR/etc/environment.sh
32
33 echo "--------------------------------------------------------"
34 echo " compile m1qn3"
35 echo "--------------------------------------------------------"
36 cd m1qn3
37 FFLAGS="-O2 -g" ./install.sh
38 cd ..
39 export LIBM1QN3_ROOT=$ISSM_DIR/externalpackages/m1qn3/install
40 source $ISSM_DIR/etc/environment.sh
41
42 echo "--------------------------------------------------------"
43 echo " compile triangle"
44 echo "--------------------------------------------------------"
45 cd triangle
46 CFLAGS="-O2 -g" ./install-linux.sh
47 cd ..
48 export LIBTRIANGLE_ROOT=$ISSM_DIR/externalpackages/triangle/install
49 source $ISSM_DIR/etc/environment.sh
50
Then my configure script on the cluster:
1 module purge
2 module load openmpi/4.1.3
3 module load netcdf/4.8.0p
4 module load hdf5/1.10.7p
5
6 module load petsc/3.17.4
7 #module load python3/3.11.0
8
9 export CC=mpicc
10 export CXX=mpicxx
11 export FC=mpif90
12
13 #export ISSM_DIR=$PWD
14 # export ISSM_PETSC_VERSION="3.16"
15
16
17 make distclean
18
19 source $ISSM_DIR/etc/environment.sh
20
21 autoreconf -ivf
65 ./configure \
66 --without-Love --without-kml --without-Sealevelchange \
67 --prefix=$ISSM_DIR \
68 --with-triangle-dir="$ISSM_DIR/externalpackages/triangle/install" \
69 --with-mpi-include="$OPENMPI_BASE/include" \
70 --with-petsc-dir="$PETSC_ROOT" \
71 --with-metis-dir="$PETSC_ROOT" \
72 --with-mumps-dir="$PETSC_ROOT" \
73 --with-scalapack-lib="-L${MKLROOT}/lib/intel64 -lmkl_scalapack_lp64 -lmkl_blacs_openmpi_lp64" \
74 --with-mkl-libflags="-L${MKLROOT}/lib/intel64 -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -lpthread -lm" \
75 --with-m1qn3-dir="$ISSM_DIR/externalpackages/m1qn3/install" \
76 --enable-debugging \
77 --enable-development \
78 --enable-shared \
79 --without-kriging \
80 --with-numthreads=16 CC=mpicc CXX=mpicxx FC=mpif90 F77=mpif90
81
With this shell function (used like an alias) I open ISSM:
67 issmt() {
68 module purge
69 module load openmpi/4.1.3
70 module load netcdf/4.8.0p
71 module load hdf5/1.10.7p
72 module load petsc/3.17.4
73 module load matlab/R2021b
74 module load matlab_licence/monash
75 matlab -nodesktop -nosplash -r "addpath $ISSM_DIR/src/m/dev; devpath; addpath $ISSM_DIR/lib"
76 }
77