diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a42d03e128abb73c8dc31b379f4808b2fa1f237d..67d0deb81ee7c5e10ed3416e7e74814a19d5077e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -7,54 +7,54 @@ stages:
   - install
   - test
 
-env:bionic:
-  image: keckj/hysop:bionic
+env:focal:
+  image: keckj/hysop:focal
   stage: env
   script:
       - "bash ci/scripts/version.sh"
 
-config:bionic:
-  image: keckj/hysop:bionic
+config:focal:
+  image: keckj/hysop:focal
   stage: configure
   script: 
-      - "bash ci/scripts/config.sh $CI_PROJECT_DIR/build/gcc-7   $CI_PROJECT_DIR/install/gcc-7   gcc-7   g++-7     gfortran-7"
-      - "bash ci/scripts/config.sh $CI_PROJECT_DIR/build/clang-8 $CI_PROJECT_DIR/install/clang-8 clang-8 clang++-8 gfortran-7"
+      - "bash ci/scripts/config.sh $CI_PROJECT_DIR/build/gcc   $CI_PROJECT_DIR/install/gcc   gcc   g++     gfortran"
+      - "bash ci/scripts/config.sh $CI_PROJECT_DIR/build/clang $CI_PROJECT_DIR/install/clang clang clang++ gfortran"
   dependencies:
-    - env:bionic
+    - env:focal
   artifacts:
     paths:
         - $CI_PROJECT_DIR/build
 
-build:bionic:
-  image: keckj/hysop:bionic
+build:focal:
+  image: keckj/hysop:focal
   stage: build
   script: 
-      - "bash ci/scripts/build.sh $CI_PROJECT_DIR/build/gcc-7   gcc-7   g++-7     gfortran-7"
-      - "bash ci/scripts/build.sh $CI_PROJECT_DIR/build/clang-8 clang-8 clang++-8 gfortran-7"
+      - "bash ci/scripts/build.sh $CI_PROJECT_DIR/build/gcc   gcc   g++     gfortran"
+      - "bash ci/scripts/build.sh $CI_PROJECT_DIR/build/clang clang clang++ gfortran"
   dependencies:
-    - config:bionic
+    - config:focal
   artifacts:
     paths:
         - $CI_PROJECT_DIR/build
 
-install:bionic:
-  image: keckj/hysop:bionic
+install:focal:
+  image: keckj/hysop:focal
   stage: install
   script: 
-      - "bash ci/scripts/install.sh $CI_PROJECT_DIR/build/gcc-7 $CI_PROJECT_DIR/install/gcc-7"
+      - "bash ci/scripts/install.sh $CI_PROJECT_DIR/build/gcc $CI_PROJECT_DIR/install/gcc"
   dependencies:
-    - build:bionic
+    - build:focal
   artifacts:
     paths:
         - $CI_PROJECT_DIR/install
 
-test:bionic:
-  image: keckj/hysop:bionic
+test:focal:
+  image: keckj/hysop:focal
   stage: test
   script:
-    - "bash ci/scripts/test.sh $CI_PROJECT_DIR/install/gcc-7 $CI_PROJECT_DIR/hysop $CI_PROJECT_DIR/cache"
+    - "bash ci/scripts/test.sh $CI_PROJECT_DIR/install/gcc $CI_PROJECT_DIR/hysop $CI_PROJECT_DIR/cache"
   dependencies:
-    - install:bionic
+    - install:focal
   cache:
     paths:
       - $CI_PROJECT_DIR/cache
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e9c8098a616dc2e67fc413da01efdb2fbff6e7f7..4f4a57e288c841c2388112e3db56dbf7743c9415 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -18,8 +18,13 @@
 cmake_minimum_required(VERSION 3.0.2)
 
 if(POLICY CMP0053)
+    # Simplify variable reference and escape sequence evaluation (cmake 3.1)
   cmake_policy(SET CMP0053 NEW)
 endif()
+if(POLICY CMP0074)
+    # find_package(<PackageName>) commands will first search prefixes specified by the <PackageName>_ROOT variable (cmake 3.12)
+    cmake_policy(SET CMP0074 NEW)
+endif()
 
 # Set cmake modules directory (i.e. the one which contains all user-defined FindXXX.cmake files among other things)
 set(CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
diff --git a/INSTALL b/INSTALL
index bfa0f3666075ae491a0be0d6c0f8057066294845..196557af28df1cd74a002677dddb863f6bcefcb8 100644
--- a/INSTALL
+++ b/INSTALL
@@ -49,7 +49,7 @@ At the end of this step BUILDDIR contains all makefiles, setup.py and other requ
 
 Some useful options for cmake : 
 
--DFFTW_DIR : where to find fftw if it's not in a "standard" place.
+-DFFTW_ROOT : where to find fftw if it's not in a "standard" place.
 -DWITH_SCALES=ON/OFF : to compile a hysop version including scales (default = on)
 -DWITH_PPM=ON/OFF : to compile  a hysop version including scales (default = off)
 -DWITH_TESTS=ON/OFF: enable testing (i.e. prepare target "make test", default = off)
@@ -60,7 +60,7 @@ mkdir /home/mylogin/buildhysop
 cd /home/mylogin/buildhysop
 export FC=mpif90
 module load cmake-2.8
-cmake -DFFTW_DIR=/softs/install/fftw3.1 ~/Softs/HySoP
+cmake -DFFTW_ROOT=/softs/install/fftw3.1 ~/Softs/HySoP
 
 ===================================================================================================================
 3 - Build
diff --git a/ci/docker_images/ubuntu/bionic/Dockerfile b/ci/docker_images/ubuntu/bionic/Dockerfile
index 2609fefd9c5efb8885553be1a6ae129a6333f7a2..c7f5c384509af05349077a311f7086ba721d7a5e 100644
--- a/ci/docker_images/ubuntu/bionic/Dockerfile
+++ b/ci/docker_images/ubuntu/bionic/Dockerfile
@@ -12,18 +12,50 @@ RUN apt-get update
 RUN apt-get full-upgrade -y
 
 # get build tools and required libraries
-RUN apt-get install -y expat unzip xz-utils automake libtool pkg-config cmake rsync git vim ssh clang gcc gfortran cython swig lsb-core cpio libnuma1 libpciaccess0 libreadline-dev libboost-all-dev libblas-dev liblapack-dev libcgal-dev libatlas-base-dev libopenblas-dev libgfortran3 libgcc1 libopenmpi-dev libhdf5-openmpi-dev libfftw3-dev libfftw3-mpi-dev libgmp-dev libmpfr-dev libmpc-dev libsparsehash-dev libcairo-dev libcairomm-1.0-dev libflint-dev python python-dev python-pip python-tk opencl-headers
+RUN apt-get install -y expat unzip xz-utils automake libtool pkg-config cmake rsync git vim ssh clang gcc gfortran swig lsb-core cpio libnuma1 libpciaccess0 libreadline-dev libboost-all-dev libblas-dev liblapack-dev libcgal-dev libatlas-base-dev libopenblas-dev libgfortran3 libgcc1 libgmp-dev libmpfr-dev libmpc-dev libsparsehash-dev libcairo-dev libcairomm-1.0-dev python python-dev python-tk opencl-headers
 
-# python packages using pip
-RUN pip install --upgrade pip
-RUN pip install --upgrade numpy setuptools cffi wheel pytest
-RUN pip install --upgrade backports.weakref backports.tempfile scipy sympy matplotlib mpi4py gmpy2 psutil py-cpuinfo Mako subprocess32 editdistance portalocker colors.py tee primefac pycairo weave argparse_color_formatter networkx pyvis
-RUN CC=mpicc HDF5_MPI="ON" pip install --upgrade --no-binary=h5py h5py
+# python packages using pip2.7
+RUN cd /tmp && \
+ curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
+ python2.7 get-pip.py && \
+ pip2.7 install --upgrade pip && \
+ rm -f /tmp/get-pip.py
+RUN pip2.7 install --upgrade numpy setuptools cffi wheel pytest pybind11 cython
+
+# OpenMPI 4 + mpi4py (enable mpi1 compatibility for mpi4py)
+ENV MPI_ROOT "/usr/local"
+RUN cd /tmp && \
+ wget https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.4.tar.gz && \
+ tar -xvzf openmpi-*.tar.gz && \
+ rm -f openmpi-*.tar.gz && \
+ cd openmpi-* && \
+ ./configure --enable-shared --disable-static --with-threads=posix --enable-ipv6 --prefix="${MPI_ROOT}" --with-hwloc=internal --with-libevent=internal --enable-mpi1-compatibility && \
+ make -j$(nproc) && \
+ make install && \
+ rm -rf /tmp/openmpi-*
+
+ENV MPICC "${MPI_ROOT}/bin/mpicc"
+RUN ldconfig && pip2.7 install --upgrade mpi4py
+
+# HDF5 + h5py (v1.10.6 is currently the last supported by h5py)
+RUN cd /tmp && \
+ wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.6/src/hdf5-1.10.6.tar.gz && \
+ tar -xvzf hdf5-*.tar.gz && \
+ rm -rf hdf5-*.tar.gz && \
+ cd hdf5-* && \
+ CC="${MPICC}" ./configure --prefix="${MPI_ROOT}" --enable-parallel --enable-shared=yes --enable-static=no && \
+ make -j$(nproc) && \
+ make install && \
+ rm -rf /tmp/hdf5-*
+RUN CC="${MPICC}" HDF5_MPI="ON" HDF5_VERSION="1.10.6" HDF5_DIR="${MPI_ROOT}" pip2.7 install --upgrade --no-binary=h5py h5py
 
 # llvm + numba + llvmlite (llvmlite 0.32 has a bug with llvm8)
 RUN apt-get install -y llvm-8-dev libclang-8-dev clang-8
 ENV LLVM_CONFIG=llvm-config-8
-RUN pip install --upgrade numba llvmlite==0.31.0
+RUN pip2.7 install --upgrade numba llvmlite==0.31.0
+
+# other python packages
+RUN pip2.7 install --upgrade backports.weakref backports.tempfile scipy sympy matplotlib gmpy2 psutil py-cpuinfo Mako subprocess32 editdistance portalocker colors.py tee primefac pycairo weave argparse_color_formatter networkx pyvis zarr numcodecs jsonpickle
 
 # patchelf
 RUN cd /tmp && \
@@ -36,16 +68,16 @@ RUN cd /tmp && \
  cd - && \
  rm -Rf /tmp/patchelf
 
-# Intel experimental OpenCL platform with SYCL support (2020-02)
+# Intel experimental OpenCL platform with SYCL support (2020-06)
 ENV LD_LIBRARY_PATH "/opt/intel/oclcpuexp/x64:${LD_LIBRARY_PATH}"
 RUN mkdir -p /opt/intel/oclcpuexp && \
- wget https://github.com/intel/llvm/releases/download/2020-02/oclcpuexp-2020.10.3.0.04_rel.tar.gz && \
- tar -xvzf oclcpuexp-2020.10.3.0.04_rel.tar.gz && \
+ wget https://github.com/intel/llvm/releases/download/2020-06/oclcpuexp-2020.10.6.0.4_rel.tar.gz && \
+ tar -xvzf oclcpuexp-*.tar.gz && \
  mv x64/ /opt/intel/oclcpuexp/ && \
  mv clbltfnshared.rtl /opt/intel/oclcpuexp/ && \
- rm -rf x64 *.rtl oclcpuexp* && \
- wget https://github.com/oneapi-src/oneTBB/releases/download/v2020.2/tbb-2020.2-lin.tgz && \
- tar -xvzf tbb-2020.2-lin.tgz && \
+ rm -rf *.rtl oclcpuexp-* && \
+ wget https://github.com/oneapi-src/oneTBB/releases/download/v2020.3/tbb-2020.3-lin.tgz && \
+ tar -xvzf tbb-*.tgz && \
  mv tbb/lib/intel64/gcc4.8/* /opt/intel/oclcpuexp/x64/ && \
  rm -f /usr/local/lib/libOpenCL.so && \
  rm -f /usr/local/lib/libOpenCL.so && \
@@ -83,13 +115,13 @@ RUN cd /tmp && \
 
 # pyopencl
 RUN cd /tmp && \
- pip install pybind11 && \
  git clone https://github.com/inducer/pyopencl && \
  cd pyopencl && \
  git submodule update --init && \
- ./configure.py && \
+ git checkout v2020.1 && \
+ python2.7 configure.py && \
  make && \
- pip install --upgrade . && \
+ pip2.7 install --upgrade . && \
  cd - && \
  rm -Rf /tmp/pyopencl
 
@@ -123,20 +155,10 @@ RUN cd /tmp && \
 RUN cd /tmp && \
  git clone https://github.com/geggo/gpyfft && \
  cd gpyfft && \
- pip install . && \
+ pip2.7 install . && \
  cd - && \
  rm -Rf /tmp/gpyfft
 
-# pyfftw (with R2R transforms - experimental branch)
-RUN cd /tmp && \
- git clone https://github.com/drwells/pyFFTW && \
- cd pyFFTW && \
- git checkout r2r-try-two && \
- sed -i 's/\(fftw3[fl]\?_\)threads/\1omp/g' setup.py && \
- pip install . && \
- cd - && \
- rm -Rf /tmp/pyFFTW
-
 # HPTT (CPU tensor permutation library)
 RUN cd /tmp && \
  git clone https://gitlab.com/keckj/hptt && \
@@ -147,7 +169,7 @@ RUN cd /tmp && \
  make && \
  make install && \
  cd ../pythonAPI && \
- pip install --upgrade . && \
+ pip2.7 install --upgrade . && \
  cd /tmp && \
  rm -Rf /tmp/hptt
 
@@ -155,26 +177,63 @@ RUN cd /tmp && \
 RUN cd /tmp && \
  git clone https://gitlab.com/keckj/memory-tempfile && \
  cd memory-tempfile && \
- pip install . && \
+ pip2.7 install . && \
  cd /tmp && \
  rm -Rf /tmp/memory-tempfile
 
-# python flint
+# python flint (FLINT2 + ARB + python-flint)
 RUN cd /tmp \
-  && wget https://github.com/fredrik-johansson/arb/archive/2.16.0.tar.gz \
-  && tar -xvzf 2.16.0.tar.gz \
-  && cd arb-2.16.0 \
+  && wget https://github.com/wbhart/flint2/archive/v2.6.1.tar.gz \
+  && tar -xvzf v2.6.1.tar.gz \
+  && cd flint2-2.6.1 \
   && ./configure \
   && make -j$(nproc) \
   && make install \
   && cd - \
-  && rm -rf arb-2.16.0
+  && rm -rf flint2-2.6.1
 RUN cd /tmp \
-  && git clone https://github.com/fredrik-johansson/python-flint \
-  && cd python-flint \
-  && pip install . \
+  && wget https://github.com/fredrik-johansson/arb/archive/2.18.1.tar.gz \
+  && tar -xvzf 2.18.1.tar.gz \
+  && cd arb-2.18.1 \
+  && ./configure \
+  && make -j$(nproc) \
+  && make install \
   && cd - \
-  && rm -rf python-flint
+  && rm -rf arb-2.18.1
+RUN pip2.7 install --upgrade python-flint
+
+# static fftw + pyfftw (with R2R transforms)
+# Weird pyfftw bug : not passing -O2 explicitly during build causes a segfault on import...
+# See https://bugs.gentoo.org/548776
+ENV FFTW_ROOT="/usr/local"
+ADD ci/patch/pyfftw.patch /tmp/pyfftw.patch
+RUN cd /tmp && \
+ wget http://www.fftw.org/fftw-3.3.8.tar.gz && \
+ tar -xvzf fftw-*.tar.gz && \
+ rm -f fftw-*.tar.gz && \
+ cd fftw-* && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" --enable-single && \
+ make -j$(nproc) && \
+ make install && \
+ make clean && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" && \
+ make -j8 && \
+ make install && \
+ make clean && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" --enable-long-double && \
+ make -j8 && \
+ make install && \
+ rm -rf /tmp/fftw-*
+RUN cd /tmp && \
+ git clone https://github.com/drwells/pyFFTW && \
+ cd pyFFTW && \
+ git checkout r2r-try-two && \
+ sed -i 's/\(fftw3[fl]\?_\)threads/\1omp/g' setup.py && \
+ mv /tmp/pyfftw.patch . && \
+ patch -p0 -i pyfftw.patch && \
+ STATIC_FFTW_DIR="${FFTW_ROOT}/lib" CFLAGS="-Wl,-Bsymbolic -fopenmp -I${FFTW_ROOT}/include -O2" python2.7 setup.py build_ext --inplace && \
+ pip2.7 install --upgrade . && \
+ rm -rf /tmp/pyFFTW
 
 # ensure all libraries are known by the runtime linker
 RUN ldconfig
diff --git a/ci/docker_images/ubuntu/focal/Dockerfile b/ci/docker_images/ubuntu/focal/Dockerfile
index 539ff1ef44dc5f54301ee204b51256ca491e3c78..193df3282d70613218c0c2d2085f8632fb618e13 100644
--- a/ci/docker_images/ubuntu/focal/Dockerfile
+++ b/ci/docker_images/ubuntu/focal/Dockerfile
@@ -12,23 +12,51 @@ RUN apt-get update
 RUN apt-get full-upgrade -y
 
 # get build tools and required libraries
-RUN apt-get install -y --no-install-recommends expat unzip xz-utils automake libtool pkg-config cmake git vim ssh curl wget ca-certificates gcc g++ gfortran lsb-core cpio libnuma1 libpciaccess0 libreadline-dev libblas-dev liblapack-dev libgcc-9-dev libgfortran-9-dev libopenmpi-dev libhdf5-openmpi-dev libgmp-dev libmpfr-dev libmpc-dev libflint-dev libfftw-dev libfftw-mpi-dev python2.7-dev opencl-headers
+RUN apt-get install -y --no-install-recommends expat unzip xz-utils automake libtool pkg-config cmake rsync git vim ssh curl wget ca-certificates gcc g++ gfortran lsb-core cpio libnuma1 libpciaccess0 libreadline-dev libblas-dev liblapack-dev libgcc-9-dev libgfortran-9-dev libgmp-dev libmpfr-dev libmpc-dev python2.7-dev opencl-headers swig libcairo-dev libcairomm-1.0-dev python2.7-tk
 
-# python packages using pip
+# python packages using pip2.7
 RUN cd /tmp && \
  curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
  python2.7 get-pip.py && \
  pip2.7 install --upgrade pip && \
  rm -f /tmp/get-pip.py
 RUN pip2.7 install --upgrade numpy setuptools cffi wheel pytest pybind11 cython
-RUN pip2.7 install --upgrade backports.weakref backports.tempfile scipy sympy matplotlib mpi4py gmpy2 psutil py-cpuinfo Mako subprocess32 editdistance portalocker colors.py tee primefac weave argparse_color_formatter networkx pyvis
-RUN CC=mpicc HDF5_MPI="ON" pip2.7 install --upgrade --no-binary=h5py h5py
+
+# OpenMPI 4 + mpi4py (enable mpi1 compatibility for mpi4py)
+ENV MPI_ROOT "/usr/local"
+RUN cd /tmp && \
+ wget https://download.open-mpi.org/release/open-mpi/v4.0/openmpi-4.0.4.tar.gz && \
+ tar -xvzf openmpi-*.tar.gz && \
+ rm -f openmpi-*.tar.gz && \
+ cd openmpi-* && \
+ ./configure --enable-shared --disable-static --with-threads=posix --enable-ipv6 --prefix="${MPI_ROOT}" --with-hwloc=internal --with-libevent=internal --enable-mpi1-compatibility && \
+ make -j$(nproc) && \
+ make install && \
+ rm -rf /tmp/openmpi-*
+
+ENV MPICC "${MPI_ROOT}/bin/mpicc"
+RUN ldconfig && pip2.7 install --upgrade mpi4py
+
+# HDF5 + h5py (v1.10.6 is currently the last supported by h5py)
+RUN cd /tmp && \
+ wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.6/src/hdf5-1.10.6.tar.gz && \
+ tar -xvzf hdf5-*.tar.gz && \
+ rm -rf hdf5-*.tar.gz && \
+ cd hdf5-* && \
+ CC="${MPICC}" ./configure --prefix="${MPI_ROOT}" --enable-parallel --enable-shared=yes --enable-static=no && \
+ make -j$(nproc) && \
+ make install && \
+ rm -rf /tmp/hdf5-*
+RUN CC="${MPICC}" HDF5_MPI="ON" HDF5_VERSION="1.10.6" HDF5_DIR="${MPI_ROOT}" pip2.7 install --upgrade --no-binary=h5py h5py
 
 # llvm + numba + llvmlite (llvmlite 0.32 has a bug with llvm8)
 RUN apt-get install -y llvm-8-dev libclang-8-dev clang-8
 ENV LLVM_CONFIG=llvm-config-8
 RUN pip2.7 install --upgrade numba llvmlite==0.31.0
 
+# other python packages
+RUN pip2.7 install --upgrade backports.weakref backports.tempfile scipy sympy matplotlib gmpy2 psutil py-cpuinfo Mako subprocess32 editdistance portalocker colors.py tee primefac pycairo weave argparse_color_formatter networkx pyvis zarr numcodecs jsonpickle
+
 # patchelf
 RUN cd /tmp && \
  git clone https://github.com/NixOS/patchelf && \
@@ -40,16 +68,16 @@ RUN cd /tmp && \
  cd - && \
  rm -Rf /tmp/patchelf
 
-# Intel experimental OpenCL platform with SYCL support (2020-02)
+# Intel experimental OpenCL platform with SYCL support (2020-06)
 ENV LD_LIBRARY_PATH "/opt/intel/oclcpuexp/x64:${LD_LIBRARY_PATH}"
 RUN mkdir -p /opt/intel/oclcpuexp && \
- wget https://github.com/intel/llvm/releases/download/2020-02/oclcpuexp-2020.10.3.0.04_rel.tar.gz && \
- tar -xvzf oclcpuexp-2020.10.3.0.04_rel.tar.gz && \
+ wget https://github.com/intel/llvm/releases/download/2020-06/oclcpuexp-2020.10.6.0.4_rel.tar.gz && \
+ tar -xvzf oclcpuexp-*.tar.gz && \
  mv x64/ /opt/intel/oclcpuexp/ && \
  mv clbltfnshared.rtl /opt/intel/oclcpuexp/ && \
- rm -rf x64 *.rtl oclcpuexp* && \
- wget https://github.com/oneapi-src/oneTBB/releases/download/v2020.2/tbb-2020.2-lin.tgz && \
- tar -xvzf tbb-2020.2-lin.tgz && \
+ rm -rf *.rtl oclcpuexp-* && \
+ wget https://github.com/oneapi-src/oneTBB/releases/download/v2020.3/tbb-2020.3-lin.tgz && \
+ tar -xvzf tbb-*.tgz && \
  mv tbb/lib/intel64/gcc4.8/* /opt/intel/oclcpuexp/x64/ && \
  rm -f /usr/local/lib/libOpenCL.so && \
  rm -f /usr/local/lib/libOpenCL.so && \
@@ -90,6 +118,7 @@ RUN cd /tmp && \
  git clone https://github.com/inducer/pyopencl && \
  cd pyopencl && \
  git submodule update --init && \
+ git checkout v2020.1 && \
  python2.7 configure.py && \
  make && \
  pip2.7 install --upgrade . && \
@@ -110,6 +139,7 @@ RUN cd /tmp && \
 
 # clFFT
 RUN cd /tmp && \
+ ln -s /usr/local/lib /usr/local/lib64 && \
  git clone https://github.com/clMathLibraries/clFFT && \
  cd clFFT && \
  cd src && \
@@ -129,15 +159,6 @@ RUN cd /tmp && \
  cd - && \
  rm -Rf /tmp/gpyfft
 
-# pyfftw (with R2R transforms - experimental branch)
-RUN cd /tmp && \
- git clone https://github.com/drwells/pyFFTW && \
- cd pyFFTW && \
- git checkout r2r-try-two && \
- pip2.7 install . && \
- cd - && \
- rm -Rf /tmp/pyFFTW
-
 # HPTT (CPU tensor permutation library)
 RUN cd /tmp && \
  git clone https://gitlab.com/keckj/hptt && \
@@ -160,22 +181,59 @@ RUN cd /tmp && \
  cd /tmp && \
  rm -Rf /tmp/memory-tempfile
 
-# python flint
+# python flint (FLINT2 + ARB + python-flint)
 RUN cd /tmp \
-  && wget https://github.com/fredrik-johansson/arb/archive/2.16.0.tar.gz \
-  && tar -xvzf 2.16.0.tar.gz \
-  && cd arb-2.16.0 \
+  && wget https://github.com/wbhart/flint2/archive/v2.6.1.tar.gz \
+  && tar -xvzf v2.6.1.tar.gz \
+  && cd flint2-2.6.1 \
   && ./configure \
   && make -j$(nproc) \
   && make install \
   && cd - \
-  && rm -rf arb-2.16.0
+  && rm -rf flint2-2.6.1
 RUN cd /tmp \
-  && git clone https://github.com/fredrik-johansson/python-flint \
-  && cd python-flint \
-  && pip2.7 install . \
+  && wget https://github.com/fredrik-johansson/arb/archive/2.18.1.tar.gz \
+  && tar -xvzf 2.18.1.tar.gz \
+  && cd arb-2.18.1 \
+  && ./configure \
+  && make -j$(nproc) \
+  && make install \
   && cd - \
-  && rm -rf python-flint
+  && rm -rf arb-2.18.1
+RUN pip2.7 install --upgrade python-flint
+
+# static fftw + pyfftw (with R2R transforms)
+# Weird pyfftw bug : not passing -O2 explicitly during build causes a segfault on import...
+# See https://bugs.gentoo.org/548776
+ENV FFTW_ROOT="/usr/local"
+ADD ci/patch/pyfftw.patch /tmp/pyfftw.patch
+RUN cd /tmp && \
+ wget http://www.fftw.org/fftw-3.3.8.tar.gz && \
+ tar -xvzf fftw-*.tar.gz && \
+ rm -f fftw-*.tar.gz && \
+ cd fftw-* && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" --enable-single && \
+ make -j$(nproc) && \
+ make install && \
+ make clean && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" && \
+ make -j8 && \
+ make install && \
+ make clean && \
+ ./configure --enable-openmp --enable-threads --enable-mpi --enable-static --with-pic --prefix="${FFTW_ROOT}" --enable-long-double && \
+ make -j8 && \
+ make install && \
+ rm -rf /tmp/fftw-*
+RUN cd /tmp && \
+ git clone https://github.com/drwells/pyFFTW && \
+ cd pyFFTW && \
+ git checkout r2r-try-two && \
+ sed -i 's/\(fftw3[fl]\?_\)threads/\1omp/g' setup.py && \
+ mv /tmp/pyfftw.patch . && \
+ patch -p0 -i pyfftw.patch && \
+ STATIC_FFTW_DIR="${FFTW_ROOT}/lib" CFLAGS="-Wl,-Bsymbolic -fopenmp -I${FFTW_ROOT}/include -O2" python2.7 setup.py build_ext --inplace && \
+ pip2.7 install --upgrade . && \
+ rm -rf /tmp/pyFFTW
 
 # ensure all libraries are known by the runtime linker
 RUN ldconfig
diff --git a/ci/patch/pyfftw.patch b/ci/patch/pyfftw.patch
new file mode 100644
index 0000000000000000000000000000000000000000..d659715deef15bf3cc9fc35b01dc1ba04252ccb4
--- /dev/null
+++ b/ci/patch/pyfftw.patch
@@ -0,0 +1,40 @@
+--- setup.py.origin	2020-03-11 15:59:29.426762235 +0100
++++ setup.py	2020-03-11 16:02:32.366226427 +0100
+@@ -53,6 +53,9 @@
+ ISRELEASED = False
+ VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+ 
++static_fftw_path = os.environ.get('STATIC_FFTW_DIR', None)
++link_static_fftw = static_fftw_path is not None
++
+ def get_package_data():
+     from pkg_resources import get_build_platform
+ 
+@@ -121,8 +124,27 @@
+         
+         have_cython = False
+ 
++    if link_static_fftw:
++        from pkg_resources import get_build_platform
++        if get_build_platform() in ('win32', 'win-amd64'):
++            lib_pre = ''
++            lib_ext = '.lib'
++        else:
++            lib_pre = 'lib'
++            lib_ext = '.a'
++        extra_link_args = []
++        for lib in common_extension_args['libraries']:
++            extra_link_args.append(
++                os.path.join(static_fftw_path, lib_pre + lib + lib_ext))
++        # now that full paths to libraries are in extra_link_args remove them
++        # from common_extension_args
++        common_extension_args['libraries'] = []
++    else:
++        extra_link_args = []
++
+     ext_modules = [
+         Extension('pyfftw.pyfftw', sources=sources, 
++                  extra_link_args = extra_link_args,
+                   **common_extension_args)]
+ 
+     if have_cython:
diff --git a/ci/scripts/build_and_debug.sh b/ci/scripts/build_and_debug.sh
index 39dac1707e806fca9dee5cc6d7a6b40ff2d581c3..de66fc1046209834cc08435120916087392c244e 100755
--- a/ci/scripts/build_and_debug.sh
+++ b/ci/scripts/build_and_debug.sh
@@ -8,9 +8,9 @@ if [[ ! -d '/hysop' ]]; then
     exit 1
 fi
 
-CC=gcc-7
-CXX=g++-7
-FC=gfortran-7
+CC=gcc
+CXX=g++
+FC=gfortran
 
 HYSOP_DIR='/tmp/hysop'
 
diff --git a/ci/scripts/build_and_test.sh b/ci/scripts/build_and_test.sh
index 3bd1f54a76f9866e668dfc79f4f2fc342dbc740e..9465a2dd6a3e2544369ed5ca0fb1273be2b9f19e 100755
--- a/ci/scripts/build_and_test.sh
+++ b/ci/scripts/build_and_test.sh
@@ -8,9 +8,9 @@ if [[ ! -d '/hysop' ]]; then
     exit 1
 fi
 
-CC=gcc-7
-CXX=g++-7
-FC=gfortran-7
+CC=gcc
+CXX=g++
+FC=gfortran
 
 HYSOP_DIR='/tmp/hysop'
 HYSOP_BUILD_DIR="${HYSOP_DIR}/build"
diff --git a/ci/scripts/config.sh b/ci/scripts/config.sh
index 7343b5172a87542fabccf43bb4e689cf35f5436a..ac2158623814d2e73893c48d6e827c8dd2c9c12e 100755
--- a/ci/scripts/config.sh
+++ b/ci/scripts/config.sh
@@ -20,9 +20,9 @@ ROOT_DIR="$(pwd)"
 BUILD_DIR="$1"
 INSTALL_DIR="$2"
 
-mkdir -p $BUILD_DIR
-cd $BUILD_DIR
-CC="$3" CXX="$4" FC="$5" cmake -DCMAKE_BUILD_TYPE=Release -DVERBOSE=OFF -DWITH_SCALES=ON -DPYTHON_EXECUTABLE="$(which python2.7)" -DHYSOP_INSTALL=$INSTALL_DIR $ROOT_DIR
+mkdir -p "${BUILD_DIR}"
+cd "${BUILD_DIR}"
+CC="$3" CXX="$4" FC="$5" cmake -DCMAKE_BUILD_TYPE=Release -DVERBOSE=OFF -DWITH_SCALES=ON -DPYTHON_EXECUTABLE="$(which python2.7)" -DHYSOP_INSTALL="${INSTALL_DIR}" -DFIND_FFTW_STATIC_ONLY=ON -DFIND_FFTW_VERBOSE=ON "${ROOT_DIR}"
 
 if [ ! -f Makefile ]; then
     echo "The makefile has not been generated."
diff --git a/ci/scripts/install.sh b/ci/scripts/install.sh
index ca04e7e690d82d93abbb1eed440e8739411863a0..82f3f64561b88e76e38b02cac4a2b8b6e66aaa44 100755
--- a/ci/scripts/install.sh
+++ b/ci/scripts/install.sh
@@ -1,6 +1,8 @@
 #!/bin/bash
 set -feu -o pipefail
 
+PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE:-"$(which python2.7)"}
+
 if [ $# -ne 2 ]; then
     echo "Usage ./install build_folder install_folder"
     exit 1
@@ -19,15 +21,15 @@ fi
 BUILD_FOLDER="$1"
 INSTALL_FOLDER="$2"
 
-cd $BUILD_FOLDER
+cd "${BUILD_FOLDER}"
 make install
 
-if [ ! -d "$INSTALL_FOLDER/lib/python2.7/site-packages/hysop" ]; then
-    echo "$INSTALL_FOLDER/lib/python2.7/site-packages/hysop was not created."
+if [ ! -d "${INSTALL_FOLDER}/lib/python2.7/site-packages/hysop" ]; then
+    echo "${INSTALL_FOLDER}/lib/python2.7/site-packages/hysop was not created."
     exit 1
 fi
 
-export PYTHONPATH="$INSTALL_FOLDER/lib/python2.7/site-packages"
-python -c 'import hysop; print hysop'
+export PYTHONPATH="${INSTALL_FOLDER}/lib/python2.7/site-packages"
+"${PYTHON_EXECUTABLE}" -c 'import hysop; print hysop'
 
 exit 0
diff --git a/ci/scripts/test.sh b/ci/scripts/test.sh
index dab2668f5421740c735cdc2eff22d056ee83051a..706a8f1e11110def06193a245dbd09f168856663 100755
--- a/ci/scripts/test.sh
+++ b/ci/scripts/test.sh
@@ -1,7 +1,7 @@
 #!/bin/bash
 set -feu -o pipefail
 
-PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE:-python2.7}
+PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE:-"$(which python2.7)"}
 
 if [ $# -lt 2 ]; then
     echo "Usage ./test install_folder hysop_folder [cache_dir] [backup_cache_dir]"
@@ -92,7 +92,7 @@ RUN_LONG_TESTS=${RUN_LONG_TESTS:-false}
 
 COMMON_TEST_OPTIONS=''
 TEST_DIR="$HYSOP_DIR"
-COMMON_EXAMPLE_OPTIONS='-VNC -d16 -cp float -maxit 2 --autotuner-max-candidates 1'
+COMMON_EXAMPLE_OPTIONS='-VNC -d16 -cp float -maxit 2 --autotuner-max-candidates 1 --save-checkpoint --checkpoint-dump-freq 0 --checkpoint-dump-period 0 --checkpoint-dump-last --checkpoint-dump-times'
 EXAMPLE_DIR="$HYSOP_DIR/../hysop_examples/examples"
 
 hysop_test() {
@@ -135,6 +135,7 @@ if [ "$RUN_TESTS" = true ]; then
     hysop_test "operator/tests/test_solenoidal_projection.py"
     hysop_test "operator/tests/test_poisson_curl.py"
     ${HYSOP_DIR}/fields/tests/test_cartesian.sh
+    ${HYSOP_DIR}/core/tests/test_checkpoint.sh
 fi
 
 if [ "${RUN_LONG_TESTS}" = true ]; then
diff --git a/ci/utils/build_docker_image.sh b/ci/utils/build_docker_image.sh
index b6d5ce89ec10af63a955f63596818755ac4afe51..5722a9b78d340a131a25a01feb7abec641d1c632 100755
--- a/ci/utils/build_docker_image.sh
+++ b/ci/utils/build_docker_image.sh
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 set -feu -o pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-NTHREADS="$(python -c 'import psutil; print(psutil.cpu_count(logical=False))')"
-UBUNTU_RELEASE=${1:-bionic}
+NTHREADS="$(nproc)"
+UBUNTU_RELEASE=${1:-focal}
 
 docker build --rm=true --build-arg "NTHREADS=$NTHREADS" -t "keckj/hysop:${UBUNTU_RELEASE}" -f "${SCRIPT_DIR}/../docker_images/ubuntu/${UBUNTU_RELEASE}/Dockerfile" "${SCRIPT_DIR}/../.."
diff --git a/ci/utils/pull_docker_image.sh b/ci/utils/pull_docker_image.sh
index 7cf52350a8ee79807a51fd5e0e849fdf0fae3ecb..e73e452de24bad3f050af715fb8c875c74324d53 100755
--- a/ci/utils/pull_docker_image.sh
+++ b/ci/utils/pull_docker_image.sh
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
 set -euf -o pipefail
-UBUNTU_RELEASE=${1:-bionic}
+UBUNTU_RELEASE=${1:-focal}
 docker logout
 docker pull "keckj/hysop:${UBUNTU_RELEASE}"
diff --git a/ci/utils/push_docker_image.sh b/ci/utils/push_docker_image.sh
index 33aec3ccf331068848844267349b62e4e245854e..b36ade258042775cae7990aadb763c5b7a1747d1 100755
--- a/ci/utils/push_docker_image.sh
+++ b/ci/utils/push_docker_image.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 set -euf -o pipefail
-UBUNTU_RELEASE=${1:-bionic}
+UBUNTU_RELEASE=${1:-focal}
 docker login
 docker push "keckj/hysop:${UBUNTU_RELEASE}"
 docker logout
diff --git a/ci/utils/run_ci.sh b/ci/utils/run_ci.sh
index ce540c0a57274df43650fef5df9416b512455c17..0706ebb1251c5fd7e95d4d84bc7039de875bc957 100755
--- a/ci/utils/run_ci.sh
+++ b/ci/utils/run_ci.sh
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 set -feu -o pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-UBUNTU_RELEASE=${1:-bionic}
+UBUNTU_RELEASE=${1:-focal}
 DOCKER_IMG="keckj/hysop:${UBUNTU_RELEASE}"
 CONTAINER_ID='hysop_build_and_test'
 
@@ -13,8 +13,8 @@ trap remove_img INT TERM EXIT KILL
 
 remove_img
 
-docker logout
-docker pull "${DOCKER_IMG}"
+#docker logout
+#docker pull "${DOCKER_IMG}"
 docker create -v "${SCRIPT_DIR}/../..:/hysop:ro" --name="${CONTAINER_ID}" -it "${DOCKER_IMG}"
 docker start "${CONTAINER_ID}"
 
diff --git a/ci/utils/run_debug.sh b/ci/utils/run_debug.sh
index eb3bb7753820301c71112e837d0e637a5d520608..39b1b64a914b06922a4175735c46a8413953f9e9 100755
--- a/ci/utils/run_debug.sh
+++ b/ci/utils/run_debug.sh
@@ -1,7 +1,7 @@
 #!/usr/bin/env bash
 set -feu -o pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-UBUNTU_RELEASE=${1:-bionic}
+UBUNTU_RELEASE=${1:-focal}
 DOCKER_IMG="keckj/hysop:${UBUNTU_RELEASE}"
 CONTAINER_ID='hysop_build_and_debug'
 
@@ -13,8 +13,8 @@ trap remove_img INT TERM EXIT KILL
 
 remove_img
 
-docker logout
-docker pull "${DOCKER_IMG}"
+#docker logout
+#docker pull "${DOCKER_IMG}"
 docker create -v "${SCRIPT_DIR}/../..:/hysop:ro" --name="${CONTAINER_ID}" -it "${DOCKER_IMG}"
 docker start "${CONTAINER_ID}"
 docker exec -it "${CONTAINER_ID}" /hysop/ci/scripts/build_and_debug.sh
diff --git a/ci/utils/run_docker_image.sh b/ci/utils/run_docker_image.sh
index 6690d09643ba5f4148bac539397fa82bac467f7d..05cd63e1ae16fdcfaff076dcd3604cdac73a8381 100755
--- a/ci/utils/run_docker_image.sh
+++ b/ci/utils/run_docker_image.sh
@@ -1,5 +1,5 @@
 #!/usr/bin/env bash
-set -euf -o pipefail
+set -feu -o pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
-UBUNTU_RELEASE='bionic'
+UBUNTU_RELEASE=${1:-focal}
 docker run -it -v "${SCRIPT_DIR}/../..:/hysop:ro" "keckj/hysop:${UBUNTU_RELEASE}"
diff --git a/cmake/FindFFTW.cmake b/cmake/FindFFTW.cmake
index b77e3f331202295e8a48ad4e5463e0edba677222..24b1798b1d0523b43126929d40964d3baa32eb3e 100644
--- a/cmake/FindFFTW.cmake
+++ b/cmake/FindFFTW.cmake
@@ -51,7 +51,7 @@
 #   Examples: fFtW3Q => FFTW3Q, fftw3f-mpi => FFTW3F_MPI
 # 
 # == Using a specific FFTW ==
-#   Set the variable ${FFTW_DIR} to your desired search paths if it's not in a standard place or if you want a specific version. 
+#   Set the variable ${FFTW_ROOT} to your desired search paths if it's not in a standard place or if you want a specific version. 
 #
 # == Checking against a specific version or the library ==
 #   Not supported yet.
@@ -133,7 +133,7 @@ foreach(fftw_comp ${FFTW_FIND_COMPONENTS})
     find_path(
         ${COMPONENT}_INCLUDE_DIR
         NAMES ${header}
-        PATHS ${FFTW_DIR} 
+        PATHS ${FFTW_ROOT} 
         PATHS ${${COMPONENT}_PKGCONF_INCLUDE_DIRS}
         PATH_SUFFIXES include
         NO_DEFAULT_PATH
@@ -162,7 +162,7 @@ foreach(fftw_comp ${FFTW_FIND_COMPONENTS})
     find_library(
         ${COMPONENT}_LIBRARY
         NAMES ${library}
-        PATHS ${FFTW_DIR} 
+        PATHS ${FFTW_ROOT} 
         PATHS ${${COMPONENT}_INCLUDE_DIR}/.. 
         PATHS ${${COMPONENT}_PKGCONF_LIBRARY_DIRS}}
         PATH_SUFFIXES lib
diff --git a/cmake/FindPythonFull.cmake b/cmake/FindPythonFull.cmake
index 646611606ac36dbb90f17eab561856ce8a195189..857bde5c98a46db43bd3d7ff3314bf0bd7f3ee9d 100644
--- a/cmake/FindPythonFull.cmake
+++ b/cmake/FindPythonFull.cmake
@@ -32,6 +32,7 @@ if(EXISTS "${PYTHON_INCLUDE_DIRS}" AND EXISTS "${PYTHON_LIBRARY}" AND EXISTS "${
 else()
   set(PYTHON_FOUND FALSE)
   # --- Find python interpreter
+  set(Python_ADDITIONAL_VERSIONS 2.7)
   find_package(PythonInterp)
 
   # --- Use distutils to explore python configuration corresponding to
diff --git a/docs/config/mainpage.doxygen b/docs/config/mainpage.doxygen
index c267d5085ed6c9dd88edddb6b142bb621122f0e5..e1f084f09e143121e91bbee813b7da227c3a4692 100644
--- a/docs/config/mainpage.doxygen
+++ b/docs/config/mainpage.doxygen
@@ -54,7 +54,7 @@ At the end of this step BUILDDIR contains all makefiles, setup.py and other requ
 
 Some useful options for cmake :
 
-- -DFFTW_DIR : where to find fftw if it's not in a "standard" place.
+- -DFFTW_ROOT : where to find fftw if it's not in a "standard" place.
 - -DWITH_SCALES=ON/OFF : to compile an HySoP version including scales (default = on)
 - -DWITH_TESTS=ON/OFF: enable testing (i.e. prepare target "make test", default = off)
 
@@ -64,7 +64,7 @@ mkdir /home/mylogin/buildHySoP
 cd /home/mylogin/buildHySoP
 export FC=mpif90
 module load cmake-2.8
-cmake -DFFTW_DIR=/softs/install/fftw3.1 ~/Softs/HySoP
+cmake -DFFTW_ROOT=/softs/install/fftw3.1 ~/Softs/HySoP
 \endcode
 
 \subsection installDirConfig Install directory configuration :
diff --git a/docs/sphinx/install_guide/index.rst b/docs/sphinx/install_guide/index.rst
index 666afc5bff1fb5aca9df567ab8beeecd25f4b601..1c15fb1ea819628ea68e1f69d7c1654072d30994 100644
--- a/docs/sphinx/install_guide/index.rst
+++ b/docs/sphinx/install_guide/index.rst
@@ -66,7 +66,7 @@ Behavior options:
 Components options :
 
 * WITH_FFTW=ON/OFF : Link with fftw library (required for some HySoP solvers),  (default=ON).
-* FFTW_DIR : where to find fftw if it's not in a "standard" place.
+* FFTW_ROOT : where to find fftw if it's not in a "standard" place.
 * WITH_EXTRAS=ON/OFF : Link with some extra fortran libraries (like arnoldi solver), (default=OFF).
 * WITH_GPU=ON/OFF : Use of GPU (required for some HySoP solvers), (default=ON)
 * WITH_SCALES=ON/OFF : to compile an HySoP version including scales (default = ON).
diff --git a/hysop/backend/device/opencl/opencl_env.py b/hysop/backend/device/opencl/opencl_env.py
index f42bc18fd9f20a0484afb83e86b22f9e95b21a20..9362fd1748df5fbbe3764e9315b3763b53fe0299 100644
--- a/hysop/backend/device/opencl/opencl_env.py
+++ b/hysop/backend/device/opencl/opencl_env.py
@@ -431,15 +431,16 @@ Dumped OpenCL Kernel '{}'
            listformat(self.default_build_opts),
            listformat(build_options))
 
-
             dumped_src = dump_prefix + gpu_src
 
-            dump_folder=IO.default_path()+'/'+OPENCL_KERNEL_DUMP_FOLDER
-            dump_file_prefix=dump_folder+'/rk{}_'.format(main_rank)+kernel_name
+            dump_folder=os.path.join(IO.default_path(), OPENCL_KERNEL_DUMP_FOLDER)
+            dump_file_prefix=os.path.join(dump_folder, 'rk{}_'.format(main_rank)+kernel_name)
             tmp_dump_file=dump_file_prefix+'.c'
             dump_file=dump_file_prefix+'.cl'
+
             if not os.path.exists(dump_folder) and (main_rank == 0):
                 os.makedirs(dump_folder)
+
             with open(tmp_dump_file, 'w+') as f:
                 f.write(dumped_src)
 
@@ -562,14 +563,13 @@ Dumped OpenCL Kernel '{}'
             print '=== Kernel raw source compiling ==='
         prg = cl.Program(self.context, gpu_src)
 
-        dump_folder=IO.default_path()+'/'+OPENCL_KERNEL_DUMP_FOLDER
-        if not os.path.exists(dump_folder) and (main_rank == 0):
-            os.makedirs(dump_folder)
+        dump_folder=os.path.join(IO.default_path(), OPENCL_KERNEL_DUMP_FOLDER)
 
         if DEBUG:
             # dump kernel source while in debug mode
-            dump_file=dump_folder+'/rk{}_{}_dump.cl'.format(
-                main_rank, kernel_name)
+            if not os.path.exists(dump_folder) and (main_rank == 0):
+                os.makedirs(dump_folder)
+            dump_file=os.path.join(dump_folder, 'rk{}_{}_dump.cl'.format(main_rank, kernel_name))
             print 'Dumping kernel src at \'{}\'.'.format(dump_file)
             with open(dump_file, 'w+') as f:
                 f.write(gpu_src)
@@ -584,8 +584,9 @@ Dumped OpenCL Kernel '{}'
             build = prg.build(s_build_opts)
         except Exception as e:
             # always dump source when build fails
-            dump_file=dump_folder+'/rk{}_{}_build_fail.cl'.format(
-                main_rank, kernel_name)
+            if not os.path.exists(dump_folder) and (main_rank == 0):
+                os.makedirs(dump_folder)
+            dump_file=os.path.join(dump_folder, 'rk{}_{}_build_fail.cl'.format(main_rank, kernel_name))
             with open(dump_file, 'w+') as f:
                 f.write(gpu_src)
             print('Build options : ', s_build_opts)
diff --git a/hysop/core/checkpoints.py b/hysop/core/checkpoints.py
index 5183655992b4f2ee15fbc56c5fc87391143e8080..ef802b7fffefb78a6b78cc9ac0171ef016618858 100644
--- a/hysop/core/checkpoints.py
+++ b/hysop/core/checkpoints.py
@@ -12,16 +12,32 @@ from hysop.fields.cartesian_discrete_field import CartesianDiscreteScalarField
 
 class CheckpointHandler(object):
     def __init__(self, load_checkpoint_path, save_checkpoint_path, 
+            compression_method, compression_level,
             io_params, relax_constraints):
         check_instance(load_checkpoint_path, str, allow_none=True)
         check_instance(save_checkpoint_path, str, allow_none=True)
+        check_instance(compression_method, str, allow_none=True)
+        check_instance(compression_level, int, allow_none=True)
         check_instance(io_params, IOParams, allow_none=True)
         check_instance(relax_constraints, bool)
 
+        if (compression_method is not None):
+            from numcodecs import blosc
+            available_compressors = blosc.list_compressors()
+            if compression_method not in available_compressors:
+                msg='User specified compression method \'{}\' which is not supported by blosc. Available compressors are {}.'
+                raise RuntimeError(msg.format(compression_method, ', '.join(available_compressors)))
+        if (compression_level is not None):
+            if (compression_level < 0) or (compression_level > 9):
+                msg='User specified compression level {} that is not in valid range [0,9].'
+                raise RuntimeError(msg.format(compression_level))
+
         self._load_checkpoint_path = load_checkpoint_path
         self._save_checkpoint_path = save_checkpoint_path
-        self._io_params = io_params
-        self._relax_constraints = relax_constraints
+        self._compression_method   = compression_method
+        self._compression_level    = compression_level
+        self._io_params            = io_params
+        self._relax_constraints    = relax_constraints
 
         self._checkpoint_template   = None
         self._checkpoint_compressor = None
@@ -33,6 +49,12 @@ class CheckpointHandler(object):
     def save_checkpoint_path(self):
         return self._save_checkpoint_path
     @property
+    def compression_method(self):
+        return self._compression_method
+    @property
+    def compression_level(self):
+        return self._compression_level
+    @property
     def io_params(self):
         return self._io_params
     @property
@@ -116,23 +138,28 @@ class CheckpointHandler(object):
         ellapsed = Wtime() - start
         msg=' > Successfully imported checkpoint in {}.'
         vprint(msg.format(time2str(ellapsed)))
+
+    def should_dump(self, simulation):
+        io_params = self.io_params
+        if (self.save_checkpoint_path is None):
+            return False
+        if (io_params is None):
+            return False
+        return io_params.should_dump(simulation)
     
 
     # Checkpoint is first exported as a directory containing a hierarchy of arrays (field and parameters data + metadata)
     # This folder is than tarred (without any form of compression) so that a checkpoint consists in a single movable file.
-    # Data is already compressed during data export by the zarr module, using the blosc compressor (snappy, clevel=3). 
+    # Data is already compressed during data export by the zarr module, using the blosc compressor (defaults to zstd, clevel=6, user-configurable).
     def save_checkpoint(self, problem, simulation):
         save_checkpoint_path = self.save_checkpoint_path
-        if (save_checkpoint_path is None):
+        if (self.save_checkpoint_path is None):
             return
 
         if (self.io_params is None):
             msg='Load checkpoint has been set to \'{}\' but checkpoint io_params has not been specified.'
             raise RuntimeError(msg.format(load_checkpoint_path))
         
-        if not self.io_params.should_dump(simulation):
-            return
-        
         vprint('>Exporting problem checkpoint to \'{}\':'.format(save_checkpoint_path))
         if not save_checkpoint_path.endswith('.tar'):
             msg='Can only export checkpoint with tar extension, got {}.'
@@ -249,10 +276,12 @@ class CheckpointHandler(object):
         vprint('\n>Creating checkpoint template as \'{}\'...'.format(checkpoint_template))
         import zarr
         from numcodecs import blosc, Blosc
+        blosc.use_threads = (mpi_params.size == 1) # disable threads for multiple processes (can deadlock)
         
         # array data compressor
-        blosc.use_threads = (mpi_params.size == 1) # disable threads for multiple processes (can deadlock)
-        compressor = Blosc(cname='snappy', clevel=3, shuffle=Blosc.BITSHUFFLE)
+        self._compression_method = first_not_None(self._compression_method, 'zstd')
+        self._compression_level  = first_not_None(self._compression_level, 6)
+        compressor = Blosc(cname=self._compression_method, clevel=self._compression_level, shuffle=Blosc.SHUFFLE)
         self._checkpoint_compressor = compressor
         
         # io_leader creates a directory layout on (hopefully) shared filesystem
@@ -355,8 +384,7 @@ class CheckpointHandler(object):
                                                                          if not is_at_right_boundary[i])
                 grid_is_uniformly_distributed = all(res == inner_compute_resolutions[0] 
                                                             for res in inner_compute_resolutions)
-                grid_is_uniformly_distributed |= (topo.mpi_params.size == 1)
-
+                
                 if grid_is_uniformly_distributed:
                     # We divide the array in 'compute_resolution' chunks, no sychronization is required.
                     # Here there is no need to use the process locker to write this array data.
@@ -366,6 +394,7 @@ class CheckpointHandler(object):
                 else:
                     # We divide the array in >=1MB chunks (chunks are given in terms of elements)
                     # Array chunks may overlap different processes so we need interprocess sychronization (slow)
+                    assert (comm.size > 1)
                     should_sync = True
                     if dim == 1:
                         chunks = 1024*1024    # at least 1MB / chunk
@@ -376,6 +405,9 @@ class CheckpointHandler(object):
                     else:
                         raise NotImplementedError(dim)
                 
+                if should_sync:
+                    raise NotImplementedError
+                
                 # Create array (no memory is allocated here, even on disk because data blocks are empty)
                 dtype = dfield.dtype
                 shape = grid_resolution
@@ -402,7 +434,7 @@ class CheckpointHandler(object):
         
         if (root is not None):
             root.attrs['nbytes'] = nbytes
-            msg=' => Maximum checkpoint size will be {}, without compression and metadata.'
+            msg='>Maximum checkpoint size will be {}, without compression and metadata.'
             vprint(root.tree())
             vprint(msg.format(bytes2str(nbytes)))
 
diff --git a/hysop/core/tests/test_checkpoint.sh b/hysop/core/tests/test_checkpoint.sh
index b5395299632a2ac6425f9cd17cea7018a2552d96..d6cfc4919b1901987aedf81742a1ac9bf1bfb687 100755
--- a/hysop/core/tests/test_checkpoint.sh
+++ b/hysop/core/tests/test_checkpoint.sh
@@ -1,11 +1,11 @@
 #!/usr/bin/env bash
 set -feu -o pipefail
 PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE:-python2.7}
+MPIRUN_EXECUTABLE=${MPIRUN_EXECUTABLE:-mpirun --allow-run-as-root}
 
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 EXAMPLE_DIR="$(realpath ${SCRIPT_DIR}/../../../hysop_examples/examples)"
 
-
 function compare_files {
     if [[ ! -f "$1" ]]; then
         echo "File '${1}' does not exist."
@@ -89,8 +89,8 @@ if [[ ! -f "${EXAMPLE_FILE}" ]]; then
 fi
 
 echo ' Running simulations...'
-mpirun -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -S "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run0" --checkpoint-dump-time 0.05 --checkpoint-dump-freq 0
-mpirun -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -S "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run1" --checkpoint-dump-time 0.05 --checkpoint-dump-freq 0
+${MPIRUN_EXECUTABLE} -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -S "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run0" --checkpoint-dump-time 0.05 --checkpoint-dump-freq 0
+${MPIRUN_EXECUTABLE} -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -S "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run1" --checkpoint-dump-time 0.05 --checkpoint-dump-freq 0
 
 echo ' Comparing solutions...'
 echo "  >debug dumps match"
@@ -103,8 +103,8 @@ done
 
 echo
 echo ' Running simulations from checkpoints...'
-mpirun -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run2"
-mpirun -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run3"
+${MPIRUN_EXECUTABLE} -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run2"
+${MPIRUN_EXECUTABLE} -np 4 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run3"
 
 echo ' Comparing solutions...'
 compare_files "${TEST_DIR}/run2/dump/run.txt" "${TEST_DIR}/run3/dump/run.txt"
@@ -139,9 +139,9 @@ fi
 
 # Fortran FFTW does not yield exactly the same results in parallel so we use h5diff with an absolute tolerance of 10^-12
 echo ' Running simulations...'
-mpirun -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run0" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
-mpirun -np 2 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run1" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
-mpirun -np 3 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint2.tar" --dump-dir "${TEST_DIR}/run2" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
+${MPIRUN_EXECUTABLE} -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run0" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
+${MPIRUN_EXECUTABLE} -np 2 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run1" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
+${MPIRUN_EXECUTABLE} -np 3 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -S "${TEST_DIR}/checkpoint2.tar" --dump-dir "${TEST_DIR}/run2" --checkpoint-dump-time 0.15 --checkpoint-dump-freq 0
 echo ' Comparing solutions...'
 for f0 in $(find "${TEST_DIR}/run0" -name '*.h5' | sort -n); do
     f1=$(echo "${f0}" | sed 's/run0/run1/')
@@ -153,14 +153,14 @@ done
 
 echo ' Running simulations from checkpoints using different MPI topologies...'
 COMMON_OPTIONS="-NC -d24 --tend 0.3 --dump-tstart 0.15 --dump-freq 1 --hdf5-disable-slicing --hdf5-disable-compression --checkpoint-relax-constraints"
-mpirun -np 3 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run3"
-mpirun -np 2 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run4"
-mpirun -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint2.tar" --dump-dir "${TEST_DIR}/run5"
+${MPIRUN_EXECUTABLE} -np 3 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run3"
+${MPIRUN_EXECUTABLE} -np 2 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run4"
+${MPIRUN_EXECUTABLE} -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -impl fortran -cp fp64 -L "${TEST_DIR}/checkpoint2.tar" --dump-dir "${TEST_DIR}/run5"
 echo ' Comparing solutions...'
-for f0 in $(find "${TEST_DIR}/run0" -name '*.h5' | sort -n); do
-    f3=$(echo "${f0}" | sed 's/run0/run3/')
-    f4=$(echo "${f0}" | sed 's/run0/run4/')
-    f5=$(echo "${f0}" | sed 's/run0/run5/')
+for f3 in $(find "${TEST_DIR}/run3" -name '*.h5' | sort -n); do
+    f0=$(echo "${f3}" | sed 's/run3/run0/')
+    f4=$(echo "${f3}" | sed 's/run3/run4/')
+    f5=$(echo "${f3}" | sed 's/run3/run5/')
     h5diff -d '1e-12' "${f0}" "${f3}" 
     h5diff -d '1e-12' "${f0}" "${f4}" 
     h5diff -d '1e-12' "${f0}" "${f5}" 
@@ -168,13 +168,12 @@ for f0 in $(find "${TEST_DIR}/run0" -name '*.h5' | sort -n); do
 done
 
 echo ' Running simulations from checkpoints using OpenCL and different datatypes...'
-mpirun -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -cp fp64 -impl opencl -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run6"
-mpirun -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -cp fp32 -impl opencl -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run7"
+${MPIRUN_EXECUTABLE} -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -cp fp64 -impl opencl -L "${TEST_DIR}/checkpoint0.tar" --dump-dir "${TEST_DIR}/run6"
+${MPIRUN_EXECUTABLE} -np 1 "${PYTHON_EXECUTABLE}" "${EXAMPLE_FILE}" ${COMMON_OPTIONS} -cp fp32 -impl opencl -L "${TEST_DIR}/checkpoint1.tar" --dump-dir "${TEST_DIR}/run7"
 echo ' Comparing solutions...'
-for f0 in $(find "${TEST_DIR}/run0" -name '*.h5' | sort -n); do
-    f6=$(echo "${f0}" | sed 's/run0/run6/')
-    f7=$(echo "${f0}" | sed 's/run0/run7/')
+for f6 in $(find "${TEST_DIR}/run6" -name '*.h5' | sort -n); do
+    f7=$(echo "${f6}" | sed 's/run6/run7/')
     h5diff -d '5e-5' "${f6}" "${f7}" 
-    echo "  >$(basename ${f0}) match"
+    echo "  >$(basename ${f6}) match"
 done
 
diff --git a/hysop/problem.py b/hysop/problem.py
index 65faf8aa4e22d453b9fa5bcb076d16cff4455a08..e7031a3626988605984c3676b7d3bbdfb35263de 100644
--- a/hysop/problem.py
+++ b/hysop/problem.py
@@ -148,8 +148,10 @@ class Problem(ComputationalGraph):
                 vprint()
                 simu.print_state()
                 self.apply(simulation=simu, dbg=dbg, **kwds)
+                should_dump_checkpoint = checkpoint_handler.should_dump(simu) # determined before simu advance
                 simu.advance(dbg=dbg, plot_freq=plot_freq)
-                checkpoint_handler.save_checkpoint(self, simu)
+                if should_dump_checkpoint:
+                    checkpoint_handler.save_checkpoint(self, simu)
                 if report_freq and (simu.current_iteration % report_freq) == 0:
                     self.profiler_report()
         
diff --git a/hysop/simulation.py b/hysop/simulation.py
index 07397e356461d43f455b60cec9f73b6eb8580c9f..e8a5afd02a215512717d879b6eb3307f44495d55 100644
--- a/hysop/simulation.py
+++ b/hysop/simulation.py
@@ -1,4 +1,5 @@
-"""Description of the simulation parameters (time, iteration ...)
+"""
+Description of the simulation parameters (time, iteration ...)
 
 Usage
 -----
@@ -26,7 +27,6 @@ Usage
     # end simulation (optional) to prepare io
     s.finalize()
     io.apply(s)
-
 """
 import numpy as np
 from abc import ABCMeta, abstractmethod
@@ -116,8 +116,8 @@ class Simulation(object):
         self.time = start
         self.is_over = False
         self.current_iteration = -1
-        self._rank = main_rank if mpi_params is None else mpi_params.rank
-        self._comm = main_comm if mpi_params is None else mpi_params.comm
+        self._rank = main_rank if (mpi_params is None) else mpi_params.rank
+        self._comm = main_comm if (mpi_params is None) else mpi_params.comm
         self.clamp_t_to_end = clamp_t_to_end
         self._restart = restart
 
diff --git a/hysop_examples/example_utils.py b/hysop_examples/example_utils.py
index c3501d82b544eac9ad246bc0c72727e3b1b28256..36349d1e1a853c4df6004f5aea27e824f704099e 100644
--- a/hysop_examples/example_utils.py
+++ b/hysop_examples/example_utils.py
@@ -33,17 +33,22 @@ class SplitAppendAction(argparse._AppendAction):
         self._append    = append
     
     def __call__(self, parser, namespace, values, option_string=None):
-        assert isinstance(values, str), type(values)
-        for c in ('(','{','[',']','}',')'):
-            values = values.replace(c, '')
-        try:
-            values = tuple(self._convert(v) for v in values.split(self._separator))
-        except:
-            msg='Failed to convert \'{}\' to {} of {}s for parameter {}.'
-            msg=msg.format(values, self._container.__name__, self._convert.__name__,
-                    self.dest)
-            parser.error(msg)
-        assert len(values)>0
+        if isinstance(values, str):
+            for c in ('(','{','[',']','}',')'):
+                values = values.replace(c, '')
+            try:
+                values = tuple(self._convert(v) for v in values.split(self._separator))
+            except:
+                msg='Failed to convert \'{}\' to {} of {}s for parameter {}.'
+                msg=msg.format(values, self._container.__name__, self._convert.__name__,
+                        self.dest)
+                parser.error(msg)
+        else:
+            try:
+                values = tuple(values)
+            except:
+                msg='Could not convert values \'{}\' to tuple for parameter {}.'.format(values, self.dest)
+                parser.error(msg)
         if self._append:
             items = argparse._ensure_value(namespace, self.dest, self._container())
         else:
@@ -965,7 +970,7 @@ class HysopArgParser(argparse.ArgumentParser):
                 help=('Global output period for all IO params.' 
                      +' This will append linspace(tstart, tend, int((tend-tstart)/dump_period)) to times of interest.'
                      +' Use 0.0 to disable period based dumping.'))
-        file_io.add_argument('--dump-times', type=str, default=None, convert=float,
+        file_io.add_argument('--dump-times', type=str, default=None, convert=float, nargs='?', const=tuple(),
                 action=self.split, container=tuple, append=False,
                 dest='dump_times',
                 help='Global comma delimited list of additional output times of interest for all io_params.')
@@ -1025,6 +1030,10 @@ class HysopArgParser(argparse.ArgumentParser):
                               'Should not be to frequent for efficiency reasons. May be used in conjunction with --load-checkpoint, '
                               'in which case the starting checkpoint may be overwritten in the case the same path are given. '
                               'Defaults to {checkpoint_output_dir}/checkpoint.tar if no filename is specified.'))
+                pargs.add_argument('--checkpoint-compression-method', type=str, default=None, dest='checkpoint_compression_method',
+                                    help='Set the compression method used by the Blosc meta-compressor for checkpoint array data. Defaults to zstd.')
+                pargs.add_argument('--checkpoint-compression-level', type=int, default=None, dest='checkpoint_compression_level',
+                                    help='Set the compression level used by the Blosc meta-compressor for checkpoint array data, from 0 (no compression) to 9 (maximum compression). Defaults to 6.')
                 pargs.add_argument('--checkpoint-relax-constraints', action='store_true', dest='checkpoint_relax_constraints',
                                     help=('Relax field/parameter checks when loading a checkpoint. This allows for a change in datatype, '
                                     'boundary conditions, ghost count and topology shape when reloading a checkpoint. '
@@ -1046,7 +1055,7 @@ class HysopArgParser(argparse.ArgumentParser):
                     type=float, default=None, 
                     dest='{}_dump_period'.format(pname), 
                     help='Custom output period for IO parameter \'{}.\''.format(pname))
-            pargs.add_argument('--{}-dump-times'.format(pname), 
+            pargs.add_argument('--{}-dump-times'.format(pname), nargs='?', const=tuple(),
                     action=self.split, container=tuple, append=False,
                     type=str, default=None, convert=float,
                     dest='{}_dump_times'.format(pname),
@@ -1270,6 +1279,8 @@ class HysopArgParser(argparse.ArgumentParser):
         self._check_default(args, 'load_checkpoint_path', str, allow_none=True)
         self._check_default(args, 'save_checkpoint_path', str, allow_none=True)
         self._check_default(args, 'checkpoint_relax_constraints', bool, allow_none=False)
+        self._check_default(args, 'checkpoint_compression_method', str, allow_none=True)
+        self._check_default(args, 'checkpoint_compression_level', int, allow_none=True)
     
     def _add_graphical_io_args(self):
         graphical_io = self.add_argument_group('Graphical I/O')
@@ -1608,6 +1619,7 @@ class HysopArgParser(argparse.ArgumentParser):
             args.save_checkpoint_path = save_checkpoint_path
 
         args.checkpoint_handler = CheckpointHandler(args.load_checkpoint_path, args.save_checkpoint_path, 
+                                    args.checkpoint_compression_method, args.checkpoint_compression_level,
                                     args.checkpoint_io_params, args.checkpoint_relax_constraints)
 
         # debug dumps
diff --git a/hysop_examples/examples/shear_layer/shear_layer.py b/hysop_examples/examples/shear_layer/shear_layer.py
index 7220b5adc1e6593a37c34ab66778c3ae708d4391..e7e162154bed95e6f8b42b39c6e1cc039e14fabf 100644
--- a/hysop_examples/examples/shear_layer/shear_layer.py
+++ b/hysop_examples/examples/shear_layer/shear_layer.py
@@ -199,9 +199,7 @@ def compute(args):
     # Finally solve the problem 
     problem.solve(simu, dry_run=args.dry_run, 
             debug_dumper=debug_dumper,
-            load_checkpoint=args.load_checkpoint,
-            save_checkpoint=args.save_checkpoint,
-            checkpoint_io_params=args.checkpoint_io_params,
+            checkpoint_handler=args.checkpoint_handler,
             plot_freq=args.plot_freq)
     
     # Finalize
diff --git a/requirements.txt b/requirements.txt
index 6d68dae12b9a8440c3bb7a90853366d668ce0919..163c660c86bd6f1c282d9f75075352fec69dec0b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,5 +23,7 @@ configparser
 backports.tempfile
 backports.weakref
 networkx
+pyvis
 zarr
 numcodecs
+jsonpickle