Code as for a real PIV project#

Here, we gather the modules and scripts for preprocessing and PIV computation as we would organize them for a real project. They can serve as a starting point when you begin a new PIV computation project with fluidimage; a typical workflow is sketched after the file list below.

- `args.py`: module defining the command-line interface with argparse
- `params_pre.py`: parameters for the image preprocessing
- `try_pre.py`: script to try preprocessing parameters interactively with ipython
- `job_pre.py`: job for the image preprocessing
- `submit_pre.py`: script to submit the preprocessing jobs on a cluster
- `params_piv.py`: parameters for the PIV computation
- `try_piv.py`: script to try PIV parameters interactively with ipython
- `job_piv.py`: job for the PIV computation
- `submit_piv.py`: script to submit the PIV jobs on a cluster
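
In practice, one alternates between trying parameters interactively and launching the topologies. A typical session could look like this (the experiment index 0 and the core counts are only for illustration)::

  ipython --matplotlib -i try_pre.py   # tune the preprocessing parameters
  ./job_pre.py 0 --nb-cores 2          # run the preprocessing topology
  ipython --matplotlib -i try_piv.py   # tune the PIV parameters
  ./job_piv.py 0 --nb-cores 2          # run the PIV topology
  ./submit_piv.py                      # or submit the PIV jobs on a cluster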

args.py#

"""module to define command line tools with argparse
====================================================

It is used in other scripts in this directory.

"""

import argparse


def make_parser(doc="", postfix_in="pre", postfix_out="piv_coarse"):
    parser = argparse.ArgumentParser(
        description=doc, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument("exp", help="index of the experiment (int)", type=int)

    parser.add_argument(
        "--nb-cores", help="Number of cores", type=int, default=None
    )
    parser.add_argument(
        "-s", "--seq", help="launch topologies sequentially", action="store_true"
    )
    parser.add_argument("-v", "--verbose", help="verbose mode", action="count")
    parser.add_argument(
        "-how",
        "--saving_how",
        type=str,
        help='Saving mode ("ask", "new_dir", "complete" or "recompute")',
        default="complete",
    )
    parser.add_argument(
        "-in", "--postfix_in", type=str, help="postfix input", default=postfix_in
    )
    parser.add_argument(
        "-out",
        "--postfix_out",
        type=str,
        help="postfix output",
        default=postfix_out,
    )
    return parser


def parse_args(doc="", postfix_in="pre", postfix_out="piv_coarse"):
    parser = make_parser(doc, postfix_in, postfix_out)
    args = parser.parse_args()
    return args
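
With the defaults above, a script calling `parse_args` accepts command lines such as (the values here are only for illustration)::

  ./job_piv.py 0 --nb-cores 4 -how recompute -in pre -out piv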

params_pre.py#

#! /usr/bin/env python
"""Contains the parameters for the image preprocessing
======================================================

This can be run in ipython to explore the preprocessing parameters.

To find good parameters, execute the preprocessing topology (see `job_pre.py`)::

  ./job_pre.py 0 --nb-cores 2 -how recompute

and use the GUI tool `fluidimviewer` in the input and output directories::

  fluidimviewer ../../../image_samples/Jet/Images.pre &
  fluidimviewer ../../../image_samples/Jet/Images &

You can also try the preprocessing with::

  ./try_pre.py &

"""

from params_piv import get_path

from fluidimage.topologies.preproc import TopologyPreproc


def make_params_pre(iexp, savinghow="recompute", postfix_out="pre"):

    # These parameters can depend on the experiment.
    # One can add conditions on `iexp` here to set good values.
    # Do NOT change a default value when you want to change the value for
    # only one experiment; add a condition on `iexp` instead.

    path_images_dir = get_path(iexp)
    assert path_images_dir.exists()

    params = TopologyPreproc.create_default_params()
    params.series.path = path_images_dir

    print(f"{path_images_dir}")

    params.tools.sequence = ["rescale_intensity_tanh", "sliding_median"]

    params.tools.rescale_intensity_tanh.enable = False
    params.tools.rescale_intensity_tanh.threshold = None

    params.tools.sliding_median.enable = True
    params.tools.sliding_median.window_size = 10
    params.tools.sliding_median.weight = 0.8

    params.saving.how = savinghow
    params.saving.postfix = postfix_out

    return params


if __name__ == "__main__":
    from args import parse_args

    args = parse_args(doc="", postfix_in=None, postfix_out="pre")
    params = make_params_pre(
        args.exp, savinghow=args.saving_how, postfix_out=args.postfix_out
    )

    print(params)
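
Following the comments in `make_params_pre`, experiment-dependent values are set with conditions on `iexp` inside the function, not by editing the defaults. A minimal hypothetical sketch (the indices and values are assumptions to adapt to your data)::

  # inside make_params_pre, after create_default_params()
  if iexp == 0:
      params.tools.sliding_median.window_size = 10
  elif iexp in (1, 2):
      # hypothetical values for other experiments
      params.tools.sliding_median.window_size = 20
      params.tools.sliding_median.weight = 1.0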

try_pre.py#

#! /usr/bin/env python
"""Experiment on the parameters of the preproc computation
==========================================================

It can be convenient to use this script with ipython --matplotlib

Run it, change the parameters in `params_pre.py` and rerun `try_pre.py`.

Alternatively, run the script with::

  ./try_pre.py &

"""

from importlib import reload

import params_pre

from fluidimage.preproc import Work

# reload so that changes in params_pre.py are taken into account when
# this script is rerun in the same ipython session
reload(params_pre)

params = params_pre.make_params_pre(iexp=0)

work = Work(params=params)

work.display()
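
A convenient way to iterate is to run this script interactively (as mentioned in the docstring)::

  ipython --matplotlib -i try_pre.py

then edit `params_pre.py` and rerun it with `%run try_pre.py`; the `reload` call ensures the new parameters are used.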

job_pre.py#

#! /usr/bin/env python
"""
Job computing preprocessing
===========================

To be launched in the terminal::

  ./job_pre.py 0 --nb-cores 2

or (to force the processing of already computed images)::

  ./job_pre.py 0 --nb-cores 2 -how recompute

see also the help::

  ./job_pre.py -h

"""

import sys
from importlib import reload

import params_pre

from fluidimage.preproc import Topology

reload(params_pre)


def check_nb_cores(args):
    if args.nb_cores is None:
        print(
            "Bad argument: nb_cores is None. Specify with --nb-cores",
            file=sys.stderr,
        )
        sys.exit(1)


def main(args):
    params = params_pre.make_params_pre(
        args.exp, savinghow=args.saving_how, postfix_out=args.postfix_out
    )

    check_nb_cores(args)

    topology = Topology(params, nb_max_workers=int(args.nb_cores))
    topology.compute(sequential=args.seq)


if __name__ == "__main__":
    from args import parse_args

    args = parse_args(doc=__doc__, postfix_in=None, postfix_out="pre")
    print(args)
    main(args)

    print("\nend of job_pre.py")

submit_pre.py#

#! /usr/bin/env python
"""
Script to submit preprocessing jobs on a cluster (submit_pre.py)
================================================================

To be launched in bash::

  ./submit_pre.py

"""

from fluiddyn.clusters.legi import Calcul7 as Calcul

cluster = Calcul()

# we use half of a node
nb_cores = cluster.nb_cores_per_node // 2

for iexp in range(1):  # a single experiment here; adapt to your dataset
    # we restrict the number of workers for the topology to limit memory
    # usage; the int() is needed because floor division by a float
    # returns a float
    command = f"job_pre.py {iexp} --nb-cores {int(nb_cores // 1.5)}"
    cluster.submit_script(
        command,
        name_run=f"fluidimage_preproc_exp{iexp}",
        nb_cores_per_node=nb_cores,
        walltime="4:00:00",
        omp_num_threads=1,
        idempotent=True,
        delay_signal_walltime=600,
        ask=False,
    )

params_piv.py#

#! /usr/bin/env python
"""Contains the parameters for the PIV computation
==================================================

This can be run in ipython to explore the PIV parameters.

To find good parameters, try the PIV computation with::

  ./try_piv.py &

"""

from pathlib import Path

from fluidimage.piv import Topology

path_here = Path(__file__).absolute().parent


def get_path(iexp):
    """Silly example of get_path function..."""
    return (path_here / "../../../image_samples/Jet/Images").resolve()


def make_params_piv(
    iexp, savinghow="recompute", postfix_in="pre", postfix_out="piv"
):

    # These parameters can depend on the experiment.
    # One can add conditions on `iexp` here to set good values.
    # Do NOT change a default value when you want to change the value for
    # only one experiment; add a condition on `iexp` instead.

    path_images_dir = get_path(iexp)
    assert path_images_dir.exists()

    if postfix_in == "":
        path_in = path_images_dir
    else:
        if not postfix_in.startswith("."):
            postfix_in = "." + postfix_in
        path_in = path_images_dir.with_suffix(postfix_in)

    params = Topology.create_default_params()

    params.series.path = path_in / "c*.png"
    print(f"{params.series.path = }")

    params.piv0.shape_crop_im0 = 48
    params.piv0.displacement_max = 5

    # for multi-peak search
    params.piv0.nb_peaks_to_search = 2
    params.piv0.particle_radius = 3

    params.mask.strcrop = ":, 50:"

    params.multipass.number = 2
    params.multipass.use_tps = "last"
    params.multipass.smoothing_coef = 10.0
    params.multipass.threshold_tps = 0.1

    params.fix.correl_min = 0.2
    params.fix.threshold_diff_neighbour = 2

    params.saving.how = savinghow
    params.saving.postfix = postfix_out

    return params


if __name__ == "__main__":
    from args import parse_args

    args = parse_args(doc="", postfix_in="pre", postfix_out="piv_try")
    params = make_params_piv(
        args.exp,
        savinghow=args.saving_how,
        postfix_in=args.postfix_in,
        postfix_out=args.postfix_out,
    )
    print(params)
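
For a real project, `get_path` would map the experiment index to the corresponding directory. A hypothetical sketch (the directory layout is an assumption)::

  def get_path(iexp):
      """Return the images directory of experiment `iexp` (hypothetical layout)."""
      return (path_here / f"../data/exp{iexp}/Images").resolve()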

try_piv.py#

#! /usr/bin/env python
"""Experiment on the parameters of the PIV computation
======================================================

It can be convenient to use this script with ipython --matplotlib

Run it, play with the object `piv` which represents the results, change the
parameters in `params_piv.py` and rerun `try_piv.py`.

Alternatively, run the script with::

  ./try_piv.py &

"""

from importlib import reload

import params_piv

from fluidimage.piv import Work

# reload so that changes in params_piv.py are taken into account when
# this script is rerun in the same ipython session
reload(params_piv)

params = params_piv.make_params_piv(iexp=0)

work = Work(params=params)

piv = work.process_1_serie()

# piv.piv0.display(show_interp=True, scale=0.05, show_error=True)
piv.display(show_interp=False, scale=0.05, show_error=True)
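
After a run in ipython, the `piv` object can be explored interactively; for instance, the first pass can be displayed as in the commented line above::

  piv.piv0.display(show_interp=True, scale=0.05, show_error=True)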

job_piv.py#

#! /usr/bin/env python
"""
Job computing all PIV fields
============================

To be launched in the terminal::

  ./job_piv.py 0 --nb-cores 2

see also the help::

  ./job_piv.py -h

"""

import params_piv
from job_pre import check_nb_cores

from fluidimage.piv import Topology


def main(args):
    params = params_piv.make_params_piv(
        args.exp,
        savinghow=args.saving_how,
        postfix_in=args.postfix_in,
        postfix_out=args.postfix_out,
    )

    check_nb_cores(args)

    topology = Topology(params, nb_max_workers=int(args.nb_cores))
    topology.compute(sequential=args.seq)


if __name__ == "__main__":
    from args import parse_args

    args = parse_args(doc=__doc__, postfix_in="pre", postfix_out="piv")
    print(args)
    main(args)
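
The input and output postfixes can be overridden on the command line (flags defined in `args.py`; the postfix `piv_fine` is only an example)::

  ./job_piv.py 0 --nb-cores 4 -in pre -out piv_fine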

submit_piv.py#

#! /usr/bin/env python
"""
Script to submit PIV jobs on a cluster (submit_piv.py)
======================================================

To be launched in bash::

  ./submit_piv.py

"""

from fluiddyn.clusters.legi import Calcul8 as Calcul

cluster = Calcul()

# we use half of a node
nb_cores = cluster.nb_cores_per_node // 2

for iexp in range(1):  # a single experiment here; adapt to your dataset
    command = f"job_piv.py {iexp} --nb-cores {nb_cores}"
    cluster.submit_script(
        command,
        name_run=f"fluidimage_exp{iexp}",
        nb_cores_per_node=nb_cores,
        walltime="4:00:00",
        omp_num_threads=1,
        idempotent=True,
        delay_signal_walltime=600,
        ask=False,
    )