Module robofish.io.app
This module defines the command line interface.
All commands provide a --help option with more information about their usage.
Print
robofish-io-print example.hdf5
Inspect the content of files.
usage: robofish-io-print [-h] [--output_format {shape,full}] [--full_attrs] path
This function can be used to print hdf5 files from the command line
positional arguments:
path The path to a hdf5 file
optional arguments:
-h, --help show this help message and exit
--output_format {shape,full}
Choose how datasets are printed: either the shapes or the full content is printed
--full_attrs Show full unabbreviated values for attributes
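For example, to print the full content of the datasets instead of only their shapes (the file name is a placeholder):
robofish-io-print --output_format full example.hdf5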
Evaluate
robofish-io-evaluate *analysis_type* example.hdf5
Show some property of a file.
usage: robofish-io-evaluate [-h] [--names NAMES [NAMES ...]] [--save_path SAVE_PATH]
{speed,turn,orientation,relative_orientation,distance_to_wall,tank_positions,trajectories,evaluate_positionVec,follow_iid} paths [paths ...]
This function can be called from the command line to evaluate files.
Different evaluation methods generate graphs from the given files.
The first argument, 'analysis_type', selects the type of analysis.
positional arguments:
{speed,turn,orientation,relative_orientation,distance_to_wall,tank_positions,trajectories,evaluate_positionVec,follow_iid}
The type of analysis.
speed - Evaluate the speed of the entities as a histogram.
turn - Evaluate the turn angles of the entities as a histogram.
orientation - Evaluate the orientations of the entities on a 2d grid.
relative_orientation - Evaluate the relative orientations of the entities as a histogram.
distance_to_wall - Evaluate the distances of the entities to the walls as a histogram.
tank_positions - Evaluate the positions of the entities as a heatmap.
trajectories - Evaluate the trajectories of the entities.
evaluate_positionVec - Evaluate the vectors pointing from the focal fish to the conspecifics as a heatmap.
follow_iid - Evaluate the follow metric with respect to the inter-individual distance (iid).
paths The paths to files or folders. Multiple paths can be given to compare experiments.
optional arguments:
-h, --help show this help message and exit
--names NAMES [NAMES ...]
Names that should be used in the graphs instead of the paths.
--save_path SAVE_PATH
Filename for saving resulting graphics.
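For example, to compare the speed distributions of two experiments and save the resulting plot (folder and file names are placeholders):
robofish-io-evaluate speed experiment_a experiment_b --names A B --save_path speed.png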
Trackviewer
robofish-trackviewer example.hdf5
The trackviewer comes from a separate repository; it is included in the install instructions.
usage: robofish-trackviewer [-h] [--draw-labels] [--draw-view-vectors] [--far-plane FAR_PLANE] [--view-of-agents field of perception number of bins] [--view-of-walls field of perception number of bins]
[--view-of-walls-matches]
[trackset_file]
View RoboFish tracks in a GUI.
positional arguments:
trackset_file Path to HDF5 file containing the tracks to view
optional arguments:
-h, --help show this help message and exit
--draw-labels Whether to draw labels inside the agents' outlines
--draw-view-vectors Whether to draw view vectors to the right of / below the trackfile
--far-plane FAR_PLANE
Maximum distance an agent can see
--view-of-agents field of perception number of bins
--view-of-walls field of perception number of bins
--view-of-walls-matches
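For example, to view a file with agent labels and view vectors drawn (the file name is a placeholder):
robofish-trackviewer example.hdf5 --draw-labels --draw-view-vectors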
Expand source code
# -*- coding: utf-8 -*-
"""
.. include:: ../../../docs/app.md
"""
# -----------------------------------------------------------
# Dec 2020 Andreas Gerken, Berlin, Germany
# Released under GNU 3.0 License
# email andi.gerken@gmail.com
# -----------------------------------------------------------
import robofish.io
from robofish.io import utils
import argparse
import logging
import warnings
from tqdm.auto import tqdm
import itertools
import numpy as np
def print_file(args=None):
"""This function can be used to print hdf5 files from the command line
Returns:
A human readable print of a given hdf5 file.
"""
parser = argparse.ArgumentParser(
description="This function can be used to print hdf5 files from the command line"
)
parser.add_argument("path", type=str, help="The path to a hdf5 file")
parser.add_argument(
"--output_format",
type=str,
choices=["shape", "full"],
default="shape",
help="Choose how datasets are printed, either the shapes or the full content is printed",
)
parser.add_argument(
"--full_attrs",
default=False,
action="store_true",
help="Show full unabbreviated values for attributes",
)
if args is None:
args = parser.parse_args()
with robofish.io.File(path=args.path, strict_validate=False) as f:
print(f.to_string(output_format=args.output_format, full_attrs=args.full_attrs))
print()
valid = f.validate(strict_validate=False)[0]
print("Valid file" if valid else "Invalid file")
return not valid
def update_calculated_data(args=None):
parser = argparse.ArgumentParser(
description="This function updates all calculated data from files."
)
parser.add_argument(
"path",
type=str,
nargs="+",
help="The path to one or multiple files and/or folders.",
)
if args is None:
args = parser.parse_args()
files_per_path = utils.get_all_files_from_paths(args.path)
files = [
f for f_in_path in files_per_path for f in f_in_path
] # Concatenate all files to one list
assert len(files) > 0, f"No files found in path {args.path}."
pbar = tqdm(files)
for fp in pbar:
try:
with robofish.io.File(fp, "r+", validate_poses_hash=False) as f:
if f.update_calculated_data(verbose=False):
pbar.set_description(f"File {fp} was updated")
else:
pbar.set_description(f"File {fp} was already up to date")
except Exception as e:
warnings.warn(f"The file {fp} could not be updated.")
print(e)
# This should not be necessary since the data will always have the calculated data by default.
# def clear_calculated_data(args=None):
# parser = argparse.ArgumentParser(
# description="This function clears calculated data from robofish.io files."
# )
# parser.add_argument(
# "path",
# type=str,
# nargs="+",
# help="The path to one or multiple files and/or folders.",
# )
# if args is None:
# args = parser.parse_args()
# files_per_path = utils.get_all_files_from_paths(args.path)
# files = [
# f for f_in_path in files_per_path for f in f_in_path
# ] # Concatenate all files to one list
# assert len(files) > 0, f"No files found in path {args.path}."
# for fp in files:
# print(f"File {fp}")
# with robofish.io.File(
# fp, "r+", validate_poses_hash=False, store_calculated_data=False
# ) as f:
# f.clear_calculated_data()
def validate(args=None):
"""This function can be used to validate hdf5 files.
The function can be directly accessed from the commandline and can be given
any number of files or folders. The function returns the validity of the files
in a human readable format or as a raw output.
Returns:
A human readable table of each file and its validity
"""
parser = argparse.ArgumentParser(
description="The function can be directly accessed from the commandline and can be given any number of files or folders. The function returns the validity of the files in a human readable format or as a raw output."
)
parser.add_argument(
"--output_format",
type=str,
default="h",
choices=["h", "raw"],
help="Output format, can be either h for human readable or raw for a dict.",
)
parser.add_argument(
"path",
type=str,
nargs="+",
help="The path to one or multiple files and/or folders.",
)
if args is None:
args = parser.parse_args()
logging.getLogger().setLevel(logging.ERROR)
files_per_path = utils.get_all_files_from_paths(args.path)
files = [
f for f_in_path in files_per_path for f in f_in_path
] # Concatenate all files to one list
if len(files) == 0:
logging.getLogger().setLevel(logging.INFO)
logging.info("No files found in %s" % args.path)
return
validity_dict = {}
for fp in files:
with robofish.io.File(fp) as f:
validity_dict[str(fp)] = f.validate(strict_validate=False)
if args.output_format == "raw":
return validity_dict
max_filename_width = max([len(str(f)) for f in files])
error_code = 0
for fp, (validity, validity_message) in validity_dict.items():
filled_file = str(fp).ljust(max_filename_width + 3)
if not validity:
error_code = 1
msg = f"{filled_file}:{validity}"
if validity_message != "":
msg += f"\n{validity_message}"
print(msg)
return error_code
def render(args=None):
parser = argparse.ArgumentParser(
description="This function shows the file as animation."
)
parser.add_argument(
"path",
type=str,
help="The path to one file.",
)
parser.add_argument(
"-vp",
"--video_path",
default=None,
type=str,
help="Path to save the video to (mp4). If a path is given, the animation won't be played.",
)
default_options = {
"linewidth": 2,
"speedup": 1,
"trail": 100,
"entity_scale": 0.2,
"fixed_view": False,
"view_size": 60,
"slow_view": 0.8,
"cut_frames_start": 0,
"cut_frames_end": 0,
"show_text": False,
"render_goals": False,
"render_targets": False,
"figsize": 10,
}
for key, value in default_options.items():
if isinstance(value, bool):
parser.add_argument(
f"--{key}",
default=value,
action="store_true" if value is False else "store_false",
help=f"Optional setter for video option {key}.\tDefault: {value}",
)
else:
parser.add_argument(
f"--{key}",
default=value,
type=type(value),
help=f"Optional video option {key} with type {type(value)}.\tDefault: {value}",
)
if args is None:
args = parser.parse_args()
print(args)
f = robofish.io.File(path=args.path)
f.render(**vars(args))
def update_individual_ids(args=None):
parser = argparse.ArgumentParser()
parser.add_argument(
"path",
type=str,
nargs="+",
help="The path to one or multiple files and/or folders.",
)
if args is None:
args = parser.parse_args()
files_per_path = utils.get_all_files_from_paths(args.path)
files_per_path = [sorted(f_in_path) for f_in_path in files_per_path]
files = list(itertools.chain.from_iterable(files_per_path))
file_names = [f.name for f in files]
# make sure that there are no duplicate files
unique, counts = np.unique(file_names, return_counts=True)
if not (counts == 1).all():
warnings.warn(
f"There are duplicate files in the path! {unique[np.where(counts != 1)]}. \nThey will get the same individual ids."
)
y = input(
f"Are you sure you want to update the global individual id of {len(files)} files? (y/n)"
)
if y != "y":
print("The update was aborted. Printing the existing individual ids.")
else:
# Split filenames like "recording1-sub_0" into ["recording1", "0"]
split_file_names = np.array([str(f.name).split("-sub_") for f in files])
# Find uniques in the first part of the split names
_, unique_inverse = np.unique(split_file_names[:, 0], return_inverse=True)
running_individual_id = 0
for unique in range(max(unique_inverse) + 1):
file_ids = np.where(unique_inverse == unique)
for file in np.array(files)[file_ids]:
video = None
n_fish = None
with robofish.io.File(file, "r+") as f:
if n_fish is None:
n_fish = len(f.entities)
else:
assert n_fish == len(
f.entities
), f"Number of fish in file {file} is not the same as in the previous file."
if video is None:
video = f.attrs["video"]
else:
assert (
video == f.attrs["video"]
), f"Video in file {file} is not the same as in the previous file."
for e, entity in enumerate(f.entities):
entity.attrs["individual_id"] = running_individual_id + e
# Delete the old individual_id attribute
if "global_individual_id" in entity.attrs:
del entity.attrs["global_individual_id"]
running_individual_id += n_fish
print("Update finished.")
for fp in sorted(files):
with robofish.io.File(fp, "r") as f:
try:
print(f"File {fp}\t: {[e.attrs['individual_id'] for e in f.entities]}")
except Exception as e:
if y == "y":
print(f"Could not read individual_id from {fp}")
print(e)
Functions
def print_file(args=None)
This function can be used to print hdf5 files from the command line
Returns
A human readable print of a given hdf5 file.
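Since print_file only calls parse_args() when args is None, it can also be invoked from Python with a pre-built argparse.Namespace. A minimal sketch, assuming a file named example.hdf5 exists:
import argparse
import robofish.io.app as app

# Hypothetical example: print the dataset shapes of example.hdf5 without the CLI.
args = argparse.Namespace(path="example.hdf5", output_format="shape", full_attrs=False)
invalid = app.print_file(args)  # returns True if the file failed validation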
def render(args=None)
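render builds its keyword arguments from the parsed namespace, so a programmatic call needs every option the CLI would otherwise supply. A rough sketch using the defaults listed in the source above (file and output names are placeholders):
import argparse
import robofish.io.app as app

# Hypothetical example: render example.hdf5 to out.mp4 with the CLI default options.
args = argparse.Namespace(
    path="example.hdf5",
    video_path="out.mp4",
    linewidth=2, speedup=1, trail=100, entity_scale=0.2,
    fixed_view=False, view_size=60, slow_view=0.8,
    cut_frames_start=0, cut_frames_end=0,
    show_text=False, render_goals=False, render_targets=False,
    figsize=10,
)
app.render(args)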
def update_calculated_data(args=None)
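The same pattern works for batch-updating the cached data of a whole folder from Python (the folder name is a placeholder):
import argparse
import robofish.io.app as app

# Hypothetical example: refresh the calculated data of every file below data/.
app.update_calculated_data(argparse.Namespace(path=["data/"]))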
def update_individual_ids(args=None)
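Note that update_individual_ids asks for confirmation via input() before writing, so it is mainly meant for interactive use. A sketch (the folder name is a placeholder):
import argparse
import robofish.io.app as app

# Hypothetical example: assign consecutive individual_id attributes to all files
# below recordings/; the function prompts for a y/n confirmation first.
app.update_individual_ids(argparse.Namespace(path=["recordings/"]))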
def validate(args=None)
This function can be used to validate hdf5 files.
The function can be directly accessed from the commandline and can be given any number of files or folders. The function returns the validity of the files in a human readable format or as a raw output.
Returns
A human readable table of each file and its validity
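With output_format="raw", validate returns the validity dictionary instead of printing a table, which is convenient from Python. A minimal sketch (the folder name is a placeholder):
import argparse
import robofish.io.app as app

# Hypothetical example: collect {path: (valid, message)} for every file below data/.
validity = app.validate(argparse.Namespace(path=["data/"], output_format="raw"))
for path, (valid, message) in validity.items():
    print(path, valid, message)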