Skip to content
Snippets Groups Projects
Commit 598f4bd2 authored by Andi Gerken's avatar Andi Gerken
Browse files

Added utils draft, added tests

parent 88aff831
No related branches found
No related tags found
No related merge requests found
Pipeline #33259 passed
**/__pycache__
*.egg
*.egg-info*
.vscode
.venv
......@@ -2,12 +2,18 @@
from setuptools import setup, find_packages
entry_points = {
"console_scripts": [
"robofish-io-validate=robofish.io.app:validate",
]
}
setup(
name="robofish-io",
version="0.1",
author="",
author_email="",
install_requires=[],
install_requires=['h5py'],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
......@@ -21,4 +27,5 @@ setup(
packages=find_packages("src"),
package_dir={"": "src"},
zip_safe=True,
entry_points=entry_points,
)
# -*- coding: utf-8 -*-
import argparse
import robofish.io.util as util
def validate():
    """Command line entry point: validate one or more hdf5 files.

    Parses the positional path arguments, loads every referenced hdf5
    file via ``util.load_hdf5`` and prints the validation result of each.
    """
    parser = argparse.ArgumentParser(description='TODO')
    parser.add_argument('path', type=str, nargs='+', help='TODO')
    parsed = parser.parse_args()
    util.validate_dict(util.load_hdf5(parsed.path))
# -*- coding: utf-8 -*-
import os
import h5py
import traceback
def load_hdf5(paths):
    """Load hdf5 files from the given location(s).

    Accepts the path to a single hdf5 file, the path to a folder
    containing hdf5 files, or a list mixing both.

    Args:
        paths (str or list of str): Path(s) to hdf5 file(s) or folder(s).

    Returns:
        dict: Maps each file's basename to its opened, read-only
        ``h5py.File``. Files with the same basename overwrite each other.

    Raises:
        OSError: If a path does not exist or is not a valid hdf5 file.
    """
    if not isinstance(paths, list):
        paths = [paths]

    hdf5_dict = {}
    for path in paths:
        if os.path.isdir(path):
            # Collect all *.hdf5 entries directly inside the folder.
            # Bug fix: the original tested os.path.isdir(file) on the bare
            # filename, which is resolved against the cwd instead of the
            # folder being listed; join with the folder path first.
            files = [os.path.join(path, name)
                     for name in os.listdir(path)
                     if name.endswith('.hdf5')
                     and not os.path.isdir(os.path.join(path, name))]
        else:
            files = [path]
        hdf5_dict.update({os.path.basename(f): h5py.File(f, 'r')
                          for f in files})
    return hdf5_dict
def create_from_dict(path, dict):
    """Todo: create an hdf5 file at *path* from a nested dict layout.

    Currently a draft: the layout argument is not written into the file
    yet; only a fresh, writable file is created and returned.
    """
    # NOTE(review): the parameter name shadows the builtin ``dict`` and is
    # unused so far — presumably the draft layout will be written later.
    return h5py.File(path, 'w')
def validate_dict(hdf5_dict):
    """Todo: print the validation result for every file in *hdf5_dict*.

    The keys (file paths) are not used; each value is passed to
    ``validate`` and the resulting tuple is printed.
    """
    for hdf5_file in hdf5_dict.values():
        print(validate(hdf5_file))
def validate(file):
    """Check that an opened hdf5 file contains the required draft layout.

    Expected keys (draft):
        'version', 'world size', 'root' -> 'entities' -> ...
    # NOTE(review): the example layout elsewhere in this module nests
    # version/world_size under 'root' and spells it 'world_size' — the
    # intended schema should be confirmed and the two made consistent.

    Args:
        file: An opened ``h5py.File`` (or any mapping with the same keys).

    Returns:
        tuple: ``(True, "Valid")`` when all required keys are present,
        ``(False, "Invalid: <reason>")`` otherwise.
    """
    try:
        # Subscripting raises KeyError for a missing key; the values
        # themselves are not inspected yet.
        file['version']
        file['world size']
        root = file['root']
        root['entities']
        return (True, "Valid")
    except Exception as e:
        # Bug fix: e.args[0] raised IndexError for exceptions constructed
        # without arguments; fall back to str(e) in that case.
        reason = e.args[0] if e.args else str(e)
        return (False, "Invalid: " + str(reason))
File added
File added
File added
import pytest
import robofish.io
import robofish.io.util as util
import os
import h5py
def test_loadhdf5():
    """Test the robofish.io.util.load_hdf5 function."""
    here = os.path.dirname(__file__)
    rel = ['../../resources/example1.hdf5',
           '../../resources/example2.hdf5',
           '../../resources/',
           '../not_existent.hdf5']
    files = [os.path.abspath(here + '/' + r) for r in rel]

    # Single file.
    assert len(util.load_hdf5(files[0])) == 1
    # Two explicit files.
    assert len(util.load_hdf5(files[:2])) == 2
    # Whole folder (expected to hold three hdf5 files).
    assert len(util.load_hdf5(files[2])) == 3
    # Folder plus files it already contains — deduplicated by basename.
    assert len(util.load_hdf5(files[:3])) == 3
    # Nonexistent file must raise.
    with pytest.raises(OSError):
        util.load_hdf5(files[3])
def test_create_from_dict():
    """Test robofish.io.util.create_from_dict with a draft layout dict."""
    test_dict = {
        'root': {
            'version': None,
            'world_size': None,
            'entities': {
                'entity': {
                    'type': None,
                    'poses': [],
                    'outlines': [],
                    'time': {
                        'monotonic points': [],
                        'calendar points': []
                    }
                }
            }
        }
    }
    testfile_path = os.path.abspath(os.path.dirname(
        __file__) + '/../../resources/test_dataset.hdf5')
    h5file = util.create_from_dict(testfile_path, test_dict)
    # Bug fix: compare with isinstance against the public h5py.File class
    # instead of an exact-type check on the private h5py._hl.files path.
    assert isinstance(h5file, h5py.File)
    # Close the handle so the test does not leak an open hdf5 file.
    h5file.close()
# Allow running the tests directly as a script, without pytest.
if __name__ == '__main__':
    test_loadhdf5()
    test_create_from_dict()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment