diff --git a/pipelines/data_integration.py b/pipelines/data_integration.py
index 4cb52335ce223d823606dc861f1c7d5675829788..318b84d75acfb13fcea9d125fd68895097195662 100644
--- a/pipelines/data_integration.py
+++ b/pipelines/data_integration.py
@@ -25,7 +25,7 @@ from itertools import chain
 try:
     from dima.src import hdf5_writer as hdf5_lib
     from dima.utils import g5505_utils as utils
-    from dima.instruments.readers import filereader_registry
+    from dima.instruments import filereader_registry
 except ModuleNotFoundError:
     print(':)')
     import src.hdf5_writer as hdf5_lib
@@ -181,6 +181,7 @@ def copy_subtree_and_create_hdf5(src, dst, select_dir_keywords, select_file_keyw
 
     logging.info("Creating HDF5 file at: %s", dst)
+    #hdf5_path = hdf5_lib.create_hdf5_file_from_filesystem_path_new(dst, path_to_files_dict, select_dir_keywords, root_metadata_dict)
     hdf5_path = hdf5_lib.create_hdf5_file_from_filesystem_path(dst, path_to_files_dict, select_dir_keywords, root_metadata_dict)
 
     logging.info("Completed creation of HDF5 file %s at: %s", hdf5_path, dst)
 
@@ -238,6 +239,7 @@ def run_pipeline(path_to_config_yamlFile, log_dir='logs/'):
     # Collection mode processing if specified
     if 'collection' in config_dict.get('integration_mode', 'single_experiment'):
         path_to_filenames_dict = {path_to_rawdata_folder: [os.path.basename(path) for path in output_filename_path]} if output_filename_path else {}
+        #hdf5_path = hdf5_lib.create_hdf5_file_from_filesystem_path_new(path_to_rawdata_folder, path_to_filenames_dict, [], root_metadata_dict)
         hdf5_path = hdf5_lib.create_hdf5_file_from_filesystem_path(path_to_rawdata_folder, path_to_filenames_dict, [], root_metadata_dict)
         output_filename_path.append(hdf5_path)
     else:
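
Note on the first hunk: `filereader_registry` now lives one package level up, in `dima.instruments` rather than `dima.instruments.readers`. A minimal sketch of the fallback-import pattern this module relies on, assuming the in-repo paths mirror the packaged ones (only the `src.hdf5_writer` fallback is visible in the diff; the `instruments` fallback line is an assumption):

    # Prefer the installed "dima" package; fall back to in-repo imports
    # when the pipeline runs from inside the repository itself.
    try:
        from dima.src import hdf5_writer as hdf5_lib
        from dima.instruments import filereader_registry
    except ModuleNotFoundError:
        import src.hdf5_writer as hdf5_lib            # in-repo layout (from the diff)
        from instruments import filereader_registry   # assumed in-repo counterpart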