diff --git a/.azure-pipelines/azure-pipelines.yml b/.azure-pipelines/azure-pipelines.yml index cd233c27..5d47006c 100644 --- a/.azure-pipelines/azure-pipelines.yml +++ b/.azure-pipelines/azure-pipelines.yml @@ -106,7 +106,7 @@ stages: - task: UsePythonVersion@0 displayName: Set up python inputs: - versionSpec: 3.8 + versionSpec: 3.10 - task: DownloadBuildArtifacts@0 displayName: Get pre-built package diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 093a84c1..416a0faf 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -5,13 +5,18 @@ # Required version: 2 +# Set the version of python and other tools +build: + os: ubuntu-22.04 + tools: + python: '3.10' + # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/conf.py # Set the version of Python and optionally declare the requirements required to build your docs python: - version: '3.8' install: - method: pip path: . diff --git a/CHANGELOG.md b/CHANGELOG.md index dac728d8..45298140 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,9 @@ # CHANGES -## +## 0.2.2 - Fixed the axis dimensions for `images pp`. +- Added timestamp check and warning on triggers if they happen before/after shutters in `find-tristan-triggers`. +- Added `images serial` for gated access binning of events. ## 0.2.1 - Added dagnostic tool `valid-events` for checking that there are events recorded after the shutter open signal in case the binned image is blank(asynchronicity issue). Also, a couple of small improvements on the other diagnostic tools. diff --git a/docs/api.rst b/docs/api.rst index c5dddd8d..a84e8bae 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -2,6 +2,9 @@ API === +General +======= + .. automodule:: tristan :members: :show-inheritance: @@ -25,6 +28,9 @@ Data Diagnostics API =============== +General +======= + .. 
automodule:: tristan.diagnostics :members: diff --git a/docs/usage.rst b/docs/usage.rst index 70dfd5e2..76380860 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -99,6 +99,30 @@ by duration, with `-i`, or by number, with `-x`. For example, this could be used to deconstruct a rotation data collection into several rotation datasets, each corresponding to a different pump-probe delay window. +Serial crystallography tool (gated access) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +To bin events into images gated by trigger signals, use `images serial`, which will write one image per gate signal. Each 'gate-open' signal is taken as the start of an exposure and +the next 'gate-close' signal is taken as the end of the exposure. + +This tool requires at least the rising edge of the trigger signal, specified with `-g`, to be passed as *gate open* and will then look for the corresponding falling edge +to be used as *gate close*. + +.. code-block:: console + + images serial -g SYNC-rising /path/to/file + + +In some cases, it might be more useful to look at the events collected between different kinds of trigger signals, by specifying the *gate open* signal with `-g` +and the *gate close* using the `-c` flag as in the example below. + +.. 
code-block:: console + + images serial -g TTL-rising -c SYNC-falling /path/to/file + + + +============================== Apply the flatfield correction ============================== diff --git a/requirements.txt b/requirements.txt index 48376605..ba618e54 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,8 @@ dask==2022.11.1 distributed==2022.11.1 h5py==3.7.0 -hdf5plugin==3.3.1 -nexgen==0.6.14 +hdf5plugin==4.0.1 +nexgen==0.6.23 numpy==1.23.5 pandas==1.5.2 Pint==0.20.1 diff --git a/requirements_dev.txt b/requirements_dev.txt index 4c9170d5..39173e44 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -1,8 +1,8 @@ dask==2022.11.1 distributed==2022.11.1 h5py==3.7.0 -hdf5plugin==3.3.1 -nexgen==0.6.14 +hdf5plugin==4.0.1 +nexgen==0.6.23 numpy==1.23.5 pandas==1.5.2 Pint==0.20.1 diff --git a/requirements_doc.txt b/requirements_doc.txt index 45d2f55e..cbd90bae 100644 --- a/requirements_doc.txt +++ b/requirements_doc.txt @@ -1,11 +1,11 @@ dask==2022.11.1 distributed==2022.11.1 h5py==3.7.0 -hdf5plugin==3.3.1 -nexgen==0.6.14 +hdf5plugin==4.0.1 +nexgen==0.6.23 numpy==1.23.5 pandas==1.5.2 Pint==0.20.1 -Sphinx==4.5.0 -sphinx-rtd-theme==1.0.0 +Sphinx==6.2.0 +sphinx-rtd-theme==1.2.0 zarr==2.13.3 diff --git a/setup.cfg b/setup.cfg index 312dd1c9..fbeeae8f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -32,7 +32,7 @@ install_requires = dask[array,diagnostics,distributed] != 2021.3.* h5py hdf5plugin - nexgen >= 0.6.8 + nexgen >= 0.6.20 numpy pandas pint diff --git a/src/tristan/command_line/__init__.py b/src/tristan/command_line/__init__.py index 1d67b9d9..5bf90b17 100644 --- a/src/tristan/command_line/__init__.py +++ b/src/tristan/command_line/__init__.py @@ -421,3 +421,22 @@ def positive_int(value: SupportsInt) -> int: group.add_argument( "-x", "--num-sequences", help="Number of image sequences.", type=positive_int ) + +# A parser for specifying a pair of trigger signals with which to gate event processing. 
+gate_parser = argparse.ArgumentParser(add_help=False) +gate_parser.add_argument( + "-g", + "--gate-open", + help="Trigger signal denoting the start of each gate period.", + choices=triggers.keys(), + required=True, +) +gate_parser.add_argument( + "-c", + "--gate-close", + help="Trigger signal denoting the end of each gate period (optional). If not " + "provided, this will default to the complementary signal to the '--gate-open' " + "value. For example, if the '--gate-open' signal is 'TTL-rising', " + "the --gate-close signal will default to 'TTL-falling', and vice versa.", + choices=triggers.keys(), +) diff --git a/src/tristan/command_line/images.py b/src/tristan/command_line/images.py index a16f2e83..9172ed0a 100644 --- a/src/tristan/command_line/images.py +++ b/src/tristan/command_line/images.py @@ -29,7 +29,6 @@ find_start_end, find_time_bins, make_images, - valid_events, ) from ..data import ( cue_keys, @@ -41,12 +40,14 @@ latrd_data, pixel_index, seconds, + valid_events, ) from . import ( check_multiple_output_files, check_output_file, data_files, exposure_parser, + gate_parser, image_output_parser, input_parser, interval_parser, @@ -534,6 +535,154 @@ def multiple_sequences_cli(args): print(f"Images written to\n\t{output_nexus_pattern or out_file_pattern}") +def gated_images_cli(args): + """Utility to bin events into a sequence of images according to a gating signal.""" + write_mode = "w" if args.force else "x" + output_file = check_output_file(args.output_file, args.stem, "images", args.force) + + input_nexus = args.data_dir / f"{args.stem}.nxs" + if not input_nexus.exists(): + print( + "Could not find a NeXus file containing experiment metadata.\n" + "Resorting to writing raw image data without accompanying metadata." + ) + + image_size = args.image_size or determine_image_size(input_nexus) + + raw_files, _ = data_files(args.data_dir, args.stem) + + # If gate_close isn't specified, default to the complementary signal to gate_open. 
+ gate_open = triggers.get(args.gate_open) + gate_close = triggers.get(args.gate_close) or gate_open ^ (1 << 5) + + with latrd_data(raw_files, keys=cue_keys) as cues_data: + print("Finding detector shutter open and close times.") + with ProgressBar(): + start, end = find_start_end(cues_data) + + print("Finding gate signal times.") + # Here we assume no synchronization issues: + # falling edges always recorded after rising edges. + open_times = cue_times(cues_data, gate_open, after=start) + close_times = cue_times(cues_data, gate_close, before=end) + with ProgressBar(): + open_times, close_times = dask.compute(open_times, close_times) + + if not open_times.size: + sys.exit(f"Could not find a '{cues[gate_open]}' signal.") + if not close_times.size: + sys.exit(f"Could not find a '{cues[gate_close]}' signal.") + + open_times = np.sort(open_times) + close_times = np.sort(close_times) + + if not open_times.size == close_times.size: + # If size difference is just one, look for missing one right before/after + # shutters and use shutter open/close timestamp as first/last gate + if abs(open_times.size - close_times.size) > 1: + sys.exit( + "Found a non-matching number of gate open and close signals:\n\t" + f"Number of '{cues[gate_open]}' signals: {open_times.size}\n\t" + f"Number of '{cues[gate_close]}' signals: {close_times.size}\n" + f"Note that signals before the shutter open time are ignored." + ) + else: + if open_times[-1] > close_times[-1]: + print( + "WARNING! \n\t" + f"Missing last '{cues[gate_close]}' signal.\n\t" + f"Shutter close timestamp will be used instead for last image." + ) + # Append shutter close to close_times + close_times = np.append(close_times, end) + elif open_times[0] > close_times[0]: + print( + "WARNING! \n\t" + f"Missing first '{cues[gate_open]}' signal.\n\t" + f"Shutter open timestamp will be used instead for first image." 
+ ) + # Insert shutter open to open times + open_times = np.insert(open_times, 0, start) + else: + sys.exit( + "Found a non-matching number of gate open and close signals:\n\t" + f"Number of '{cues[gate_open]}' signals: {open_times.size}\n\t" + f"Number of '{cues[gate_close]}' signals: {close_times.size}\n" + ) + + num_images = open_times.size + bins = np.linspace(0, num_images, num_images + 1, dtype=np.uint64) + + if input_nexus.exists(): + try: + # Write output NeXus file if we have an input NeXus file. + output_nexus = CopyTristanNexus.serial_images_nexus( + output_file, + input_nexus, + nbins=num_images, + write_mode=write_mode, + ) + except FileExistsError: + sys.exit( + f"This output file already exists:\n\t" + f"{output_file.with_suffix('.nxs')}\n" + "Use '-f' to override, " + "or specify a different output file path with '-o'." + ) + else: + output_nexus = None + + print(f"Binning events into {num_images} images.") + + # Make a cache for the images. + images = create_cache(output_file, num_images, image_size) + + with latrd_data(raw_files, keys=(event_location_key, event_time_key)) as data: + # Consider only those events that occur between the start and end times. + data = valid_events(data, start, end) + + # Gate the events. + event_times = data[event_time_key].astype(np.int64).values + open_index = da.digitize(event_times, open_times) - 1 + close_index = da.digitize(event_times, close_times) + # Look for events that happen after gate open and before gate close + # Eliminate invalid events by looking at the open and close index + valid = open_index == close_index + valid = dd.from_dask_array(valid, index=data.index) + + # Convert the event IDs to a form that is suitable for a NumPy bincount. 
+ data[event_location_key] = pixel_index(data[event_location_key], image_size) + + columns = event_location_key, "time_bin" + dtypes = data.dtypes + dtypes["time_bin"] = dtypes.pop(event_time_key) + + meta = pd.DataFrame(columns=columns).astype(dtype=dtypes) + # Enumerate the image in the stack to which each event belongs + data = data.map_partitions(find_time_bins, bins=bins, meta=meta) + data["time_bin"] = open_index + data = data[valid] + + # Bin to images, partition by partition. + data = dd.map_partitions( + make_images, data, image_size, images, meta=meta, enforce_metadata=False + ) + + print("Computing the binned images.") + # Use multi-threading, rather than multi-processing. + with Client(processes=False): + compute_with_progress(data) + + print("Transferring the images to the output file.") + with h5py.File(output_file, write_mode) as f: + zarr.copy_all(zarr.open(images.store), f, **Bitshuffle()) + + # Delete the Zarr store. + images.store.clear() + + print(f"Images written to\n\t{output_nexus or output_file}") + + parser = argparse.ArgumentParser(description=__doc__, parents=[version_parser]) subparsers = parser.add_subparsers( help="Choose the manner in which to create images.", @@ -608,6 +757,16 @@ def multiple_sequences_cli(args): ) parser_multiple_sequences.set_defaults(func=multiple_sequences_cli) +parser_serial = subparsers.add_parser( + "serial", + description="Bin events into images, gated with trigger signals.\n\n" + "Events will be binned into as many images as there are gate signals, one image " + "per gate. 
Each 'gate-open' signal is taken as the start of an exposure and the " + "next 'gate-close' signal is taken as the end of the exposure.", + parents=[version_parser, input_parser, image_output_parser, gate_parser], +) +parser_serial.set_defaults(func=gated_images_cli) + def main(args=None): """Perform the image binning with a user-specified sub-command.""" diff --git a/src/tristan/data.py b/src/tristan/data.py index 3e7dacd8..f6330a99 100644 --- a/src/tristan/data.py +++ b/src/tristan/data.py @@ -146,7 +146,12 @@ def first_cue_time( return data[cue_time_key].loc[first_index] -def cue_times(data: dd.DataFrame, message: int, after: int | None = None) -> da.Array: +def cue_times( + data: dd.DataFrame, + message: int, + after: int | None = None, + before: int | None = None, +) -> da.Array: """ Find the timestamps of all instances of a cue message in a Tristan data set. @@ -165,6 +170,8 @@ def cue_times(data: dd.DataFrame, message: int, after: int | None = None) -> da. index = data[cue_id_key] == message if after: index &= data[cue_time_key] >= after + if before: + index &= data[cue_time_key] <= before return da.unique(data[cue_time_key][index].values) diff --git a/src/tristan/diagnostics/find_trigger_intervals.py b/src/tristan/diagnostics/find_trigger_intervals.py index e0bf912c..86dbf5a3 100644 --- a/src/tristan/diagnostics/find_trigger_intervals.py +++ b/src/tristan/diagnostics/find_trigger_intervals.py @@ -237,8 +237,8 @@ def main(args): logger.info(f"--- {k} ---") logger.info("SHUTTERS") if len(shutters[0]) > 0 and len(shutters[1]) > 0: - logger.info(f"Shutter open timestamp: {shutters[0][0]:.4f}") - logger.info(f"Shutter close timestamp: {shutters[1][0]:.4f}") + logger.info(f"Shutter open timestamp: {shutters[0][0]:.4f}.") + logger.info(f"Shutter close timestamp: {shutters[1][0]:.4f}.") diff0 = shutters[1][0] - shutters[0][0] logger.info( f"Total time between shutter opening and closing: {diff0:.4f} s." 
@@ -246,10 +246,10 @@ elif len(shutters[0]) == 0 or len(shutters[1]) == 0: logger.warning("Missing shutter information!") logger.warning( - f"Number of shutter open timestamps found: {len(shutters[0])}" + f"Number of shutter open timestamps found: {len(shutters[0])}." ) logger.warning( - f"Number of shutter close timestamps found: {len(shutters[1])}" + f"Number of shutter close timestamps found: {len(shutters[1])}." ) logger.info("LVDS") if len(v["LVDS re"]) > 0 and len(v["LVDS fe"]) > 0: @@ -273,6 +273,8 @@ logger.info("TTL") logger.info(f"Found {len(v['TTL re'])} rising edges.\n") if len(v["TTL re"]) > 0: + logger.info(f"First TTL rising edge timestamp: {v['TTL re'][0]:.4f}.") + logger.info(f"Last TTL rising edge timestamp: {v['TTL re'][-1]:.4f}.") d = [i - v["TTL re"][n - 1] for n, i in enumerate(v["TTL re"])][1:] d_avg = np.average(d) if len(d) else np.nan logger.info( @@ -321,16 +323,32 @@ logger.info( f"Found {len(v['SYNC re'])} rising edges and {len(v['SYNC fe'])} falling edges." ) + logger.info( + f"First SYNC rising edge timestamp: {v['SYNC re'][0]:.4f}." + ) + logger.info( + f"Last SYNC falling edge timestamp: {v['SYNC fe'][-1]:.4f}." + ) + if v["SYNC re"][0] < shutters[0][0]: + logger.warning( + "First SYNC rising edge was recorded before the shutter open signal! \n" + f"Timestamp difference: {shutters[0][0] - v['SYNC re'][0]} s." + ) + if v["SYNC fe"][-1] > shutters[1][0]: + logger.warning( + "Last SYNC falling edge was recorded after the shutter close signal! \n" + f"Timestamp difference: {v['SYNC fe'][-1] - shutters[1][0]} s." + ) diff4 = [b - a for a, b in zip(v["SYNC re"], v["SYNC fe"])] avg4 = np.average(diff4) if len(diff4) else np.nan logger.info( - f"Average time interval between SYNC re and fe: {avg4:.4f} s" + f"Average time interval between SYNC re and fe: {avg4:.4f} s." 
) if len(v["TTL re"]) == len(v["SYNC re"]): diff5 = [b - a for a, b in zip(v["TTL re"], v["SYNC re"])] avg5 = np.average(diff5) if len(diff5) else np.nan logger.info( - f"Average time interval between TTL re and SYNC re: {avg5:.4f} s" + f"Average time interval between TTL re and SYNC re: {avg5:.4f} s." ) else: logger.error( diff --git a/tests/test_command_line.py b/tests/test_command_line.py index 2c47bba9..36984b49 100644 --- a/tests/test_command_line.py +++ b/tests/test_command_line.py @@ -13,6 +13,7 @@ check_output_file, data_files, exposure_parser, + gate_parser, image_output_parser, image_size, input_parser, @@ -74,6 +75,24 @@ def test_data_files(dummy_data_transient): data_files(dummy_data_transient, stem) +no_help_parameters = [ + (version_parser, []), + (input_parser, ["some/dummy/path/to/file_name_meta.h5"]), + (image_output_parser, []), + (exposure_parser, ["-e", ".1"]), + (gate_parser, ["-g", "TTL-rising"]), +] + + +@pytest.mark.parametrize(("parser", "req_arguments"), no_help_parameters) +def test_no_help(capsys, parser: argparse.ArgumentParser, req_arguments: list[str]): + """Check that this parser does not introduce a help flag.""" + for flag in "-h", "--help": + with pytest.raises(SystemExit, match="2"): + parser.parse_args([*req_arguments, flag]) + assert f"error: unrecognized arguments: {flag}" in capsys.readouterr().err + + def test_version_parser(capsys): """Test that the version parser gives the correct behaviour.""" for flag in "--version", "-V": @@ -87,13 +106,6 @@ def test_version_parser_optional(): assert version_parser.parse_args([]) == argparse.Namespace() -def test_version_parser_no_help(capsys): - """Check that the version parser does not introduce a help flag.""" - with pytest.raises(SystemExit, match="2"): - version_parser.parse_args(["-h"]) - assert "error: unrecognized arguments: -h" in capsys.readouterr().err - - @pytest.mark.parametrize( "filename", ("dummy_meta.h5", "dummy_1.h5", "dummy_0001.h5", "dummy.nxs") ) @@ -243,16 +255,6 
@@ def test_input_parser_cwd(run_in_tmp_path, filename): assert args.stem == "dummy" -def test_input_parser_no_help(capsys): - """Check that the input parser does not introduce a help flag.""" - directory = "some/dummy/path/to" - stem = "file_name" - for flag in "-h", "--help": - with pytest.raises(SystemExit, match="2"): - input_parser.parse_args([f"{directory}/{stem}_meta.h5", flag]) - assert f"error: unrecognized arguments: {flag}" in capsys.readouterr().err - - def test_image_size(): """Test unpacking an image size tuple from a comma-separated string of integers.""" for size in "1,2", "1, 2", "1 ,2", "1 , 2", "(1,2)", "'1,2'", '"1,2"', "'\"(1,2'\"": @@ -331,14 +333,6 @@ def test_image_output_parser_malformed_image_size(capsys): ) -def test_image_output_parser_no_help(capsys): - """Check that the output/image size parser does not introduce a help flag.""" - for flag in "-h", "--help": - with pytest.raises(SystemExit, match="2"): - image_output_parser.parse_args([flag]) - assert f"error: unrecognized arguments: {flag}" in capsys.readouterr().err - - def test_units_of_time(): """Test the utility for applying a default unit to a quantity.""" # Check that a dimensionless input quantity defaults to seconds. @@ -452,11 +446,3 @@ def test_exposure_time_mutually_exclusive(capsys): "error: argument -e/--exposure-time: not allowed with argument -n/--num-images" in capsys.readouterr().err ) - - -def test_exposure_time_no_help(capsys): - """Check that this parser does not introduce a help flag.""" - for flag in "-h", "--help": - with pytest.raises(SystemExit, match="2"): - exposure_parser.parse_args(["-e", ".1", flag]) - assert f"error: unrecognized arguments: {flag}" in capsys.readouterr().err