diff --git a/crates/re_data_source/src/data_loader/mod.rs b/crates/re_data_source/src/data_loader/mod.rs
index 4df6e4355ef5..1e090cb23eac 100644
--- a/crates/re_data_source/src/data_loader/mod.rs
+++ b/crates/re_data_source/src/data_loader/mod.rs
@@ -30,7 +30,7 @@ pub struct DataLoaderSettings {
     // TODO(#5350): actually support this
     pub opened_store_id: Option<StoreId>,
 
-    /// What should the entity paths be prefixed with?
+    /// What should the logged entity paths be prefixed with?
    pub entity_path_prefix: Option<EntityPath>,
 
    /// At what time(s) should the data be logged to?
diff --git a/examples/python/external_data_loader/main.py b/examples/python/external_data_loader/main.py
index f052321923db..e897ac7c81d3 100755
--- a/examples/python/external_data_loader/main.py
+++ b/examples/python/external_data_loader/main.py
@@ -14,6 +14,9 @@
 # It is up to you whether you make use of that shared recording ID or not.
 # If you use it, the data will end up in the same recording as all other plugins interested in
 # that file, otherwise you can just create a dedicated recording for it. Or both.
+#
+# Check out `re_data_source::DataLoaderSettings` documentation for an exhaustive listing of
+# the available CLI parameters.
 
 parser = argparse.ArgumentParser(
     description="""
This is an example executable data-loader plugin for the Rerun Viewer.
@@ -28,7 +31,23 @@
 """
 )
 parser.add_argument("filepath", type=str)
-parser.add_argument("--recording-id", type=str)
+parser.add_argument("--recording-id", type=str, help="optional recommended ID for the recording")
+parser.add_argument("--entity-path-prefix", type=str, help="optional prefix for all entity paths")
+parser.add_argument(
+    "--timeless", action="store_true", default=False, help="optionally mark data to be logged as timeless"
+)
+parser.add_argument(
+    "--time",
+    type=str,
+    action="append",
+    help="optional timestamps to log at (e.g. 
`--time sim_time=1709203426`)",
+)
+parser.add_argument(
+    "--sequence",
+    type=str,
+    action="append",
+    help="optional sequences to log at (e.g. `--sequence sim_frame=42`)",
+)
 
 args = parser.parse_args()
 
@@ -44,10 +63,35 @@ def main() -> None:
     # The most important part of this: log to standard output so the Rerun Viewer can ingest it!
     rr.stdout()
 
+    set_time_from_args()
+
+    if args.entity_path_prefix:
+        entity_path = f"{args.entity_path_prefix}/{args.filepath}"
+    else:
+        entity_path = args.filepath
+
     with open(args.filepath) as file:
         body = file.read()
         text = f"""## Some Python code\n```python\n{body}\n```\n"""
-        rr.log(args.filepath, rr.TextDocument(text, media_type=rr.MediaType.MARKDOWN), timeless=True)
+        rr.log(entity_path, rr.TextDocument(text, media_type=rr.MediaType.MARKDOWN), timeless=args.timeless)
+
+
+def set_time_from_args() -> None:
+    if not args.timeless and args.time is not None:
+        for time_str in args.time:
+            parts = time_str.split("=")
+            if len(parts) != 2:
+                continue
+            timeline_name, time = parts
+            rr.set_time_seconds(timeline_name, float(time))
+
+    if not args.timeless and args.sequence is not None:
+        for seq_str in args.sequence:
+            parts = seq_str.split("=")
+            if len(parts) != 2:
+                continue
+            timeline_name, seq = parts
+            rr.set_time_sequence(timeline_name, int(seq))
 
 
 if __name__ == "__main__":
diff --git a/examples/python/log_file/main.py b/examples/python/log_file/main.py
index bef3c2a9444f..113567a1a132 100755
--- a/examples/python/log_file/main.py
+++ b/examples/python/log_file/main.py
@@ -34,12 +34,12 @@
 for filepath in args.filepaths:
     if not args.from_contents:
         # Either log the file using its path…
-        rr.log_file_from_path(filepath)
+        rr.log_file_from_path(filepath, entity_path_prefix="log_file_example")
     else:
         # …or using its contents if you already have them loaded for some reason.
        try:
             with open(filepath, "rb") as file:
-                rr.log_file_from_contents(filepath, file.read())
+                rr.log_file_from_contents(filepath, file.read(), entity_path_prefix="log_file_example")
         except Exception:
             pass
 
diff --git a/examples/rust/external_data_loader/src/main.rs b/examples/rust/external_data_loader/src/main.rs
index d9b20b2790bc..fbda74d64d54 100644
--- a/examples/rust/external_data_loader/src/main.rs
+++ b/examples/rust/external_data_loader/src/main.rs
@@ -37,7 +37,7 @@ struct Args {
     #[argh(option)]
     entity_path_prefix: Option<String>,
 
-    /// optional mark data to be logged as timeless
+    /// optionally mark data to be logged as timeless
     #[argh(switch)]
     timeless: bool,
 
diff --git a/rerun_py/rerun_sdk/rerun/_log.py b/rerun_py/rerun_sdk/rerun/_log.py
index 8b54eaf5f4f8..ef9256418c5e 100644
--- a/rerun_py/rerun_sdk/rerun/_log.py
+++ b/rerun_py/rerun_sdk/rerun/_log.py
@@ -283,10 +283,14 @@ def log_components(
     )
 
 
+# TODO(#3841): expose timepoint settings once we implement stateless APIs
 @catch_and_log_exceptions()
 def log_file_from_path(
     file_path: str | Path,
     *,
+    recording_id: str | None = None,
+    entity_path_prefix: str | None = None,
+    timeless: bool | None = None,
     recording: RecordingStream | None = None,
 ) -> None:
     r"""
@@ -304,6 +308,15 @@
     file_path:
         Path to the file to be logged.
 
+    recording_id:
+        The recommended `RecordingId` to log the data to.
+
+    entity_path_prefix:
+        What should the logged entity paths be prefixed with?
+
+    timeless:
+        Should the logged data be timeless?
+
     recording:
         Specifies the [`rerun.RecordingStream`][] to use.
         If left unspecified, defaults to the current active data recording, if there is one.
See @@ -311,14 +324,24 @@ def log_file_from_path( """ - bindings.log_file_from_path(Path(file_path), recording=recording) + bindings.log_file_from_path( + Path(file_path), + recording_id=recording_id, + entity_path_prefix=entity_path_prefix, + timeless=timeless, + recording=recording, + ) +# TODO(cmc): expose timepoint settings once we implement stateless APIs @catch_and_log_exceptions() def log_file_from_contents( file_path: str | Path, file_contents: bytes, *, + recording_id: str | None = None, + entity_path_prefix: str | None = None, + timeless: bool | None = None, recording: RecordingStream | None = None, ) -> None: r""" @@ -339,6 +362,15 @@ def log_file_from_contents( file_contents: Contents to be logged. + recording_id: + The recommended `RecordingId` to log the data to. + + entity_path_prefix: + What should the logged entity paths be prefixed with? + + timeless: + Should the logged data be timeless? + recording: Specifies the [`rerun.RecordingStream`][] to use. If left unspecified, defaults to the current active data recording, if there is one. 
See
@@ -346,7 +378,14 @@
 
     """
 
-    bindings.log_file_from_contents(Path(file_path), file_contents, recording=recording)
+    bindings.log_file_from_contents(
+        Path(file_path),
+        file_contents,
+        recording_id=recording_id,
+        entity_path_prefix=entity_path_prefix,
+        timeless=timeless,
+        recording=recording,
+    )
 
 
 def escape_entity_path_part(part: str) -> str:
diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs
index 0398f52b5c34..d8136d8113c1 100644
--- a/rerun_py/src/python_bridge.rs
+++ b/rerun_py/src/python_bridge.rs
@@ -967,59 +967,96 @@ fn log_arrow_msg(
 #[pyfunction]
 #[pyo3(signature = (
     file_path,
+    recording_id = None,
+    entity_path_prefix = None,
+    timeless = None,
     recording=None,
 ))]
 fn log_file_from_path(
     py: Python<'_>,
     file_path: std::path::PathBuf,
+    recording_id: Option<String>,
+    entity_path_prefix: Option<String>,
+    timeless: Option<bool>,
     recording: Option<&PyRecordingStream>,
 ) -> PyResult<()> {
-    let Some(recording) = get_data_recording(recording) else {
-        return Ok(());
-    };
-
-    let Some(recording_id) = recording.store_info().map(|info| info.store_id.clone()) else {
-        return Ok(());
-    };
-    let settings = rerun::DataLoaderSettings::recommended(recording_id);
-
-    recording
-        .log_file_from_path(&settings, file_path)
-        .map_err(|err| PyRuntimeError::new_err(err.to_string()))?;
-
-    py.allow_threads(flush_garbage_queue);
-
-    Ok(())
+    log_file(
+        py,
+        file_path,
+        None,
+        recording_id,
+        entity_path_prefix,
+        timeless,
+        recording,
+    )
 }
 
 #[pyfunction]
 #[pyo3(signature = (
     file_path,
     file_contents,
+    recording_id = None,
+    entity_path_prefix = None,
+    timeless = None,
     recording=None,
 ))]
 fn log_file_from_contents(
     py: Python<'_>,
     file_path: std::path::PathBuf,
     file_contents: &[u8],
+    recording_id: Option<String>,
+    entity_path_prefix: Option<String>,
+    timeless: Option<bool>,
+    recording: Option<&PyRecordingStream>,
+) -> PyResult<()> {
+    log_file(
+        py,
+        file_path,
+        Some(file_contents),
+        recording_id,
+        entity_path_prefix,
+        timeless,
+        recording,
+
)
+}
+
+fn log_file(
+    py: Python<'_>,
+    file_path: std::path::PathBuf,
+    file_contents: Option<&[u8]>,
+    recording_id: Option<String>,
+    entity_path_prefix: Option<String>,
+    timeless: Option<bool>,
     recording: Option<&PyRecordingStream>,
 ) -> PyResult<()> {
     let Some(recording) = get_data_recording(recording) else {
         return Ok(());
     };
 
-    let Some(recording_id) = recording.store_info().map(|info| info.store_id.clone()) else {
+    let Some(recording_id) = recording
+        .store_info()
+        .map(|info| info.store_id.clone())
+        .or(recording_id.map(|id| StoreId::from_string(StoreKind::Recording, id)))
+    else {
         return Ok(());
     };
-    let settings = rerun::DataLoaderSettings::recommended(recording_id);
 
-    recording
-        .log_file_from_contents(
-            &settings,
-            file_path,
-            std::borrow::Cow::Borrowed(file_contents),
-        )
-        .map_err(|err| PyRuntimeError::new_err(err.to_string()))?;
+    let settings = rerun::DataLoaderSettings {
+        store_id: recording_id,
+        opened_store_id: None,
+        entity_path_prefix: entity_path_prefix.map(Into::into),
+        timepoint: timeless.unwrap_or(false).then(TimePoint::timeless),
+    };
+
+    if let Some(contents) = file_contents {
+        recording
+            .log_file_from_contents(&settings, file_path, std::borrow::Cow::Borrowed(contents))
+            .map_err(|err| PyRuntimeError::new_err(err.to_string()))?;
+    } else {
+        recording
+            .log_file_from_path(&settings, file_path)
+            .map_err(|err| PyRuntimeError::new_err(err.to_string()))?;
+    }
 
     py.allow_threads(flush_garbage_queue);