From b697e73c3d861c209152ccfb140ae00fbc6e9925 Mon Sep 17 00:00:00 2001
From: Jupp56
Date: Fri, 25 Feb 2022 23:57:01 +0000
Subject: [PATCH] Enhanced par_for_each and par_for_each_mut docs (#4039)

# Objective

Continuation of #2663 due to git problems - better documentation for
`Query::par_for_each` and `Query::par_for_each_mut`.

## Solution

Go into more detail about the function parameters.
---
 crates/bevy_ecs/src/system/query.rs | 21 +++++++++++++++++++--
 1 file changed, 19 insertions(+), 2 deletions(-)

diff --git a/crates/bevy_ecs/src/system/query.rs b/crates/bevy_ecs/src/system/query.rs
index df1098fd71513..8d65633de149d 100644
--- a/crates/bevy_ecs/src/system/query.rs
+++ b/crates/bevy_ecs/src/system/query.rs
@@ -503,10 +503,26 @@ where
         };
     }
 
-    /// Runs `f` on each query result in parallel using the given task pool.
+    /// Runs `f` on each query result in parallel using the given [`TaskPool`].
     ///
     /// This can only be called for immutable data, see [`Self::par_for_each_mut`] for
     /// mutable access.
+    ///
+    /// # Tasks and batch size
+    ///
+    /// The items in the query are split into batches.
+    /// Internally, this function spawns a group of futures that each take on a `batch_size` sized section of the items (or fewer, if the items do not divide evenly).
+    /// Then, the tasks in the [`TaskPool`] work through these futures.
+    ///
+    /// You can use `batch_size` to tune between maximum multithreading ability (many small batches) and minimum parallelization overhead (few big batches).
+    /// Rule of thumb: If the function body is (mostly) computationally expensive but there are not many items, a small batch size (= more batches) may help to even out the load.
+    /// If the body is computationally cheap and you have many items, a large batch size (= fewer batches) avoids spawning additional futures that don't help to even out the load.
+    ///
+    /// # Arguments
+    ///
+    /// * `task_pool` - The [`TaskPool`] to use
+    /// * `batch_size` - The number of items per batch
+    /// * `f` - The function to run on each item in the query
     #[inline]
     pub fn par_for_each<FN: Fn(<Q::Fetch as Fetch<'w, 's>>::Item) + Send + Sync + Clone>(
         &'s self,
@@ -529,7 +545,8 @@ where
         };
     }
 
-    /// Runs `f` on each query result in parallel using the given task pool.
+    /// Runs `f` on each query result in parallel using the given [`TaskPool`].
+    /// See [`Self::par_for_each`] for more details.
     #[inline]
     pub fn par_for_each_mut<'a, FN: Fn(<Q::Fetch as Fetch<'a, 'a>>::Item) + Send + Sync + Clone>(
        &'a mut self,
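For readers unfamiliar with the API being documented, a minimal usage sketch follows. It assumes the Bevy revision this patch targets, where `par_for_each_mut` still takes an explicit task pool; the `Velocity`/`Position` components, the `move_system` function, the use of `ComputeTaskPool`, and the batch size of 32 are illustrative assumptions, not part of the patch.

```rust
use bevy_ecs::prelude::*;
use bevy_tasks::ComputeTaskPool;

// Hypothetical components used only for this illustration.
#[derive(Component)]
struct Position { x: f32, y: f32 }

#[derive(Component)]
struct Velocity { x: f32, y: f32 }

// A system that advances every Position by its Velocity in parallel.
// `ComputeTaskPool` derefs to `TaskPool`, so `&pool` satisfies the
// `task_pool: &TaskPool` parameter described in the new docs.
fn move_system(pool: Res<ComputeTaskPool>, mut query: Query<(&Velocity, &mut Position)>) {
    // The per-item work here is cheap, so a fairly large batch size
    // (fewer, bigger batches) keeps the future-spawning overhead low,
    // matching the rule of thumb in the added documentation.
    query.par_for_each_mut(&pool, 32, |(velocity, mut position)| {
        position.x += velocity.x;
        position.y += velocity.y;
    });
}
```

Conversely, if each item required expensive computation and the query matched only a handful of entities, a smaller batch size would spread the work across more tasks, as the added doc comment suggests.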