Bug 1543974 - Deal with conflicting render task rect scheduling and allocation. r=kvark
author Nicolas Silva <nsilva@mozilla.com>
Sat, 04 May 2019 13:31:13 +0000
changeset 531497 5cba75dd63c2f1b4d2a34797e3e16e935f5ea763
parent 531496 40f352e2d6b863c7b7040db9098d9421802709df
child 531498 2400bc291ff5bd6c5fc84f4a52c55b9a138f7dad
push id 11265
push user ffxbld-merge
push date Mon, 13 May 2019 10:53:39 +0000
treeherder mozilla-beta@77e0fe8dbdd3
reviewers kvark
bugs 1543974
milestone 68.0a1
Bug 1543974 - Deal with conflicting render task rect scheduling and allocation. r=kvark

Due to the render task ping-pong target allocation scheme, we need to ensure:
- that tasks only read from tasks that are an odd number of passes apart,
- that render task content is kept valid long enough for all of the dependent tasks.

The former is solved in this patch through blit tasks, the latter by marking tasks for saving as needed.

Differential Revision: https://phabricator.services.mozilla.com/D29800
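As a rough illustration of the constraint described above, here is a minimal standalone sketch of the pass-parity rule the patch enforces. The names (Resolution, resolve_dependency) are illustrative only, not WebRender's actual types or API: with ping-pong targets, a task may only read dependencies written by passes of the opposite parity, so a dependency an even number of passes behind must be copied forward with a blit, and a dependency further than one pass behind must be kept alive (saved).

    // Minimal sketch of the pass-parity rule, assuming hypothetical names.
    #[derive(Debug, PartialEq)]
    enum Resolution {
        ReadDirectly,   // dependency is exactly one pass behind: read it as-is
        SaveDependency, // opposite parity but further behind: keep its rect alive
        InsertBlit,     // same parity: blit the result into the next pass
    }

    fn resolve_dependency(task_pass: i32, dep_pass: i32) -> Resolution {
        if dep_pass == task_pass - 1 {
            Resolution::ReadDirectly
        } else if dep_pass % 2 != task_pass % 2 {
            Resolution::SaveDependency
        } else {
            Resolution::InsertBlit
        }
    }

    fn main() {
        assert_eq!(resolve_dependency(3, 2), Resolution::ReadDirectly);
        assert_eq!(resolve_dependency(5, 2), Resolution::SaveDependency);
        assert_eq!(resolve_dependency(4, 2), Resolution::InsertBlit);
    }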
gfx/wr/webrender/src/render_task.rs
gfx/wr/webrender/src/tiling.rs
--- a/gfx/wr/webrender/src/render_task.rs
+++ b/gfx/wr/webrender/src/render_task.rs
@@ -148,17 +148,17 @@ impl RenderTaskTree {
     ) {
         let parent = &mut self[parent_id];
         parent.children.push(child_id);
     }
 
     /// Assign this frame's render tasks to render passes ordered so that passes appear
     /// earlier than the ones that depend on them.
     pub fn generate_passes(
-        &self,
+        &mut self,
         main_render_task: Option<RenderTaskId>,
         screen_size: DeviceIntSize,
         gpu_supports_fast_clears: bool,
     ) -> Vec<RenderPass> {
         let mut passes = Vec::new();
 
         if !self.cacheable_render_tasks.is_empty() {
             self.generate_passes_impl(
@@ -175,16 +175,19 @@ impl RenderTaskTree {
                 &[main_task],
                 screen_size,
                 gpu_supports_fast_clears,
                 true,
                 &mut passes,
             );
         }
 
+
+        self.resolve_target_conflicts(&mut passes);
+
         passes
     }
 
     /// Assign the render tasks from the tree rooted at root_task to render passes and
     /// append them to the `passes` vector so that the passes that we depend on end up
     /// _earlier_ in the pass list.
     fn generate_passes_impl(
         &self,
@@ -273,16 +276,123 @@ impl RenderTaskTree {
                 task_id,
                 task.get_dynamic_size(),
                 task.target_kind(),
                 &task.location,
             );
         }
     }
 
+    /// Resolve conflicts between the generated passes and the limitations of our target
+    /// allocation scheme.
+    ///
+    /// The render task graph operates with a ping-pong target allocation scheme where
+    /// a set of targets is written to by even passes and a different set of targets is
+    /// written to by odd passes.
+    /// Since tasks cannot read and write the same target, we can run into issues if a
+    /// task in pass N + 2 reads the result of a task in pass N.
+    /// To avoid such cases we have to insert blit tasks to copy the content of the task
+    /// into pass N + 1, which is readable by pass N + 2.
+    ///
+    /// In addition, allocated rects of pass N are currently not tracked and can be
+    /// overwritten by allocations in later passes on the same target, unless the task
+    /// has been marked for saving, which preserves the allocated rect until the end of
+    /// the frame. This is a big hammer; hopefully we won't need to mark many tasks
+    /// for saving. A better solution would be to track allocations through the entire
+    /// graph; there is a prototype of that in https://github.com/nical/toy-render-graph/
+    fn resolve_target_conflicts(&mut self, passes: &mut [RenderPass]) {
+        // Keep track of blit tasks we inserted to avoid adding several blits for the same
+        // task.
+        let mut task_redirects = vec![None; self.tasks.len()];
+
+        let mut task_passes = vec![-1; self.tasks.len()];
+        for pass_index in 0..passes.len() {
+            for task in &passes[pass_index].tasks {
+                task_passes[task.index as usize] = pass_index as i32;
+            }
+        }
+
+        for task_index in 0..self.tasks.len() {
+            if task_passes[task_index] < 0 {
+                // The task doesn't contribute to this frame.
+                continue;
+            }
+
+            let pass_index = task_passes[task_index];
+
+            // Go through each dependency and check whether it belongs
+            // to a pass that uses the same targets and/or is more than
+            // one pass behind.
+            for nth_child in 0..self.tasks[task_index].children.len() {
+                let child_task_index = self.tasks[task_index].children[nth_child].index as usize;
+                let child_pass_index = task_passes[child_task_index];
+
+                if child_pass_index == pass_index - 1 {
+                    // This should be the most common case.
+                    continue;
+                }
+
+                if child_pass_index % 2 != pass_index % 2 {
+                    // The task and its dependency aren't on the same targets,
+                    // but the dependency needs to be kept alive.
+                    self.tasks[child_task_index].mark_for_saving();
+                    continue;
+                }
+
+                if let Some(blit_id) = task_redirects[child_task_index] {
+                    // We already resolved a similar conflict with a blit task,
+                    // reuse the same blit instead of creating a new one.
+                    self.tasks[task_index].children[nth_child] = blit_id;
+
+                    // Mark for saving if the blit is more than one pass apart
+                    // from our task.
+                    if child_pass_index < pass_index - 2 {
+                        self.tasks[blit_id.index as usize].mark_for_saving();
+                    }
+
+                    continue;
+                }
+
+                // Our dependency is an even number of passes behind; we need
+                // to insert a blit to ensure we don't read from and write to
+                // the same target.
+
+                let task_id = RenderTaskId {
+                    index: child_task_index as u32,
+                    #[cfg(debug_assertions)]
+                    frame_id: self.frame_id,
+                };
+
+                let mut blit = RenderTask::new_blit(
+                    self.tasks[child_task_index].location.size(),
+                    BlitSource::RenderTask { task_id },
+                );
+
+                // Mark for saving if the blit is more than one pass apart
+                // from our task.
+                if child_pass_index < pass_index - 2 {
+                    blit.mark_for_saving();
+                }
+
+                let blit_id = RenderTaskId {
+                    index: self.tasks.len() as u32,
+                    #[cfg(debug_assertions)]
+                    frame_id: self.frame_id,
+                };
+
+                self.tasks.push(blit);
+
+                passes[child_pass_index as usize + 1].tasks.push(blit_id);
+
+                self.tasks[task_index].children[nth_child] = blit_id;
+                task_redirects[child_task_index] = Some(blit_id);
+            }
+        }
+    }
+
     pub fn get_task_address(&self, id: RenderTaskId) -> RenderTaskAddress {
         #[cfg(debug_assertions)]
         debug_assert_eq!(self.frame_id, id.frame_id);
         RenderTaskAddress(id.index)
     }
 
     pub fn write_task_data(&mut self) {
         for task in &self.tasks {
--- a/gfx/wr/webrender/src/tiling.rs
+++ b/gfx/wr/webrender/src/tiling.rs
@@ -559,18 +559,22 @@ impl RenderTarget for ColorRenderTarget 
                             source: BlitJobSource::Texture(
                                 cache_item.texture_id,
                                 cache_item.texture_layer,
                                 source_rect,
                             ),
                             target_rect: target_rect.inner_rect(task_info.padding)
                         });
                     }
-                    BlitSource::RenderTask { .. } => {
-                        panic!("BUG: render task blit jobs to render tasks not supported");
+                    BlitSource::RenderTask { task_id } => {
+                        let (target_rect, _) = task.get_target_rect();
+                        self.blits.push(BlitJob {
+                            source: BlitJobSource::RenderTask(task_id),
+                            target_rect: target_rect.inner_rect(task_info.padding)
+                        });
                     }
                 }
             }
         }
     }
 
     fn must_be_drawn(&self) -> bool {
         self.alpha_batch_containers.iter().any(|ab| {