From eb9db211137e8ea5fe1a1bdd5fc61a6a6e6b5e29 Mon Sep 17 00:00:00 2001
From: Roman Salnikov
Date: Tue, 16 Jan 2024 01:39:10 +0100
Subject: [PATCH] Camera-driven UI (#10559)

# Objective

Add support for presenting each UI tree on a specific window and viewport, while making as few breaking changes as possible.

This PR is meant to resolve the following issues at once, since they are all related:

- Fixes #5622
- Fixes #5570
- Fixes #5621

Adopted #5892, but started over since the current codebase has diverged significantly from the original PR branch. Also, I decided to propagate the component to children instead of recursively iterating over nodes in search of the root.

## Solution

Add a new optional component that can be inserted on UI root nodes and propagates to their children, specifying which camera the tree should render to. This is then used to get the render target and the viewport for that UI tree. Since the component is optional, the default behavior is to render onto the single camera (if exactly one exists) and to warn about ambiguity when multiple cameras exist. This keeps things simple for users with just one camera, while giving control in contexts where it matters.

## Changelog

- Adds the `TargetCamera(Entity)` component to specify which camera a node tree should be rendered to. If only one camera exists, this component is optional.
- Adds an example of rendering UI to a texture and using it as a material in a 3D world.
- Fixes recalculation of the physical viewport size when the target scale factor changes. This can happen when the window is moved between displays with different DPIs.
- Changes examples to demonstrate assigning UI to different viewports and windows, and to make interactions in an offset viewport testable.
- Removes `UiCameraConfig`. UI visibility can now be controlled via a combination of an explicit `TargetCamera` and `Visibility` on the root nodes.
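For illustration, a root node can be pinned to a particular camera roughly like this. This is a minimal sketch rather than code from this patch: the `setup` system, the `Camera2dBundle` camera, and the full-screen node are made up for the example, and it assumes `TargetCamera` is re-exported through `bevy::prelude`:

```rust
use bevy::prelude::*;

fn setup(mut commands: Commands) {
    // Spawn the camera that this UI tree should be presented on.
    let camera = commands.spawn(Camera2dBundle::default()).id();

    // A UI root node. Without `TargetCamera`, the tree falls back to the
    // default UI camera (the single camera, if only one exists).
    commands.spawn((
        NodeBundle {
            style: Style {
                width: Val::Percent(100.),
                height: Val::Percent(100.),
                ..default()
            },
            ..default()
        },
        // Explicit target: children of this root inherit the same camera.
        TargetCamera(camera),
    ));
}

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_systems(Startup, setup)
        .run();
}
```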
---------

Co-authored-by: davier
Co-authored-by: Alice Cecile
Co-authored-by: Alice Cecile
---
 Cargo.toml                               |  11 ++
 crates/bevy_render/src/camera/camera.rs  |  49 ++++-
 crates/bevy_ui/src/camera_config.rs      |  30 ---
 crates/bevy_ui/src/focus.rs              | 138 +++++++-------
 crates/bevy_ui/src/layout/debug.rs       |   4 +-
 crates/bevy_ui/src/layout/mod.rs         | 225 +++++++++++++++--------
 crates/bevy_ui/src/lib.rs                |  28 +--
 crates/bevy_ui/src/render/mod.rs         | 152 +++++++++------
 crates/bevy_ui/src/render/render_pass.rs |  15 +-
 crates/bevy_ui/src/ui_node.rs            |  46 ++++-
 crates/bevy_ui/src/update.rs             |  86 ++++++++-
 examples/3d/split_screen.rs              | 173 +++++++++++++++--
 examples/README.md                       |   1 +
 examples/ui/relative_cursor_position.rs  |  17 +-
 examples/ui/render_ui_to_texture.rs      | 145 +++++++++++++++
 examples/window/multiple_windows.rs      |  48 +++--
 16 files changed, 872 insertions(+), 296 deletions(-)
 delete mode 100644 crates/bevy_ui/src/camera_config.rs
 create mode 100644 examples/ui/render_ui_to_texture.rs

diff --git a/Cargo.toml b/Cargo.toml
index c10d0c7acf7cb..c3f5d205408c8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2249,6 +2249,17 @@ description = "Showcases the RelativeCursorPosition component"
 category = "UI (User Interface)"
 wasm = true
 
+[[example]]
+name = "render_ui_to_texture"
+path = "examples/ui/render_ui_to_texture.rs"
+doc-scrape-examples = true
+
+[package.metadata.example.render_ui_to_texture]
+name = "Render UI to Texture"
+description = "An example of rendering UI as a part of a 3D world"
+category = "UI (User Interface)"
+wasm = true
+
 [[example]]
 name = "size_constraints"
 path = "examples/ui/size_constraints.rs"
diff --git a/crates/bevy_render/src/camera/camera.rs b/crates/bevy_render/src/camera/camera.rs
index 389a575f2d674..d31e7113406ed 100644
--- a/crates/bevy_render/src/camera/camera.rs
+++ b/crates/bevy_render/src/camera/camera.rs
@@ -28,6 +28,7 @@ use bevy_transform::components::GlobalTransform;
 use bevy_utils::{HashMap, HashSet};
 use bevy_window::{
     NormalizedWindowRef, PrimaryWindow, Window, WindowCreated, WindowRef, WindowResized,
+    WindowScaleFactorChanged,
 };
 use std::{borrow::Cow, ops::Range};
 use wgpu::{BlendState, LoadOp, TextureFormat};
@@ -79,7 +80,7 @@ pub struct RenderTargetInfo {
 pub struct ComputedCameraValues {
     projection_matrix: Mat4,
     target_info: Option<RenderTargetInfo>,
-    // position and size of the `Viewport`
+    // size of the `Viewport`
     old_viewport_size: Option<UVec2>,
 }
 
@@ -229,6 +230,11 @@ impl Camera {
         self.computed.target_info.as_ref().map(|t| t.physical_size)
     }
 
+    #[inline]
+    pub fn target_scaling_factor(&self) -> Option<f32> {
+        self.computed.target_info.as_ref().map(|t| t.scale_factor)
+    }
+
     /// The projection matrix computed using this camera's [`CameraProjection`].
     #[inline]
     pub fn projection_matrix(&self) -> Mat4 {
@@ -575,9 +581,9 @@ impl NormalizedRenderTarget {
 /// System in charge of updating a [`Camera`] when its window or projection changes.
 ///
-/// The system detects window creation and resize events to update the camera projection if
-/// needed. It also queries any [`CameraProjection`] component associated with the same entity
-/// as the [`Camera`] one, to automatically update the camera projection matrix.
+/// The system detects window creation, resize, and scale factor change events to update the camera
+/// projection if needed. It also queries any [`CameraProjection`] component associated with the same
+/// entity as the [`Camera`] one, to automatically update the camera projection matrix.
/// /// The system function is generic over the camera projection type, and only instances of /// [`OrthographicProjection`] and [`PerspectiveProjection`] are automatically added to @@ -595,6 +601,7 @@ impl NormalizedRenderTarget { pub fn camera_system( mut window_resized_events: EventReader, mut window_created_events: EventReader, + mut window_scale_factor_changed_events: EventReader, mut image_asset_events: EventReader>, primary_window: Query>, windows: Query<(Entity, &Window)>, @@ -607,6 +614,11 @@ pub fn camera_system( let mut changed_window_ids = HashSet::new(); changed_window_ids.extend(window_created_events.read().map(|event| event.window)); changed_window_ids.extend(window_resized_events.read().map(|event| event.window)); + let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_events + .read() + .map(|event| event.window) + .collect(); + changed_window_ids.extend(scale_factor_changed_window_ids.clone()); let changed_image_handles: HashSet<&AssetId> = image_asset_events .read() @@ -617,7 +629,7 @@ pub fn camera_system( .collect(); for (mut camera, mut camera_projection) in &mut cameras { - let viewport_size = camera + let mut viewport_size = camera .viewport .as_ref() .map(|viewport| viewport.physical_size); @@ -628,11 +640,36 @@ pub fn camera_system( || camera_projection.is_changed() || camera.computed.old_viewport_size != viewport_size { - camera.computed.target_info = normalized_target.get_render_target_info( + let new_computed_target_info = normalized_target.get_render_target_info( &windows, &images, &manual_texture_views, ); + // Check for the scale factor changing, and resize the viewport if needed. + // This can happen when the window is moved between monitors with different DPIs. + // Without this, the viewport will take a smaller portion of the window moved to + // a higher DPI monitor. + if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::new()) { + if let (Some(new_scale_factor), Some(old_scale_factor)) = ( + new_computed_target_info + .as_ref() + .map(|info| info.scale_factor), + camera + .computed + .target_info + .as_ref() + .map(|info| info.scale_factor), + ) { + let resize_factor = new_scale_factor / old_scale_factor; + if let Some(ref mut viewport) = camera.viewport { + let resize = |vec: UVec2| (vec.as_vec2() * resize_factor).as_uvec2(); + viewport.physical_position = resize(viewport.physical_position); + viewport.physical_size = resize(viewport.physical_size); + viewport_size = Some(viewport.physical_size); + } + } + } + camera.computed.target_info = new_computed_target_info; if let Some(size) = camera.logical_viewport_size() { camera_projection.update(size.x, size.y); camera.computed.projection_matrix = camera_projection.get_projection_matrix(); diff --git a/crates/bevy_ui/src/camera_config.rs b/crates/bevy_ui/src/camera_config.rs deleted file mode 100644 index 857bd57ebec7c..0000000000000 --- a/crates/bevy_ui/src/camera_config.rs +++ /dev/null @@ -1,30 +0,0 @@ -//! Configuration for cameras related to UI. - -use bevy_ecs::component::Component; -use bevy_ecs::prelude::With; -use bevy_ecs::reflect::ReflectComponent; -use bevy_reflect::{std_traits::ReflectDefault, Reflect}; -use bevy_render::camera::Camera; -use bevy_render::extract_component::ExtractComponent; - -/// Configuration for cameras related to UI. -/// -/// When a [`Camera`] doesn't have the [`UiCameraConfig`] component, -/// it will display the UI by default. 
-/// -#[derive(Component, Clone, ExtractComponent, Reflect)] -#[extract_component_filter(With)] -#[reflect(Component, Default)] -pub struct UiCameraConfig { - /// Whether to output UI to this camera view. - /// - /// When a [`Camera`] doesn't have the [`UiCameraConfig`] component, - /// it will display the UI by default. - pub show_ui: bool, -} - -impl Default for UiCameraConfig { - fn default() -> Self { - Self { show_ui: true } - } -} diff --git a/crates/bevy_ui/src/focus.rs b/crates/bevy_ui/src/focus.rs index b2f195c92fbbc..c89e291292333 100644 --- a/crates/bevy_ui/src/focus.rs +++ b/crates/bevy_ui/src/focus.rs @@ -1,4 +1,4 @@ -use crate::{camera_config::UiCameraConfig, CalculatedClip, Node, UiScale, UiStack}; +use crate::{CalculatedClip, DefaultUiCamera, Node, TargetCamera, UiScale, UiStack}; use bevy_ecs::{ change_detection::DetectChangesMut, entity::Entity, @@ -13,7 +13,7 @@ use bevy_reflect::{std_traits::ReflectDefault, Reflect}; use bevy_render::{camera::NormalizedRenderTarget, prelude::Camera, view::ViewVisibility}; use bevy_transform::components::GlobalTransform; -use bevy_utils::smallvec::SmallVec; +use bevy_utils::{smallvec::SmallVec, HashMap}; use bevy_window::{PrimaryWindow, Window}; #[cfg(feature = "serialize")] @@ -133,6 +133,7 @@ pub struct NodeQuery { focus_policy: Option<&'static FocusPolicy>, calculated_clip: Option<&'static CalculatedClip>, view_visibility: Option<&'static ViewVisibility>, + target_camera: Option<&'static TargetCamera>, } /// The system that sets Interaction for all UI elements based on the mouse cursor activity @@ -141,14 +142,15 @@ pub struct NodeQuery { #[allow(clippy::too_many_arguments)] pub fn ui_focus_system( mut state: Local, - camera: Query<(&Camera, Option<&UiCameraConfig>)>, + camera_query: Query<(Entity, &Camera)>, + default_ui_camera: DefaultUiCamera, + primary_window: Query>, windows: Query<&Window>, mouse_button_input: Res>, touches_input: Res, ui_scale: Res, ui_stack: Res, mut node_query: Query, - primary_window: Query>, ) { let primary_window = primary_window.iter().next(); @@ -174,31 +176,31 @@ pub fn ui_focus_system( let mouse_clicked = mouse_button_input.just_pressed(MouseButton::Left) || touches_input.any_just_pressed(); - let is_ui_disabled = - |camera_ui| matches!(camera_ui, Some(&UiCameraConfig { show_ui: false, .. })); - - let cursor_position = camera + let camera_cursor_positions: HashMap = camera_query .iter() - .filter(|(_, camera_ui)| !is_ui_disabled(*camera_ui)) - .filter_map(|(camera, _)| { - if let Some(NormalizedRenderTarget::Window(window_ref)) = + .filter_map(|(entity, camera)| { + // Interactions are only supported for cameras rendering to a window. + let Some(NormalizedRenderTarget::Window(window_ref)) = camera.target.normalize(primary_window) - { - Some(window_ref) - } else { - None - } - }) - .find_map(|window_ref| { + else { + return None; + }; + + let viewport_position = camera + .logical_viewport_rect() + .map(|rect| rect.min) + .unwrap_or_default(); windows .get(window_ref.entity()) .ok() .and_then(|window| window.cursor_position()) + .or_else(|| touches_input.first_pressed_position()) + .map(|cursor_position| (entity, cursor_position - viewport_position)) }) - .or_else(|| touches_input.first_pressed_position()) // The cursor position returned by `Window` only takes into account the window scale factor and not `UiScale`. // To convert the cursor position to logical UI viewport coordinates we have to divide it by `UiScale`. 
- .map(|cursor_position| cursor_position / ui_scale.0); + .map(|(entity, cursor_position)| (entity, cursor_position / ui_scale.0)) + .collect(); // prepare an iterator that contains all the nodes that have the cursor in their rect, // from the top node to the bottom one. this will also reset the interaction to `None` @@ -209,61 +211,69 @@ pub fn ui_focus_system( // reverse the iterator to traverse the tree from closest nodes to furthest .rev() .filter_map(|entity| { - if let Ok(node) = node_query.get_mut(*entity) { - // Nodes that are not rendered should not be interactable - if let Some(view_visibility) = node.view_visibility { - if !view_visibility.get() { - // Reset their interaction to None to avoid strange stuck state - if let Some(mut interaction) = node.interaction { - // We cannot simply set the interaction to None, as that will trigger change detection repeatedly - interaction.set_if_neq(Interaction::None); - } + let Ok(node) = node_query.get_mut(*entity) else { + return None; + }; - return None; - } + let Some(view_visibility) = node.view_visibility else { + return None; + }; + // Nodes that are not rendered should not be interactable + if !view_visibility.get() { + // Reset their interaction to None to avoid strange stuck state + if let Some(mut interaction) = node.interaction { + // We cannot simply set the interaction to None, as that will trigger change detection repeatedly + interaction.set_if_neq(Interaction::None); } + return None; + } + let Some(camera_entity) = node + .target_camera + .map(TargetCamera::entity) + .or(default_ui_camera.get()) + else { + return None; + }; - let node_rect = node.node.logical_rect(node.global_transform); + let node_rect = node.node.logical_rect(node.global_transform); - // Intersect with the calculated clip rect to find the bounds of the visible region of the node - let visible_rect = node - .calculated_clip - .map(|clip| node_rect.intersect(clip.clip)) - .unwrap_or(node_rect); + // Intersect with the calculated clip rect to find the bounds of the visible region of the node + let visible_rect = node + .calculated_clip + .map(|clip| node_rect.intersect(clip.clip)) + .unwrap_or(node_rect); - // The mouse position relative to the node - // (0., 0.) is the top-left corner, (1., 1.) is the bottom-right corner - // Coordinates are relative to the entire node, not just the visible region. - let relative_cursor_position = cursor_position - .map(|cursor_position| (cursor_position - node_rect.min) / node_rect.size()); + // The mouse position relative to the node + // (0., 0.) is the top-left corner, (1., 1.) is the bottom-right corner + // Coordinates are relative to the entire node, not just the visible region. 
+ let relative_cursor_position = camera_cursor_positions + .get(&camera_entity) + .map(|cursor_position| (*cursor_position - node_rect.min) / node_rect.size()); - // If the current cursor position is within the bounds of the node's visible area, consider it for - // clicking - let relative_cursor_position_component = RelativeCursorPosition { - normalized_visible_node_rect: visible_rect.normalize(node_rect), - normalized: relative_cursor_position, - }; + // If the current cursor position is within the bounds of the node's visible area, consider it for + // clicking + let relative_cursor_position_component = RelativeCursorPosition { + normalized_visible_node_rect: visible_rect.normalize(node_rect), + normalized: relative_cursor_position, + }; - let contains_cursor = relative_cursor_position_component.mouse_over(); + let contains_cursor = relative_cursor_position_component.mouse_over(); - // Save the relative cursor position to the correct component - if let Some(mut node_relative_cursor_position_component) = - node.relative_cursor_position - { - *node_relative_cursor_position_component = relative_cursor_position_component; - } + // Save the relative cursor position to the correct component + if let Some(mut node_relative_cursor_position_component) = node.relative_cursor_position + { + *node_relative_cursor_position_component = relative_cursor_position_component; + } - if contains_cursor { - Some(*entity) - } else { - if let Some(mut interaction) = node.interaction { - if *interaction == Interaction::Hovered || (cursor_position.is_none()) { - interaction.set_if_neq(Interaction::None); - } + if contains_cursor { + Some(*entity) + } else { + if let Some(mut interaction) = node.interaction { + if *interaction == Interaction::Hovered || (relative_cursor_position.is_none()) + { + interaction.set_if_neq(Interaction::None); } - None } - } else { None } }) diff --git a/crates/bevy_ui/src/layout/debug.rs b/crates/bevy_ui/src/layout/debug.rs index c47b2ca8e802b..37fb5c2b88845 100644 --- a/crates/bevy_ui/src/layout/debug.rs +++ b/crates/bevy_ui/src/layout/debug.rs @@ -12,7 +12,7 @@ pub fn print_ui_layout_tree(ui_surface: &UiSurface) { .iter() .map(|(entity, node)| (*node, *entity)) .collect(); - for (&entity, roots) in &ui_surface.window_roots { + for (&entity, roots) in &ui_surface.camera_roots { let mut out = String::new(); for root in roots { print_node( @@ -25,7 +25,7 @@ pub fn print_ui_layout_tree(ui_surface: &UiSurface) { &mut out, ); } - bevy_log::info!("Layout tree for window entity: {entity:?}\n{out}"); + bevy_log::info!("Layout tree for camera entity: {entity:?}\n{out}"); } } diff --git a/crates/bevy_ui/src/layout/mod.rs b/crates/bevy_ui/src/layout/mod.rs index 806896b4d6428..a089ec7418f4c 100644 --- a/crates/bevy_ui/src/layout/mod.rs +++ b/crates/bevy_ui/src/layout/mod.rs @@ -1,7 +1,7 @@ mod convert; pub mod debug; -use crate::{ContentSize, Node, Outline, Style, UiScale}; +use crate::{ContentSize, DefaultUiCamera, Node, Outline, Style, TargetCamera, UiScale}; use bevy_ecs::{ change_detection::{DetectChanges, DetectChangesMut}, entity::Entity, @@ -13,10 +13,11 @@ use bevy_ecs::{ }; use bevy_hierarchy::{Children, Parent}; use bevy_log::warn; -use bevy_math::Vec2; +use bevy_math::{UVec2, Vec2}; +use bevy_render::camera::{Camera, NormalizedRenderTarget}; use bevy_transform::components::Transform; -use bevy_utils::{default, EntityHashMap}; -use bevy_window::{PrimaryWindow, Window, WindowResolution, WindowScaleFactorChanged}; +use bevy_utils::{default, EntityHashMap, HashMap, HashSet}; +use 
bevy_window::{PrimaryWindow, Window, WindowScaleFactorChanged}; use std::fmt; use taffy::Taffy; use thiserror::Error; @@ -51,7 +52,7 @@ struct RootNodePair { #[derive(Resource)] pub struct UiSurface { entity_to_taffy: EntityHashMap, - window_roots: EntityHashMap>, + camera_roots: EntityHashMap>, taffy: Taffy, } @@ -66,7 +67,7 @@ impl fmt::Debug for UiSurface { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("UiSurface") .field("entity_to_taffy", &self.entity_to_taffy) - .field("window_nodes", &self.window_roots) + .field("camera_roots", &self.camera_roots) .finish() } } @@ -77,7 +78,7 @@ impl Default for UiSurface { taffy.disable_rounding(); Self { entity_to_taffy: Default::default(), - window_roots: Default::default(), + camera_roots: Default::default(), taffy, } } @@ -142,9 +143,9 @@ without UI components as a child of an entity with UI components, results may be } /// Set the ui node entities without a [`Parent`] as children to the root node in the taffy layout. - pub fn set_window_children( + pub fn set_camera_children( &mut self, - window_id: Entity, + camera_id: Entity, children: impl Iterator, ) { let viewport_style = taffy::style::Style { @@ -160,7 +161,7 @@ without UI components as a child of an entity with UI components, results may be ..default() }; - let existing_roots = self.window_roots.entry(window_id).or_default(); + let existing_roots = self.camera_roots.entry(camera_id).or_default(); let mut new_roots = Vec::new(); for entity in children { let node = *self.entity_to_taffy.get(&entity).unwrap(); @@ -185,18 +186,20 @@ without UI components as a child of an entity with UI components, results may be } } - self.window_roots.insert(window_id, new_roots); + self.camera_roots.insert(camera_id, new_roots); } /// Compute the layout for each window entity's corresponding root node in the layout. - pub fn compute_window_layout(&mut self, window: Entity, window_resolution: &WindowResolution) { + pub fn compute_camera_layout(&mut self, camera: Entity, render_target_resolution: UVec2) { + let Some(camera_root_nodes) = self.camera_roots.get(&camera) else { + return; + }; + let available_space = taffy::geometry::Size { - width: taffy::style::AvailableSpace::Definite(window_resolution.physical_width() as f32), - height: taffy::style::AvailableSpace::Definite( - window_resolution.physical_height() as f32 - ), + width: taffy::style::AvailableSpace::Definite(render_target_resolution.x as f32), + height: taffy::style::AvailableSpace::Definite(render_target_resolution.y as f32), }; - for root_nodes in self.window_roots.entry(window).or_default() { + for root_nodes in camera_root_nodes { self.taffy .compute_layout(root_nodes.implicit_viewport_node, available_space) .unwrap(); @@ -241,64 +244,109 @@ pub enum LayoutError { #[allow(clippy::too_many_arguments)] pub fn ui_layout_system( primary_window: Query<(Entity, &Window), With>, - windows: Query<(Entity, &Window)>, + cameras: Query<(Entity, &Camera)>, + default_ui_camera: DefaultUiCamera, ui_scale: Res, mut scale_factor_events: EventReader, mut resize_events: EventReader, mut ui_surface: ResMut, - root_node_query: Query, Without)>, - style_query: Query<(Entity, Ref