repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/shadow_caster_receiver.rs
examples/3d/shadow_caster_receiver.rs
//! Demonstrates how to prevent meshes from casting/receiving shadows in a 3d scene. use std::f32::consts::PI; use bevy::{ color::palettes::basic::{BLUE, LIME, RED}, light::{CascadeShadowConfigBuilder, NotShadowCaster, NotShadowReceiver}, prelude::*, }; fn main() { println!( "Controls: C - toggle shadow casters (i.e. casters become not, and not casters become casters) R - toggle shadow receivers (i.e. receivers become not, and not receivers become receivers) L - switch between directional and point lights" ); App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (toggle_light, toggle_shadows)) .run(); } /// set up a 3D scene to test shadow biases and perspective projections fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let spawn_plane_depth = 500.0f32; let spawn_height = 2.0; let sphere_radius = 0.25; let white_handle = materials.add(StandardMaterial { base_color: Color::WHITE, perceptual_roughness: 1.0, ..default() }); let sphere_handle = meshes.add(Sphere::new(sphere_radius)); // sphere - initially a caster commands.spawn(( Mesh3d(sphere_handle.clone()), MeshMaterial3d(materials.add(Color::from(RED))), Transform::from_xyz(-1.0, spawn_height, 0.0), )); // sphere - initially not a caster commands.spawn(( Mesh3d(sphere_handle), MeshMaterial3d(materials.add(Color::from(BLUE))), Transform::from_xyz(1.0, spawn_height, 0.0), NotShadowCaster, )); // floating plane - initially not a shadow receiver and not a caster commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(20.0, 20.0))), MeshMaterial3d(materials.add(Color::from(LIME))), Transform::from_xyz(0.0, 1.0, -10.0), NotShadowCaster, NotShadowReceiver, )); // lower ground plane - initially a shadow receiver commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(20.0, 20.0))), MeshMaterial3d(white_handle), )); println!("Using DirectionalLight"); commands.spawn(( PointLight { intensity: 
0.0, range: spawn_plane_depth, color: Color::WHITE, shadows_enabled: true, ..default() }, Transform::from_xyz(5.0, 5.0, 0.0), )); commands.spawn(( DirectionalLight { illuminance: light_consts::lux::OVERCAST_DAY, shadows_enabled: true, ..default() }, Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI / 2., -PI / 4.)), CascadeShadowConfigBuilder { first_cascade_far_bound: 7.0, maximum_distance: 25.0, ..default() } .build(), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-5.0, 5.0, 5.0).looking_at(Vec3::new(-1.0, 1.0, 0.0), Vec3::Y), )); } fn toggle_light( input: Res<ButtonInput<KeyCode>>, mut point_lights: Query<&mut PointLight>, mut directional_lights: Query<&mut DirectionalLight>, ) { if input.just_pressed(KeyCode::KeyL) { for mut light in &mut point_lights { light.intensity = if light.intensity == 0.0 { println!("Using PointLight"); 1_000_000.0 // Mini-sun point light } else { 0.0 }; } for mut light in &mut directional_lights { light.illuminance = if light.illuminance == 0.0 { println!("Using DirectionalLight"); light_consts::lux::OVERCAST_DAY } else { 0.0 }; } } } fn toggle_shadows( mut commands: Commands, input: Res<ButtonInput<KeyCode>>, mut queries: ParamSet<( Query<Entity, (With<Mesh3d>, With<NotShadowCaster>)>, Query<Entity, (With<Mesh3d>, With<NotShadowReceiver>)>, Query<Entity, (With<Mesh3d>, Without<NotShadowCaster>)>, Query<Entity, (With<Mesh3d>, Without<NotShadowReceiver>)>, )>, ) { if input.just_pressed(KeyCode::KeyC) { println!("Toggling casters"); for entity in queries.p0().iter() { commands.entity(entity).remove::<NotShadowCaster>(); } for entity in queries.p2().iter() { commands.entity(entity).insert(NotShadowCaster); } } if input.just_pressed(KeyCode::KeyR) { println!("Toggling receivers"); for entity in queries.p1().iter() { commands.entity(entity).remove::<NotShadowReceiver>(); } for entity in queries.p3().iter() { commands.entity(entity).insert(NotShadowReceiver); } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/lines.rs
examples/3d/lines.rs
//! Create a custom material to draw basic lines in 3D use bevy::{ asset::RenderAssetUsages, mesh::{MeshVertexBufferLayoutRef, PrimitiveTopology}, pbr::{MaterialPipeline, MaterialPipelineKey}, prelude::*, reflect::TypePath, render::render_resource::{ AsBindGroup, PolygonMode, RenderPipelineDescriptor, SpecializedMeshPipelineError, }, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/line_material.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<LineMaterial>::default())) .add_systems(Startup, setup) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<LineMaterial>>, ) { // Spawn a list of lines with start and end points for each lines commands.spawn(( Mesh3d(meshes.add(LineList { lines: vec![ (Vec3::ZERO, Vec3::new(1.0, 1.0, 0.0)), (Vec3::new(1.0, 1.0, 0.0), Vec3::new(1.0, 0.0, 0.0)), ], })), MeshMaterial3d(materials.add(LineMaterial { color: LinearRgba::GREEN, })), Transform::from_xyz(-1.5, 0.0, 0.0), )); // Spawn a line strip that goes from point to point commands.spawn(( Mesh3d(meshes.add(LineStrip { points: vec![ Vec3::ZERO, Vec3::new(1.0, 1.0, 0.0), Vec3::new(1.0, 0.0, 0.0), ], })), MeshMaterial3d(materials.add(LineMaterial { color: LinearRgba::BLUE, })), Transform::from_xyz(0.5, 0.0, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } #[derive(Asset, TypePath, Default, AsBindGroup, Debug, Clone)] struct LineMaterial { #[uniform(0)] color: LinearRgba, } impl Material for LineMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn specialize( _pipeline: &MaterialPipeline, descriptor: &mut RenderPipelineDescriptor, _layout: &MeshVertexBufferLayoutRef, _key: MaterialPipelineKey<Self>, ) -> Result<(), SpecializedMeshPipelineError> { // This is the important part to tell bevy to render this material 
as a line between vertices descriptor.primitive.polygon_mode = PolygonMode::Line; Ok(()) } } /// A list of lines with a start and end position #[derive(Debug, Clone)] struct LineList { lines: Vec<(Vec3, Vec3)>, } impl From<LineList> for Mesh { fn from(line: LineList) -> Self { let vertices: Vec<_> = line.lines.into_iter().flat_map(|(a, b)| [a, b]).collect(); Mesh::new( // This tells wgpu that the positions are list of lines // where every pair is a start and end point PrimitiveTopology::LineList, RenderAssetUsages::RENDER_WORLD, ) // Add the vertices positions as an attribute .with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, vertices) } } /// A list of points that will have a line drawn between each consecutive points #[derive(Debug, Clone)] struct LineStrip { points: Vec<Vec3>, } impl From<LineStrip> for Mesh { fn from(line: LineStrip) -> Self { Mesh::new( // This tells wgpu that the positions are a list of points // where a line will be drawn between each consecutive point PrimitiveTopology::LineStrip, RenderAssetUsages::RENDER_WORLD, ) // Add the point positions as an attribute .with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, line.points) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/clustered_decals.rs
examples/3d/clustered_decals.rs
//! Demonstrates clustered decals, which affix decals to surfaces. use std::f32::consts::{FRAC_PI_3, PI}; use std::fmt::{self, Formatter}; use bevy::{ color::palettes::css::{LIME, ORANGE_RED, SILVER}, input::mouse::AccumulatedMouseMotion, light::ClusteredDecal, pbr::{decal, ExtendedMaterial, MaterialExtension}, prelude::*, render::{ render_resource::AsBindGroup, renderer::{RenderAdapter, RenderDevice}, }, shader::ShaderRef, window::{CursorIcon, SystemCursorIcon}, }; use ops::{acos, cos, sin}; use widgets::{ WidgetClickEvent, WidgetClickSender, BUTTON_BORDER, BUTTON_BORDER_COLOR, BUTTON_BORDER_RADIUS_SIZE, BUTTON_PADDING, }; #[path = "../helpers/widgets.rs"] mod widgets; /// The custom material shader that we use to demonstrate how to use the decal /// `tag` field. const SHADER_ASSET_PATH: &str = "shaders/custom_clustered_decal.wgsl"; /// The speed at which the cube rotates, in radians per frame. const CUBE_ROTATION_SPEED: f32 = 0.02; /// The speed at which the selection can be moved, in spherical coordinate /// radians per mouse unit. const MOVE_SPEED: f32 = 0.008; /// The speed at which the selection can be scaled, in reciprocal mouse units. const SCALE_SPEED: f32 = 0.05; /// The speed at which the selection can be scaled, in radians per mouse unit. const ROLL_SPEED: f32 = 0.01; /// Various settings for the demo. #[derive(Resource, Default)] struct AppStatus { /// The object that will be moved, scaled, or rotated when the mouse is /// dragged. selection: Selection, /// What happens when the mouse is dragged: one of a move, rotate, or scale /// operation. drag_mode: DragMode, } /// The object that will be moved, scaled, or rotated when the mouse is dragged. #[derive(Clone, Copy, Component, Default, PartialEq)] enum Selection { /// The camera. /// /// The camera can only be moved, not scaled or rotated. #[default] Camera, /// The first decal, which an orange bounding box surrounds. DecalA, /// The second decal, which a lime green bounding box surrounds. 
DecalB, } impl fmt::Display for Selection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match *self { Selection::Camera => f.write_str("camera"), Selection::DecalA => f.write_str("decal A"), Selection::DecalB => f.write_str("decal B"), } } } /// What happens when the mouse is dragged: one of a move, rotate, or scale /// operation. #[derive(Clone, Copy, Component, Default, PartialEq, Debug)] enum DragMode { /// The mouse moves the current selection. #[default] Move, /// The mouse scales the current selection. /// /// This only applies to decals, not cameras. Scale, /// The mouse rotates the current selection around its local Z axis. /// /// This only applies to decals, not cameras. Roll, } impl fmt::Display for DragMode { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match *self { DragMode::Move => f.write_str("move"), DragMode::Scale => f.write_str("scale"), DragMode::Roll => f.write_str("roll"), } } } /// A marker component for the help text in the top left corner of the window. #[derive(Clone, Copy, Component)] struct HelpText; /// A shader extension that demonstrates how to use the `tag` field to customize /// the appearance of your decals. #[derive(Asset, AsBindGroup, Reflect, Debug, Clone)] struct CustomDecalExtension {} impl MaterialExtension for CustomDecalExtension { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } } /// Entry point. 
fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "Bevy Clustered Decals Example".into(), ..default() }), ..default() })) .add_plugins(MaterialPlugin::< ExtendedMaterial<StandardMaterial, CustomDecalExtension>, >::default()) .init_resource::<AppStatus>() .add_message::<WidgetClickEvent<Selection>>() .add_systems(Startup, setup) .add_systems(Update, draw_gizmos) .add_systems(Update, rotate_cube) .add_systems(Update, widgets::handle_ui_interactions::<Selection>) .add_systems( Update, (handle_selection_change, update_radio_buttons) .after(widgets::handle_ui_interactions::<Selection>), ) .add_systems(Update, process_move_input) .add_systems(Update, process_scale_input) .add_systems(Update, process_roll_input) .add_systems(Update, switch_drag_mode) .add_systems(Update, update_help_text) .add_systems(Update, update_button_visibility) .run(); } /// Creates the scene. fn setup( mut commands: Commands, asset_server: Res<AssetServer>, app_status: Res<AppStatus>, render_device: Res<RenderDevice>, render_adapter: Res<RenderAdapter>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, CustomDecalExtension>>>, ) { // Error out if clustered decals aren't supported on the current platform. if !decal::clustered::clustered_decals_are_usable(&render_device, &render_adapter) { error!("Clustered decals aren't usable on this platform."); commands.write_message(AppExit::error()); } spawn_cube(&mut commands, &mut meshes, &mut materials); spawn_camera(&mut commands); spawn_light(&mut commands); spawn_decals(&mut commands, &asset_server); spawn_buttons(&mut commands); spawn_help_text(&mut commands, &app_status); } /// Spawns the cube onto which the decals are projected. fn spawn_cube( commands: &mut Commands, meshes: &mut Assets<Mesh>, materials: &mut Assets<ExtendedMaterial<StandardMaterial, CustomDecalExtension>>, ) { // Rotate the cube a bit just to make it more interesting. 
let mut transform = Transform::IDENTITY; transform.rotate_y(FRAC_PI_3); commands.spawn(( Mesh3d(meshes.add(Cuboid::new(3.0, 3.0, 3.0))), MeshMaterial3d(materials.add(ExtendedMaterial { base: StandardMaterial { base_color: SILVER.into(), ..default() }, extension: CustomDecalExtension {}, })), transform, )); } /// Spawns the directional light. fn spawn_light(commands: &mut Commands) { commands.spawn(( DirectionalLight::default(), Transform::from_xyz(4.0, 8.0, 4.0).looking_at(Vec3::ZERO, Vec3::Y), )); } /// Spawns the camera. fn spawn_camera(commands: &mut Commands) { commands .spawn(Camera3d::default()) .insert(Transform::from_xyz(0.0, 2.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y)) // Tag the camera with `Selection::Camera`. .insert(Selection::Camera); } /// Spawns the actual clustered decals. fn spawn_decals(commands: &mut Commands, asset_server: &AssetServer) { let base_color_texture = asset_server.load("branding/icon.png"); commands.spawn(( ClusteredDecal { base_color_texture: Some(base_color_texture.clone()), // Tint with red. tag: 1, ..ClusteredDecal::default() }, calculate_initial_decal_transform(vec3(1.0, 3.0, 5.0), Vec3::ZERO, Vec2::splat(1.1)), Selection::DecalA, )); commands.spawn(( ClusteredDecal { base_color_texture: Some(base_color_texture.clone()), // Tint with blue. tag: 2, ..ClusteredDecal::default() }, calculate_initial_decal_transform(vec3(-2.0, -1.0, 4.0), Vec3::ZERO, Vec2::splat(2.0)), Selection::DecalB, )); } /// Spawns the buttons at the bottom of the screen. fn spawn_buttons(commands: &mut Commands) { // Spawn the radio buttons that allow the user to select an object to // control. commands.spawn(( widgets::main_ui_node(), children![widgets::option_buttons( "Drag to Move", &[ (Selection::Camera, "Camera"), (Selection::DecalA, "Decal A"), (Selection::DecalB, "Decal B"), ], )], )); // Spawn the drag buttons that allow the user to control the scale and roll // of the selected object. 
commands.spawn(( Node { flex_direction: FlexDirection::Row, position_type: PositionType::Absolute, right: px(10), bottom: px(10), column_gap: px(6), ..default() }, children![ (drag_button("Scale"), DragMode::Scale), (drag_button("Roll"), DragMode::Roll), ], )); } /// Spawns a button that the user can drag to change a parameter. fn drag_button(label: &str) -> impl Bundle { ( Node { border: BUTTON_BORDER, justify_content: JustifyContent::Center, align_items: AlignItems::Center, padding: BUTTON_PADDING, border_radius: BorderRadius::all(BUTTON_BORDER_RADIUS_SIZE), ..default() }, Button, BackgroundColor(Color::BLACK), BUTTON_BORDER_COLOR, children![widgets::ui_text(label, Color::WHITE)], ) } /// Spawns the help text at the top of the screen. fn spawn_help_text(commands: &mut Commands, app_status: &AppStatus) { commands.spawn(( Text::new(create_help_string(app_status)), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, HelpText, )); } /// Draws the outlines that show the bounds of the clustered decals. fn draw_gizmos( mut gizmos: Gizmos, decals: Query<(&GlobalTransform, &Selection), With<ClusteredDecal>>, ) { for (global_transform, selection) in &decals { let color = match *selection { Selection::Camera => continue, Selection::DecalA => ORANGE_RED, Selection::DecalB => LIME, }; gizmos.primitive_3d( &Cuboid { // Since the clustered decal is a 1×1×1 cube in model space, its // half-size is half of the scaling part of its transform. half_size: global_transform.scale() * 0.5, }, Isometry3d { rotation: global_transform.rotation(), translation: global_transform.translation_vec3a(), }, color, ); } } /// Calculates the initial transform of the clustered decal. 
fn calculate_initial_decal_transform(start: Vec3, looking_at: Vec3, size: Vec2) -> Transform { let direction = looking_at - start; let center = start + direction * 0.5; Transform::from_translation(center) .with_scale((size * 0.5).extend(direction.length())) .looking_to(direction, Vec3::Y) } /// Rotates the cube a bit every frame. fn rotate_cube(mut meshes: Query<&mut Transform, With<Mesh3d>>) { for mut transform in &mut meshes { transform.rotate_y(CUBE_ROTATION_SPEED); } } /// Updates the state of the radio buttons when the user clicks on one. fn update_radio_buttons( mut widgets: Query<( Entity, Option<&mut BackgroundColor>, Has<Text>, &WidgetClickSender<Selection>, )>, app_status: Res<AppStatus>, mut writer: TextUiWriter, ) { for (entity, maybe_bg_color, has_text, sender) in &mut widgets { let selected = app_status.selection == **sender; if let Some(mut bg_color) = maybe_bg_color { widgets::update_ui_radio_button(&mut bg_color, selected); } if has_text { widgets::update_ui_radio_button_text(entity, &mut writer, selected); } } } /// Changes the selection when the user clicks a radio button. fn handle_selection_change( mut events: MessageReader<WidgetClickEvent<Selection>>, mut app_status: ResMut<AppStatus>, ) { for event in events.read() { app_status.selection = **event; } } /// Process a drag event that moves the selected object. fn process_move_input( mut selections: Query<(&mut Transform, &Selection)>, mouse_buttons: Res<ButtonInput<MouseButton>>, mouse_motion: Res<AccumulatedMouseMotion>, app_status: Res<AppStatus>, ) { // Only process drags when movement is selected. if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Move { return; } for (mut transform, selection) in &mut selections { if app_status.selection != *selection { continue; } let position = transform.translation; // Convert to spherical coordinates. 
let radius = position.length(); let mut theta = acos(position.y / radius); let mut phi = position.z.signum() * acos(position.x * position.xz().length_recip()); // Camera movement is the inverse of object movement. let (phi_factor, theta_factor) = match *selection { Selection::Camera => (1.0, -1.0), Selection::DecalA | Selection::DecalB => (-1.0, 1.0), }; // Adjust the spherical coordinates. Clamp the inclination to (0, π). phi += phi_factor * mouse_motion.delta.x * MOVE_SPEED; theta = f32::clamp( theta + theta_factor * mouse_motion.delta.y * MOVE_SPEED, 0.001, PI - 0.001, ); // Convert spherical coordinates back to Cartesian coordinates. transform.translation = radius * vec3(sin(theta) * cos(phi), cos(theta), sin(theta) * sin(phi)); // Look at the center, but preserve the previous roll angle. let roll = transform.rotation.to_euler(EulerRot::YXZ).2; transform.look_at(Vec3::ZERO, Vec3::Y); let (yaw, pitch, _) = transform.rotation.to_euler(EulerRot::YXZ); transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll); } } /// Processes a drag event that scales the selected target. fn process_scale_input( mut selections: Query<(&mut Transform, &Selection)>, mouse_buttons: Res<ButtonInput<MouseButton>>, mouse_motion: Res<AccumulatedMouseMotion>, app_status: Res<AppStatus>, ) { // Only process drags when the scaling operation is selected. if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Scale { return; } for (mut transform, selection) in &mut selections { if app_status.selection == *selection { transform.scale *= 1.0 + mouse_motion.delta.x * SCALE_SPEED; } } } /// Processes a drag event that rotates the selected target along its local Z /// axis. fn process_roll_input( mut selections: Query<(&mut Transform, &Selection)>, mouse_buttons: Res<ButtonInput<MouseButton>>, mouse_motion: Res<AccumulatedMouseMotion>, app_status: Res<AppStatus>, ) { // Only process drags when the rolling operation is selected. 
if !mouse_buttons.pressed(MouseButton::Left) || app_status.drag_mode != DragMode::Roll { return; } for (mut transform, selection) in &mut selections { if app_status.selection != *selection { continue; } let (yaw, pitch, mut roll) = transform.rotation.to_euler(EulerRot::YXZ); roll += mouse_motion.delta.x * ROLL_SPEED; transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll); } } /// Creates the help string at the top left of the screen. fn create_help_string(app_status: &AppStatus) -> String { format!( "Click and drag to {} {}", app_status.drag_mode, app_status.selection ) } /// Changes the drag mode when the user hovers over the "Scale" and "Roll" /// buttons in the lower right. /// /// If the user is hovering over no such button, this system changes the drag /// mode back to its default value of [`DragMode::Move`]. fn switch_drag_mode( mut commands: Commands, mut interactions: Query<(&Interaction, &DragMode)>, mut windows: Query<Entity, With<Window>>, mouse_buttons: Res<ButtonInput<MouseButton>>, mut app_status: ResMut<AppStatus>, ) { if mouse_buttons.pressed(MouseButton::Left) { return; } for (interaction, drag_mode) in &mut interactions { if *interaction != Interaction::Hovered { continue; } app_status.drag_mode = *drag_mode; // Set the cursor to provide the user with a nice visual hint. for window in &mut windows { commands .entity(window) .insert(CursorIcon::from(SystemCursorIcon::EwResize)); } return; } app_status.drag_mode = DragMode::Move; for window in &mut windows { commands.entity(window).remove::<CursorIcon>(); } } /// Updates the help text in the top left of the screen to reflect the current /// selection and drag mode. fn update_help_text(mut help_text: Query<&mut Text, With<HelpText>>, app_status: Res<AppStatus>) { for mut text in &mut help_text { text.0 = create_help_string(&app_status); } } /// Updates the visibility of the drag mode buttons so that they aren't visible /// if the camera is selected. 
fn update_button_visibility( mut nodes: Query<&mut Visibility, With<DragMode>>, app_status: Res<AppStatus>, ) { for mut visibility in &mut nodes { *visibility = match app_status.selection { Selection::Camera => Visibility::Hidden, Selection::DecalA | Selection::DecalB => Visibility::Visible, }; } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/depth_of_field.rs
examples/3d/depth_of_field.rs
//! Demonstrates depth of field (DOF). //! //! The depth of field effect simulates the blur that a real camera produces on //! objects that are out of focus. //! //! The test scene is inspired by [a blog post on depth of field in Unity]. //! However, the technique used in Bevy has little to do with that blog post, //! and all the assets are original. //! //! [a blog post on depth of field in Unity]: https://catlikecoding.com/unity/tutorials/advanced-rendering/depth-of-field/ use bevy::{ camera::PhysicalCameraParameters, core_pipeline::tonemapping::Tonemapping, gltf::GltfMeshName, pbr::Lightmap, post_process::{ bloom::Bloom, dof::{self, DepthOfField, DepthOfFieldMode}, }, prelude::*, }; /// The increments in which the user can adjust the focal distance, in meters /// per frame. const FOCAL_DISTANCE_SPEED: f32 = 0.05; /// The increments in which the user can adjust the f-number, in units per frame. const APERTURE_F_STOP_SPEED: f32 = 0.01; /// The minimum distance that we allow the user to focus on. const MIN_FOCAL_DISTANCE: f32 = 0.01; /// The minimum f-number that we allow the user to set. const MIN_APERTURE_F_STOPS: f32 = 0.05; /// A resource that stores the settings that the user can change. #[derive(Clone, Copy, Resource)] struct AppSettings { /// The distance from the camera to the area in the most focus. focal_distance: f32, /// The [f-number]. Lower numbers cause objects outside the focal distance /// to be blurred more. /// /// [f-number]: https://en.wikipedia.org/wiki/F-number aperture_f_stops: f32, /// Whether depth of field is on, and, if so, whether we're in Gaussian or /// bokeh mode. 
mode: Option<DepthOfFieldMode>, } fn main() { App::new() .init_resource::<AppSettings>() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "Bevy Depth of Field Example".to_string(), ..default() }), ..default() })) .add_systems(Startup, setup) .add_systems(Update, tweak_scene) .add_systems( Update, (adjust_focus, change_mode, update_dof_settings, update_text).chain(), ) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>, app_settings: Res<AppSettings>) { // Spawn the camera. Enable HDR and bloom, as that highlights the depth of // field effect. let mut camera = commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 4.5, 8.25).looking_at(Vec3::ZERO, Vec3::Y), Tonemapping::TonyMcMapface, Bloom::NATURAL, )); // Insert the depth of field settings. if let Some(depth_of_field) = Option::<DepthOfField>::from(*app_settings) { camera.insert(depth_of_field); } // Spawn the scene. commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/DepthOfFieldExample/DepthOfFieldExample.glb"), ))); // Spawn the help text. commands.spawn(( create_text(&app_settings), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } /// Adjusts the focal distance and f-number per user inputs. fn adjust_focus(input: Res<ButtonInput<KeyCode>>, mut app_settings: ResMut<AppSettings>) { // Change the focal distance if the user requested. let distance_delta = if input.pressed(KeyCode::ArrowDown) { -FOCAL_DISTANCE_SPEED } else if input.pressed(KeyCode::ArrowUp) { FOCAL_DISTANCE_SPEED } else { 0.0 }; // Change the f-number if the user requested. 
let f_stop_delta = if input.pressed(KeyCode::ArrowLeft) { -APERTURE_F_STOP_SPEED } else if input.pressed(KeyCode::ArrowRight) { APERTURE_F_STOP_SPEED } else { 0.0 }; app_settings.focal_distance = (app_settings.focal_distance + distance_delta).max(MIN_FOCAL_DISTANCE); app_settings.aperture_f_stops = (app_settings.aperture_f_stops + f_stop_delta).max(MIN_APERTURE_F_STOPS); } /// Changes the depth of field mode (Gaussian, bokeh, off) per user inputs. fn change_mode(input: Res<ButtonInput<KeyCode>>, mut app_settings: ResMut<AppSettings>) { if !input.just_pressed(KeyCode::Space) { return; } app_settings.mode = match app_settings.mode { Some(DepthOfFieldMode::Bokeh) => Some(DepthOfFieldMode::Gaussian), Some(DepthOfFieldMode::Gaussian) => None, None => Some(DepthOfFieldMode::Bokeh), } } impl Default for AppSettings { fn default() -> Self { Self { // Objects 7 meters away will be in full focus. focal_distance: 7.0, // Set a nice blur level. // // This is a really low F-number, but we want to demonstrate the // effect, even if it's kind of unrealistic. aperture_f_stops: 1.0 / 8.0, // Turn on bokeh by default, as it's the nicest-looking technique. mode: Some(DepthOfFieldMode::Bokeh), } } } /// Writes the depth of field settings into the camera. fn update_dof_settings( mut commands: Commands, view_targets: Query<Entity, With<Camera>>, app_settings: Res<AppSettings>, ) { let depth_of_field: Option<DepthOfField> = (*app_settings).into(); for view in view_targets.iter() { match depth_of_field { None => { commands.entity(view).remove::<DepthOfField>(); } Some(depth_of_field) => { commands.entity(view).insert(depth_of_field); } } } } /// Makes one-time adjustments to the scene that can't be encoded in glTF. 
fn tweak_scene( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<StandardMaterial>>, mut lights: Query<&mut DirectionalLight, Changed<DirectionalLight>>, mut named_entities: Query< (Entity, &GltfMeshName, &MeshMaterial3d<StandardMaterial>), (With<Mesh3d>, Without<Lightmap>), >, ) { // Turn on shadows. for mut light in lights.iter_mut() { light.shadows_enabled = true; } // Add a nice lightmap to the circuit board. for (entity, name, material) in named_entities.iter_mut() { if &**name == "CircuitBoard" { materials.get_mut(material).unwrap().lightmap_exposure = 10000.0; commands.entity(entity).insert(Lightmap { image: asset_server.load("models/DepthOfFieldExample/CircuitBoardLightmap.hdr"), ..default() }); } } } /// Update the help text entity per the current app settings. fn update_text(mut texts: Query<&mut Text>, app_settings: Res<AppSettings>) { for mut text in texts.iter_mut() { *text = create_text(&app_settings); } } /// Regenerates the app text component per the current app settings. fn create_text(app_settings: &AppSettings) -> Text { app_settings.help_text().into() } impl From<AppSettings> for Option<DepthOfField> { fn from(app_settings: AppSettings) -> Self { app_settings.mode.map(|mode| DepthOfField { mode, focal_distance: app_settings.focal_distance, aperture_f_stops: app_settings.aperture_f_stops, max_depth: 14.0, ..default() }) } } impl AppSettings { /// Builds the help text. fn help_text(&self) -> String { let Some(mode) = self.mode else { return "Mode: Off (Press Space to change)".to_owned(); }; // We leave these as their defaults, so we don't need to store them in // the app settings and can just fetch them from the default camera // parameters. 
let sensor_height = PhysicalCameraParameters::default().sensor_height; let fov = PerspectiveProjection::default().fov; format!( "Focal distance: {:.2} m (Press Up/Down to change) Aperture F-stops: f/{:.3} (Press Left/Right to change) Sensor height: {:.2}mm Focal length: {:.2}mm Mode: {} (Press Space to change)", self.focal_distance, self.aperture_f_stops, sensor_height * 1000.0, dof::calculate_focal_length(sensor_height, fov) * 1000.0, match mode { DepthOfFieldMode::Bokeh => "Bokeh", DepthOfFieldMode::Gaussian => "Gaussian", } ) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/specular_tint.rs
examples/3d/specular_tint.rs
//! Demonstrates specular tints and maps. use std::f32::consts::PI; use bevy::{color::palettes::css::WHITE, core_pipeline::Skybox, prelude::*, render::view::Hdr}; /// The camera rotation speed in radians per frame. const ROTATION_SPEED: f32 = 0.005; /// The rate at which the specular tint hue changes in degrees per frame. const HUE_SHIFT_SPEED: f32 = 0.2; static SWITCH_TO_MAP_HELP_TEXT: &str = "Press Space to switch to a specular map"; static SWITCH_TO_SOLID_TINT_HELP_TEXT: &str = "Press Space to switch to a solid specular tint"; /// The current settings the user has chosen. #[derive(Resource, Default)] struct AppStatus { /// The type of tint (solid or texture map). tint_type: TintType, /// The hue of the solid tint in radians. hue: f32, } /// Assets needed by the demo. #[derive(Resource)] struct AppAssets { /// A color tileable 3D noise texture. noise_texture: Handle<Image>, } impl FromWorld for AppAssets { fn from_world(world: &mut World) -> Self { let asset_server = world.resource::<AssetServer>(); Self { noise_texture: asset_server.load("textures/AlphaNoise.png"), } } } /// The type of specular tint that the user has selected. #[derive(Clone, Copy, PartialEq, Default)] enum TintType { /// A solid color. #[default] Solid, /// A Perlin noise texture. Map, } /// The entry point. fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "Bevy Specular Tint Example".into(), ..default() }), ..default() })) .init_resource::<AppAssets>() .init_resource::<AppStatus>() .insert_resource(GlobalAmbientLight { color: Color::BLACK, brightness: 0.0, ..default() }) .add_systems(Startup, setup) .add_systems(Update, rotate_camera) .add_systems(Update, (toggle_specular_map, update_text).chain()) .add_systems(Update, shift_hue.after(toggle_specular_map)) .run(); } /// Creates the scene. 
fn setup( mut commands: Commands, asset_server: Res<AssetServer>, app_status: Res<AppStatus>, mut meshes: ResMut<Assets<Mesh>>, mut standard_materials: ResMut<Assets<StandardMaterial>>, ) { // Spawns a camera. commands.spawn(( Transform::from_xyz(-2.0, 0.0, 3.5).looking_at(Vec3::ZERO, Vec3::Y), Hdr, Camera3d::default(), Skybox { image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), brightness: 3000.0, ..default() }, EnvironmentMapLight { diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), // We want relatively high intensity here in order for the specular // tint to show up well. intensity: 25000.0, ..default() }, )); // Spawn the sphere. commands.spawn(( Transform::from_rotation(Quat::from_rotation_x(PI * 0.5)), Mesh3d(meshes.add(Sphere::default().mesh().uv(32, 18))), MeshMaterial3d(standard_materials.add(StandardMaterial { // We want only reflected specular light here, so we set the base // color as black. base_color: Color::BLACK, reflectance: 1.0, specular_tint: Color::hsva(app_status.hue, 1.0, 1.0, 1.0), // The object must not be metallic, or else the reflectance is // ignored per the Filament spec: // // <https://google.github.io/filament/Filament.html#listing_fnormal> metallic: 0.0, perceptual_roughness: 0.0, ..default() })), )); // Spawn the help text. commands.spawn(( Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, app_status.create_text(), )); } /// Rotates the camera a bit every frame. fn rotate_camera(mut cameras: Query<&mut Transform, With<Camera3d>>) { for mut camera_transform in cameras.iter_mut() { camera_transform.translation = Quat::from_rotation_y(ROTATION_SPEED) * camera_transform.translation; camera_transform.look_at(Vec3::ZERO, Vec3::Y); } } /// Alters the hue of the solid color a bit every frame. 
fn shift_hue( mut app_status: ResMut<AppStatus>, objects_with_materials: Query<&MeshMaterial3d<StandardMaterial>>, mut standard_materials: ResMut<Assets<StandardMaterial>>, ) { if app_status.tint_type != TintType::Solid { return; } app_status.hue += HUE_SHIFT_SPEED; for material_handle in objects_with_materials.iter() { let Some(material) = standard_materials.get_mut(material_handle) else { continue; }; material.specular_tint = Color::hsva(app_status.hue, 1.0, 1.0, 1.0); } } impl AppStatus { /// Returns appropriate help text that reflects the current app status. fn create_text(&self) -> Text { let tint_map_help_text = match self.tint_type { TintType::Solid => SWITCH_TO_MAP_HELP_TEXT, TintType::Map => SWITCH_TO_SOLID_TINT_HELP_TEXT, }; Text::new(tint_map_help_text) } } /// Changes the specular tint to a solid color or map when the user presses /// Space. fn toggle_specular_map( keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>, app_assets: Res<AppAssets>, objects_with_materials: Query<&MeshMaterial3d<StandardMaterial>>, mut standard_materials: ResMut<Assets<StandardMaterial>>, ) { if !keyboard.just_pressed(KeyCode::Space) { return; } // Swap tint type. app_status.tint_type = match app_status.tint_type { TintType::Solid => TintType::Map, TintType::Map => TintType::Solid, }; for material_handle in objects_with_materials.iter() { let Some(material) = standard_materials.get_mut(material_handle) else { continue; }; // Adjust the tint type. match app_status.tint_type { TintType::Solid => { material.reflectance = 1.0; material.specular_tint_texture = None; } TintType::Map => { // Set reflectance to 2.0 to spread out the map's reflectance // range from the default [0.0, 0.5] to [0.0, 1.0]. material.reflectance = 2.0; // As the tint map is multiplied by the tint color, we set the // latter to white so that only the map has an effect. 
material.specular_tint = WHITE.into(); material.specular_tint_texture = Some(app_assets.noise_texture.clone()); } }; } } /// Updates the help text at the bottom of the screen to reflect the current app /// status. fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) { for mut text in text_query.iter_mut() { *text = app_status.create_text(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/skybox.rs
examples/3d/skybox.rs
//! Load a cubemap texture onto a cube like a skybox and cycle through different compressed texture formats #[cfg(not(target_arch = "wasm32"))] use bevy::anti_alias::taa::TemporalAntiAliasing; use bevy::{ camera_controller::free_camera::{FreeCamera, FreeCameraPlugin}, core_pipeline::Skybox, image::CompressedImageFormats, pbr::ScreenSpaceAmbientOcclusion, prelude::*, render::{ render_resource::{TextureViewDescriptor, TextureViewDimension}, renderer::RenderDevice, }, }; use std::f32::consts::PI; const CUBEMAPS: &[(&str, CompressedImageFormats)] = &[ ( "textures/Ryfjallet_cubemap.png", CompressedImageFormats::NONE, ), ( "textures/Ryfjallet_cubemap_astc4x4.ktx2", CompressedImageFormats::ASTC_LDR, ), ( "textures/Ryfjallet_cubemap_bc7.ktx2", CompressedImageFormats::BC, ), ( "textures/Ryfjallet_cubemap_etc2.ktx2", CompressedImageFormats::ETC2, ), ]; fn main() { App::new() .add_plugins(DefaultPlugins) .add_plugins(FreeCameraPlugin) .add_systems(Startup, setup) .add_systems( Update, ( cycle_cubemap_asset, asset_loaded.after(cycle_cubemap_asset), animate_light_direction, ), ) .run(); } #[derive(Resource)] struct Cubemap { is_loaded: bool, index: usize, image_handle: Handle<Image>, } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { // directional 'sun' light commands.spawn(( DirectionalLight { illuminance: 32000.0, ..default() }, Transform::from_xyz(0.0, 2.0, 0.0).with_rotation(Quat::from_rotation_x(-PI / 4.)), )); let skybox_handle = asset_server.load(CUBEMAPS[0].0); // camera commands.spawn(( Camera3d::default(), Msaa::Off, #[cfg(not(target_arch = "wasm32"))] TemporalAntiAliasing::default(), ScreenSpaceAmbientOcclusion::default(), Transform::from_xyz(0.0, 0.0, 8.0).looking_at(Vec3::ZERO, Vec3::Y), FreeCamera::default(), Skybox { image: skybox_handle.clone(), brightness: 1000.0, ..default() }, )); // ambient light // NOTE: The ambient light is used to scale how bright the environment map is so with a bright // environment map, use an appropriate color and 
brightness to match commands.insert_resource(GlobalAmbientLight { color: Color::srgb_u8(210, 220, 240), brightness: 1.0, ..default() }); commands.insert_resource(Cubemap { is_loaded: false, index: 0, image_handle: skybox_handle, }); } const CUBEMAP_SWAP_DELAY: f32 = 3.0; fn cycle_cubemap_asset( time: Res<Time>, mut next_swap: Local<f32>, mut cubemap: ResMut<Cubemap>, asset_server: Res<AssetServer>, render_device: Res<RenderDevice>, ) { let now = time.elapsed_secs(); if *next_swap == 0.0 { *next_swap = now + CUBEMAP_SWAP_DELAY; return; } else if now < *next_swap { return; } *next_swap += CUBEMAP_SWAP_DELAY; let supported_compressed_formats = CompressedImageFormats::from_features(render_device.features()); let mut new_index = cubemap.index; for _ in 0..CUBEMAPS.len() { new_index = (new_index + 1) % CUBEMAPS.len(); if supported_compressed_formats.contains(CUBEMAPS[new_index].1) { break; } info!( "Skipping format which is not supported by current hardware: {:?}", CUBEMAPS[new_index] ); } // Skip swapping to the same texture. Useful for when ktx2, zstd, or compressed texture support // is missing if new_index == cubemap.index { return; } cubemap.index = new_index; cubemap.image_handle = asset_server.load(CUBEMAPS[cubemap.index].0); cubemap.is_loaded = false; } fn asset_loaded( asset_server: Res<AssetServer>, mut images: ResMut<Assets<Image>>, mut cubemap: ResMut<Cubemap>, mut skyboxes: Query<&mut Skybox>, ) { if !cubemap.is_loaded && asset_server.load_state(&cubemap.image_handle).is_loaded() { info!("Swapping to {}...", CUBEMAPS[cubemap.index].0); let image = images.get_mut(&cubemap.image_handle).unwrap(); // NOTE: PNGs do not have any metadata that could indicate they contain a cubemap texture, // so they appear as one texture. The following code reconfigures the texture as necessary. 
if image.texture_descriptor.array_layer_count() == 1 { image .reinterpret_stacked_2d_as_array(image.height() / image.width()) .expect("asset should be 2d texture and height will always be evenly divisible with the given layers"); image.texture_view_descriptor = Some(TextureViewDescriptor { dimension: Some(TextureViewDimension::Cube), ..default() }); } for mut skybox in &mut skyboxes { skybox.image = cubemap.image_handle.clone(); } cubemap.is_loaded = true; } } fn animate_light_direction( time: Res<Time>, mut query: Query<&mut Transform, With<DirectionalLight>>, ) { for mut transform in &mut query { transform.rotate_y(time.delta_secs() * 0.5); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/atmosphere.rs
examples/3d/atmosphere.rs
//! This example showcases pbr atmospheric scattering #[cfg(feature = "free_camera")] use bevy::camera_controller::free_camera::{FreeCamera, FreeCameraPlugin}; use std::f32::consts::PI; use bevy::{ anti_alias::fxaa::Fxaa, camera::Exposure, color::palettes::css::BLACK, core_pipeline::tonemapping::Tonemapping, image::{ ImageAddressMode, ImageFilterMode, ImageLoaderSettings, ImageSampler, ImageSamplerDescriptor, }, input::keyboard::KeyCode, light::{ light_consts::lux, AtmosphereEnvironmentMapLight, CascadeShadowConfigBuilder, FogVolume, VolumetricFog, VolumetricLight, }, pbr::{ AtmosphereMode, AtmosphereSettings, DefaultOpaqueRendererMethod, EarthlikeAtmosphere, ExtendedMaterial, MaterialExtension, ScreenSpaceReflections, }, post_process::bloom::Bloom, prelude::*, render::render_resource::{AsBindGroup, ShaderType}, shader::ShaderRef, }; #[derive(Resource, Default)] struct GameState { paused: bool, } fn main() { App::new() .insert_resource(DefaultOpaqueRendererMethod::deferred()) .insert_resource(ClearColor(Color::BLACK)) .insert_resource(GameState::default()) .insert_resource(GlobalAmbientLight::NONE) .add_plugins(( DefaultPlugins, #[cfg(feature = "free_camera")] FreeCameraPlugin, )) .add_plugins(MaterialPlugin::<ExtendedMaterial<StandardMaterial, Water>>::default()) .add_systems( Startup, (setup_camera_fog, setup_terrain_scene, print_controls), ) .add_systems(Update, (dynamic_scene, atmosphere_controls)) .run(); } fn print_controls() { println!("Atmosphere Example Controls:"); println!(" 1 - Switch to lookup texture rendering method"); println!(" 2 - Switch to raymarched rendering method"); println!(" Enter - Pause/Resume sun motion"); println!(" Up/Down - Increase/Decrease exposure"); } fn atmosphere_controls( keyboard_input: Res<ButtonInput<KeyCode>>, mut atmosphere_settings: Query<&mut AtmosphereSettings>, mut game_state: ResMut<GameState>, mut camera_exposure: Query<&mut Exposure, With<Camera3d>>, time: Res<Time>, ) { if 
keyboard_input.just_pressed(KeyCode::Digit1) { for mut settings in &mut atmosphere_settings { settings.rendering_method = AtmosphereMode::LookupTexture; println!("Switched to lookup texture rendering method"); } } if keyboard_input.just_pressed(KeyCode::Digit2) { for mut settings in &mut atmosphere_settings { settings.rendering_method = AtmosphereMode::Raymarched; println!("Switched to raymarched rendering method"); } } if keyboard_input.just_pressed(KeyCode::Enter) { game_state.paused = !game_state.paused; } if keyboard_input.pressed(KeyCode::ArrowUp) { for mut exposure in &mut camera_exposure { exposure.ev100 -= time.delta_secs() * 2.0; } } if keyboard_input.pressed(KeyCode::ArrowDown) { for mut exposure in &mut camera_exposure { exposure.ev100 += time.delta_secs() * 2.0; } } } fn setup_camera_fog(mut commands: Commands, earth_atmosphere: Res<EarthlikeAtmosphere>) { commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.4, 0.04, 0.0).looking_at(Vec3::Y * 0.1, Vec3::Y), // get the default `Atmosphere` component earth_atmosphere.get(), // Can be adjusted to change the scene scale and rendering quality AtmosphereSettings::default(), // The directional light illuminance used in this scene // (the one recommended for use with this feature) is // quite bright, so raising the exposure compensation helps // bring the scene to a nicer brightness range. Exposure { ev100: 13.0 }, // Tonemapper chosen just because it looked good with the scene, any // tonemapper would be fine :) Tonemapping::AcesFitted, // Bloom gives the sun a much more natural look. 
Bloom::NATURAL, // Enables the atmosphere to drive reflections and ambient lighting (IBL) for this view AtmosphereEnvironmentMapLight::default(), #[cfg(feature = "free_camera")] FreeCamera::default(), VolumetricFog { ambient_intensity: 0.0, ..default() }, Msaa::Off, Fxaa::default(), ScreenSpaceReflections::default(), )); } #[derive(Component)] struct Terrain; /// A custom [`ExtendedMaterial`] that creates animated water ripples. #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct Water { /// The normal map image. /// /// Note that, like all normal maps, this must not be loaded as sRGB. #[texture(100)] #[sampler(101)] normals: Handle<Image>, // Parameters to the water shader. #[uniform(102)] settings: WaterSettings, } /// Parameters to the water shader. #[derive(ShaderType, Debug, Clone)] struct WaterSettings { /// How much to displace each octave each frame, in the u and v directions. /// Two octaves are packed into each `vec4`. octave_vectors: [Vec4; 2], /// How wide the waves are in each octave. octave_scales: Vec4, /// How high the waves are in each octave. octave_strengths: Vec4, } impl MaterialExtension for Water { fn deferred_fragment_shader() -> ShaderRef { "shaders/water_material.wgsl".into() } } fn setup_terrain_scene( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut water_materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, Water>>>, asset_server: Res<AssetServer>, ) { // Configure a properly scaled cascade shadow map for this scene (defaults are too large, mesh units are in km) let cascade_shadow_config = CascadeShadowConfigBuilder { first_cascade_far_bound: 0.3, maximum_distance: 15.0, ..default() } .build(); // Sun commands.spawn(( DirectionalLight { shadows_enabled: true, // lux::RAW_SUNLIGHT is recommended for use with this feature, since // other values approximate sunlight *post-scattering* in various // conditions. 
RAW_SUNLIGHT in comparison is the illuminance of the // sun unfiltered by the atmosphere, so it is the proper input for // sunlight to be filtered by the atmosphere. illuminance: lux::RAW_SUNLIGHT, ..default() }, Transform::from_xyz(1.0, 0.4, 0.0).looking_at(Vec3::ZERO, Vec3::Y), VolumetricLight, cascade_shadow_config, )); // spawn the fog volume commands.spawn(( FogVolume::default(), Transform::from_scale(Vec3::new(10.0, 1.0, 10.0)).with_translation(Vec3::Y * 0.5), )); let sphere_mesh = meshes.add(Mesh::from(Sphere { radius: 1.0 })); // light probe spheres commands.spawn(( Mesh3d(sphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::WHITE, metallic: 1.0, perceptual_roughness: 0.0, ..default() })), Transform::from_xyz(-1.0, 0.1, -0.1).with_scale(Vec3::splat(0.05)), )); commands.spawn(( Mesh3d(sphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::WHITE, metallic: 0.0, perceptual_roughness: 1.0, ..default() })), Transform::from_xyz(-1.0, 0.1, 0.1).with_scale(Vec3::splat(0.05)), )); // Terrain commands.spawn(( Terrain, SceneRoot( asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/terrain/terrain.glb")), ), Transform::from_xyz(-1.0, 0.0, -0.5) .with_scale(Vec3::splat(0.5)) .with_rotation(Quat::from_rotation_y(PI / 2.0)), )); spawn_water( &mut commands, &asset_server, &mut meshes, &mut water_materials, ); } // Spawns the water plane. 
fn spawn_water( commands: &mut Commands, asset_server: &AssetServer, meshes: &mut Assets<Mesh>, water_materials: &mut Assets<ExtendedMaterial<StandardMaterial, Water>>, ) { commands.spawn(( Mesh3d(meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(1.0)))), MeshMaterial3d(water_materials.add(ExtendedMaterial { base: StandardMaterial { base_color: BLACK.into(), perceptual_roughness: 0.0, ..default() }, extension: Water { normals: asset_server.load_with_settings::<Image, ImageLoaderSettings>( "textures/water_normals.png", |settings| { settings.is_srgb = false; settings.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor { address_mode_u: ImageAddressMode::Repeat, address_mode_v: ImageAddressMode::Repeat, mag_filter: ImageFilterMode::Linear, min_filter: ImageFilterMode::Linear, ..default() }); }, ), // These water settings are just random values to create some // variety. settings: WaterSettings { octave_vectors: [ vec4(0.080, 0.059, 0.073, -0.062), vec4(0.153, 0.138, -0.149, -0.195), ], octave_scales: vec4(1.0, 2.1, 7.9, 14.9) * 500.0, octave_strengths: vec4(0.16, 0.18, 0.093, 0.044) * 0.2, }, }, })), Transform::from_scale(Vec3::splat(100.0)), )); } fn dynamic_scene( mut suns: Query<&mut Transform, With<DirectionalLight>>, time: Res<Time>, sun_motion_state: Res<GameState>, ) { // Only rotate the sun if motion is not paused if !sun_motion_state.paused { suns.iter_mut() .for_each(|mut tf| tf.rotate_x(-time.delta_secs() * PI / 10.0)); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/split_screen.rs
examples/3d/split_screen.rs
//! Renders four cameras to the same window to accomplish "split screen". use std::f32::consts::PI; use bevy::{ camera::Viewport, light::CascadeShadowConfigBuilder, prelude::*, window::WindowResized, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (set_camera_viewports, button_system)) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, asset_server: Res<AssetServer>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(100.0, 100.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), )); commands.spawn(SceneRoot( asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")), )); // Light commands.spawn(( Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)), DirectionalLight { shadows_enabled: true, ..default() }, CascadeShadowConfigBuilder { num_cascades: if cfg!(all( feature = "webgl2", target_arch = "wasm32", not(feature = "webgpu") )) { // Limited to 1 cascade in WebGL 1 } else { 2 }, first_cascade_far_bound: 200.0, maximum_distance: 280.0, ..default() } .build(), )); // Cameras and their dedicated UI for (index, (camera_name, camera_pos)) in [ ("Player 1", Vec3::new(0.0, 200.0, -150.0)), ("Player 2", Vec3::new(150.0, 150., 50.0)), ("Player 3", Vec3::new(100.0, 150., -150.0)), ("Player 4", Vec3::new(-100.0, 80., 150.0)), ] .iter() .enumerate() { let camera = commands .spawn(( Camera3d::default(), Transform::from_translation(*camera_pos).looking_at(Vec3::ZERO, Vec3::Y), Camera { // Renders cameras with different priorities to prevent ambiguities order: index as isize, ..default() }, CameraPosition { pos: UVec2::new((index % 2) as u32, (index / 2) as u32), }, )) .id(); // Set up UI commands.spawn(( UiTargetCamera(camera), Node { width: percent(100), height: percent(100), ..default() }, children![ ( Text::new(*camera_name), 
Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, ), buttons_panel(), ], )); } fn buttons_panel() -> impl Bundle { ( Node { position_type: PositionType::Absolute, width: percent(100), height: percent(100), display: Display::Flex, flex_direction: FlexDirection::Row, justify_content: JustifyContent::SpaceBetween, align_items: AlignItems::Center, padding: UiRect::all(px(20)), ..default() }, children![ rotate_button("<", Direction::Left), rotate_button(">", Direction::Right), ], ) } fn rotate_button(caption: &str, direction: Direction) -> impl Bundle { ( RotateCamera(direction), Button, Node { width: px(40), height: px(40), border: UiRect::all(px(2)), justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..default() }, BorderColor::all(Color::WHITE), BackgroundColor(Color::srgb(0.25, 0.25, 0.25)), children![Text::new(caption)], ) } } #[derive(Component)] struct CameraPosition { pos: UVec2, } #[derive(Component)] struct RotateCamera(Direction); enum Direction { Left, Right, } fn set_camera_viewports( windows: Query<&Window>, mut window_resized_reader: MessageReader<WindowResized>, mut query: Query<(&CameraPosition, &mut Camera)>, ) { // We need to dynamically resize the camera's viewports whenever the window size changes // so then each camera always takes up half the screen. // A resize_event is sent when the window is first created, allowing us to reuse this system for initial setup. 
for window_resized in window_resized_reader.read() { let window = windows.get(window_resized.window).unwrap(); let size = window.physical_size() / 2; for (camera_position, mut camera) in &mut query { camera.viewport = Some(Viewport { physical_position: camera_position.pos * size, physical_size: size, ..default() }); } } } fn button_system( interaction_query: Query< (&Interaction, &ComputedUiTargetCamera, &RotateCamera), (Changed<Interaction>, With<Button>), >, mut camera_query: Query<&mut Transform, With<Camera>>, ) { for (interaction, computed_target, RotateCamera(direction)) in &interaction_query { if let Interaction::Pressed = *interaction { // Since TargetCamera propagates to the children, we can use it to find // which side of the screen the button is on. if let Some(mut camera_transform) = computed_target .get() .and_then(|camera| camera_query.get_mut(camera).ok()) { let angle = match direction { Direction::Left => -0.1, Direction::Right => 0.1, }; camera_transform.rotate_around(Vec3::ZERO, Quat::from_axis_angle(Vec3::Y, angle)); } } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/3d_scene.rs
examples/3d/3d_scene.rs
//! A simple 3D scene with light shining over a cube sitting on a plane. use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // circular base commands.spawn(( Mesh3d(meshes.add(Circle::new(4.0))), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)), )); // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), MeshMaterial3d(materials.add(Color::srgb_u8(124, 144, 255))), Transform::from_xyz(0.0, 0.5, 0.0), )); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/fog.rs
examples/3d/fog.rs
//! Distance-based fog visual effects are used in many games to give a soft falloff of visibility to the player for performance and/or visual design reasons. The further away something in a 3D world is from the camera, the more it's mixed or completely overwritten by a given color. //! //! In Bevy we can add the [`DistanceFog`] component to the same entity as our [`Camera3d`] to apply a distance fog effect. It has fields for color, directional light parameters, and how the fog falls off over distance. And that's it! The distance fog is now applied to the camera. //! //! The [`FogFalloff`] field controls most of the behavior of the fog through different descriptions of fog "curves". I.e. [`FogFalloff::Linear`] lets us define a start and end distance where up until the start distance none of the fog color is mixed in and by the end distance the fog color is as mixed in as it can be. [`FogFalloff::Exponential`] on the other hand uses an exponential curve to drive how "visible" things are with a density value. //! //! [Atmospheric fog](https://bevy.org/examples/3d-rendering/atmospheric-fog/) is another fog type that uses this same method of setup, but isn't covered here as it is a kind of fog that is most often used to imply distance and size in clear weather, while the ones shown off here are much more "dense". //! //! The bulk of this example is spent building a scene that suites showing off that the fog is working as intended by creating a pyramid (a 3D structure with clear delineations), a light source, input handling to modify fog settings, and UI to show what the current fog settings are. //! //! ## Controls //! //! | Key Binding | Action | //! |:-------------------|:------------------------------------| //! | `1` / `2` / `3` | Fog Falloff Mode | //! | `A` / `S` | Move Start Distance (Linear Fog) | //! | | Change Density (Exponential Fogs) | //! | `Z` / `X` | Move End Distance (Linear Fog) | //! | `-` / `=` | Adjust Fog Red Channel | //! 
| `[` / `]` | Adjust Fog Green Channel | //! | `;` / `'` | Adjust Fog Blue Channel | //! | `.` / `?` | Adjust Fog Alpha Channel | use bevy::{ light::{NotShadowCaster, NotShadowReceiver}, math::ops, prelude::*, }; fn main() { App::new() .insert_resource(GlobalAmbientLight::NONE) .add_plugins(DefaultPlugins) .add_systems( Startup, (setup_camera_fog, setup_pyramid_scene, setup_instructions), ) .add_systems(Update, update_system) .run(); } fn setup_camera_fog(mut commands: Commands) { commands.spawn(( Camera3d::default(), DistanceFog { color: Color::srgb(0.25, 0.25, 0.25), falloff: FogFalloff::Linear { start: 5.0, end: 20.0, }, ..default() }, )); } fn setup_pyramid_scene( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let stone = materials.add(StandardMaterial { base_color: Srgba::hex("28221B").unwrap().into(), perceptual_roughness: 1.0, ..default() }); // pillars for (x, z) in &[(-1.5, -1.5), (1.5, -1.5), (1.5, 1.5), (-1.5, 1.5)] { commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 3.0, 1.0))), MeshMaterial3d(stone.clone()), Transform::from_xyz(*x, 1.5, *z), )); } // orb commands.spawn(( Mesh3d(meshes.add(Sphere::default())), MeshMaterial3d(materials.add(StandardMaterial { base_color: Srgba::hex("126212CC").unwrap().into(), reflectance: 1.0, perceptual_roughness: 0.0, metallic: 0.5, alpha_mode: AlphaMode::Blend, ..default() })), Transform::from_scale(Vec3::splat(1.75)).with_translation(Vec3::new(0.0, 4.0, 0.0)), NotShadowCaster, NotShadowReceiver, )); // steps for i in 0..50 { let half_size = i as f32 / 2.0 + 3.0; let y = -i as f32 / 2.0; commands.spawn(( Mesh3d(meshes.add(Cuboid::new(2.0 * half_size, 0.5, 2.0 * half_size))), MeshMaterial3d(stone.clone()), Transform::from_xyz(0.0, y + 0.25, 0.0), )); } // sky commands.spawn(( Mesh3d(meshes.add(Cuboid::new(2.0, 1.0, 1.0))), MeshMaterial3d(materials.add(StandardMaterial { base_color: Srgba::hex("888888").unwrap().into(), unlit: true, cull_mode: None, 
..default() })), Transform::from_scale(Vec3::splat(1_000_000.0)), )); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(0.0, 1.0, 0.0), )); } fn setup_instructions(mut commands: Commands) { commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn update_system( camera: Single<(&mut DistanceFog, &mut Transform)>, mut text: Single<&mut Text>, time: Res<Time>, keycode: Res<ButtonInput<KeyCode>>, ) { let now = time.elapsed_secs(); let delta = time.delta_secs(); let (mut fog, mut transform) = camera.into_inner(); // Orbit camera around pyramid let orbit_scale = 8.0 + ops::sin(now / 10.0) * 7.0; *transform = Transform::from_xyz( ops::cos(now / 5.0) * orbit_scale, 12.0 - orbit_scale / 2.0, ops::sin(now / 5.0) * orbit_scale, ) .looking_at(Vec3::ZERO, Vec3::Y); // Fog Information text.0 = format!("Fog Falloff: {:?}\nFog Color: {:?}", fog.falloff, fog.color); // Fog Falloff Mode Switching text.push_str("\n\n1 / 2 / 3 - Fog Falloff Mode"); if keycode.pressed(KeyCode::Digit1) { if let FogFalloff::Linear { .. } = fog.falloff { // No change } else { fog.falloff = FogFalloff::Linear { start: 5.0, end: 20.0, }; }; } if keycode.pressed(KeyCode::Digit2) { if let FogFalloff::Exponential { .. } = fog.falloff { // No change } else if let FogFalloff::ExponentialSquared { density } = fog.falloff { fog.falloff = FogFalloff::Exponential { density }; } else { fog.falloff = FogFalloff::Exponential { density: 0.07 }; }; } if keycode.pressed(KeyCode::Digit3) { if let FogFalloff::Exponential { density } = fog.falloff { fog.falloff = FogFalloff::ExponentialSquared { density }; } else if let FogFalloff::ExponentialSquared { .. 
} = fog.falloff { // No change } else { fog.falloff = FogFalloff::ExponentialSquared { density: 0.07 }; }; } // Linear Fog Controls if let FogFalloff::Linear { start, end } = &mut fog.falloff { text.push_str("\nA / S - Move Start Distance\nZ / X - Move End Distance"); if keycode.pressed(KeyCode::KeyA) { *start -= delta * 3.0; } if keycode.pressed(KeyCode::KeyS) { *start += delta * 3.0; } if keycode.pressed(KeyCode::KeyZ) { *end -= delta * 3.0; } if keycode.pressed(KeyCode::KeyX) { *end += delta * 3.0; } } // Exponential Fog Controls if let FogFalloff::Exponential { density } = &mut fog.falloff { text.push_str("\nA / S - Change Density"); if keycode.pressed(KeyCode::KeyA) { *density -= delta * 0.5 * *density; if *density < 0.0 { *density = 0.0; } } if keycode.pressed(KeyCode::KeyS) { *density += delta * 0.5 * *density; } } // ExponentialSquared Fog Controls if let FogFalloff::ExponentialSquared { density } = &mut fog.falloff { text.push_str("\nA / S - Change Density"); if keycode.pressed(KeyCode::KeyA) { *density -= delta * 0.5 * *density; if *density < 0.0 { *density = 0.0; } } if keycode.pressed(KeyCode::KeyS) { *density += delta * 0.5 * *density; } } // RGBA Controls text.push_str("\n\n- / = - Red\n[ / ] - Green\n; / ' - Blue\n. / ? - Alpha"); // We're performing various operations in the sRGB color space, // so we convert the fog color to sRGB here, then modify it, // and finally when we're done we can convert it back and set it. 
let mut fog_color = Srgba::from(fog.color); if keycode.pressed(KeyCode::Minus) { fog_color.red = (fog_color.red - 0.1 * delta).max(0.0); } if keycode.any_pressed([KeyCode::Equal, KeyCode::NumpadEqual]) { fog_color.red = (fog_color.red + 0.1 * delta).min(1.0); } if keycode.pressed(KeyCode::BracketLeft) { fog_color.green = (fog_color.green - 0.1 * delta).max(0.0); } if keycode.pressed(KeyCode::BracketRight) { fog_color.green = (fog_color.green + 0.1 * delta).min(1.0); } if keycode.pressed(KeyCode::Semicolon) { fog_color.blue = (fog_color.blue - 0.1 * delta).max(0.0); } if keycode.pressed(KeyCode::Quote) { fog_color.blue = (fog_color.blue + 0.1 * delta).min(1.0); } if keycode.pressed(KeyCode::Period) { fog_color.alpha = (fog_color.alpha - 0.1 * delta).max(0.0); } if keycode.pressed(KeyCode::Slash) { fog_color.alpha = (fog_color.alpha + 0.1 * delta).min(1.0); } fog.color = Color::from(fog_color); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/ssr.rs
examples/3d/ssr.rs
//! Demonstrates screen space reflections in deferred rendering.

use std::ops::Range;

use bevy::{
    anti_alias::fxaa::Fxaa,
    color::palettes::css::{BLACK, WHITE},
    core_pipeline::Skybox,
    image::{
        ImageAddressMode, ImageFilterMode, ImageLoaderSettings, ImageSampler,
        ImageSamplerDescriptor,
    },
    input::mouse::MouseWheel,
    math::{vec3, vec4},
    pbr::{
        DefaultOpaqueRendererMethod, ExtendedMaterial, MaterialExtension, ScreenSpaceReflections,
    },
    prelude::*,
    render::{
        render_resource::{AsBindGroup, ShaderType},
        view::Hdr,
    },
    shader::ShaderRef,
};

/// This example uses a shader source file from the assets subdirectory
const SHADER_ASSET_PATH: &str = "shaders/water_material.wgsl";

// The speed of camera movement, per frame in which the key is held down.
const CAMERA_KEYBOARD_ZOOM_SPEED: f32 = 0.1;
const CAMERA_KEYBOARD_ORBIT_SPEED: f32 = 0.02;
const CAMERA_MOUSE_WHEEL_ZOOM_SPEED: f32 = 0.25;

// We clamp camera distances to this range.
const CAMERA_ZOOM_RANGE: Range<f32> = 2.0..12.0;

// Help strings shown in the on-screen UI; see `create_text`.
static TURN_SSR_OFF_HELP_TEXT: &str = "Press Space to turn screen-space reflections off";
static TURN_SSR_ON_HELP_TEXT: &str = "Press Space to turn screen-space reflections on";
static MOVE_CAMERA_HELP_TEXT: &str =
    "Press WASD or use the mouse wheel to pan and orbit the camera";
static SWITCH_TO_FLIGHT_HELMET_HELP_TEXT: &str = "Press Enter to switch to the flight helmet model";
static SWITCH_TO_CUBE_HELP_TEXT: &str = "Press Enter to switch to the cube model";

/// A custom [`ExtendedMaterial`] that creates animated water ripples.
#[derive(Asset, TypePath, AsBindGroup, Debug, Clone)]
struct Water {
    /// The normal map image.
    ///
    /// Note that, like all normal maps, this must not be loaded as sRGB.
    #[texture(100)]
    #[sampler(101)]
    normals: Handle<Image>,

    // Parameters to the water shader, uploaded as a single uniform buffer.
    #[uniform(102)]
    settings: WaterSettings,
}

/// Parameters to the water shader.
#[derive(ShaderType, Debug, Clone)]
struct WaterSettings {
    /// How much to displace each octave each frame, in the u and v directions.
    /// Two octaves are packed into each `vec4`.
    octave_vectors: [Vec4; 2],
    /// How wide the waves are in each octave.
    octave_scales: Vec4,
    /// How high the waves are in each octave.
    octave_strengths: Vec4,
}

/// The current settings that the user has chosen.
#[derive(Resource)]
struct AppSettings {
    /// Whether screen space reflections are on.
    ssr_on: bool,
    /// Which model is being displayed.
    displayed_model: DisplayedModel,
}

/// Which model is being displayed.
#[derive(Default)]
enum DisplayedModel {
    /// The cube is being displayed.
    #[default]
    Cube,
    /// The flight helmet is being displayed.
    FlightHelmet,
}

/// A marker component for the cube model.
#[derive(Component)]
struct CubeModel;

/// A marker component for the flight helmet model.
#[derive(Component)]
struct FlightHelmetModel;

fn main() {
    // Enable deferred rendering, which is necessary for screen-space
    // reflections at this time. Disable multisampled antialiasing, as deferred
    // rendering doesn't support that.
    App::new()
        .insert_resource(DefaultOpaqueRendererMethod::deferred())
        .init_resource::<AppSettings>()
        .add_plugins(DefaultPlugins.set(WindowPlugin {
            primary_window: Some(Window {
                title: "Bevy Screen Space Reflections Example".into(),
                ..default()
            }),
            ..default()
        }))
        // Register the custom water material so it can be rendered.
        .add_plugins(MaterialPlugin::<ExtendedMaterial<StandardMaterial, Water>>::default())
        .add_systems(Startup, setup)
        .add_systems(Update, rotate_model)
        .add_systems(Update, move_camera)
        .add_systems(Update, adjust_app_settings)
        .run();
}

// Set up the scene.
fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut standard_materials: ResMut<Assets<StandardMaterial>>, mut water_materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, Water>>>, asset_server: Res<AssetServer>, app_settings: Res<AppSettings>, ) { spawn_cube( &mut commands, &asset_server, &mut meshes, &mut standard_materials, ); spawn_flight_helmet(&mut commands, &asset_server); spawn_water( &mut commands, &asset_server, &mut meshes, &mut water_materials, ); spawn_camera(&mut commands, &asset_server); spawn_text(&mut commands, &app_settings); } // Spawns the rotating cube. fn spawn_cube( commands: &mut Commands, asset_server: &AssetServer, meshes: &mut Assets<Mesh>, standard_materials: &mut Assets<StandardMaterial>, ) { commands .spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), MeshMaterial3d(standard_materials.add(StandardMaterial { base_color: Color::from(WHITE), base_color_texture: Some(asset_server.load("branding/icon.png")), ..default() })), Transform::from_xyz(0.0, 0.5, 0.0), )) .insert(CubeModel); } // Spawns the flight helmet. fn spawn_flight_helmet(commands: &mut Commands, asset_server: &AssetServer) { commands.spawn(( SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")), ), Transform::from_scale(Vec3::splat(2.5)), FlightHelmetModel, Visibility::Hidden, )); } // Spawns the water plane. 
fn spawn_water( commands: &mut Commands, asset_server: &AssetServer, meshes: &mut Assets<Mesh>, water_materials: &mut Assets<ExtendedMaterial<StandardMaterial, Water>>, ) { commands.spawn(( Mesh3d(meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(1.0)))), MeshMaterial3d(water_materials.add(ExtendedMaterial { base: StandardMaterial { base_color: BLACK.into(), perceptual_roughness: 0.0, ..default() }, extension: Water { normals: asset_server.load_with_settings::<Image, ImageLoaderSettings>( "textures/water_normals.png", |settings| { settings.is_srgb = false; settings.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor { address_mode_u: ImageAddressMode::Repeat, address_mode_v: ImageAddressMode::Repeat, mag_filter: ImageFilterMode::Linear, min_filter: ImageFilterMode::Linear, ..default() }); }, ), // These water settings are just random values to create some // variety. settings: WaterSettings { octave_vectors: [ vec4(0.080, 0.059, 0.073, -0.062), vec4(0.153, 0.138, -0.149, -0.195), ], octave_scales: vec4(1.0, 2.1, 7.9, 14.9) * 5.0, octave_strengths: vec4(0.16, 0.18, 0.093, 0.044), }, }, })), Transform::from_scale(Vec3::splat(100.0)), )); } // Spawns the camera. fn spawn_camera(commands: &mut Commands, asset_server: &AssetServer) { // Create the camera. Add an environment map and skybox so the water has // something interesting to reflect, other than the cube. Enable deferred // rendering by adding depth and deferred prepasses. Turn on FXAA to make // the scene look a little nicer. Finally, add screen space reflections. 
commands .spawn(( Camera3d::default(), Transform::from_translation(vec3(-1.25, 2.25, 4.5)).looking_at(Vec3::ZERO, Vec3::Y), Hdr, Msaa::Off, )) .insert(EnvironmentMapLight { diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), intensity: 5000.0, ..default() }) .insert(Skybox { image: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), brightness: 5000.0, ..default() }) .insert(ScreenSpaceReflections::default()) .insert(Fxaa::default()); } // Spawns the help text. fn spawn_text(commands: &mut Commands, app_settings: &AppSettings) { commands.spawn(( create_text(app_settings), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } // Creates or recreates the help text. fn create_text(app_settings: &AppSettings) -> Text { format!( "{}\n{}\n{}", match app_settings.displayed_model { DisplayedModel::Cube => SWITCH_TO_FLIGHT_HELMET_HELP_TEXT, DisplayedModel::FlightHelmet => SWITCH_TO_CUBE_HELP_TEXT, }, if app_settings.ssr_on { TURN_SSR_OFF_HELP_TEXT } else { TURN_SSR_ON_HELP_TEXT }, MOVE_CAMERA_HELP_TEXT ) .into() } impl MaterialExtension for Water { fn deferred_fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } } /// Rotates the model on the Y axis a bit every frame. fn rotate_model( mut query: Query<&mut Transform, Or<(With<CubeModel>, With<FlightHelmetModel>)>>, time: Res<Time>, ) { for mut transform in query.iter_mut() { transform.rotation = Quat::from_euler(EulerRot::XYZ, 0.0, time.elapsed_secs(), 0.0); } } // Processes input related to camera movement. fn move_camera( keyboard_input: Res<ButtonInput<KeyCode>>, mut mouse_wheel_reader: MessageReader<MouseWheel>, mut cameras: Query<&mut Transform, With<Camera>>, ) { let (mut distance_delta, mut theta_delta) = (0.0, 0.0); // Handle keyboard events. 
if keyboard_input.pressed(KeyCode::KeyW) { distance_delta -= CAMERA_KEYBOARD_ZOOM_SPEED; } if keyboard_input.pressed(KeyCode::KeyS) { distance_delta += CAMERA_KEYBOARD_ZOOM_SPEED; } if keyboard_input.pressed(KeyCode::KeyA) { theta_delta += CAMERA_KEYBOARD_ORBIT_SPEED; } if keyboard_input.pressed(KeyCode::KeyD) { theta_delta -= CAMERA_KEYBOARD_ORBIT_SPEED; } // Handle mouse events. for mouse_wheel in mouse_wheel_reader.read() { distance_delta -= mouse_wheel.y * CAMERA_MOUSE_WHEEL_ZOOM_SPEED; } // Update transforms. for mut camera_transform in cameras.iter_mut() { let local_z = camera_transform.local_z().as_vec3().normalize_or_zero(); if distance_delta != 0.0 { camera_transform.translation = (camera_transform.translation.length() + distance_delta) .clamp(CAMERA_ZOOM_RANGE.start, CAMERA_ZOOM_RANGE.end) * local_z; } if theta_delta != 0.0 { camera_transform .translate_around(Vec3::ZERO, Quat::from_axis_angle(Vec3::Y, theta_delta)); camera_transform.look_at(Vec3::ZERO, Vec3::Y); } } } // Adjusts app settings per user input. fn adjust_app_settings( mut commands: Commands, keyboard_input: Res<ButtonInput<KeyCode>>, mut app_settings: ResMut<AppSettings>, mut cameras: Query<Entity, With<Camera>>, mut cube_models: Query<&mut Visibility, (With<CubeModel>, Without<FlightHelmetModel>)>, mut flight_helmet_models: Query<&mut Visibility, (Without<CubeModel>, With<FlightHelmetModel>)>, mut text: Query<&mut Text>, ) { // If there are no changes, we're going to bail for efficiency. Record that // here. let mut any_changes = false; // If the user pressed Space, toggle SSR. if keyboard_input.just_pressed(KeyCode::Space) { app_settings.ssr_on = !app_settings.ssr_on; any_changes = true; } // If the user pressed Enter, switch models. 
if keyboard_input.just_pressed(KeyCode::Enter) { app_settings.displayed_model = match app_settings.displayed_model { DisplayedModel::Cube => DisplayedModel::FlightHelmet, DisplayedModel::FlightHelmet => DisplayedModel::Cube, }; any_changes = true; } // If there were no changes, bail. if !any_changes { return; } // Update SSR settings. for camera in cameras.iter_mut() { if app_settings.ssr_on { commands .entity(camera) .insert(ScreenSpaceReflections::default()); } else { commands.entity(camera).remove::<ScreenSpaceReflections>(); } } // Set cube model visibility. for mut cube_visibility in cube_models.iter_mut() { *cube_visibility = match app_settings.displayed_model { DisplayedModel::Cube => Visibility::Visible, _ => Visibility::Hidden, } } // Set flight helmet model visibility. for mut flight_helmet_visibility in flight_helmet_models.iter_mut() { *flight_helmet_visibility = match app_settings.displayed_model { DisplayedModel::FlightHelmet => Visibility::Visible, _ => Visibility::Hidden, }; } // Update the help text. for mut text in text.iter_mut() { *text = create_text(&app_settings); } } impl Default for AppSettings { fn default() -> Self { Self { ssr_on: true, displayed_model: default(), } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/occlusion_culling.rs
examples/3d/occlusion_culling.rs
//! Demonstrates occlusion culling. //! //! This demo rotates many small cubes around a rotating large cube at the //! origin. At all times, the large cube will be occluding several of the small //! cubes. The demo displays the number of cubes that were actually rendered, so //! the effects of occlusion culling can be seen. use std::{ any::TypeId, f32::consts::PI, fmt::Write as _, result::Result, sync::{Arc, Mutex}, }; use bevy::{ color::palettes::css::{SILVER, WHITE}, core_pipeline::{ core_3d::{ graph::{Core3d, Node3d}, Opaque3d, }, prepass::DepthPrepass, }, pbr::PbrPlugin, prelude::*, render::{ batching::gpu_preprocessing::{ GpuPreprocessingSupport, IndirectParametersBuffers, IndirectParametersIndexed, }, experimental::occlusion_culling::OcclusionCulling, render_graph::{self, NodeRunError, RenderGraphContext, RenderGraphExt, RenderLabel}, render_resource::{Buffer, BufferDescriptor, BufferUsages, MapMode}, renderer::{RenderContext, RenderDevice}, settings::WgpuFeatures, Render, RenderApp, RenderDebugFlags, RenderPlugin, RenderStartup, RenderSystems, }, }; use bytemuck::Pod; /// The radius of the spinning sphere of cubes. const OUTER_RADIUS: f32 = 3.0; /// The density of cubes in the other sphere. const OUTER_SUBDIVISION_COUNT: u32 = 5; /// The speed at which the outer sphere and large cube rotate in radians per /// frame. const ROTATION_SPEED: f32 = 0.01; /// The length of each side of the small cubes, in meters. const SMALL_CUBE_SIZE: f32 = 0.1; /// The length of each side of the large cube, in meters. const LARGE_CUBE_SIZE: f32 = 2.0; /// A marker component for the immediate parent of the large sphere of cubes. #[derive(Default, Component)] struct SphereParent; /// A marker component for the large spinning cube at the origin. #[derive(Default, Component)] struct LargeCube; /// A plugin for the render app that reads the number of culled meshes from the /// GPU back to the CPU. 
struct ReadbackIndirectParametersPlugin;

/// The node that we insert into the render graph in order to read the number of
/// culled meshes from the GPU back to the CPU.
#[derive(Default)]
struct ReadbackIndirectParametersNode;

/// The [`RenderLabel`] that we use to identify the
/// [`ReadbackIndirectParametersNode`].
#[derive(Clone, PartialEq, Eq, Hash, Debug, RenderLabel)]
struct ReadbackIndirectParameters;

/// The intermediate staging buffers that we use to read back the indirect
/// parameters from the GPU to the CPU.
///
/// We read back the GPU indirect parameters so that we can determine the number
/// of meshes that were culled.
///
/// `wgpu` doesn't allow us to read indirect buffers back from the GPU to the
/// CPU directly. Instead, we have to copy them to a temporary staging buffer
/// first, and then read *those* buffers back from the GPU to the CPU. This
/// resource holds those temporary buffers.
#[derive(Resource, Default)]
struct IndirectParametersStagingBuffers {
    /// The buffer that stores the indirect draw commands.
    ///
    /// See [`IndirectParametersIndexed`] for more information about the memory
    /// layout of this buffer.
    data: Option<Buffer>,
    /// The buffer that stores the *number* of indirect draw commands.
    ///
    /// We only care about the first `u32` in this buffer.
    batch_sets: Option<Buffer>,
}

/// A resource, shared between the main world and the render world, that saves a
/// CPU-side copy of the GPU buffer that stores the indirect draw parameters.
///
/// This is needed so that we can display the number of meshes that were culled.
/// It's reference counted, and protected by a lock, because we don't precisely
/// know when the GPU will be ready to present the CPU with the buffer copy.
/// Even though the rendering runs at least a frame ahead of the main app logic,
/// we don't require more precise synchronization than the lock because we don't
/// really care how up-to-date the counter of culled meshes is. If it's off by a
/// few frames, that's no big deal.
#[derive(Clone, Resource, Deref, DerefMut)]
struct SavedIndirectParameters(Arc<Mutex<Option<SavedIndirectParametersData>>>);

/// A CPU-side copy of the GPU buffer that stores the indirect draw parameters.
///
/// This is needed so that we can display the number of meshes that were culled.
struct SavedIndirectParametersData {
    /// The CPU-side copy of the GPU buffer that stores the indirect draw
    /// parameters.
    data: Vec<IndirectParametersIndexed>,

    /// The CPU-side copy of the GPU buffer that stores the *number* of indirect
    /// draw parameters that we have.
    ///
    /// All we care about is the number of indirect draw parameters for a single
    /// view, so this is only one word in size.
    count: u32,

    /// True if occlusion culling is supported at all; false if it's not.
    occlusion_culling_supported: bool,

    /// True if we support inspecting the number of meshes that were culled on
    /// this platform; false if we don't.
    ///
    /// If `multi_draw_indirect_count` isn't supported, then we would have to
    /// employ a more complicated approach in order to determine the number of
    /// meshes that are occluded, and that would be out of scope for this
    /// example.
    occlusion_culling_introspection_supported: bool,
}

impl SavedIndirectParameters {
    /// Creates an empty, shareable holder for the readback data; it's filled
    /// in later by `init_saved_indirect_parameters` once the GPU features are
    /// known.
    fn new() -> Self {
        Self(Arc::new(Mutex::new(None)))
    }
}

/// Initializes the shared [`SavedIndirectParameters`] once the render device's
/// capabilities are available, at render-app startup.
fn init_saved_indirect_parameters(
    render_device: Res<RenderDevice>,
    gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
    saved_indirect_parameters: Res<SavedIndirectParameters>,
) {
    let mut saved_indirect_parameters = saved_indirect_parameters.0.lock().unwrap();
    *saved_indirect_parameters = Some(SavedIndirectParametersData {
        data: vec![],
        count: 0,
        occlusion_culling_supported: gpu_preprocessing_support.is_culling_supported(),
        // In order to determine how many meshes were culled, we look at the
        // indirect count buffer that Bevy only populates if the platform
        // supports `multi_draw_indirect_count`. So, if we don't have that
        // feature, then we don't bother to display how many meshes were
        // culled.
        occlusion_culling_introspection_supported: render_device
            .features()
            .contains(WgpuFeatures::MULTI_DRAW_INDIRECT_COUNT),
    });
}

/// The demo's current settings.
#[derive(Resource)]
struct AppStatus {
    /// Whether occlusion culling is presently enabled.
    ///
    /// By default, this is set to true.
    occlusion_culling: bool,
}

impl Default for AppStatus {
    fn default() -> Self {
        AppStatus {
            occlusion_culling: true,
        }
    }
}

fn main() {
    // Allow Bevy to copy from indirect parameter buffers, which this example
    // needs in order to read them back.
    let render_debug_flags = RenderDebugFlags::ALLOW_COPIES_FROM_INDIRECT_PARAMETERS;

    App::new()
        .add_plugins(
            DefaultPlugins
                .set(WindowPlugin {
                    primary_window: Some(Window {
                        title: "Bevy Occlusion Culling Example".into(),
                        ..default()
                    }),
                    ..default()
                })
                .set(RenderPlugin {
                    debug_flags: render_debug_flags,
                    ..default()
                })
                .set(PbrPlugin {
                    debug_flags: render_debug_flags,
                    ..default()
                }),
        )
        .add_plugins(ReadbackIndirectParametersPlugin)
        .init_resource::<AppStatus>()
        .add_systems(Startup, setup)
        .add_systems(Update, spin_small_cubes)
        .add_systems(Update, spin_large_cube)
        .add_systems(Update, update_status_text)
        .add_systems(Update, toggle_occlusion_culling_on_request)
        .run();
}

impl Plugin for ReadbackIndirectParametersPlugin {
    fn build(&self, app: &mut App) {
        // Create the `SavedIndirectParameters` resource that we're going to use
        // to communicate between the thread that the GPU-to-CPU readback
        // callback runs on and the main application threads. This resource is
        // atomically reference counted. We store one reference to the
        // `SavedIndirectParameters` in the main app and another reference in
        // the render app.
        let saved_indirect_parameters = SavedIndirectParameters::new();
        app.insert_resource(saved_indirect_parameters.clone());

        // Fetch the render app.
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            // Insert another reference to the `SavedIndirectParameters`.
            .insert_resource(saved_indirect_parameters)
            // Set up the parameters in RenderStartup.
            .add_systems(RenderStartup, init_saved_indirect_parameters)
            .init_resource::<IndirectParametersStagingBuffers>()
            .add_systems(ExtractSchedule, readback_indirect_parameters)
            .add_systems(
                Render,
                create_indirect_parameters_staging_buffers
                    .in_set(RenderSystems::PrepareResourcesFlush),
            )
            // Add the node that allows us to read the indirect parameters back
            // from the GPU to the CPU, which allows us to determine how many
            // meshes were culled.
            .add_render_graph_node::<ReadbackIndirectParametersNode>(
                Core3d,
                ReadbackIndirectParameters,
            )
            // We read back the indirect parameters any time after
            // `EndMainPass`. Readback doesn't particularly need to execute
            // before `EndMainPassPostProcessing`, but we specify that anyway
            // because we want to make the indirect parameters run before
            // *something* in the graph, and `EndMainPassPostProcessing` is a
            // good a node as any other.
            .add_render_graph_edges(
                Core3d,
                (
                    Node3d::EndMainPass,
                    ReadbackIndirectParameters,
                    Node3d::EndMainPassPostProcessing,
                ),
            );
    }
}

/// Spawns all the objects in the scene.
fn setup(
    mut commands: Commands,
    asset_server: Res<AssetServer>,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<StandardMaterial>>,
) {
    spawn_small_cubes(&mut commands, &mut meshes, &mut materials);
    spawn_large_cube(&mut commands, &asset_server, &mut meshes, &mut materials);
    spawn_light(&mut commands);
    spawn_camera(&mut commands);
    spawn_help_text(&mut commands);
}

/// Spawns the rotating sphere of small cubes.
fn spawn_small_cubes(
    commands: &mut Commands,
    meshes: &mut Assets<Mesh>,
    materials: &mut Assets<StandardMaterial>,
) {
    // Add the cube mesh.
    let small_cube = meshes.add(Cuboid::new(
        SMALL_CUBE_SIZE,
        SMALL_CUBE_SIZE,
        SMALL_CUBE_SIZE,
    ));

    // Add the cube material.
    let small_cube_material = materials.add(StandardMaterial {
        base_color: SILVER.into(),
        ..default()
    });

    // Create the entity that the small cubes will be parented to. This is the
    // entity that we rotate.
    let sphere_parent = commands
        .spawn(Transform::from_translation(Vec3::ZERO))
        .insert(Visibility::default())
        .insert(SphereParent)
        .id();

    // Now we have to figure out where to place the cubes. To do that, we create
    // a sphere mesh, but we don't add it to the scene. Instead, we inspect the
    // sphere mesh to find the positions of its vertices, and spawn a small cube
    // at each one. That way, we end up with a bunch of cubes arranged in a
    // spherical shape.

    // Create the sphere mesh, and extract the positions of its vertices.
    let sphere = Sphere::new(OUTER_RADIUS)
        .mesh()
        .ico(OUTER_SUBDIVISION_COUNT)
        .unwrap();
    let sphere_positions = sphere.attribute(Mesh::ATTRIBUTE_POSITION).unwrap();

    // At each vertex, create a small cube.
    for sphere_position in sphere_positions.as_float3().unwrap() {
        let sphere_position = Vec3::from_slice(sphere_position);
        let small_cube = commands
            .spawn(Mesh3d(small_cube.clone()))
            .insert(MeshMaterial3d(small_cube_material.clone()))
            .insert(Transform::from_translation(sphere_position))
            .id();
        commands.entity(sphere_parent).add_child(small_cube);
    }
}

/// Spawns the large cube at the center of the screen.
///
/// This cube rotates chaotically and occludes small cubes behind it.
fn spawn_large_cube(
    commands: &mut Commands,
    asset_server: &AssetServer,
    meshes: &mut Assets<Mesh>,
    materials: &mut Assets<StandardMaterial>,
) {
    commands
        .spawn(Mesh3d(meshes.add(Cuboid::new(
            LARGE_CUBE_SIZE,
            LARGE_CUBE_SIZE,
            LARGE_CUBE_SIZE,
        ))))
        .insert(MeshMaterial3d(materials.add(StandardMaterial {
            base_color: WHITE.into(),
            base_color_texture: Some(asset_server.load("branding/icon.png")),
            ..default()
        })))
        .insert(Transform::IDENTITY)
        .insert(LargeCube);
}

// Spins the outer sphere a bit every frame.
//
// This ensures that the set of cubes that are hidden and shown varies over
// time.
fn spin_small_cubes(mut sphere_parents: Query<&mut Transform, With<SphereParent>>) {
    for mut sphere_parent_transform in &mut sphere_parents {
        sphere_parent_transform.rotate_y(ROTATION_SPEED);
    }
}

/// Spins the large cube a bit every frame.
///
/// The chaotic rotation adds a bit of randomness to the scene to better
/// demonstrate the dynamicity of the occlusion culling.
fn spin_large_cube(mut large_cubes: Query<&mut Transform, With<LargeCube>>) {
    for mut transform in &mut large_cubes {
        transform.rotate(Quat::from_euler(
            EulerRot::XYZ,
            0.13 * ROTATION_SPEED,
            0.29 * ROTATION_SPEED,
            0.35 * ROTATION_SPEED,
        ));
    }
}

/// Spawns a directional light to illuminate the scene.
fn spawn_light(commands: &mut Commands) {
    commands
        .spawn(DirectionalLight::default())
        .insert(Transform::from_rotation(Quat::from_euler(
            EulerRot::ZYX,
            0.0,
            PI * -0.15,
            PI * -0.15,
        )));
}

/// Spawns a camera that includes the depth prepass and occlusion culling.
fn spawn_camera(commands: &mut Commands) {
    commands
        .spawn(Camera3d::default())
        .insert(Transform::from_xyz(0.0, 0.0, 9.0).looking_at(Vec3::ZERO, Vec3::Y))
        .insert(DepthPrepass)
        .insert(OcclusionCulling);
}

/// Spawns the help text at the upper left of the screen.
fn spawn_help_text(commands: &mut Commands) {
    commands.spawn((
        Text::new(""),
        Node {
            position_type: PositionType::Absolute,
            top: px(12),
            left: px(12),
            ..default()
        },
    ));
}

impl render_graph::Node for ReadbackIndirectParametersNode {
    fn run<'w>(
        &self,
        _: &mut RenderGraphContext,
        render_context: &mut RenderContext<'w>,
        world: &'w World,
    ) -> Result<(), NodeRunError> {
        // Extract the buffers that hold the GPU indirect draw parameters from
        // the world resources. We're going to read those buffers to determine
        // how many meshes were actually drawn.
        let (Some(indirect_parameters_buffers), Some(indirect_parameters_mapping_buffers)) = (
            world.get_resource::<IndirectParametersBuffers>(),
            world.get_resource::<IndirectParametersStagingBuffers>(),
        ) else {
            return Ok(());
        };

        // Get the indirect parameters buffers corresponding to the opaque 3D
        // phase, since all our meshes are in that phase.
        let Some(phase_indirect_parameters_buffers) =
            indirect_parameters_buffers.get(&TypeId::of::<Opaque3d>())
        else {
            return Ok(());
        };

        // Grab both the buffers we're copying from and the staging buffers
        // we're copying to. Remember that we can't map the indirect parameters
        // buffers directly, so we have to copy their contents to a staging
        // buffer.
        let (
            Some(indexed_data_buffer),
            Some(indexed_batch_sets_buffer),
            Some(indirect_parameters_staging_data_buffer),
            Some(indirect_parameters_staging_batch_sets_buffer),
        ) = (
            phase_indirect_parameters_buffers.indexed.data_buffer(),
            phase_indirect_parameters_buffers
                .indexed
                .batch_sets_buffer(),
            indirect_parameters_mapping_buffers.data.as_ref(),
            indirect_parameters_mapping_buffers.batch_sets.as_ref(),
        ) else {
            return Ok(());
        };

        // Copy from the indirect parameters buffers to the staging buffers.
        render_context.command_encoder().copy_buffer_to_buffer(
            indexed_data_buffer,
            0,
            indirect_parameters_staging_data_buffer,
            0,
            indexed_data_buffer.size(),
        );
        render_context.command_encoder().copy_buffer_to_buffer(
            indexed_batch_sets_buffer,
            0,
            indirect_parameters_staging_batch_sets_buffer,
            0,
            indexed_batch_sets_buffer.size(),
        );

        Ok(())
    }
}

/// Creates the staging buffers that we use to read back the indirect parameters
/// from the GPU to the CPU.
///
/// We read the indirect parameters from the GPU to the CPU in order to display
/// the number of meshes that were culled each frame.
///
/// We need these staging buffers because `wgpu` doesn't allow us to read the
/// contents of the indirect parameters buffers directly. We must first copy
/// them from the GPU to a staging buffer, and then read the staging buffer.
fn create_indirect_parameters_staging_buffers(
    mut indirect_parameters_staging_buffers: ResMut<IndirectParametersStagingBuffers>,
    indirect_parameters_buffers: Res<IndirectParametersBuffers>,
    render_device: Res<RenderDevice>,
) {
    let Some(phase_indirect_parameters_buffers) =
        indirect_parameters_buffers.get(&TypeId::of::<Opaque3d>())
    else {
        return;
    };

    // Fetch the indirect parameters buffers that we're going to copy from.
    let (Some(indexed_data_buffer), Some(indexed_batch_set_buffer)) = (
        phase_indirect_parameters_buffers.indexed.data_buffer(),
        phase_indirect_parameters_buffers
            .indexed
            .batch_sets_buffer(),
    ) else {
        return;
    };

    // Build the staging buffers. Make sure they have the same sizes as the
    // buffers we're copying from.
    indirect_parameters_staging_buffers.data =
        Some(render_device.create_buffer(&BufferDescriptor {
            label: Some("indexed data staging buffer"),
            size: indexed_data_buffer.size(),
            usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
            mapped_at_creation: false,
        }));
    indirect_parameters_staging_buffers.batch_sets =
        Some(render_device.create_buffer(&BufferDescriptor {
            label: Some("indexed batch set staging buffer"),
            size: indexed_batch_set_buffer.size(),
            usage: BufferUsages::MAP_READ | BufferUsages::COPY_DST,
            mapped_at_creation: false,
        }));
}

/// Updates the app status text at the top of the screen.
fn update_status_text(
    saved_indirect_parameters: Res<SavedIndirectParameters>,
    mut texts: Query<&mut Text>,
    meshes: Query<Entity, With<Mesh3d>>,
    app_status: Res<AppStatus>,
) {
    // How many meshes are in the scene?
    let total_mesh_count = meshes.iter().count();

    // Sample the rendered object count. Note that we don't synchronize beyond
    // locking the data and therefore this value will generally be at least one
    // frame behind. This is fine; this app is just a demonstration after all.
    let (
        rendered_object_count,
        occlusion_culling_supported,
        occlusion_culling_introspection_supported,
    ): (u32, bool, bool) = {
        let saved_indirect_parameters = saved_indirect_parameters.lock().unwrap();
        let Some(saved_indirect_parameters) = saved_indirect_parameters.as_ref() else {
            // Bail out early if the resource isn't initialized yet.
            return;
        };
        (
            // Sum the per-draw instance counts to get the number of meshes
            // that actually survived culling.
            saved_indirect_parameters
                .data
                .iter()
                .take(saved_indirect_parameters.count as usize)
                .map(|indirect_parameters| indirect_parameters.instance_count)
                .sum(),
            saved_indirect_parameters.occlusion_culling_supported,
            saved_indirect_parameters.occlusion_culling_introspection_supported,
        )
    };

    // Change the text.
    for mut text in &mut texts {
        text.0 = String::new();

        if !occlusion_culling_supported {
            text.0
                .push_str("Occlusion culling not supported on this platform");
            continue;
        }

        let _ = writeln!(
            &mut text.0,
            "Occlusion culling {} (Press Space to toggle)",
            if app_status.occlusion_culling {
                "ON"
            } else {
                "OFF"
            },
        );

        if !occlusion_culling_introspection_supported {
            continue;
        }

        let _ = write!(
            &mut text.0,
            "{rendered_object_count}/{total_mesh_count} meshes rendered"
        );
    }
}

/// A system that reads the indirect parameters back from the GPU so that we can
/// report how many meshes were culled.
fn readback_indirect_parameters(
    mut indirect_parameters_staging_buffers: ResMut<IndirectParametersStagingBuffers>,
    saved_indirect_parameters: Res<SavedIndirectParameters>,
) {
    // If culling isn't supported on this platform, bail.
    if !saved_indirect_parameters
        .lock()
        .unwrap()
        .as_ref()
        .unwrap()
        .occlusion_culling_supported
    {
        return;
    }

    // Grab the staging buffers. `take` leaves `None` behind so that fresh
    // buffers are created for the next frame.
    let (Some(data_buffer), Some(batch_sets_buffer)) = (
        indirect_parameters_staging_buffers.data.take(),
        indirect_parameters_staging_buffers.batch_sets.take(),
    ) else {
        return;
    };

    // Read the GPU buffers back. Each callback gets its own clone of the
    // shared `Arc` so it can outlive this system.
    let saved_indirect_parameters_0 = (**saved_indirect_parameters).clone();
    let saved_indirect_parameters_1 = (**saved_indirect_parameters).clone();
    readback_buffer::<IndirectParametersIndexed>(data_buffer, move |indirect_parameters| {
        saved_indirect_parameters_0
            .lock()
            .unwrap()
            .as_mut()
            .unwrap()
            .data = indirect_parameters.to_vec();
    });
    readback_buffer::<u32>(batch_sets_buffer, move |indirect_parameters_count| {
        saved_indirect_parameters_1
            .lock()
            .unwrap()
            .as_mut()
            .unwrap()
            .count = indirect_parameters_count[0];
    });
}

// A helper function to asynchronously read an array of [`Pod`] values back from
// the GPU to the CPU.
//
// The given callback is invoked when the data is ready. The buffer will
// automatically be unmapped after the callback executes.
fn readback_buffer<T>(buffer: Buffer, callback: impl FnOnce(&[T]) + Send + 'static)
where
    T: Pod,
{
    // We need to make another reference to the buffer so that we can move the
    // original reference into the closure below.
    let original_buffer = buffer.clone();
    original_buffer
        .slice(..)
        .map_async(MapMode::Read, move |result| {
            // Make sure we succeeded.
            if result.is_err() {
                return;
            }

            {
                // Cast the raw bytes in the GPU buffer to the appropriate type.
                // Truncate to a whole number of `T`s before casting, since the
                // buffer may be padded past the last element.
                let buffer_view = buffer.slice(..).get_mapped_range();
                let indirect_parameters: &[T] = bytemuck::cast_slice(
                    &buffer_view[0..(buffer_view.len() / size_of::<T>() * size_of::<T>())],
                );

                // Invoke the callback.
                callback(indirect_parameters);
            }

            // Unmap the buffer. We have to do this before submitting any more
            // GPU command buffers, or `wgpu` will assert.
            buffer.unmap();
        });
}

/// Adds or removes the [`OcclusionCulling`] and [`DepthPrepass`] components
/// when the user presses the spacebar.
fn toggle_occlusion_culling_on_request(
    mut commands: Commands,
    input: Res<ButtonInput<KeyCode>>,
    mut app_status: ResMut<AppStatus>,
    cameras: Query<Entity, With<Camera3d>>,
) {
    // Only run when the user presses the spacebar.
    if !input.just_pressed(KeyCode::Space) {
        return;
    }

    // Toggle the occlusion culling flag in `AppStatus`.
    app_status.occlusion_culling = !app_status.occlusion_culling;

    // Add or remove the `OcclusionCulling` and `DepthPrepass` components as
    // requested.
    for camera in &cameras {
        if app_status.occlusion_culling {
            commands
                .entity(camera)
                .insert(DepthPrepass)
                .insert(OcclusionCulling);
        } else {
            commands
                .entity(camera)
                .remove::<DepthPrepass>()
                .remove::<OcclusionCulling>();
        }
    }
}
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/irradiance_volumes.rs
examples/3d/irradiance_volumes.rs
//! This example shows how irradiance volumes affect the indirect lighting of //! objects in a scene. //! //! The controls are as follows: //! //! * Space toggles the irradiance volume on and off. //! //! * Enter toggles the camera rotation on and off. //! //! * Tab switches the object between a plain sphere and a running fox. //! //! * Backspace shows and hides the voxel cubes. //! //! * Clicking anywhere moves the object. use bevy::{ color::palettes::css::*, core_pipeline::Skybox, light::{IrradianceVolume, NotShadowCaster}, math::{uvec3, vec3}, pbr::{ExtendedMaterial, MaterialExtension}, prelude::*, render::render_resource::{AsBindGroup, ShaderType}, shader::ShaderRef, window::PrimaryWindow, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/irradiance_volume_voxel_visualization.wgsl"; // Rotation speed in radians per frame. const ROTATION_SPEED: f32 = 0.2; const FOX_SCALE: f32 = 0.05; const SPHERE_SCALE: f32 = 2.0; const IRRADIANCE_VOLUME_INTENSITY: f32 = 1800.0; const AMBIENT_LIGHT_BRIGHTNESS: f32 = 0.06; const VOXEL_CUBE_SCALE: f32 = 0.4; static DISABLE_IRRADIANCE_VOLUME_HELP_TEXT: &str = "Space: Disable the irradiance volume"; static ENABLE_IRRADIANCE_VOLUME_HELP_TEXT: &str = "Space: Enable the irradiance volume"; static HIDE_VOXELS_HELP_TEXT: &str = "Backspace: Hide the voxels"; static SHOW_VOXELS_HELP_TEXT: &str = "Backspace: Show the voxels"; static STOP_ROTATION_HELP_TEXT: &str = "Enter: Stop rotation"; static START_ROTATION_HELP_TEXT: &str = "Enter: Start rotation"; static SWITCH_TO_FOX_HELP_TEXT: &str = "Tab: Switch to a skinned mesh"; static SWITCH_TO_SPHERE_HELP_TEXT: &str = "Tab: Switch to a plain sphere mesh"; static CLICK_TO_MOVE_HELP_TEXT: &str = "Left click: Move the object"; static GIZMO_COLOR: Color = Color::Srgba(YELLOW); static VOXEL_FROM_WORLD: Mat4 = Mat4::from_cols_array_2d(&[ [-42.317566, 0.0, 0.0, 0.0], [0.0, 0.0, 44.601563, 0.0], [0.0, 16.73776, 0.0, 0.0], [0.0, 6.544792, 
0.0, 1.0], ]); // The mode the application is in. #[derive(Resource)] struct AppStatus { // Whether the user wants the irradiance volume to be applied. irradiance_volume_present: bool, // Whether the user wants the unskinned sphere mesh or the skinned fox mesh. model: ExampleModel, // Whether the user has requested the scene to rotate. rotating: bool, // Whether the user has requested the voxels to be displayed. voxels_visible: bool, } // Which model the user wants to display. #[derive(Clone, Copy, PartialEq)] enum ExampleModel { // The plain sphere. Sphere, // The fox, which is skinned. Fox, } // Handles to all the assets used in this example. #[derive(Resource)] struct ExampleAssets { // The glTF scene containing the colored floor. main_scene: Handle<Scene>, // The 3D texture containing the irradiance volume. irradiance_volume: Handle<Image>, // The plain sphere mesh. main_sphere: Handle<Mesh>, // The material used for the sphere. main_sphere_material: Handle<StandardMaterial>, // The glTF scene containing the animated fox. fox: Handle<Scene>, // The graph containing the animation that the fox will play. fox_animation_graph: Handle<AnimationGraph>, // The node within the animation graph containing the animation. fox_animation_node: AnimationNodeIndex, // The voxel cube mesh. voxel_cube: Handle<Mesh>, // The skybox. skybox: Handle<Image>, } // The sphere and fox both have this component. #[derive(Component)] struct MainObject; // Marks each of the voxel cubes. #[derive(Component)] struct VoxelCube; // Marks the voxel cube parent object. 
#[derive(Component)] struct VoxelCubeParent; type VoxelVisualizationMaterial = ExtendedMaterial<StandardMaterial, VoxelVisualizationExtension>; #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct VoxelVisualizationExtension { #[uniform(100)] irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo, } #[derive(ShaderType, Debug, Clone)] struct VoxelVisualizationIrradianceVolumeInfo { world_from_voxel: Mat4, voxel_from_world: Mat4, resolution: UVec3, intensity: f32, } fn main() { // Create the example app. App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "Bevy Irradiance Volumes Example".into(), ..default() }), ..default() })) .add_plugins(MaterialPlugin::<VoxelVisualizationMaterial>::default()) .init_resource::<AppStatus>() .init_resource::<ExampleAssets>() .insert_resource(GlobalAmbientLight { color: Color::WHITE, brightness: 0.0, ..default() }) .add_systems(Startup, setup) .add_systems(PreUpdate, create_cubes) .add_systems(Update, rotate_camera) .add_systems(Update, play_animations) .add_systems( Update, handle_mouse_clicks .after(rotate_camera) .after(play_animations), ) .add_systems( Update, change_main_object .after(rotate_camera) .after(play_animations), ) .add_systems( Update, toggle_irradiance_volumes .after(rotate_camera) .after(play_animations), ) .add_systems( Update, toggle_voxel_visibility .after(rotate_camera) .after(play_animations), ) .add_systems( Update, toggle_rotation.after(rotate_camera).after(play_animations), ) .add_systems( Update, draw_gizmo .after(handle_mouse_clicks) .after(change_main_object) .after(toggle_irradiance_volumes) .after(toggle_voxel_visibility) .after(toggle_rotation), ) .add_systems( Update, update_text .after(handle_mouse_clicks) .after(change_main_object) .after(toggle_irradiance_volumes) .after(toggle_voxel_visibility) .after(toggle_rotation), ) .run(); } // Spawns all the scene objects. 
fn setup(mut commands: Commands, assets: Res<ExampleAssets>, app_status: Res<AppStatus>) { spawn_main_scene(&mut commands, &assets); spawn_camera(&mut commands, &assets); spawn_irradiance_volume(&mut commands, &assets); spawn_light(&mut commands); spawn_sphere(&mut commands, &assets); spawn_voxel_cube_parent(&mut commands); spawn_fox(&mut commands, &assets); spawn_text(&mut commands, &app_status); } fn spawn_main_scene(commands: &mut Commands, assets: &ExampleAssets) { commands.spawn(SceneRoot(assets.main_scene.clone())); } fn spawn_camera(commands: &mut Commands, assets: &ExampleAssets) { commands.spawn(( Camera3d::default(), Transform::from_xyz(-10.012, 4.8605, 13.281).looking_at(Vec3::ZERO, Vec3::Y), Skybox { image: assets.skybox.clone(), brightness: 150.0, ..default() }, )); } fn spawn_irradiance_volume(commands: &mut Commands, assets: &ExampleAssets) { commands.spawn(( Transform::from_matrix(VOXEL_FROM_WORLD), IrradianceVolume { voxels: assets.irradiance_volume.clone(), intensity: IRRADIANCE_VOLUME_INTENSITY, ..default() }, )); } fn spawn_light(commands: &mut Commands) { commands.spawn(( PointLight { intensity: 250000.0, shadows_enabled: true, ..default() }, Transform::from_xyz(4.0762, 5.9039, 1.0055), )); } fn spawn_sphere(commands: &mut Commands, assets: &ExampleAssets) { commands .spawn(( Mesh3d(assets.main_sphere.clone()), MeshMaterial3d(assets.main_sphere_material.clone()), Transform::from_xyz(0.0, SPHERE_SCALE, 0.0).with_scale(Vec3::splat(SPHERE_SCALE)), )) .insert(MainObject); } fn spawn_voxel_cube_parent(commands: &mut Commands) { commands.spawn((Visibility::Hidden, Transform::default(), VoxelCubeParent)); } fn spawn_fox(commands: &mut Commands, assets: &ExampleAssets) { commands.spawn(( SceneRoot(assets.fox.clone()), Visibility::Hidden, Transform::from_scale(Vec3::splat(FOX_SCALE)), MainObject, )); } fn spawn_text(commands: &mut Commands, app_status: &AppStatus) { commands.spawn(( app_status.create_text(), Node { position_type: PositionType::Absolute, 
bottom: px(12), left: px(12), ..default() }, )); } // A system that updates the help text. fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) { for mut text in text_query.iter_mut() { *text = app_status.create_text(); } } impl AppStatus { // Constructs the help text at the bottom of the screen based on the // application status. fn create_text(&self) -> Text { let irradiance_volume_help_text = if self.irradiance_volume_present { DISABLE_IRRADIANCE_VOLUME_HELP_TEXT } else { ENABLE_IRRADIANCE_VOLUME_HELP_TEXT }; let voxels_help_text = if self.voxels_visible { HIDE_VOXELS_HELP_TEXT } else { SHOW_VOXELS_HELP_TEXT }; let rotation_help_text = if self.rotating { STOP_ROTATION_HELP_TEXT } else { START_ROTATION_HELP_TEXT }; let switch_mesh_help_text = match self.model { ExampleModel::Sphere => SWITCH_TO_FOX_HELP_TEXT, ExampleModel::Fox => SWITCH_TO_SPHERE_HELP_TEXT, }; format!( "{CLICK_TO_MOVE_HELP_TEXT}\n\ {voxels_help_text}\n\ {irradiance_volume_help_text}\n\ {rotation_help_text}\n\ {switch_mesh_help_text}" ) .into() } } // Rotates the camera a bit every frame. fn rotate_camera( mut camera_query: Query<&mut Transform, With<Camera3d>>, time: Res<Time>, app_status: Res<AppStatus>, ) { if !app_status.rotating { return; } for mut transform in camera_query.iter_mut() { transform.translation = Vec2::from_angle(ROTATION_SPEED * time.delta_secs()) .rotate(transform.translation.xz()) .extend(transform.translation.y) .xzy(); transform.look_at(Vec3::ZERO, Vec3::Y); } } // Toggles between the unskinned sphere model and the skinned fox model if the // user requests it. 
fn change_main_object( keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>, mut sphere_query: Query<&mut Visibility, (With<MainObject>, With<Mesh3d>, Without<SceneRoot>)>, mut fox_query: Query<&mut Visibility, (With<MainObject>, With<SceneRoot>)>, ) { if !keyboard.just_pressed(KeyCode::Tab) { return; } let Some(mut sphere_visibility) = sphere_query.iter_mut().next() else { return; }; let Some(mut fox_visibility) = fox_query.iter_mut().next() else { return; }; match app_status.model { ExampleModel::Sphere => { *sphere_visibility = Visibility::Hidden; *fox_visibility = Visibility::Visible; app_status.model = ExampleModel::Fox; } ExampleModel::Fox => { *sphere_visibility = Visibility::Visible; *fox_visibility = Visibility::Hidden; app_status.model = ExampleModel::Sphere; } } } impl Default for AppStatus { fn default() -> Self { Self { irradiance_volume_present: true, rotating: true, model: ExampleModel::Sphere, voxels_visible: false, } } } // Turns on and off the irradiance volume as requested by the user. 
fn toggle_irradiance_volumes( mut commands: Commands, keyboard: Res<ButtonInput<KeyCode>>, light_probe_query: Query<Entity, With<LightProbe>>, mut app_status: ResMut<AppStatus>, assets: Res<ExampleAssets>, mut ambient_light: ResMut<GlobalAmbientLight>, ) { if !keyboard.just_pressed(KeyCode::Space) { return; }; let Some(light_probe) = light_probe_query.iter().next() else { return; }; if app_status.irradiance_volume_present { commands.entity(light_probe).remove::<IrradianceVolume>(); ambient_light.brightness = AMBIENT_LIGHT_BRIGHTNESS * IRRADIANCE_VOLUME_INTENSITY; app_status.irradiance_volume_present = false; } else { commands.entity(light_probe).insert(IrradianceVolume { voxels: assets.irradiance_volume.clone(), intensity: IRRADIANCE_VOLUME_INTENSITY, ..default() }); ambient_light.brightness = 0.0; app_status.irradiance_volume_present = true; } } fn toggle_rotation(keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>) { if keyboard.just_pressed(KeyCode::Enter) { app_status.rotating = !app_status.rotating; } } // Handles clicks on the plane that reposition the object. fn handle_mouse_clicks( buttons: Res<ButtonInput<MouseButton>>, windows: Query<&Window, With<PrimaryWindow>>, cameras: Query<(&Camera, &GlobalTransform)>, mut main_objects: Query<&mut Transform, With<MainObject>>, ) { if !buttons.pressed(MouseButton::Left) { return; } let Some(mouse_position) = windows.iter().next().and_then(Window::cursor_position) else { return; }; let Some((camera, camera_transform)) = cameras.iter().next() else { return; }; // Figure out where the user clicked on the plane. let Ok(ray) = camera.viewport_to_world(camera_transform, mouse_position) else { return; }; let Some(ray_distance) = ray.intersect_plane(Vec3::ZERO, InfinitePlane3d::new(Vec3::Y)) else { return; }; let plane_intersection = ray.origin + ray.direction.normalize() * ray_distance; // Move all the main objects. 
for mut transform in main_objects.iter_mut() { transform.translation = vec3( plane_intersection.x, transform.translation.y, plane_intersection.z, ); } } impl FromWorld for ExampleAssets { fn from_world(world: &mut World) -> Self { let fox_animation = world.load_asset(GltfAssetLabel::Animation(1).from_asset("models/animated/Fox.glb")); let (fox_animation_graph, fox_animation_node) = AnimationGraph::from_clip(fox_animation.clone()); ExampleAssets { main_sphere: world.add_asset(Sphere::default().mesh().uv(32, 18)), fox: world.load_asset(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")), main_sphere_material: world.add_asset(Color::from(SILVER)), main_scene: world.load_asset( GltfAssetLabel::Scene(0) .from_asset("models/IrradianceVolumeExample/IrradianceVolumeExample.glb"), ), irradiance_volume: world.load_asset("irradiance_volumes/Example.vxgi.ktx2"), fox_animation_graph: world.add_asset(fox_animation_graph), fox_animation_node, voxel_cube: world.add_asset(Cuboid::default()), // Just use a specular map for the skybox since it's not too blurry. // In reality you wouldn't do this--you'd use a real skybox texture--but // reusing the textures like this saves space in the Bevy repository. skybox: world.load_asset("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), } } } // Plays the animation on the fox. 
fn play_animations( mut commands: Commands, assets: Res<ExampleAssets>, mut players: Query<(Entity, &mut AnimationPlayer), Without<AnimationGraphHandle>>, ) { for (entity, mut player) in players.iter_mut() { commands .entity(entity) .insert(AnimationGraphHandle(assets.fox_animation_graph.clone())); player.play(assets.fox_animation_node).repeat(); } } fn create_cubes( image_assets: Res<Assets<Image>>, mut commands: Commands, irradiance_volumes: Query<(&IrradianceVolume, &GlobalTransform)>, voxel_cube_parents: Query<Entity, With<VoxelCubeParent>>, voxel_cubes: Query<Entity, With<VoxelCube>>, example_assets: Res<ExampleAssets>, mut voxel_visualization_material_assets: ResMut<Assets<VoxelVisualizationMaterial>>, ) { // If voxel cubes have already been spawned, don't do anything. if !voxel_cubes.is_empty() { return; } let Some(voxel_cube_parent) = voxel_cube_parents.iter().next() else { return; }; for (irradiance_volume, global_transform) in irradiance_volumes.iter() { let Some(image) = image_assets.get(&irradiance_volume.voxels) else { continue; }; let resolution = image.texture_descriptor.size; let voxel_cube_material = voxel_visualization_material_assets.add(ExtendedMaterial { base: StandardMaterial::from(Color::from(RED)), extension: VoxelVisualizationExtension { irradiance_volume_info: VoxelVisualizationIrradianceVolumeInfo { world_from_voxel: VOXEL_FROM_WORLD.inverse(), voxel_from_world: VOXEL_FROM_WORLD, resolution: uvec3( resolution.width, resolution.height, resolution.depth_or_array_layers, ), intensity: IRRADIANCE_VOLUME_INTENSITY, }, }, }); let scale = vec3( 1.0 / resolution.width as f32, 1.0 / resolution.height as f32, 1.0 / resolution.depth_or_array_layers as f32, ); // Spawn a cube for each voxel. 
for z in 0..resolution.depth_or_array_layers { for y in 0..resolution.height { for x in 0..resolution.width { let uvw = (uvec3(x, y, z).as_vec3() + 0.5) * scale - 0.5; let pos = global_transform.transform_point(uvw); let voxel_cube = commands .spawn(( Mesh3d(example_assets.voxel_cube.clone()), MeshMaterial3d(voxel_cube_material.clone()), Transform::from_scale(Vec3::splat(VOXEL_CUBE_SCALE)) .with_translation(pos), )) .insert(VoxelCube) .insert(NotShadowCaster) .id(); commands.entity(voxel_cube_parent).add_child(voxel_cube); } } } } } // Draws a gizmo showing the bounds of the irradiance volume. fn draw_gizmo( mut gizmos: Gizmos, irradiance_volume_query: Query<&GlobalTransform, With<IrradianceVolume>>, app_status: Res<AppStatus>, ) { if app_status.voxels_visible { for transform in irradiance_volume_query.iter() { gizmos.cube(*transform, GIZMO_COLOR); } } } // Handles a request from the user to toggle the voxel visibility on and off. fn toggle_voxel_visibility( keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>, mut voxel_cube_parent_query: Query<&mut Visibility, With<VoxelCubeParent>>, ) { if !keyboard.just_pressed(KeyCode::Backspace) { return; } app_status.voxels_visible = !app_status.voxels_visible; for mut visibility in voxel_cube_parent_query.iter_mut() { *visibility = if app_status.voxels_visible { Visibility::Visible } else { Visibility::Hidden }; } } impl MaterialExtension for VoxelVisualizationExtension { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/reflection_probes.rs
examples/3d/reflection_probes.rs
//! This example shows how to place reflection probes in the scene. //! //! Press Space to cycle through the reflection modes: //! //! 1. A pre-generated [`EnvironmentMapLight`] acting as a reflection probe, with both the skybox and cubes //! 2. A runtime-generated [`GeneratedEnvironmentMapLight`] acting as a reflection probe with just the skybox //! 3. A pre-generated [`EnvironmentMapLight`] with just the skybox //! //! Press Enter to pause or resume rotation. //! //! Reflection probes don't work on WebGL 2 or WebGPU. use bevy::{ camera::Exposure, core_pipeline::{tonemapping::Tonemapping, Skybox}, pbr::generate::generate_environment_map_light, prelude::*, render::{render_resource::TextureUsages, view::Hdr}, }; use std::{ f32::consts::PI, fmt::{Display, Formatter, Result as FmtResult}, }; static STOP_ROTATION_HELP_TEXT: &str = "Press Enter to stop rotation"; static START_ROTATION_HELP_TEXT: &str = "Press Enter to start rotation"; static REFLECTION_MODE_HELP_TEXT: &str = "Press Space to switch reflection mode"; const ENV_MAP_INTENSITY: f32 = 5000.0; // The mode the application is in. #[derive(Resource)] struct AppStatus { // Which environment maps the user has requested to display. reflection_mode: ReflectionMode, // Whether the user has requested the scene to rotate. rotating: bool, // The current roughness of the central sphere sphere_roughness: f32, } // Which environment maps the user has requested to display. #[derive(Clone, Copy, PartialEq)] enum ReflectionMode { // Only a world environment map is shown. EnvironmentMap = 0, // Both a world environment map and a reflection probe are present. The // reflection probe is shown in the sphere. ReflectionProbe = 1, // A generated environment map is shown. GeneratedEnvironmentMap = 2, } // The various reflection maps. #[derive(Resource)] struct Cubemaps { // The blurry diffuse cubemap that reflects the world, but not the cubes. 
diffuse_environment_map: Handle<Image>, // The specular cubemap mip chain that reflects the world, but not the cubes. specular_environment_map: Handle<Image>, // The specular cubemap mip chain that reflects both the world and the cubes. specular_reflection_probe: Handle<Image>, } fn main() { // Create the app. App::new() .add_plugins(DefaultPlugins) .init_resource::<AppStatus>() .init_resource::<Cubemaps>() .add_systems(Startup, setup) .add_systems(PreUpdate, add_environment_map_to_camera) .add_systems( Update, change_reflection_type.before(generate_environment_map_light), ) .add_systems(Update, toggle_rotation) .add_systems(Update, change_sphere_roughness) .add_systems( Update, rotate_camera .after(toggle_rotation) .after(change_reflection_type), ) .add_systems(Update, update_text.after(rotate_camera)) .add_systems(Update, setup_environment_map_usage) .run(); } // Spawns all the scene objects. fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, asset_server: Res<AssetServer>, app_status: Res<AppStatus>, cubemaps: Res<Cubemaps>, ) { spawn_camera(&mut commands); spawn_sphere(&mut commands, &mut meshes, &mut materials, &app_status); spawn_reflection_probe(&mut commands, &cubemaps); spawn_scene(&mut commands, &asset_server); spawn_text(&mut commands, &app_status); } // Spawns the cubes, light, and camera. fn spawn_scene(commands: &mut Commands, asset_server: &AssetServer) { commands.spawn(( SceneRoot(asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/cubes/Cubes.glb"))), CubesScene, )); } // Spawns the camera. fn spawn_camera(commands: &mut Commands) { commands.spawn(( Camera3d::default(), Hdr, Exposure { ev100: 11.0 }, Tonemapping::AcesFitted, Transform::from_xyz(-3.883, 0.325, 2.781).looking_at(Vec3::ZERO, Vec3::Y), )); } // Creates the sphere mesh and spawns it. 
fn spawn_sphere( commands: &mut Commands, meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, app_status: &AppStatus, ) { // Create a sphere mesh. let sphere_mesh = meshes.add(Sphere::new(1.0).mesh().ico(7).unwrap()); // Create a sphere. commands.spawn(( Mesh3d(sphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: Srgba::hex("#ffffff").unwrap().into(), metallic: 1.0, perceptual_roughness: app_status.sphere_roughness, ..StandardMaterial::default() })), SphereMaterial, )); } // Spawns the reflection probe. fn spawn_reflection_probe(commands: &mut Commands, cubemaps: &Cubemaps) { commands.spawn(( LightProbe, EnvironmentMapLight { diffuse_map: cubemaps.diffuse_environment_map.clone(), specular_map: cubemaps.specular_reflection_probe.clone(), intensity: ENV_MAP_INTENSITY, ..default() }, // 2.0 because the sphere's radius is 1.0 and we want to fully enclose it. Transform::from_scale(Vec3::splat(2.0)), )); } // Spawns the help text. fn spawn_text(commands: &mut Commands, app_status: &AppStatus) { // Create the text. commands.spawn(( app_status.create_text(), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } // Adds a world environment map to the camera. This separate system is needed because the camera is // managed by the scene spawner, as it's part of the glTF file with the cubes, so we have to add // the environment map after the fact. fn add_environment_map_to_camera( mut commands: Commands, query: Query<Entity, Added<Camera3d>>, cubemaps: Res<Cubemaps>, ) { for camera_entity in query.iter() { commands .entity(camera_entity) .insert(create_camera_environment_map_light(&cubemaps)) .insert(Skybox { image: cubemaps.specular_environment_map.clone(), brightness: ENV_MAP_INTENSITY, ..default() }); } } // A system that handles switching between different reflection modes. 
fn change_reflection_type( mut commands: Commands, light_probe_query: Query<Entity, With<LightProbe>>, cubes_scene_query: Query<Entity, With<CubesScene>>, camera_query: Query<Entity, With<Camera3d>>, keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>, cubemaps: Res<Cubemaps>, asset_server: Res<AssetServer>, ) { // Only do anything if space was pressed. if !keyboard.just_pressed(KeyCode::Space) { return; } // Advance to the next reflection mode. app_status.reflection_mode = ReflectionMode::try_from((app_status.reflection_mode as u32 + 1) % 3).unwrap(); // Remove light probes for light_probe in light_probe_query.iter() { commands.entity(light_probe).despawn(); } // Remove existing cube scenes for scene_entity in cubes_scene_query.iter() { commands.entity(scene_entity).despawn(); } match app_status.reflection_mode { ReflectionMode::EnvironmentMap | ReflectionMode::GeneratedEnvironmentMap => {} ReflectionMode::ReflectionProbe => { spawn_reflection_probe(&mut commands, &cubemaps); spawn_scene(&mut commands, &asset_server); } } // Update the environment-map components on the camera entity/entities for camera in camera_query.iter() { // Remove any existing environment-map components commands .entity(camera) .remove::<(EnvironmentMapLight, GeneratedEnvironmentMapLight)>(); match app_status.reflection_mode { // A baked or reflection-probe environment map ReflectionMode::EnvironmentMap | ReflectionMode::ReflectionProbe => { commands .entity(camera) .insert(create_camera_environment_map_light(&cubemaps)); } // GPU-filtered environment map generated at runtime ReflectionMode::GeneratedEnvironmentMap => { commands .entity(camera) .insert(GeneratedEnvironmentMapLight { environment_map: cubemaps.specular_environment_map.clone(), intensity: ENV_MAP_INTENSITY, ..default() }); } } } } // A system that handles enabling and disabling rotation. 
fn toggle_rotation(keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>) { if keyboard.just_pressed(KeyCode::Enter) { app_status.rotating = !app_status.rotating; } } // A system that updates the help text. fn update_text(mut text_query: Query<&mut Text>, app_status: Res<AppStatus>) { for mut text in text_query.iter_mut() { *text = app_status.create_text(); } } impl TryFrom<u32> for ReflectionMode { type Error = (); fn try_from(value: u32) -> Result<Self, Self::Error> { match value { 0 => Ok(ReflectionMode::EnvironmentMap), 1 => Ok(ReflectionMode::ReflectionProbe), 2 => Ok(ReflectionMode::GeneratedEnvironmentMap), _ => Err(()), } } } impl Display for ReflectionMode { fn fmt(&self, formatter: &mut Formatter<'_>) -> FmtResult { let text = match *self { ReflectionMode::EnvironmentMap => "Environment map", ReflectionMode::ReflectionProbe => "Reflection probe", ReflectionMode::GeneratedEnvironmentMap => "Generated environment map", }; formatter.write_str(text) } } impl AppStatus { // Constructs the help text at the bottom of the screen based on the // application status. fn create_text(&self) -> Text { let rotation_help_text = if self.rotating { STOP_ROTATION_HELP_TEXT } else { START_ROTATION_HELP_TEXT }; format!( "{}\n{}\nRoughness: {:.2}\n{}\nUp/Down arrows to change roughness", self.reflection_mode, rotation_help_text, self.sphere_roughness, REFLECTION_MODE_HELP_TEXT ) .into() } } // Creates the world environment map light, used as a fallback if no reflection // probe is applicable to a mesh. fn create_camera_environment_map_light(cubemaps: &Cubemaps) -> EnvironmentMapLight { EnvironmentMapLight { diffuse_map: cubemaps.diffuse_environment_map.clone(), specular_map: cubemaps.specular_environment_map.clone(), intensity: ENV_MAP_INTENSITY, ..default() } } // Rotates the camera a bit every frame. 
fn rotate_camera( time: Res<Time>, mut camera_query: Query<&mut Transform, With<Camera3d>>, app_status: Res<AppStatus>, ) { if !app_status.rotating { return; } for mut transform in camera_query.iter_mut() { transform.translation = Vec2::from_angle(time.delta_secs() * PI / 5.0) .rotate(transform.translation.xz()) .extend(transform.translation.y) .xzy(); transform.look_at(Vec3::ZERO, Vec3::Y); } } // Loads the cubemaps from the assets directory. impl FromWorld for Cubemaps { fn from_world(world: &mut World) -> Self { Cubemaps { diffuse_environment_map: world .load_asset("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_environment_map: world .load_asset("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), specular_reflection_probe: world .load_asset("environment_maps/cubes_reflection_probe_specular_rgb9e5_zstd.ktx2"), } } } fn setup_environment_map_usage(cubemaps: Res<Cubemaps>, mut images: ResMut<Assets<Image>>) { if let Some(image) = images.get_mut(&cubemaps.specular_environment_map) && !image .texture_descriptor .usage .contains(TextureUsages::COPY_SRC) { image.texture_descriptor.usage |= TextureUsages::COPY_SRC; } } impl Default for AppStatus { fn default() -> Self { Self { reflection_mode: ReflectionMode::ReflectionProbe, rotating: false, sphere_roughness: 0.2, } } } #[derive(Component)] struct SphereMaterial; #[derive(Component)] struct CubesScene; // A system that changes the sphere's roughness with up/down arrow keys fn change_sphere_roughness( keyboard: Res<ButtonInput<KeyCode>>, mut app_status: ResMut<AppStatus>, mut materials: ResMut<Assets<StandardMaterial>>, sphere_query: Query<&MeshMaterial3d<StandardMaterial>, With<SphereMaterial>>, ) { let roughness_delta = if keyboard.pressed(KeyCode::ArrowUp) { 0.01 // Decrease roughness } else if keyboard.pressed(KeyCode::ArrowDown) { -0.01 // Increase roughness } else { 0.0 // No change }; if roughness_delta != 0.0 { // Update the app status app_status.sphere_roughness = (app_status.sphere_roughness + 
roughness_delta).clamp(0.0, 1.0); // Update the sphere material for material_handle in sphere_query.iter() { if let Some(material) = materials.get_mut(&material_handle.0) { material.perceptual_roughness = app_status.sphere_roughness; } } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/motion_blur.rs
examples/3d/motion_blur.rs
//! Demonstrates how to enable per-object motion blur. This rendering feature can be configured per //! camera using the [`MotionBlur`] component.z use bevy::{ image::{ImageAddressMode, ImageFilterMode, ImageSampler, ImageSamplerDescriptor}, math::ops, post_process::motion_blur::MotionBlur, prelude::*, }; fn main() { let mut app = App::new(); app.add_plugins(DefaultPlugins) .add_systems(Startup, (setup_camera, setup_scene, setup_ui)) .add_systems(Update, (keyboard_inputs, move_cars, move_camera).chain()) .run(); } fn setup_camera(mut commands: Commands) { commands.spawn(( Camera3d::default(), // Add the `MotionBlur` component to a camera to enable motion blur. // Motion blur requires the depth and motion vector prepass, which this bundle adds. // Configure the amount and quality of motion blur per-camera using this component. MotionBlur { shutter_angle: 1.0, samples: 2, }, // MSAA and Motion Blur together are not compatible on WebGL #[cfg(all(feature = "webgl2", target_arch = "wasm32", not(feature = "webgpu")))] Msaa::Off, )); } // Everything past this point is used to build the example, but isn't required to use motion blur. 
#[derive(Resource)] enum CameraMode { Track, Chase, } #[derive(Component)] struct Moves(f32); #[derive(Component)] struct CameraTracked; #[derive(Component)] struct Rotates; fn setup_scene( asset_server: Res<AssetServer>, mut images: ResMut<Assets<Image>>, mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { commands.insert_resource(GlobalAmbientLight { color: Color::WHITE, brightness: 300.0, ..default() }); commands.insert_resource(CameraMode::Chase); commands.spawn(( DirectionalLight { illuminance: 3_000.0, shadows_enabled: true, ..default() }, Transform::default().looking_to(Vec3::new(-1.0, -0.7, -1.0), Vec3::X), )); // Sky commands.spawn(( Mesh3d(meshes.add(Sphere::default())), MeshMaterial3d(materials.add(StandardMaterial { unlit: true, base_color: Color::linear_rgb(0.1, 0.6, 1.0), ..default() })), Transform::default().with_scale(Vec3::splat(-4000.0)), )); // Ground let mut plane: Mesh = Plane3d::default().into(); let uv_size = 4000.0; let uvs = vec![[uv_size, 0.0], [0.0, 0.0], [0.0, uv_size], [uv_size; 2]]; plane.insert_attribute(Mesh::ATTRIBUTE_UV_0, uvs); commands.spawn(( Mesh3d(meshes.add(plane)), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::WHITE, perceptual_roughness: 1.0, base_color_texture: Some(images.add(uv_debug_texture())), ..default() })), Transform::from_xyz(0.0, -0.65, 0.0).with_scale(Vec3::splat(80.)), )); spawn_cars(&asset_server, &mut meshes, &mut materials, &mut commands); spawn_trees(&mut meshes, &mut materials, &mut commands); spawn_barriers(&mut meshes, &mut materials, &mut commands); } fn spawn_cars( asset_server: &AssetServer, meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, commands: &mut Commands, ) { const N_CARS: usize = 20; let box_mesh = meshes.add(Cuboid::new(0.3, 0.15, 0.55)); let cylinder = meshes.add(Cylinder::default()); let logo = asset_server.load("branding/icon.png"); let wheel_matl = materials.add(StandardMaterial { 
base_color: Color::WHITE, base_color_texture: Some(logo.clone()), ..default() }); let mut matl = |color| { materials.add(StandardMaterial { base_color: color, ..default() }) }; let colors = [ matl(Color::linear_rgb(1.0, 0.0, 0.0)), matl(Color::linear_rgb(1.0, 1.0, 0.0)), matl(Color::BLACK), matl(Color::linear_rgb(0.0, 0.0, 1.0)), matl(Color::linear_rgb(0.0, 1.0, 0.0)), matl(Color::linear_rgb(1.0, 0.0, 1.0)), matl(Color::linear_rgb(0.5, 0.5, 0.0)), matl(Color::linear_rgb(1.0, 0.5, 0.0)), ]; let make_wheel = |x: f32, z: f32| { ( Mesh3d(cylinder.clone()), MeshMaterial3d(wheel_matl.clone()), Transform::from_xyz(0.14 * x, -0.045, 0.15 * z) .with_scale(Vec3::new(0.15, 0.04, 0.15)) .with_rotation(Quat::from_rotation_z(std::f32::consts::FRAC_PI_2)), Rotates, ) }; for i in 0..N_CARS { let color = colors[i % colors.len()].clone(); commands .spawn(( Mesh3d(box_mesh.clone()), MeshMaterial3d(color.clone()), Transform::from_scale(Vec3::splat(0.5)), Moves(i as f32 * 2.0), children![ ( Mesh3d(box_mesh.clone()), MeshMaterial3d(color), Transform::from_xyz(0.0, 0.08, 0.03).with_scale(Vec3::new(1.0, 1.0, 0.5)), ), make_wheel(1.0, 1.0), make_wheel(1.0, -1.0), make_wheel(-1.0, 1.0), make_wheel(-1.0, -1.0) ], )) .insert_if(CameraTracked, || i == 0); } } fn spawn_barriers( meshes: &mut Assets<Mesh>, materials: &mut Assets<StandardMaterial>, commands: &mut Commands, ) { const N_CONES: usize = 100; let capsule = meshes.add(Capsule3d::default()); let matl = materials.add(StandardMaterial { base_color: Color::srgb_u8(255, 87, 51), reflectance: 1.0, ..default() }); let mut spawn_with_offset = |offset: f32| { for i in 0..N_CONES { let pos = race_track_pos( offset, (i as f32) / (N_CONES as f32) * std::f32::consts::PI * 2.0, ); commands.spawn(( Mesh3d(capsule.clone()), MeshMaterial3d(matl.clone()), Transform::from_xyz(pos.x, -0.65, pos.y).with_scale(Vec3::splat(0.07)), )); } }; spawn_with_offset(0.04); spawn_with_offset(-0.04); } fn spawn_trees( meshes: &mut Assets<Mesh>, materials: &mut 
Assets<StandardMaterial>, commands: &mut Commands, ) { const N_TREES: usize = 30; let capsule = meshes.add(Capsule3d::default()); let sphere = meshes.add(Sphere::default()); let leaves = materials.add(Color::linear_rgb(0.0, 1.0, 0.0)); let trunk = materials.add(Color::linear_rgb(0.4, 0.2, 0.2)); let mut spawn_with_offset = |offset: f32| { for i in 0..N_TREES { let pos = race_track_pos( offset, (i as f32) / (N_TREES as f32) * std::f32::consts::PI * 2.0, ); let [x, z] = pos.into(); commands.spawn(( Mesh3d(sphere.clone()), MeshMaterial3d(leaves.clone()), Transform::from_xyz(x, -0.3, z).with_scale(Vec3::splat(0.3)), )); commands.spawn(( Mesh3d(capsule.clone()), MeshMaterial3d(trunk.clone()), Transform::from_xyz(x, -0.5, z).with_scale(Vec3::new(0.05, 0.3, 0.05)), )); } }; spawn_with_offset(0.07); spawn_with_offset(-0.07); } fn setup_ui(mut commands: Commands) { commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, children![ TextSpan::default(), TextSpan::default(), TextSpan::new("1/2: -/+ shutter angle (blur amount)\n"), TextSpan::new("3/4: -/+ sample count (blur quality)\n"), TextSpan::new("Spacebar: cycle camera\n"), ], )); } fn keyboard_inputs( mut motion_blur: Single<&mut MotionBlur>, presses: Res<ButtonInput<KeyCode>>, text: Single<Entity, With<Text>>, mut writer: TextUiWriter, mut camera: ResMut<CameraMode>, ) { if presses.just_pressed(KeyCode::Digit1) { motion_blur.shutter_angle -= 0.25; } else if presses.just_pressed(KeyCode::Digit2) { motion_blur.shutter_angle += 0.25; } else if presses.just_pressed(KeyCode::Digit3) { motion_blur.samples = motion_blur.samples.saturating_sub(1); } else if presses.just_pressed(KeyCode::Digit4) { motion_blur.samples += 1; } else if presses.just_pressed(KeyCode::Space) { *camera = match *camera { CameraMode::Track => CameraMode::Chase, CameraMode::Chase => CameraMode::Track, }; } motion_blur.shutter_angle = motion_blur.shutter_angle.clamp(0.0, 1.0); 
motion_blur.samples = motion_blur.samples.clamp(0, 64); let entity = *text; *writer.text(entity, 1) = format!("Shutter angle: {:.2}\n", motion_blur.shutter_angle); *writer.text(entity, 2) = format!("Samples: {:.5}\n", motion_blur.samples); } /// Parametric function for a looping race track. `offset` will return the point offset /// perpendicular to the track at the given point. fn race_track_pos(offset: f32, t: f32) -> Vec2 { let x_tweak = 2.0; let y_tweak = 3.0; let scale = 8.0; let x0 = ops::sin(x_tweak * t); let y0 = ops::cos(y_tweak * t); let dx = x_tweak * ops::cos(x_tweak * t); let dy = y_tweak * -ops::sin(y_tweak * t); let dl = ops::hypot(dx, dy); let x = x0 + offset * dy / dl; let y = y0 - offset * dx / dl; Vec2::new(x, y) * scale } fn move_cars( time: Res<Time>, mut movables: Query<(&mut Transform, &Moves, &Children)>, mut spins: Query<&mut Transform, (Without<Moves>, With<Rotates>)>, ) { for (mut transform, moves, children) in &mut movables { let time = time.elapsed_secs() * 0.25; let t = time + 0.5 * moves.0; let dx = ops::cos(t); let dz = -ops::sin(3.0 * t); let speed_variation = (dx * dx + dz * dz).sqrt() * 0.15; let t = t + speed_variation; let prev = transform.translation; transform.translation.x = race_track_pos(0.0, t).x; transform.translation.z = race_track_pos(0.0, t).y; transform.translation.y = -0.59; let delta = transform.translation - prev; transform.look_to(delta, Vec3::Y); for child in children.iter() { let Ok(mut wheel) = spins.get_mut(child) else { continue; }; let radius = wheel.scale.x; let circumference = 2.0 * std::f32::consts::PI * radius; let angle = delta.length() / circumference * std::f32::consts::PI * 2.0; wheel.rotate_local_y(angle); } } } fn move_camera( camera: Single<(&mut Transform, &mut Projection), Without<CameraTracked>>, tracked: Single<&Transform, With<CameraTracked>>, mode: Res<CameraMode>, ) { let (mut transform, mut projection) = camera.into_inner(); match *mode { CameraMode::Track => { 
transform.look_at(tracked.translation, Vec3::Y); transform.translation = Vec3::new(15.0, -0.5, 0.0); if let Projection::Perspective(perspective) = &mut *projection { perspective.fov = 0.05; } } CameraMode::Chase => { transform.translation = tracked.translation + Vec3::new(0.0, 0.15, 0.0) + tracked.back() * 0.6; transform.look_to(tracked.forward(), Vec3::Y); if let Projection::Perspective(perspective) = &mut *projection { perspective.fov = 1.0; } } } } fn uv_debug_texture() -> Image { use bevy::{asset::RenderAssetUsages, render::render_resource::*}; const TEXTURE_SIZE: usize = 7; let mut palette = [ 164, 164, 164, 255, 168, 168, 168, 255, 153, 153, 153, 255, 139, 139, 139, 255, 153, 153, 153, 255, 177, 177, 177, 255, 159, 159, 159, 255, ]; let mut texture_data = [0; TEXTURE_SIZE * TEXTURE_SIZE * 4]; for y in 0..TEXTURE_SIZE { let offset = TEXTURE_SIZE * y * 4; texture_data[offset..(offset + TEXTURE_SIZE * 4)].copy_from_slice(&palette); palette.rotate_right(12); } let mut img = Image::new_fill( Extent3d { width: TEXTURE_SIZE as u32, height: TEXTURE_SIZE as u32, depth_or_array_layers: 1, }, TextureDimension::D2, &texture_data, TextureFormat::Rgba8UnormSrgb, RenderAssetUsages::RENDER_WORLD, ); img.sampler = ImageSampler::Descriptor(ImageSamplerDescriptor { address_mode_u: ImageAddressMode::Repeat, address_mode_v: ImageAddressMode::MirrorRepeat, mag_filter: ImageFilterMode::Nearest, ..ImageSamplerDescriptor::linear() }); img }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/color_grading.rs
examples/3d/color_grading.rs
//! Demonstrates color grading with an interactive adjustment UI. use std::{ f32::consts::PI, fmt::{self, Formatter}, }; use bevy::{ light::CascadeShadowConfigBuilder, prelude::*, render::view::{ColorGrading, ColorGradingGlobal, ColorGradingSection, Hdr}, }; use std::fmt::Display; static FONT_PATH: &str = "fonts/FiraMono-Medium.ttf"; /// How quickly the value changes per frame. const OPTION_ADJUSTMENT_SPEED: f32 = 0.003; /// The color grading section that the user has selected: highlights, midtones, /// or shadows. #[derive(Clone, Copy, PartialEq)] enum SelectedColorGradingSection { Highlights, Midtones, Shadows, } /// The global option that the user has selected. /// /// See the documentation of [`ColorGradingGlobal`] for more information about /// each field here. #[derive(Clone, Copy, PartialEq, Default)] enum SelectedGlobalColorGradingOption { #[default] Exposure, Temperature, Tint, Hue, } /// The section-specific option that the user has selected. /// /// See the documentation of [`ColorGradingSection`] for more information about /// each field here. #[derive(Clone, Copy, PartialEq)] enum SelectedSectionColorGradingOption { Saturation, Contrast, Gamma, Gain, Lift, } /// The color grading option that the user has selected. #[derive(Clone, Copy, PartialEq, Resource)] enum SelectedColorGradingOption { /// The user has selected a global color grading option: one that applies to /// the whole image as opposed to specifically to highlights, midtones, or /// shadows. Global(SelectedGlobalColorGradingOption), /// The user has selected a color grading option that applies only to /// highlights, midtones, or shadows. Section( SelectedColorGradingSection, SelectedSectionColorGradingOption, ), } impl Default for SelectedColorGradingOption { fn default() -> Self { Self::Global(default()) } } /// Buttons consist of three parts: the button itself, a label child, and a /// value child. This specifies one of the three entities. 
#[derive(Clone, Copy, PartialEq, Component)] enum ColorGradingOptionWidgetType { /// The parent button. Button, /// The label of the button. Label, /// The numerical value that the button displays. Value, } #[derive(Clone, Copy, Component)] struct ColorGradingOptionWidget { widget_type: ColorGradingOptionWidgetType, option: SelectedColorGradingOption, } /// A marker component for the help text at the top left of the screen. #[derive(Clone, Copy, Component)] struct HelpText; fn main() { App::new() .add_plugins(DefaultPlugins) .init_resource::<SelectedColorGradingOption>() .add_systems(Startup, setup) .add_systems( Update, ( handle_button_presses, adjust_color_grading_option, update_ui_state, ) .chain(), ) .run(); } fn setup( mut commands: Commands, currently_selected_option: Res<SelectedColorGradingOption>, asset_server: Res<AssetServer>, ) { // Create the scene. add_basic_scene(&mut commands, &asset_server); // Create the root UI element. let font = asset_server.load(FONT_PATH); let color_grading = ColorGrading::default(); add_buttons(&mut commands, &font, &color_grading); // Spawn help text. add_help_text(&mut commands, &font, &currently_selected_option); // Spawn the camera. add_camera(&mut commands, &asset_server, color_grading); } /// Adds all the buttons on the bottom of the scene. fn add_buttons(commands: &mut Commands, font: &Handle<Font>, color_grading: &ColorGrading) { commands.spawn(( // Spawn the parent node that contains all the buttons. Node { flex_direction: FlexDirection::Column, position_type: PositionType::Absolute, row_gap: px(6), left: px(12), bottom: px(12), ..default() }, children![ // Create the first row, which contains the global controls. buttons_for_global_controls(color_grading, font), // Create the rows for individual controls. 
buttons_for_section(SelectedColorGradingSection::Highlights, color_grading, font), buttons_for_section(SelectedColorGradingSection::Midtones, color_grading, font), buttons_for_section(SelectedColorGradingSection::Shadows, color_grading, font), ], )); } /// Adds the buttons for the global controls (those that control the scene as a /// whole as opposed to shadows, midtones, or highlights). fn buttons_for_global_controls(color_grading: &ColorGrading, font: &Handle<Font>) -> impl Bundle { let make_button = |option: SelectedGlobalColorGradingOption| { button_for_value( SelectedColorGradingOption::Global(option), color_grading, font, ) }; // Add the parent node for the row. ( Node::default(), children![ Node { width: px(125), ..default() }, make_button(SelectedGlobalColorGradingOption::Exposure), make_button(SelectedGlobalColorGradingOption::Temperature), make_button(SelectedGlobalColorGradingOption::Tint), make_button(SelectedGlobalColorGradingOption::Hue), ], ) } /// Adds the buttons that control color grading for individual sections /// (highlights, midtones, shadows). fn buttons_for_section( section: SelectedColorGradingSection, color_grading: &ColorGrading, font: &Handle<Font>, ) -> impl Bundle { let make_button = |option| { button_for_value( SelectedColorGradingOption::Section(section, option), color_grading, font, ) }; // Spawn the row container. ( Node { align_items: AlignItems::Center, ..default() }, children![ // Spawn the label ("Highlights", etc.) ( text(&section.to_string(), font, Color::WHITE), Node { width: px(125), ..default() } ), // Spawn the buttons. make_button(SelectedSectionColorGradingOption::Saturation), make_button(SelectedSectionColorGradingOption::Contrast), make_button(SelectedSectionColorGradingOption::Gamma), make_button(SelectedSectionColorGradingOption::Gain), make_button(SelectedSectionColorGradingOption::Lift), ], ) } /// Adds a button that controls one of the color grading values. 
fn button_for_value( option: SelectedColorGradingOption, color_grading: &ColorGrading, font: &Handle<Font>, ) -> impl Bundle { let label = match option { SelectedColorGradingOption::Global(option) => option.to_string(), SelectedColorGradingOption::Section(_, option) => option.to_string(), }; // Add the button node. ( Button, Node { border: UiRect::all(px(1)), width: px(200), justify_content: JustifyContent::Center, align_items: AlignItems::Center, padding: UiRect::axes(px(12), px(6)), margin: UiRect::right(px(12)), border_radius: BorderRadius::MAX, ..default() }, BorderColor::all(Color::WHITE), BackgroundColor(Color::BLACK), ColorGradingOptionWidget { widget_type: ColorGradingOptionWidgetType::Button, option, }, children![ // Add the button label. ( text(&label, font, Color::WHITE), ColorGradingOptionWidget { widget_type: ColorGradingOptionWidgetType::Label, option, }, ), // Add a spacer. Node { flex_grow: 1.0, ..default() }, // Add the value text. ( text( &format!("{:.3}", option.get(color_grading)), font, Color::WHITE, ), ColorGradingOptionWidget { widget_type: ColorGradingOptionWidgetType::Value, option, }, ), ], ) } /// Creates the help text at the top of the screen. fn add_help_text( commands: &mut Commands, font: &Handle<Font>, currently_selected_option: &SelectedColorGradingOption, ) { commands.spawn(( Text::new(create_help_text(currently_selected_option)), TextFont { font: FontSource::from(font), ..default() }, Node { position_type: PositionType::Absolute, left: px(12), top: px(12), ..default() }, HelpText, )); } /// Adds some text to the scene. 
fn text(label: &str, font: &Handle<Font>, color: Color) -> impl Bundle + use<> { ( Text::new(label), TextFont { font: font.into(), font_size: 15.0, ..default() }, TextColor(color), ) } fn add_camera(commands: &mut Commands, asset_server: &AssetServer, color_grading: ColorGrading) { commands.spawn(( Camera3d::default(), Hdr, Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y), color_grading, DistanceFog { color: Color::srgb_u8(43, 44, 47), falloff: FogFalloff::Linear { start: 1.0, end: 8.0, }, ..default() }, EnvironmentMapLight { diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), intensity: 2000.0, ..default() }, )); } fn add_basic_scene(commands: &mut Commands, asset_server: &AssetServer) { // Spawn the main scene. commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/TonemappingTest/TonemappingTest.gltf"), ))); // Spawn the flight helmet. commands.spawn(( SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")), ), Transform::from_xyz(0.5, 0.0, -0.5).with_rotation(Quat::from_rotation_y(-0.15 * PI)), )); // Spawn the light. 
commands.spawn(( DirectionalLight { illuminance: 15000.0, shadows_enabled: true, ..default() }, Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, PI * -0.15, PI * -0.15)), CascadeShadowConfigBuilder { maximum_distance: 3.0, first_cascade_far_bound: 0.9, ..default() } .build(), )); } impl Display for SelectedGlobalColorGradingOption { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let name = match *self { SelectedGlobalColorGradingOption::Exposure => "Exposure", SelectedGlobalColorGradingOption::Temperature => "Temperature", SelectedGlobalColorGradingOption::Tint => "Tint", SelectedGlobalColorGradingOption::Hue => "Hue", }; f.write_str(name) } } impl Display for SelectedColorGradingSection { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let name = match *self { SelectedColorGradingSection::Highlights => "Highlights", SelectedColorGradingSection::Midtones => "Midtones", SelectedColorGradingSection::Shadows => "Shadows", }; f.write_str(name) } } impl Display for SelectedSectionColorGradingOption { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { let name = match *self { SelectedSectionColorGradingOption::Saturation => "Saturation", SelectedSectionColorGradingOption::Contrast => "Contrast", SelectedSectionColorGradingOption::Gamma => "Gamma", SelectedSectionColorGradingOption::Gain => "Gain", SelectedSectionColorGradingOption::Lift => "Lift", }; f.write_str(name) } } impl Display for SelectedColorGradingOption { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { SelectedColorGradingOption::Global(option) => write!(f, "\"{option}\""), SelectedColorGradingOption::Section(section, option) => { write!(f, "\"{option}\" for \"{section}\"") } } } } impl SelectedSectionColorGradingOption { /// Returns the appropriate value in the given color grading section. 
fn get(&self, section: &ColorGradingSection) -> f32 { match *self { SelectedSectionColorGradingOption::Saturation => section.saturation, SelectedSectionColorGradingOption::Contrast => section.contrast, SelectedSectionColorGradingOption::Gamma => section.gamma, SelectedSectionColorGradingOption::Gain => section.gain, SelectedSectionColorGradingOption::Lift => section.lift, } } fn set(&self, section: &mut ColorGradingSection, value: f32) { match *self { SelectedSectionColorGradingOption::Saturation => section.saturation = value, SelectedSectionColorGradingOption::Contrast => section.contrast = value, SelectedSectionColorGradingOption::Gamma => section.gamma = value, SelectedSectionColorGradingOption::Gain => section.gain = value, SelectedSectionColorGradingOption::Lift => section.lift = value, } } } impl SelectedGlobalColorGradingOption { /// Returns the appropriate value in the given set of global color grading /// values. fn get(&self, global: &ColorGradingGlobal) -> f32 { match *self { SelectedGlobalColorGradingOption::Exposure => global.exposure, SelectedGlobalColorGradingOption::Temperature => global.temperature, SelectedGlobalColorGradingOption::Tint => global.tint, SelectedGlobalColorGradingOption::Hue => global.hue, } } /// Sets the appropriate value in the given set of global color grading /// values. fn set(&self, global: &mut ColorGradingGlobal, value: f32) { match *self { SelectedGlobalColorGradingOption::Exposure => global.exposure = value, SelectedGlobalColorGradingOption::Temperature => global.temperature = value, SelectedGlobalColorGradingOption::Tint => global.tint = value, SelectedGlobalColorGradingOption::Hue => global.hue = value, } } } impl SelectedColorGradingOption { /// Returns the appropriate value in the given set of color grading values. 
fn get(&self, color_grading: &ColorGrading) -> f32 { match self { SelectedColorGradingOption::Global(option) => option.get(&color_grading.global), SelectedColorGradingOption::Section( SelectedColorGradingSection::Highlights, option, ) => option.get(&color_grading.highlights), SelectedColorGradingOption::Section(SelectedColorGradingSection::Midtones, option) => { option.get(&color_grading.midtones) } SelectedColorGradingOption::Section(SelectedColorGradingSection::Shadows, option) => { option.get(&color_grading.shadows) } } } /// Sets the appropriate value in the given set of color grading values. fn set(&self, color_grading: &mut ColorGrading, value: f32) { match self { SelectedColorGradingOption::Global(option) => { option.set(&mut color_grading.global, value); } SelectedColorGradingOption::Section( SelectedColorGradingSection::Highlights, option, ) => option.set(&mut color_grading.highlights, value), SelectedColorGradingOption::Section(SelectedColorGradingSection::Midtones, option) => { option.set(&mut color_grading.midtones, value); } SelectedColorGradingOption::Section(SelectedColorGradingSection::Shadows, option) => { option.set(&mut color_grading.shadows, value); } } } } /// Handles mouse clicks on the buttons when the user clicks on a new one. fn handle_button_presses( mut interactions: Query<(&Interaction, &ColorGradingOptionWidget), Changed<Interaction>>, mut currently_selected_option: ResMut<SelectedColorGradingOption>, ) { for (interaction, widget) in interactions.iter_mut() { if widget.widget_type == ColorGradingOptionWidgetType::Button && *interaction == Interaction::Pressed { *currently_selected_option = widget.option; } } } /// Updates the state of the UI based on the current state. 
fn update_ui_state( mut buttons: Query<( &mut BackgroundColor, &mut BorderColor, &ColorGradingOptionWidget, )>, button_text: Query<(Entity, &ColorGradingOptionWidget), (With<Text>, Without<HelpText>)>, help_text: Single<Entity, With<HelpText>>, mut writer: TextUiWriter, cameras: Single<Ref<ColorGrading>>, currently_selected_option: Res<SelectedColorGradingOption>, ) { // Exit early if the UI didn't change if !currently_selected_option.is_changed() && !cameras.is_changed() { return; } // The currently-selected option is drawn with inverted colors. for (mut background, mut border_color, widget) in buttons.iter_mut() { if *currently_selected_option == widget.option { *background = Color::WHITE.into(); *border_color = Color::BLACK.into(); } else { *background = Color::BLACK.into(); *border_color = Color::WHITE.into(); } } let value_label = format!("{:.3}", currently_selected_option.get(cameras.as_ref())); // Update the buttons. for (entity, widget) in button_text.iter() { // Set the text color. let color = if *currently_selected_option == widget.option { Color::BLACK } else { Color::WHITE }; writer.for_each_color(entity, |mut text_color| { text_color.0 = color; }); // Update the displayed value, if this is the currently-selected option. if widget.widget_type == ColorGradingOptionWidgetType::Value && *currently_selected_option == widget.option { writer.for_each_text(entity, |mut text| { text.clone_from(&value_label); }); } } // Update the help text. *writer.text(*help_text, 0) = create_help_text(&currently_selected_option); } /// Creates the help text at the top left of the window. fn create_help_text(currently_selected_option: &SelectedColorGradingOption) -> String { format!("Press Left/Right to adjust {currently_selected_option}") } /// Processes keyboard input to change the value of the currently-selected color /// grading option. 
fn adjust_color_grading_option( mut color_grading: Single<&mut ColorGrading>, input: Res<ButtonInput<KeyCode>>, currently_selected_option: Res<SelectedColorGradingOption>, ) { let mut delta = 0.0; if input.pressed(KeyCode::ArrowLeft) { delta -= OPTION_ADJUSTMENT_SPEED; } if input.pressed(KeyCode::ArrowRight) { delta += OPTION_ADJUSTMENT_SPEED; } if delta != 0.0 { let new_value = currently_selected_option.get(color_grading.as_ref()) + delta; currently_selected_option.set(&mut color_grading, new_value); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/wireframe.rs
examples/3d/wireframe.rs
//! Showcases wireframe rendering. //! //! Wireframes currently do not work when using webgl or webgpu. //! Supported platforms: //! - DX12 //! - Vulkan //! - Metal //! //! This is a native only feature. use bevy::{ color::palettes::css::*, pbr::wireframe::{NoWireframe, Wireframe, WireframeColor, WireframeConfig, WireframePlugin}, prelude::*, render::{ render_resource::WgpuFeatures, settings::{RenderCreation, WgpuSettings}, RenderPlugin, }, }; fn main() { App::new() .add_plugins(( DefaultPlugins.set(RenderPlugin { render_creation: RenderCreation::Automatic(WgpuSettings { // WARN this is a native only feature. It will not work with webgl or webgpu features: WgpuFeatures::POLYGON_MODE_LINE, ..default() }), ..default() }), // You need to add this plugin to enable wireframe rendering WireframePlugin::default(), )) // Wireframes can be configured with this resource. This can be changed at runtime. .insert_resource(WireframeConfig { // The global wireframe config enables drawing of wireframes on every mesh, // except those with `NoWireframe`. Meshes with `Wireframe` will always have a wireframe, // regardless of the global configuration. global: true, // Controls the default color of all wireframes. Used as the default color for global wireframes. // Can be changed per mesh using the `WireframeColor` component. 
default_color: WHITE.into(), }) .add_systems(Startup, setup) .add_systems(Update, update_colors) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Red cube: Never renders a wireframe commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::from(RED))), Transform::from_xyz(-1.0, 0.5, -1.0), NoWireframe, )); // Orange cube: Follows global wireframe setting commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::from(ORANGE))), Transform::from_xyz(0.0, 0.5, 0.0), )); // Green cube: Always renders a wireframe commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::from(LIME))), Transform::from_xyz(1.0, 0.5, 1.0), Wireframe, // This lets you configure the wireframe color of this entity. // If not set, this will use the color in `WireframeConfig` WireframeColor { color: LIME.into() }, )); // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(Color::from(BLUE))), // You can insert this component without the `Wireframe` component // to override the color of the global wireframe for this mesh WireframeColor { color: BLACK.into(), }, )); // light commands.spawn((PointLight::default(), Transform::from_xyz(2.0, 4.0, 2.0))); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Text used to show controls commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } /// This system let's you toggle various wireframe settings fn update_colors( keyboard_input: Res<ButtonInput<KeyCode>>, mut config: ResMut<WireframeConfig>, mut wireframe_colors: Query<&mut WireframeColor, With<Wireframe>>, mut text: Single<&mut Text>, ) { text.0 = format!( "Controls --------------- Z - 
Toggle global X - Change global color C - Change color of the green cube wireframe WireframeConfig ------------- Global: {} Color: {:?}", config.global, config.default_color, ); // Toggle showing a wireframe on all meshes if keyboard_input.just_pressed(KeyCode::KeyZ) { config.global = !config.global; } // Toggle the global wireframe color if keyboard_input.just_pressed(KeyCode::KeyX) { config.default_color = if config.default_color == WHITE.into() { DEEP_PINK.into() } else { WHITE.into() }; } // Toggle the color of a wireframe using WireframeColor and not the global color if keyboard_input.just_pressed(KeyCode::KeyC) { for mut color in &mut wireframe_colors { color.color = if color.color == LIME.into() { RED.into() } else { LIME.into() }; } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/3d_viewport_to_world.rs
examples/3d/3d_viewport_to_world.rs
//! This example demonstrates how to use the `Camera::viewport_to_world` method. use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, draw_cursor) .run(); } fn draw_cursor( camera_query: Single<(&Camera, &GlobalTransform)>, ground: Single<&GlobalTransform, With<Ground>>, window: Single<&Window>, mut gizmos: Gizmos, ) { let (camera, camera_transform) = *camera_query; if let Some(cursor_position) = window.cursor_position() // Calculate a ray pointing from the camera into the world based on the cursor's position. && let Ok(ray) = camera.viewport_to_world(camera_transform, cursor_position) // Calculate if and at what distance the ray is hitting the ground plane. && let Some(distance) = ray.intersect_plane(ground.translation(), InfinitePlane3d::new(ground.up())) { let point = ray.get_point(distance); // Draw a circle just above the ground plane at that position. gizmos.circle( Isometry3d::new( point + ground.up() * 0.01, Quat::from_rotation_arc(Vec3::Z, ground.up().as_vec3()), ), 0.2, Color::WHITE, ); } } #[derive(Component)] struct Ground; fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(20., 20.))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), Ground, )); // light commands.spawn(( DirectionalLight::default(), Transform::from_translation(Vec3::ONE).looking_at(Vec3::ZERO, Vec3::Y), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(15.0, 5.0, 15.0).looking_at(Vec3::ZERO, Vec3::Y), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/transmission.rs
examples/3d/transmission.rs
//! This example showcases light transmission //! //! ## Controls //! //! | Key Binding | Action | //! |:-------------------|:-----------------------------------------------------| //! | `J`/`K`/`L`/`;` | Change Screen Space Transmission Quality | //! | `O` / `P` | Decrease / Increase Screen Space Transmission Steps | //! | `1` / `2` | Decrease / Increase Diffuse Transmission | //! | `Q` / `W` | Decrease / Increase Specular Transmission | //! | `A` / `S` | Decrease / Increase Thickness | //! | `Z` / `X` | Decrease / Increase IOR | //! | `E` / `R` | Decrease / Increase Perceptual Roughness | //! | `U` / `I` | Decrease / Increase Reflectance | //! | Arrow Keys | Control Camera | //! | `C` | Randomize Colors | //! | `H` | Toggle HDR + Bloom | //! | `D` | Toggle Depth Prepass | //! | `T` | Toggle TAA | use std::f32::consts::PI; use bevy::{ camera::{Exposure, ScreenSpaceTransmissionQuality}, color::palettes::css::*, core_pipeline::{prepass::DepthPrepass, tonemapping::Tonemapping}, light::{NotShadowCaster, PointLightShadowMap, TransmittedShadowReceiver}, math::ops, post_process::bloom::Bloom, prelude::*, render::{ camera::TemporalJitter, view::{ColorGrading, ColorGradingGlobal, Hdr}, }, }; // *Note:* TAA is not _required_ for specular transmission, but // it _greatly enhances_ the look of the resulting blur effects. // Sadly, it's not available under WebGL. 
#[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))] use bevy::anti_alias::taa::TemporalAntiAliasing; use rand::random; fn main() { App::new() .add_plugins(DefaultPlugins) .insert_resource(ClearColor(Color::BLACK)) .insert_resource(PointLightShadowMap { size: 2048 }) .insert_resource(GlobalAmbientLight { brightness: 0.0, ..default() }) .add_systems(Startup, setup) .add_systems(Update, (example_control_system, flicker_system)) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, asset_server: Res<AssetServer>, ) { let icosphere_mesh = meshes.add(Sphere::new(0.9).mesh().ico(7).unwrap()); let cube_mesh = meshes.add(Cuboid::new(0.7, 0.7, 0.7)); let plane_mesh = meshes.add(Plane3d::default().mesh().size(2.0, 2.0)); let cylinder_mesh = meshes.add(Cylinder::new(0.5, 2.0).mesh().resolution(50)); // Cube #1 commands.spawn(( Mesh3d(cube_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial::default())), Transform::from_xyz(0.25, 0.5, -2.0).with_rotation(Quat::from_euler( EulerRot::XYZ, 1.4, 3.7, 21.3, )), ExampleControls { color: true, specular_transmission: false, diffuse_transmission: false, }, )); // Cube #2 commands.spawn(( Mesh3d(cube_mesh), MeshMaterial3d(materials.add(StandardMaterial::default())), Transform::from_xyz(-0.75, 0.7, -2.0).with_rotation(Quat::from_euler( EulerRot::XYZ, 0.4, 2.3, 4.7, )), ExampleControls { color: true, specular_transmission: false, diffuse_transmission: false, }, )); // Candle commands.spawn(( Mesh3d(cylinder_mesh), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::srgb(0.9, 0.2, 0.3), diffuse_transmission: 0.7, perceptual_roughness: 0.32, thickness: 0.2, ..default() })), Transform::from_xyz(-1.0, 0.0, 0.0), ExampleControls { color: true, specular_transmission: false, diffuse_transmission: true, }, )); // Candle Flame let scaled_white = LinearRgba::from(ANTIQUE_WHITE) * 20.; let scaled_orange = 
LinearRgba::from(ORANGE_RED) * 4.; let emissive = LinearRgba { red: scaled_white.red + scaled_orange.red, green: scaled_white.green + scaled_orange.green, blue: scaled_white.blue + scaled_orange.blue, alpha: 1.0, }; commands.spawn(( Mesh3d(icosphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { emissive, diffuse_transmission: 1.0, ..default() })), Transform::from_xyz(-1.0, 1.15, 0.0).with_scale(Vec3::new(0.1, 0.2, 0.1)), Flicker, NotShadowCaster, )); // Glass Sphere commands.spawn(( Mesh3d(icosphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::WHITE, specular_transmission: 0.9, diffuse_transmission: 1.0, thickness: 1.8, ior: 1.5, perceptual_roughness: 0.12, ..default() })), Transform::from_xyz(1.0, 0.0, 0.0), ExampleControls { color: true, specular_transmission: true, diffuse_transmission: false, }, )); // R Sphere commands.spawn(( Mesh3d(icosphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: RED.into(), specular_transmission: 0.9, diffuse_transmission: 1.0, thickness: 1.8, ior: 1.5, perceptual_roughness: 0.12, ..default() })), Transform::from_xyz(1.0, -0.5, 2.0).with_scale(Vec3::splat(0.5)), ExampleControls { color: true, specular_transmission: true, diffuse_transmission: false, }, )); // G Sphere commands.spawn(( Mesh3d(icosphere_mesh.clone()), MeshMaterial3d(materials.add(StandardMaterial { base_color: LIME.into(), specular_transmission: 0.9, diffuse_transmission: 1.0, thickness: 1.8, ior: 1.5, perceptual_roughness: 0.12, ..default() })), Transform::from_xyz(0.0, -0.5, 2.0).with_scale(Vec3::splat(0.5)), ExampleControls { color: true, specular_transmission: true, diffuse_transmission: false, }, )); // B Sphere commands.spawn(( Mesh3d(icosphere_mesh), MeshMaterial3d(materials.add(StandardMaterial { base_color: BLUE.into(), specular_transmission: 0.9, diffuse_transmission: 1.0, thickness: 1.8, ior: 1.5, perceptual_roughness: 0.12, ..default() })), Transform::from_xyz(-1.0, -0.5, 
2.0).with_scale(Vec3::splat(0.5)), ExampleControls { color: true, specular_transmission: true, diffuse_transmission: false, }, )); // Chessboard Plane let black_material = materials.add(StandardMaterial { base_color: Color::BLACK, reflectance: 0.3, perceptual_roughness: 0.8, ..default() }); let white_material = materials.add(StandardMaterial { base_color: Color::WHITE, reflectance: 0.3, perceptual_roughness: 0.8, ..default() }); for x in -3..4 { for z in -3..4 { commands.spawn(( Mesh3d(plane_mesh.clone()), MeshMaterial3d(if (x + z) % 2 == 0 { black_material.clone() } else { white_material.clone() }), Transform::from_xyz(x as f32 * 2.0, -1.0, z as f32 * 2.0), ExampleControls { color: true, specular_transmission: false, diffuse_transmission: false, }, )); } } // Paper commands.spawn(( Mesh3d(plane_mesh), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::WHITE, diffuse_transmission: 0.6, perceptual_roughness: 0.8, reflectance: 1.0, double_sided: true, cull_mode: None, ..default() })), Transform::from_xyz(0.0, 0.5, -3.0) .with_scale(Vec3::new(2.0, 1.0, 1.0)) .with_rotation(Quat::from_euler(EulerRot::XYZ, PI / 2.0, 0.0, 0.0)), TransmittedShadowReceiver, ExampleControls { specular_transmission: false, color: false, diffuse_transmission: true, }, )); // Candle Light commands.spawn(( Transform::from_xyz(-1.0, 1.7, 0.0), PointLight { color: Color::from( LinearRgba::from(ANTIQUE_WHITE).mix(&LinearRgba::from(ORANGE_RED), 0.2), ), intensity: 4_000.0, radius: 0.2, range: 5.0, shadows_enabled: true, ..default() }, Flicker, )); // Camera commands.spawn(( Camera3d::default(), Transform::from_xyz(1.0, 1.8, 7.0).looking_at(Vec3::ZERO, Vec3::Y), ColorGrading { global: ColorGradingGlobal { post_saturation: 1.2, ..default() }, ..default() }, Tonemapping::TonyMcMapface, Exposure { ev100: 6.0 }, #[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))] Msaa::Off, #[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))] TemporalAntiAliasing::default(), 
EnvironmentMapLight { intensity: 25.0, diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), ..default() }, Bloom::default(), )); // Controls Text commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, ExampleDisplay, )); } #[derive(Component)] struct Flicker; #[derive(Component)] struct ExampleControls { diffuse_transmission: bool, specular_transmission: bool, color: bool, } struct ExampleState { diffuse_transmission: f32, specular_transmission: f32, thickness: f32, ior: f32, perceptual_roughness: f32, reflectance: f32, auto_camera: bool, } #[derive(Component)] struct ExampleDisplay; impl Default for ExampleState { fn default() -> Self { ExampleState { diffuse_transmission: 0.5, specular_transmission: 0.9, thickness: 1.8, ior: 1.5, perceptual_roughness: 0.12, reflectance: 0.5, auto_camera: true, } } } fn example_control_system( mut commands: Commands, mut materials: ResMut<Assets<StandardMaterial>>, controllable: Query<(&MeshMaterial3d<StandardMaterial>, &ExampleControls)>, camera: Single< ( Entity, &mut Camera3d, &mut Transform, Option<&DepthPrepass>, Option<&TemporalJitter>, Has<Hdr>, ), With<Camera3d>, >, mut display: Single<&mut Text, With<ExampleDisplay>>, mut state: Local<ExampleState>, time: Res<Time>, input: Res<ButtonInput<KeyCode>>, ) { if input.pressed(KeyCode::Digit2) { state.diffuse_transmission = (state.diffuse_transmission + time.delta_secs()).min(1.0); } else if input.pressed(KeyCode::Digit1) { state.diffuse_transmission = (state.diffuse_transmission - time.delta_secs()).max(0.0); } if input.pressed(KeyCode::KeyW) { state.specular_transmission = (state.specular_transmission + time.delta_secs()).min(1.0); } else if input.pressed(KeyCode::KeyQ) { state.specular_transmission = (state.specular_transmission - time.delta_secs()).max(0.0); } if input.pressed(KeyCode::KeyS) { 
state.thickness = (state.thickness + time.delta_secs()).min(5.0); } else if input.pressed(KeyCode::KeyA) { state.thickness = (state.thickness - time.delta_secs()).max(0.0); } if input.pressed(KeyCode::KeyX) { state.ior = (state.ior + time.delta_secs()).min(3.0); } else if input.pressed(KeyCode::KeyZ) { state.ior = (state.ior - time.delta_secs()).max(1.0); } if input.pressed(KeyCode::KeyI) { state.reflectance = (state.reflectance + time.delta_secs()).min(1.0); } else if input.pressed(KeyCode::KeyU) { state.reflectance = (state.reflectance - time.delta_secs()).max(0.0); } if input.pressed(KeyCode::KeyR) { state.perceptual_roughness = (state.perceptual_roughness + time.delta_secs()).min(1.0); } else if input.pressed(KeyCode::KeyE) { state.perceptual_roughness = (state.perceptual_roughness - time.delta_secs()).max(0.0); } let randomize_colors = input.just_pressed(KeyCode::KeyC); for (material_handle, controls) in &controllable { let material = materials.get_mut(material_handle).unwrap(); if controls.specular_transmission { material.specular_transmission = state.specular_transmission; material.thickness = state.thickness; material.ior = state.ior; material.perceptual_roughness = state.perceptual_roughness; material.reflectance = state.reflectance; } if controls.diffuse_transmission { material.diffuse_transmission = state.diffuse_transmission; } if controls.color && randomize_colors { material.base_color = Color::srgba(random(), random(), random(), material.base_color.alpha()); } } let (camera_entity, mut camera_3d, mut camera_transform, depth_prepass, temporal_jitter, hdr) = camera.into_inner(); if input.just_pressed(KeyCode::KeyH) { if hdr { commands.entity(camera_entity).remove::<Hdr>(); } else { commands.entity(camera_entity).insert(Hdr); } } #[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))] if input.just_pressed(KeyCode::KeyD) { if depth_prepass.is_none() { commands.entity(camera_entity).insert(DepthPrepass); } else { 
commands.entity(camera_entity).remove::<DepthPrepass>(); } } #[cfg(any(feature = "webgpu", not(target_arch = "wasm32")))] if input.just_pressed(KeyCode::KeyT) { if temporal_jitter.is_none() { commands .entity(camera_entity) .insert((TemporalJitter::default(), TemporalAntiAliasing::default())); } else { commands .entity(camera_entity) .remove::<(TemporalJitter, TemporalAntiAliasing)>(); } } if input.just_pressed(KeyCode::KeyO) && camera_3d.screen_space_specular_transmission_steps > 0 { camera_3d.screen_space_specular_transmission_steps -= 1; } if input.just_pressed(KeyCode::KeyP) && camera_3d.screen_space_specular_transmission_steps < 4 { camera_3d.screen_space_specular_transmission_steps += 1; } if input.just_pressed(KeyCode::KeyJ) { camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::Low; } if input.just_pressed(KeyCode::KeyK) { camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::Medium; } if input.just_pressed(KeyCode::KeyL) { camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::High; } if input.just_pressed(KeyCode::Semicolon) { camera_3d.screen_space_specular_transmission_quality = ScreenSpaceTransmissionQuality::Ultra; } let rotation = if input.pressed(KeyCode::ArrowRight) { state.auto_camera = false; time.delta_secs() } else if input.pressed(KeyCode::ArrowLeft) { state.auto_camera = false; -time.delta_secs() } else if state.auto_camera { time.delta_secs() * 0.25 } else { 0.0 }; let distance_change = if input.pressed(KeyCode::ArrowDown) && camera_transform.translation.length() < 25.0 { time.delta_secs() } else if input.pressed(KeyCode::ArrowUp) && camera_transform.translation.length() > 2.0 { -time.delta_secs() } else { 0.0 }; camera_transform.translation *= ops::exp(distance_change); camera_transform.rotate_around( Vec3::ZERO, Quat::from_euler(EulerRot::XYZ, 0.0, rotation, 0.0), ); display.0 = format!( concat!( " J / K / L / ; Screen Space Specular 
Transmissive Quality: {:?}\n", " O / P Screen Space Specular Transmissive Steps: {}\n", " 1 / 2 Diffuse Transmission: {:.2}\n", " Q / W Specular Transmission: {:.2}\n", " A / S Thickness: {:.2}\n", " Z / X IOR: {:.2}\n", " E / R Perceptual Roughness: {:.2}\n", " U / I Reflectance: {:.2}\n", " Arrow Keys Control Camera\n", " C Randomize Colors\n", " H HDR + Bloom: {}\n", " D Depth Prepass: {}\n", " T TAA: {}\n", ), camera_3d.screen_space_specular_transmission_quality, camera_3d.screen_space_specular_transmission_steps, state.diffuse_transmission, state.specular_transmission, state.thickness, state.ior, state.perceptual_roughness, state.reflectance, if hdr { "ON " } else { "OFF" }, if cfg!(any(feature = "webgpu", not(target_arch = "wasm32"))) { if depth_prepass.is_some() { "ON " } else { "OFF" } } else { "N/A (WebGL)" }, if cfg!(any(feature = "webgpu", not(target_arch = "wasm32"))) { if temporal_jitter.is_some() { if depth_prepass.is_some() { "ON " } else { "N/A (Needs Depth Prepass)" } } else { "OFF" } } else { "N/A (WebGL)" }, ); } fn flicker_system( mut flame: Single<&mut Transform, (With<Flicker>, With<Mesh3d>)>, light: Single<(&mut PointLight, &mut Transform), (With<Flicker>, Without<Mesh3d>)>, time: Res<Time>, ) { let s = time.elapsed_secs(); let a = ops::cos(s * 6.0) * 0.0125 + ops::cos(s * 4.0) * 0.025; let b = ops::cos(s * 5.0) * 0.0125 + ops::cos(s * 3.0) * 0.025; let c = ops::cos(s * 7.0) * 0.0125 + ops::cos(s * 2.0) * 0.025; let (mut light, mut light_transform) = light.into_inner(); light.intensity = 4_000.0 + 3000.0 * (a + b + c); flame.translation = Vec3::new(-1.0, 1.23, 0.0); flame.look_at(Vec3::new(-1.0 - c, 1.7 - b, 0.0 - a), Vec3::X); flame.rotate(Quat::from_euler(EulerRot::XYZ, 0.0, 0.0, PI / 2.0)); light_transform.translation = Vec3::new(-1.0 - c, 1.7, 0.0 - a); flame.translation = Vec3::new(-1.0 - c, 1.23, 0.0 - a); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/3d/3d_shapes.rs
examples/3d/3d_shapes.rs
//! Here we use shape primitives to generate meshes for 3d objects as well as attaching a runtime-generated patterned texture to each 3d object. //! //! "Shape primitives" here are just the mathematical definition of certain shapes, they're not meshes on their own! A sphere with radius `1.0` can be defined with [`Sphere::new(1.0)`][Sphere::new] but all this does is store the radius. So we need to turn these descriptions of shapes into meshes. //! //! While a shape is not a mesh, turning it into one in Bevy is easy. In this example we call [`meshes.add(/* Shape here! */)`][`Assets<A>::add`] on the shape, which works because the [`Assets<A>::add`] method takes anything that can be turned into the asset type it stores. There's an implementation for [`From`] on shape primitives into [`Mesh`], so that will get called internally by [`Assets<A>::add`]. //! //! [`Extrusion`] lets us turn 2D shape primitives into versions of those shapes that have volume by extruding them. A 1x1 square that gets wrapped in this with an extrusion depth of 2 will give us a rectangular prism of size 1x1x2, but here we're just extruding these 2d shapes by depth 1. //! //! The material applied to these shapes is a texture that we generate at run time by looping through a "palette" of RGBA values (stored adjacent to each other in the array) and writing values to positions in another array that represents the buffer for an 8x8 texture. This texture is then registered with the assets system just one time, with that [`Handle<StandardMaterial>`] then applied to all the shapes in this example. //! //! The mesh and material are [`Handle<Mesh>`] and [`Handle<StandardMaterial>`] at the moment, neither of which implement `Component` on their own. Handles are put behind "newtypes" to prevent ambiguity, as some entities might want to have handles to meshes (or images, or materials etc.) for different purposes! 
All we need to do to make them rendering-relevant components is wrap the mesh handle and the material handle in [`Mesh3d`] and [`MeshMaterial3d`] respectively. //! //! You can toggle wireframes with the space bar except on wasm. Wasm does not support //! `POLYGON_MODE_LINE` on the gpu. use std::f32::consts::PI; #[cfg(not(target_arch = "wasm32"))] use bevy::pbr::wireframe::{WireframeConfig, WireframePlugin}; use bevy::{ asset::RenderAssetUsages, color::palettes::basic::SILVER, input::common_conditions::{input_just_pressed, input_toggle_active}, prelude::*, render::render_resource::{Extent3d, TextureDimension, TextureFormat}, }; fn main() { App::new() .add_plugins(( DefaultPlugins.set(ImagePlugin::default_nearest()), #[cfg(not(target_arch = "wasm32"))] WireframePlugin::default(), )) .add_systems(Startup, setup) .add_systems( Update, ( rotate.run_if(input_toggle_active(true, KeyCode::KeyR)), advance_rows.run_if(input_just_pressed(KeyCode::Tab)), #[cfg(not(target_arch = "wasm32"))] toggle_wireframe, ), ) .run(); } /// A marker component for our shapes so we can query them separately from the ground plane #[derive(Component)] struct Shape; const SHAPES_X_EXTENT: f32 = 14.0; const EXTRUSION_X_EXTENT: f32 = 14.0; const Z_EXTENT: f32 = 8.0; const THICKNESS: f32 = 0.1; fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut images: ResMut<Assets<Image>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let debug_material = materials.add(StandardMaterial { base_color_texture: Some(images.add(uv_debug_texture())), ..default() }); let shapes = [ meshes.add(Cuboid::default()), meshes.add(Tetrahedron::default()), meshes.add(Capsule3d::default()), meshes.add(Torus::default()), meshes.add(Cylinder::default()), meshes.add(Cone::default()), meshes.add(ConicalFrustum::default()), meshes.add(Sphere::default().mesh().ico(5).unwrap()), meshes.add(Sphere::default().mesh().uv(32, 18)), meshes.add(Segment3d::default()), meshes.add(Polyline3d::new(vec![ Vec3::new(-0.5, 
0.0, 0.0), Vec3::new(0.5, 0.0, 0.0), Vec3::new(0.0, 0.5, 0.0), ])), ]; let extrusions = [ meshes.add(Extrusion::new(Rectangle::default(), 1.)), meshes.add(Extrusion::new(Capsule2d::default(), 1.)), meshes.add(Extrusion::new(Annulus::default(), 1.)), meshes.add(Extrusion::new(Circle::default(), 1.)), meshes.add(Extrusion::new(Ellipse::default(), 1.)), meshes.add(Extrusion::new(RegularPolygon::default(), 1.)), meshes.add(Extrusion::new(Triangle2d::default(), 1.)), ]; let ring_extrusions = [ meshes.add(Extrusion::new(Rectangle::default().to_ring(THICKNESS), 1.)), meshes.add(Extrusion::new(Capsule2d::default().to_ring(THICKNESS), 1.)), meshes.add(Extrusion::new( Ring::new(Circle::new(1.0), Circle::new(0.5)), 1., )), meshes.add(Extrusion::new(Circle::default().to_ring(THICKNESS), 1.)), meshes.add(Extrusion::new( { // This is an approximation; Ellipse does not implement Inset as concentric ellipses do not have parallel curves let outer = Ellipse::default(); let mut inner = outer; inner.half_size -= Vec2::splat(THICKNESS); Ring::new(outer, inner) }, 1., )), meshes.add(Extrusion::new( RegularPolygon::default().to_ring(THICKNESS), 1., )), meshes.add(Extrusion::new(Triangle2d::default().to_ring(THICKNESS), 1.)), ]; let num_shapes = shapes.len(); for (i, shape) in shapes.into_iter().enumerate() { commands.spawn(( Mesh3d(shape), MeshMaterial3d(debug_material.clone()), Transform::from_xyz( -SHAPES_X_EXTENT / 2. + i as f32 / (num_shapes - 1) as f32 * SHAPES_X_EXTENT, 2.0, Row::Front.z(), ) .with_rotation(Quat::from_rotation_x(-PI / 4.)), Shape, Row::Front, )); } let num_extrusions = extrusions.len(); for (i, shape) in extrusions.into_iter().enumerate() { commands.spawn(( Mesh3d(shape), MeshMaterial3d(debug_material.clone()), Transform::from_xyz( -EXTRUSION_X_EXTENT / 2. 
+ i as f32 / (num_extrusions - 1) as f32 * EXTRUSION_X_EXTENT, 2.0, Row::Middle.z(), ) .with_rotation(Quat::from_rotation_x(-PI / 4.)), Shape, Row::Middle, )); } let num_ring_extrusions = ring_extrusions.len(); for (i, shape) in ring_extrusions.into_iter().enumerate() { commands.spawn(( Mesh3d(shape), MeshMaterial3d(debug_material.clone()), Transform::from_xyz( -EXTRUSION_X_EXTENT / 2. + i as f32 / (num_ring_extrusions - 1) as f32 * EXTRUSION_X_EXTENT, 2.0, Row::Rear.z(), ) .with_rotation(Quat::from_rotation_x(-PI / 4.)), Shape, Row::Rear, )); } commands.spawn(( PointLight { shadows_enabled: true, intensity: 10_000_000., range: 100.0, shadow_depth_bias: 0.2, ..default() }, Transform::from_xyz(8.0, 16.0, 8.0), )); // ground plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(50.0, 50.0).subdivisions(10))), MeshMaterial3d(materials.add(Color::from(SILVER))), )); commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 7., 14.0).looking_at(Vec3::new(0., 1., 0.), Vec3::Y), )); let mut text = "\ Press 'R' to pause/resume rotation\n\ Press 'Tab' to cycle through rows" .to_string(); #[cfg(not(target_arch = "wasm32"))] text.push_str("\nPress 'Space' to toggle wireframes"); commands.spawn(( Text::new(text), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn rotate(mut query: Query<&mut Transform, With<Shape>>, time: Res<Time>) { for mut transform in &mut query { transform.rotate_y(time.delta_secs() / 2.); } } /// Creates a colorful test pattern fn uv_debug_texture() -> Image { const TEXTURE_SIZE: usize = 8; let mut palette: [u8; 32] = [ 255, 102, 159, 255, 255, 159, 102, 255, 236, 255, 102, 255, 121, 255, 102, 255, 102, 255, 198, 255, 102, 198, 255, 255, 121, 102, 255, 255, 236, 102, 255, 255, ]; let mut texture_data = [0; TEXTURE_SIZE * TEXTURE_SIZE * 4]; for y in 0..TEXTURE_SIZE { let offset = TEXTURE_SIZE * y * 4; texture_data[offset..(offset + TEXTURE_SIZE * 4)].copy_from_slice(&palette); 
palette.rotate_right(4); } Image::new_fill( Extent3d { width: TEXTURE_SIZE as u32, height: TEXTURE_SIZE as u32, depth_or_array_layers: 1, }, TextureDimension::D2, &texture_data, TextureFormat::Rgba8UnormSrgb, RenderAssetUsages::RENDER_WORLD, ) } #[cfg(not(target_arch = "wasm32"))] fn toggle_wireframe( mut wireframe_config: ResMut<WireframeConfig>, keyboard: Res<ButtonInput<KeyCode>>, ) { if keyboard.just_pressed(KeyCode::Space) { wireframe_config.global = !wireframe_config.global; } } #[derive(Component, Clone, Copy)] enum Row { Front, Middle, Rear, } impl Row { fn z(self) -> f32 { match self { Row::Front => Z_EXTENT / 2., Row::Middle => 0., Row::Rear => -Z_EXTENT / 2., } } fn advance(self) -> Self { match self { Row::Front => Row::Rear, Row::Middle => Row::Front, Row::Rear => Row::Middle, } } } fn advance_rows(mut shapes: Query<(&mut Row, &mut Transform), With<Shape>>) { for (mut row, mut transform) in &mut shapes { *row = row.advance(); transform.translation.z = row.z(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gizmos/axes.rs
examples/gizmos/axes.rs
//! This example demonstrates the implementation and behavior of the axes gizmo. use bevy::{camera::primitives::Aabb, prelude::*}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; use std::f32::consts::PI; const TRANSITION_DURATION: f32 = 2.0; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (move_cubes, draw_axes).chain()) .run(); } /// The `ShowAxes` component is attached to an entity to get the `draw_axes` system to /// display axes according to its Transform component. #[derive(Component)] struct ShowAxes; /// The `TransformTracking` component keeps track of the data we need to interpolate /// between two transforms in our example. #[derive(Component)] struct TransformTracking { /// The initial transform of the cube during the move initial_transform: Transform, /// The target transform of the cube during the move target_transform: Transform, /// The progress of the cube during the move in seconds progress: f32, } #[derive(Resource)] struct SeededRng(ChaCha8Rng); fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // We're seeding the PRNG here to make this example deterministic for testing purposes. // This isn't strictly required in practical use unless you need your app to be deterministic. let mut rng = ChaCha8Rng::seed_from_u64(19878367467713); // Lights... commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(2., 6., 0.), )); // Camera... commands.spawn(( Camera3d::default(), Transform::from_xyz(0., 1.5, -8.).looking_at(Vec3::new(0., -0.5, 0.), Vec3::Y), )); // Action! 
(Our cubes that are going to move) commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1., 1., 1.))), MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))), ShowAxes, TransformTracking { initial_transform: default(), target_transform: random_transform(&mut rng), progress: 0.0, }, )); commands.spawn(( Mesh3d(meshes.add(Cuboid::new(0.5, 0.5, 0.5))), MeshMaterial3d(materials.add(Color::srgb(0.6, 0.7, 0.8))), ShowAxes, TransformTracking { initial_transform: default(), target_transform: random_transform(&mut rng), progress: 0.0, }, )); // A plane to give a sense of place commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(20., 20.))), MeshMaterial3d(materials.add(Color::srgb(0.1, 0.1, 0.1))), Transform::from_xyz(0., -2., 0.), )); commands.insert_resource(SeededRng(rng)); } // This system draws the axes based on the cube's transform, with length based on the size of // the entity's axis-aligned bounding box (AABB). fn draw_axes(mut gizmos: Gizmos, query: Query<(&Transform, &Aabb), With<ShowAxes>>) { for (&transform, &aabb) in &query { let length = aabb.half_extents.length(); gizmos.axes(transform, length); } } // This system changes the cubes' transforms to interpolate between random transforms fn move_cubes( mut query: Query<(&mut Transform, &mut TransformTracking)>, time: Res<Time>, mut rng: ResMut<SeededRng>, ) { for (mut transform, mut tracking) in &mut query { *transform = interpolate_transforms( tracking.initial_transform, tracking.target_transform, tracking.progress / TRANSITION_DURATION, ); if tracking.progress < TRANSITION_DURATION { tracking.progress += time.delta_secs(); } else { tracking.initial_transform = *transform; tracking.target_transform = random_transform(&mut rng.0); tracking.progress = 0.0; } } } // Helper functions for random transforms and interpolation: const TRANSLATION_BOUND_LOWER_X: f32 = -5.; const TRANSLATION_BOUND_UPPER_X: f32 = 5.; const TRANSLATION_BOUND_LOWER_Y: f32 = -1.; const TRANSLATION_BOUND_UPPER_Y: f32 = 1.; const 
TRANSLATION_BOUND_LOWER_Z: f32 = -2.; const TRANSLATION_BOUND_UPPER_Z: f32 = 6.; const SCALING_BOUND_LOWER_LOG: f32 = -1.2; const SCALING_BOUND_UPPER_LOG: f32 = 1.2; fn random_transform(rng: &mut impl Rng) -> Transform { Transform { translation: random_translation(rng), rotation: random_rotation(rng), scale: random_scale(rng), } } fn random_translation(rng: &mut impl Rng) -> Vec3 { let x = rng.random::<f32>() * (TRANSLATION_BOUND_UPPER_X - TRANSLATION_BOUND_LOWER_X) + TRANSLATION_BOUND_LOWER_X; let y = rng.random::<f32>() * (TRANSLATION_BOUND_UPPER_Y - TRANSLATION_BOUND_LOWER_Y) + TRANSLATION_BOUND_LOWER_Y; let z = rng.random::<f32>() * (TRANSLATION_BOUND_UPPER_Z - TRANSLATION_BOUND_LOWER_Z) + TRANSLATION_BOUND_LOWER_Z; Vec3::new(x, y, z) } fn random_scale(rng: &mut impl Rng) -> Vec3 { let x_factor_log = rng.random::<f32>() * (SCALING_BOUND_UPPER_LOG - SCALING_BOUND_LOWER_LOG) + SCALING_BOUND_LOWER_LOG; let y_factor_log = rng.random::<f32>() * (SCALING_BOUND_UPPER_LOG - SCALING_BOUND_LOWER_LOG) + SCALING_BOUND_LOWER_LOG; let z_factor_log = rng.random::<f32>() * (SCALING_BOUND_UPPER_LOG - SCALING_BOUND_LOWER_LOG) + SCALING_BOUND_LOWER_LOG; Vec3::new( ops::exp2(x_factor_log), ops::exp2(y_factor_log), ops::exp2(z_factor_log), ) } fn elerp(v1: Vec3, v2: Vec3, t: f32) -> Vec3 { let x_factor_log = (1. - t) * ops::log2(v1.x) + t * ops::log2(v2.x); let y_factor_log = (1. - t) * ops::log2(v1.y) + t * ops::log2(v2.y); let z_factor_log = (1. - t) * ops::log2(v1.z) + t * ops::log2(v2.z); Vec3::new( ops::exp2(x_factor_log), ops::exp2(y_factor_log), ops::exp2(z_factor_log), ) } fn random_rotation(rng: &mut impl Rng) -> Quat { let dir = random_direction(rng); let angle = rng.random::<f32>() * 2. * PI; Quat::from_axis_angle(dir, angle) } fn random_direction(rng: &mut impl Rng) -> Vec3 { let height = rng.random::<f32>() * 2. - 1.; let theta = rng.random::<f32>() * 2. 
* PI; build_direction(height, theta) } fn build_direction(height: f32, theta: f32) -> Vec3 { let z = height; let m = ops::sin(ops::acos(z)); let x = ops::cos(theta) * m; let y = ops::sin(theta) * m; Vec3::new(x, y, z) } fn interpolate_transforms(t1: Transform, t2: Transform, t: f32) -> Transform { let translation = t1.translation.lerp(t2.translation, t); let rotation = t1.rotation.slerp(t2.rotation, t); let scale = elerp(t1.scale, t2.scale, t); Transform { translation, rotation, scale, } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gizmos/3d_gizmos.rs
examples/gizmos/3d_gizmos.rs
//! This example demonstrates Bevy's immediate mode drawing API intended for visual debugging. use bevy::{ camera_controller::free_camera::{FreeCamera, FreeCameraPlugin}, color::palettes::css::*, prelude::*, }; use std::f32::consts::PI; fn main() { App::new() .add_plugins((DefaultPlugins, FreeCameraPlugin)) .init_gizmo_group::<MyRoundGizmos>() .add_systems(Startup, setup) .add_systems(Update, (draw_example_collection, update_config)) .run(); } // We can create our own gizmo config group! #[derive(Default, Reflect, GizmoConfigGroup)] struct MyRoundGizmos; fn setup( mut commands: Commands, mut gizmo_assets: ResMut<Assets<GizmoAsset>>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let mut gizmo = GizmoAsset::new(); // When drawing a lot of static lines a Gizmo component can have // far better performance than the Gizmos system parameter, // but the system parameter will perform better for smaller lines that update often. // A sphere made out of 30_000 lines! 
gizmo .sphere(Isometry3d::IDENTITY, 0.5, CRIMSON) .resolution(30_000 / 3); commands.spawn(( Gizmo { handle: gizmo_assets.add(gizmo), line_config: GizmoLineConfig { width: 5., ..default() }, ..default() }, Transform::from_xyz(4., 1., 0.), )); commands.spawn(( Camera3d::default(), Transform::from_xyz(0., 1.5, 6.).looking_at(Vec3::ZERO, Vec3::Y), FreeCamera::default(), )); // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), )); // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))), Transform::from_xyz(0.0, 0.5, 0.0), )); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // example instructions commands.spawn(( Text::new( "Press 'T' to toggle drawing gizmos on top of everything else in the scene\n\ Press 'P' to toggle perspective for line gizmos\n\ Hold 'Left' or 'Right' to change the line width of straight gizmos\n\ Hold 'Up' or 'Down' to change the line width of round gizmos\n\ Press '1' or '2' to toggle the visibility of straight gizmos or round gizmos\n\ Press 'B' to show all AABB boxes\n\ Press 'U' or 'I' to cycle through line styles for straight or round gizmos\n\ Press 'J' or 'K' to cycle through line joins for straight or round gizmos\n\ Press 'Spacebar' to toggle pause", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn draw_example_collection( mut gizmos: Gizmos, mut my_gizmos: Gizmos<MyRoundGizmos>, time: Res<Time>, ) { gizmos.grid( Quat::from_rotation_x(PI / 2.), UVec2::splat(20), Vec2::new(2., 2.), // Light gray LinearRgba::gray(0.65), ); gizmos.grid( Isometry3d::new(Vec3::splat(10.0), Quat::from_rotation_x(PI / 3. 
* 2.)), UVec2::splat(20), Vec2::new(2., 2.), PURPLE, ); gizmos.sphere(Vec3::splat(10.0), 1.0, PURPLE); gizmos .primitive_3d( &Plane3d { normal: Dir3::Y, half_size: Vec2::splat(1.0), }, Isometry3d::new( Vec3::splat(4.0) + Vec2::from(ops::sin_cos(time.elapsed_secs())).extend(0.0), Quat::from_rotation_x(PI / 2. + time.elapsed_secs()), ), GREEN, ) .cell_count(UVec2::new(5, 10)) .spacing(Vec2::new(0.2, 0.1)); gizmos.cube( Transform::from_translation(Vec3::Y * 0.5).with_scale(Vec3::splat(1.25)), BLACK, ); gizmos.rect( Isometry3d::new( Vec3::new(ops::cos(time.elapsed_secs()) * 2.5, 1., 0.), Quat::from_rotation_y(PI / 2.), ), Vec2::splat(2.), LIME, ); gizmos.cross(Vec3::new(-1., 1., 1.), 0.5, FUCHSIA); let domain = Interval::EVERYWHERE; let curve = FunctionCurve::new(domain, |t| { (Vec2::from(ops::sin_cos(t * 10.0))).extend(t - 6.0) }); let resolution = ((ops::sin(time.elapsed_secs()) + 1.0) * 100.0) as usize; let times_and_colors = (0..=resolution) .map(|n| n as f32 / resolution as f32) .map(|t| t * 5.0) .map(|t| (t, TEAL.mix(&HOT_PINK, t / 5.0))); gizmos.curve_gradient_3d(curve, times_and_colors); my_gizmos.sphere(Vec3::new(1., 0.5, 0.), 0.5, RED); my_gizmos .rounded_cuboid(Vec3::new(-2.0, 0.75, -0.75), Vec3::splat(0.9), TURQUOISE) .edge_radius(0.1) .arc_resolution(4); for y in [0., 0.5, 1.] { gizmos.ray( Vec3::new(1., y, 0.), Vec3::new(-3., ops::sin(time.elapsed_secs() * 3.), 0.), BLUE, ); } my_gizmos .arc_3d( 180.0_f32.to_radians(), 0.2, Isometry3d::new( Vec3::ONE, Quat::from_rotation_arc(Vec3::Y, Vec3::ONE.normalize()), ), ORANGE, ) .resolution(10); // Circles have 32 line-segments by default. my_gizmos.circle(Quat::from_rotation_arc(Vec3::Z, Vec3::Y), 3., BLACK); // You may want to increase this for larger circles or spheres. 
my_gizmos .circle(Quat::from_rotation_arc(Vec3::Z, Vec3::Y), 3.1, NAVY) .resolution(64); my_gizmos .sphere(Isometry3d::IDENTITY, 3.2, BLACK) .resolution(64); gizmos.arrow(Vec3::ZERO, Vec3::splat(1.5), YELLOW); // You can create more complex arrows using the arrow builder. gizmos .arrow(Vec3::new(2., 0., 2.), Vec3::new(2., 2., 2.), ORANGE_RED) .with_double_end() .with_tip_length(0.5); } fn update_config( mut config_store: ResMut<GizmoConfigStore>, keyboard: Res<ButtonInput<KeyCode>>, real_time: Res<Time<Real>>, mut virtual_time: ResMut<Time<Virtual>>, ) { if keyboard.just_pressed(KeyCode::KeyT) { for (_, config, _) in config_store.iter_mut() { config.depth_bias = if config.depth_bias == 0. { -1. } else { 0. }; } } if keyboard.just_pressed(KeyCode::KeyP) { for (_, config, _) in config_store.iter_mut() { // Toggle line perspective config.line.perspective ^= true; // Increase the line width when line perspective is on config.line.width *= if config.line.perspective { 5. } else { 1. / 5. }; } } let (config, _) = config_store.config_mut::<DefaultGizmoConfigGroup>(); if keyboard.pressed(KeyCode::ArrowRight) { config.line.width += 5. * real_time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.pressed(KeyCode::ArrowLeft) { config.line.width -= 5. 
* real_time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.just_pressed(KeyCode::Digit1) { config.enabled ^= true; } if keyboard.just_pressed(KeyCode::KeyU) { config.line.style = match config.line.style { GizmoLineStyle::Solid => GizmoLineStyle::Dotted, GizmoLineStyle::Dotted => GizmoLineStyle::Dashed { gap_scale: 3.0, line_scale: 5.0, }, _ => GizmoLineStyle::Solid, }; } if keyboard.just_pressed(KeyCode::KeyJ) { config.line.joints = match config.line.joints { GizmoLineJoint::Bevel => GizmoLineJoint::Miter, GizmoLineJoint::Miter => GizmoLineJoint::Round(4), GizmoLineJoint::Round(_) => GizmoLineJoint::None, GizmoLineJoint::None => GizmoLineJoint::Bevel, }; } let (my_config, _) = config_store.config_mut::<MyRoundGizmos>(); if keyboard.pressed(KeyCode::ArrowUp) { my_config.line.width += 5. * real_time.delta_secs(); my_config.line.width = my_config.line.width.clamp(0., 50.); } if keyboard.pressed(KeyCode::ArrowDown) { my_config.line.width -= 5. * real_time.delta_secs(); my_config.line.width = my_config.line.width.clamp(0., 50.); } if keyboard.just_pressed(KeyCode::Digit2) { my_config.enabled ^= true; } if keyboard.just_pressed(KeyCode::KeyI) { my_config.line.style = match my_config.line.style { GizmoLineStyle::Solid => GizmoLineStyle::Dotted, GizmoLineStyle::Dotted => GizmoLineStyle::Dashed { gap_scale: 3.0, line_scale: 5.0, }, _ => GizmoLineStyle::Solid, }; } if keyboard.just_pressed(KeyCode::KeyK) { my_config.line.joints = match my_config.line.joints { GizmoLineJoint::Bevel => GizmoLineJoint::Miter, GizmoLineJoint::Miter => GizmoLineJoint::Round(4), GizmoLineJoint::Round(_) => GizmoLineJoint::None, GizmoLineJoint::None => GizmoLineJoint::Bevel, }; } if keyboard.just_pressed(KeyCode::KeyB) { // AABB gizmos are normally only drawn on entities with a ShowAabbGizmo component // We can change this behavior in the configuration of AabbGizmoGroup config_store.config_mut::<AabbGizmoConfigGroup>().1.draw_all ^= true; } if 
keyboard.just_pressed(KeyCode::Space) { virtual_time.toggle(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gizmos/2d_gizmos.rs
examples/gizmos/2d_gizmos.rs
//! This example demonstrates Bevy's immediate mode drawing API intended for visual debugging. use std::f32::consts::{FRAC_PI_2, PI, TAU}; use bevy::{color::palettes::css::*, math::Isometry2d, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .init_gizmo_group::<MyRoundGizmos>() .add_systems(Startup, setup) .add_systems(Update, (draw_example_collection, update_config)) .run(); } // We can create our own gizmo config group! #[derive(Default, Reflect, GizmoConfigGroup)] struct MyRoundGizmos {} fn setup(mut commands: Commands) { commands.spawn(Camera2d); // text commands.spawn(( Text::new( "Hold 'Left' or 'Right' to change the line width of straight gizmos\n\ Hold 'Up' or 'Down' to change the line width of round gizmos\n\ Press '1' / '2' to toggle the visibility of straight / round gizmos\n\ Press 'U' / 'I' to cycle through line styles\n\ Press 'J' / 'K' to cycle through line joins\n\ Press 'Spacebar' to toggle pause", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn draw_example_collection( mut gizmos: Gizmos, mut my_gizmos: Gizmos<MyRoundGizmos>, time: Res<Time>, ) { let sin_t_scaled = ops::sin(time.elapsed_secs()) * 50.; gizmos.line_2d(Vec2::Y * -sin_t_scaled, Vec2::splat(-80.), RED); gizmos.ray_2d(Vec2::Y * sin_t_scaled, Vec2::splat(80.), LIME); gizmos .grid_2d( Isometry2d::IDENTITY, UVec2::new(16, 9), Vec2::new(80., 80.), // Dark gray LinearRgba::gray(0.05), ) .outer_edges(); // Triangle gizmos.linestrip_gradient_2d([ (Vec2::Y * 300., BLUE), (Vec2::new(-255., -155.), RED), (Vec2::new(255., -155.), LIME), (Vec2::Y * 300., BLUE), ]); gizmos.rect_2d(Isometry2d::IDENTITY, Vec2::splat(650.), BLACK); gizmos.cross_2d(Vec2::new(-160., 120.), 12., FUCHSIA); let domain = Interval::EVERYWHERE; let curve = FunctionCurve::new(domain, |t| Vec2::new(t, ops::sin(t / 25.0) * 100.0)); let resolution = ((ops::sin(time.elapsed_secs()) + 1.0) * 50.0) as usize; let times_and_colors = (0..=resolution) .map(|n| n as f32 / 
resolution as f32) .map(|t| (t - 0.5) * 600.0) .map(|t| (t, TEAL.mix(&HOT_PINK, (t + 300.0) / 600.0))); gizmos.curve_gradient_2d(curve, times_and_colors); my_gizmos .rounded_rect_2d(Isometry2d::IDENTITY, Vec2::splat(630.), BLACK) .corner_radius(ops::cos(time.elapsed_secs() / 3.) * 100.); // Circles have 32 line-segments by default. // You may want to increase this for larger circles. my_gizmos .circle_2d(Isometry2d::IDENTITY, 300., NAVY) .resolution(64); my_gizmos.ellipse_2d( Rot2::radians(time.elapsed_secs() % TAU), Vec2::new(100., 200.), YELLOW_GREEN, ); // Arcs default resolution is linearly interpolated between // 1 and 32, using the arc length as scalar. my_gizmos.arc_2d( Rot2::radians(sin_t_scaled / 10.), FRAC_PI_2, 310., ORANGE_RED, ); my_gizmos.arc_2d(Isometry2d::IDENTITY, FRAC_PI_2, 80.0, ORANGE_RED); my_gizmos.long_arc_2d_between(Vec2::ZERO, Vec2::X * 20.0, Vec2::Y * 20.0, ORANGE_RED); my_gizmos.short_arc_2d_between(Vec2::ZERO, Vec2::X * 40.0, Vec2::Y * 40.0, ORANGE_RED); gizmos.arrow_2d( Vec2::ZERO, Vec2::from_angle(sin_t_scaled / -10. + PI / 2.) * 50., YELLOW, ); // You can create more complex arrows using the arrow builder. gizmos .arrow_2d( Vec2::ZERO, Vec2::from_angle(sin_t_scaled / -10.) * 50., GREEN, ) .with_double_end() .with_tip_length(10.); } fn update_config( mut config_store: ResMut<GizmoConfigStore>, keyboard: Res<ButtonInput<KeyCode>>, real_time: Res<Time<Real>>, mut virtual_time: ResMut<Time<Virtual>>, ) { let (config, _) = config_store.config_mut::<DefaultGizmoConfigGroup>(); if keyboard.pressed(KeyCode::ArrowRight) { config.line.width += 5. * real_time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.pressed(KeyCode::ArrowLeft) { config.line.width -= 5. 
* real_time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.just_pressed(KeyCode::Digit1) { config.enabled ^= true; } if keyboard.just_pressed(KeyCode::KeyU) { config.line.style = match config.line.style { GizmoLineStyle::Solid => GizmoLineStyle::Dotted, GizmoLineStyle::Dotted => GizmoLineStyle::Dashed { gap_scale: 3.0, line_scale: 5.0, }, _ => GizmoLineStyle::Solid, }; } if keyboard.just_pressed(KeyCode::KeyI) { config.line.style = match config.line.style { GizmoLineStyle::Solid => GizmoLineStyle::Dashed { gap_scale: 3.0, line_scale: 5.0, }, GizmoLineStyle::Dotted => GizmoLineStyle::Solid, _ => GizmoLineStyle::Dotted, }; } if keyboard.just_pressed(KeyCode::KeyJ) { config.line.joints = match config.line.joints { GizmoLineJoint::Bevel => GizmoLineJoint::Miter, GizmoLineJoint::Miter => GizmoLineJoint::Round(4), GizmoLineJoint::Round(_) => GizmoLineJoint::None, GizmoLineJoint::None => GizmoLineJoint::Bevel, }; } if keyboard.just_pressed(KeyCode::KeyK) { config.line.joints = match config.line.joints { GizmoLineJoint::Bevel => GizmoLineJoint::None, GizmoLineJoint::Miter => GizmoLineJoint::Bevel, GizmoLineJoint::Round(_) => GizmoLineJoint::Miter, GizmoLineJoint::None => GizmoLineJoint::Round(4), }; } let (my_config, _) = config_store.config_mut::<MyRoundGizmos>(); if keyboard.pressed(KeyCode::ArrowUp) { my_config.line.width += 5. * real_time.delta_secs(); my_config.line.width = my_config.line.width.clamp(0., 50.); } if keyboard.pressed(KeyCode::ArrowDown) { my_config.line.width -= 5. 
* real_time.delta_secs(); my_config.line.width = my_config.line.width.clamp(0., 50.); } if keyboard.just_pressed(KeyCode::Digit2) { my_config.enabled ^= true; } if keyboard.just_pressed(KeyCode::KeyI) { my_config.line.style = match my_config.line.style { GizmoLineStyle::Solid => GizmoLineStyle::Dotted, GizmoLineStyle::Dotted => GizmoLineStyle::Dashed { gap_scale: 3.0, line_scale: 5.0, }, _ => GizmoLineStyle::Solid, }; } if keyboard.just_pressed(KeyCode::KeyK) { my_config.line.joints = match my_config.line.joints { GizmoLineJoint::Bevel => GizmoLineJoint::Miter, GizmoLineJoint::Miter => GizmoLineJoint::Round(4), GizmoLineJoint::Round(_) => GizmoLineJoint::None, GizmoLineJoint::None => GizmoLineJoint::Bevel, }; } if keyboard.just_pressed(KeyCode::Space) { virtual_time.toggle(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gizmos/light_gizmos.rs
examples/gizmos/light_gizmos.rs
//! This example demonstrates how to visualize lights properties through the gizmo API. use std::f32::consts::{FRAC_PI_2, PI}; use bevy::{ color::palettes::css::{DARK_CYAN, GOLD, GRAY, PURPLE}, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, rotate_camera) .add_systems(Update, update_config) .run(); } #[derive(Component)] struct GizmoColorText; fn gizmo_color_text(config: &LightGizmoConfigGroup) -> String { match config.color { LightGizmoColor::Manual(color) => format!("Manual {}", Srgba::from(color).to_hex()), LightGizmoColor::Varied => "Random from entity".to_owned(), LightGizmoColor::MatchLightColor => "Match light color".to_owned(), LightGizmoColor::ByLightType => { format!( "Point {}, Spot {}, Directional {}", Srgba::from(config.point_light_color).to_hex(), Srgba::from(config.spot_light_color).to_hex(), Srgba::from(config.directional_light_color).to_hex() ) } } } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut config_store: ResMut<GizmoConfigStore>, ) { // Circular base. commands.spawn(( Mesh3d(meshes.add(Circle::new(4.0))), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_rotation(Quat::from_rotation_x(-FRAC_PI_2)), )); // Cubes. { let mesh = meshes.add(Cuboid::new(1.0, 1.0, 1.0)); let material = materials.add(Color::srgb_u8(124, 144, 255)); for x in [-2.0, 0.0, 2.0] { commands.spawn(( Mesh3d(mesh.clone()), MeshMaterial3d(material.clone()), Transform::from_xyz(x, 0.5, 0.0), )); } } // Lights. 
{ commands.spawn(( PointLight { shadows_enabled: true, range: 2.0, color: DARK_CYAN.into(), ..default() }, Transform::from_xyz(0.0, 1.5, 0.0), )); commands.spawn(( SpotLight { shadows_enabled: true, range: 3.5, color: PURPLE.into(), outer_angle: PI / 4.0, inner_angle: PI / 4.0 * 0.8, ..default() }, Transform::from_xyz(4.0, 2.0, 0.0).looking_at(Vec3::X * 1.5, Vec3::Y), )); commands.spawn(( DirectionalLight { color: GOLD.into(), illuminance: DirectionalLight::default().illuminance * 0.05, shadows_enabled: true, ..default() }, Transform::from_xyz(-4.0, 2.0, 0.0).looking_at(Vec3::NEG_X * 1.5, Vec3::Y), )); } // Camera. commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Example instructions and gizmo config. { commands.spawn(( Text::new( "Press 'D' to toggle drawing gizmos on top of everything else in the scene\n\ Hold 'Left' or 'Right' to change the line width of the gizmos\n\ Press 'A' to toggle drawing of the light gizmos\n\ Press 'C' to cycle between the light gizmos coloring modes", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); let (_, light_config) = config_store.config_mut::<LightGizmoConfigGroup>(); light_config.draw_all = true; light_config.color = LightGizmoColor::MatchLightColor; commands .spawn(( Text::new("Gizmo color mode: "), GizmoColorText, Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )) .with_child(TextSpan(gizmo_color_text(light_config))); } } fn rotate_camera(mut transform: Single<&mut Transform, With<Camera>>, time: Res<Time>) { transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(time.delta_secs() / 2.)); } fn update_config( mut config_store: ResMut<GizmoConfigStore>, keyboard: Res<ButtonInput<KeyCode>>, time: Res<Time>, color_text_query: Single<Entity, With<GizmoColorText>>, mut writer: TextUiWriter, ) { if keyboard.just_pressed(KeyCode::KeyD) { for (_, config, _) in 
config_store.iter_mut() { config.depth_bias = if config.depth_bias == 0. { -1. } else { 0. }; } } let (config, light_config) = config_store.config_mut::<LightGizmoConfigGroup>(); if keyboard.pressed(KeyCode::ArrowRight) { config.line.width += 5. * time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.pressed(KeyCode::ArrowLeft) { config.line.width -= 5. * time.delta_secs(); config.line.width = config.line.width.clamp(0., 50.); } if keyboard.just_pressed(KeyCode::KeyA) { config.enabled ^= true; } if keyboard.just_pressed(KeyCode::KeyC) { light_config.color = match light_config.color { LightGizmoColor::Manual(_) => LightGizmoColor::Varied, LightGizmoColor::Varied => LightGizmoColor::MatchLightColor, LightGizmoColor::MatchLightColor => LightGizmoColor::ByLightType, LightGizmoColor::ByLightType => LightGizmoColor::Manual(GRAY.into()), }; *writer.text(*color_text_query, 1) = gizmo_color_text(light_config); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/remote/client.rs
examples/remote/client.rs
//! A simple command line client that allows issuing queries to a remote Bevy //! app via the BRP. //! This example requires the `bevy_remote` feature to be enabled. //! You can run it with the following command: //! ```text //! cargo run --example client --features="bevy_remote" //! ``` //! This example assumes that the `server` example is running on the same machine. use std::any::type_name; use anyhow::Result as AnyhowResult; use bevy::{ ecs::hierarchy::ChildOf, prelude::info, remote::{ builtin_methods::{ BrpQuery, BrpQueryFilter, BrpQueryParams, ComponentSelector, BRP_QUERY_METHOD, }, http::{DEFAULT_ADDR, DEFAULT_PORT}, BrpRequest, }, transform::components::Transform, }; /// The application entry point. fn main() -> AnyhowResult<()> { // Create the URL. We're going to need it to issue the HTTP request. let host_part = format!("{DEFAULT_ADDR}:{DEFAULT_PORT}"); let url = format!("http://{host_part}/"); // Creates a request to get all Transform components from the remote Bevy app. // This request will return all entities that have a Transform component. run_transform_only_query(&url)?; // Create a query that only returns root entities - ie, entities that do not // have a parent. run_query_root_entities(&url)?; // Create a query all request to send to the remote Bevy app. // This request will return all entities in the app, their components, and their // component values. 
run_query_all_components_and_entities(&url)?; Ok(()) } fn run_query_all_components_and_entities(url: &str) -> Result<(), anyhow::Error> { let query_all_req = BrpRequest { jsonrpc: String::from("2.0"), method: String::from(BRP_QUERY_METHOD), id: Some(serde_json::to_value(1)?), params: Some( serde_json::to_value(BrpQueryParams { data: BrpQuery { components: Vec::default(), option: ComponentSelector::All, has: Vec::default(), }, strict: false, filter: BrpQueryFilter::default(), }) .expect("Unable to convert query parameters to a valid JSON value"), ), }; info!("query_all req: {query_all_req:#?}"); let query_all_res = ureq::post(url) .send_json(query_all_req)? .body_mut() .read_json::<serde_json::Value>()?; info!("{query_all_res:#}"); Ok(()) } fn run_transform_only_query(url: &str) -> Result<(), anyhow::Error> { let get_transform_request = BrpRequest { jsonrpc: String::from("2.0"), method: String::from(BRP_QUERY_METHOD), id: Some(serde_json::to_value(1)?), params: Some( serde_json::to_value(BrpQueryParams { data: BrpQuery { components: vec![type_name::<Transform>().to_string()], ..Default::default() }, strict: false, filter: BrpQueryFilter::default(), }) .expect("Unable to convert query parameters to a valid JSON value"), ), }; info!("transform request: {get_transform_request:#?}"); let res = ureq::post(url) .send_json(get_transform_request)? 
.body_mut() .read_json::<serde_json::Value>()?; info!("{res:#}"); Ok(()) } fn run_query_root_entities(url: &str) -> Result<(), anyhow::Error> { let get_transform_request = BrpRequest { jsonrpc: String::from("2.0"), method: String::from(BRP_QUERY_METHOD), id: Some(serde_json::to_value(1)?), params: Some( serde_json::to_value(BrpQueryParams { data: BrpQuery { components: Vec::default(), option: ComponentSelector::All, has: Vec::default(), }, strict: false, filter: BrpQueryFilter { without: vec![type_name::<ChildOf>().to_string()], with: Vec::default(), }, }) .expect("Unable to convert query parameters to a valid JSON value"), ), }; info!("transform request: {get_transform_request:#?}"); let res = ureq::post(url) .send_json(get_transform_request)? .body_mut() .read_json::<serde_json::Value>()?; info!("{res:#}"); Ok(()) }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/remote/server.rs
examples/remote/server.rs
//! A Bevy app that you can connect to with the BRP and edit. //! Run this example with the `remote` feature enabled: //! ```bash //! cargo run --example server --features="bevy_remote" //! ``` use bevy::math::ops::cos; use bevy::{ input::common_conditions::input_just_pressed, prelude::*, remote::{http::RemoteHttpPlugin, RemotePlugin}, }; use serde::{Deserialize, Serialize}; fn main() { App::new() .add_plugins(DefaultPlugins) .add_plugins(RemotePlugin::default()) .add_plugins(RemoteHttpPlugin::default()) .add_systems(Startup, setup) .add_systems(Update, remove.run_if(input_just_pressed(KeyCode::Space))) .add_systems(Update, move_cube) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // circular base commands.spawn(( Mesh3d(meshes.add(Circle::new(4.0))), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)), )); // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), MeshMaterial3d(materials.add(Color::srgb_u8(124, 144, 255))), Transform::from_xyz(0.0, 0.5, 0.0), Cube(1.0), )); // test resource commands.insert_resource(TestResource { foo: Vec2::new(1.0, -1.0), bar: false, }); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y), )); } /// An arbitrary resource that can be inspected and manipulated with remote methods. #[derive(Resource, Reflect, Serialize, Deserialize)] #[reflect(Resource, Serialize, Deserialize)] pub struct TestResource { /// An arbitrary field of the test resource. pub foo: Vec2, /// Another arbitrary field. 
pub bar: bool, } fn move_cube(mut query: Query<&mut Transform, With<Cube>>, time: Res<Time>) { for mut transform in &mut query { transform.translation.y = -cos(time.elapsed_secs()) + 1.5; } } fn remove(mut commands: Commands, cube_entity: Single<Entity, With<Cube>>) { commands.entity(*cube_entity).remove::<Cube>(); } #[derive(Component, Reflect, Serialize, Deserialize)] #[reflect(Component, Serialize, Deserialize)] struct Cube(f32);
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/window_resizing.rs
examples/window/window_resizing.rs
//! This example illustrates how to resize windows, and how to respond to a window being resized. use bevy::{prelude::*, window::WindowResized}; fn main() { App::new() .insert_resource(ResolutionSettings { large: Vec2::new(1920.0, 1080.0), medium: Vec2::new(800.0, 600.0), small: Vec2::new(640.0, 360.0), }) .add_plugins(DefaultPlugins) .add_systems(Startup, (setup_camera, setup_ui)) .add_systems(Update, (on_resize_system, toggle_resolution)) .run(); } /// Marker component for the text that displays the current resolution. #[derive(Component)] struct ResolutionText; /// Stores the various window-resolutions we can select between. #[derive(Resource)] struct ResolutionSettings { large: Vec2, medium: Vec2, small: Vec2, } // Spawns the camera that draws UI fn setup_camera(mut commands: Commands) { commands.spawn(Camera2d); } // Spawns the UI fn setup_ui(mut commands: Commands) { // Node that fills entire background commands .spawn(Node { width: percent(100), ..default() }) // Text where we display current resolution .with_child(( Text::new("Resolution"), TextFont { font_size: 42.0, ..default() }, ResolutionText, )); } /// This system shows how to request the window to a new resolution fn toggle_resolution( keys: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>, resolution: Res<ResolutionSettings>, ) { if keys.just_pressed(KeyCode::Digit1) { let res = resolution.small; window.resolution.set(res.x, res.y); } if keys.just_pressed(KeyCode::Digit2) { let res = resolution.medium; window.resolution.set(res.x, res.y); } if keys.just_pressed(KeyCode::Digit3) { let res = resolution.large; window.resolution.set(res.x, res.y); } } /// This system shows how to respond to a window being resized. /// Whenever the window is resized, the text will update with the new resolution. 
fn on_resize_system( mut text: Single<&mut Text, With<ResolutionText>>, mut resize_reader: MessageReader<WindowResized>, ) { for e in resize_reader.read() { // When resolution is being changed text.0 = format!("{:.1} x {:.1}", e.width, e.height); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/window_settings.rs
examples/window/window_settings.rs
//! Illustrates how to change window settings and shows how to affect //! the mouse pointer in various ways. #[cfg(feature = "custom_cursor")] use bevy::window::{CustomCursor, CustomCursorImage}; use bevy::{ diagnostic::{FrameCount, FrameTimeDiagnosticsPlugin, LogDiagnosticsPlugin}, prelude::*, window::{ CursorGrabMode, CursorIcon, CursorOptions, PresentMode, SystemCursorIcon, WindowLevel, WindowTheme, }, }; fn main() { App::new() .add_plugins(( DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "I am a window!".into(), name: Some("bevy.app".into()), resolution: (500, 300).into(), present_mode: PresentMode::AutoVsync, // Tells Wasm to resize the window according to the available canvas fit_canvas_to_parent: true, // Tells Wasm not to override default event handling, like F5, Ctrl+R etc. prevent_default_event_handling: false, window_theme: Some(WindowTheme::Dark), enabled_buttons: bevy::window::EnabledButtons { maximize: false, ..Default::default() }, // This will spawn an invisible window // The window will be made visible in the make_visible() system after 3 frames. // This is useful when you want to avoid the white window that shows up before the GPU is ready to render the app. visible: false, ..default() }), ..default() }), LogDiagnosticsPlugin::default(), FrameTimeDiagnosticsPlugin::default(), )) .add_systems(Startup, init_cursor_icons) .add_systems( Update, ( change_title, toggle_theme, toggle_cursor, toggle_vsync, toggle_window_controls, cycle_cursor_icon, switch_level, make_visible, ), ) .run(); } fn make_visible(mut window: Single<&mut Window>, frames: Res<FrameCount>) { // The delay may be different for your app or system. if frames.0 == 3 { // At this point the gpu is ready to show the app so we can make the window visible. // Alternatively, you could toggle the visibility in Startup. 
// It will work, but it will have one white frame before it starts rendering window.visible = true; } } /// This system toggles the vsync mode when pressing the button V. /// You'll see fps increase displayed in the console. fn toggle_vsync(input: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>) { if input.just_pressed(KeyCode::KeyV) { window.present_mode = if matches!(window.present_mode, PresentMode::AutoVsync) { PresentMode::AutoNoVsync } else { PresentMode::AutoVsync }; info!("PRESENT_MODE: {:?}", window.present_mode); } } /// This system switches the window level when pressing the T button /// You'll notice it won't be covered by other windows, or will be covered by all the other /// windows depending on the level. /// /// This feature only works on some platforms. Please check the /// [documentation](https://docs.rs/bevy/latest/bevy/prelude/struct.Window.html#structfield.window_level) /// for more details. fn switch_level(input: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>) { if input.just_pressed(KeyCode::KeyT) { window.window_level = match window.window_level { WindowLevel::AlwaysOnBottom => WindowLevel::Normal, WindowLevel::Normal => WindowLevel::AlwaysOnTop, WindowLevel::AlwaysOnTop => WindowLevel::AlwaysOnBottom, }; info!("WINDOW_LEVEL: {:?}", window.window_level); } } /// This system toggles the window controls when pressing buttons 1, 2 and 3 /// /// This feature only works on some platforms. Please check the /// [documentation](https://docs.rs/bevy/latest/bevy/prelude/struct.Window.html#structfield.enabled_buttons) /// for more details. 
fn toggle_window_controls(input: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>) { let toggle_minimize = input.just_pressed(KeyCode::Digit1); let toggle_maximize = input.just_pressed(KeyCode::Digit2); let toggle_close = input.just_pressed(KeyCode::Digit3); if toggle_minimize || toggle_maximize || toggle_close { if toggle_minimize { window.enabled_buttons.minimize = !window.enabled_buttons.minimize; } if toggle_maximize { window.enabled_buttons.maximize = !window.enabled_buttons.maximize; } if toggle_close { window.enabled_buttons.close = !window.enabled_buttons.close; } } } /// This system will then change the title during execution fn change_title(mut window: Single<&mut Window>, time: Res<Time>) { window.title = format!( "Seconds since startup: {}", time.elapsed().as_secs_f32().round() ); } fn toggle_cursor(mut cursor_options: Single<&mut CursorOptions>, input: Res<ButtonInput<KeyCode>>) { if input.just_pressed(KeyCode::Space) { cursor_options.visible = !cursor_options.visible; cursor_options.grab_mode = match cursor_options.grab_mode { CursorGrabMode::None => CursorGrabMode::Locked, CursorGrabMode::Locked | CursorGrabMode::Confined => CursorGrabMode::None, }; } } /// This system will toggle the color theme used by the window fn toggle_theme(mut window: Single<&mut Window>, input: Res<ButtonInput<KeyCode>>) { if input.just_pressed(KeyCode::KeyF) && let Some(current_theme) = window.window_theme { window.window_theme = match current_theme { WindowTheme::Light => Some(WindowTheme::Dark), WindowTheme::Dark => Some(WindowTheme::Light), }; } } /// Resource with a set of cursor icons we want to cycle through #[derive(Resource)] struct CursorIcons(Vec<CursorIcon>); fn init_cursor_icons( mut commands: Commands, window: Single<Entity, With<Window>>, #[cfg(feature = "custom_cursor")] asset_server: Res<AssetServer>, ) { let cursor_icons = CursorIcons(vec![ SystemCursorIcon::Default.into(), SystemCursorIcon::Pointer.into(), SystemCursorIcon::Wait.into(), 
SystemCursorIcon::Text.into(), #[cfg(feature = "custom_cursor")] CustomCursor::Image(CustomCursorImage { handle: asset_server.load("branding/icon.png"), hotspot: (128, 128), ..Default::default() }) .into(), ]); // By default the Window entity does not have a CursorIcon component, so we add it here. commands.entity(*window).insert(cursor_icons.0[0].clone()); commands.insert_resource(cursor_icons); } /// This system cycles the cursor's icon through a small set of icons when clicking fn cycle_cursor_icon( mut cursor: Single<&mut CursorIcon>, input: Res<ButtonInput<MouseButton>>, mut index: Local<usize>, cursor_icons: Res<CursorIcons>, ) { if input.just_pressed(MouseButton::Left) { *index = (*index + 1) % cursor_icons.0.len(); **cursor = cursor_icons.0[*index].clone(); } else if input.just_pressed(MouseButton::Right) { *index = if *index == 0 { cursor_icons.0.len() - 1 } else { *index - 1 }; **cursor = cursor_icons.0[*index].clone(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/multi_window_text.rs
examples/window/multi_window_text.rs
//! Renders text to multiple windows with different scale factors using both Text and Text2d. use bevy::{ camera::{visibility::RenderLayers, RenderTarget}, color::palettes::css::{LIGHT_CYAN, YELLOW}, prelude::*, sprite::Text2dShadow, window::{WindowRef, WindowResolution}, }; fn main() { App::new() // By default, a primary window is spawned by `WindowPlugin`, contained in `DefaultPlugins`. // The primary window is given the `PrimaryWindow` marker component. .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { title: "Primary window".to_owned(), // Override the primary window's scale factor and use `1.` (no scaling). resolution: WindowResolution::default().with_scale_factor_override(1.), ..default() }), ..Default::default() })) .add_systems(Startup, setup_scene) .run(); } fn setup_scene(mut commands: Commands) { // The first camera; no render target is specified, its render target will be set to the primary window automatically. // This camera has no `RenderLayers` component, so it only renders entities belonging to render layer `0`. commands.spawn(Camera2d); // Spawn a second window let secondary_window = commands .spawn(Window { title: "Secondary Window".to_owned(), // Override the secondary window's scale factor and set it to double that of the primary window. // This means the second window's text will use glyphs drawn at twice the resolution of the primary window's text, // and they will be twice as big on screen. resolution: WindowResolution::default().with_scale_factor_override(2.), ..default() }) .id(); // Spawn a second camera let secondary_window_camera = commands .spawn(( Camera2d, // This camera will only render entities belonging to render layer `1`. RenderLayers::layer(1), // Without an explicit render target, this camera would also target the primary window. 
RenderTarget::Window(WindowRef::Entity(secondary_window)), )) .id(); let node = Node { position_type: PositionType::Absolute, top: Val::Px(12.0), left: Val::Px(12.0), ..default() }; let text_font = TextFont::from_font_size(30.); // UI nodes can only be rendered by one camera at a time and ignore `RenderLayers`. // This root UI node has no `UiTargetCamera` so `bevy_ui` will try to find a // camera with the `IsDefaultUiCamera` marker component. When that fails (neither // camera spawned here has an `IsDefaultUiCamera`), it queries for the // first camera targeting the primary window and uses that. commands.spawn(node.clone()).with_child(( Text::new("UI Text Primary Window"), text_font.clone(), TextShadow::default(), )); commands .spawn((node, UiTargetCamera(secondary_window_camera))) .with_child(( Text::new("UI Text Secondary Window"), text_font.clone(), TextShadow::default(), )); // `Text2d` belonging to render layer `0`. commands.spawn(( Text2d::new("Text2d Primary Window"), TextColor(YELLOW.into()), text_font.clone(), Text2dShadow::default(), )); // `Text2d` belonging to render layer `1`. commands.spawn(( Text2d::new("Text2d Secondary Window"), TextColor(YELLOW.into()), text_font.clone(), Text2dShadow::default(), RenderLayers::layer(1), )); // This `Text2d` entity belongs to both render layers `0` and `1`, so it will be rendered by both // cameras. A single text layout is generated per `Text2d` entity, targeting a specific scale // factor. Since the two camera's render targets have different scale factors, the text layout // will be generated using the higher scale factor (the secondary window's), and then downscaled when it is // drawn by the camera targeting the primary window. commands.spawn(( Text2d::new("Text2d Both Windows"), TextColor(LIGHT_CYAN.into()), text_font, Text2dShadow::default(), RenderLayers::from_layers(&[0, 1]), Transform::from_xyz(0., -50., 0.), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/scale_factor_override.rs
examples/window/scale_factor_override.rs
//! This example illustrates how to override the window scale factor imposed by the //! operating system. use bevy::{prelude::*, window::WindowResolution}; #[derive(Component)] struct CustomText; fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { resolution: WindowResolution::new(500, 300).with_scale_factor_override(1.0), ..default() }), ..default() })) .add_systems(Startup, setup) .add_systems( Update, (display_override, toggle_override, change_scale_factor), ) .run(); } fn setup(mut commands: Commands) { // camera commands.spawn(Camera2d); // root node commands.spawn(( Node { width: percent(100), height: percent(100), justify_content: JustifyContent::SpaceBetween, ..default() }, children![( Node { width: px(300), height: percent(100), border: UiRect::all(px(2)), ..default() }, BackgroundColor(Color::srgb(0.65, 0.65, 0.65)), children![( CustomText, Text::new("Example text"), TextFont { font_size: 25.0, ..default() }, Node { align_self: AlignSelf::FlexEnd, ..default() }, )] )], )); } /// Set the title of the window to the current override fn display_override( mut window: Single<&mut Window>, mut custom_text: Single<&mut Text, With<CustomText>>, ) { let text = format!( "Scale factor: {:.1} {}", window.scale_factor(), if window.resolution.scale_factor_override().is_some() { "(overridden)" } else { "(default)" } ); window.title.clone_from(&text); custom_text.0 = text; } /// This system toggles scale factor overrides when enter is pressed fn toggle_override(input: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>) { if input.just_pressed(KeyCode::Enter) { let scale_factor_override = window.resolution.scale_factor_override(); window .resolution .set_scale_factor_override(scale_factor_override.xor(Some(1.0))); } } /// This system changes the scale factor override when up or down is pressed fn change_scale_factor(input: Res<ButtonInput<KeyCode>>, mut window: Single<&mut Window>) { let scale_factor_override = 
window.resolution.scale_factor_override(); if input.just_pressed(KeyCode::ArrowUp) { window .resolution .set_scale_factor_override(scale_factor_override.map(|n| n + 1.0)); } else if input.just_pressed(KeyCode::ArrowDown) { window .resolution .set_scale_factor_override(scale_factor_override.map(|n| (n - 1.0).max(1.0))); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/screenshot.rs
examples/window/screenshot.rs
//! An example showing how to save screenshots to disk use bevy::{ prelude::*, render::view::screenshot::{save_to_disk, Capturing, Screenshot}, window::{CursorIcon, SystemCursorIcon}, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (screenshot_on_spacebar, screenshot_saving)) .run(); } fn screenshot_on_spacebar( mut commands: Commands, input: Res<ButtonInput<KeyCode>>, mut counter: Local<u32>, ) { if input.just_pressed(KeyCode::Space) { let path = format!("./screenshot-{}.png", *counter); *counter += 1; commands .spawn(Screenshot::primary_window()) .observe(save_to_disk(path)); } } fn screenshot_saving( mut commands: Commands, screenshot_saving: Query<Entity, With<Capturing>>, window: Single<Entity, With<Window>>, ) { match screenshot_saving.iter().count() { 0 => { commands.entity(*window).remove::<CursorIcon>(); } x if x > 0 => { commands .entity(*window) .insert(CursorIcon::from(SystemCursorIcon::Progress)); } _ => {} } } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), )); // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))), Transform::from_xyz(0.0, 0.5, 0.0), )); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.spawn(( Text::new("Press <spacebar> to save a screenshot to disk"), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/low_power.rs
examples/window/low_power.rs
//! This example illustrates how to run a winit window in a reactive, low power mode. //! //! This is useful for making desktop applications, or any other program that doesn't need to be //! running the event loop non-stop. use bevy::{ prelude::*, window::{PresentMode, RequestRedraw, WindowPlugin}, winit::{EventLoopProxyWrapper, WinitSettings, WinitUserEvent}, }; use core::time::Duration; fn main() { App::new() // Continuous rendering for games - bevy's default. .insert_resource(WinitSettings::game()) // Power-saving reactive rendering for applications. .insert_resource(WinitSettings::desktop_app()) // You can also customize update behavior with the fields of [`WinitSettings`] .insert_resource(WinitSettings { focused_mode: bevy::winit::UpdateMode::Continuous, unfocused_mode: bevy::winit::UpdateMode::reactive_low_power(Duration::from_millis(10)), }) .insert_resource(ExampleMode::Game) .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { // Turn off vsync to maximize CPU/GPU usage present_mode: PresentMode::AutoNoVsync, ..default() }), ..default() })) .add_systems(Startup, test_setup::setup) .add_systems( Update, ( test_setup::cycle_modes, test_setup::rotate_cube, test_setup::update_text, update_winit, ), ) .run(); } #[derive(Resource, Debug)] enum ExampleMode { Game, Application, ApplicationWithRequestRedraw, ApplicationWithWakeUp, } /// Update winit based on the current `ExampleMode` fn update_winit( mode: Res<ExampleMode>, mut winit_config: ResMut<WinitSettings>, event_loop_proxy: Res<EventLoopProxyWrapper>, mut redraw_request_writer: MessageWriter<RequestRedraw>, ) { use ExampleMode::*; *winit_config = match *mode { Game => { // In the default `WinitSettings::game()` mode: // * When focused: the event loop runs as fast as possible // * When not focused: the app will update when the window is directly interacted with // (e.g. 
the mouse hovers over a visible part of the out of focus window), a // [`RequestRedraw`] event is received, or one sixtieth of a second has passed // without the app updating (60 Hz refresh rate max). WinitSettings::game() } Application => { // While in `WinitSettings::desktop_app()` mode: // * When focused: the app will update any time a winit event (e.g. the window is // moved/resized, the mouse moves, a button is pressed, etc.), a [`RequestRedraw`] // event is received, or after 5 seconds if the app has not updated. // * When not focused: the app will update when the window is directly interacted with // (e.g. the mouse hovers over a visible part of the out of focus window), a // [`RequestRedraw`] event is received, or one minute has passed without the app // updating. WinitSettings::desktop_app() } ApplicationWithRequestRedraw => { // Sending a `RequestRedraw` event is useful when you want the app to update the next // frame regardless of any user input. For example, your application might use // `WinitSettings::desktop_app()` to reduce power use, but UI animations need to play even // when there are no inputs, so you send redraw requests while the animation is playing. // Note that in this example the RequestRedraw winit event will make the app run in the same // way as continuous redraw_request_writer.write(RequestRedraw); WinitSettings::desktop_app() } ApplicationWithWakeUp => { // Sending a `WakeUp` event is useful when you want the app to update the next // frame regardless of any user input. This can be used from outside Bevy, see example // `window/custom_user_event.rs` for an example usage from outside. // Note that in this example the `WakeUp` winit event will make the app run in the same // way as continuous let _ = event_loop_proxy.send_event(WinitUserEvent::WakeUp); WinitSettings::desktop_app() } }; } /// Everything in this module is for setting up and animating the scene, and is not important to the /// demonstrated features. 
pub(crate) mod test_setup { use crate::ExampleMode; use bevy::{ color::palettes::basic::{LIME, YELLOW}, prelude::*, window::RequestRedraw, }; /// Switch between update modes when the spacebar is pressed. pub(crate) fn cycle_modes( mut mode: ResMut<ExampleMode>, button_input: Res<ButtonInput<KeyCode>>, ) { if button_input.just_pressed(KeyCode::Space) { *mode = match *mode { ExampleMode::Game => ExampleMode::Application, ExampleMode::Application => ExampleMode::ApplicationWithRequestRedraw, ExampleMode::ApplicationWithRequestRedraw => ExampleMode::ApplicationWithWakeUp, ExampleMode::ApplicationWithWakeUp => ExampleMode::Game, }; } } #[derive(Component)] pub(crate) struct Rotator; /// Rotate the cube to make it clear when the app is updating pub(crate) fn rotate_cube( time: Res<Time>, mut cube_transform: Query<&mut Transform, With<Rotator>>, ) { for mut transform in &mut cube_transform { transform.rotate_x(time.delta_secs()); transform.rotate_local_y(time.delta_secs()); } } #[derive(Component)] pub struct ModeText; pub(crate) fn update_text( mut frame: Local<usize>, mode: Res<ExampleMode>, text: Single<Entity, With<ModeText>>, mut writer: TextUiWriter, ) { *frame += 1; let mode = match *mode { ExampleMode::Game => "game(), continuous, default", ExampleMode::Application => "desktop_app(), reactive", ExampleMode::ApplicationWithRequestRedraw => { "desktop_app(), reactive, RequestRedraw sent" } ExampleMode::ApplicationWithWakeUp => "desktop_app(), reactive, WakeUp sent", }; *writer.text(*text, 2) = mode.to_string(); *writer.text(*text, 4) = frame.to_string(); } /// Set up a scene with a cube and some text pub fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, mut request_redraw_writer: MessageWriter<RequestRedraw>, ) { commands.spawn(( Mesh3d(meshes.add(Cuboid::new(0.5, 0.5, 0.5))), MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))), Rotator, )); commands.spawn(( DirectionalLight::default(), 
Transform::from_xyz(1.0, 1.0, 1.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.0, 2.0).looking_at(Vec3::ZERO, Vec3::Y), )); request_redraw_writer.write(RequestRedraw); commands.spawn(( Text::default(), Node { align_self: AlignSelf::FlexStart, position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, ModeText, children![ TextSpan::new("Press space bar to cycle modes\n"), (TextSpan::default(), TextColor(LIME.into())), (TextSpan::new("\nFrame: "), TextColor(YELLOW.into())), (TextSpan::new(""), TextColor(YELLOW.into())), ], )); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/multiple_windows.rs
examples/window/multiple_windows.rs
//! Uses two windows to visualize a 3D model from different angles. use bevy::{camera::RenderTarget, prelude::*, window::WindowRef}; fn main() { App::new() // By default, a primary window gets spawned by `WindowPlugin`, contained in `DefaultPlugins` .add_plugins(DefaultPlugins) .add_systems(Startup, setup_scene) .run(); } fn setup_scene(mut commands: Commands, asset_server: Res<AssetServer>) { // add entities to the world commands.spawn(SceneRoot( asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/torus/torus.gltf")), )); // light commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 3.0, 3.0).looking_at(Vec3::ZERO, Vec3::Y), )); let first_window_camera = commands .spawn(( Camera3d::default(), Transform::from_xyz(0.0, 0.0, 6.0).looking_at(Vec3::ZERO, Vec3::Y), )) .id(); // Spawn a second window let second_window = commands .spawn(Window { title: "Second window".to_owned(), ..default() }) .id(); let second_window_camera = commands .spawn(( Camera3d::default(), Transform::from_xyz(6.0, 0.0, 0.0).looking_at(Vec3::ZERO, Vec3::Y), RenderTarget::Window(WindowRef::Entity(second_window)), )) .id(); let node = Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }; commands .spawn(( node.clone(), // Since we are using multiple cameras, we need to specify which camera UI should be rendered to UiTargetCamera(first_window_camera), )) .with_child((Text::new("First window"), TextShadow::default())); commands .spawn((node, UiTargetCamera(second_window_camera))) .with_child((Text::new("Second window"), TextShadow::default())); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/monitor_info.rs
examples/window/monitor_info.rs
//! Displays information about available monitors (displays). use bevy::{ camera::RenderTarget, prelude::*, window::{ExitCondition, Monitor, WindowMode, WindowRef}, }; fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: None, exit_condition: ExitCondition::DontExit, ..default() })) .add_systems(Update, (update, close_on_esc)) .run(); } #[derive(Component)] struct MonitorRef(Entity); fn update( mut commands: Commands, monitors_added: Query<(Entity, &Monitor), Added<Monitor>>, mut monitors_removed: RemovedComponents<Monitor>, monitor_refs: Query<(Entity, &MonitorRef)>, ) { for (entity, monitor) in monitors_added.iter() { // Spawn a new window on each monitor let name = monitor.name.clone().unwrap_or_else(|| "<no name>".into()); let size = format!("{}x{}px", monitor.physical_height, monitor.physical_width); let hz = monitor .refresh_rate_millihertz .map(|x| format!("{}Hz", x as f32 / 1000.0)) .unwrap_or_else(|| "<unknown>".into()); let position = format!( "x={} y={}", monitor.physical_position.x, monitor.physical_position.y ); let scale = format!("{:.2}", monitor.scale_factor); let window = commands .spawn(( Window { title: name.clone(), mode: WindowMode::Fullscreen( MonitorSelection::Entity(entity), VideoModeSelection::Current, ), position: WindowPosition::Centered(MonitorSelection::Entity(entity)), ..default() }, MonitorRef(entity), )) .id(); let camera = commands .spawn((Camera2d, RenderTarget::Window(WindowRef::Entity(window)))) .id(); let info_text = format!( "Monitor: {name}\nSize: {size}\nRefresh rate: {hz}\nPosition: {position}\nScale: {scale}\n\n", ); commands.spawn(( Text(info_text), Node { position_type: PositionType::Relative, height: percent(100), width: percent(100), ..default() }, UiTargetCamera(camera), MonitorRef(entity), )); } // Remove windows for removed monitors for monitor_entity in monitors_removed.read() { for (ref_entity, monitor_ref) in monitor_refs.iter() { if monitor_ref.0 == monitor_entity { 
commands.entity(ref_entity).despawn(); } } } } fn close_on_esc( mut commands: Commands, focused_windows: Query<(Entity, &Window)>, input: Res<ButtonInput<KeyCode>>, ) { for (window, focus) in focused_windows.iter() { if !focus.focused { continue; } if input.just_pressed(KeyCode::Escape) { commands.entity(window).despawn(); } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/custom_cursor_image.rs
examples/window/custom_cursor_image.rs
//! Illustrates how to use a custom cursor image with a texture atlas and //! animation. use std::time::Duration; use bevy::{ prelude::*, window::{CursorIcon, CustomCursor, CustomCursorImage}, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems( Startup, (setup_cursor_icon, setup_camera, setup_instructions), ) .add_systems( Update, ( execute_animation, toggle_texture_atlas, toggle_flip_x, toggle_flip_y, cycle_rect, ), ) .run(); } fn setup_cursor_icon( mut commands: Commands, asset_server: Res<AssetServer>, mut texture_atlas_layouts: ResMut<Assets<TextureAtlasLayout>>, window: Single<Entity, With<Window>>, ) { let layout = TextureAtlasLayout::from_grid(UVec2::splat(64), 20, 10, Some(UVec2::splat(5)), None); let texture_atlas_layout = texture_atlas_layouts.add(layout); let animation_config = AnimationConfig::new(0, 199, 1, 4); commands.entity(*window).insert(( CursorIcon::Custom(CustomCursor::Image(CustomCursorImage { // Image to use as the cursor. handle: asset_server .load("cursors/kenney_crosshairPack/Tilesheet/crosshairs_tilesheet_white.png"), // Optional texture atlas allows you to pick a section of the image // and animate it. texture_atlas: Some(TextureAtlas { layout: texture_atlas_layout.clone(), index: animation_config.first_sprite_index, }), flip_x: false, flip_y: false, // Optional section of the image to use as the cursor. rect: None, // The hotspot is the point in the cursor image that will be // positioned at the mouse cursor's position. 
hotspot: (0, 0), })), animation_config, )); } fn setup_camera(mut commands: Commands) { commands.spawn(Camera3d::default()); } fn setup_instructions(mut commands: Commands) { commands.spawn(( Text::new( "Press T to toggle the cursor's `texture_atlas`.\n Press X to toggle the cursor's `flip_x` setting.\n Press Y to toggle the cursor's `flip_y` setting.\n Press C to cycle through the sections of the cursor's image using `rect`.", ), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } #[derive(Component)] struct AnimationConfig { first_sprite_index: usize, last_sprite_index: usize, increment: usize, fps: u8, frame_timer: Timer, } impl AnimationConfig { fn new(first: usize, last: usize, increment: usize, fps: u8) -> Self { Self { first_sprite_index: first, last_sprite_index: last, increment, fps, frame_timer: Self::timer_from_fps(fps), } } fn timer_from_fps(fps: u8) -> Timer { Timer::new(Duration::from_secs_f32(1.0 / (fps as f32)), TimerMode::Once) } } /// This system loops through all the sprites in the [`CursorIcon`]'s /// [`TextureAtlas`], from [`AnimationConfig`]'s `first_sprite_index` to /// `last_sprite_index`. 
fn execute_animation(time: Res<Time>, mut query: Query<(&mut AnimationConfig, &mut CursorIcon)>) { for (mut config, mut cursor_icon) in &mut query { if let CursorIcon::Custom(CustomCursor::Image(ref mut image)) = *cursor_icon { config.frame_timer.tick(time.delta()); if config.frame_timer.is_finished() && let Some(atlas) = image.texture_atlas.as_mut() { atlas.index += config.increment; if atlas.index > config.last_sprite_index { atlas.index = config.first_sprite_index; } config.frame_timer = AnimationConfig::timer_from_fps(config.fps); } } } } fn toggle_texture_atlas( input: Res<ButtonInput<KeyCode>>, mut query: Query<&mut CursorIcon, With<Window>>, mut cached_atlas: Local<Option<TextureAtlas>>, // this lets us restore the previous value ) { if input.just_pressed(KeyCode::KeyT) { for mut cursor_icon in &mut query { if let CursorIcon::Custom(CustomCursor::Image(ref mut image)) = *cursor_icon { match image.texture_atlas.take() { Some(a) => { // Save the current texture atlas. *cached_atlas = Some(a.clone()); } None => { // Restore the cached texture atlas. if let Some(cached_a) = cached_atlas.take() { image.texture_atlas = Some(cached_a); } } } } } } } fn toggle_flip_x( input: Res<ButtonInput<KeyCode>>, mut query: Query<&mut CursorIcon, With<Window>>, ) { if input.just_pressed(KeyCode::KeyX) { for mut cursor_icon in &mut query { if let CursorIcon::Custom(CustomCursor::Image(ref mut image)) = *cursor_icon { image.flip_x = !image.flip_x; } } } } fn toggle_flip_y( input: Res<ButtonInput<KeyCode>>, mut query: Query<&mut CursorIcon, With<Window>>, ) { if input.just_pressed(KeyCode::KeyY) { for mut cursor_icon in &mut query { if let CursorIcon::Custom(CustomCursor::Image(ref mut image)) = *cursor_icon { image.flip_y = !image.flip_y; } } } } /// This system alternates the [`CursorIcon`]'s `rect` field between `None` and /// 4 sections/rectangles of the cursor's image. 
fn cycle_rect(input: Res<ButtonInput<KeyCode>>, mut query: Query<&mut CursorIcon, With<Window>>) { if !input.just_pressed(KeyCode::KeyC) { return; } const RECT_SIZE: u32 = 32; // half the size of a tile in the texture atlas const SECTIONS: [Option<URect>; 5] = [ Some(URect { min: UVec2::ZERO, max: UVec2::splat(RECT_SIZE), }), Some(URect { min: UVec2::new(RECT_SIZE, 0), max: UVec2::new(2 * RECT_SIZE, RECT_SIZE), }), Some(URect { min: UVec2::new(0, RECT_SIZE), max: UVec2::new(RECT_SIZE, 2 * RECT_SIZE), }), Some(URect { min: UVec2::new(RECT_SIZE, RECT_SIZE), max: UVec2::splat(2 * RECT_SIZE), }), None, // reset to None ]; for mut cursor_icon in &mut query { if let CursorIcon::Custom(CustomCursor::Image(ref mut image)) = *cursor_icon { let next_rect = SECTIONS .iter() .cycle() .skip_while(|&&corner| corner != image.rect) .nth(1) // move to the next element .unwrap_or(&None); image.rect = *next_rect; } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/transparent_window.rs
examples/window/transparent_window.rs
//! Shows how to display a window in transparent mode. //! //! This feature works as expected depending on the platform. Please check the //! [documentation](https://docs.rs/bevy/latest/bevy/prelude/struct.Window.html#structfield.transparent) //! for more details. use bevy::prelude::*; #[cfg(any(target_os = "macos", target_os = "linux"))] use bevy::window::CompositeAlphaMode; fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { // Setting `transparent` allows the `ClearColor`'s alpha value to take effect transparent: true, // Disabling window decorations to make it feel more like a widget than a window decorations: false, #[cfg(target_os = "macos")] composite_alpha_mode: CompositeAlphaMode::PostMultiplied, #[cfg(target_os = "linux")] composite_alpha_mode: CompositeAlphaMode::PreMultiplied, ..default() }), ..default() })) // ClearColor must have 0 alpha, otherwise some color will bleed through .insert_resource(ClearColor(Color::NONE)) .add_systems(Startup, setup) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(Camera2d); commands.spawn(Sprite::from_image(asset_server.load("branding/icon.png"))); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/window_drag_move.rs
examples/window/window_drag_move.rs
//! This example illustrates drag move and drag resize without window //! decorations. //! //! When window decorations are not present, the user cannot drag a window by //! its titlebar to change its position. The `start_drag_move()` function //! permits a user to drag a window by left clicking anywhere in the window; //! left click must be pressed and other constraints can be imposed. For //! instance an application could require a user to hold down alt and left click //! to drag a window. //! //! The `start_drag_resize()` function behaves similarly but permits a window to //! be resized. use bevy::{math::CompassOctant, prelude::*}; /// Determine what do on left click. #[derive(Resource, Debug)] enum LeftClickAction { /// Do nothing. Nothing, /// Move the window on left click. Move, /// Resize the window on left click. Resize, } /// What direction index should the window resize toward. #[derive(Resource)] struct ResizeDir(usize); /// Directions that the drag resizes the window toward. const DIRECTIONS: [CompassOctant; 8] = [ CompassOctant::North, CompassOctant::NorthEast, CompassOctant::East, CompassOctant::SouthEast, CompassOctant::South, CompassOctant::SouthWest, CompassOctant::West, CompassOctant::NorthWest, ]; fn main() { App::new() .add_plugins(DefaultPlugins.set(WindowPlugin { primary_window: Some(Window { decorations: false, ..default() }), ..default() })) .insert_resource(ResizeDir(7)) .insert_resource(LeftClickAction::Move) .add_systems(Startup, setup) .add_systems(Update, (handle_input, move_or_resize_windows)) .run(); } fn setup(mut commands: Commands) { // Camera commands.spawn(Camera3d::default()); // UI commands.spawn(( Node { position_type: PositionType::Absolute, padding: UiRect::all(px(5)), ..default() }, BackgroundColor(Color::BLACK.with_alpha(0.75)), GlobalZIndex(i32::MAX), children![( Text::default(), children![ TextSpan::new( "Demonstrate drag move and drag resize without window decorations.\n\n", ), TextSpan::new("Controls:\n"), 
TextSpan::new("A - change left click action ["), TextSpan::new("Move"), TextSpan::new("]\n"), TextSpan::new("S / D - change resize direction ["), TextSpan::new("NorthWest"), TextSpan::new("]\n"), ] )], )); } fn handle_input( input: Res<ButtonInput<KeyCode>>, mut action: ResMut<LeftClickAction>, mut dir: ResMut<ResizeDir>, example_text: Query<Entity, With<Text>>, mut writer: TextUiWriter, ) -> Result { use LeftClickAction::*; if input.just_pressed(KeyCode::KeyA) { *action = match *action { Move => Resize, Resize => Nothing, Nothing => Move, }; *writer.text(example_text.single()?, 4) = format!("{:?}", *action); } if input.just_pressed(KeyCode::KeyS) { dir.0 = dir .0 .checked_sub(1) .unwrap_or(DIRECTIONS.len().saturating_sub(1)); *writer.text(example_text.single()?, 7) = format!("{:?}", DIRECTIONS[dir.0]); } if input.just_pressed(KeyCode::KeyD) { dir.0 = (dir.0 + 1) % DIRECTIONS.len(); *writer.text(example_text.single()?, 7) = format!("{:?}", DIRECTIONS[dir.0]); } Ok(()) } fn move_or_resize_windows( mut windows: Query<&mut Window>, action: Res<LeftClickAction>, input: Res<ButtonInput<MouseButton>>, dir: Res<ResizeDir>, ) { // Both `start_drag_move()` and `start_drag_resize()` must be called after a // left mouse button press as done here. // // winit 0.30.5 may panic when initiated without a left mouse button press. if input.just_pressed(MouseButton::Left) { for mut window in windows.iter_mut() { match *action { LeftClickAction::Nothing => (), LeftClickAction::Move => window.start_drag_move(), LeftClickAction::Resize => { let d = DIRECTIONS[dir.0]; window.start_drag_resize(d); } } } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/window/clear_color.rs
examples/window/clear_color.rs
//! Shows how to set the solid color that is used to paint the window before the frame gets drawn. //! //! Acts as background color, since pixels that are not drawn in a frame remain unchanged. use bevy::{color::palettes::css::PURPLE, prelude::*}; fn main() { App::new() .insert_resource(ClearColor(Color::srgb(0.5, 0.5, 0.9))) .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, change_clear_color) .run(); } fn setup(mut commands: Commands) { commands.spawn(Camera2d); } fn change_clear_color(input: Res<ButtonInput<KeyCode>>, mut clear_color: ResMut<ClearColor>) { if input.just_pressed(KeyCode::Space) { clear_color.0 = PURPLE.into(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/helpers/widgets.rs
examples/helpers/widgets.rs
//! Simple widgets for example UI. //! //! Unlike other examples, which demonstrate an application, this demonstrates a plugin library. use bevy::prelude::*; /// An event that's sent whenever the user changes one of the settings by /// clicking a radio button. #[derive(Clone, Message, Deref, DerefMut)] pub struct WidgetClickEvent<T>(T); /// A marker component that we place on all widgets that send /// [`WidgetClickEvent`]s of the given type. #[derive(Clone, Component, Deref, DerefMut)] pub struct WidgetClickSender<T>(pub T) where T: Clone + Send + Sync + 'static; /// A marker component that we place on all radio `Button`s. #[derive(Clone, Copy, Component)] pub struct RadioButton; /// A marker component that we place on all `Text` inside radio buttons. #[derive(Clone, Copy, Component)] pub struct RadioButtonText; /// The size of the border that surrounds buttons. pub const BUTTON_BORDER: UiRect = UiRect::all(Val::Px(1.0)); /// The color of the border that surrounds buttons. pub const BUTTON_BORDER_COLOR: BorderColor = BorderColor { left: Color::WHITE, right: Color::WHITE, top: Color::WHITE, bottom: Color::WHITE, }; /// The amount of rounding to apply to button corners. pub const BUTTON_BORDER_RADIUS_SIZE: Val = Val::Px(6.0); /// The amount of space between the edge of the button and its label. pub const BUTTON_PADDING: UiRect = UiRect::axes(Val::Px(12.0), Val::Px(6.0)); /// Returns a [`Node`] appropriate for the outer main UI node. /// /// This UI is in the bottom left corner and has flex column support pub fn main_ui_node() -> Node { Node { flex_direction: FlexDirection::Column, position_type: PositionType::Absolute, row_gap: px(6), left: px(10), bottom: px(10), ..default() } } /// Spawns a single radio button that allows configuration of a setting. /// /// The type parameter specifies the value that will be packaged up and sent in /// a [`WidgetClickEvent`] when the radio button is clicked. 
pub fn option_button<T>( option_value: T, option_name: &str, is_selected: bool, is_first: bool, is_last: bool, ) -> impl Bundle where T: Clone + Send + Sync + 'static, { let (bg_color, fg_color) = if is_selected { (Color::WHITE, Color::BLACK) } else { (Color::BLACK, Color::WHITE) }; // Add the button node. ( Button, Node { border: BUTTON_BORDER.with_left(if is_first { px(1) } else { px(0) }), justify_content: JustifyContent::Center, align_items: AlignItems::Center, padding: BUTTON_PADDING, border_radius: BorderRadius::ZERO .with_left(if is_first { BUTTON_BORDER_RADIUS_SIZE } else { px(0) }) .with_right(if is_last { BUTTON_BORDER_RADIUS_SIZE } else { px(0) }), ..default() }, BUTTON_BORDER_COLOR, BackgroundColor(bg_color), RadioButton, WidgetClickSender(option_value.clone()), children![( ui_text(option_name, fg_color), RadioButtonText, WidgetClickSender(option_value), )], ) } /// Spawns the buttons that allow configuration of a setting. /// /// The user may change the setting to any one of the labeled `options`. The /// value of the given type parameter will be packaged up and sent as a /// [`WidgetClickEvent`] when one of the radio buttons is clicked. pub fn option_buttons<T>(title: &str, options: &[(T, &str)]) -> impl Bundle where T: Clone + Send + Sync + 'static, { let buttons = options .iter() .cloned() .enumerate() .map(|(option_index, (option_value, option_name))| { option_button( option_value, option_name, option_index == 0, option_index == 0, option_index == options.len() - 1, ) }) .collect::<Vec<_>>(); // Add the parent node for the row. ( Node { align_items: AlignItems::Center, ..default() }, Children::spawn(( Spawn(( ui_text(title, Color::BLACK), Node { width: px(125), ..default() }, )), SpawnIter(buttons.into_iter()), )), ) } /// Creates a text bundle for the UI. 
pub fn ui_text(label: &str, color: Color) -> impl Bundle + use<> { ( Text::new(label), TextFont { font_size: 18.0, ..default() }, TextColor(color), ) } /// Checks for clicks on the radio buttons and sends `RadioButtonChangeEvent`s /// as necessary. pub fn handle_ui_interactions<T>( mut interactions: Query<(&Interaction, &WidgetClickSender<T>), With<Button>>, mut widget_click_events: MessageWriter<WidgetClickEvent<T>>, ) where T: Clone + Send + Sync + 'static, { for (interaction, click_event) in interactions.iter_mut() { if *interaction == Interaction::Pressed { widget_click_events.write(WidgetClickEvent((**click_event).clone())); } } } /// Updates the style of the button part of a radio button to reflect its /// selected status. pub fn update_ui_radio_button(background_color: &mut BackgroundColor, selected: bool) { background_color.0 = if selected { Color::WHITE } else { Color::BLACK }; } /// Updates the color of the label of a radio button to reflect its selected /// status. pub fn update_ui_radio_button_text(entity: Entity, writer: &mut TextUiWriter, selected: bool) { let text_color = if selected { Color::BLACK } else { Color::WHITE }; writer.for_each_color(entity, |mut color| { color.0 = text_color; }); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/mobile/src/lib.rs
examples/mobile/src/lib.rs
//! A 3d Scene with a button and playing sound. use bevy::{ color::palettes::basic::*, input::{gestures::RotationGesture, touch::TouchPhase}, log::{Level, LogPlugin}, prelude::*, window::{AppLifecycle, ScreenEdge, WindowMode}, winit::WinitSettings, }; // the `bevy_main` proc_macro generates the required boilerplate for Android #[bevy_main] /// The entry point for the application. Is `pub` so that it can be used from /// `main.rs`. pub fn main() { let mut app = App::new(); app.add_plugins( DefaultPlugins .set(LogPlugin { // This will show some log events from Bevy to the native logger. level: Level::DEBUG, filter: "wgpu=error,bevy_render=info,bevy_ecs=trace".to_string(), ..Default::default() }) .set(WindowPlugin { primary_window: Some(Window { resizable: false, mode: WindowMode::BorderlessFullscreen(MonitorSelection::Primary), // on iOS, gestures must be enabled. // This doesn't work on Android recognize_rotation_gesture: true, // Only has an effect on iOS prefers_home_indicator_hidden: true, // Only has an effect on iOS prefers_status_bar_hidden: true, // Only has an effect on iOS preferred_screen_edges_deferring_system_gestures: ScreenEdge::Bottom, ..default() }), ..default() }), ) // Make the winit loop wait more aggressively when no user input is received // This can help reduce cpu usage on mobile devices .insert_resource(WinitSettings::mobile()) .add_systems(Startup, (setup_scene, setup_music)) .add_systems( Update, ( touch_camera, button_handler, // Only run the lifetime handler when an [`AudioSink`] component exists in the world. // This ensures we don't try to manage audio that hasn't been initialized yet. 
handle_lifetime.run_if(any_with_component::<AudioSink>), ), ) .run(); } fn touch_camera( window: Query<&Window>, mut touch_inputs: MessageReader<TouchInput>, mut camera_transform: Single<&mut Transform, With<Camera3d>>, mut last_position: Local<Option<Vec2>>, mut rotation_gestures: MessageReader<RotationGesture>, ) { let Ok(window) = window.single() else { return; }; for touch_input in touch_inputs.read() { if touch_input.phase == TouchPhase::Started { *last_position = None; } if let Some(last_position) = *last_position { **camera_transform = Transform::from_xyz( camera_transform.translation.x + (touch_input.position.x - last_position.x) / window.width() * 5.0, camera_transform.translation.y, camera_transform.translation.z + (touch_input.position.y - last_position.y) / window.height() * 5.0, ) .looking_at(Vec3::ZERO, Vec3::Y); } *last_position = Some(touch_input.position); } // Rotation gestures only work on iOS for rotation_gesture in rotation_gestures.read() { let forward = camera_transform.forward(); camera_transform.rotate_axis(forward, rotation_gesture.0 / 10.0); } } /// set up a simple 3D scene fn setup_scene( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(Color::srgb(0.1, 0.2, 0.1))), )); // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::srgb(0.5, 0.4, 0.3))), Transform::from_xyz(0.0, 0.5, 0.0), )); // sphere commands.spawn(( Mesh3d(meshes.add(Sphere::new(0.5).mesh().ico(4).unwrap())), MeshMaterial3d(materials.add(Color::srgb(0.1, 0.4, 0.8))), Transform::from_xyz(1.5, 1.5, 1.5), )); // light commands.spawn(( PointLight { intensity: 1_000_000.0, // Shadows makes some Android devices segfault, this is under investigation // https://github.com/bevyengine/bevy/issues/8214 #[cfg(not(target_os = "android"))] shadows_enabled: true, 
..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), // MSAA makes some Android devices panic, this is under investigation // https://github.com/bevyengine/bevy/issues/8229 #[cfg(target_os = "android")] Msaa::Off, )); // Test ui commands .spawn(( Button, Node { justify_content: JustifyContent::Center, align_items: AlignItems::Center, position_type: PositionType::Absolute, left: px(50), right: px(50), bottom: px(50), ..default() }, )) .with_child(( Text::new("Test Button"), TextFont { font_size: 30.0, ..default() }, TextColor::BLACK, TextLayout::new_with_justify(Justify::Center), )); } fn button_handler( mut interaction_query: Query< (&Interaction, &mut BackgroundColor), (Changed<Interaction>, With<Button>), >, ) { for (interaction, mut color) in &mut interaction_query { match *interaction { Interaction::Pressed => { *color = BLUE.into(); } Interaction::Hovered => { *color = GRAY.into(); } Interaction::None => { *color = WHITE.into(); } } } } fn setup_music(asset_server: Res<AssetServer>, mut commands: Commands) { commands.spawn(( AudioPlayer::new(asset_server.load("sounds/Windless Slopes.ogg")), PlaybackSettings::LOOP, )); } // Pause audio when app goes into background and resume when it returns. // This is handled by the OS on iOS, but not on Android. fn handle_lifetime( mut app_lifecycle_reader: MessageReader<AppLifecycle>, music_controller: Single<&AudioSink>, ) { for app_lifecycle in app_lifecycle_reader.read() { match app_lifecycle { AppLifecycle::Idle | AppLifecycle::WillSuspend | AppLifecycle::WillResume => {} AppLifecycle::Suspended => music_controller.pause(), AppLifecycle::Running => music_controller.play(), } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/mobile/src/main.rs
examples/mobile/src/main.rs
//! The entry point for iOS applications. use bevy_mobile_example::main;
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/bounding_2d.rs
examples/math/bounding_2d.rs
//! This example demonstrates bounding volume intersections. use bevy::{ color::palettes::css::*, math::{bounding::*, ops, Isometry2d}, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .init_state::<Test>() .add_systems(Startup, setup) .add_systems( Update, (update_text, spin, update_volumes, update_test_state), ) .add_systems( PostUpdate, ( render_shapes, ( aabb_intersection_system.run_if(in_state(Test::AabbSweep)), circle_intersection_system.run_if(in_state(Test::CircleSweep)), ray_cast_system.run_if(in_state(Test::RayCast)), aabb_cast_system.run_if(in_state(Test::AabbCast)), bounding_circle_cast_system.run_if(in_state(Test::CircleCast)), ), render_volumes, ) .chain(), ) .run(); } #[derive(Component)] struct Spin; fn spin(time: Res<Time>, mut query: Query<&mut Transform, With<Spin>>) { for mut transform in query.iter_mut() { transform.rotation *= Quat::from_rotation_z(time.delta_secs() / 5.); } } #[derive(States, Default, Debug, Hash, PartialEq, Eq, Clone, Copy)] enum Test { AabbSweep, CircleSweep, #[default] RayCast, AabbCast, CircleCast, } fn update_test_state( keycode: Res<ButtonInput<KeyCode>>, cur_state: Res<State<Test>>, mut state: ResMut<NextState<Test>>, ) { if !keycode.just_pressed(KeyCode::Space) { return; } use Test::*; let next = match **cur_state { AabbSweep => CircleSweep, CircleSweep => RayCast, RayCast => AabbCast, AabbCast => CircleCast, CircleCast => AabbSweep, }; state.set(next); } fn update_text(mut text: Single<&mut Text>, cur_state: Res<State<Test>>) { if !cur_state.is_changed() { return; } text.clear(); text.push_str("Intersection test:\n"); use Test::*; for &test in &[AabbSweep, CircleSweep, RayCast, AabbCast, CircleCast] { let s = if **cur_state == test { "*" } else { " " }; text.push_str(&format!(" {s} {test:?} {s}\n")); } text.push_str("\nPress space to cycle"); } #[derive(Component)] enum Shape { Rectangle(Rectangle), Circle(Circle), Triangle(Triangle2d), Line(Segment2d), Capsule(Capsule2d), Polygon(RegularPolygon), } 
fn render_shapes(mut gizmos: Gizmos, query: Query<(&Shape, &Transform)>) { let color = GRAY; for (shape, transform) in query.iter() { let translation = transform.translation.xy(); let rotation = transform.rotation.to_euler(EulerRot::YXZ).2; let isometry = Isometry2d::new(translation, Rot2::radians(rotation)); match shape { Shape::Rectangle(r) => { gizmos.primitive_2d(r, isometry, color); } Shape::Circle(c) => { gizmos.primitive_2d(c, isometry, color); } Shape::Triangle(t) => { gizmos.primitive_2d(t, isometry, color); } Shape::Line(l) => { gizmos.primitive_2d(l, isometry, color); } Shape::Capsule(c) => { gizmos.primitive_2d(c, isometry, color); } Shape::Polygon(p) => { gizmos.primitive_2d(p, isometry, color); } } } } #[derive(Component)] enum DesiredVolume { Aabb, Circle, } #[derive(Component, Debug)] enum CurrentVolume { Aabb(Aabb2d), Circle(BoundingCircle), } fn update_volumes( mut commands: Commands, query: Query< (Entity, &DesiredVolume, &Shape, &Transform), Or<(Changed<DesiredVolume>, Changed<Shape>, Changed<Transform>)>, >, ) { for (entity, desired_volume, shape, transform) in query.iter() { let translation = transform.translation.xy(); let rotation = transform.rotation.to_euler(EulerRot::YXZ).2; let isometry = Isometry2d::new(translation, Rot2::radians(rotation)); match desired_volume { DesiredVolume::Aabb => { let aabb = match shape { Shape::Rectangle(r) => r.aabb_2d(isometry), Shape::Circle(c) => c.aabb_2d(isometry), Shape::Triangle(t) => t.aabb_2d(isometry), Shape::Line(l) => l.aabb_2d(isometry), Shape::Capsule(c) => c.aabb_2d(isometry), Shape::Polygon(p) => p.aabb_2d(isometry), }; commands.entity(entity).insert(CurrentVolume::Aabb(aabb)); } DesiredVolume::Circle => { let circle = match shape { Shape::Rectangle(r) => r.bounding_circle(isometry), Shape::Circle(c) => c.bounding_circle(isometry), Shape::Triangle(t) => t.bounding_circle(isometry), Shape::Line(l) => l.bounding_circle(isometry), Shape::Capsule(c) => c.bounding_circle(isometry), Shape::Polygon(p) 
=> p.bounding_circle(isometry), }; commands .entity(entity) .insert(CurrentVolume::Circle(circle)); } } } } fn render_volumes(mut gizmos: Gizmos, query: Query<(&CurrentVolume, &Intersects)>) { for (volume, intersects) in query.iter() { let color = if **intersects { AQUA } else { ORANGE_RED }; match volume { CurrentVolume::Aabb(a) => { gizmos.rect_2d(a.center(), a.half_size() * 2., color); } CurrentVolume::Circle(c) => { gizmos.circle_2d(c.center(), c.radius(), color); } } } } #[derive(Component, Deref, DerefMut, Default)] struct Intersects(bool); const OFFSET_X: f32 = 125.; const OFFSET_Y: f32 = 75.; fn setup(mut commands: Commands) { commands.spawn(Camera2d); commands.spawn(( Transform::from_xyz(-OFFSET_X, OFFSET_Y, 0.), Shape::Circle(Circle::new(45.)), DesiredVolume::Aabb, Intersects::default(), )); commands.spawn(( Transform::from_xyz(0., OFFSET_Y, 0.), Shape::Rectangle(Rectangle::new(80., 80.)), Spin, DesiredVolume::Circle, Intersects::default(), )); commands.spawn(( Transform::from_xyz(OFFSET_X, OFFSET_Y, 0.), Shape::Triangle(Triangle2d::new( Vec2::new(-40., -40.), Vec2::new(-20., 40.), Vec2::new(40., 50.), )), Spin, DesiredVolume::Aabb, Intersects::default(), )); commands.spawn(( Transform::from_xyz(-OFFSET_X, -OFFSET_Y, 0.), Shape::Line(Segment2d::from_direction_and_length( Dir2::from_xy(1., 0.3).unwrap(), 90., )), Spin, DesiredVolume::Circle, Intersects::default(), )); commands.spawn(( Transform::from_xyz(0., -OFFSET_Y, 0.), Shape::Capsule(Capsule2d::new(25., 50.)), Spin, DesiredVolume::Aabb, Intersects::default(), )); commands.spawn(( Transform::from_xyz(OFFSET_X, -OFFSET_Y, 0.), Shape::Polygon(RegularPolygon::new(50., 6)), Spin, DesiredVolume::Circle, Intersects::default(), )); commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn draw_filled_circle(gizmos: &mut Gizmos, position: Vec2, color: Srgba) { for r in [1., 2., 3.] 
{ gizmos.circle_2d(position, r, color); } } fn draw_ray(gizmos: &mut Gizmos, ray: &RayCast2d) { gizmos.line_2d( ray.ray.origin, ray.ray.origin + *ray.ray.direction * ray.max, WHITE, ); draw_filled_circle(gizmos, ray.ray.origin, FUCHSIA); } fn get_and_draw_ray(gizmos: &mut Gizmos, time: &Time) -> RayCast2d { let ray = Vec2::new(ops::cos(time.elapsed_secs()), ops::sin(time.elapsed_secs())); let dist = 150. + ops::sin(0.5 * time.elapsed_secs()).abs() * 500.; let aabb_ray = Ray2d { origin: ray * 250., direction: Dir2::new_unchecked(-ray), }; let ray_cast = RayCast2d::from_ray(aabb_ray, dist - 20.); draw_ray(gizmos, &ray_cast); ray_cast } fn ray_cast_system( mut gizmos: Gizmos, time: Res<Time>, mut volumes: Query<(&CurrentVolume, &mut Intersects)>, ) { let ray_cast = get_and_draw_ray(&mut gizmos, &time); for (volume, mut intersects) in volumes.iter_mut() { let toi = match volume { CurrentVolume::Aabb(a) => ray_cast.aabb_intersection_at(a), CurrentVolume::Circle(c) => ray_cast.circle_intersection_at(c), }; **intersects = toi.is_some(); if let Some(toi) = toi { draw_filled_circle( &mut gizmos, ray_cast.ray.origin + *ray_cast.ray.direction * toi, LIME, ); } } } fn aabb_cast_system( mut gizmos: Gizmos, time: Res<Time>, mut volumes: Query<(&CurrentVolume, &mut Intersects)>, ) { let ray_cast = get_and_draw_ray(&mut gizmos, &time); let aabb_cast = AabbCast2d { aabb: Aabb2d::new(Vec2::ZERO, Vec2::splat(15.)), ray: ray_cast, }; for (volume, mut intersects) in volumes.iter_mut() { let toi = match *volume { CurrentVolume::Aabb(a) => aabb_cast.aabb_collision_at(a), CurrentVolume::Circle(_) => None, }; **intersects = toi.is_some(); if let Some(toi) = toi { gizmos.rect_2d( aabb_cast.ray.ray.origin + *aabb_cast.ray.ray.direction * toi, aabb_cast.aabb.half_size() * 2., LIME, ); } } } fn bounding_circle_cast_system( mut gizmos: Gizmos, time: Res<Time>, mut volumes: Query<(&CurrentVolume, &mut Intersects)>, ) { let ray_cast = get_and_draw_ray(&mut gizmos, &time); let circle_cast = 
BoundingCircleCast { circle: BoundingCircle::new(Vec2::ZERO, 15.), ray: ray_cast, }; for (volume, mut intersects) in volumes.iter_mut() { let toi = match *volume { CurrentVolume::Aabb(_) => None, CurrentVolume::Circle(c) => circle_cast.circle_collision_at(c), }; **intersects = toi.is_some(); if let Some(toi) = toi { gizmos.circle_2d( circle_cast.ray.ray.origin + *circle_cast.ray.ray.direction * toi, circle_cast.circle.radius(), LIME, ); } } } fn get_intersection_position(time: &Time) -> Vec2 { let x = ops::cos(0.8 * time.elapsed_secs()) * 250.; let y = ops::sin(0.4 * time.elapsed_secs()) * 100.; Vec2::new(x, y) } fn aabb_intersection_system( mut gizmos: Gizmos, time: Res<Time>, mut volumes: Query<(&CurrentVolume, &mut Intersects)>, ) { let center = get_intersection_position(&time); let aabb = Aabb2d::new(center, Vec2::splat(50.)); gizmos.rect_2d(center, aabb.half_size() * 2., YELLOW); for (volume, mut intersects) in volumes.iter_mut() { let hit = match volume { CurrentVolume::Aabb(a) => aabb.intersects(a), CurrentVolume::Circle(c) => aabb.intersects(c), }; **intersects = hit; } } fn circle_intersection_system( mut gizmos: Gizmos, time: Res<Time>, mut volumes: Query<(&CurrentVolume, &mut Intersects)>, ) { let center = get_intersection_position(&time); let circle = BoundingCircle::new(center, 50.); gizmos.circle_2d(center, circle.radius(), YELLOW); for (volume, mut intersects) in volumes.iter_mut() { let hit = match volume { CurrentVolume::Aabb(a) => circle.intersects(a), CurrentVolume::Circle(c) => circle.intersects(c), }; **intersects = hit; } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/sampling_primitives.rs
examples/math/sampling_primitives.rs
//! This example shows how to sample random points from primitive shapes. use std::f32::consts::PI; use bevy::{ core_pipeline::tonemapping::Tonemapping, input::mouse::{AccumulatedMouseMotion, AccumulatedMouseScroll, MouseButtonInput}, math::prelude::*, post_process::bloom::Bloom, prelude::*, }; use rand::{seq::IndexedRandom, Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; fn main() { App::new() .add_plugins(DefaultPlugins) .insert_resource(SampledShapes::new()) .add_systems(Startup, setup) .add_systems( Update, ( handle_mouse, handle_keypress, spawn_points, despawn_points, animate_spawning, animate_despawning, update_camera, update_lights, ), ) .run(); } // Constants /// Maximum distance of the camera from its target. (meters) /// Should be set such that it is possible to look at all objects const MAX_CAMERA_DISTANCE: f32 = 12.0; /// Minimum distance of the camera from its target. (meters) /// Should be set such that it is not possible to clip into objects const MIN_CAMERA_DISTANCE: f32 = 1.0; /// Offset to be placed between the shapes const DISTANCE_BETWEEN_SHAPES: Vec3 = Vec3::new(2.0, 0.0, 0.0); /// Maximum amount of points allowed to be present. /// Should be set such that it does not cause large amounts of lag when reached. 
const MAX_POINTS: usize = 3000; // TODO: Test wasm and add a wasm-specific-bound /// How many points should be spawned each frame const POINTS_PER_FRAME: usize = 3; /// Color used for the inside points const INSIDE_POINT_COLOR: LinearRgba = LinearRgba::rgb(0.855, 1.1, 0.01); /// Color used for the points on the boundary const BOUNDARY_POINT_COLOR: LinearRgba = LinearRgba::rgb(0.08, 0.2, 0.90); /// Time (in seconds) for the spawning/despawning animation const ANIMATION_TIME: f32 = 1.0; /// Color for the sky and the sky-light const SKY_COLOR: Color = Color::srgb(0.02, 0.06, 0.15); const SMALL_3D: f32 = 0.5; const BIG_3D: f32 = 1.0; // primitives const CUBOID: Cuboid = Cuboid { half_size: Vec3::new(SMALL_3D, BIG_3D, SMALL_3D), }; const SPHERE: Sphere = Sphere { radius: 1.5 * SMALL_3D, }; const TRIANGLE_3D: Triangle3d = Triangle3d { vertices: [ Vec3::new(BIG_3D, -BIG_3D * 0.5, 0.0), Vec3::new(0.0, BIG_3D, 0.0), Vec3::new(-BIG_3D, -BIG_3D * 0.5, 0.0), ], }; const CAPSULE_3D: Capsule3d = Capsule3d { radius: SMALL_3D, half_length: SMALL_3D, }; const CYLINDER: Cylinder = Cylinder { radius: SMALL_3D, half_height: SMALL_3D, }; const TETRAHEDRON: Tetrahedron = Tetrahedron { vertices: [ Vec3::new(-BIG_3D, -BIG_3D * 0.67, BIG_3D * 0.5), Vec3::new(BIG_3D, -BIG_3D * 0.67, BIG_3D * 0.5), Vec3::new(0.0, -BIG_3D * 0.67, -BIG_3D * 1.17), Vec3::new(0.0, BIG_3D, 0.0), ], }; // Components, Resources /// Resource for the random sampling mode, telling whether to sample the interior or the boundary. #[derive(Resource)] enum SamplingMode { Interior, Boundary, } /// Resource for storing whether points should spawn by themselves #[derive(Resource)] enum SpawningMode { Manual, Automatic, } /// Resource for tracking how many points should be spawned #[derive(Resource)] struct SpawnQueue(usize); #[derive(Resource)] struct PointCounter(usize); /// Resource storing the shapes being sampled and their translations. 
#[derive(Resource)] struct SampledShapes(Vec<(Shape, Vec3)>); impl SampledShapes { fn new() -> Self { let shapes = Shape::list_all_shapes(); let n_shapes = shapes.len(); let translations = (0..n_shapes).map(|i| (i as f32 - n_shapes as f32 / 2.0) * DISTANCE_BETWEEN_SHAPES); SampledShapes(shapes.into_iter().zip(translations).collect()) } } /// Enum listing the shapes that can be sampled #[derive(Clone, Copy)] enum Shape { Cuboid, Sphere, Capsule, Cylinder, Tetrahedron, Triangle, } struct ShapeMeshBuilder { shape: Shape, } impl Shape { /// Return a vector containing all implemented shapes fn list_all_shapes() -> Vec<Shape> { vec![ Shape::Cuboid, Shape::Sphere, Shape::Capsule, Shape::Cylinder, Shape::Tetrahedron, Shape::Triangle, ] } } impl ShapeSample for Shape { type Output = Vec3; fn sample_interior<R: Rng + ?Sized>(&self, rng: &mut R) -> Vec3 { match self { Shape::Cuboid => CUBOID.sample_interior(rng), Shape::Sphere => SPHERE.sample_interior(rng), Shape::Capsule => CAPSULE_3D.sample_interior(rng), Shape::Cylinder => CYLINDER.sample_interior(rng), Shape::Tetrahedron => TETRAHEDRON.sample_interior(rng), Shape::Triangle => TRIANGLE_3D.sample_interior(rng), } } fn sample_boundary<R: Rng + ?Sized>(&self, rng: &mut R) -> Self::Output { match self { Shape::Cuboid => CUBOID.sample_boundary(rng), Shape::Sphere => SPHERE.sample_boundary(rng), Shape::Capsule => CAPSULE_3D.sample_boundary(rng), Shape::Cylinder => CYLINDER.sample_boundary(rng), Shape::Tetrahedron => TETRAHEDRON.sample_boundary(rng), Shape::Triangle => TRIANGLE_3D.sample_boundary(rng), } } } impl Meshable for Shape { type Output = ShapeMeshBuilder; fn mesh(&self) -> Self::Output { ShapeMeshBuilder { shape: *self } } } impl MeshBuilder for ShapeMeshBuilder { fn build(&self) -> Mesh { match self.shape { Shape::Cuboid => CUBOID.mesh().into(), Shape::Sphere => SPHERE.mesh().into(), Shape::Capsule => CAPSULE_3D.mesh().into(), Shape::Cylinder => CYLINDER.mesh().into(), Shape::Tetrahedron => TETRAHEDRON.mesh().into(), 
Shape::Triangle => TRIANGLE_3D.mesh().into(), } } } /// The source of randomness used by this example. #[derive(Resource)] struct RandomSource(ChaCha8Rng); /// A container for the handle storing the mesh used to display sampled points as spheres. #[derive(Resource)] struct PointMesh(Handle<Mesh>); /// A container for the handle storing the material used to display sampled points. #[derive(Resource)] struct PointMaterial { interior: Handle<StandardMaterial>, boundary: Handle<StandardMaterial>, } /// Marker component for sampled points. #[derive(Component)] struct SamplePoint; /// Component for animating the spawn animation of lights. #[derive(Component)] struct SpawningPoint { progress: f32, } /// Marker component for lights which should change intensity. #[derive(Component)] struct DespawningPoint { progress: f32, } /// Marker component for lights which should change intensity. #[derive(Component)] struct FireflyLights; /// The pressed state of the mouse, used for camera motion. #[derive(Resource)] struct MousePressed(bool); /// Camera movement component. #[derive(Component)] struct CameraRig { /// Rotation around the vertical axis of the camera (radians). /// Positive changes makes the camera look more from the right. pub yaw: f32, /// Rotation around the horizontal axis of the camera (radians) (-pi/2; pi/2). /// Positive looks down from above. pub pitch: f32, /// Distance from the center, smaller distance causes more zoom. pub distance: f32, /// Location in 3D space at which the camera is looking and around which it is orbiting. pub target: Vec3, } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, shapes: Res<SampledShapes>, ) { // Use seeded rng and store it in a resource; this makes the random output reproducible. let seeded_rng = ChaCha8Rng::seed_from_u64(4); // Chosen by a fair die roll, guaranteed to be random. 
commands.insert_resource(RandomSource(seeded_rng)); // Make a plane for establishing space. commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(20.0, 20.0))), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::srgb(0.3, 0.5, 0.3), perceptual_roughness: 0.95, metallic: 0.0, ..default() })), Transform::from_xyz(0.0, -2.5, 0.0), )); let shape_material = materials.add(StandardMaterial { base_color: Color::srgba(0.2, 0.1, 0.6, 0.3), reflectance: 0.0, alpha_mode: AlphaMode::Blend, cull_mode: None, ..default() }); // Spawn shapes to be sampled for (shape, translation) in shapes.0.iter() { // The sampled shape shown transparently: commands.spawn(( Mesh3d(meshes.add(shape.mesh())), MeshMaterial3d(shape_material.clone()), Transform::from_translation(*translation), )); // Lights which work as the bulk lighting of the fireflies: commands.spawn(( PointLight { range: 4.0, radius: 0.6, intensity: 1.0, shadows_enabled: false, color: Color::LinearRgba(INSIDE_POINT_COLOR), ..default() }, Transform::from_translation(*translation), FireflyLights, )); } // Global light: commands.spawn(( PointLight { color: SKY_COLOR, intensity: 2_000.0, shadows_enabled: false, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // A camera: commands.spawn(( Camera3d::default(), Camera { clear_color: ClearColorConfig::Custom(SKY_COLOR), ..default() }, Tonemapping::TonyMcMapface, Transform::from_xyz(-2.0, 3.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), Bloom::NATURAL, CameraRig { yaw: 0.56, pitch: 0.45, distance: 8.0, target: Vec3::ZERO, }, )); // Store the mesh and material for sample points in resources: commands.insert_resource(PointMesh( meshes.add(Sphere::new(0.03).mesh().ico(1).unwrap()), )); commands.insert_resource(PointMaterial { interior: materials.add(StandardMaterial { base_color: Color::BLACK, reflectance: 0.05, emissive: 2.5 * INSIDE_POINT_COLOR, ..default() }), boundary: materials.add(StandardMaterial { base_color: Color::BLACK, reflectance: 0.05, emissive: 1.5 
* BOUNDARY_POINT_COLOR, ..default() }), }); // Instructions for the example: commands.spawn(( Text::new( "Controls:\n\ M: Toggle between sampling boundary and interior.\n\ A: Toggle automatic spawning & despawning of points.\n\ R: Restart (erase all samples).\n\ S: Add one random sample.\n\ D: Add 100 random samples.\n\ Rotate camera by holding left mouse and panning.\n\ Zoom camera by scrolling via mouse or +/-.\n\ Move camera by L/R arrow keys.\n\ Tab: Toggle this text", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); // No points are scheduled to spawn initially. commands.insert_resource(SpawnQueue(0)); // No points have been spawned initially. commands.insert_resource(PointCounter(0)); // The mode starts with interior points. commands.insert_resource(SamplingMode::Interior); // Points spawn automatically by default. commands.insert_resource(SpawningMode::Automatic); // Starting mouse-pressed state is false. commands.insert_resource(MousePressed(false)); } // Handle user inputs from the keyboard: fn handle_keypress( mut commands: Commands, keyboard: Res<ButtonInput<KeyCode>>, mut mode: ResMut<SamplingMode>, mut spawn_mode: ResMut<SpawningMode>, samples: Query<Entity, With<SamplePoint>>, shapes: Res<SampledShapes>, mut spawn_queue: ResMut<SpawnQueue>, mut counter: ResMut<PointCounter>, mut text_menus: Query<&mut Visibility, With<Text>>, mut camera_rig: Single<&mut CameraRig>, ) { // R => restart, deleting all samples if keyboard.just_pressed(KeyCode::KeyR) { // Don't forget to zero out the counter! counter.0 = 0; for entity in &samples { commands.entity(entity).despawn(); } } // S => sample once if keyboard.just_pressed(KeyCode::KeyS) { spawn_queue.0 += 1; } // D => sample a hundred if keyboard.just_pressed(KeyCode::KeyD) { spawn_queue.0 += 100; } // M => toggle mode between interior and boundary. 
if keyboard.just_pressed(KeyCode::KeyM) { match *mode { SamplingMode::Interior => *mode = SamplingMode::Boundary, SamplingMode::Boundary => *mode = SamplingMode::Interior, } } // A => toggle spawning mode between automatic and manual. if keyboard.just_pressed(KeyCode::KeyA) { match *spawn_mode { SpawningMode::Manual => *spawn_mode = SpawningMode::Automatic, SpawningMode::Automatic => *spawn_mode = SpawningMode::Manual, } } // Tab => toggle help menu. if keyboard.just_pressed(KeyCode::Tab) { for mut visibility in text_menus.iter_mut() { *visibility = match *visibility { Visibility::Hidden => Visibility::Visible, _ => Visibility::Hidden, }; } } // +/- => zoom camera. if keyboard.just_pressed(KeyCode::NumpadSubtract) || keyboard.just_pressed(KeyCode::Minus) { camera_rig.distance += MAX_CAMERA_DISTANCE / 15.0; camera_rig.distance = camera_rig .distance .clamp(MIN_CAMERA_DISTANCE, MAX_CAMERA_DISTANCE); } if keyboard.just_pressed(KeyCode::NumpadAdd) { camera_rig.distance -= MAX_CAMERA_DISTANCE / 15.0; camera_rig.distance = camera_rig .distance .clamp(MIN_CAMERA_DISTANCE, MAX_CAMERA_DISTANCE); } // Arrows => Move camera focus let left = keyboard.just_pressed(KeyCode::ArrowLeft); let right = keyboard.just_pressed(KeyCode::ArrowRight); if left || right { let mut closest = 0; let mut closest_distance = f32::MAX; for (i, (_, position)) in shapes.0.iter().enumerate() { let distance = camera_rig.target.distance(*position); if distance < closest_distance { closest = i; closest_distance = distance; } } if closest > 0 && left { camera_rig.target = shapes.0[closest - 1].1; } if closest < shapes.0.len() - 1 && right { camera_rig.target = shapes.0[closest + 1].1; } } } // Handle user mouse input for panning the camera around: fn handle_mouse( accumulated_mouse_motion: Res<AccumulatedMouseMotion>, accumulated_mouse_scroll: Res<AccumulatedMouseScroll>, mut mouse_button_inputs: MessageReader<MouseButtonInput>, mut camera_rig: Single<&mut CameraRig>, mut mouse_pressed: 
ResMut<MousePressed>, ) { // Store left-pressed state in the MousePressed resource for mouse_button_input in mouse_button_inputs.read() { if mouse_button_input.button != MouseButton::Left { continue; } *mouse_pressed = MousePressed(mouse_button_input.state.is_pressed()); } if accumulated_mouse_scroll.delta != Vec2::ZERO { let mouse_scroll = accumulated_mouse_scroll.delta.y; camera_rig.distance -= mouse_scroll / 15.0 * MAX_CAMERA_DISTANCE; camera_rig.distance = camera_rig .distance .clamp(MIN_CAMERA_DISTANCE, MAX_CAMERA_DISTANCE); } // If the mouse is not pressed, just ignore motion events if !mouse_pressed.0 { return; } if accumulated_mouse_motion.delta != Vec2::ZERO { let displacement = accumulated_mouse_motion.delta; camera_rig.yaw += displacement.x / 90.; camera_rig.pitch += displacement.y / 90.; // The extra 0.01 is to disallow weird behavior at the poles of the rotation camera_rig.pitch = camera_rig.pitch.clamp(-PI / 2.01, PI / 2.01); } } fn spawn_points( mut commands: Commands, mode: ResMut<SamplingMode>, shapes: Res<SampledShapes>, mut random_source: ResMut<RandomSource>, sample_mesh: Res<PointMesh>, sample_material: Res<PointMaterial>, mut spawn_queue: ResMut<SpawnQueue>, mut counter: ResMut<PointCounter>, spawn_mode: ResMut<SpawningMode>, ) { if let SpawningMode::Automatic = *spawn_mode { spawn_queue.0 += POINTS_PER_FRAME; } if spawn_queue.0 == 0 { return; } let rng = &mut random_source.0; // Don't go crazy for _ in 0..1000 { if spawn_queue.0 == 0 { break; } spawn_queue.0 -= 1; counter.0 += 1; let (shape, offset) = shapes.0.choose(rng).expect("There is at least one shape"); // Get a single random Vec3: let sample: Vec3 = *offset + match *mode { SamplingMode::Interior => shape.sample_interior(rng), SamplingMode::Boundary => shape.sample_boundary(rng), }; // Spawn a sphere at the random location: commands.spawn(( Mesh3d(sample_mesh.0.clone()), MeshMaterial3d(match *mode { SamplingMode::Interior => sample_material.interior.clone(), SamplingMode::Boundary => 
sample_material.boundary.clone(), }), Transform::from_translation(sample).with_scale(Vec3::ZERO), SamplePoint, SpawningPoint { progress: 0.0 }, )); } } fn despawn_points( mut commands: Commands, samples: Query<Entity, With<SamplePoint>>, spawn_mode: Res<SpawningMode>, mut counter: ResMut<PointCounter>, mut random_source: ResMut<RandomSource>, ) { // Do not despawn automatically in manual mode if let SpawningMode::Manual = *spawn_mode { return; } if counter.0 < MAX_POINTS { return; } let rng = &mut random_source.0; // Skip a random amount of points to ensure random despawning let skip = rng.random_range(0..counter.0); let despawn_amount = (counter.0 - MAX_POINTS).min(100); counter.0 -= samples .iter() .skip(skip) .take(despawn_amount) .map(|entity| { commands .entity(entity) .insert(DespawningPoint { progress: 0.0 }) .remove::<SpawningPoint>() .remove::<SamplePoint>(); }) .count(); } fn animate_spawning( mut commands: Commands, time: Res<Time>, mut samples: Query<(Entity, &mut Transform, &mut SpawningPoint)>, ) { let dt = time.delta_secs(); for (entity, mut transform, mut point) in samples.iter_mut() { point.progress += dt / ANIMATION_TIME; transform.scale = Vec3::splat(point.progress.min(1.0)); if point.progress >= 1.0 { commands.entity(entity).remove::<SpawningPoint>(); } } } fn animate_despawning( mut commands: Commands, time: Res<Time>, mut samples: Query<(Entity, &mut Transform, &mut DespawningPoint)>, ) { let dt = time.delta_secs(); for (entity, mut transform, mut point) in samples.iter_mut() { point.progress += dt / ANIMATION_TIME; // If the point is already smaller than expected, jump ahead with the despawning progress to avoid sudden jumps in size point.progress = f32::max(point.progress, 1.0 - transform.scale.x); transform.scale = Vec3::splat((1.0 - point.progress).max(0.0)); if point.progress >= 1.0 { commands.entity(entity).despawn(); } } } fn update_camera(mut camera: Query<(&mut Transform, &CameraRig), Changed<CameraRig>>) { for (mut transform, rig) in 
camera.iter_mut() { let looking_direction = Quat::from_rotation_y(-rig.yaw) * Quat::from_rotation_x(rig.pitch) * Vec3::Z; transform.translation = rig.target - rig.distance * looking_direction; transform.look_at(rig.target, Dir3::Y); } } fn update_lights( mut lights: Query<&mut PointLight, With<FireflyLights>>, counter: Res<PointCounter>, ) { let saturation = (counter.0 as f32 / MAX_POINTS as f32).min(2.0); let intensity = 40_000.0 * saturation; for mut light in lights.iter_mut() { light.intensity = light.intensity.lerp(intensity, 0.04); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/render_primitives.rs
examples/math/render_primitives.rs
//! This example demonstrates how each of Bevy's math primitives look like in 2D and 3D with meshes //! and with gizmos use bevy::{input::common_conditions::input_just_pressed, math::Isometry2d, prelude::*}; const LEFT_RIGHT_OFFSET_2D: f32 = 200.0; const LEFT_RIGHT_OFFSET_3D: f32 = 2.0; fn main() { let mut app = App::new(); app.add_plugins(DefaultPlugins) .init_state::<PrimitiveSelected>() .init_state::<CameraActive>(); // cameras app.add_systems(Startup, (setup_cameras, setup_lights, setup_ambient_light)) .add_systems( Update, ( update_active_cameras.run_if(state_changed::<CameraActive>), switch_cameras.run_if(input_just_pressed(KeyCode::KeyC)), ), ); // text // PostStartup since we need the cameras to exist app.add_systems(PostStartup, setup_text); app.add_systems( Update, (update_text.run_if(state_changed::<PrimitiveSelected>),), ); // primitives app.add_systems(Startup, (spawn_primitive_2d, spawn_primitive_3d)) .add_systems( Update, ( switch_to_next_primitive.run_if(input_just_pressed(KeyCode::ArrowUp)), switch_to_previous_primitive.run_if(input_just_pressed(KeyCode::ArrowDown)), draw_gizmos_2d.run_if(in_mode(CameraActive::Dim2)), draw_gizmos_3d.run_if(in_mode(CameraActive::Dim3)), update_primitive_meshes .run_if(state_changed::<PrimitiveSelected>.or(state_changed::<CameraActive>)), rotate_primitive_2d_meshes, rotate_primitive_3d_meshes, ), ); app.run(); } /// State for tracking which of the two cameras (2D & 3D) is currently active #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, States, Default, Reflect)] enum CameraActive { #[default] /// 2D Camera is active Dim2, /// 3D Camera is active Dim3, } /// State for tracking which primitives are currently displayed #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, States, Default, Reflect)] enum PrimitiveSelected { #[default] RectangleAndCuboid, CircleAndSphere, Ellipse, Triangle, Plane, Line, Segment, Polyline, Polygon, ConvexPolygon, RegularPolygon, Capsule, Cylinder, Cone, ConicalFrustum, Torus, Tetrahedron, 
Arc, CircularSector, CircularSegment, } impl std::fmt::Display for PrimitiveSelected { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let name = match self { PrimitiveSelected::RectangleAndCuboid => String::from("Rectangle/Cuboid"), PrimitiveSelected::CircleAndSphere => String::from("Circle/Sphere"), other => format!("{other:?}"), }; write!(f, "{name}") } } impl PrimitiveSelected { const ALL: [Self; 20] = [ Self::RectangleAndCuboid, Self::CircleAndSphere, Self::Ellipse, Self::Triangle, Self::Plane, Self::Line, Self::Segment, Self::Polyline, Self::Polygon, Self::ConvexPolygon, Self::RegularPolygon, Self::Capsule, Self::Cylinder, Self::Cone, Self::ConicalFrustum, Self::Torus, Self::Tetrahedron, Self::Arc, Self::CircularSector, Self::CircularSegment, ]; fn next(self) -> Self { Self::ALL .into_iter() .cycle() .skip_while(|&x| x != self) .nth(1) .unwrap() } fn previous(self) -> Self { Self::ALL .into_iter() .rev() .cycle() .skip_while(|&x| x != self) .nth(1) .unwrap() } } const SMALL_2D: f32 = 50.0; const BIG_2D: f32 = 100.0; const SMALL_3D: f32 = 0.5; const BIG_3D: f32 = 1.0; // primitives const RECTANGLE: Rectangle = Rectangle { half_size: Vec2::new(SMALL_2D, BIG_2D), }; const CUBOID: Cuboid = Cuboid { half_size: Vec3::new(BIG_3D, SMALL_3D, BIG_3D), }; const CIRCLE: Circle = Circle { radius: BIG_2D }; const SPHERE: Sphere = Sphere { radius: BIG_3D }; const ELLIPSE: Ellipse = Ellipse { half_size: Vec2::new(BIG_2D, SMALL_2D), }; const TRIANGLE_2D: Triangle2d = Triangle2d { vertices: [ Vec2::new(BIG_2D, 0.0), Vec2::new(0.0, BIG_2D), Vec2::new(-BIG_2D, 0.0), ], }; const TRIANGLE_3D: Triangle3d = Triangle3d { vertices: [ Vec3::new(BIG_3D, 0.0, 0.0), Vec3::new(0.0, BIG_3D, 0.0), Vec3::new(-BIG_3D, 0.0, 0.0), ], }; const PLANE_2D: Plane2d = Plane2d { normal: Dir2::Y }; const PLANE_3D: Plane3d = Plane3d { normal: Dir3::Y, half_size: Vec2::new(BIG_3D, BIG_3D), }; const LINE_2D: Line2d = Line2d { direction: Dir2::X }; const LINE_3D: Line3d = Line3d { 
direction: Dir3::X }; const SEGMENT_2D: Segment2d = Segment2d { vertices: [Vec2::new(-BIG_2D / 2., 0.), Vec2::new(BIG_2D / 2., 0.)], }; const SEGMENT_3D: Segment3d = Segment3d { vertices: [ Vec3::new(-BIG_3D / 2., 0., 0.), Vec3::new(BIG_3D / 2., 0., 0.), ], }; const POLYLINE_2D_VERTICES: [Vec2; 4] = [ Vec2::new(-BIG_2D, -SMALL_2D), Vec2::new(-SMALL_2D, SMALL_2D), Vec2::new(SMALL_2D, -SMALL_2D), Vec2::new(BIG_2D, SMALL_2D), ]; const POLYLINE_3D_VERTICES: [Vec3; 4] = [ Vec3::new(-BIG_3D, -SMALL_3D, -SMALL_3D), Vec3::new(SMALL_3D, SMALL_3D, 0.0), Vec3::new(-SMALL_3D, -SMALL_3D, 0.0), Vec3::new(BIG_3D, SMALL_3D, SMALL_3D), ]; const CONVEX_POLYGON_VERTICES: [Vec2; 5] = [ Vec2::new(-BIG_2D, -SMALL_2D), Vec2::new(BIG_2D, -SMALL_2D), Vec2::new(BIG_2D, SMALL_2D), Vec2::new(BIG_2D / 2.0, SMALL_2D * 2.0), Vec2::new(-BIG_2D, SMALL_2D), ]; const REGULAR_POLYGON: RegularPolygon = RegularPolygon { circumcircle: Circle { radius: BIG_2D }, sides: 5, }; const CAPSULE_2D: Capsule2d = Capsule2d { radius: SMALL_2D, half_length: SMALL_2D, }; const CAPSULE_3D: Capsule3d = Capsule3d { radius: SMALL_3D, half_length: SMALL_3D, }; const CYLINDER: Cylinder = Cylinder { radius: SMALL_3D, half_height: SMALL_3D, }; const CONE: Cone = Cone { radius: BIG_3D, height: BIG_3D, }; const CONICAL_FRUSTUM: ConicalFrustum = ConicalFrustum { radius_top: BIG_3D, radius_bottom: SMALL_3D, height: BIG_3D, }; const ANNULUS: Annulus = Annulus { inner_circle: Circle { radius: SMALL_2D }, outer_circle: Circle { radius: BIG_2D }, }; const TORUS: Torus = Torus { minor_radius: SMALL_3D / 2.0, major_radius: SMALL_3D * 1.5, }; const TETRAHEDRON: Tetrahedron = Tetrahedron { vertices: [ Vec3::new(-BIG_3D, 0.0, 0.0), Vec3::new(BIG_3D, 0.0, 0.0), Vec3::new(0.0, 0.0, -BIG_3D * 1.67), Vec3::new(0.0, BIG_3D * 1.67, -BIG_3D * 0.5), ], }; const ARC: Arc2d = Arc2d { radius: BIG_2D, half_angle: std::f32::consts::FRAC_PI_4, }; const CIRCULAR_SECTOR: CircularSector = CircularSector { arc: Arc2d { radius: BIG_2D, half_angle: 
std::f32::consts::FRAC_PI_4, }, }; const CIRCULAR_SEGMENT: CircularSegment = CircularSegment { arc: Arc2d { radius: BIG_2D, half_angle: std::f32::consts::FRAC_PI_4, }, }; fn setup_cameras(mut commands: Commands) { let start_in_2d = true; let make_camera = |is_active| Camera { is_active, ..Default::default() }; commands.spawn((Camera2d, make_camera(start_in_2d))); commands.spawn(( Camera3d::default(), make_camera(!start_in_2d), Transform::from_xyz(0.0, 10.0, 0.0).looking_at(Vec3::ZERO, Vec3::Z), )); } fn setup_ambient_light(mut ambient_light: ResMut<GlobalAmbientLight>) { ambient_light.brightness = 50.0; } fn setup_lights(mut commands: Commands) { commands.spawn(( PointLight { intensity: 5000.0, ..default() }, Transform::from_translation(Vec3::new(-LEFT_RIGHT_OFFSET_3D, 2.0, 0.0)) .looking_at(Vec3::new(-LEFT_RIGHT_OFFSET_3D, 0.0, 0.0), Vec3::Y), )); } /// Marker component for header text #[derive(Debug, Clone, Component, Default, Reflect)] pub struct HeaderText; /// Marker component for header node #[derive(Debug, Clone, Component, Default, Reflect)] pub struct HeaderNode; fn update_active_cameras( state: Res<State<CameraActive>>, camera_2d: Single<(Entity, &mut Camera), With<Camera2d>>, camera_3d: Single<(Entity, &mut Camera), (With<Camera3d>, Without<Camera2d>)>, mut text: Query<&mut UiTargetCamera, With<HeaderNode>>, ) { let (entity_2d, mut cam_2d) = camera_2d.into_inner(); let (entity_3d, mut cam_3d) = camera_3d.into_inner(); let is_camera_2d_active = matches!(*state.get(), CameraActive::Dim2); cam_2d.is_active = is_camera_2d_active; cam_3d.is_active = !is_camera_2d_active; let active_camera = if is_camera_2d_active { entity_2d } else { entity_3d }; text.iter_mut().for_each(|mut target_camera| { *target_camera = UiTargetCamera(active_camera); }); } fn switch_cameras(current: Res<State<CameraActive>>, mut next: ResMut<NextState<CameraActive>>) { let next_state = match current.get() { CameraActive::Dim2 => CameraActive::Dim3, CameraActive::Dim3 => 
CameraActive::Dim2, }; next.set(next_state); } fn setup_text(mut commands: Commands, cameras: Query<(Entity, &Camera)>) { let active_camera = cameras .iter() .find_map(|(entity, camera)| camera.is_active.then_some(entity)) .expect("run condition ensures existence"); commands.spawn(( HeaderNode, Node { justify_self: JustifySelf::Center, top: px(5), ..Default::default() }, UiTargetCamera(active_camera), children![( Text::default(), HeaderText, TextLayout::new_with_justify(Justify::Center), children![ TextSpan::new("Primitive: "), TextSpan(format!("{text}", text = PrimitiveSelected::default())), TextSpan::new("\n\n"), TextSpan::new( "Press 'C' to switch between 2D and 3D mode\n\ Press 'Up' or 'Down' to switch to the next/previous primitive", ), TextSpan::new("\n\n"), TextSpan::new("(If nothing is displayed, there's no rendering support yet)",), ] )], )); } fn update_text( primitive_state: Res<State<PrimitiveSelected>>, header: Query<Entity, With<HeaderText>>, mut writer: TextUiWriter, ) { let new_text = format!("{text}", text = primitive_state.get()); header.iter().for_each(|header_text| { if let Some(mut text) = writer.get_text(header_text, 2) { (*text).clone_from(&new_text); }; }); } fn switch_to_next_primitive( current: Res<State<PrimitiveSelected>>, mut next: ResMut<NextState<PrimitiveSelected>>, ) { let next_state = current.get().next(); next.set(next_state); } fn switch_to_previous_primitive( current: Res<State<PrimitiveSelected>>, mut next: ResMut<NextState<PrimitiveSelected>>, ) { let next_state = current.get().previous(); next.set(next_state); } fn in_mode(active: CameraActive) -> impl Fn(Res<State<CameraActive>>) -> bool { move |state| *state.get() == active } fn draw_gizmos_2d(mut gizmos: Gizmos, state: Res<State<PrimitiveSelected>>, time: Res<Time>) { const POSITION: Vec2 = Vec2::new(-LEFT_RIGHT_OFFSET_2D, 0.0); let angle = time.elapsed_secs(); let isometry = Isometry2d::new(POSITION, Rot2::radians(angle)); let color = Color::WHITE; #[expect( 
clippy::match_same_arms, reason = "Certain primitives don't have any 2D rendering support yet." )] match state.get() { PrimitiveSelected::RectangleAndCuboid => { gizmos.primitive_2d(&RECTANGLE, isometry, color); } PrimitiveSelected::CircleAndSphere => { gizmos.primitive_2d(&CIRCLE, isometry, color); } PrimitiveSelected::Ellipse => drop(gizmos.primitive_2d(&ELLIPSE, isometry, color)), PrimitiveSelected::Triangle => gizmos.primitive_2d(&TRIANGLE_2D, isometry, color), PrimitiveSelected::Plane => gizmos.primitive_2d(&PLANE_2D, isometry, color), PrimitiveSelected::Line => drop(gizmos.primitive_2d(&LINE_2D, isometry, color)), PrimitiveSelected::Segment => { drop(gizmos.primitive_2d(&SEGMENT_2D, isometry, color)); } PrimitiveSelected::Polyline => gizmos.primitive_2d( &Polyline2d { vertices: POLYLINE_2D_VERTICES.to_vec(), }, isometry, color, ), PrimitiveSelected::ConvexPolygon => gizmos.primitive_2d( &Polygon::from(ConvexPolygon::new(CONVEX_POLYGON_VERTICES).unwrap()), isometry, color, ), PrimitiveSelected::Polygon => gizmos.primitive_2d( &Polygon { vertices: vec![ Vec2::new(-BIG_2D, -SMALL_2D), Vec2::new(BIG_2D, -SMALL_2D), Vec2::new(BIG_2D, SMALL_2D), Vec2::new(0.0, 0.0), Vec2::new(-BIG_2D, SMALL_2D), ], }, isometry, color, ), PrimitiveSelected::RegularPolygon => { gizmos.primitive_2d(&REGULAR_POLYGON, isometry, color); } PrimitiveSelected::Capsule => gizmos.primitive_2d(&CAPSULE_2D, isometry, color), PrimitiveSelected::Cylinder => {} PrimitiveSelected::Cone => {} PrimitiveSelected::ConicalFrustum => {} PrimitiveSelected::Torus => drop(gizmos.primitive_2d(&ANNULUS, isometry, color)), PrimitiveSelected::Tetrahedron => {} PrimitiveSelected::Arc => gizmos.primitive_2d(&ARC, isometry, color), PrimitiveSelected::CircularSector => { gizmos.primitive_2d(&CIRCULAR_SECTOR, isometry, color); } PrimitiveSelected::CircularSegment => { gizmos.primitive_2d(&CIRCULAR_SEGMENT, isometry, color); } } } /// Marker for primitive meshes to record in which state they should be visible in 
#[derive(Debug, Clone, Component, Default, Reflect)] pub struct PrimitiveData { camera_mode: CameraActive, primitive_state: PrimitiveSelected, } /// Marker for meshes of 2D primitives #[derive(Debug, Clone, Component, Default)] pub struct MeshDim2; /// Marker for meshes of 3D primitives #[derive(Debug, Clone, Component, Default)] pub struct MeshDim3; fn spawn_primitive_2d( mut commands: Commands, mut materials: ResMut<Assets<ColorMaterial>>, mut meshes: ResMut<Assets<Mesh>>, ) { const POSITION: Vec3 = Vec3::new(LEFT_RIGHT_OFFSET_2D, 0.0, 0.0); let material: Handle<ColorMaterial> = materials.add(Color::WHITE); let camera_mode = CameraActive::Dim2; let polyline_2d = Polyline2d { vertices: POLYLINE_2D_VERTICES.to_vec(), }; let convex_polygon = ConvexPolygon::new(CONVEX_POLYGON_VERTICES).unwrap(); [ Some(RECTANGLE.mesh().build()), Some(CIRCLE.mesh().build()), Some(ELLIPSE.mesh().build()), Some(TRIANGLE_2D.mesh().build()), None, // plane None, // line Some(SEGMENT_2D.mesh().build()), Some(polyline_2d.mesh().build()), None, // polygon Some(convex_polygon.mesh().build()), Some(REGULAR_POLYGON.mesh().build()), Some(CAPSULE_2D.mesh().build()), None, // cylinder None, // cone None, // conical frustum Some(ANNULUS.mesh().build()), None, // tetrahedron None, // arc Some(CIRCULAR_SECTOR.mesh().build()), Some(CIRCULAR_SEGMENT.mesh().build()), ] .into_iter() .zip(PrimitiveSelected::ALL) .for_each(|(maybe_mesh, state)| { if let Some(mesh) = maybe_mesh { commands.spawn(( MeshDim2, PrimitiveData { camera_mode, primitive_state: state, }, Mesh2d(meshes.add(mesh)), MeshMaterial2d(material.clone()), Transform::from_translation(POSITION), )); } }); } fn spawn_primitive_3d( mut commands: Commands, mut materials: ResMut<Assets<StandardMaterial>>, mut meshes: ResMut<Assets<Mesh>>, ) { const POSITION: Vec3 = Vec3::new(-LEFT_RIGHT_OFFSET_3D, 0.0, 0.0); let material: Handle<StandardMaterial> = materials.add(Color::WHITE); let camera_mode = CameraActive::Dim3; let polyline_3d = Polyline3d { 
vertices: POLYLINE_3D_VERTICES.to_vec(), }; [ Some(CUBOID.mesh().build()), Some(SPHERE.mesh().build()), None, // ellipse Some(TRIANGLE_3D.mesh().build()), Some(PLANE_3D.mesh().build()), None, // line Some(SEGMENT_3D.mesh().build()), Some(polyline_3d.mesh().build()), None, // polygon None, // convex polygon None, // regular polygon Some(CAPSULE_3D.mesh().build()), Some(CYLINDER.mesh().build()), Some(CONE.mesh().build()), Some(CONICAL_FRUSTUM.mesh().build()), Some(TORUS.mesh().build()), Some(TETRAHEDRON.mesh().build()), None, // arc None, // circular sector None, // circular segment ] .into_iter() .zip(PrimitiveSelected::ALL) .for_each(|(maybe_mesh, state)| { if let Some(mesh) = maybe_mesh { commands.spawn(( MeshDim3, PrimitiveData { camera_mode, primitive_state: state, }, Mesh3d(meshes.add(mesh)), MeshMaterial3d(material.clone()), Transform::from_translation(POSITION), )); } }); } fn update_primitive_meshes( camera_state: Res<State<CameraActive>>, primitive_state: Res<State<PrimitiveSelected>>, mut primitives: Query<(&mut Visibility, &PrimitiveData)>, ) { primitives.iter_mut().for_each(|(mut vis, primitive)| { let visible = primitive.camera_mode == *camera_state.get() && primitive.primitive_state == *primitive_state.get(); *vis = if visible { Visibility::Inherited } else { Visibility::Hidden }; }); } fn rotate_primitive_2d_meshes( mut primitives_2d: Query< (&mut Transform, &ViewVisibility), (With<PrimitiveData>, With<MeshDim2>), >, time: Res<Time>, ) { let rotation_2d = Quat::from_mat3(&Mat3::from_angle(time.elapsed_secs())); primitives_2d .iter_mut() .filter(|(_, vis)| vis.get()) .for_each(|(mut transform, _)| { transform.rotation = rotation_2d; }); } fn rotate_primitive_3d_meshes( mut primitives_3d: Query< (&mut Transform, &ViewVisibility), (With<PrimitiveData>, With<MeshDim3>), >, time: Res<Time>, ) { let rotation_3d = Quat::from_rotation_arc( Vec3::Z, Vec3::new( ops::sin(time.elapsed_secs()), ops::cos(time.elapsed_secs()), ops::sin(time.elapsed_secs()) * 0.5, ) 
.try_normalize() .unwrap_or(Vec3::Z), ); primitives_3d .iter_mut() .filter(|(_, vis)| vis.get()) .for_each(|(mut transform, _)| { transform.rotation = rotation_3d; }); } fn draw_gizmos_3d(mut gizmos: Gizmos, state: Res<State<PrimitiveSelected>>, time: Res<Time>) { const POSITION: Vec3 = Vec3::new(LEFT_RIGHT_OFFSET_3D, 0.0, 0.0); let rotation = Quat::from_rotation_arc( Vec3::Z, Vec3::new( ops::sin(time.elapsed_secs()), ops::cos(time.elapsed_secs()), ops::sin(time.elapsed_secs()) * 0.5, ) .try_normalize() .unwrap_or(Vec3::Z), ); let isometry = Isometry3d::new(POSITION, rotation); let color = Color::WHITE; let resolution = 10; #[expect( clippy::match_same_arms, reason = "Certain primitives don't have any 3D rendering support yet." )] match state.get() { PrimitiveSelected::RectangleAndCuboid => { gizmos.primitive_3d(&CUBOID, isometry, color); } PrimitiveSelected::CircleAndSphere => drop( gizmos .primitive_3d(&SPHERE, isometry, color) .resolution(resolution), ), PrimitiveSelected::Ellipse => {} PrimitiveSelected::Triangle => gizmos.primitive_3d(&TRIANGLE_3D, isometry, color), PrimitiveSelected::Plane => drop(gizmos.primitive_3d(&PLANE_3D, isometry, color)), PrimitiveSelected::Line => gizmos.primitive_3d(&LINE_3D, isometry, color), PrimitiveSelected::Segment => gizmos.primitive_3d(&SEGMENT_3D, isometry, color), PrimitiveSelected::Polyline => gizmos.primitive_3d( &Polyline3d { vertices: POLYLINE_3D_VERTICES.to_vec(), }, isometry, color, ), PrimitiveSelected::Polygon => {} PrimitiveSelected::ConvexPolygon => {} PrimitiveSelected::RegularPolygon => {} PrimitiveSelected::Capsule => drop( gizmos .primitive_3d(&CAPSULE_3D, isometry, color) .resolution(resolution), ), PrimitiveSelected::Cylinder => drop( gizmos .primitive_3d(&CYLINDER, isometry, color) .resolution(resolution), ), PrimitiveSelected::Cone => drop( gizmos .primitive_3d(&CONE, isometry, color) .resolution(resolution), ), PrimitiveSelected::ConicalFrustum => { gizmos.primitive_3d(&CONICAL_FRUSTUM, isometry, color); 
} PrimitiveSelected::Torus => drop( gizmos .primitive_3d(&TORUS, isometry, color) .minor_resolution(resolution) .major_resolution(resolution), ), PrimitiveSelected::Tetrahedron => { gizmos.primitive_3d(&TETRAHEDRON, isometry, color); } PrimitiveSelected::Arc => {} PrimitiveSelected::CircularSector => {} PrimitiveSelected::CircularSegment => {} } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/custom_primitives.rs
examples/math/custom_primitives.rs
//! This example demonstrates how you can add your own custom primitives to bevy highlighting //! traits you may want to implement for your primitives to achieve different functionalities. use std::f32::consts::{PI, SQRT_2}; #[cfg(not(target_family = "wasm"))] use bevy::pbr::wireframe::{WireframeConfig, WireframePlugin}; use bevy::{ asset::RenderAssetUsages, camera::ScalingMode, color::palettes::css::{RED, WHITE}, input::common_conditions::{input_just_pressed, input_toggle_active}, math::{ bounding::{ Aabb2d, Bounded2d, Bounded3d, BoundedExtrusion, BoundingCircle, BoundingVolume, }, Isometry2d, }, mesh::{Extrudable, ExtrusionBuilder, PerimeterSegment}, prelude::*, }; const HEART: Heart = Heart::new(0.5); const HOLLOW: Heart = Heart::new(0.3); // By implementing these traits we can construct the 2D ring version of this shape const RING: Ring<Heart> = Ring::new(HEART, HOLLOW); // By implementing these traits we can construct the 3D extrusion of this shape const EXTRUSION: Extrusion<Heart> = Extrusion { base_shape: HEART, half_depth: 0.5, }; const RING_EXTRUSION: Extrusion<Ring<Heart>> = Extrusion { base_shape: RING, half_depth: 0.5, }; // The transform of the camera in 2D const TRANSFORM_2D: Transform = Transform { translation: Vec3::ZERO, rotation: Quat::IDENTITY, scale: Vec3::ONE, }; // The projection used for the camera in 2D const PROJECTION_2D: Projection = Projection::Orthographic(OrthographicProjection { near: -1.0, far: 10.0, scale: 1.0, viewport_origin: Vec2::new(0.5, 0.5), scaling_mode: ScalingMode::AutoMax { max_width: 8.0, max_height: 20.0, }, area: Rect { min: Vec2::NEG_ONE, max: Vec2::ONE, }, }); // The transform of the camera in 3D const TRANSFORM_3D: Transform = Transform { translation: Vec3::ZERO, // The camera is pointing at the 3D shape rotation: Quat::from_xyzw(-0.2669336, -0.0, -0.0, 0.96371484), scale: Vec3::ONE, }; // The projection used for the camera in 3D const PROJECTION_3D: Projection = Projection::Perspective(PerspectiveProjection { fov: 
PI / 4.0, near: 0.1, far: 1000.0, aspect_ratio: 1.0, near_clip_plane: vec4(0.0, 0.0, -1.0, -0.1), }); /// State for tracking the currently displayed shape /// /// Also a component for associating the entity with this state, for toggling visibility #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, States, Default, Reflect, Component)] enum ShapeActive { #[default] /// The 2D heart shape is displayed Heart, /// The 2D heart ring shape is displayed Ring, /// The 3D extruded heart shape is displayed Extrusion, /// The 3D extruded heart ring shape is displayed RingExtrusion, } impl ShapeActive { const SHAPES: [ShapeActive; 4] = [ ShapeActive::Heart, ShapeActive::Ring, ShapeActive::Extrusion, ShapeActive::RingExtrusion, ]; fn next_shape(self) -> Self { Self::SHAPES .into_iter() .cycle() .skip_while(|shape| *shape != self) .nth(1) // move to the next element .unwrap() } } /// State for tracking the currently displayed shape #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, States, Default, Reflect)] enum BoundingShape { #[default] /// No bounding shapes None, /// The bounding sphere or circle of the shape BoundingSphere, /// The Axis Aligned Bounding Box (AABB) of the shape BoundingBox, } /// A marker component for our 2D shapes so we can query them separately from the camera #[derive(Component)] struct Shape2d; /// A marker component for our 3D shapes so we can query them separately from the camera #[derive(Component)] struct Shape3d; fn main() { let mut app = App::new(); app.add_plugins(DefaultPlugins); #[cfg(not(target_family = "wasm"))] app.add_plugins(WireframePlugin::default()); app.init_state::<BoundingShape>() .init_state::<ShapeActive>() .add_systems(Startup, setup) .add_systems( Update, ( ( rotate_2d_shapes.run_if(input_toggle_active(true, KeyCode::KeyR)), bounding_shapes_2d, ) .run_if(state_in_one_of([ShapeActive::Heart, ShapeActive::Ring])), ( rotate_3d_shapes.run_if(input_toggle_active(true, KeyCode::KeyR)), bounding_shapes_3d, ) .run_if(state_in_one_of([ 
ShapeActive::Extrusion, ShapeActive::RingExtrusion, ])), update_bounding_shape.run_if(input_just_pressed(KeyCode::KeyB)), switch_shapes.run_if(input_just_pressed(KeyCode::Tab)), ), ); #[cfg(not(target_family = "wasm"))] app.add_systems( Update, toggle_wireframes.run_if(input_just_pressed(KeyCode::Space)), ); app.run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Spawn the camera commands.spawn((Camera3d::default(), TRANSFORM_2D, PROJECTION_2D)); // Spawn the 2D heart commands.spawn(( // We can use the methods defined on the `MeshBuilder` to customize the mesh. Mesh3d(meshes.add(HEART.mesh().resolution(50))), MeshMaterial3d(materials.add(StandardMaterial { emissive: RED.into(), base_color: RED.into(), ..Default::default() })), Transform::from_xyz(0.0, 0.0, 0.0), Shape2d, Visibility::Visible, ShapeActive::Heart, )); // Spawn the 2D heart ring commands.spawn(( // We can use the methods defined on the `MeshBuilder` to customize the mesh. Mesh3d(meshes.add(RING.mesh().with_inner(|heart| heart.resolution(50)))), MeshMaterial3d(materials.add(StandardMaterial { emissive: RED.into(), base_color: RED.into(), ..Default::default() })), Transform::from_xyz(0.0, 0.0, 0.0), Shape2d, Visibility::Hidden, ShapeActive::Ring, )); // Spawn an extrusion of the heart commands.spawn(( // We can set a custom resolution for the round parts of the extrusion as well. Mesh3d(meshes.add(EXTRUSION.mesh().resolution(50))), MeshMaterial3d(materials.add(StandardMaterial { base_color: RED.into(), ..Default::default() })), Transform::from_xyz(0., -3., -5.).with_rotation(Quat::from_rotation_x(-PI / 4.)), Shape3d, Visibility::Hidden, ShapeActive::Extrusion, )); // Spawn an extrusion of the heart ring commands.spawn(( // We can set a custom resolution for the round parts of the extrusion as well. 
Mesh3d( meshes.add( RING_EXTRUSION .mesh() .with_inner(|ring| ring.with_inner(|heart| heart.resolution(50))), ), ), MeshMaterial3d(materials.add(StandardMaterial { base_color: RED.into(), ..Default::default() })), Transform::from_xyz(0., -3., -5.).with_rotation(Quat::from_rotation_x(-PI / 4.)), Shape3d, Visibility::Hidden, ShapeActive::RingExtrusion, )); // Point light for 3D commands.spawn(( PointLight { shadows_enabled: true, intensity: 10_000_000., range: 100.0, shadow_depth_bias: 0.2, ..default() }, Transform::from_xyz(8.0, 12.0, 1.0), )); let mut text = "\ Press 'B' to cycle between no bounding shapes, bounding boxes (AABBs) and bounding spheres / circles\n\ Press 'Tab' to cycle between 2D and 3D shapes\n\ Press 'R' to pause/resume rotation".to_string(); #[cfg(not(target_family = "wasm"))] text.push_str("\nPress 'Space' to toggle display of wireframes"); // Example instructions commands.spawn(( Text::new(text), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } // Rotate the 2D shapes. fn rotate_2d_shapes(mut shapes: Query<&mut Transform, With<Shape2d>>, time: Res<Time>) { let elapsed_seconds = time.elapsed_secs(); for mut transform in shapes.iter_mut() { transform.rotation = Quat::from_rotation_z(elapsed_seconds); } } // Draw bounding boxes or circles for the 2D shapes. fn bounding_shapes_2d( shapes: Query<&Transform, With<Shape2d>>, mut gizmos: Gizmos, bounding_shape: Res<State<BoundingShape>>, ) { for transform in shapes.iter() { // Get the rotation angle from the 3D rotation. let rotation = transform.rotation.to_scaled_axis().z; let rotation = Rot2::radians(rotation); let isometry = Isometry2d::new(transform.translation.xy(), rotation); match bounding_shape.get() { BoundingShape::None => (), BoundingShape::BoundingBox => { // Get the AABB of the primitive with the rotation and translation of the mesh. 
let aabb = HEART.aabb_2d(isometry); gizmos.rect_2d(aabb.center(), aabb.half_size() * 2., WHITE); } BoundingShape::BoundingSphere => { // Get the bounding sphere of the primitive with the rotation and translation of the mesh. let bounding_circle = HEART.bounding_circle(isometry); gizmos .circle_2d(bounding_circle.center(), bounding_circle.radius(), WHITE) .resolution(64); } } } } // Rotate the 3D shapes. fn rotate_3d_shapes(mut shapes: Query<&mut Transform, With<Shape3d>>, time: Res<Time>) { let delta_seconds = time.delta_secs(); for mut transform in shapes.iter_mut() { transform.rotate_y(delta_seconds); } } // Draw the AABBs or bounding spheres for the 3D shapes. fn bounding_shapes_3d( shapes: Query<&Transform, With<Shape3d>>, mut gizmos: Gizmos, bounding_shape: Res<State<BoundingShape>>, ) { for transform in shapes.iter() { match bounding_shape.get() { BoundingShape::None => (), BoundingShape::BoundingBox => { // Get the AABB of the extrusion with the rotation and translation of the mesh. let aabb = EXTRUSION.aabb_3d(transform.to_isometry()); gizmos.primitive_3d( &Cuboid::from_size(Vec3::from(aabb.half_size()) * 2.), aabb.center(), WHITE, ); } BoundingShape::BoundingSphere => { // Get the bounding sphere of the extrusion with the rotation and translation of the mesh. let bounding_sphere = EXTRUSION.bounding_sphere(transform.to_isometry()); gizmos.sphere(bounding_sphere.center(), bounding_sphere.radius(), WHITE); } } } } // Switch to the next bounding shape. fn update_bounding_shape( current: Res<State<BoundingShape>>, mut next: ResMut<NextState<BoundingShape>>, ) { next.set(match current.get() { BoundingShape::None => BoundingShape::BoundingBox, BoundingShape::BoundingBox => BoundingShape::BoundingSphere, BoundingShape::BoundingSphere => BoundingShape::None, }); } // Switch between shapes, and update 2D and 3D cameras. 
fn switch_shapes( current: Res<State<ShapeActive>>, mut next: ResMut<NextState<ShapeActive>>, camera: Single<(&mut Transform, &mut Projection)>, mut shapes: Query<(&mut Visibility, &ShapeActive)>, ) { let next_state = current.get().next_shape(); next.set(next_state); for (mut visibility, shape) in &mut shapes { if next_state == *shape { *visibility = Visibility::Visible; } else { *visibility = Visibility::Hidden; } } let (mut transform, mut projection) = camera.into_inner(); match next_state { ShapeActive::Heart | ShapeActive::Ring => { *transform = TRANSFORM_2D; *projection = PROJECTION_2D; } ShapeActive::Extrusion | ShapeActive::RingExtrusion => { *transform = TRANSFORM_3D; *projection = PROJECTION_3D; } }; } fn toggle_wireframes(mut wireframe_config: ResMut<WireframeConfig>) { wireframe_config.global = !wireframe_config.global; } /// A custom 2D heart primitive. The heart is made up of two circles centered at `Vec2::new(±radius, 0.)` each with the same `radius`. /// /// The tip of the heart connects the two circles at a 45° angle from `Vec3::NEG_Y`. #[derive(Copy, Clone)] struct Heart { /// The radius of each wing of the heart radius: f32, } // The `Primitive2d` or `Primitive3d` trait is required by almost all other traits for primitives in bevy. // Depending on your shape, you should implement either one of them. impl Primitive2d for Heart {} impl Heart { const fn new(radius: f32) -> Self { Self { radius } } } // The `Measured2d` and `Measured3d` traits are used to compute the perimeter, the area or the volume of a primitive. // If you implement `Measured2d` for a 2D primitive, `Measured3d` is automatically implemented for `Extrusion<T>`. 
impl Measured2d for Heart { fn perimeter(&self) -> f32 { self.radius * (2.5 * PI + ops::powf(2f32, 1.5) + 2.0) } fn area(&self) -> f32 { let circle_area = PI * self.radius * self.radius; let triangle_area = self.radius * self.radius * (1.0 + 2f32.sqrt()) / 2.0; let cutout = triangle_area - circle_area * 3.0 / 16.0; 2.0 * circle_area + 4.0 * cutout } } // The `Bounded2d` or `Bounded3d` traits are used to compute the Axis Aligned Bounding Boxes or bounding circles / spheres for primitives. impl Bounded2d for Heart { fn aabb_2d(&self, isometry: impl Into<Isometry2d>) -> Aabb2d { let isometry = isometry.into(); // The center of the circle at the center of the right wing of the heart let circle_center = isometry.rotation * Vec2::new(self.radius, 0.0); // The maximum X and Y positions of the two circles of the wings of the heart. let max_circle = circle_center.abs() + Vec2::splat(self.radius); // Since the two circles of the heart are mirrored around the origin, the minimum position is the negative of the maximum. let min_circle = -max_circle; // The position of the tip at the bottom of the heart let tip_position = isometry.rotation * Vec2::new(0.0, -self.radius * (1. + SQRT_2)); Aabb2d { min: isometry.translation + min_circle.min(tip_position), max: isometry.translation + max_circle.max(tip_position), } } fn bounding_circle(&self, isometry: impl Into<Isometry2d>) -> BoundingCircle { let isometry = isometry.into(); // The bounding circle of the heart is not at its origin. This `offset` is the offset between the center of the bounding circle and its translation. let offset = self.radius / ops::powf(2f32, 1.5); // The center of the bounding circle let center = isometry * Vec2::new(0.0, -offset); // The radius of the bounding circle let radius = self.radius * (1.0 + 2f32.sqrt()) - offset; BoundingCircle::new(center, radius) } } // You can implement the `BoundedExtrusion` trait to implement `Bounded3d for Extrusion<Heart>`. 
There is a default implementation for both AABBs and bounding spheres, // but you may be able to find faster solutions for your specific primitives. impl BoundedExtrusion for Heart {} // You can use the `Meshable` trait to create a `MeshBuilder` for the primitive. impl Meshable for Heart { // The `MeshBuilder` can be used to create the actual mesh for that primitive. type Output = HeartMeshBuilder; fn mesh(&self) -> Self::Output { Self::Output { heart: *self, resolution: 32, } } } // You can include any additional information needed for meshing the primitive in the `MeshBuilder`. struct HeartMeshBuilder { heart: Heart, // The resolution determines the amount of vertices used for each wing of the heart resolution: usize, } // This trait is needed so that the configuration methods of the builder of the primitive are also available for the builder for the extrusion. // If you do not want to support these configuration options for extrusions you can just implement them for your 2D `MeshBuilder`. trait HeartBuilder { /// Set the resolution for each of the wings of the heart. fn resolution(self, resolution: usize) -> Self; } impl HeartBuilder for HeartMeshBuilder { fn resolution(mut self, resolution: usize) -> Self { self.resolution = resolution; self } } impl HeartBuilder for ExtrusionBuilder<Heart> { fn resolution(mut self, resolution: usize) -> Self { self.base_builder.resolution = resolution; self } } impl MeshBuilder for HeartMeshBuilder { // This is where you should build the actual mesh. fn build(&self) -> Mesh { let radius = self.heart.radius; // The curved parts of each wing (half) of the heart have an angle of `PI * 1.25` or 225° let wing_angle = PI * 1.25; // We create buffers for the vertices, their normals and UVs, as well as the indices used to connect the vertices. 
let mut vertices = Vec::with_capacity(2 * self.resolution); let mut uvs = Vec::with_capacity(2 * self.resolution); let mut indices = Vec::with_capacity(6 * self.resolution - 9); // Since the heart is flat, we know all the normals are identical already. let normals = vec![[0f32, 0f32, 1f32]; 2 * self.resolution]; // The point in the middle of the two curved parts of the heart vertices.push([0.0; 3]); uvs.push([0.5, 0.5]); // The left wing of the heart, starting from the point in the middle. for i in 1..self.resolution { let angle = (i as f32 / self.resolution as f32) * wing_angle; let (sin, cos) = ops::sin_cos(angle); vertices.push([radius * (cos - 1.0), radius * sin, 0.0]); uvs.push([0.5 - (cos - 1.0) / 4., 0.5 - sin / 2.]); } // The bottom tip of the heart vertices.push([0.0, radius * (-1. - SQRT_2), 0.0]); uvs.push([0.5, 1.]); // The right wing of the heart, starting from the bottom most point and going towards the middle point. for i in 0..self.resolution - 1 { let angle = (i as f32 / self.resolution as f32) * wing_angle - PI / 4.; let (sin, cos) = ops::sin_cos(angle); vertices.push([radius * (cos + 1.0), radius * sin, 0.0]); uvs.push([0.5 - (cos + 1.0) / 4., 0.5 - sin / 2.]); } // This is where we build all the triangles from the points created above. // Each triangle has one corner on the middle point with the other two being adjacent points on the perimeter of the heart. for i in 2..2 * self.resolution as u32 { indices.extend_from_slice(&[i - 1, i, 0]); } // Here, the actual `Mesh` is created. We set the indices, vertices, normals and UVs created above and specify the topology of the mesh. 
Mesh::new( bevy::mesh::PrimitiveTopology::TriangleList, RenderAssetUsages::default(), ) .with_inserted_indices(bevy::mesh::Indices::U32(indices)) .with_inserted_attribute(Mesh::ATTRIBUTE_POSITION, vertices) .with_inserted_attribute(Mesh::ATTRIBUTE_NORMAL, normals) .with_inserted_attribute(Mesh::ATTRIBUTE_UV_0, uvs) } } // The `Extrudable` trait can be used to easily implement meshing for extrusions. impl Extrudable for HeartMeshBuilder { fn perimeter(&self) -> Vec<PerimeterSegment> { let resolution = self.resolution as u32; vec![ // The left wing of the heart PerimeterSegment::Smooth { // The normals of the first and last vertices of smooth segments have to be specified manually. first_normal: Vec2::X, last_normal: Vec2::new(-1.0, -1.0).normalize(), // These indices are used to index into the `ATTRIBUTE_POSITION` vec of your 2D mesh. indices: (0..resolution).collect(), }, // The bottom tip of the heart PerimeterSegment::Flat { indices: vec![resolution - 1, resolution, resolution + 1], }, // The right wing of the heart PerimeterSegment::Smooth { first_normal: Vec2::new(1.0, -1.0).normalize(), last_normal: Vec2::NEG_X, indices: (resolution + 1..2 * resolution).chain([0]).collect(), }, ] } } // Helper run condition for matching multiple states fn state_in_one_of<S: States, const N: usize>( states: [S; N], ) -> impl FnMut(Option<Res<State<S>>>) -> bool + Clone { move |current_state: Option<Res<State<S>>>| match current_state { Some(current_state) => states.contains(&current_state), None => false, } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/cubic_splines.rs
examples/math/cubic_splines.rs
//! This example exhibits different available modes of constructing cubic Bezier curves. use bevy::{ app::{App, Startup, Update}, color::*, ecs::system::Commands, gizmos::gizmos::Gizmos, input::{mouse::MouseButtonInput, ButtonState}, math::{cubic_splines::*, vec2}, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems( Update, ( handle_keypress, handle_mouse_move, handle_mouse_press, draw_edit_move, update_curve, update_spline_mode_text, update_cycling_mode_text, draw_curve, draw_control_points, ) .chain(), ) .run(); } fn setup(mut commands: Commands) { // Initialize the modes with their defaults: let spline_mode = SplineMode::default(); commands.insert_resource(spline_mode); let cycling_mode = CyclingMode::default(); commands.insert_resource(cycling_mode); // Starting data for [`ControlPoints`]: let default_points = vec![ vec2(-500., -200.), vec2(-250., 250.), vec2(250., 250.), vec2(500., -200.), ]; let default_tangents = vec![ vec2(0., 200.), vec2(200., 0.), vec2(0., -200.), vec2(-200., 0.), ]; let default_control_data = ControlPoints { points_and_tangents: default_points.into_iter().zip(default_tangents).collect(), }; let curve = form_curve(&default_control_data, spline_mode, cycling_mode); commands.insert_resource(curve); commands.insert_resource(default_control_data); // Mouse tracking information: commands.insert_resource(MousePosition::default()); commands.insert_resource(MouseEditMove::default()); commands.spawn(Camera2d); // The instructions and modes are rendered on the left-hand side in a column. 
let instructions_text = "Click and drag to add control points and their tangents\n\ R: Remove the last control point\n\ S: Cycle the spline construction being used\n\ C: Toggle cyclic curve construction"; let spline_mode_text = format!("Spline: {spline_mode}"); let cycling_mode_text = format!("{cycling_mode}"); let style = TextFont::default(); commands .spawn(Node { position_type: PositionType::Absolute, top: px(12), left: px(12), flex_direction: FlexDirection::Column, row_gap: px(20), ..default() }) .with_children(|parent| { parent.spawn((Text::new(instructions_text), style.clone())); parent.spawn((SplineModeText, Text(spline_mode_text), style.clone())); parent.spawn((CyclingModeText, Text(cycling_mode_text), style.clone())); }); } // ----------------------------------- // Curve-related Resources and Systems // ----------------------------------- /// The current spline mode, which determines the spline method used in conjunction with the /// control points. #[derive(Clone, Copy, Resource, Default)] enum SplineMode { #[default] Hermite, Cardinal, B, } impl std::fmt::Display for SplineMode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { SplineMode::Hermite => f.write_str("Hermite"), SplineMode::Cardinal => f.write_str("Cardinal"), SplineMode::B => f.write_str("B"), } } } /// The current cycling mode, which determines whether the control points should be interpolated /// cyclically (to make a loop). #[derive(Clone, Copy, Resource, Default)] enum CyclingMode { #[default] NotCyclic, Cyclic, } impl std::fmt::Display for CyclingMode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { CyclingMode::NotCyclic => f.write_str("Not Cyclic"), CyclingMode::Cyclic => f.write_str("Cyclic"), } } } /// The curve presently being displayed. This is optional because there may not be enough control /// points to actually generate a curve. 
#[derive(Clone, Default, Resource)] struct Curve(Option<CubicCurve<Vec2>>); /// The control points used to generate a curve. The tangent components are only used in the case of /// Hermite interpolation. #[derive(Clone, Resource)] struct ControlPoints { points_and_tangents: Vec<(Vec2, Vec2)>, } /// This system is responsible for updating the [`Curve`] when the [control points] or active modes /// change. /// /// [control points]: ControlPoints fn update_curve( control_points: Res<ControlPoints>, spline_mode: Res<SplineMode>, cycling_mode: Res<CyclingMode>, mut curve: ResMut<Curve>, ) { if !control_points.is_changed() && !spline_mode.is_changed() && !cycling_mode.is_changed() { return; } *curve = form_curve(&control_points, *spline_mode, *cycling_mode); } /// This system uses gizmos to draw the current [`Curve`] by breaking it up into a large number /// of line segments. fn draw_curve(curve: Res<Curve>, mut gizmos: Gizmos) { let Some(ref curve) = curve.0 else { return; }; // Scale resolution with curve length so it doesn't degrade as the length increases. let resolution = 100 * curve.segments().len(); gizmos.linestrip( curve.iter_positions(resolution).map(|pt| pt.extend(0.0)), Color::srgb(1.0, 1.0, 1.0), ); } /// This system uses gizmos to draw the current [control points] as circles, displaying their /// tangent vectors as arrows in the case of a Hermite spline. /// /// [control points]: ControlPoints fn draw_control_points( control_points: Res<ControlPoints>, spline_mode: Res<SplineMode>, mut gizmos: Gizmos, ) { for &(point, tangent) in &control_points.points_and_tangents { gizmos.circle_2d(point, 10.0, Color::srgb(0.0, 1.0, 0.0)); if matches!(*spline_mode, SplineMode::Hermite) { gizmos.arrow_2d(point, point + tangent, Color::srgb(1.0, 0.0, 0.0)); } } } /// Helper function for generating a [`Curve`] from [control points] and selected modes. 
/// /// [control points]: ControlPoints fn form_curve( control_points: &ControlPoints, spline_mode: SplineMode, cycling_mode: CyclingMode, ) -> Curve { let (points, tangents): (Vec<_>, Vec<_>) = control_points.points_and_tangents.iter().copied().unzip(); match spline_mode { SplineMode::Hermite => { let spline = CubicHermite::new(points, tangents); Curve(match cycling_mode { CyclingMode::NotCyclic => spline.to_curve().ok(), CyclingMode::Cyclic => spline.to_curve_cyclic().ok(), }) } SplineMode::Cardinal => { let spline = CubicCardinalSpline::new_catmull_rom(points); Curve(match cycling_mode { CyclingMode::NotCyclic => spline.to_curve().ok(), CyclingMode::Cyclic => spline.to_curve_cyclic().ok(), }) } SplineMode::B => { let spline = CubicBSpline::new(points); Curve(match cycling_mode { CyclingMode::NotCyclic => spline.to_curve().ok(), CyclingMode::Cyclic => spline.to_curve_cyclic().ok(), }) } } } // -------------------- // Text-related Components and Systems // -------------------- /// Marker component for the text node that displays the current [`SplineMode`]. #[derive(Component)] struct SplineModeText; /// Marker component for the text node that displays the current [`CyclingMode`]. 
#[derive(Component)] struct CyclingModeText; fn update_spline_mode_text( spline_mode: Res<SplineMode>, mut spline_mode_text: Query<&mut Text, With<SplineModeText>>, ) { if !spline_mode.is_changed() { return; } let new_text = format!("Spline: {}", *spline_mode); for mut spline_mode_text in spline_mode_text.iter_mut() { (**spline_mode_text).clone_from(&new_text); } } fn update_cycling_mode_text( cycling_mode: Res<CyclingMode>, mut cycling_mode_text: Query<&mut Text, With<CyclingModeText>>, ) { if !cycling_mode.is_changed() { return; } let new_text = format!("{}", *cycling_mode); for mut cycling_mode_text in cycling_mode_text.iter_mut() { (**cycling_mode_text).clone_from(&new_text); } } // ----------------------------------- // Input-related Resources and Systems // ----------------------------------- /// A small state machine which tracks a click-and-drag motion used to create new control points. /// /// When the user is not doing a click-and-drag motion, the `start` field is `None`. When the user /// presses the left mouse button, the location of that press is temporarily stored in the field. #[derive(Clone, Default, Resource)] struct MouseEditMove { start: Option<Vec2>, } /// The current mouse position, if known. #[derive(Clone, Default, Resource)] struct MousePosition(Option<Vec2>); /// Update the current cursor position and track it in the [`MousePosition`] resource. fn handle_mouse_move( mut cursor_moved_reader: MessageReader<CursorMoved>, mut mouse_position: ResMut<MousePosition>, ) { if let Some(cursor_moved) = cursor_moved_reader.read().last() { mouse_position.0 = Some(cursor_moved.position); } } /// This system handles updating the [`MouseEditMove`] resource, orchestrating the logical part /// of the click-and-drag motion which actually creates new control points. 
fn handle_mouse_press( mut mouse_button_input_reader: MessageReader<MouseButtonInput>, mouse_position: Res<MousePosition>, mut edit_move: ResMut<MouseEditMove>, mut control_points: ResMut<ControlPoints>, camera: Single<(&Camera, &GlobalTransform)>, ) { let Some(mouse_pos) = mouse_position.0 else { return; }; // Handle click and drag behavior for mouse_button_input in mouse_button_input_reader.read() { if mouse_button_input.button != MouseButton::Left { continue; } match mouse_button_input.state { ButtonState::Pressed => { if edit_move.start.is_some() { // If the edit move already has a start, press event should do nothing. continue; } // This press represents the start of the edit move. edit_move.start = Some(mouse_pos); } ButtonState::Released => { // Release is only meaningful if we started an edit move. let Some(start) = edit_move.start else { continue; }; let (camera, camera_transform) = *camera; // Convert the starting point and end point (current mouse pos) into world coords: let Ok(point) = camera.viewport_to_world_2d(camera_transform, start) else { continue; }; let Ok(end_point) = camera.viewport_to_world_2d(camera_transform, mouse_pos) else { continue; }; let tangent = end_point - point; // The start of the click-and-drag motion represents the point to add, // while the difference with the current position represents the tangent. control_points.points_and_tangents.push((point, tangent)); // Reset the edit move since we've consumed it. edit_move.start = None; } } } } /// This system handles drawing the "preview" control point based on the state of [`MouseEditMove`]. 
fn draw_edit_move( edit_move: Res<MouseEditMove>, mouse_position: Res<MousePosition>, mut gizmos: Gizmos, camera: Single<(&Camera, &GlobalTransform)>, ) { let Some(start) = edit_move.start else { return; }; let Some(mouse_pos) = mouse_position.0 else { return; }; let (camera, camera_transform) = *camera; // Resources store data in viewport coordinates, so we need to convert to world coordinates // to display them: let Ok(start) = camera.viewport_to_world_2d(camera_transform, start) else { return; }; let Ok(end) = camera.viewport_to_world_2d(camera_transform, mouse_pos) else { return; }; gizmos.circle_2d(start, 10.0, Color::srgb(0.0, 1.0, 0.7)); gizmos.circle_2d(start, 7.0, Color::srgb(0.0, 1.0, 0.7)); gizmos.arrow_2d(start, end, Color::srgb(1.0, 0.0, 0.7)); } /// This system handles all keyboard commands. fn handle_keypress( keyboard: Res<ButtonInput<KeyCode>>, mut spline_mode: ResMut<SplineMode>, mut cycling_mode: ResMut<CyclingMode>, mut control_points: ResMut<ControlPoints>, ) { // S => change spline mode if keyboard.just_pressed(KeyCode::KeyS) { *spline_mode = match *spline_mode { SplineMode::Hermite => SplineMode::Cardinal, SplineMode::Cardinal => SplineMode::B, SplineMode::B => SplineMode::Hermite, } } // C => change cycling mode if keyboard.just_pressed(KeyCode::KeyC) { *cycling_mode = match *cycling_mode { CyclingMode::NotCyclic => CyclingMode::Cyclic, CyclingMode::Cyclic => CyclingMode::NotCyclic, } } // R => remove last control point if keyboard.just_pressed(KeyCode::KeyR) { control_points.points_and_tangents.pop(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/math/random_sampling.rs
examples/math/random_sampling.rs
//! This example shows how to sample random points from primitive shapes. use bevy::{ input::mouse::{AccumulatedMouseMotion, MouseButtonInput}, math::prelude::*, mesh::SphereKind, prelude::*, }; use rand::{distr::Distribution, SeedableRng}; use rand_chacha::ChaCha8Rng; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (handle_mouse, handle_keypress)) .run(); } /// Resource for the random sampling mode, telling whether to sample the interior or the boundary. #[derive(Resource)] enum Mode { Interior, Boundary, } /// Resource storing the shape being sampled. #[derive(Resource)] struct SampledShape(Cuboid); /// The source of randomness used by this example. #[derive(Resource)] struct RandomSource(ChaCha8Rng); /// A container for the handle storing the mesh used to display sampled points as spheres. #[derive(Resource)] struct PointMesh(Handle<Mesh>); /// A container for the handle storing the material used to display sampled points. #[derive(Resource)] struct PointMaterial(Handle<StandardMaterial>); /// Marker component for sampled points. #[derive(Component)] struct SamplePoint; /// The pressed state of the mouse, used for camera motion. #[derive(Resource)] struct MousePressed(bool); fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Use seeded rng and store it in a resource; this makes the random output reproducible. let seeded_rng = ChaCha8Rng::seed_from_u64(19878367467712); commands.insert_resource(RandomSource(seeded_rng)); // Make a plane for establishing space. 
commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(12.0, 12.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), Transform::from_xyz(0.0, -2.5, 0.0), )); // Store the shape we sample from in a resource: let shape = Cuboid::from_length(2.9); commands.insert_resource(SampledShape(shape)); // The sampled shape shown transparently: commands.spawn(( Mesh3d(meshes.add(shape)), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::srgba(0.2, 0.1, 0.6, 0.3), alpha_mode: AlphaMode::Blend, cull_mode: None, ..default() })), )); // A light: commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // A camera: commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 3.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Store the mesh and material for sample points in resources: commands.insert_resource(PointMesh( meshes.add( Sphere::new(0.03) .mesh() .kind(SphereKind::Ico { subdivisions: 3 }), ), )); commands.insert_resource(PointMaterial(materials.add(StandardMaterial { base_color: Color::srgb(1.0, 0.8, 0.8), metallic: 0.8, ..default() }))); // Instructions for the example: commands.spawn(( Text::new( "Controls:\n\ M: Toggle between sampling boundary and interior.\n\ R: Restart (erase all samples).\n\ S: Add one random sample.\n\ D: Add 100 random samples.\n\ Rotate camera by holding left mouse and panning left/right.", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); // The mode starts with interior points. commands.insert_resource(Mode::Interior); // Starting mouse-pressed state is false. 
commands.insert_resource(MousePressed(false)); } // Handle user inputs from the keyboard: fn handle_keypress( mut commands: Commands, keyboard: Res<ButtonInput<KeyCode>>, mut mode: ResMut<Mode>, shape: Res<SampledShape>, mut random_source: ResMut<RandomSource>, sample_mesh: Res<PointMesh>, sample_material: Res<PointMaterial>, samples: Query<Entity, With<SamplePoint>>, ) { // R => restart, deleting all samples if keyboard.just_pressed(KeyCode::KeyR) { for entity in &samples { commands.entity(entity).despawn(); } } // S => sample once if keyboard.just_pressed(KeyCode::KeyS) { let rng = &mut random_source.0; // Get a single random Vec3: let sample: Vec3 = match *mode { Mode::Interior => shape.0.sample_interior(rng), Mode::Boundary => shape.0.sample_boundary(rng), }; // Spawn a sphere at the random location: commands.spawn(( Mesh3d(sample_mesh.0.clone()), MeshMaterial3d(sample_material.0.clone()), Transform::from_translation(sample), SamplePoint, )); // NOTE: The point is inside the cube created at setup just because of how the // scene is constructed; in general, you would want to use something like // `cube_transform.transform_point(sample)` to get the position of where the sample // would be after adjusting for the position and orientation of the cube. // // If the spawned point also needed to follow the position of the cube as it moved, // then making it a child entity of the cube would be a good approach. 
} // D => generate many samples if keyboard.just_pressed(KeyCode::KeyD) { let mut rng = &mut random_source.0; // Get 100 random Vec3s: let samples: Vec<Vec3> = match *mode { Mode::Interior => { let dist = shape.0.interior_dist(); dist.sample_iter(&mut rng).take(100).collect() } Mode::Boundary => { let dist = shape.0.boundary_dist(); dist.sample_iter(&mut rng).take(100).collect() } }; // For each sample point, spawn a sphere: for sample in samples { commands.spawn(( Mesh3d(sample_mesh.0.clone()), MeshMaterial3d(sample_material.0.clone()), Transform::from_translation(sample), SamplePoint, )); } // NOTE: See the previous note above regarding the positioning of these samples // relative to the transform of the cube containing them. } // M => toggle mode between interior and boundary. if keyboard.just_pressed(KeyCode::KeyM) { match *mode { Mode::Interior => *mode = Mode::Boundary, Mode::Boundary => *mode = Mode::Interior, } } } // Handle user mouse input for panning the camera around: fn handle_mouse( accumulated_mouse_motion: Res<AccumulatedMouseMotion>, mut mouse_button_inputs: MessageReader<MouseButtonInput>, mut camera_transform: Single<&mut Transform, With<Camera>>, mut mouse_pressed: ResMut<MousePressed>, ) { // Store left-pressed state in the MousePressed resource for mouse_button_input in mouse_button_inputs.read() { if mouse_button_input.button != MouseButton::Left { continue; } *mouse_pressed = MousePressed(mouse_button_input.state.is_pressed()); } // If the mouse is not pressed, just ignore motion events if !mouse_pressed.0 { return; } if accumulated_mouse_motion.delta != Vec2::ZERO { let displacement = accumulated_mouse_motion.delta.x; camera_transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(-displacement / 150.)); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/transforms/translation.rs
examples/transforms/translation.rs
//! Illustrates how to move an object along an axis. use bevy::prelude::*; // Define a struct to keep some information about our entity. // Here it's an arbitrary movement speed, the spawn location, and a maximum distance from it. #[derive(Component)] struct Movable { spawn: Vec3, max_distance: f32, speed: f32, } // Implement a utility function for easier Movable struct creation. impl Movable { fn new(spawn: Vec3) -> Self { Movable { spawn, max_distance: 5.0, speed: 2.0, } } } fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, move_cube) .run(); } // Startup system to setup the scene and spawn all relevant entities. fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Add a cube to visualize translation. let entity_spawn = Vec3::ZERO; commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_translation(entity_spawn), Movable::new(entity_spawn), )); // Spawn a camera looking at the entities to show what's happening in this example. commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 10.0, 20.0).looking_at(entity_spawn, Vec3::Y), )); // Add a light source for better 3d visibility. commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 3.0, 3.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This system will move all Movable entities with a Transform fn move_cube(mut cubes: Query<(&mut Transform, &mut Movable)>, timer: Res<Time>) { for (mut transform, mut cube) in &mut cubes { // Check if the entity moved too far from its spawn, if so invert the moving direction. if (cube.spawn - transform.translation).length() > cube.max_distance { cube.speed *= -1.0; } let direction = transform.local_x(); transform.translation += direction * cube.speed * timer.delta_secs(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/transforms/transform.rs
examples/transforms/transform.rs
//! Shows multiple transformations of objects. use std::f32::consts::PI; use bevy::{color::palettes::basic::YELLOW, prelude::*}; // A struct for additional data of for a moving cube. #[derive(Component)] struct CubeState { start_pos: Vec3, move_speed: f32, turn_speed: f32, } // A struct adding information to a scalable entity, // that will be stationary at the center of the scene. #[derive(Component)] struct Center { max_size: f32, min_size: f32, scale_factor: f32, } fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems( Update, ( move_cube, rotate_cube, scale_down_sphere_proportional_to_cube_travel_distance, ) .chain(), ) .run(); } // Startup system to setup the scene and spawn all relevant entities. fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Add an object (sphere) for visualizing scaling. commands.spawn(( Mesh3d(meshes.add(Sphere::new(3.0).mesh().ico(32).unwrap())), MeshMaterial3d(materials.add(Color::from(YELLOW))), Transform::from_translation(Vec3::ZERO), Center { max_size: 1.0, min_size: 0.1, scale_factor: 0.05, }, )); // Add the cube to visualize rotation and translation. // This cube will circle around the center_sphere // by changing its rotation each frame and moving forward. // Define a start transform for an orbiting cube, that's away from our central object (sphere) // and rotate it so it will be able to move around the sphere and not towards it. let cube_spawn = Transform::from_translation(Vec3::Z * -10.0).with_rotation(Quat::from_rotation_y(PI / 2.)); commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::WHITE)), cube_spawn, CubeState { start_pos: cube_spawn.translation, move_speed: 2.0, turn_speed: 0.2, }, )); // Spawn a camera looking at the entities to show what's happening in this example. 
commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 10.0, 20.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Add a light source for better 3d visibility. commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 3.0, 3.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This system will move the cube forward. fn move_cube(mut cubes: Query<(&mut Transform, &mut CubeState)>, timer: Res<Time>) { for (mut transform, cube) in &mut cubes { // Move the cube forward smoothly at a given move_speed. let forward = transform.forward(); transform.translation += forward * cube.move_speed * timer.delta_secs(); } } // This system will rotate the cube slightly towards the center_sphere. // Due to the forward movement the resulting movement // will be a circular motion around the center_sphere. fn rotate_cube( mut cubes: Query<(&mut Transform, &mut CubeState), Without<Center>>, center_spheres: Query<&Transform, With<Center>>, timer: Res<Time>, ) { // Calculate the point to circle around. (The position of the center_sphere) let mut center: Vec3 = Vec3::ZERO; for sphere in &center_spheres { center += sphere.translation; } // Update the rotation of the cube(s). for (mut transform, cube) in &mut cubes { // Calculate the rotation of the cube if it would be looking at the sphere in the center. let look_at_sphere = transform.looking_at(center, *transform.local_y()); // Interpolate between the current rotation and the fully turned rotation // when looking at the sphere, with a given turn speed to get a smooth motion. // With higher speed the curvature of the orbit would be smaller. let incremental_turn_weight = cube.turn_speed * timer.delta_secs(); let old_rotation = transform.rotation; transform.rotation = old_rotation.lerp(look_at_sphere.rotation, incremental_turn_weight); } } // This system will scale down the sphere in the center of the scene // according to the traveling distance of the orbiting cube(s) from their start position(s). 
fn scale_down_sphere_proportional_to_cube_travel_distance( cubes: Query<(&Transform, &CubeState), Without<Center>>, mut centers: Query<(&mut Transform, &Center)>, ) { // First we need to calculate the length of between // the current position of the orbiting cube and the spawn position. let mut distances = 0.0; for (cube_transform, cube_state) in &cubes { distances += (cube_state.start_pos - cube_transform.translation).length(); } // Now we use the calculated value to scale the sphere in the center accordingly. for (mut transform, center) in &mut centers { // Calculate the new size from the calculated distances and the centers scale_factor. // Since we want to have the sphere at its max_size at the cubes spawn location we start by // using the max_size as start value and subtract the distances scaled by a scaling factor. let mut new_size: f32 = center.max_size - center.scale_factor * distances; // The new size should also not be smaller than the centers min_size. // Therefore the max value out of (new_size, center.min_size) is used. new_size = new_size.max(center.min_size); // Now scale the sphere uniformly in all directions using new_size. // Here Vec3:splat is used to create a vector with new_size in x, y and z direction. transform.scale = Vec3::splat(new_size); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/transforms/3d_rotation.rs
examples/transforms/3d_rotation.rs
//! Illustrates how to rotate an object around an axis. use bevy::prelude::*; use std::f32::consts::TAU; // Define a component to designate a rotation speed to an entity. #[derive(Component)] struct Rotatable { speed: f32, } fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, rotate_cube) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Spawn a cube to rotate. commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_translation(Vec3::ZERO), Rotatable { speed: 0.3 }, )); // Spawn a camera looking at the entities to show what's happening in this example. commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 10.0, 20.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Add a light source so we can see clearly. commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 3.0, 3.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This system will rotate any entity in the scene with a Rotatable component around its y-axis. fn rotate_cube(mut cubes: Query<(&mut Transform, &Rotatable)>, timer: Res<Time>) { for (mut transform, cube) in &mut cubes { // The speed is first multiplied by TAU which is a full rotation (360deg) in radians, // and then multiplied by delta_secs which is the time that passed last frame. // In other words. Speed is equal to the amount of rotations per second. transform.rotate_y(cube.speed * TAU * timer.delta_secs()); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/transforms/scale.rs
examples/transforms/scale.rs
//! Illustrates how to scale an object in each direction. use std::f32::consts::PI; use bevy::prelude::*; // Define a component to keep information for the scaled object. #[derive(Component)] struct Scaling { scale_direction: Vec3, scale_speed: f32, max_element_size: f32, min_element_size: f32, } // Implement a simple initialization. impl Scaling { fn new() -> Self { Scaling { scale_direction: Vec3::X, scale_speed: 2.0, max_element_size: 5.0, min_element_size: 1.0, } } } fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (change_scale_direction, scale_cube)) .run(); } // Startup system to setup the scene and spawn all relevant entities. fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Spawn a cube to scale. commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_rotation(Quat::from_rotation_y(PI / 4.0)), Scaling::new(), )); // Spawn a camera looking at the entities to show what's happening in this example. commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 10.0, 20.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Add a light source for better 3d visibility. commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 3.0, 3.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This system will check if a scaled entity went above or below the entities scaling bounds // and change the direction of the scaling vector. fn change_scale_direction(mut cubes: Query<(&mut Transform, &mut Scaling)>) { for (mut transform, mut cube) in &mut cubes { // If an entity scaled beyond the maximum of its size in any dimension // the scaling vector is flipped so the scaling is gradually reverted. // Additionally, to ensure the condition does not trigger again we floor the elements to // their next full value, which should be max_element_size at max. 
if transform.scale.max_element() > cube.max_element_size { cube.scale_direction *= -1.0; transform.scale = transform.scale.floor(); } // If an entity scaled beyond the minimum of its size in any dimension // the scaling vector is also flipped. // Additionally the Values are ceiled to be min_element_size at least // and the scale direction is flipped. // This way the entity will change the dimension in which it is scaled any time it // reaches its min_element_size. if transform.scale.min_element() < cube.min_element_size { cube.scale_direction *= -1.0; transform.scale = transform.scale.ceil(); cube.scale_direction = cube.scale_direction.zxy(); } } } // This system will scale any entity with assigned Scaling in each direction // by cycling through the directions to scale. fn scale_cube(mut cubes: Query<(&mut Transform, &Scaling)>, timer: Res<Time>) { for (mut transform, cube) in &mut cubes { transform.scale += cube.scale_direction * cube.scale_speed * timer.delta_secs(); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/transforms/align.rs
examples/transforms/align.rs
//! This example shows how to align the orientations of objects in 3D space along two axes using the `Transform::align` API. use bevy::{ color::palettes::basic::{GRAY, RED, WHITE}, input::mouse::{AccumulatedMouseMotion, MouseButtonInput}, math::StableInterpolate, prelude::*, }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (draw_ship_axes, draw_random_axes)) .add_systems(Update, (handle_keypress, handle_mouse, rotate_ship).chain()) .run(); } /// This struct stores metadata for a single rotational move of the ship #[derive(Component, Default)] struct Ship { /// The target transform of the ship move, the endpoint of interpolation target_transform: Transform, /// Whether the ship is currently in motion; allows motion to be paused in_motion: bool, } #[derive(Component)] struct RandomAxes(Dir3, Dir3); #[derive(Component)] struct Instructions; #[derive(Resource)] struct MousePressed(bool); #[derive(Resource)] struct SeededRng(ChaCha8Rng); // Setup fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, asset_server: Res<AssetServer>, ) { // We're seeding the PRNG here to make this example deterministic for testing purposes. // This isn't strictly required in practical use unless you need your app to be deterministic. 
let mut seeded_rng = ChaCha8Rng::seed_from_u64(19878367467712); // A camera looking at the origin commands.spawn(( Camera3d::default(), Transform::from_xyz(3., 2.5, 4.).looking_at(Vec3::ZERO, Vec3::Y), )); // A plane that we can sit on top of commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(100.0, 100.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), Transform::from_xyz(0., -2., 0.), )); // A light source commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 7.0, -4.0), )); // Initialize random axes let first = seeded_rng.random(); let second = seeded_rng.random(); commands.spawn(RandomAxes(first, second)); // Finally, our ship that is going to rotate commands.spawn(( SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/ship/craft_speederD.gltf")), ), Ship { target_transform: random_axes_target_alignment(&RandomAxes(first, second)), ..default() }, )); // Instructions for the example commands.spawn(( Text::new( "The bright red axis is the primary alignment axis, and it will always be\n\ made to coincide with the primary target direction (white) exactly.\n\ The fainter red axis is the secondary alignment axis, and it is made to\n\ line up with the secondary target direction (gray) as closely as possible.\n\ Press 'R' to generate random target directions.\n\ Press 'T' to align the ship to those directions.\n\ Click and drag the mouse to rotate the camera.\n\ Press 'H' to hide/show these instructions.", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, Instructions, )); commands.insert_resource(MousePressed(false)); commands.insert_resource(SeededRng(seeded_rng)); } // Update systems // Draw the main and secondary axes on the rotating ship fn draw_ship_axes(mut gizmos: Gizmos, ship_transform: Single<&Transform, With<Ship>>) { // Local Z-axis arrow, negative direction let z_ends = arrow_ends(*ship_transform, Vec3::NEG_Z, 1.5); 
gizmos.arrow(z_ends.0, z_ends.1, RED); // local X-axis arrow let x_ends = arrow_ends(*ship_transform, Vec3::X, 1.5); gizmos.arrow(x_ends.0, x_ends.1, Color::srgb(0.65, 0., 0.)); } // Draw the randomly generated axes fn draw_random_axes(mut gizmos: Gizmos, random_axes: Single<&RandomAxes>) { let RandomAxes(v1, v2) = *random_axes; gizmos.arrow(Vec3::ZERO, 1.5 * *v1, WHITE); gizmos.arrow(Vec3::ZERO, 1.5 * *v2, GRAY); } // Actually update the ship's transform according to its initial source and target fn rotate_ship(ship: Single<(&mut Ship, &mut Transform)>, time: Res<Time>) { let (mut ship, mut ship_transform) = ship.into_inner(); if !ship.in_motion { return; } let target_rotation = ship.target_transform.rotation; ship_transform .rotation .smooth_nudge(&target_rotation, 3.0, time.delta_secs()); if ship_transform.rotation.angle_between(target_rotation) <= f32::EPSILON { ship.in_motion = false; } } // Handle user inputs from the keyboard for dynamically altering the scenario fn handle_keypress( mut ship: Single<&mut Ship>, mut random_axes: Single<&mut RandomAxes>, mut instructions_viz: Single<&mut Visibility, With<Instructions>>, keyboard: Res<ButtonInput<KeyCode>>, mut seeded_rng: ResMut<SeededRng>, ) { if keyboard.just_pressed(KeyCode::KeyR) { // Randomize the target axes let first = seeded_rng.0.random(); let second = seeded_rng.0.random(); **random_axes = RandomAxes(first, second); // Stop the ship and set it up to transform from its present orientation to the new one ship.in_motion = false; ship.target_transform = random_axes_target_alignment(&random_axes); } if keyboard.just_pressed(KeyCode::KeyT) { ship.in_motion ^= true; } if keyboard.just_pressed(KeyCode::KeyH) { if *instructions_viz.as_ref() == Visibility::Hidden { **instructions_viz = Visibility::Visible; } else { **instructions_viz = Visibility::Hidden; } } } // Handle user mouse input for panning the camera around fn handle_mouse( accumulated_mouse_motion: Res<AccumulatedMouseMotion>, mut 
mouse_button_inputs: MessageReader<MouseButtonInput>, mut camera_transform: Single<&mut Transform, With<Camera>>, mut mouse_pressed: ResMut<MousePressed>, ) { // Store left-pressed state in the MousePressed resource for mouse_button_input in mouse_button_inputs.read() { if mouse_button_input.button != MouseButton::Left { continue; } *mouse_pressed = MousePressed(mouse_button_input.state.is_pressed()); } // If the mouse is not pressed, just ignore motion events if !mouse_pressed.0 { return; } if accumulated_mouse_motion.delta != Vec2::ZERO { let displacement = accumulated_mouse_motion.delta.x; camera_transform.rotate_around(Vec3::ZERO, Quat::from_rotation_y(-displacement / 75.)); } } // Helper functions (i.e. non-system functions) fn arrow_ends(transform: &Transform, axis: Vec3, length: f32) -> (Vec3, Vec3) { let local_vector = length * (transform.rotation * axis); (transform.translation, transform.translation + local_vector) } // This is where `Transform::align` is actually used! // Note that the choice of `Vec3::X` and `Vec3::Y` here matches the use of those in `draw_ship_axes`. fn random_axes_target_alignment(random_axes: &RandomAxes) -> Transform { let RandomAxes(first, second) = random_axes; Transform::IDENTITY.aligned_by(Vec3::NEG_Z, *first, Vec3::X, *second) }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material_2d.rs
examples/shader/shader_material_2d.rs
//! A shader and a material that uses it. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, sprite_render::{AlphaMode2d, Material2d, Material2dPlugin}, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/custom_material_2d.wgsl"; fn main() { App::new() .add_plugins(( DefaultPlugins, Material2dPlugin::<CustomMaterial>::default(), )) .add_systems(Startup, setup) .run(); } // Setup a simple 2d scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, asset_server: Res<AssetServer>, ) { // camera commands.spawn(Camera2d); // quad commands.spawn(( Mesh2d(meshes.add(Rectangle::default())), MeshMaterial2d(materials.add(CustomMaterial { color: LinearRgba::BLUE, color_texture: Some(asset_server.load("branding/icon.png")), })), Transform::default().with_scale(Vec3::splat(128.)), )); } // This is the struct that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[uniform(0)] color: LinearRgba, #[texture(1)] #[sampler(2)] color_texture: Option<Handle<Image>>, } /// The Material2d trait is very configurable, but comes with sensible defaults for all methods. /// You only need to implement functions for features that need non-default behavior. See the Material2d api docs for details! impl Material2d for CustomMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn alpha_mode(&self) -> AlphaMode2d { AlphaMode2d::Mask(0.5) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/compute_shader_game_of_life.rs
examples/shader/compute_shader_game_of_life.rs
//! A compute shader that simulates Conway's Game of Life. //! //! Compute shaders use the GPU for computing arbitrary information, that may be independent of what //! is rendered to the screen. use bevy::{ asset::RenderAssetUsages, prelude::*, render::{ extract_resource::{ExtractResource, ExtractResourcePlugin}, render_asset::RenderAssets, render_graph::{self, RenderGraph, RenderLabel}, render_resource::{ binding_types::{texture_storage_2d, uniform_buffer}, *, }, renderer::{RenderContext, RenderDevice, RenderQueue}, texture::GpuImage, Render, RenderApp, RenderStartup, RenderSystems, }, shader::PipelineCacheError, }; use std::borrow::Cow; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/game_of_life.wgsl"; const DISPLAY_FACTOR: u32 = 4; const SIZE: UVec2 = UVec2::new(1280 / DISPLAY_FACTOR, 720 / DISPLAY_FACTOR); const WORKGROUP_SIZE: u32 = 8; fn main() { App::new() .insert_resource(ClearColor(Color::BLACK)) .add_plugins(( DefaultPlugins .set(WindowPlugin { primary_window: Some(Window { resolution: (SIZE * DISPLAY_FACTOR).into(), // uncomment for unthrottled FPS // present_mode: bevy::window::PresentMode::AutoNoVsync, ..default() }), ..default() }) .set(ImagePlugin::default_nearest()), GameOfLifeComputePlugin, )) .add_systems(Startup, setup) .add_systems(Update, switch_textures) .run(); } fn setup(mut commands: Commands, mut images: ResMut<Assets<Image>>) { let mut image = Image::new_target_texture(SIZE.x, SIZE.y, TextureFormat::Rgba32Float, None); image.asset_usage = RenderAssetUsages::RENDER_WORLD; image.texture_descriptor.usage = TextureUsages::COPY_DST | TextureUsages::STORAGE_BINDING | TextureUsages::TEXTURE_BINDING; let image0 = images.add(image.clone()); let image1 = images.add(image); commands.spawn(( Sprite { image: image0.clone(), custom_size: Some(SIZE.as_vec2()), ..default() }, Transform::from_scale(Vec3::splat(DISPLAY_FACTOR as f32)), )); commands.spawn(Camera2d); 
commands.insert_resource(GameOfLifeImages { texture_a: image0, texture_b: image1, }); commands.insert_resource(GameOfLifeUniforms { alive_color: LinearRgba::RED, }); } // Switch texture to display every frame to show the one that was written to most recently. fn switch_textures(images: Res<GameOfLifeImages>, mut sprite: Single<&mut Sprite>) { if sprite.image == images.texture_a { sprite.image = images.texture_b.clone(); } else { sprite.image = images.texture_a.clone(); } } struct GameOfLifeComputePlugin; #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)] struct GameOfLifeLabel; impl Plugin for GameOfLifeComputePlugin { fn build(&self, app: &mut App) { // Extract the game of life image resource from the main world into the render world // for operation on by the compute shader and display on the sprite. app.add_plugins(( ExtractResourcePlugin::<GameOfLifeImages>::default(), ExtractResourcePlugin::<GameOfLifeUniforms>::default(), )); let render_app = app.sub_app_mut(RenderApp); render_app .add_systems(RenderStartup, init_game_of_life_pipeline) .add_systems( Render, prepare_bind_group.in_set(RenderSystems::PrepareBindGroups), ); let mut render_graph = render_app.world_mut().resource_mut::<RenderGraph>(); render_graph.add_node(GameOfLifeLabel, GameOfLifeNode::default()); render_graph.add_node_edge(GameOfLifeLabel, bevy::render::graph::CameraDriverLabel); } } #[derive(Resource, Clone, ExtractResource)] struct GameOfLifeImages { texture_a: Handle<Image>, texture_b: Handle<Image>, } #[derive(Resource, Clone, ExtractResource, ShaderType)] struct GameOfLifeUniforms { alive_color: LinearRgba, } #[derive(Resource)] struct GameOfLifeImageBindGroups([BindGroup; 2]); fn prepare_bind_group( mut commands: Commands, pipeline: Res<GameOfLifePipeline>, gpu_images: Res<RenderAssets<GpuImage>>, game_of_life_images: Res<GameOfLifeImages>, game_of_life_uniforms: Res<GameOfLifeUniforms>, render_device: Res<RenderDevice>, pipeline_cache: Res<PipelineCache>, queue: Res<RenderQueue>, 
) { let view_a = gpu_images.get(&game_of_life_images.texture_a).unwrap(); let view_b = gpu_images.get(&game_of_life_images.texture_b).unwrap(); // Uniform buffer is used here to demonstrate how to set up a uniform in a compute shader // Alternatives such as storage buffers or push constants may be more suitable for your use case let mut uniform_buffer = UniformBuffer::from(game_of_life_uniforms.into_inner()); uniform_buffer.write_buffer(&render_device, &queue); let bind_group_0 = render_device.create_bind_group( None, &pipeline_cache.get_bind_group_layout(&pipeline.texture_bind_group_layout), &BindGroupEntries::sequential(( &view_a.texture_view, &view_b.texture_view, &uniform_buffer, )), ); let bind_group_1 = render_device.create_bind_group( None, &pipeline_cache.get_bind_group_layout(&pipeline.texture_bind_group_layout), &BindGroupEntries::sequential(( &view_b.texture_view, &view_a.texture_view, &uniform_buffer, )), ); commands.insert_resource(GameOfLifeImageBindGroups([bind_group_0, bind_group_1])); } #[derive(Resource)] struct GameOfLifePipeline { texture_bind_group_layout: BindGroupLayoutDescriptor, init_pipeline: CachedComputePipelineId, update_pipeline: CachedComputePipelineId, } fn init_game_of_life_pipeline( mut commands: Commands, asset_server: Res<AssetServer>, pipeline_cache: Res<PipelineCache>, ) { let texture_bind_group_layout = BindGroupLayoutDescriptor::new( "GameOfLifeImages", &BindGroupLayoutEntries::sequential( ShaderStages::COMPUTE, ( texture_storage_2d(TextureFormat::Rgba32Float, StorageTextureAccess::ReadOnly), texture_storage_2d(TextureFormat::Rgba32Float, StorageTextureAccess::WriteOnly), uniform_buffer::<GameOfLifeUniforms>(false), ), ), ); let shader = asset_server.load(SHADER_ASSET_PATH); let init_pipeline = pipeline_cache.queue_compute_pipeline(ComputePipelineDescriptor { layout: vec![texture_bind_group_layout.clone()], shader: shader.clone(), entry_point: Some(Cow::from("init")), ..default() }); let update_pipeline = 
pipeline_cache.queue_compute_pipeline(ComputePipelineDescriptor { layout: vec![texture_bind_group_layout.clone()], shader, entry_point: Some(Cow::from("update")), ..default() }); commands.insert_resource(GameOfLifePipeline { texture_bind_group_layout, init_pipeline, update_pipeline, }); } enum GameOfLifeState { Loading, Init, Update(usize), } struct GameOfLifeNode { state: GameOfLifeState, } impl Default for GameOfLifeNode { fn default() -> Self { Self { state: GameOfLifeState::Loading, } } } impl render_graph::Node for GameOfLifeNode { fn update(&mut self, world: &mut World) { let pipeline = world.resource::<GameOfLifePipeline>(); let pipeline_cache = world.resource::<PipelineCache>(); // if the corresponding pipeline has loaded, transition to the next stage match self.state { GameOfLifeState::Loading => { match pipeline_cache.get_compute_pipeline_state(pipeline.init_pipeline) { CachedPipelineState::Ok(_) => { self.state = GameOfLifeState::Init; } // If the shader hasn't loaded yet, just wait. 
CachedPipelineState::Err(PipelineCacheError::ShaderNotLoaded(_)) => {} CachedPipelineState::Err(err) => { panic!("Initializing assets/{SHADER_ASSET_PATH}:\n{err}") } _ => {} } } GameOfLifeState::Init => { if let CachedPipelineState::Ok(_) = pipeline_cache.get_compute_pipeline_state(pipeline.update_pipeline) { self.state = GameOfLifeState::Update(1); } } GameOfLifeState::Update(0) => { self.state = GameOfLifeState::Update(1); } GameOfLifeState::Update(1) => { self.state = GameOfLifeState::Update(0); } GameOfLifeState::Update(_) => unreachable!(), } } fn run( &self, _graph: &mut render_graph::RenderGraphContext, render_context: &mut RenderContext, world: &World, ) -> Result<(), render_graph::NodeRunError> { let bind_groups = &world.resource::<GameOfLifeImageBindGroups>().0; let pipeline_cache = world.resource::<PipelineCache>(); let pipeline = world.resource::<GameOfLifePipeline>(); let mut pass = render_context .command_encoder() .begin_compute_pass(&ComputePassDescriptor::default()); // select the pipeline based on the current state match self.state { GameOfLifeState::Loading => {} GameOfLifeState::Init => { let init_pipeline = pipeline_cache .get_compute_pipeline(pipeline.init_pipeline) .unwrap(); pass.set_bind_group(0, &bind_groups[0], &[]); pass.set_pipeline(init_pipeline); pass.dispatch_workgroups(SIZE.x / WORKGROUP_SIZE, SIZE.y / WORKGROUP_SIZE, 1); } GameOfLifeState::Update(index) => { let update_pipeline = pipeline_cache .get_compute_pipeline(pipeline.update_pipeline) .unwrap(); pass.set_bind_group(0, &bind_groups[index], &[]); pass.set_pipeline(update_pipeline); pass.dispatch_workgroups(SIZE.x / WORKGROUP_SIZE, SIZE.y / WORKGROUP_SIZE, 1); } } Ok(()) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/fallback_image.rs
examples/shader/fallback_image.rs
//! This example tests that all texture dimensions are supported by //! `FallbackImage`. //! //! When running this example, you should expect to see a window that only draws //! the clear color. The test material does not shade any geometry; this example //! only tests that the images are initialized and bound so that the app does //! not panic. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/fallback_image_test.wgsl"; fn main() { App::new() .add_plugins(( DefaultPlugins, MaterialPlugin::<FallbackTestMaterial>::default(), )) .add_systems(Startup, setup) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<FallbackTestMaterial>>, ) { commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(FallbackTestMaterial { image_1d: None, image_2d: None, image_2d_array: None, image_cube: None, image_cube_array: None, image_3d: None, })), )); commands.spawn(( Camera3d::default(), Transform::from_xyz(5.0, 5.0, 5.0).looking_at(Vec3::new(1.5, 0.0, 0.0), Vec3::Y), )); } #[derive(AsBindGroup, Debug, Clone, Asset, TypePath)] struct FallbackTestMaterial { #[texture(0, dimension = "1d")] #[sampler(1)] image_1d: Option<Handle<Image>>, #[texture(2, dimension = "2d")] #[sampler(3)] image_2d: Option<Handle<Image>>, #[texture(4, dimension = "2d_array")] #[sampler(5)] image_2d_array: Option<Handle<Image>>, #[texture(6, dimension = "cube")] #[sampler(7)] image_cube: Option<Handle<Image>>, #[texture(8, dimension = "cube_array")] #[sampler(9)] image_cube_array: Option<Handle<Image>>, #[texture(10, dimension = "3d")] #[sampler(11)] image_3d: Option<Handle<Image>>, } impl Material for FallbackTestMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material.rs
examples/shader/shader_material.rs
//! A shader and a material that uses it. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/custom_material.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, asset_server: Res<AssetServer>, ) { // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::BLUE, color_texture: Some(asset_server.load("branding/icon.png")), alpha_mode: AlphaMode::Blend, })), Transform::from_xyz(0.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This struct defines the data that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[uniform(0)] color: LinearRgba, #[texture(1)] #[sampler(2)] color_texture: Option<Handle<Image>>, alpha_mode: AlphaMode, } /// The Material trait is very configurable, but comes with sensible defaults for all methods. /// You only need to implement functions for features that need non-default behavior. See the Material api docs for details! impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn alpha_mode(&self) -> AlphaMode { self.alpha_mode } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/animate_shader.rs
examples/shader/animate_shader.rs
//! A shader that uses dynamic data like the time since startup. //! The time data is in the globals binding which is part of the `mesh_view_bindings` shader import. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/animate_shader.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial {})), Transform::from_xyz(0.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial {} impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/storage_buffer.rs
examples/shader/storage_buffer.rs
//! This example demonstrates how to use a storage buffer with `AsBindGroup` in a custom material. use bevy::{ mesh::MeshTag, prelude::*, reflect::TypePath, render::{render_resource::AsBindGroup, storage::ShaderStorageBuffer}, shader::ShaderRef, }; const SHADER_ASSET_PATH: &str = "shaders/storage_buffer.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .add_systems(Update, update) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut buffers: ResMut<Assets<ShaderStorageBuffer>>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // Example data for the storage buffer let color_data: Vec<[f32; 4]> = vec![ [1.0, 0.0, 0.0, 1.0], [0.0, 1.0, 0.0, 1.0], [0.0, 0.0, 1.0, 1.0], [1.0, 1.0, 0.0, 1.0], [0.0, 1.0, 1.0, 1.0], ]; let colors = buffers.add(ShaderStorageBuffer::from(color_data)); let mesh_handle = meshes.add(Cuboid::from_size(Vec3::splat(0.3))); // Create the custom material with the storage buffer let material_handle = materials.add(CustomMaterial { colors: colors.clone(), }); commands.insert_resource(CustomMaterialHandle(material_handle.clone())); // Spawn cubes with the custom material let mut current_color_id: u32 = 0; for i in -6..=6 { for j in -3..=3 { commands.spawn(( Mesh3d(mesh_handle.clone()), MeshMaterial3d(material_handle.clone()), MeshTag(current_color_id % 5), Transform::from_xyz(i as f32, j as f32, 0.0), )); current_color_id += 1; } } // Camera commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 0.0, 10.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // Update the material color by time fn update( time: Res<Time>, material_handles: Res<CustomMaterialHandle>, mut materials: ResMut<Assets<CustomMaterial>>, mut buffers: ResMut<Assets<ShaderStorageBuffer>>, ) { let material = materials.get_mut(&material_handles.0).unwrap(); let buffer = buffers.get_mut(&material.colors).unwrap(); buffer.set_data( (0..5) 
.map(|i| { let t = time.elapsed_secs() * 5.0; [ ops::sin(t + i as f32) / 2.0 + 0.5, ops::sin(t + i as f32 + 2.0) / 2.0 + 0.5, ops::sin(t + i as f32 + 4.0) / 2.0 + 0.5, 1.0, ] }) .collect::<Vec<[f32; 4]>>(), ); } // Holds handles to the custom materials #[derive(Resource)] struct CustomMaterialHandle(Handle<CustomMaterial>); // This struct defines the data that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[storage(0, read_only)] colors: Handle<ShaderStorageBuffer>, } impl Material for CustomMaterial { fn vertex_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_prepass.rs
examples/shader/shader_prepass.rs
//! Bevy has an optional prepass that is controlled per-material. A prepass is a rendering pass that runs before the main pass. //! It will optionally generate various view textures. Currently it supports depth, normal, and motion vector textures. //! The textures are not generated for any material using alpha blending. use bevy::{ core_pipeline::prepass::{DepthPrepass, MotionVectorPrepass, NormalPrepass}, light::NotShadowCaster, pbr::PbrPlugin, prelude::*, reflect::TypePath, render::render_resource::{AsBindGroup, ShaderType}, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const PREPASS_SHADER_ASSET_PATH: &str = "shaders/show_prepass.wgsl"; const MATERIAL_SHADER_ASSET_PATH: &str = "shaders/custom_material.wgsl"; fn main() { App::new() .add_plugins(( DefaultPlugins.set(PbrPlugin { // The prepass is enabled by default on the StandardMaterial, // but you can disable it if you need to. // // prepass_enabled: false, ..default() }), MaterialPlugin::<CustomMaterial>::default(), MaterialPlugin::<PrepassOutputMaterial>::default(), )) .add_systems(Startup, setup) .add_systems(Update, (rotate, toggle_prepass_view)) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, mut std_materials: ResMut<Assets<StandardMaterial>>, mut depth_materials: ResMut<Assets<PrepassOutputMaterial>>, asset_server: Res<AssetServer>, ) { // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 3., 5.0).looking_at(Vec3::ZERO, Vec3::Y), // Disabling MSAA for maximum compatibility. 
Shader prepass with MSAA needs GPU capability MULTISAMPLED_SHADING Msaa::Off, // To enable the prepass you need to add the components associated with the ones you need // This will write the depth buffer to a texture that you can use in the main pass DepthPrepass, // This will generate a texture containing world normals (with normal maps applied) NormalPrepass, // This will generate a texture containing screen space pixel motion vectors MotionVectorPrepass, )); // plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(std_materials.add(Color::srgb(0.3, 0.5, 0.3))), )); // A quad that shows the outputs of the prepass // To make it easy, we just draw a big quad right in front of the camera. // For a real application, this isn't ideal. commands.spawn(( Mesh3d(meshes.add(Rectangle::new(20.0, 20.0))), MeshMaterial3d(depth_materials.add(PrepassOutputMaterial { settings: ShowPrepassSettings::default(), })), Transform::from_xyz(-0.75, 1.25, 3.0).looking_at(Vec3::new(2.0, -2.5, -5.0), Vec3::Y), NotShadowCaster, )); // Opaque cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::WHITE, color_texture: Some(asset_server.load("branding/icon.png")), alpha_mode: AlphaMode::Opaque, })), Transform::from_xyz(-1.0, 0.5, 0.0), Rotates, )); // Cube with alpha mask commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(std_materials.add(StandardMaterial { alpha_mode: AlphaMode::Mask(1.0), base_color_texture: Some(asset_server.load("branding/icon.png")), ..default() })), Transform::from_xyz(0.0, 0.5, 0.0), )); // Cube with alpha blending. 
// Transparent materials are ignored by the prepass commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::WHITE, color_texture: Some(asset_server.load("branding/icon.png")), alpha_mode: AlphaMode::Blend, })), Transform::from_xyz(1.0, 0.5, 0.0), )); // light commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, children![ TextSpan::new("Prepass Output: transparent\n"), TextSpan::new("\n\n"), TextSpan::new("Controls\n"), TextSpan::new("---------------\n"), TextSpan::new("Space - Change output\n"), ], )); } // This is the struct that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[uniform(0)] color: LinearRgba, #[texture(1)] #[sampler(2)] color_texture: Option<Handle<Image>>, alpha_mode: AlphaMode, } /// Not shown in this example, but if you need to specialize your material, the specialize /// function will also be used by the prepass impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { MATERIAL_SHADER_ASSET_PATH.into() } fn alpha_mode(&self) -> AlphaMode { self.alpha_mode } // You can override the default shaders used in the prepass if your material does // anything not supported by the default prepass // fn prepass_fragment_shader() -> ShaderRef { // "shaders/custom_material.wgsl".into() // } } #[derive(Component)] struct Rotates; fn rotate(mut q: Query<&mut Transform, With<Rotates>>, time: Res<Time>) { for mut t in q.iter_mut() { let rot = (ops::sin(time.elapsed_secs()) * 0.5 + 0.5) * std::f32::consts::PI * 2.0; t.rotation = Quat::from_rotation_z(rot); } } #[derive(Debug, Clone, Default, ShaderType)] struct ShowPrepassSettings { show_depth: u32, show_normals: u32, show_motion_vectors: u32, padding_1: u32, padding_2: u32, } // This shader simply 
loads the prepass texture and outputs it directly #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct PrepassOutputMaterial { #[uniform(0)] settings: ShowPrepassSettings, } impl Material for PrepassOutputMaterial { fn fragment_shader() -> ShaderRef { PREPASS_SHADER_ASSET_PATH.into() } // This needs to be transparent in order to show the scene behind the mesh fn alpha_mode(&self) -> AlphaMode { AlphaMode::Blend } fn enable_prepass() -> bool { false } } /// Every time you press space, it will cycle between transparent, depth and normals view fn toggle_prepass_view( mut prepass_view: Local<u32>, keycode: Res<ButtonInput<KeyCode>>, material_handle: Single<&MeshMaterial3d<PrepassOutputMaterial>>, mut materials: ResMut<Assets<PrepassOutputMaterial>>, text: Single<Entity, With<Text>>, mut writer: TextUiWriter, ) { if keycode.just_pressed(KeyCode::Space) { *prepass_view = (*prepass_view + 1) % 4; let label = match *prepass_view { 0 => "transparent", 1 => "depth", 2 => "normals", 3 => "motion vectors", _ => unreachable!(), }; let text = *text; *writer.text(text, 1) = format!("Prepass Output: {label}\n"); writer.for_each_color(text, |mut color| { color.0 = Color::WHITE; }); let mat = materials.get_mut(*material_handle).unwrap(); mat.settings.show_depth = (*prepass_view == 1) as u32; mat.settings.show_normals = (*prepass_view == 2) as u32; mat.settings.show_motion_vectors = (*prepass_view == 3) as u32; } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material_screenspace_texture.rs
examples/shader/shader_material_screenspace_texture.rs
//! A shader that samples a texture with view-independent UV coordinates. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/custom_material_screenspace_texture.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .add_systems(Update, rotate_camera) .run(); } #[derive(Component)] struct MainCamera; fn setup( mut commands: Commands, asset_server: Res<AssetServer>, mut meshes: ResMut<Assets<Mesh>>, mut custom_materials: ResMut<Assets<CustomMaterial>>, mut standard_materials: ResMut<Assets<StandardMaterial>>, ) { commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(standard_materials.add(Color::srgb(0.3, 0.5, 0.3))), )); commands.spawn((PointLight::default(), Transform::from_xyz(4.0, 8.0, 4.0))); commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(custom_materials.add(CustomMaterial { texture: asset_server.load( "models/FlightHelmet/FlightHelmet_Materials_LensesMat_OcclusionRoughMetal.png", ), })), Transform::from_xyz(0.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(4.0, 2.5, 4.0).looking_at(Vec3::ZERO, Vec3::Y), MainCamera, )); } fn rotate_camera(mut cam_transform: Single<&mut Transform, With<MainCamera>>, time: Res<Time>) { cam_transform.rotate_around( Vec3::ZERO, Quat::from_axis_angle(Vec3::Y, 45f32.to_radians() * time.delta_secs()), ); cam_transform.look_at(Vec3::ZERO, Vec3::Y); } #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[texture(0)] #[sampler(1)] texture: Handle<Image>, } impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/gpu_readback.rs
examples/shader/gpu_readback.rs
//! Simple example demonstrating the use of the [`Readback`] component to read back data from the GPU //! using both a storage buffer and texture. use bevy::{ asset::RenderAssetUsages, prelude::*, render::{ extract_resource::{ExtractResource, ExtractResourcePlugin}, gpu_readback::{Readback, ReadbackComplete}, render_asset::RenderAssets, render_graph::{self, RenderGraph, RenderLabel}, render_resource::{ binding_types::{storage_buffer, texture_storage_2d}, *, }, renderer::{RenderContext, RenderDevice}, storage::{GpuShaderStorageBuffer, ShaderStorageBuffer}, texture::GpuImage, Render, RenderApp, RenderStartup, RenderSystems, }, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/gpu_readback.wgsl"; // The length of the buffer sent to the gpu const BUFFER_LEN: usize = 16; fn main() { App::new() .add_plugins(( DefaultPlugins, GpuReadbackPlugin, ExtractResourcePlugin::<ReadbackBuffer>::default(), ExtractResourcePlugin::<ReadbackImage>::default(), )) .insert_resource(ClearColor(Color::BLACK)) .add_systems(Startup, setup) .run(); } // We need a plugin to organize all the systems and render node required for this example struct GpuReadbackPlugin; impl Plugin for GpuReadbackPlugin { fn build(&self, app: &mut App) { let Some(render_app) = app.get_sub_app_mut(RenderApp) else { return; }; render_app .add_systems( RenderStartup, (init_compute_pipeline, add_compute_render_graph_node), ) .add_systems( Render, prepare_bind_group .in_set(RenderSystems::PrepareBindGroups) // We don't need to recreate the bind group every frame .run_if(not(resource_exists::<GpuBufferBindGroup>)), ); } } #[derive(Resource, ExtractResource, Clone)] struct ReadbackBuffer(Handle<ShaderStorageBuffer>); #[derive(Resource, ExtractResource, Clone)] struct ReadbackImage(Handle<Image>); fn setup( mut commands: Commands, mut images: ResMut<Assets<Image>>, mut buffers: ResMut<Assets<ShaderStorageBuffer>>, ) { // Create a storage buffer with some data 
let buffer: Vec<u32> = (0..BUFFER_LEN as u32).collect(); let mut buffer = ShaderStorageBuffer::from(buffer); // We need to enable the COPY_SRC usage so we can copy the buffer to the cpu buffer.buffer_description.usage |= BufferUsages::COPY_SRC; let buffer = buffers.add(buffer); // Create a storage texture with some data let size = Extent3d { width: BUFFER_LEN as u32, height: 1, ..default() }; // We create an uninitialized image since this texture will only be used for getting data out // of the compute shader, not getting data in, so there's no reason for it to exist on the CPU let mut image = Image::new_uninit( size, TextureDimension::D2, TextureFormat::R32Uint, RenderAssetUsages::RENDER_WORLD, ); // We also need to enable the COPY_SRC, as well as STORAGE_BINDING so we can use it in the // compute shader image.texture_descriptor.usage |= TextureUsages::COPY_SRC | TextureUsages::STORAGE_BINDING; let image = images.add(image); // Spawn the readback components. For each frame, the data will be read back from the GPU // asynchronously and trigger the `ReadbackComplete` event on this entity. Despawn the entity // to stop reading back the data. commands .spawn(Readback::buffer(buffer.clone())) .observe(|event: On<ReadbackComplete>| { // This matches the type which was used to create the `ShaderStorageBuffer` above, // and is a convenient way to interpret the data. let data: Vec<u32> = event.to_shader_type(); info!("Buffer {:?}", data); }); // It is also possible to read only a range of the buffer. 
commands .spawn(Readback::buffer_range( buffer.clone(), 4 * u32::SHADER_SIZE.get(), // skip the first four elements 8 * u32::SHADER_SIZE.get(), // read eight elements )) .observe(|event: On<ReadbackComplete>| { let data: Vec<u32> = event.to_shader_type(); info!("Buffer range {:?}", data); }); // This is just a simple way to pass the buffer handle to the render app for our compute node commands.insert_resource(ReadbackBuffer(buffer)); // Textures can also be read back from the GPU. Pay careful attention to the format of the // texture, as it will affect how the data is interpreted. commands .spawn(Readback::texture(image.clone())) .observe(|event: On<ReadbackComplete>| { // You probably want to interpret the data as a color rather than a `ShaderType`, // but in this case we know the data is a single channel storage texture, so we can // interpret it as a `Vec<u32>` let data: Vec<u32> = event.to_shader_type(); info!("Image {:?}", data); }); commands.insert_resource(ReadbackImage(image)); } fn add_compute_render_graph_node(mut render_graph: ResMut<RenderGraph>) { // Add the compute node as a top-level node to the render graph. This means it will only execute // once per frame. Normally, adding a node would use the `RenderGraphApp::add_render_graph_node` // method, but it does not allow adding as a top-level node. 
render_graph.add_node(ComputeNodeLabel, ComputeNode::default()); } #[derive(Resource)] struct GpuBufferBindGroup(BindGroup); fn prepare_bind_group( mut commands: Commands, pipeline: Res<ComputePipeline>, render_device: Res<RenderDevice>, pipeline_cache: Res<PipelineCache>, buffer: Res<ReadbackBuffer>, image: Res<ReadbackImage>, buffers: Res<RenderAssets<GpuShaderStorageBuffer>>, images: Res<RenderAssets<GpuImage>>, ) { let buffer = buffers.get(&buffer.0).unwrap(); let image = images.get(&image.0).unwrap(); let bind_group = render_device.create_bind_group( None, &pipeline_cache.get_bind_group_layout(&pipeline.layout), &BindGroupEntries::sequential(( buffer.buffer.as_entire_buffer_binding(), image.texture_view.into_binding(), )), ); commands.insert_resource(GpuBufferBindGroup(bind_group)); } #[derive(Resource)] struct ComputePipeline { layout: BindGroupLayoutDescriptor, pipeline: CachedComputePipelineId, } fn init_compute_pipeline( mut commands: Commands, asset_server: Res<AssetServer>, pipeline_cache: Res<PipelineCache>, ) { let layout = BindGroupLayoutDescriptor::new( "", &BindGroupLayoutEntries::sequential( ShaderStages::COMPUTE, ( storage_buffer::<Vec<u32>>(false), texture_storage_2d(TextureFormat::R32Uint, StorageTextureAccess::WriteOnly), ), ), ); let shader = asset_server.load(SHADER_ASSET_PATH); let pipeline = pipeline_cache.queue_compute_pipeline(ComputePipelineDescriptor { label: Some("GPU readback compute shader".into()), layout: vec![layout.clone()], shader: shader.clone(), ..default() }); commands.insert_resource(ComputePipeline { layout, pipeline }); } /// Label to identify the node in the render graph #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)] struct ComputeNodeLabel; /// The node that will execute the compute shader #[derive(Default)] struct ComputeNode {} impl render_graph::Node for ComputeNode { fn run( &self, _graph: &mut render_graph::RenderGraphContext, render_context: &mut RenderContext, world: &World, ) -> Result<(), 
render_graph::NodeRunError> { let pipeline_cache = world.resource::<PipelineCache>(); let pipeline = world.resource::<ComputePipeline>(); let bind_group = world.resource::<GpuBufferBindGroup>(); if let Some(init_pipeline) = pipeline_cache.get_compute_pipeline(pipeline.pipeline) { let mut pass = render_context .command_encoder() .begin_compute_pass(&ComputePassDescriptor { label: Some("GPU readback compute pass"), ..default() }); pass.set_bind_group(0, &bind_group.0, &[]); pass.set_pipeline(init_pipeline); pass.dispatch_workgroups(BUFFER_LEN as u32, 1, 1); } Ok(()) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/extended_material.rs
examples/shader/extended_material.rs
//! Demonstrates using a custom extension to the `StandardMaterial` to modify the results of the builtin pbr shader. use bevy::{ color::palettes::basic::RED, pbr::{ExtendedMaterial, MaterialExtension, OpaqueRendererMethod}, prelude::*, render::render_resource::*, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/extended_material.wgsl"; fn main() { App::new() .add_plugins(DefaultPlugins) .add_plugins(MaterialPlugin::< ExtendedMaterial<StandardMaterial, MyExtension>, >::default()) .add_systems(Startup, setup) .add_systems(Update, rotate_things) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, MyExtension>>>, ) { // sphere commands.spawn(( Mesh3d(meshes.add(Sphere::new(1.0))), MeshMaterial3d(materials.add(ExtendedMaterial { base: StandardMaterial { base_color: RED.into(), // can be used in forward or deferred mode opaque_render_method: OpaqueRendererMethod::Auto, // in deferred mode, only the PbrInput can be modified (uvs, color and other material properties), // in forward mode, the output can also be modified after lighting is applied. // see the fragment shader `extended_material.wgsl` for more info. // Note: to run in deferred mode, you must also add a `DeferredPrepass` component to the camera and either // change the above to `OpaqueRendererMethod::Deferred` or add the `DefaultOpaqueRendererMethod` resource. 
..Default::default() }, extension: MyExtension::new(1), })), Transform::from_xyz(0.0, 0.5, 0.0), )); // light commands.spawn(( DirectionalLight::default(), Transform::from_xyz(1.0, 1.0, 1.0).looking_at(Vec3::ZERO, Vec3::Y), Rotate, )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } #[derive(Component)] struct Rotate; fn rotate_things(mut q: Query<&mut Transform, With<Rotate>>, time: Res<Time>) { for mut t in &mut q { t.rotate_y(time.delta_secs()); } } #[derive(Asset, AsBindGroup, Reflect, Debug, Clone, Default)] struct MyExtension { // We need to ensure that the bindings of the base material and the extension do not conflict, // so we start from binding slot 100, leaving slots 0-99 for the base material. #[uniform(100)] quantize_steps: u32, // Web examples WebGL2 support: structs must be 16 byte aligned. #[cfg(feature = "webgl2")] #[uniform(100)] _webgl2_padding_8b: u32, #[cfg(feature = "webgl2")] #[uniform(100)] _webgl2_padding_12b: u32, #[cfg(feature = "webgl2")] #[uniform(100)] _webgl2_padding_16b: u32, } impl MyExtension { fn new(quantize_steps: u32) -> Self { Self { quantize_steps, ..default() } } } impl MaterialExtension for MyExtension { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn deferred_fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/extended_material_bindless.rs
examples/shader/extended_material_bindless.rs
//! Demonstrates bindless `ExtendedMaterial`. use std::f32::consts::FRAC_PI_2; use bevy::{ color::palettes::{css::RED, tailwind::GRAY_600}, mesh::{SphereKind, SphereMeshBuilder}, pbr::{ExtendedMaterial, MaterialExtension, MeshMaterial3d}, prelude::*, render::render_resource::{AsBindGroup, ShaderType}, shader::ShaderRef, utils::default, }; /// The path to the example material shader. static SHADER_ASSET_PATH: &str = "shaders/extended_material_bindless.wgsl"; /// The example bindless material extension. /// /// As usual for material extensions, we need to avoid conflicting with both the /// binding numbers and bindless indices of the [`StandardMaterial`], so we /// start both values at 100 and 50 respectively. /// /// The `#[data(50, ExampleBindlessExtensionUniform, binding_array(101))]` /// attribute specifies that the plain old data /// [`ExampleBindlessExtensionUniform`] will be placed into an array with /// binding 100 and will occupy index 50 in the /// `ExampleBindlessExtendedMaterialIndices` structure. (See the shader for the /// definition of that structure.) That corresponds to the following shader /// declaration: /// /// ```wgsl /// @group(2) @binding(100) var<storage> example_extended_material_indices: /// array<ExampleBindlessExtendedMaterialIndices>; /// ``` /// /// The `#[bindless(index_table(range(50..53), binding(100)))]` attribute /// specifies that this material extension should be bindless. The `range` /// subattribute specifies that this material extension should have its own /// index table covering bindings 50, 51, and 52. The `binding` subattribute /// specifies that the extended material index table should be bound to binding /// 100. 
This corresponds to the following shader declarations: /// /// ```wgsl /// struct ExampleBindlessExtendedMaterialIndices { /// material: u32, // 50 /// modulate_texture: u32, // 51 /// modulate_texture_sampler: u32, // 52 /// } /// /// @group(2) @binding(100) var<storage> example_extended_material_indices: /// array<ExampleBindlessExtendedMaterialIndices>; /// ``` /// /// We need to use the `index_table` subattribute because the /// [`StandardMaterial`] bindless index table is bound to binding 0 by default. /// Thus we need to specify a different binding so that our extended bindless /// index table doesn't conflict. #[derive(Asset, Clone, Reflect, AsBindGroup)] #[data(50, ExampleBindlessExtensionUniform, binding_array(101))] #[bindless(index_table(range(50..53), binding(100)))] struct ExampleBindlessExtension { /// The color we're going to multiply the base color with. modulate_color: Color, /// The image we're going to multiply the base color with. #[texture(51)] #[sampler(52)] modulate_texture: Option<Handle<Image>>, } /// The GPU-side data structure specifying plain old data for the material /// extension. #[derive(Clone, Default, ShaderType)] struct ExampleBindlessExtensionUniform { /// The GPU representation of the color we're going to multiply the base /// color with. modulate_color: Vec4, } impl MaterialExtension for ExampleBindlessExtension { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } } impl<'a> From<&'a ExampleBindlessExtension> for ExampleBindlessExtensionUniform { fn from(material_extension: &'a ExampleBindlessExtension) -> Self { // Convert the CPU `ExampleBindlessExtension` structure to its GPU // format. ExampleBindlessExtensionUniform { modulate_color: LinearRgba::from(material_extension.modulate_color).to_vec4(), } } } /// The entry point. 
fn main() { App::new() .add_plugins(DefaultPlugins) .add_plugins(MaterialPlugin::< ExtendedMaterial<StandardMaterial, ExampleBindlessExtension>, >::default()) .add_systems(Startup, setup) .add_systems(Update, rotate_sphere) .run(); } /// Creates the scene. fn setup( mut commands: Commands, asset_server: Res<AssetServer>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ExtendedMaterial<StandardMaterial, ExampleBindlessExtension>>>, ) { // Create a gray sphere, modulated with a red-tinted Bevy logo. commands.spawn(( Mesh3d(meshes.add(SphereMeshBuilder::new( 1.0, SphereKind::Uv { sectors: 20, stacks: 20, }, ))), MeshMaterial3d(materials.add(ExtendedMaterial { base: StandardMaterial { base_color: GRAY_600.into(), ..default() }, extension: ExampleBindlessExtension { modulate_color: RED.into(), modulate_texture: Some(asset_server.load("textures/uv_checker_bw.png")), }, })), Transform::from_xyz(0.0, 0.5, 0.0), )); // Create a light. commands.spawn(( DirectionalLight::default(), Transform::from_xyz(1.0, 1.0, 1.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Create a camera. commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } fn rotate_sphere(mut meshes: Query<&mut Transform, With<Mesh3d>>, time: Res<Time>) { for mut transform in &mut meshes { transform.rotation = Quat::from_euler(EulerRot::YXZ, -time.elapsed_secs(), FRAC_PI_2 * 3.0, 0.0); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material_glsl.rs
examples/shader/shader_material_glsl.rs
//! A shader that uses the GLSL shading language. use bevy::{ prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses shader source files from the assets subdirectory const VERTEX_SHADER_ASSET_PATH: &str = "shaders/custom_material.vert"; const FRAGMENT_SHADER_ASSET_PATH: &str = "shaders/custom_material.frag"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, asset_server: Res<AssetServer>, ) { // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::BLUE, color_texture: Some(asset_server.load("branding/icon.png")), alpha_mode: AlphaMode::Blend, })), Transform::from_xyz(0.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // This is the struct that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Clone)] struct CustomMaterial { #[uniform(0)] color: LinearRgba, #[texture(1)] #[sampler(2)] color_texture: Option<Handle<Image>>, alpha_mode: AlphaMode, } /// The Material trait is very configurable, but comes with sensible defaults for all methods. /// You only need to implement functions for features that need non-default behavior. See the Material api docs for details! /// When using the GLSL shading language for your shader, the specialize method must be overridden. impl Material for CustomMaterial { fn vertex_shader() -> ShaderRef { VERTEX_SHADER_ASSET_PATH.into() } fn fragment_shader() -> ShaderRef { FRAGMENT_SHADER_ASSET_PATH.into() } fn alpha_mode(&self) -> AlphaMode { self.alpha_mode } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/array_texture.rs
examples/shader/array_texture.rs
//! This example illustrates how to create a texture for use with a //! `texture_2d_array<f32>` shader uniform variable and then how to sample from //! that texture in the shader by using a `MeshTag` component on the mesh //! entity. use bevy::{ image::{ImageArrayLayout, ImageLoaderSettings}, mesh::MeshTag, prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory. const SHADER_ASSET_PATH: &str = "shaders/array_texture.wgsl"; /// Corresponds to the number of layers in the array texture. const TEXTURE_COUNT: u32 = 4; fn main() { App::new() .add_plugins(( DefaultPlugins, MaterialPlugin::<ArrayTextureMaterial>::default(), )) .add_systems(Startup, setup) .add_systems(Update, update_mesh_tags) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ArrayTextureMaterial>>, asset_server: Res<AssetServer>, ) { // Load the texture. let array_texture = asset_server.load_with_settings( "textures/array_texture.png", |settings: &mut ImageLoaderSettings| { settings.array_layout = Some(ImageArrayLayout::RowCount { rows: TEXTURE_COUNT, }); }, ); // light commands.spawn(( DirectionalLight::default(), Transform::from_xyz(3.0, 2.0, 1.0).looking_at(Vec3::ZERO, Vec3::Y), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(5.0, 5.0, 5.0).looking_at(Vec3::new(1.5, 0.0, 0.0), Vec3::Y), )); // Spawn some cubes using the array texture. let mesh_handle = meshes.add(Cuboid::default()); let material_handle = materials.add(ArrayTextureMaterial { array_texture }); for x in -5..=5 { commands.spawn(( Mesh3d(mesh_handle.clone()), MeshMaterial3d(material_handle.clone()), // Pass a different mesh tag to allow selecting different layers of // the array texture in the shader. 
MeshTag(x as u32 % TEXTURE_COUNT), Transform::from_xyz(x as f32 + 0.5, 0.0, 0.0), )); } } fn update_mesh_tags(time: Res<Time>, mut query: Query<&mut MeshTag>, mut timer: Local<Timer>) { // Initialize the timer on the first run. if timer.duration().is_zero() { *timer = Timer::from_seconds(1.5, TimerMode::Repeating); } timer.tick(time.delta()); if timer.just_finished() { for mut tag in query.iter_mut() { // Cycle through the texture layers to demonstrate that we can // select different layers of the array texture at runtime. tag.0 = (tag.0 + 1) % TEXTURE_COUNT; } } } #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct ArrayTextureMaterial { #[texture(0, dimension = "2d_array")] #[sampler(1)] array_texture: Handle<Image>, } impl Material for ArrayTextureMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_defs.rs
examples/shader/shader_defs.rs
//! A shader that uses "shaders defs", which selectively toggle parts of a shader. use bevy::{ mesh::MeshVertexBufferLayoutRef, pbr::{MaterialPipeline, MaterialPipelineKey}, prelude::*, reflect::TypePath, render::render_resource::{ AsBindGroup, RenderPipelineDescriptor, SpecializedMeshPipelineError, }, shader::ShaderRef, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/shader_defs.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .run(); } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // blue cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::BLUE, is_red: false, })), Transform::from_xyz(-1.0, 0.5, 0.0), )); // red cube (with green color overridden by the IS_RED "shader def") commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { color: LinearRgba::GREEN, is_red: true, })), Transform::from_xyz(1.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn specialize( _pipeline: &MaterialPipeline, descriptor: &mut RenderPipelineDescriptor, _layout: &MeshVertexBufferLayoutRef, key: MaterialPipelineKey<Self>, ) -> Result<(), SpecializedMeshPipelineError> { if key.bind_group_data.is_red { let fragment = descriptor.fragment.as_mut().unwrap(); fragment.shader_defs.push("IS_RED".into()); } Ok(()) } } // This is the struct that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] #[bind_group_data(CustomMaterialKey)] struct CustomMaterial { #[uniform(0)] color: LinearRgba, is_red: bool, } // This 
key is used to identify a specific permutation of this material pipeline. // In this case, we specialize on whether or not to configure the "IS_RED" shader def. // Specialization keys should be kept as small / cheap to hash as possible, // as they will be used to look up the pipeline for each drawn entity with this material type, #[repr(C)] #[derive(Eq, PartialEq, Hash, Copy, Clone)] struct CustomMaterialKey { is_red: bool, } impl From<&CustomMaterial> for CustomMaterialKey { fn from(material: &CustomMaterial) -> Self { Self { is_red: material.is_red, } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material_bindless.rs
examples/shader/shader_material_bindless.rs
//! A material that uses bindless textures. use bevy::prelude::*; use bevy::render::render_resource::{AsBindGroup, ShaderType}; use bevy::shader::ShaderRef; const SHADER_ASSET_PATH: &str = "shaders/bindless_material.wgsl"; // `#[bindless(limit(4))]` indicates that we want Bevy to group materials into // bind groups of at most 4 materials each. // Note that we use the structure-level `#[uniform]` attribute to supply // ordinary data to the shader. #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] #[uniform(0, BindlessMaterialUniform, binding_array(10))] #[bindless(limit(4))] struct BindlessMaterial { color: LinearRgba, // This will be exposed to the shader as a binding array of 4 textures and a // binding array of 4 samplers. #[texture(1)] #[sampler(2)] color_texture: Option<Handle<Image>>, } // This buffer will be presented to the shader as `@binding(10)`. #[derive(ShaderType)] struct BindlessMaterialUniform { color: LinearRgba, } impl<'a> From<&'a BindlessMaterial> for BindlessMaterialUniform { fn from(material: &'a BindlessMaterial) -> Self { BindlessMaterialUniform { color: material.color, } } } // The entry point. fn main() { App::new() .add_plugins(( DefaultPlugins, MaterialPlugin::<BindlessMaterial>::default(), )) .add_systems(Startup, setup) .run(); } // Creates a simple scene. fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<BindlessMaterial>>, asset_server: Res<AssetServer>, ) { // Add a cube with a blue tinted texture. commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(BindlessMaterial { color: LinearRgba::BLUE, color_texture: Some(asset_server.load("branding/bevy_logo_dark.png")), })), Transform::from_xyz(-2.0, 0.5, 0.0), )); // Add a cylinder with a red tinted texture. 
commands.spawn(( Mesh3d(meshes.add(Cylinder::default())), MeshMaterial3d(materials.add(BindlessMaterial { color: LinearRgba::RED, color_texture: Some(asset_server.load("branding/bevy_logo_light.png")), })), Transform::from_xyz(2.0, 0.5, 0.0), )); // Add a camera. commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } impl Material for BindlessMaterial { fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/automatic_instancing.rs
examples/shader/automatic_instancing.rs
//! Shows that multiple instances of a cube are automatically instanced in one draw call //! Try running this example in a graphics profiler and all the cubes should be only a single draw call. //! Also demonstrates how to use `MeshTag` to use external data in a custom material. use bevy::{ mesh::MeshTag, prelude::*, reflect::TypePath, render::render_resource::AsBindGroup, shader::ShaderRef, }; const SHADER_ASSET_PATH: &str = "shaders/automatic_instancing.wgsl"; fn main() { App::new() .add_plugins((DefaultPlugins, MaterialPlugin::<CustomMaterial>::default())) .add_systems(Startup, setup) .add_systems(Update, update) .run(); } /// Sets up an instanced grid of cubes, where each cube is colored based on an image that is /// sampled in the vertex shader. The cubes are then animated in a spiral pattern. /// /// This example demonstrates one use of automatic instancing and how to use `MeshTag` to use /// external data in a custom material. For example, here we use the "index" of each cube to /// determine the texel coordinate to sample from the image in the shader. fn setup( mut commands: Commands, assets: Res<AssetServer>, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // We will use this image as our external data for our material to sample from in the vertex shader let image = assets.load("branding/icon.png"); // Our single mesh handle that will be instanced let mesh_handle = meshes.add(Cuboid::from_size(Vec3::splat(0.01))); // Create the custom material with a reference to our texture // Automatic instancing works with any Material, including the `StandardMaterial`. // This custom material is used to demonstrate the optional `MeshTag` feature. 
let material_handle = materials.add(CustomMaterial { image: image.clone(), }); // We're hardcoding the image dimensions for simplicity let image_dims = UVec2::new(256, 256); let total_pixels = image_dims.x * image_dims.y; for index in 0..total_pixels { // Get x,y from index - x goes left to right, y goes top to bottom let x = index % image_dims.x; let y = index / image_dims.x; // Convert to centered world coordinates let world_x = (x as f32 - image_dims.x as f32 / 2.0) / 50.0; let world_y = -((y as f32 - image_dims.y as f32 / 2.0) / 50.0); // Still need negative for world space commands.spawn(( // For automatic instancing to take effect you need to // use the same mesh handle and material handle for each instance Mesh3d(mesh_handle.clone()), MeshMaterial3d(material_handle.clone()), // This is an optional component that can be used to help tie external data to a mesh instance MeshTag(index), Transform::from_xyz(world_x, world_y, 0.0), )); } // Camera commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 0.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } // Animate the transform fn update(time: Res<Time>, mut transforms: Query<(&mut Transform, &MeshTag)>) { for (mut transform, index) in transforms.iter_mut() { // Animate the z position based on time using the index to create a spiral transform.translation.z = ops::sin(time.elapsed_secs() + index.0 as f32 * 0.01); } } // This struct defines the data that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial { #[texture(0)] #[sampler(1)] image: Handle<Image>, } impl Material for CustomMaterial { fn vertex_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/shader/shader_material_wesl.rs
examples/shader/shader_material_wesl.rs
//! A shader that uses the WESL shading language. use bevy::{ mesh::MeshVertexBufferLayoutRef, pbr::{MaterialPipeline, MaterialPipelineKey}, prelude::*, reflect::TypePath, render::render_resource::{ AsBindGroup, RenderPipelineDescriptor, SpecializedMeshPipelineError, }, shader::{ShaderDefVal, ShaderRef}, }; /// This example uses shader source files from the assets subdirectory const FRAGMENT_SHADER_ASSET_PATH: &str = "shaders/custom_material.wesl"; fn main() { App::new() .add_plugins(( DefaultPlugins, MaterialPlugin::<CustomMaterial>::default(), CustomMaterialPlugin, )) .add_systems(Startup, setup) .add_systems(Update, update) .run(); } /// A plugin that loads the custom material shader pub struct CustomMaterialPlugin; /// An example utility shader that is used by the custom material #[expect( dead_code, reason = "used to kept a strong handle, shader is referenced by the material" )] #[derive(Resource)] struct UtilityShader(Handle<Shader>); impl Plugin for CustomMaterialPlugin { fn build(&self, app: &mut App) { let handle = app .world_mut() .resource_mut::<AssetServer>() .load::<Shader>("shaders/util.wesl"); app.insert_resource(UtilityShader(handle)); } } /// set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // cube commands.spawn(( Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(CustomMaterial { time: Vec4::ZERO, party_mode: false, })), Transform::from_xyz(0.0, 0.5, 0.0), )); // camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); } fn update( time: Res<Time>, mut query: Query<(&MeshMaterial3d<CustomMaterial>, &mut Transform)>, mut materials: ResMut<Assets<CustomMaterial>>, keys: Res<ButtonInput<KeyCode>>, ) { for (material, mut transform) in query.iter_mut() { let material = materials.get_mut(material).unwrap(); material.time.x = time.elapsed_secs(); if keys.just_pressed(KeyCode::Space) 
{ material.party_mode = !material.party_mode; } if material.party_mode { transform.rotate(Quat::from_rotation_y(0.005)); } } } // This is the struct that will be passed to your shader #[derive(Asset, TypePath, AsBindGroup, Clone)] #[bind_group_data(CustomMaterialKey)] struct CustomMaterial { // Needed for 16 bit alignment in WebGL2 #[uniform(0)] time: Vec4, party_mode: bool, } #[repr(C)] #[derive(Eq, PartialEq, Hash, Copy, Clone)] struct CustomMaterialKey { party_mode: bool, } impl From<&CustomMaterial> for CustomMaterialKey { fn from(material: &CustomMaterial) -> Self { Self { party_mode: material.party_mode, } } } impl Material for CustomMaterial { fn fragment_shader() -> ShaderRef { FRAGMENT_SHADER_ASSET_PATH.into() } fn specialize( _pipeline: &MaterialPipeline, descriptor: &mut RenderPipelineDescriptor, _layout: &MeshVertexBufferLayoutRef, key: MaterialPipelineKey<Self>, ) -> Result<(), SpecializedMeshPipelineError> { let fragment = descriptor.fragment.as_mut().unwrap(); fragment.shader_defs.push(ShaderDefVal::Bool( "PARTY_MODE".to_string(), key.bind_group_data.party_mode, )); Ok(()) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/update_gltf_scene.rs
examples/gltf/update_gltf_scene.rs
//! Update a scene from a glTF file, either by spawning the scene as a child of another entity, //! or by accessing the entities of the scene. use bevy::{light::DirectionalLightShadowMap, prelude::*}; fn main() { App::new() .insert_resource(DirectionalLightShadowMap { size: 4096 }) .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, move_scene_entities) .run(); } #[derive(Component)] struct MovedScene; fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( Transform::from_xyz(4.0, 25.0, 8.0).looking_at(Vec3::ZERO, Vec3::Y), DirectionalLight { shadows_enabled: true, ..default() }, )); commands.spawn(( Camera3d::default(), Transform::from_xyz(-0.5, 0.9, 1.5).looking_at(Vec3::new(-0.5, 0.3, 0.0), Vec3::Y), EnvironmentMapLight { diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), intensity: 150.0, ..default() }, )); // Spawn the scene as a child of this entity at the given transform commands.spawn(( Transform::from_xyz(-1.0, 0.0, 0.0), SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")), ), )); // Spawn a second scene, and add a tag component to be able to target it later commands.spawn(( SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")), ), MovedScene, )); } // This system will move all entities that are descendants of MovedScene (which will be all entities spawned in the scene) fn move_scene_entities( time: Res<Time>, moved_scene: Query<Entity, With<MovedScene>>, children: Query<&Children>, mut transforms: Query<&mut Transform>, ) { for moved_scene_entity in &moved_scene { let mut offset = 0.; for entity in children.iter_descendants(moved_scene_entity) { if let Ok(mut transform) = transforms.get_mut(entity) { transform.translation = Vec3::new( offset * ops::sin(time.elapsed_secs()) / 20., 
0., ops::cos(time.elapsed_secs()) / 20., ); offset += 0.5; } } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/custom_gltf_vertex_attribute.rs
examples/gltf/custom_gltf_vertex_attribute.rs
//! Renders a glTF mesh in 2D with a custom vertex attribute. use bevy::{ gltf::GltfPlugin, mesh::{MeshVertexAttribute, MeshVertexBufferLayoutRef}, prelude::*, reflect::TypePath, render::render_resource::*, shader::ShaderRef, sprite_render::{Material2d, Material2dKey, Material2dPlugin}, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/custom_gltf_2d.wgsl"; /// This vertex attribute supplies barycentric coordinates for each triangle. /// /// Each component of the vector corresponds to one corner of a triangle. It's /// equal to 1.0 in that corner and 0.0 in the other two. Hence, its value in /// the fragment shader indicates proximity to a corner or the opposite edge. const ATTRIBUTE_BARYCENTRIC: MeshVertexAttribute = MeshVertexAttribute::new("Barycentric", 2137464976, VertexFormat::Float32x3); fn main() { App::new() .insert_resource(GlobalAmbientLight { color: Color::WHITE, brightness: 1.0 / 5.0f32, ..default() }) .add_plugins(( DefaultPlugins.set( GltfPlugin::default() // Map a custom glTF attribute name to a `MeshVertexAttribute`. // The glTF file used here has an attribute name with *two* // underscores: __BARYCENTRIC // One is stripped to do the comparison here. .add_custom_vertex_attribute("_BARYCENTRIC", ATTRIBUTE_BARYCENTRIC), ), Material2dPlugin::<CustomMaterial>::default(), )) .add_systems(Startup, setup) .run(); } fn setup( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<CustomMaterial>>, ) { // Add a mesh loaded from a glTF file. This mesh has data for `ATTRIBUTE_BARYCENTRIC`. 
let mesh = asset_server.load( GltfAssetLabel::Primitive { mesh: 0, primitive: 0, } .from_asset("models/barycentric/barycentric.gltf"), ); commands.spawn(( Mesh2d(mesh), MeshMaterial2d(materials.add(CustomMaterial {})), Transform::from_scale(150.0 * Vec3::ONE), )); commands.spawn(Camera2d); } /// This custom material uses barycentric coordinates from /// `ATTRIBUTE_BARYCENTRIC` to shade a white border around each triangle. The /// thickness of the border is animated using the global time shader uniform. #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial {} impl Material2d for CustomMaterial { fn vertex_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn specialize( descriptor: &mut RenderPipelineDescriptor, layout: &MeshVertexBufferLayoutRef, _key: Material2dKey<Self>, ) -> Result<(), SpecializedMeshPipelineError> { let vertex_layout = layout.0.get_layout(&[ Mesh::ATTRIBUTE_POSITION.at_shader_location(0), Mesh::ATTRIBUTE_COLOR.at_shader_location(1), ATTRIBUTE_BARYCENTRIC.at_shader_location(2), ])?; descriptor.vertex.buffers = vec![vertex_layout]; Ok(()) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/load_gltf_extras.rs
examples/gltf/load_gltf_extras.rs
//! Loads and renders a glTF file as a scene, and list all the different `gltf_extras`. use bevy::{ gltf::{GltfExtras, GltfMaterialExtras, GltfMeshExtras, GltfSceneExtras}, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, check_for_gltf_extras) .run(); } #[derive(Component)] struct ExampleDisplay; fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( Camera3d::default(), Transform::from_xyz(2.0, 2.0, 2.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.spawn(DirectionalLight { shadows_enabled: true, ..default() }); // a barebones scene containing one of each gltf_extra type commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/extras/gltf_extras.glb"), ))); // a place to display the extras on screen commands.spawn(( Text::default(), TextFont { font_size: 15., ..default() }, Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, ExampleDisplay, )); } fn check_for_gltf_extras( gltf_extras_per_entity: Query<( Entity, Option<&Name>, Option<&GltfSceneExtras>, Option<&GltfExtras>, Option<&GltfMeshExtras>, Option<&GltfMaterialExtras>, )>, mut display: Single<&mut Text, With<ExampleDisplay>>, ) { let mut gltf_extra_infos_lines: Vec<String> = vec![]; for (id, name, scene_extras, extras, mesh_extras, material_extras) in gltf_extras_per_entity.iter() { if scene_extras.is_some() || extras.is_some() || mesh_extras.is_some() || material_extras.is_some() { let formatted_extras = format!( "Extras per entity {} ('Name: {}'): - scene extras: {:?} - primitive extras: {:?} - mesh extras: {:?} - material extras: {:?} ", id, name.unwrap_or(&Name::default()), scene_extras, extras, mesh_extras, material_extras ); gltf_extra_infos_lines.push(formatted_extras); } display.0 = gltf_extra_infos_lines.join("\n"); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/gltf_extension_animation_graph.rs
examples/gltf/gltf_extension_animation_graph.rs
//! Uses glTF extension processing to play an animation on a skinned glTF model of a fox. use std::f32::consts::PI; use bevy::{ asset::LoadContext, ecs::entity::EntityHashSet, gltf::extensions::{GltfExtensionHandler, GltfExtensionHandlers}, light::CascadeShadowConfigBuilder, platform::collections::{HashMap, HashSet}, prelude::*, scene::SceneInstanceReady, }; /// An example asset that contains a mesh and animation. const GLTF_PATH: &str = "models/animated/Fox.glb"; fn main() { App::new() .insert_resource(GlobalAmbientLight { color: Color::WHITE, brightness: 2000., ..default() }) .add_plugins((DefaultPlugins, GltfExtensionHandlerAnimationPlugin)) .add_systems( Startup, (setup_mesh_and_animation, setup_camera_and_environment), ) .run(); } /// A component that stores a reference to an animation we want to play. This is /// created when we start loading the mesh (see `setup_mesh_and_animation`) and /// read when the mesh has spawned (see `play_animation_once_loaded`). #[derive(Component, Reflect)] #[reflect(Component)] struct AnimationToPlay { graph_handle: Handle<AnimationGraph>, index: AnimationNodeIndex, } fn setup_mesh_and_animation(mut commands: Commands, asset_server: Res<AssetServer>) { // Spawn an entity with our components, and connect it to an observer that // will trigger when the scene is loaded and spawned. commands .spawn(SceneRoot( asset_server.load(GltfAssetLabel::Scene(0).from_asset(GLTF_PATH)), )) .observe(play_animation_when_ready); } fn play_animation_when_ready( scene_ready: On<SceneInstanceReady>, mut commands: Commands, children: Query<&Children>, mut players: Query<(&mut AnimationPlayer, &AnimationToPlay)>, ) { for child in children.iter_descendants(scene_ready.entity) { let Ok((mut player, animation_to_play)) = players.get_mut(child) else { continue; }; // Tell the animation player to start the animation and keep // repeating it. // // If you want to try stopping and switching animations, see the // `animated_mesh_control.rs` example. 
player.play(animation_to_play.index).repeat(); // Add the animation graph. This only needs to be done once to // connect the animation player to the mesh. commands .entity(child) .insert(AnimationGraphHandle(animation_to_play.graph_handle.clone())); } } /// Spawn a camera and a simple environment with a ground plane and light. fn setup_camera_and_environment( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // Camera commands.spawn(( Camera3d::default(), Transform::from_xyz(100.0, 100.0, 150.0).looking_at(Vec3::new(0.0, 20.0, 0.0), Vec3::Y), )); // Plane commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(500000.0, 500000.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.5, 0.3))), )); // Light commands.spawn(( Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)), DirectionalLight { shadows_enabled: true, ..default() }, CascadeShadowConfigBuilder { first_cascade_far_bound: 200.0, maximum_distance: 400.0, ..default() } .build(), )); } struct GltfExtensionHandlerAnimationPlugin; impl Plugin for GltfExtensionHandlerAnimationPlugin { fn build(&self, app: &mut App) { app.world_mut() .resource_mut::<GltfExtensionHandlers>() .0 .write_blocking() .push(Box::new(GltfExtensionHandlerAnimation::default())); } } #[derive(Default, Clone)] struct GltfExtensionHandlerAnimation { animation_root_indices: HashSet<usize>, animation_root_entities: EntityHashSet, clip: Option<Handle<AnimationClip>>, } impl GltfExtensionHandler for GltfExtensionHandlerAnimation { fn dyn_clone(&self) -> Box<dyn GltfExtensionHandler> { Box::new((*self).clone()) } #[cfg(feature = "bevy_animation")] fn on_animation(&mut self, gltf_animation: &gltf::Animation, handle: Handle<AnimationClip>) { if gltf_animation.name().is_some_and(|v| v == "Walk") { self.clip = Some(handle.clone()); } } #[cfg(feature = "bevy_animation")] fn on_animations_collected( &mut self, _load_context: &mut LoadContext<'_>, 
_animations: &[Handle<AnimationClip>], _named_animations: &HashMap<Box<str>, Handle<AnimationClip>>, animation_roots: &HashSet<usize>, ) { self.animation_root_indices = animation_roots.clone(); } fn on_gltf_node( &mut self, _load_context: &mut LoadContext<'_>, gltf_node: &gltf::Node, entity: &mut EntityWorldMut, ) { if self.animation_root_indices.contains(&gltf_node.index()) { self.animation_root_entities.insert(entity.id()); } } /// Called when an individual Scene is done processing fn on_scene_completed( &mut self, load_context: &mut LoadContext<'_>, _scene: &gltf::Scene, _world_root_id: Entity, world: &mut World, ) { // Create an AnimationGraph from the desired clip let (graph, index) = AnimationGraph::from_clip(self.clip.clone().unwrap()); // Store the animation graph as an asset with an arbitrary label // We only have one graph, so this label will be unique let graph_handle = load_context.add_labeled_asset("MyAnimationGraphLabel".to_string(), graph); // Create a component that stores a reference to our animation let animation_to_play = AnimationToPlay { graph_handle, index, }; // Insert the `AnimationToPlay` component on the first animation root let mut entity = world.entity_mut(*self.animation_root_entities.iter().next().unwrap()); entity.insert(animation_to_play); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/gltf_extension_mesh_2d.rs
examples/gltf/gltf_extension_mesh_2d.rs
//! Uses glTF extension processing to convert incoming 3d Meshes to 2d Meshes use bevy::{ asset::LoadContext, gltf::extensions::{GltfExtensionHandler, GltfExtensionHandlers}, gltf::GltfPlugin, mesh::{MeshVertexAttribute, MeshVertexBufferLayoutRef}, prelude::*, reflect::TypePath, render::render_resource::*, shader::ShaderRef, sprite_render::{Material2d, Material2dKey, Material2dPlugin}, }; /// This example uses a shader source file from the assets subdirectory const SHADER_ASSET_PATH: &str = "shaders/custom_gltf_2d.wgsl"; /// This vertex attribute supplies barycentric coordinates for each triangle. /// /// Each component of the vector corresponds to one corner of a triangle. It's /// equal to 1.0 in that corner and 0.0 in the other two. Hence, its value in /// the fragment shader indicates proximity to a corner or the opposite edge. const ATTRIBUTE_BARYCENTRIC: MeshVertexAttribute = MeshVertexAttribute::new("Barycentric", 2137464976, VertexFormat::Float32x3); fn main() { App::new() .insert_resource(GlobalAmbientLight { color: Color::WHITE, brightness: 2000., ..default() }) .add_plugins(( DefaultPlugins.set( GltfPlugin::default() // Map a custom glTF attribute name to a `MeshVertexAttribute`. // The glTF file used here has an attribute name with *two* // underscores: __BARYCENTRIC // One is stripped to do the comparison here. .add_custom_vertex_attribute("_BARYCENTRIC", ATTRIBUTE_BARYCENTRIC), ), GltfToMesh2dPlugin, )) .add_systems(Startup, setup) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( SceneRoot( asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/barycentric/barycentric.gltf")), ), Transform::from_scale(150. 
* Vec3::ONE), )); commands.spawn(Camera2d); } struct GltfToMesh2dPlugin; impl Plugin for GltfToMesh2dPlugin { fn build(&self, app: &mut App) { app.world_mut() .resource_mut::<GltfExtensionHandlers>() .0 .write_blocking() .push(Box::new(GltfExtensionHandlerToMesh2d)); app.add_plugins(Material2dPlugin::<CustomMaterial>::default()); } } #[derive(Default, Clone)] struct GltfExtensionHandlerToMesh2d; impl GltfExtensionHandler for GltfExtensionHandlerToMesh2d { fn dyn_clone(&self) -> Box<dyn GltfExtensionHandler> { Box::new((*self).clone()) } fn on_spawn_mesh_and_material( &mut self, load_context: &mut LoadContext<'_>, _primitive: &gltf::Primitive, _mesh: &gltf::Mesh, _material: &gltf::Material, entity: &mut EntityWorldMut, ) { if let Some(mesh3d) = entity.get::<Mesh3d>() && let Some(_) = entity.get::<MeshMaterial3d<StandardMaterial>>() { let material_handle = load_context.add_labeled_asset("AColorMaterial".to_string(), CustomMaterial {}); let mesh_handle = mesh3d.0.clone(); entity .remove::<(Mesh3d, MeshMaterial3d<StandardMaterial>)>() .insert((Mesh2d(mesh_handle), MeshMaterial2d(material_handle.clone()))); } } } /// This custom material uses barycentric coordinates from /// `ATTRIBUTE_BARYCENTRIC` to shade a white border around each triangle. The /// thickness of the border is animated using the global time shader uniform. #[derive(Asset, TypePath, AsBindGroup, Debug, Clone)] struct CustomMaterial {} impl Material2d for CustomMaterial { fn vertex_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn fragment_shader() -> ShaderRef { SHADER_ASSET_PATH.into() } fn specialize( descriptor: &mut RenderPipelineDescriptor, layout: &MeshVertexBufferLayoutRef, _key: Material2dKey<Self>, ) -> Result<(), SpecializedMeshPipelineError> { let vertex_layout = layout.0.get_layout(&[ Mesh::ATTRIBUTE_POSITION.at_shader_location(0), Mesh::ATTRIBUTE_COLOR.at_shader_location(1), ATTRIBUTE_BARYCENTRIC.at_shader_location(2), ])?; descriptor.vertex.buffers = vec![vertex_layout]; Ok(()) } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/edit_material_on_gltf.rs
examples/gltf/edit_material_on_gltf.rs
//! Showcases how to change the material of a `Scene` spawned from a Gltf use bevy::{ audio::AudioPlugin, color::palettes, gltf::GltfMaterialName, prelude::*, scene::SceneInstanceReady, }; fn main() { App::new() .add_plugins(DefaultPlugins.build().disable::<AudioPlugin>()) .add_systems(Startup, setup_scene) .add_observer(change_material) .run(); } /// This is added to a [`SceneRoot`] and will cause the [`StandardMaterial::base_color`] /// of materials with [`GltfMaterialName`] equal to `LeatherPartsMat`. #[derive(Component)] struct ColorOverride(Color); fn setup_scene(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( Camera3d::default(), Transform::from_xyz(0., 1., 2.5).looking_at(Vec3::new(0., 0.25, 0.), Dir3::Y), )); commands.spawn(( DirectionalLight::default(), Transform::from_xyz(0., 1., 0.25).looking_at(Vec3::ZERO, Dir3::Y), )); // FlightHelmet handle let flight_helmet = asset_server .load(GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf")); // This model will keep its original materials commands.spawn(SceneRoot(flight_helmet.clone())); // This model will be tinted red commands.spawn(( SceneRoot(flight_helmet.clone()), Transform::from_xyz(-1.25, 0., 0.), ColorOverride(palettes::tailwind::RED_300.into()), )); // This model will be tinted green commands.spawn(( SceneRoot(flight_helmet), Transform::from_xyz(1.25, 0., 0.), ColorOverride(palettes::tailwind::GREEN_300.into()), )); } /// On [`SceneInstanceReady`], iterates over all descendants of the scene /// and modifies the tint of the material for the materials named `LeatherPartsMat`. /// /// If the [`SceneRoot`] does not have a [`ColorOverride`], it is skipped. 
fn change_material( scene_ready: On<SceneInstanceReady>, mut commands: Commands, children: Query<&Children>, color_override: Query<&ColorOverride>, mesh_materials: Query<(&MeshMaterial3d<StandardMaterial>, &GltfMaterialName)>, mut asset_materials: ResMut<Assets<StandardMaterial>>, ) { info!("processing Scene Entity: {}", scene_ready.entity); // Get the `ColorOverride` of the entity, if it does not have a color override, return let Ok(color_override) = color_override.get(scene_ready.entity) else { info!("{} does not have a color override", scene_ready.entity); return; }; // Iterate over all children recursively for descendant in children.iter_descendants(scene_ready.entity) { // Get the material id and name which were created from the glTF file information let Ok((id, material_name)) = mesh_materials.get(descendant) else { continue; }; // Get the material of the descendant let Some(material) = asset_materials.get_mut(id.id()) else { continue; }; // match on the material name, modifying the materials as necessary match material_name.0.as_str() { "LeatherPartsMat" => { info!("editing LeatherPartsMat to use ColorOverride tint"); // Create a copy of the material and override base color // If you intend on creating multiple models with the same tint, it // is best to cache the handle somewhere, as having multiple materials // that are identical is expensive let mut new_material = material.clone(); new_material.base_color = color_override.0; // Override `MeshMaterial3d` with new material commands .entity(descendant) .insert(MeshMaterial3d(asset_materials.add(new_material))); } name => { info!("not replacing: {name}"); } } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/load_gltf.rs
examples/gltf/load_gltf.rs
//! Loads and renders a glTF file as a scene. use bevy::{ light::{CascadeShadowConfigBuilder, DirectionalLightShadowMap}, prelude::*, }; use std::f32::consts::*; fn main() { App::new() .insert_resource(DirectionalLightShadowMap { size: 4096 }) .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, animate_light_direction) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( Camera3d::default(), Transform::from_xyz(0.7, 0.7, 1.0).looking_at(Vec3::new(0.0, 0.3, 0.0), Vec3::Y), EnvironmentMapLight { diffuse_map: asset_server.load("environment_maps/pisa_diffuse_rgb9e5_zstd.ktx2"), specular_map: asset_server.load("environment_maps/pisa_specular_rgb9e5_zstd.ktx2"), intensity: 250.0, ..default() }, )); commands.spawn(( DirectionalLight { shadows_enabled: true, ..default() }, // This is a relatively small scene, so use tighter shadow // cascade bounds than the default for better quality. // We also adjusted the shadow map to be larger since we're // only using a single cascade. CascadeShadowConfigBuilder { num_cascades: 1, maximum_distance: 1.6, ..default() } .build(), )); commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/FlightHelmet/FlightHelmet.gltf"), ))); } fn animate_light_direction( time: Res<Time>, mut query: Query<&mut Transform, With<DirectionalLight>>, ) { for mut transform in &mut query { transform.rotation = Quat::from_euler( EulerRot::ZYX, 0.0, time.elapsed_secs() * PI / 5.0, -FRAC_PI_4, ); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/gltf_skinned_mesh.rs
examples/gltf/gltf_skinned_mesh.rs
//! Skinned mesh example with mesh and joints data loaded from a glTF file. //! Example taken from <https://github.com/KhronosGroup/glTF-Tutorials/blob/master/gltfTutorial/gltfTutorial_019_SimpleSkin.md> use std::f32::consts::*; use bevy::{math::ops, mesh::skinning::SkinnedMesh, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .insert_resource(GlobalAmbientLight { brightness: 750.0, ..default() }) .add_systems(Startup, setup) .add_systems(Update, joint_animation) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { // Create a camera commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 2.5, 5.0).looking_at(Vec3::new(0.0, 1.0, 0.0), Vec3::Y), )); // Spawn the first scene in `models/SimpleSkin/SimpleSkin.gltf` commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/SimpleSkin/SimpleSkin.gltf"), ))); } /// The scene hierarchy currently looks somewhat like this: /// /// ```text /// <Parent entity> /// + Mesh node (without `Mesh3d` or `SkinnedMesh` component) /// + Skinned mesh entity (with `Mesh3d` and `SkinnedMesh` component, created by glTF loader) /// + First joint /// + Second joint /// ``` /// /// In this example, we want to get and animate the second joint. /// It is similar to the animation defined in `models/SimpleSkin/SimpleSkin.gltf`. fn joint_animation( time: Res<Time>, children: Query<&ChildOf, With<SkinnedMesh>>, parents: Query<&Children>, mut transform_query: Query<&mut Transform>, ) { // Iter skinned mesh entity for child_of in &children { // Mesh node is the parent of the skinned mesh entity. let mesh_node_entity = child_of.parent(); // Get `Children` in the mesh node. let mesh_node_parent = parents.get(mesh_node_entity).unwrap(); // First joint is the second child of the mesh node. let first_joint_entity = mesh_node_parent[1]; // Get `Children` in the first joint. 
let first_joint_children = parents.get(first_joint_entity).unwrap(); // Second joint is the first child of the first joint. let second_joint_entity = first_joint_children[0]; // Get `Transform` in the second joint. let mut second_joint_transform = transform_query.get_mut(second_joint_entity).unwrap(); second_joint_transform.rotation = Quat::from_rotation_z(FRAC_PI_2 * ops::sin(time.elapsed_secs())); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/gltf/query_gltf_primitives.rs
examples/gltf/query_gltf_primitives.rs
//! This example demonstrates how to query a [`StandardMaterial`] within a glTF scene. //! It is particularly useful for glTF scenes with a mesh that consists of multiple primitives. use std::f32::consts::PI; use bevy::{gltf::GltfMaterialName, mesh::VertexAttributeValues, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, find_top_material_and_mesh) .run(); } fn find_top_material_and_mesh( mut materials: ResMut<Assets<StandardMaterial>>, mut meshes: ResMut<Assets<Mesh>>, time: Res<Time>, mat_query: Query<( &MeshMaterial3d<StandardMaterial>, &Mesh3d, &GltfMaterialName, )>, ) { for (mat_handle, mesh_handle, name) in mat_query.iter() { // locate a material by material name if name.0 == "Top" { if let Some(material) = materials.get_mut(mat_handle) { if let Color::Hsla(ref mut hsla) = material.base_color { *hsla = hsla.rotate_hue(time.delta_secs() * 100.0); } else { material.base_color = Color::from(Hsla::hsl(0.0, 0.9, 0.7)); } } if let Some(mesh) = meshes.get_mut(mesh_handle) && let Some(VertexAttributeValues::Float32x3(positions)) = mesh.attribute_mut(Mesh::ATTRIBUTE_POSITION) { for position in positions { *position = ( position[0], 1.5 + 0.5 * ops::sin(time.elapsed_secs() / 2.0), position[2], ) .into(); } } } } } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(( Camera3d::default(), Transform::from_xyz(4.0, 4.0, 12.0).looking_at(Vec3::new(0.0, 0.0, 0.5), Vec3::Y), )); commands.spawn(( Transform::from_rotation(Quat::from_euler(EulerRot::ZYX, 0.0, 1.0, -PI / 4.)), DirectionalLight::default(), )); commands.spawn(SceneRoot(asset_server.load( GltfAssetLabel::Scene(0).from_asset("models/GltfPrimitives/gltf_primitives.glb"), ))); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/movement/smooth_follow.rs
examples/movement/smooth_follow.rs
//! This example demonstrates how to use interpolation to make one entity smoothly follow another. use bevy::{ math::{prelude::*, vec3, NormedVectorSpace}, prelude::*, }; use rand::SeedableRng; use rand_chacha::ChaCha8Rng; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, (move_target, move_follower).chain()) .run(); } // The sphere that the following sphere targets at all times: #[derive(Component)] struct TargetSphere; // The speed of the target sphere moving to its next location: #[derive(Resource)] struct TargetSphereSpeed(f32); // The position that the target sphere always moves linearly toward: #[derive(Resource)] struct TargetPosition(Vec3); // The decay rate used by the smooth following: #[derive(Resource)] struct DecayRate(f32); // The sphere that follows the target sphere by moving towards it with nudging: #[derive(Component)] struct FollowingSphere; /// The source of randomness used by this example. #[derive(Resource)] struct RandomSource(ChaCha8Rng); fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { // A plane: commands.spawn(( Mesh3d(meshes.add(Plane3d::default().mesh().size(12.0, 12.0))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.15, 0.3))), Transform::from_xyz(0.0, -2.5, 0.0), )); // The target sphere: commands.spawn(( Mesh3d(meshes.add(Sphere::new(0.3))), MeshMaterial3d(materials.add(Color::srgb(0.3, 0.15, 0.9))), TargetSphere, )); // The sphere that follows it: commands.spawn(( Mesh3d(meshes.add(Sphere::new(0.3))), MeshMaterial3d(materials.add(Color::srgb(0.9, 0.3, 0.3))), Transform::from_translation(vec3(0.0, -2.0, 0.0)), FollowingSphere, )); // A light: commands.spawn(( PointLight { intensity: 15_000_000.0, shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); // A camera: commands.spawn(( Camera3d::default(), Transform::from_xyz(-2.0, 3.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Set 
starting values for resources used by the systems: commands.insert_resource(TargetSphereSpeed(5.0)); commands.insert_resource(DecayRate(2.0)); commands.insert_resource(TargetPosition(Vec3::ZERO)); commands.insert_resource(RandomSource(ChaCha8Rng::seed_from_u64(68941654987813521))); } fn move_target( mut target: Single<&mut Transform, With<TargetSphere>>, target_speed: Res<TargetSphereSpeed>, mut target_pos: ResMut<TargetPosition>, time: Res<Time>, mut rng: ResMut<RandomSource>, ) { match Dir3::new(target_pos.0 - target.translation) { // The target and the present position of the target sphere are far enough to have a well- // defined direction between them, so let's move closer: Ok(dir) => { let delta_time = time.delta_secs(); let abs_delta = (target_pos.0 - target.translation).norm(); // Avoid overshooting in case of high values of `delta_time`: let magnitude = f32::min(abs_delta, delta_time * target_speed.0); target.translation += dir * magnitude; } // The two are really close, so let's generate a new target position: Err(_) => { let legal_region = Cuboid::from_size(Vec3::splat(4.0)); *target_pos = TargetPosition(legal_region.sample_interior(&mut rng.0)); } } } fn move_follower( mut following: Single<&mut Transform, With<FollowingSphere>>, target: Single<&Transform, (With<TargetSphere>, Without<FollowingSphere>)>, decay_rate: Res<DecayRate>, time: Res<Time>, ) { let decay_rate = decay_rate.0; let delta_time = time.delta_secs(); // Calling `smooth_nudge` is what moves the following sphere smoothly toward the target. following .translation .smooth_nudge(&target.translation, decay_rate, delta_time); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/movement/physics_in_fixed_timestep.rs
examples/movement/physics_in_fixed_timestep.rs
//! This example shows how to properly handle player input, //! advance a physics simulation in a fixed timestep, and display the results. //! //! The classic source for how and why this is done is Glenn Fiedler's article //! [Fix Your Timestep!](https://gafferongames.com/post/fix_your_timestep/). //! For a more Bevy-centric source, see //! [this cheatbook entry](https://bevy-cheatbook.github.io/fundamentals/fixed-timestep.html). //! //! ## Motivation //! //! The naive way of moving a player is to just update their position like so: //! ```no_run //! transform.translation += velocity; //! ``` //! The issue here is that the player's movement speed will be tied to the frame rate. //! Faster machines will move the player faster, and slower machines will move the player slower. //! In fact, you can observe this today when running some old games that did it this way on modern hardware! //! The player will move at a breakneck pace. //! //! The more sophisticated way is to update the player's position based on the time that has passed: //! ```no_run //! transform.translation += velocity * time.delta_secs(); //! ``` //! This way, velocity represents a speed in units per second, and the player will move at the same speed //! regardless of the frame rate. //! //! However, this can still be problematic if the frame rate is very low or very high. //! If the frame rate is very low, the player will move in large jumps. This may lead to //! a player moving in such large jumps that they pass through walls or other obstacles. //! In general, you cannot expect a physics simulation to behave nicely with *any* delta time. //! Ideally, we want to have some stability in what kinds of delta times we feed into our physics simulation. //! //! The solution is using a fixed timestep. This means that we advance the physics simulation by a fixed amount //! at a time. If the real time that passed between two frames is less than the fixed timestep, we simply //! 
don't advance the physics simulation at all. //! If it is more, we advance the physics simulation multiple times until we catch up. //! You can read more about how Bevy implements this in the documentation for //! [`bevy::time::Fixed`](https://docs.rs/bevy/latest/bevy/time/struct.Fixed.html). //! //! This leaves us with a last problem, however. If our physics simulation may advance zero or multiple times //! per frame, there may be frames in which the player's position did not need to be updated at all, //! and some where it is updated by a large amount that resulted from running the physics simulation multiple times. //! This is physically correct, but visually jarring. Imagine a player moving in a straight line, but depending on the frame rate, //! they may sometimes advance by a large amount and sometimes not at all. Visually, we want the player to move smoothly. //! This is why we need to separate the player's position in the physics simulation from the player's position in the visual representation. //! The visual representation can then be interpolated smoothly based on the previous and current actual player position in the physics simulation. //! //! This is a tradeoff: every visual frame is now slightly lagging behind the actual physical frame, //! but in return, the player's movement will appear smooth. //! There are other ways to compute the visual representation of the player, such as extrapolation. //! See the [documentation of the lightyear crate](https://cbournhonesque.github.io/lightyear/book/concepts/advanced_replication/visual_interpolation.html) //! for a nice overview of the different methods and their respective tradeoffs. //! //! If we decide to use a fixed timestep, our game logic should mostly go in the `FixedUpdate` schedule. //! One notable exception is the camera. Cameras should update as often as possible, or the player will very quickly //! notice choppy movement if it's only updated at the same rate as the physics simulation. 
So, we use a variable timestep for the camera, //! updating its transform every frame. The question now is which schedule to use. That depends on whether the camera data is required //! for the physics simulation to run or not. //! For example, in 3D games, the camera rotation often determines which direction the player moves when pressing "W", //! so we need to rotate the camera *before* the fixed timestep. In contrast, the translation of the camera depends on what the physics simulation //! has calculated for the player's position. Therefore, we need to update the camera's translation *after* the fixed timestep. Fortunately, //! we can get smooth movement by simply using the interpolated player translation for the camera as well. //! //! ## Implementation //! //! - The player's inputs since the last physics update are stored in the `AccumulatedInput` component. //! - The player's velocity is stored in a `Velocity` component. This is the speed in units per second. //! - The player's current position in the physics simulation is stored in a `PhysicalTranslation` component. //! - The player's previous position in the physics simulation is stored in a `PreviousPhysicalTranslation` component. //! - The player's visual representation is stored in Bevy's regular `Transform` component. //! - Every frame, we go through the following steps: //! - Accumulate the player's input and set the current speed in the `handle_input` system. //! This is run in the `RunFixedMainLoop` schedule, ordered in `RunFixedMainLoopSystems::BeforeFixedMainLoop`, //! which runs before the fixed timestep loop. This is run every frame. //! - Rotate the camera based on the player's input. This is also run in `RunFixedMainLoopSystems::BeforeFixedMainLoop`. //! - Advance the physics simulation by one fixed timestep in the `advance_physics` system. //! Accumulated input is consumed here. //! This is run in the `FixedUpdate` schedule, which runs zero or multiple times per frame. //! 
- Update the player's visual representation in the `interpolate_rendered_transform` system. //! This interpolates between the player's previous and current position in the physics simulation. //! It is run in the `RunFixedMainLoop` schedule, ordered in `RunFixedMainLoopSystems::AfterFixedMainLoop`, //! which runs after the fixed timestep loop. This is run every frame. //! - Update the camera's translation to the player's interpolated translation. This is also run in `RunFixedMainLoopSystems::AfterFixedMainLoop`. //! //! //! ## Controls //! //! | Key Binding | Action | //! |:---------------------|:--------------| //! | `W` | Move up | //! | `S` | Move down | //! | `A` | Move left | //! | `D` | Move right | //! | Mouse | Rotate camera | use std::f32::consts::FRAC_PI_2; use bevy::{color::palettes::tailwind, input::mouse::AccumulatedMouseMotion, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .init_resource::<DidFixedTimestepRunThisFrame>() .add_systems(Startup, (spawn_text, spawn_player, spawn_environment)) // At the beginning of each frame, clear the flag that indicates whether the fixed timestep has run this frame. .add_systems(PreUpdate, clear_fixed_timestep_flag) // At the beginning of each fixed timestep, set the flag that indicates whether the fixed timestep has run this frame. .add_systems(FixedPreUpdate, set_fixed_time_step_flag) // Advance the physics simulation using a fixed timestep. .add_systems(FixedUpdate, advance_physics) .add_systems( // The `RunFixedMainLoop` schedule allows us to schedule systems to run before and after the fixed timestep loop. RunFixedMainLoop, ( ( // The camera needs to be rotated before the physics simulation is advanced in before the fixed timestep loop, // so that the physics simulation can use the current rotation. // Note that if we ran it in `Update`, it would be too late, as the physics simulation would already have been advanced. 
// If we ran this in `FixedUpdate`, it would sometimes not register player input, as that schedule may run zero times per frame. rotate_camera, // Accumulate our input before the fixed timestep loop to tell the physics simulation what it should do during the fixed timestep. accumulate_input, ) .chain() .in_set(RunFixedMainLoopSystems::BeforeFixedMainLoop), ( // Clear our accumulated input after it was processed during the fixed timestep. // By clearing the input *after* the fixed timestep, we can still use `AccumulatedInput` inside `FixedUpdate` if we need it. clear_input.run_if(did_fixed_timestep_run_this_frame), // The player's visual representation needs to be updated after the physics simulation has been advanced. // This could be run in `Update`, but if we run it here instead, the systems in `Update` // will be working with the `Transform` that will actually be shown on screen. interpolate_rendered_transform, // The camera can then use the interpolated transform to position itself correctly. translate_camera, ) .chain() .in_set(RunFixedMainLoopSystems::AfterFixedMainLoop), ), ) .run(); } /// A vector representing the player's input, accumulated over all frames that ran /// since the last time the physics simulation was advanced. #[derive(Debug, Component, Clone, Copy, PartialEq, Default, Deref, DerefMut)] struct AccumulatedInput { // The player's movement input (WASD). movement: Vec2, // Other input that could make sense would be e.g. // boost: bool } /// A vector representing the player's velocity in the physics simulation. #[derive(Debug, Component, Clone, Copy, PartialEq, Default, Deref, DerefMut)] struct Velocity(Vec3); /// The actual position of the player in the physics simulation. /// This is separate from the `Transform`, which is merely a visual representation. 
/// /// If you want to make sure that this component is always initialized /// with the same value as the `Transform`'s translation, you can /// use a [component lifecycle hook](https://docs.rs/bevy/0.14.0/bevy/ecs/component/struct.ComponentHooks.html) #[derive(Debug, Component, Clone, Copy, PartialEq, Default, Deref, DerefMut)] struct PhysicalTranslation(Vec3); /// The value [`PhysicalTranslation`] had in the last fixed timestep. /// Used for interpolation in the `interpolate_rendered_transform` system. #[derive(Debug, Component, Clone, Copy, PartialEq, Default, Deref, DerefMut)] struct PreviousPhysicalTranslation(Vec3); /// Spawn the player and a 3D camera. We could also spawn the camera as a child of the player, /// but in practice, they are usually spawned separately so that the player's rotation does not /// influence the camera's rotation. fn spawn_player(mut commands: Commands) { commands.spawn((Camera3d::default(), CameraSensitivity::default())); commands.spawn(( Name::new("Player"), Transform::from_scale(Vec3::splat(0.3)), AccumulatedInput::default(), Velocity::default(), PhysicalTranslation::default(), PreviousPhysicalTranslation::default(), )); } /// Spawn a field of floating spheres to fly around in fn spawn_environment( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let sphere_material = materials.add(Color::from(tailwind::SKY_200)); let sphere_mesh = meshes.add(Sphere::new(0.3)); let spheres_in_x = 6; let spheres_in_y = 4; let spheres_in_z = 10; let distance = 3.0; for x in 0..spheres_in_x { for y in 0..spheres_in_y { for z in 0..spheres_in_z { let translation = Vec3::new( x as f32 * distance - (spheres_in_x as f32 - 1.0) * distance / 2.0, y as f32 * distance - (spheres_in_y as f32 - 1.0) * distance / 2.0, z as f32 * distance - (spheres_in_z as f32 - 1.0) * distance / 2.0, ); commands.spawn(( Name::new("Sphere"), Transform::from_translation(translation), Mesh3d(sphere_mesh.clone()), 
MeshMaterial3d(sphere_material.clone()), )); } } } commands.spawn(( DirectionalLight::default(), Transform::default().looking_to(Vec3::new(-1.0, -3.0, 0.5), Vec3::Y), )); } /// Spawn a bit of UI text to explain how to move the player. fn spawn_text(mut commands: Commands) { let font = TextFont { font_size: 25.0, ..default() }; commands.spawn(( Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), flex_direction: FlexDirection::Column, ..default() }, children![ (Text::new("Move the player with WASD"), font.clone()), (Text::new("Rotate the camera with the mouse"), font) ], )); } fn rotate_camera( accumulated_mouse_motion: Res<AccumulatedMouseMotion>, player: Single<(&mut Transform, &CameraSensitivity), With<Camera>>, ) { let (mut transform, camera_sensitivity) = player.into_inner(); let delta = accumulated_mouse_motion.delta; if delta != Vec2::ZERO { // Note that we are not multiplying by delta time here. // The reason is that for mouse movement, we already get the full movement that happened since the last frame. // This means that if we multiply by delta time, we will get a smaller rotation than intended by the user. let delta_yaw = -delta.x * camera_sensitivity.x; let delta_pitch = -delta.y * camera_sensitivity.y; let (yaw, pitch, roll) = transform.rotation.to_euler(EulerRot::YXZ); let yaw = yaw + delta_yaw; // If the pitch was ±¹⁄₂ π, the camera would look straight up or down. // When the user wants to move the camera back to the horizon, which way should the camera face? // The camera has no way of knowing what direction was "forward" before landing in that extreme position, // so the direction picked will for all intents and purposes be arbitrary. // Another issue is that for mathematical reasons, the yaw will effectively be flipped when the pitch is at the extremes. // To not run into these issues, we clamp the pitch to a safe range. 
const PITCH_LIMIT: f32 = FRAC_PI_2 - 0.01; let pitch = (pitch + delta_pitch).clamp(-PITCH_LIMIT, PITCH_LIMIT); transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll); } } #[derive(Debug, Component, Deref, DerefMut)] struct CameraSensitivity(Vec2); impl Default for CameraSensitivity { fn default() -> Self { Self( // These factors are just arbitrary mouse sensitivity values. // It's often nicer to have a faster horizontal sensitivity than vertical. // We use a component for them so that we can make them user-configurable at runtime // for accessibility reasons. // It also allows you to inspect them in an editor if you `Reflect` the component. Vec2::new(0.003, 0.002), ) } } /// Handle keyboard input and accumulate it in the `AccumulatedInput` component. /// /// There are many strategies for how to handle all the input that happened since the last fixed timestep. /// This is a very simple one: we just use the last available input. /// That strategy works fine for us since the user continuously presses the input keys in this example. /// If we had some kind of instantaneous action like activating a boost ability, we would need to remember that that input /// was pressed at some point since the last fixed timestep. fn accumulate_input( keyboard_input: Res<ButtonInput<KeyCode>>, player: Single<(&mut AccumulatedInput, &mut Velocity)>, camera: Single<&Transform, With<Camera>>, ) { /// Since Bevy's 3D renderer assumes SI units, this has the unit of meters per second. /// Note that about 1.5 is the average walking speed of a human. const SPEED: f32 = 4.0; let (mut input, mut velocity) = player.into_inner(); // Reset the input to zero before reading the new input. As mentioned above, we can only do this // because this is continuously pressed by the user. Do not reset e.g. whether the user wants to boost. 
input.movement = Vec2::ZERO; if keyboard_input.pressed(KeyCode::KeyW) { input.movement.y += 1.0; } if keyboard_input.pressed(KeyCode::KeyS) { input.movement.y -= 1.0; } if keyboard_input.pressed(KeyCode::KeyA) { input.movement.x -= 1.0; } if keyboard_input.pressed(KeyCode::KeyD) { input.movement.x += 1.0; } // Remap the 2D input to Bevy's 3D coordinate system. // Pressing W makes `input.y` go up. Since Bevy assumes that -Z is forward, we make our new Z equal to -input.y let input_3d = Vec3 { x: input.movement.x, y: 0.0, z: -input.movement.y, }; // Rotate the input so that forward is aligned with the camera's forward direction. let rotated_input = camera.rotation * input_3d; // We need to normalize and scale because otherwise // diagonal movement would be faster than horizontal or vertical movement. // We use `clamp_length_max` instead of `.normalize_or_zero()` because gamepad input // may be smaller than 1.0 when the player is pushing the stick just a little bit. velocity.0 = rotated_input.clamp_length_max(1.0) * SPEED; } /// A simple resource that tells us whether the fixed timestep ran this frame. #[derive(Resource, Debug, Deref, DerefMut, Default)] pub struct DidFixedTimestepRunThisFrame(bool); /// Reset the flag at the start of every frame. fn clear_fixed_timestep_flag( mut did_fixed_timestep_run_this_frame: ResMut<DidFixedTimestepRunThisFrame>, ) { did_fixed_timestep_run_this_frame.0 = false; } /// Set the flag during each fixed timestep. fn set_fixed_time_step_flag( mut did_fixed_timestep_run_this_frame: ResMut<DidFixedTimestepRunThisFrame>, ) { did_fixed_timestep_run_this_frame.0 = true; } fn did_fixed_timestep_run_this_frame( did_fixed_timestep_run_this_frame: Res<DidFixedTimestepRunThisFrame>, ) -> bool { did_fixed_timestep_run_this_frame.0 } // Clear the input after it was processed in the fixed timestep. 
fn clear_input(mut input: Single<&mut AccumulatedInput>) { **input = AccumulatedInput::default(); } /// Advance the physics simulation by one fixed timestep. This may run zero or multiple times per frame. /// /// Note that since this runs in `FixedUpdate`, `Res<Time>` would be `Res<Time<Fixed>>` automatically. /// We are being explicit here for clarity. fn advance_physics( fixed_time: Res<Time<Fixed>>, mut query: Query<( &mut PhysicalTranslation, &mut PreviousPhysicalTranslation, &Velocity, )>, ) { for (mut current_physical_translation, mut previous_physical_translation, velocity) in query.iter_mut() { previous_physical_translation.0 = current_physical_translation.0; current_physical_translation.0 += velocity.0 * fixed_time.delta_secs(); } } fn interpolate_rendered_transform( fixed_time: Res<Time<Fixed>>, mut query: Query<( &mut Transform, &PhysicalTranslation, &PreviousPhysicalTranslation, )>, ) { for (mut transform, current_physical_translation, previous_physical_translation) in query.iter_mut() { let previous = previous_physical_translation.0; let current = current_physical_translation.0; // The overstep fraction is a value between 0 and 1 that tells us how far we are between two fixed timesteps. let alpha = fixed_time.overstep_fraction(); let rendered_translation = previous.lerp(current, alpha); transform.translation = rendered_translation; } } // Sync the camera's position with the player's interpolated position fn translate_camera( mut camera: Single<&mut Transform, With<Camera>>, player: Single<&Transform, (With<AccumulatedInput>, Without<Camera>)>, ) { camera.translation = player.translation; }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/projection_zoom.rs
examples/camera/projection_zoom.rs
//! Shows how to zoom orthographic and perspective projection cameras. use std::{f32::consts::PI, ops::Range}; use bevy::{camera::ScalingMode, input::mouse::AccumulatedMouseScroll, prelude::*}; #[derive(Debug, Resource)] struct CameraSettings { /// The height of the viewport in world units when the orthographic camera's scale is 1 pub orthographic_viewport_height: f32, /// Clamp the orthographic camera's scale to this range pub orthographic_zoom_range: Range<f32>, /// Multiply mouse wheel inputs by this factor when using the orthographic camera pub orthographic_zoom_speed: f32, /// Clamp perspective camera's field of view to this range pub perspective_zoom_range: Range<f32>, /// Multiply mouse wheel inputs by this factor when using the perspective camera pub perspective_zoom_speed: f32, } fn main() { App::new() .add_plugins(DefaultPlugins) .insert_resource(CameraSettings { orthographic_viewport_height: 5., // In orthographic projections, we specify camera scale relative to a default value of 1, // in which one unit in world space corresponds to one pixel. orthographic_zoom_range: 0.1..10.0, // This value was hand-tuned to ensure that zooming in and out feels smooth but not slow. orthographic_zoom_speed: 0.2, // Perspective projections use field of view, expressed in radians. We would // normally not set it to more than π, which represents a 180° FOV. 
perspective_zoom_range: (PI / 5.)..(PI - 0.2), // Changes in FOV are much more noticeable due to its limited range in radians perspective_zoom_speed: 0.05, }) .add_systems(Startup, (setup, instructions)) .add_systems(Update, (switch_projection, zoom)) .run(); } /// Set up a simple 3D scene fn setup( asset_server: Res<AssetServer>, camera_settings: Res<CameraSettings>, mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { commands.spawn(( Name::new("Camera"), Camera3d::default(), Projection::from(OrthographicProjection { // We can set the scaling mode to FixedVertical to keep the viewport height constant as its aspect ratio changes. // The viewport height is the height of the camera's view in world units when the scale is 1. scaling_mode: ScalingMode::FixedVertical { viewport_height: camera_settings.orthographic_viewport_height, }, // This is the default value for scale for orthographic projections. // To zoom in and out, change this value, rather than `ScalingMode` or the camera's position. scale: 1., ..OrthographicProjection::default_3d() }), Transform::from_xyz(5.0, 5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.spawn(( Name::new("Plane"), Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::srgb(0.3, 0.5, 0.3), // Turning off culling keeps the plane visible when viewed from beneath. cull_mode: None, ..default() })), )); commands.spawn(( Name::new("Fox"), SceneRoot( asset_server.load(GltfAssetLabel::Scene(0).from_asset("models/animated/Fox.glb")), ), // Note: the scale adjustment is purely an accident of our fox model, which renders // HUGE unless mitigated! 
Transform::from_translation(Vec3::splat(0.0)).with_scale(Vec3::splat(0.025)), )); commands.spawn(( Name::new("Light"), PointLight::default(), Transform::from_xyz(3.0, 8.0, 5.0), )); } fn instructions(mut commands: Commands) { commands.spawn(( Name::new("Instructions"), Text::new( "Scroll mouse wheel to zoom in/out\n\ Space: switch between orthographic and perspective projections", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn switch_projection( mut camera: Single<&mut Projection, With<Camera>>, camera_settings: Res<CameraSettings>, keyboard_input: Res<ButtonInput<KeyCode>>, ) { if keyboard_input.just_pressed(KeyCode::Space) { // Switch projection type **camera = match **camera { Projection::Orthographic(_) => Projection::Perspective(PerspectiveProjection { fov: camera_settings.perspective_zoom_range.start, ..default() }), Projection::Perspective(_) => Projection::Orthographic(OrthographicProjection { scaling_mode: ScalingMode::FixedVertical { viewport_height: camera_settings.orthographic_viewport_height, }, ..OrthographicProjection::default_3d() }), _ => return, } } } fn zoom( camera: Single<&mut Projection, With<Camera>>, camera_settings: Res<CameraSettings>, mouse_wheel_input: Res<AccumulatedMouseScroll>, ) { // Usually, you won't need to handle both types of projection, // but doing so makes for a more complete example. match *camera.into_inner() { Projection::Orthographic(ref mut orthographic) => { // We want scrolling up to zoom in, decreasing the scale, so we negate the delta. let delta_zoom = -mouse_wheel_input.delta.y * camera_settings.orthographic_zoom_speed; // When changing scales, logarithmic changes are more intuitive. // To get this effect, we add 1 to the delta, so that a delta of 0 // results in no multiplicative effect, positive values result in a multiplicative increase, // and negative values result in multiplicative decreases. let multiplicative_zoom = 1. 
+ delta_zoom; orthographic.scale = (orthographic.scale * multiplicative_zoom).clamp( camera_settings.orthographic_zoom_range.start, camera_settings.orthographic_zoom_range.end, ); } Projection::Perspective(ref mut perspective) => { // We want scrolling up to zoom in, decreasing the scale, so we negate the delta. let delta_zoom = -mouse_wheel_input.delta.y * camera_settings.perspective_zoom_speed; // Adjust the field of view, but keep it within our stated range. perspective.fov = (perspective.fov + delta_zoom).clamp( camera_settings.perspective_zoom_range.start, camera_settings.perspective_zoom_range.end, ); } _ => (), } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/free_camera_controller.rs
examples/camera/free_camera_controller.rs
//! This example showcases the default `FreeCamera` camera controller. //! //! The default `FreeCamera` controller is useful for exploring large scenes, debugging and editing purposes. To use it, //! simply add the [`FreeCameraPlugin`] to your [`App`] and attach the [`FreeCamera`] component to the camera entity you //! wish to control. //! //! ## Default Controls //! //! This controller has a simple 6-axis control scheme, and mouse controls for camera orientation. There are also //! bindings for capturing the mouse, both while holding the button and toggle, a run feature that increases the //! max speed, and scrolling changes the movement speed. All keybinds can be changed by editing the [`FreeCamera`] //! component. //! //! | Default Key Binding | Action | //! |:--------------------|:-----------------------| //! | Mouse | Look around | //! | Left click | Capture mouse (hold) | //! | M | Capture mouse (toggle) | //! | WASD | Horizontal movement | //! | QE | Vertical movement | //! | Left shift | Run | //! | Scroll wheel | Change movement speed | //! //! The movement speed, sensitivity and friction can also be changed by the [`FreeCamera`] component. //! //! ## Example controls //! //! This example also provides a few extra keybinds to change the camera sensitivity, friction (how fast the camera //! stops), scroll factor (how much scrolling changes speed) and enabling/disabling the controller. //! //! | Key Binding | Action | //! |:------------|:-----------------------| //! | Z | Decrease sensitivity | //! | X | Increase sensitivity | //! | C | Decrease friction | //! | V | Increase friction | //! | F | Decrease scroll factor | //! | G | Increase scroll factor | //! 
| B | Enable/Disable | use std::f32::consts::{FRAC_PI_4, PI}; use bevy::{ camera_controller::free_camera::{FreeCamera, FreeCameraPlugin, FreeCameraState}, color::palettes::tailwind, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) // Plugin that enables FreeCamera functionality .add_plugins(FreeCameraPlugin) // Example code plugins .add_plugins((CameraPlugin, CameraSettingsPlugin, ScenePlugin)) .run(); } // Plugin that spawns the camera. struct CameraPlugin; impl Plugin for CameraPlugin { fn build(&self, app: &mut App) { app.add_systems(Startup, spawn_camera); } } fn spawn_camera(mut commands: Commands) { commands.spawn(( Camera3d::default(), Transform::from_xyz(0.0, 1.0, 0.0).looking_to(Vec3::X, Vec3::Y), // This component stores all camera settings and state, which is used by the FreeCameraPlugin to // control it. These properties can be changed at runtime, but beware the controller system is // constantly using and modifying those values unless the enabled field is false. 
FreeCamera { sensitivity: 0.2, friction: 25.0, walk_speed: 3.0, run_speed: 9.0, ..default() }, )); } // Plugin that handles camera settings controls and information text struct CameraSettingsPlugin; impl Plugin for CameraSettingsPlugin { fn build(&self, app: &mut App) { app.add_systems(PostStartup, spawn_text) .add_systems(Update, (update_camera_settings, update_text)); } } #[derive(Component)] struct InfoText; fn spawn_text(mut commands: Commands, free_camera_query: Query<&FreeCamera>) { commands.spawn(( Node { position_type: PositionType::Absolute, top: px(-16), left: px(12), ..default() }, children![Text::new(format!( "{}", free_camera_query.single().unwrap() ))], )); commands.spawn(( Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, children![Text::new(concat![ "Z/X: decrease/increase sensitivity\n", "C/V: decrease/increase friction\n", "F/G: decrease/increase scroll factor\n", "B: enable/disable controller", ]),], )); // Mutable text marked with component commands.spawn(( Node { position_type: PositionType::Absolute, top: px(12), right: px(12), ..default() }, children![(InfoText, Text::new(""))], )); } fn update_camera_settings( mut camera_query: Query<(&mut FreeCamera, &mut FreeCameraState)>, input: Res<ButtonInput<KeyCode>>, ) { let (mut free_camera, mut free_camera_state) = camera_query.single_mut().unwrap(); if input.pressed(KeyCode::KeyZ) { free_camera.sensitivity = (free_camera.sensitivity - 0.005).max(0.005); } if input.pressed(KeyCode::KeyX) { free_camera.sensitivity += 0.005; } if input.pressed(KeyCode::KeyC) { free_camera.friction = (free_camera.friction - 0.2).max(0.0); } if input.pressed(KeyCode::KeyV) { free_camera.friction += 0.2; } if input.pressed(KeyCode::KeyF) { free_camera.scroll_factor = (free_camera.scroll_factor - 0.02).max(0.02); } if input.pressed(KeyCode::KeyG) { free_camera.scroll_factor += 0.02; } if input.just_pressed(KeyCode::KeyB) { free_camera_state.enabled = !free_camera_state.enabled; } 
} fn update_text( mut text_query: Query<&mut Text, With<InfoText>>, camera_query: Query<(&FreeCamera, &FreeCameraState)>, ) { let mut text = text_query.single_mut().unwrap(); let (free_camera, free_camera_state) = camera_query.single().unwrap(); text.0 = format!( "Enabled: {},\nSensitivity: {:.03}\nFriction: {:.01}\nScroll factor: {:.02}\nWalk Speed: {:.02}\nRun Speed: {:.02}\nSpeed: {:.02}", free_camera_state.enabled, free_camera.sensitivity, free_camera.friction, free_camera.scroll_factor, free_camera.walk_speed, free_camera.run_speed, free_camera_state.velocity.length(), ); } // Plugin that spawns the scene and lighting. struct ScenePlugin; impl Plugin for ScenePlugin { fn build(&self, app: &mut App) { app.add_systems(Startup, (spawn_lights, spawn_world)); } } fn spawn_lights(mut commands: Commands) { // Main light commands.spawn(( PointLight { color: Color::from(tailwind::ORANGE_300), shadows_enabled: true, ..default() }, Transform::from_xyz(0.0, 3.0, 0.0), )); // Light behind wall commands.spawn(( PointLight { color: Color::WHITE, shadows_enabled: true, ..default() }, Transform::from_xyz(-3.5, 3.0, 0.0), )); // Light under floor commands.spawn(( PointLight { color: Color::from(tailwind::RED_300), shadows_enabled: true, ..default() }, Transform::from_xyz(0.0, -0.5, 0.0), )); } fn spawn_world( mut commands: Commands, mut materials: ResMut<Assets<StandardMaterial>>, mut meshes: ResMut<Assets<Mesh>>, ) { let cube = meshes.add(Cuboid::new(1.0, 1.0, 1.0)); let floor = meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(10.0))); let sphere = meshes.add(Sphere::new(0.5)); let wall = meshes.add(Cuboid::new(0.2, 4.0, 3.0)); let blue_material = materials.add(Color::from(tailwind::BLUE_700)); let red_material = materials.add(Color::from(tailwind::RED_950)); let white_material = materials.add(Color::WHITE); // Top side of floor commands.spawn(( Mesh3d(floor.clone()), MeshMaterial3d(white_material.clone()), )); // Under side of floor commands.spawn(( Mesh3d(floor.clone()), 
MeshMaterial3d(white_material.clone()), Transform::from_xyz(0.0, -0.01, 0.0).with_rotation(Quat::from_rotation_x(PI)), )); // Blue sphere commands.spawn(( Mesh3d(sphere.clone()), MeshMaterial3d(blue_material.clone()), Transform::from_xyz(3.0, 1.5, 0.0), )); // Tall wall commands.spawn(( Mesh3d(wall.clone()), MeshMaterial3d(white_material.clone()), Transform::from_xyz(-3.0, 2.0, 0.0), )); // Cube behind wall commands.spawn(( Mesh3d(cube.clone()), MeshMaterial3d(blue_material.clone()), Transform::from_xyz(-4.2, 0.5, 0.0), )); // Hidden cube under floor commands.spawn(( Mesh3d(cube.clone()), MeshMaterial3d(red_material.clone()), Transform { translation: Vec3::new(3.0, -2.0, 0.0), rotation: Quat::from_euler(EulerRot::YXZEx, FRAC_PI_4, FRAC_PI_4, 0.0), ..default() }, )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/custom_projection.rs
examples/camera/custom_projection.rs
//! Demonstrates how to define and use custom camera projections. use bevy::camera::CameraProjection; use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .run(); } /// Like a perspective projection, but the vanishing point is not centered. #[derive(Debug, Clone)] struct ObliquePerspectiveProjection { horizontal_obliqueness: f32, vertical_obliqueness: f32, perspective: PerspectiveProjection, } /// Implement the [`CameraProjection`] trait for our custom projection: impl CameraProjection for ObliquePerspectiveProjection { fn get_clip_from_view(&self) -> Mat4 { let mut mat = self.perspective.get_clip_from_view(); mat.col_mut(2)[0] = self.horizontal_obliqueness; mat.col_mut(2)[1] = self.vertical_obliqueness; mat } fn get_clip_from_view_for_sub(&self, sub_view: &bevy::camera::SubCameraView) -> Mat4 { let mut mat = self.perspective.get_clip_from_view_for_sub(sub_view); mat.col_mut(2)[0] = self.horizontal_obliqueness; mat.col_mut(2)[1] = self.vertical_obliqueness; mat } fn update(&mut self, width: f32, height: f32) { self.perspective.update(width, height); } fn far(&self) -> f32 { self.perspective.far } fn get_frustum_corners(&self, z_near: f32, z_far: f32) -> [Vec3A; 8] { self.perspective.get_frustum_corners(z_near, z_far) } } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { commands.spawn(( Camera3d::default(), // Use our custom projection: Projection::custom(ObliquePerspectiveProjection { horizontal_obliqueness: 0.2, vertical_obliqueness: 0.6, perspective: PerspectiveProjection::default(), }), Transform::from_xyz(-2.5, 4.5, 9.0).looking_at(Vec3::ZERO, Vec3::Y), )); // Scene setup commands.spawn(( Mesh3d(meshes.add(Circle::new(4.0))), MeshMaterial3d(materials.add(Color::WHITE)), Transform::from_rotation(Quat::from_rotation_x(-std::f32::consts::FRAC_PI_2)), )); commands.spawn(( Mesh3d(meshes.add(Cuboid::new(1.0, 1.0, 1.0))), 
MeshMaterial3d(materials.add(Color::srgb_u8(124, 144, 255))), Transform::from_xyz(0.0, 0.5, 0.0), )); commands.spawn(( PointLight { shadows_enabled: true, ..default() }, Transform::from_xyz(4.0, 8.0, 4.0), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/2d_top_down_camera.rs
examples/camera/2d_top_down_camera.rs
//! This example showcases a 2D top-down camera with smooth player tracking. //! //! ## Controls //! //! | Key Binding | Action | //! |:---------------------|:--------------| //! | `W` | Move up | //! | `S` | Move down | //! | `A` | Move left | //! | `D` | Move right | use bevy::{post_process::bloom::Bloom, prelude::*}; /// Player movement speed factor. const PLAYER_SPEED: f32 = 100.; /// How quickly should the camera snap to the desired location. const CAMERA_DECAY_RATE: f32 = 2.; #[derive(Component)] struct Player; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, (setup_scene, setup_instructions, setup_camera)) .add_systems(Update, (move_player, update_camera).chain()) .run(); } fn setup_scene( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ColorMaterial>>, ) { // World where we move the player commands.spawn(( Mesh2d(meshes.add(Rectangle::new(1000., 700.))), MeshMaterial2d(materials.add(Color::srgb(0.2, 0.2, 0.3))), )); // Player commands.spawn(( Player, Mesh2d(meshes.add(Circle::new(25.))), MeshMaterial2d(materials.add(Color::srgb(6.25, 9.4, 9.1))), // RGB values exceed 1 to achieve a bright color for the bloom effect Transform::from_xyz(0., 0., 2.), )); } fn setup_instructions(mut commands: Commands) { commands.spawn(( Text::new("Move the light with WASD.\nThe camera will smoothly track the light."), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } fn setup_camera(mut commands: Commands) { commands.spawn((Camera2d, Bloom::NATURAL)); } /// Update the camera position by tracking the player. fn update_camera( mut camera: Single<&mut Transform, (With<Camera2d>, Without<Player>)>, player: Single<&Transform, (With<Player>, Without<Camera2d>)>, time: Res<Time>, ) { let Vec3 { x, y, .. 
} = player.translation; let direction = Vec3::new(x, y, camera.translation.z); // Applies a smooth effect to camera movement using stable interpolation // between the camera position and the player position on the x and y axes. camera .translation .smooth_nudge(&direction, CAMERA_DECAY_RATE, time.delta_secs()); } /// Update the player position with keyboard inputs. /// Note that the approach used here is for demonstration purposes only, /// as the point of this example is to showcase the camera tracking feature. /// /// A more robust solution for player movement can be found in `examples/movement/physics_in_fixed_timestep.rs`. fn move_player( mut player: Single<&mut Transform, With<Player>>, time: Res<Time>, kb_input: Res<ButtonInput<KeyCode>>, ) { let mut direction = Vec2::ZERO; if kb_input.pressed(KeyCode::KeyW) { direction.y += 1.; } if kb_input.pressed(KeyCode::KeyS) { direction.y -= 1.; } if kb_input.pressed(KeyCode::KeyA) { direction.x -= 1.; } if kb_input.pressed(KeyCode::KeyD) { direction.x += 1.; } // Progressively update the player's position over time. Normalize the // direction vector to prevent it from exceeding a magnitude of 1 when // moving diagonally. let move_delta = direction.normalize_or_zero() * PLAYER_SPEED * time.delta_secs(); player.translation += move_delta.extend(0.); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/2d_screen_shake.rs
examples/camera/2d_screen_shake.rs
//! This example showcases how to implement 2D screen shake. //! It follows the GDC talk ["Math for Game Programmers: Juicing Your Cameras With Math"](https://www.youtube.com/watch?v=tu-Qe66AvtY) by Squirrel Eiserloh //! //! The key features are: //! - Camera shake is dependent on a "trauma" value between 0.0 and 1.0. The more trauma, the stronger the shake. //! - Trauma automatically decays over time. //! - The camera shake will always only affect the camera `Transform` up to a maximum displacement. //! - The camera's `Transform` is only affected by the shake for the rendering. The `Transform` stays "normal" for the rest of the game logic. //! - All displacements are governed by a noise function, guaranteeing that the shake is smooth and continuous. //! This means that the camera won't jump around wildly. //! //! ## Controls //! //! | Key Binding | Action | //! |:---------------------------------|:---------------------------| //! | Space (pressed repeatedly) | Increase camera trauma | use bevy::{ input::common_conditions::input_just_pressed, math::ops::powf, prelude::*, sprite_render::MeshMaterial2d, }; // Before we implement the code, let's quickly introduce the underlying constants. // They are later encoded in a `CameraShakeConfig` component, but introduced here so we can easily tweak them. // Try playing around with them and see how the shake behaves! /// The trauma decay rate controls how quickly the trauma decays. /// 0.5 means that a full trauma of 1.0 will decay to 0.0 in 2 seconds. const TRAUMA_DECAY_PER_SECOND: f32 = 0.5; /// The trauma exponent controls how the trauma affects the shake. /// Camera shakes don't feel punchy when they go up linearly, so we use an exponent of 2.0. /// The higher the exponent, the more abrupt is the transition between no shake and full shake. const TRAUMA_EXPONENT: f32 = 2.0; /// The maximum angle the camera can rotate on full trauma. /// 10.0 degrees is a somewhat high but still reasonable shake. 
Try bigger values for something more silly and wiggly. const MAX_ANGLE: f32 = 10.0_f32.to_radians(); /// The maximum translation the camera will move on full trauma in both the x and y directions. /// 20.0 px is a low enough displacement to not be distracting. Try higher values for an effect that looks like the camera is wandering around. const MAX_TRANSLATION: f32 = 20.0; /// How much we are traversing the noise function in arbitrary units per second. /// This dictates how fast the camera shakes. /// 20.0 is a fairly fast shake. Try lower values for a more dreamy effect. const NOISE_SPEED: f32 = 20.0; /// How much trauma we add per press of the space key. /// A value of 1.0 would mean that a single press would result in a maximum trauma, i.e. 1.0. const TRAUMA_PER_PRESS: f32 = 0.4; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, (setup_scene, setup_instructions, setup_camera)) // At the start of the frame, restore the camera's transform to its unshaken state. .add_systems(PreUpdate, reset_transform) .add_systems( Update, // Increase trauma when the space key is pressed. increase_trauma.run_if(input_just_pressed(KeyCode::Space)), ) // Just before the end of the frame, apply the shake. // This is ordered so that the transform propagation produces correct values for the global transform, which is used by Bevy's rendering. .add_systems(PostUpdate, shake_camera.before(TransformSystems::Propagate)) .run(); } /// Let's start with the core mechanic: how do we shake the camera? /// This system runs right at the end of the frame, so that we can sneak in the shake effect before rendering kicks in. fn shake_camera( camera_shake: Single<(&mut CameraShakeState, &CameraShakeConfig, &mut Transform)>, time: Res<Time>, ) { let (mut camera_shake, config, mut transform) = camera_shake.into_inner(); // Before we even start thinking about the shake, we save the original transform so it's not lost. 
// At the start of the next frame, we will restore the camera's transform to this original transform. camera_shake.original_transform = *transform; // To generate the transform offset, we use a noise function. Noise is like a random number generator, but cooler. // Let's start with a visual intuition: <https://assets-global.website-files.com/64b6d182aee713bd0401f4b9/64b95974ec292aabac45fc8e_image.png> // The image on the left is made from pure randomness, the image on the right is made from a kind of noise called Perlin noise. // Notice how the noise has much more "structure" than the randomness? How it looks like it has peaks and valleys? // This property makes noise very desirable for a variety of visual effects. In our case, what we want is that the // camera does not wildly teleport around the world, but instead *moves* through the world frame by frame. // We can use 1D Perlin noise for this, which takes one input and outputs a value between -1.0 and 1.0. If we increase the input by a little bit, // like by the time since the last frame, we get a different output that is still "close" to the previous one. // This is the input to the noise function. Just using the elapsed time is pretty good input, // since it means that noise generations that are close in time will be close in output. // We simply multiply it by a constant to be able to "speed up" or "slow down" the noise. let t = time.elapsed_secs() * config.noise_speed; // Now we generate three noise values. One for the rotation, one for the x-offset, and one for the y-offset. // But if we generated those three noise values with the same input, we would get the same output three times! // To avoid this, we simply add a random offset to each input. // You can think of this as the seed value you would give a random number generator. 
let rotation_noise = perlin_noise::generate(t + 0.0); let x_noise = perlin_noise::generate(t + 100.0); let y_noise = perlin_noise::generate(t + 200.0); // Games often deal with linear increments. For example, if an enemy deals 10 damage and attacks you 2 times, you will take 20 damage. // But that's not how impact feels! Human senses are much more attuned to exponential changes. // So, we make sure that the `shake` value we use is an exponential function of the trauma. // But doesn't this make the value explode? Fortunately not: since `trauma` is between 0.0 and 1.0, exponentiating it will actually make it smaller! // See <https://www.wolframalpha.com/input?i=plot+x+and+x%5E2+and+x%5E3+for+x+in+%5B0%2C+1%5D> for a graph. let shake = powf(camera_shake.trauma, config.exponent); // Now, to get the final offset, we multiply this noise value by the shake value and the maximum value. // The noise value is in [-1, 1], so by multiplying it with a maximum value, we get a value in [-max_value, +max_value]. // Multiply this by the shake value to get the exponential effect, and we're done! let roll_offset = rotation_noise * shake * config.max_angle; let x_offset = x_noise * shake * config.max_translation; let y_offset = y_noise * shake * config.max_translation; // Finally, we apply the offset to the camera's transform. Since we already stored the original transform, // and this system runs right at the end of the frame, we can't accidentally break any game logic by changing the transform. transform.translation.x += x_offset; transform.translation.y += y_offset; transform.rotate_z(roll_offset); // Some bookkeeping at the end: trauma should decay over time. camera_shake.trauma -= config.trauma_decay_per_second * time.delta_secs(); camera_shake.trauma = camera_shake.trauma.clamp(0.0, 1.0); } /// Increase the trauma when the space key is pressed. 
fn increase_trauma(mut camera_shake: Single<&mut CameraShakeState>) { camera_shake.trauma += TRAUMA_PER_PRESS; camera_shake.trauma = camera_shake.trauma.clamp(0.0, 1.0); } /// Restore the camera's transform to its unshaken state. /// Runs at the start of the frame, so that gameplay logic doesn't need to care about camera shake. fn reset_transform(camera_shake: Single<(&CameraShakeState, &mut Transform)>) { let (camera_shake, mut transform) = camera_shake.into_inner(); *transform = camera_shake.original_transform; } /// The current state of the camera shake that is updated every frame. #[derive(Component, Debug, Default)] struct CameraShakeState { /// The current trauma level in [0.0, 1.0]. trauma: f32, /// The original transform of the camera before applying the shake. /// We store this so that we can restore the camera's transform to its original state at the start of the next frame. original_transform: Transform, } /// Configuration for the camera shake. /// See the constants at the top of the file for some good default values and detailed explanations. #[derive(Component, Debug)] #[require(CameraShakeState)] struct CameraShakeConfig { trauma_decay_per_second: f32, exponent: f32, max_angle: f32, max_translation: f32, noise_speed: f32, } fn setup_camera(mut commands: Commands) { commands.spawn(( Camera2d, // Enable camera shake for this camera. CameraShakeConfig { trauma_decay_per_second: TRAUMA_DECAY_PER_SECOND, exponent: TRAUMA_EXPONENT, max_angle: MAX_ANGLE, max_translation: MAX_TRANSLATION, noise_speed: NOISE_SPEED, }, )); } /// Spawn a scene so we have something to look at. fn setup_scene( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ColorMaterial>>, ) { // Background tile commands.spawn(( Mesh2d(meshes.add(Rectangle::new(1000., 700.))), MeshMaterial2d(materials.add(Color::srgb(0.2, 0.2, 0.3))), )); // The shape in the middle could be our player character. 
commands.spawn(( Mesh2d(meshes.add(Rectangle::new(50.0, 100.0))), MeshMaterial2d(materials.add(Color::srgb(0.25, 0.94, 0.91))), Transform::from_xyz(0., 0., 2.), )); // These two shapes could be obstacles. commands.spawn(( Mesh2d(meshes.add(Rectangle::new(50.0, 50.0))), MeshMaterial2d(materials.add(Color::srgb(0.85, 0.0, 0.2))), Transform::from_xyz(-450.0, 200.0, 2.), )); commands.spawn(( Mesh2d(meshes.add(Rectangle::new(70.0, 50.0))), MeshMaterial2d(materials.add(Color::srgb(0.5, 0.8, 0.2))), Transform::from_xyz(450.0, -150.0, 2.), )); } fn setup_instructions(mut commands: Commands) { commands.spawn(( Text::new("Press space repeatedly to trigger a progressively stronger screen shake"), Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }, )); } /// Tiny 1D Perlin noise implementation. The mathematical details are not important here. mod perlin_noise { use super::*; pub fn generate(x: f32) -> f32 { // Left coordinate of the unit-line that contains the input. let x_floor = x.floor() as usize; // Input location in the unit-line. let xf0 = x - x_floor as f32; let xf1 = xf0 - 1.0; // Wrap to range 0-255. let xi0 = x_floor & 0xFF; let xi1 = (x_floor + 1) & 0xFF; // Apply the fade function to the location. let t = fade(xf0).clamp(0.0, 1.0); // Generate hash values for each point of the unit-line. let h0 = PERMUTATION_TABLE[xi0]; let h1 = PERMUTATION_TABLE[xi1]; // Linearly interpolate between dot products of each gradient with its distance to the input location. let a = dot_grad(h0, xf0); let b = dot_grad(h1, xf1); a.interpolate_stable(&b, t) } // A cubic curve that smoothly transitions from 0 to 1 as t goes from 0 to 1 fn fade(t: f32) -> f32 { t * t * t * (t * (t * 6.0 - 15.0) + 10.0) } fn dot_grad(hash: u8, xf: f32) -> f32 { // In 1D case, the gradient may be either 1 or -1. // The distance vector is the input offset (relative to the smallest bound). if hash & 0x1 != 0 { xf } else { -xf } } // Perlin noise permutation table. 
This is a random sequence of the numbers 0-255. const PERMUTATION_TABLE: [u8; 256] = [ 0x97, 0xA0, 0x89, 0x5B, 0x5A, 0x0F, 0x83, 0x0D, 0xC9, 0x5F, 0x60, 0x35, 0xC2, 0xE9, 0x07, 0xE1, 0x8C, 0x24, 0x67, 0x1E, 0x45, 0x8E, 0x08, 0x63, 0x25, 0xF0, 0x15, 0x0A, 0x17, 0xBE, 0x06, 0x94, 0xF7, 0x78, 0xEA, 0x4B, 0x00, 0x1A, 0xC5, 0x3E, 0x5E, 0xFC, 0xDB, 0xCB, 0x75, 0x23, 0x0B, 0x20, 0x39, 0xB1, 0x21, 0x58, 0xED, 0x95, 0x38, 0x57, 0xAE, 0x14, 0x7D, 0x88, 0xAB, 0xA8, 0x44, 0xAF, 0x4A, 0xA5, 0x47, 0x86, 0x8B, 0x30, 0x1B, 0xA6, 0x4D, 0x92, 0x9E, 0xE7, 0x53, 0x6F, 0xE5, 0x7A, 0x3C, 0xD3, 0x85, 0xE6, 0xDC, 0x69, 0x5C, 0x29, 0x37, 0x2E, 0xF5, 0x28, 0xF4, 0x66, 0x8F, 0x36, 0x41, 0x19, 0x3F, 0xA1, 0x01, 0xD8, 0x50, 0x49, 0xD1, 0x4C, 0x84, 0xBB, 0xD0, 0x59, 0x12, 0xA9, 0xC8, 0xC4, 0x87, 0x82, 0x74, 0xBC, 0x9F, 0x56, 0xA4, 0x64, 0x6D, 0xC6, 0xAD, 0xBA, 0x03, 0x40, 0x34, 0xD9, 0xE2, 0xFA, 0x7C, 0x7B, 0x05, 0xCA, 0x26, 0x93, 0x76, 0x7E, 0xFF, 0x52, 0x55, 0xD4, 0xCF, 0xCE, 0x3B, 0xE3, 0x2F, 0x10, 0x3A, 0x11, 0xB6, 0xBD, 0x1C, 0x2A, 0xDF, 0xB7, 0xAA, 0xD5, 0x77, 0xF8, 0x98, 0x02, 0x2C, 0x9A, 0xA3, 0x46, 0xDD, 0x99, 0x65, 0x9B, 0xA7, 0x2B, 0xAC, 0x09, 0x81, 0x16, 0x27, 0xFD, 0x13, 0x62, 0x6C, 0x6E, 0x4F, 0x71, 0xE0, 0xE8, 0xB2, 0xB9, 0x70, 0x68, 0xDA, 0xF6, 0x61, 0xE4, 0xFB, 0x22, 0xF2, 0xC1, 0xEE, 0xD2, 0x90, 0x0C, 0xBF, 0xB3, 0xA2, 0xF1, 0x51, 0x33, 0x91, 0xEB, 0xF9, 0x0E, 0xEF, 0x6B, 0x31, 0xC0, 0xD6, 0x1F, 0xB5, 0xC7, 0x6A, 0x9D, 0xB8, 0x54, 0xCC, 0xB0, 0x73, 0x79, 0x32, 0x2D, 0x7F, 0x04, 0x96, 0xFE, 0x8A, 0xEC, 0xCD, 0x5D, 0xDE, 0x72, 0x43, 0x1D, 0x18, 0x48, 0xF3, 0x8D, 0x80, 0xC3, 0x4E, 0x42, 0xD7, 0x3D, 0x9C, 0xB4, ]; }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/2d_on_ui.rs
examples/camera/2d_on_ui.rs
//! This example shows how to render 2D objects on top of Bevy UI, by using a second camera with a higher `order` than the UI camera. use bevy::{camera::visibility::RenderLayers, color::palettes::tailwind, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, rotate_sprite) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { // The default camera. `IsDefaultUiCamera` makes this the default camera to render UI elements to. Alternatively, you can add the `UiTargetCamera` component to root UI nodes to define which camera they should be rendered to. commands.spawn((Camera2d, IsDefaultUiCamera)); // The second camera. The higher order means that this camera will be rendered after the first camera. We will render to this camera to draw on top of the UI. commands.spawn(( Camera2d, Camera { order: 1, // Don't draw anything in the background, to see the previous camera. clear_color: ClearColorConfig::None, ..default() }, // This camera will only render entities which are on the same render layer. RenderLayers::layer(1), )); commands.spawn(( // We could also use a `UiTargetCamera` component here instead of the general `IsDefaultUiCamera`. Node { width: percent(100), height: percent(100), display: Display::Flex, justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..default() }, BackgroundColor(tailwind::ROSE_400.into()), children![( Node { height: percent(30), width: percent(20), min_height: px(150), min_width: px(150), border: UiRect::all(px(2)), border_radius: BorderRadius::all(percent(25)), ..default() }, BorderColor::all(Color::WHITE), )], )); // This 2D object will be rendered on the second camera, on top of the default camera where the UI is rendered. 
commands.spawn(( Sprite { image: asset_server.load("textures/rpg/chars/sensei/sensei.png"), custom_size: Some(Vec2::new(100., 100.)), ..default() }, RenderLayers::layer(1), )); } fn rotate_sprite(time: Res<Time>, mut sprite: Single<&mut Transform, With<Sprite>>) { // Use any of the regular 2D rendering features, for example rotating a sprite via its `Transform`. sprite.rotation *= Quat::from_rotation_z(time.delta_secs() * 0.5) * Quat::from_rotation_y(time.delta_secs()); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/camera_orbit.rs
examples/camera/camera_orbit.rs
//! Shows how to orbit camera around a static scene using pitch, yaw, and roll. //! //! See also: `first_person_view_model` example, which does something similar but as a first-person //! camera view. use std::{f32::consts::FRAC_PI_2, ops::Range}; use bevy::{input::mouse::AccumulatedMouseMotion, prelude::*}; #[derive(Debug, Resource)] struct CameraSettings { pub orbit_distance: f32, pub pitch_speed: f32, // Clamp pitch to this range pub pitch_range: Range<f32>, pub roll_speed: f32, pub yaw_speed: f32, } impl Default for CameraSettings { fn default() -> Self { // Limiting pitch stops some unexpected rotation past 90° up or down. let pitch_limit = FRAC_PI_2 - 0.01; Self { // These values are completely arbitrary, chosen because they seem to produce // "sensible" results for this example. Adjust as required. orbit_distance: 20.0, pitch_speed: 0.003, pitch_range: -pitch_limit..pitch_limit, roll_speed: 1.0, yaw_speed: 0.004, } } } fn main() { App::new() .add_plugins(DefaultPlugins) .init_resource::<CameraSettings>() .add_systems(Startup, (setup, instructions)) .add_systems(Update, orbit) .run(); } /// Set up a simple 3D scene fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { commands.spawn(( Name::new("Camera"), Camera3d::default(), Transform::from_xyz(5.0, 5.0, 5.0).looking_at(Vec3::ZERO, Vec3::Y), )); commands.spawn(( Name::new("Plane"), Mesh3d(meshes.add(Plane3d::default().mesh().size(5.0, 5.0))), MeshMaterial3d(materials.add(StandardMaterial { base_color: Color::srgb(0.3, 0.5, 0.3), // Turning off culling keeps the plane visible when viewed from beneath. 
cull_mode: None, ..default() })), )); commands.spawn(( Name::new("Cube"), Mesh3d(meshes.add(Cuboid::default())), MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))), Transform::from_xyz(1.5, 0.51, 1.5), )); commands.spawn(( Name::new("Light"), PointLight::default(), Transform::from_xyz(3.0, 8.0, 5.0), )); } fn instructions(mut commands: Commands) { commands.spawn(( Name::new("Instructions"), Text::new( "Mouse up or down: pitch\n\ Mouse left or right: yaw\n\ Mouse buttons: roll", ), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } fn orbit( mut camera: Single<&mut Transform, With<Camera>>, camera_settings: Res<CameraSettings>, mouse_buttons: Res<ButtonInput<MouseButton>>, mouse_motion: Res<AccumulatedMouseMotion>, time: Res<Time>, ) { let delta = mouse_motion.delta; let mut delta_roll = 0.0; if mouse_buttons.pressed(MouseButton::Left) { delta_roll -= 1.0; } if mouse_buttons.pressed(MouseButton::Right) { delta_roll += 1.0; } // Mouse motion is one of the few inputs that should not be multiplied by delta time, // as we are already receiving the full movement since the last frame was rendered. Multiplying // by delta time here would make the movement slower that it should be. let delta_pitch = delta.y * camera_settings.pitch_speed; let delta_yaw = delta.x * camera_settings.yaw_speed; // Conversely, we DO need to factor in delta time for mouse button inputs. delta_roll *= camera_settings.roll_speed * time.delta_secs(); // Obtain the existing pitch, yaw, and roll values from the transform. let (yaw, pitch, roll) = camera.rotation.to_euler(EulerRot::YXZ); // Establish the new yaw and pitch, preventing the pitch value from exceeding our limits. 
let pitch = (pitch + delta_pitch).clamp( camera_settings.pitch_range.start, camera_settings.pitch_range.end, ); let roll = roll + delta_roll; let yaw = yaw + delta_yaw; camera.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll); // Adjust the translation to maintain the correct orientation toward the orbit target. // In our example it's a static target, but this could easily be customized. let target = Vec3::ZERO; camera.translation = target - camera.forward() * camera_settings.orbit_distance; }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/first_person_view_model.rs
examples/camera/first_person_view_model.rs
//! This example showcases a 3D first-person camera. //! //! The setup presented here is a very common way of organizing a first-person game //! where the player can see their own arms. We use two industry terms to differentiate //! the kinds of models we have: //! //! - The *view model* is the model that represents the player's body. //! - The *world model* is everything else. //! //! ## Motivation //! //! The reason for this distinction is that these two models should be rendered with different field of views (FOV). //! The view model is typically designed and animated with a very specific FOV in mind, so it is //! generally *fixed* and cannot be changed by a player. The world model, on the other hand, should //! be able to change its FOV to accommodate the player's preferences for the following reasons: //! - *Accessibility*: How prone is the player to motion sickness? A wider FOV can help. //! - *Tactical preference*: Does the player want to see more of the battlefield? //! Or have a more zoomed-in view for precision aiming? //! - *Physical considerations*: How well does the in-game FOV match the player's real-world FOV? //! Are they sitting in front of a monitor or playing on a TV in the living room? How big is the screen? //! //! ## Implementation //! //! The `Player` is an entity holding two cameras, one for each model. The view model camera has a fixed //! FOV of 70 degrees, while the world model camera has a variable FOV that can be changed by the player. //! //! We use different `RenderLayers` to select what to render. //! //! - The world model camera has no explicit `RenderLayers` component, so it uses the layer 0. //! All static objects in the scene are also on layer 0 for the same reason. //! - The view model camera has a `RenderLayers` component with layer 1, so it only renders objects //! explicitly assigned to layer 1. The arm of the player is one such object. //! 
The order of the view model camera is additionally bumped to 1 to ensure it renders on top of the world model. //! - The light source in the scene must illuminate both the view model and the world model, so it is //! assigned to both layers 0 and 1. //! //! ## Controls //! //! | Key Binding | Action | //! |:---------------------|:--------------| //! | mouse | Look around | //! | arrow up | Decrease FOV | //! | arrow down | Increase FOV | use std::f32::consts::FRAC_PI_2; use bevy::{ camera::visibility::RenderLayers, color::palettes::tailwind, input::mouse::AccumulatedMouseMotion, light::NotShadowCaster, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems( Startup, ( spawn_view_model, spawn_world_model, spawn_lights, spawn_text, ), ) .add_systems(Update, (move_player, change_fov)) .run(); } #[derive(Debug, Component)] struct Player; #[derive(Debug, Component, Deref, DerefMut)] struct CameraSensitivity(Vec2); impl Default for CameraSensitivity { fn default() -> Self { Self( // These factors are just arbitrary mouse sensitivity values. // It's often nicer to have a faster horizontal sensitivity than vertical. // We use a component for them so that we can make them user-configurable at runtime // for accessibility reasons. // It also allows you to inspect them in an editor if you `Reflect` the component. Vec2::new(0.003, 0.002), ) } } #[derive(Debug, Component)] struct WorldModelCamera; /// Used implicitly by all entities without a `RenderLayers` component. /// Our world model camera and all objects other than the player are on this layer. /// The light source belongs to both layers. const DEFAULT_RENDER_LAYER: usize = 0; /// Used by the view model camera and the player's arm. /// The light source belongs to both layers. 
const VIEW_MODEL_RENDER_LAYER: usize = 1; fn spawn_view_model( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let arm = meshes.add(Cuboid::new(0.1, 0.1, 0.5)); let arm_material = materials.add(Color::from(tailwind::TEAL_200)); commands.spawn(( Player, CameraSensitivity::default(), Transform::from_xyz(0.0, 1.0, 0.0), Visibility::default(), children![ ( WorldModelCamera, Camera3d::default(), Projection::from(PerspectiveProjection { fov: 90.0_f32.to_radians(), ..default() }), ), // Spawn view model camera. ( Camera3d::default(), Camera { // Bump the order to render on top of the world model. order: 1, ..default() }, Projection::from(PerspectiveProjection { fov: 70.0_f32.to_radians(), ..default() }), // Only render objects belonging to the view model. RenderLayers::layer(VIEW_MODEL_RENDER_LAYER), ), // Spawn the player's right arm. ( Mesh3d(arm), MeshMaterial3d(arm_material), Transform::from_xyz(0.2, -0.1, -0.25), // Ensure the arm is only rendered by the view model camera. RenderLayers::layer(VIEW_MODEL_RENDER_LAYER), // The arm is free-floating, so shadows would look weird. NotShadowCaster, ), ], )); } fn spawn_world_model( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<StandardMaterial>>, ) { let floor = meshes.add(Plane3d::new(Vec3::Y, Vec2::splat(10.0))); let cube = meshes.add(Cuboid::new(2.0, 0.5, 1.0)); let material = materials.add(Color::WHITE); // The world model camera will render the floor and the cubes spawned in this system. // Assigning no `RenderLayers` component defaults to layer 0. 
commands.spawn((Mesh3d(floor), MeshMaterial3d(material.clone()))); commands.spawn(( Mesh3d(cube.clone()), MeshMaterial3d(material.clone()), Transform::from_xyz(0.0, 0.25, -3.0), )); commands.spawn(( Mesh3d(cube), MeshMaterial3d(material), Transform::from_xyz(0.75, 1.75, 0.0), )); } fn spawn_lights(mut commands: Commands) { commands.spawn(( PointLight { color: Color::from(tailwind::ROSE_300), shadows_enabled: true, ..default() }, Transform::from_xyz(-2.0, 4.0, -0.75), // The light source illuminates both the world model and the view model. RenderLayers::from_layers(&[DEFAULT_RENDER_LAYER, VIEW_MODEL_RENDER_LAYER]), )); } fn spawn_text(mut commands: Commands) { commands .spawn(Node { position_type: PositionType::Absolute, bottom: px(12), left: px(12), ..default() }) .with_child(Text::new(concat!( "Move the camera with your mouse.\n", "Press arrow up to decrease the FOV of the world model.\n", "Press arrow down to increase the FOV of the world model." ))); } fn move_player( accumulated_mouse_motion: Res<AccumulatedMouseMotion>, player: Single<(&mut Transform, &CameraSensitivity), With<Player>>, ) { let (mut transform, camera_sensitivity) = player.into_inner(); let delta = accumulated_mouse_motion.delta; if delta != Vec2::ZERO { // Note that we are not multiplying by delta_time here. // The reason is that for mouse movement, we already get the full movement that happened since the last frame. // This means that if we multiply by delta_time, we will get a smaller rotation than intended by the user. // This situation is reversed when reading e.g. analog input from a gamepad however, where the same rules // as for keyboard input apply. Such an input should be multiplied by delta_time to get the intended rotation // independent of the framerate. 
let delta_yaw = -delta.x * camera_sensitivity.x; let delta_pitch = -delta.y * camera_sensitivity.y; let (yaw, pitch, roll) = transform.rotation.to_euler(EulerRot::YXZ); let yaw = yaw + delta_yaw; // If the pitch was ±¹⁄₂ π, the camera would look straight up or down. // When the user wants to move the camera back to the horizon, which way should the camera face? // The camera has no way of knowing what direction was "forward" before landing in that extreme position, // so the direction picked will for all intents and purposes be arbitrary. // Another issue is that for mathematical reasons, the yaw will effectively be flipped when the pitch is at the extremes. // To not run into these issues, we clamp the pitch to a safe range. const PITCH_LIMIT: f32 = FRAC_PI_2 - 0.01; let pitch = (pitch + delta_pitch).clamp(-PITCH_LIMIT, PITCH_LIMIT); transform.rotation = Quat::from_euler(EulerRot::YXZ, yaw, pitch, roll); } } fn change_fov( input: Res<ButtonInput<KeyCode>>, mut world_model_projection: Single<&mut Projection, With<WorldModelCamera>>, ) { let Projection::Perspective(perspective) = world_model_projection.as_mut() else { unreachable!( "The `Projection` component was explicitly built with `Projection::Perspective`" ); }; if input.pressed(KeyCode::ArrowUp) { perspective.fov -= 1.0_f32.to_radians(); perspective.fov = perspective.fov.max(20.0_f32.to_radians()); } if input.pressed(KeyCode::ArrowDown) { perspective.fov += 1.0_f32.to_radians(); perspective.fov = perspective.fov.min(160.0_f32.to_radians()); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/camera/pan_camera_controller.rs
examples/camera/pan_camera_controller.rs
//! Example for `PanCamera`, demonstrating basic camera controls such as panning and zooming. //! //! This example shows how to use the `PanCamera` controller on a 2D camera in Bevy. The camera //! can be panned with keyboard inputs (arrow keys or WASD) and zoomed in/out using the mouse //! wheel or the +/- keys. The camera starts with the default `PanCamera` settings, which can //! be customized. //! //! Controls: //! - Arrow keys (or WASD) to pan the camera. //! - Mouse scroll wheel or +/- to zoom in/out. use bevy::camera_controller::pan_camera::{PanCamera, PanCameraPlugin}; use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins) .add_plugins(PanCameraPlugin) // Adds the PanCamera plugin to enable camera panning and zooming controls. .add_systems(Startup, (setup, spawn_text).chain()) .run(); } fn spawn_text(mut commands: Commands, camera: Query<&PanCamera>) { commands.spawn(( Node { position_type: PositionType::Absolute, top: px(-16), left: px(12), ..default() }, children![Text::new(format!("{}", camera.single().unwrap()))], )); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { // Spawn a 2D Camera with default PanCamera settings commands.spawn((Camera2d, PanCamera::default())); commands.spawn(Sprite::from_image( asset_server.load("branding/bevy_bird_dark.png"), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/no_std/library/src/lib.rs
examples/no_std/library/src/lib.rs
//! Example `no_std` compatible Bevy library. // The first step to a `no_std` library is to add this annotation: #![no_std] // This does 2 things to your crate: // 1. It prevents automatically linking the `std` crate with yours. // 2. It switches to `core::prelude` instead of `std::prelude` for what is implicitly // imported in all modules in your crate. // It is common to want to use `std` when it's available, and fall-back to an alternative // implementation which may make compromises for the sake of compatibility. // To do this, you can conditionally re-include the standard library: #[cfg(feature = "std")] extern crate std; // This still uses the `core` prelude, so items such as `std::println` aren't implicitly included // in all your modules, but it does make them available to import. // Because Bevy requires access to an allocator anyway, you are free to include `alloc` regardless // of what features are enabled. // This gives you access to `Vec`, `String`, `Box`, and many other allocation primitives. extern crate alloc; // Here's our first example of using something from `core` instead of `std`. // Since `std` re-exports `core` items, they are the same type just with a different name. // This means any 3rd party code written for `std::time::Duration` will work identically for // `core::time::Duration`. use core::time::Duration; // With the above boilerplate out of the way, everything below should look very familiar to those // who have worked with Bevy before. use bevy::prelude::*; // While this example doesn't need it, a lot of fundamental types which are exclusively in `std` // have alternatives in `bevy::platform`. // If you find yourself needing a `HashMap`, `RwLock`, or `Instant`, check there first! 
#[expect(unused_imports, reason = "demonstrating some available items")] use bevy::platform::{ collections::{HashMap, HashSet}, hash::DefaultHasher, sync::{ atomic::{AtomicBool, AtomicUsize}, Arc, Barrier, LazyLock, Mutex, Once, OnceLock, RwLock, Weak, }, time::Instant, }; // Note that `bevy::platform::sync::Arc` exists, despite `alloc::sync::Arc` being available. // The reason is not every platform has full support for atomic operations, so `Arc`, `AtomicBool`, // etc. aren't always available. // You can test for their inclusion with `#[cfg(target_has_atomic = "ptr")]` and other related flags. // You can get a more cross-platform alternative from `portable-atomic`, but Bevy handles this for you! // Simply use `bevy::platform::sync` instead of `core::sync` and `alloc::sync` when possible, // and Bevy will handle selecting the fallback from `portable-atomic` when it is required. /// Plugin for working with delayed components. /// /// You can delay the insertion of a component by using [`insert_delayed`](EntityCommandsExt::insert_delayed). pub struct DelayedComponentPlugin; impl Plugin for DelayedComponentPlugin { fn build(&self, app: &mut App) { app.add_systems(Update, tick_timers); } } /// Extension trait providing [`insert_delayed`](EntityCommandsExt::insert_delayed). pub trait EntityCommandsExt { /// Insert the provided [`Bundle`] `B` with a provided `delay`. 
fn insert_delayed<B: Bundle>(&mut self, bundle: B, delay: Duration) -> &mut Self; } impl EntityCommandsExt for EntityCommands<'_> { fn insert_delayed<B: Bundle>(&mut self, bundle: B, delay: Duration) -> &mut Self { self.insert(( DelayedComponentTimer(Timer::new(delay, TimerMode::Once)), DelayedComponent(bundle), )) .observe(unwrap::<B>) } } impl EntityCommandsExt for EntityWorldMut<'_> { fn insert_delayed<B: Bundle>(&mut self, bundle: B, delay: Duration) -> &mut Self { self.insert(( DelayedComponentTimer(Timer::new(delay, TimerMode::Once)), DelayedComponent(bundle), )) .observe(unwrap::<B>) } } #[derive(Component, Deref, DerefMut, Reflect, Debug)] #[reflect(Component)] struct DelayedComponentTimer(Timer); #[derive(Component)] #[component(immutable)] struct DelayedComponent<B: Bundle>(B); #[derive(EntityEvent)] struct Unwrap { entity: Entity, } fn tick_timers( mut commands: Commands, mut query: Query<(Entity, &mut DelayedComponentTimer)>, time: Res<Time>, ) { for (entity, mut timer) in &mut query { timer.tick(time.delta()); if timer.just_finished() { commands.entity(entity).remove::<DelayedComponentTimer>(); commands.trigger(Unwrap { entity }); } } } fn unwrap<B: Bundle>(event: On<Unwrap>, world: &mut World) { if let Ok(mut target) = world.get_entity_mut(event.event_target()) && let Some(DelayedComponent(bundle)) = target.take::<DelayedComponent<B>>() { target.insert(bundle); } world.despawn(event.observer()); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/time/time.rs
examples/time/time.rs
//! An example that illustrates how Time is handled in ECS. use bevy::{app::AppExit, prelude::*}; use std::{ io::{self, BufRead}, time::Duration, }; fn banner() { println!("This example is meant to intuitively demonstrate how Time works in Bevy."); println!(); println!("Time will be printed in three different schedules in the app:"); println!("- PreUpdate: real time is printed"); println!("- FixedUpdate: fixed time step time is printed, may be run zero or multiple times"); println!("- Update: virtual game time is printed"); println!(); println!("Max delta time is set to 5 seconds. Fixed timestep is set to 1 second."); println!(); } fn help() { println!("The app reads commands line-by-line from standard input."); println!(); println!("Commands:"); println!(" empty line: Run app.update() once on the Bevy App"); println!(" q: Quit the app."); println!(" f: Set speed to fast, 2x"); println!(" n: Set speed to normal, 1x"); println!(" s: Set speed to slow, 0.5x"); println!(" p: Pause"); println!(" u: Unpause"); } fn runner(mut app: App) -> AppExit { banner(); help(); let stdin = io::stdin(); for line in stdin.lock().lines() { if let Err(err) = line { println!("read err: {err:#}"); break; } match line.unwrap().as_str() { "" => { app.update(); } "f" => { println!("FAST: setting relative speed to 2x"); app.world_mut() .resource_mut::<Time<Virtual>>() .set_relative_speed(2.0); } "n" => { println!("NORMAL: setting relative speed to 1x"); app.world_mut() .resource_mut::<Time<Virtual>>() .set_relative_speed(1.0); } "s" => { println!("SLOW: setting relative speed to 0.5x"); app.world_mut() .resource_mut::<Time<Virtual>>() .set_relative_speed(0.5); } "p" => { println!("PAUSE: pausing virtual clock"); app.world_mut().resource_mut::<Time<Virtual>>().pause(); } "u" => { println!("UNPAUSE: resuming virtual clock"); app.world_mut().resource_mut::<Time<Virtual>>().unpause(); } "q" => { println!("QUITTING!"); break; } _ => { help(); } } } AppExit::Success } fn print_real_time(time: 
Res<Time<Real>>) { println!( "PreUpdate: this is real time clock, delta is {:?} and elapsed is {:?}", time.delta(), time.elapsed() ); } fn print_fixed_time(time: Res<Time>) { println!( "FixedUpdate: this is generic time clock inside fixed, delta is {:?} and elapsed is {:?}", time.delta(), time.elapsed() ); } fn print_time(time: Res<Time>) { println!( "Update: this is generic time clock, delta is {:?} and elapsed is {:?}", time.delta(), time.elapsed() ); } fn main() { App::new() .add_plugins(MinimalPlugins) .insert_resource(Time::<Virtual>::from_max_delta(Duration::from_secs(5))) .insert_resource(Time::<Fixed>::from_duration(Duration::from_secs(1))) .add_systems(PreUpdate, print_real_time) .add_systems(FixedUpdate, print_fixed_time) .add_systems(Update, print_time) .set_runner(runner) .run(); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/time/virtual_time.rs
examples/time/virtual_time.rs
//! Shows how `Time<Virtual>` can be used to pause, resume, slow down //! and speed up a game. use std::time::Duration; use bevy::{ color::palettes::css::*, input::common_conditions::input_just_pressed, prelude::*, time::common_conditions::on_real_timer, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems( Update, ( move_virtual_time_sprites, move_real_time_sprites, toggle_pause.run_if(input_just_pressed(KeyCode::Space)), change_time_speed::<1>.run_if(input_just_pressed(KeyCode::ArrowUp)), change_time_speed::<-1>.run_if(input_just_pressed(KeyCode::ArrowDown)), (update_virtual_time_info_text, update_real_time_info_text) // update the texts on a timer to make them more readable. // `on_timer` run condition uses `Virtual` time meaning it's scaled // and would result in the UI updating at different intervals based // on `Time<Virtual>::relative_speed` and `Time<Virtual>::is_paused()` .run_if(on_real_timer(Duration::from_millis(250))), ), ) .run(); } /// `Real` time related marker #[derive(Component)] struct RealTime; /// `Virtual` time related marker #[derive(Component)] struct VirtualTime; /// Setup the example fn setup(mut commands: Commands, asset_server: Res<AssetServer>, mut time: ResMut<Time<Virtual>>) { // start with double `Virtual` time resulting in one of the sprites moving at twice the speed // of the other sprite which moves based on `Real` (unscaled) time time.set_relative_speed(2.); commands.spawn(Camera2d); let virtual_color = GOLD.into(); let sprite_scale = Vec2::splat(0.5).extend(1.); let texture_handle = asset_server.load("branding/icon.png"); // the sprite moving based on real time commands.spawn(( Sprite::from_image(texture_handle.clone()), Transform::from_scale(sprite_scale), RealTime, )); // the sprite moving based on virtual time commands.spawn(( Sprite { image: texture_handle, color: virtual_color, ..Default::default() }, Transform { scale: sprite_scale, translation: Vec3::new(0., -160., 0.), 
..default() }, VirtualTime, )); // info UI let font_size = 33.; commands.spawn(( Node { display: Display::Flex, justify_content: JustifyContent::SpaceBetween, width: percent(100), position_type: PositionType::Absolute, top: px(0), padding: UiRect::all(px(20)), ..default() }, children![ ( Text::default(), TextFont { font_size, ..default() }, RealTime, ), ( Text::new("CONTROLS\n(Un)pause: Space\nSpeed+: Up\nSpeed-: Down"), TextFont { font_size, ..default() }, TextColor(Color::srgb(0.85, 0.85, 0.85)), TextLayout::new_with_justify(Justify::Center), ), ( Text::default(), TextFont { font_size, ..default() }, TextColor(virtual_color), TextLayout::new_with_justify(Justify::Right), VirtualTime, ), ], )); } /// Move sprites using `Real` (unscaled) time fn move_real_time_sprites( mut sprite_query: Query<&mut Transform, (With<Sprite>, With<RealTime>)>, // `Real` time which is not scaled or paused time: Res<Time<Real>>, ) { for mut transform in sprite_query.iter_mut() { // move roughly half the screen in a `Real` second // when the time is scaled the speed is going to change // and the sprite will stay still the time is paused transform.translation.x = get_sprite_translation_x(time.elapsed_secs()); } } /// Move sprites using `Virtual` (scaled) time fn move_virtual_time_sprites( mut sprite_query: Query<&mut Transform, (With<Sprite>, With<VirtualTime>)>, // the default `Time` is either `Time<Virtual>` in regular systems // or `Time<Fixed>` in fixed timestep systems so `Time::delta()`, // `Time::elapsed()` will return the appropriate values either way time: Res<Time>, ) { for mut transform in sprite_query.iter_mut() { // move roughly half the screen in a `Virtual` second // when time is scaled using `Time<Virtual>::set_relative_speed` it's going // to move at a different pace and the sprite will stay still when time is // `Time<Virtual>::is_paused()` transform.translation.x = get_sprite_translation_x(time.elapsed_secs()); } } fn get_sprite_translation_x(elapsed: f32) -> f32 { 
ops::sin(elapsed) * 500. } /// Update the speed of `Time<Virtual>.` by `DELTA` fn change_time_speed<const DELTA: i8>(mut time: ResMut<Time<Virtual>>) { let time_speed = (time.relative_speed() + DELTA as f32) .round() .clamp(0.25, 5.); // set the speed of the virtual time to speed it up or slow it down time.set_relative_speed(time_speed); } /// Pause or resume `Relative` time fn toggle_pause(mut time: ResMut<Time<Virtual>>) { time.toggle(); } /// Update the `Real` time info text fn update_real_time_info_text(time: Res<Time<Real>>, mut query: Query<&mut Text, With<RealTime>>) { for mut text in &mut query { **text = format!( "REAL TIME\nElapsed: {:.1}\nDelta: {:.5}\n", time.elapsed_secs(), time.delta_secs(), ); } } /// Update the `Virtual` time info text fn update_virtual_time_info_text( time: Res<Time<Virtual>>, mut query: Query<&mut Text, With<VirtualTime>>, ) { for mut text in &mut query { **text = format!( "VIRTUAL TIME\nElapsed: {:.1}\nDelta: {:.5}\nSpeed: {:.2}", time.elapsed_secs(), time.delta_secs(), time.relative_speed() ); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/time/timers.rs
examples/time/timers.rs
//! Illustrates how `Timer`s can be used both as resources and components. use bevy::{log::info, prelude::*}; fn main() { App::new() .add_plugins(DefaultPlugins) .init_resource::<Countdown>() .add_systems(Startup, setup) .add_systems(Update, (countdown, print_when_completed)) .run(); } #[derive(Component, Deref, DerefMut)] struct PrintOnCompletionTimer(Timer); #[derive(Resource)] struct Countdown { percent_trigger: Timer, main_timer: Timer, } impl Countdown { pub fn new() -> Self { Self { percent_trigger: Timer::from_seconds(4.0, TimerMode::Repeating), main_timer: Timer::from_seconds(20.0, TimerMode::Once), } } } impl Default for Countdown { fn default() -> Self { Self::new() } } fn setup(mut commands: Commands) { // Add an entity to the world with a timer commands.spawn(PrintOnCompletionTimer(Timer::from_seconds( 5.0, TimerMode::Once, ))); } /// This system ticks the `Timer` on the entity with the `PrintOnCompletionTimer` /// component using bevy's `Time` resource to get the delta between each update. fn print_when_completed(time: Res<Time>, mut query: Query<&mut PrintOnCompletionTimer>) { for mut timer in &mut query { if timer.tick(time.delta()).just_finished() { info!("Entity timer just finished"); } } } /// This system controls ticking the timer within the countdown resource and /// handling its state. fn countdown(time: Res<Time>, mut countdown: ResMut<Countdown>) { countdown.main_timer.tick(time.delta()); // The API encourages this kind of timer state checking (if you're only checking for one value) // Additionally, `is_finished()` would accomplish the same thing as `just_finished` due to the // timer being repeating, however this makes more sense visually. if countdown.percent_trigger.tick(time.delta()).just_finished() { if !countdown.main_timer.is_finished() { // Print the percent complete the main timer is. 
info!( "Timer is {:0.0}% complete!", countdown.main_timer.fraction() * 100.0 ); } else { // The timer has finished so we pause the percent output timer countdown.percent_trigger.pause(); info!("Paused percent trigger timer"); } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/scene/scene.rs
examples/scene/scene.rs
//! This example demonstrates how to load scene data from files and then dynamically //! apply that data to entities in your Bevy `World`. This includes spawning new //! entities and applying updates to existing ones. Scenes in Bevy encapsulate //! serialized and deserialized `Components` or `Resources` so that you can easily //! store, load, and manipulate data outside of a purely code-driven context. //! //! This example also shows how to do the following: //! * Register your custom types for reflection, which allows them to be serialized, //! deserialized, and manipulated dynamically. //! * Skip serialization of fields you don't want stored in your scene files (like //! runtime values that should always be computed dynamically). //! * Save a new scene to disk to show how it can be updated compared to the original //! scene file (and how that updated scene file might then be used later on). //! //! The example proceeds by creating components and resources, registering their types, //! loading a scene from a file, logging when changes are detected, and finally saving //! a new scene file to disk. This is useful for anyone wanting to see how to integrate //! file-based scene workflows into their Bevy projects. //! //! # Note on working with files //! //! The saving behavior uses the standard filesystem APIs, which are blocking, so it //! utilizes a thread pool (`IoTaskPool`) to avoid stalling the main thread. This //! won't work on WASM because WASM typically doesn't have direct filesystem access. //! use bevy::{asset::LoadState, prelude::*, tasks::IoTaskPool}; use core::time::Duration; use std::{fs::File, io::Write}; /// The entry point of our Bevy app. /// /// Sets up default plugins, registers all necessary component/resource types /// for serialization/reflection, and runs the various systems in the correct schedule. 
fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems( Startup, (save_scene_system, load_scene_system, infotext_system), ) .add_systems(Update, (log_system, panic_on_fail)) .run(); } /// # Components, Resources, and Reflection /// /// Below are some simple examples of how to define your own Bevy `Component` types /// and `Resource` types so that they can be properly reflected, serialized, and /// deserialized. The `#[derive(Reflect)]` macro enables Bevy's reflection features, /// and we add component-specific reflection by using `#[reflect(Component)]`. /// We also illustrate how to skip serializing fields and how `FromWorld` can help /// create runtime-initialized data. /// /// A sample component that is fully serializable. /// /// This component has public `x` and `y` fields that will be included in /// the scene files. Notice how it derives `Default`, `Reflect`, and declares /// itself as a reflected component with `#[reflect(Component)]`. #[derive(Component, Reflect, Default)] #[reflect(Component)] // this tells the reflect derive to also reflect component behaviors struct ComponentA { /// An example `f32` field pub x: f32, /// Another example `f32` field pub y: f32, } /// A sample component that includes both serializable and non-serializable fields. /// /// This is useful for skipping serialization of runtime data or fields you /// don't want written to scene files. #[derive(Component, Reflect)] #[reflect(Component)] struct ComponentB { /// A string field that will be serialized. pub value: String, /// A `Duration` field that should never be serialized to the scene file, so we skip it. #[reflect(skip_serializing)] pub _time_since_startup: Duration, } /// This implements `FromWorld` for `ComponentB`, letting us initialize runtime fields /// by accessing the current ECS resources. In this case, we acquire the `Time` resource /// and store the current elapsed time. 
impl FromWorld for ComponentB { fn from_world(world: &mut World) -> Self { let time = world.resource::<Time>(); ComponentB { _time_since_startup: time.elapsed(), value: "Default Value".to_string(), } } } /// A simple resource that also derives `Reflect`, allowing it to be stored in scenes. /// /// Just like a component, you can skip serializing fields or implement `FromWorld` if needed. #[derive(Resource, Reflect, Default)] #[reflect(Resource)] struct ResourceA { /// This resource tracks a `score` value. pub score: u32, } /// # Scene File Paths /// /// `SCENE_FILE_PATH` points to the original scene file that we'll be loading. /// `NEW_SCENE_FILE_PATH` points to the new scene file that we'll be creating /// (and demonstrating how to serialize to disk). /// /// The initial scene file will be loaded below and not change when the scene is saved. const SCENE_FILE_PATH: &str = "scenes/load_scene_example.scn.ron"; /// The new, updated scene data will be saved here so that you can see the changes. const NEW_SCENE_FILE_PATH: &str = "scenes/load_scene_example-new.scn.ron"; /// Loads a scene from an asset file and spawns it in the current world. /// /// Spawning a `DynamicSceneRoot` creates a new parent entity, which then spawns new /// instances of the scene's entities as its children. If you modify the /// `SCENE_FILE_PATH` scene file, or if you enable file watching, you can see /// changes reflected immediately. fn load_scene_system(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(DynamicSceneRoot(asset_server.load(SCENE_FILE_PATH))); } /// Logs changes made to `ComponentA` entities, and also checks whether `ResourceA` /// has been recently added. /// /// Any time a `ComponentA` is modified, that change will appear here. This system /// demonstrates how you might detect and handle scene updates at runtime. 
fn log_system( query: Query<(Entity, &ComponentA), Changed<ComponentA>>, res: Option<Res<ResourceA>>, ) { for (entity, component_a) in &query { info!(" Entity({})", entity.index()); info!( " ComponentA: {{ x: {} y: {} }}\n", component_a.x, component_a.y ); } if let Some(res) = res && res.is_added() { info!(" New ResourceA: {{ score: {} }}\n", res.score); } } /// Demonstrates how to create a new scene from scratch, populate it with data, /// and then serialize it to a file. The new file is written to `NEW_SCENE_FILE_PATH`. /// /// This system creates a fresh world, duplicates the type registry so that our /// custom component types are recognized, spawns some sample entities and resources, /// and then serializes the resulting dynamic scene. fn save_scene_system(world: &mut World) { // Scenes can be created from any ECS World. // You can either create a new one for the scene or use the current World. // For demonstration purposes, we'll create a new one. let mut scene_world = World::new(); // The `TypeRegistry` resource contains information about all registered types (including components). // This is used to construct scenes, so we'll want to ensure that our previous type registrations // exist in this new scene world as well. // To do this, we can simply clone the `AppTypeRegistry` resource. let type_registry = world.resource::<AppTypeRegistry>().clone(); scene_world.insert_resource(type_registry); let mut component_b = ComponentB::from_world(world); component_b.value = "hello".to_string(); scene_world.spawn(( component_b, ComponentA { x: 1.0, y: 2.0 }, Transform::IDENTITY, Name::new("joe"), )); scene_world.spawn(ComponentA { x: 3.0, y: 4.0 }); scene_world.insert_resource(ResourceA { score: 1 }); // With our sample world ready to go, we can now create our scene using DynamicScene or DynamicSceneBuilder. 
// For simplicity, we will create our scene using DynamicScene: let scene = DynamicScene::from_world(&scene_world); // Scenes can be serialized like this: let type_registry = world.resource::<AppTypeRegistry>(); let type_registry = type_registry.read(); let serialized_scene = scene.serialize(&type_registry).unwrap(); // Showing the scene in the console info!("{}", serialized_scene); // Writing the scene to a new file. Using a task to avoid calling the filesystem APIs in a system // as they are blocking. // // This can't work in Wasm as there is no filesystem access. #[cfg(not(target_arch = "wasm32"))] IoTaskPool::get() .spawn(async move { // Write the scene RON data to file File::create(format!("assets/{NEW_SCENE_FILE_PATH}")) .and_then(|mut file| file.write(serialized_scene.as_bytes())) .expect("Error while writing scene to file"); }) .detach(); } /// Spawns a simple 2D camera and some text indicating that the user should /// check the console output for scene loading/saving messages. /// /// This system is only necessary for the info message in the UI. fn infotext_system(mut commands: Commands) { commands.spawn(Camera2d); commands.spawn(( Text::new("Nothing to see in this window! Check the console output!"), TextFont { font_size: 42.0, ..default() }, Node { align_self: AlignSelf::FlexEnd, ..default() }, )); } /// To help with Bevy's automated testing, we want the example to close with an appropriate if the /// scene fails to load. This is most likely not something you want in your own app. fn panic_on_fail(scenes: Query<&DynamicSceneRoot>, asset_server: Res<AssetServer>) { for scene in &scenes { if let Some(LoadState::Failed(err)) = asset_server.get_load_state(&scene.0) { panic!("Failed to load scene. {err}"); } } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/bloom_2d.rs
examples/2d/bloom_2d.rs
//! Illustrates bloom post-processing in 2d. use bevy::{ core_pipeline::tonemapping::{DebandDither, Tonemapping}, post_process::bloom::{Bloom, BloomCompositeMode}, prelude::*, }; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .add_systems(Update, update_bloom_settings) .run(); } fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ColorMaterial>>, asset_server: Res<AssetServer>, ) { commands.spawn(( Camera2d, Camera { clear_color: ClearColorConfig::Custom(Color::BLACK), ..default() }, Tonemapping::TonyMcMapface, // 1. Using a tonemapper that desaturates to white is recommended Bloom::default(), // 2. Enable bloom for the camera DebandDither::Enabled, // Optional: bloom causes gradients which cause banding )); // Sprite commands.spawn(Sprite { image: asset_server.load("branding/bevy_bird_dark.png"), color: Color::srgb(5.0, 5.0, 5.0), // 3. Put something bright in a dark environment to see the effect custom_size: Some(Vec2::splat(160.0)), ..default() }); // Circle mesh commands.spawn(( Mesh2d(meshes.add(Circle::new(100.))), // 3. Put something bright in a dark environment to see the effect MeshMaterial2d(materials.add(Color::srgb(7.5, 0.0, 7.5))), Transform::from_translation(Vec3::new(-200., 0., 0.)), )); // Hexagon mesh commands.spawn(( Mesh2d(meshes.add(RegularPolygon::new(100., 6))), // 3. 
Put something bright in a dark environment to see the effect MeshMaterial2d(materials.add(Color::srgb(6.25, 9.4, 9.1))), Transform::from_translation(Vec3::new(200., 0., 0.)), )); // UI commands.spawn(( Text::default(), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } // ------------------------------------------------------------------------------------------------ fn update_bloom_settings( camera: Single<(Entity, &Tonemapping, Option<&mut Bloom>), With<Camera>>, mut text: Single<&mut Text>, mut commands: Commands, keycode: Res<ButtonInput<KeyCode>>, time: Res<Time>, ) { let (camera_entity, tonemapping, bloom) = camera.into_inner(); match bloom { Some(mut bloom) => { text.0 = "Bloom (Toggle: Space)\n".to_string(); text.push_str(&format!("(Q/A) Intensity: {:.2}\n", bloom.intensity)); text.push_str(&format!( "(W/S) Low-frequency boost: {:.2}\n", bloom.low_frequency_boost )); text.push_str(&format!( "(E/D) Low-frequency boost curvature: {:.2}\n", bloom.low_frequency_boost_curvature )); text.push_str(&format!( "(R/F) High-pass frequency: {:.2}\n", bloom.high_pass_frequency )); text.push_str(&format!( "(T/G) Mode: {}\n", match bloom.composite_mode { BloomCompositeMode::EnergyConserving => "Energy-conserving", BloomCompositeMode::Additive => "Additive", } )); text.push_str(&format!( "(Y/H) Threshold: {:.2}\n", bloom.prefilter.threshold )); text.push_str(&format!( "(U/J) Threshold softness: {:.2}\n", bloom.prefilter.threshold_softness )); text.push_str(&format!("(I/K) Horizontal Scale: {:.2}\n", bloom.scale.x)); if keycode.just_pressed(KeyCode::Space) { commands.entity(camera_entity).remove::<Bloom>(); } let dt = time.delta_secs(); if keycode.pressed(KeyCode::KeyA) { bloom.intensity -= dt / 10.0; } if keycode.pressed(KeyCode::KeyQ) { bloom.intensity += dt / 10.0; } bloom.intensity = bloom.intensity.clamp(0.0, 1.0); if keycode.pressed(KeyCode::KeyS) { bloom.low_frequency_boost -= dt / 10.0; } if keycode.pressed(KeyCode::KeyW) { 
bloom.low_frequency_boost += dt / 10.0; } bloom.low_frequency_boost = bloom.low_frequency_boost.clamp(0.0, 1.0); if keycode.pressed(KeyCode::KeyD) { bloom.low_frequency_boost_curvature -= dt / 10.0; } if keycode.pressed(KeyCode::KeyE) { bloom.low_frequency_boost_curvature += dt / 10.0; } bloom.low_frequency_boost_curvature = bloom.low_frequency_boost_curvature.clamp(0.0, 1.0); if keycode.pressed(KeyCode::KeyF) { bloom.high_pass_frequency -= dt / 10.0; } if keycode.pressed(KeyCode::KeyR) { bloom.high_pass_frequency += dt / 10.0; } bloom.high_pass_frequency = bloom.high_pass_frequency.clamp(0.0, 1.0); if keycode.pressed(KeyCode::KeyG) { bloom.composite_mode = BloomCompositeMode::Additive; } if keycode.pressed(KeyCode::KeyT) { bloom.composite_mode = BloomCompositeMode::EnergyConserving; } if keycode.pressed(KeyCode::KeyH) { bloom.prefilter.threshold -= dt; } if keycode.pressed(KeyCode::KeyY) { bloom.prefilter.threshold += dt; } bloom.prefilter.threshold = bloom.prefilter.threshold.max(0.0); if keycode.pressed(KeyCode::KeyJ) { bloom.prefilter.threshold_softness -= dt / 10.0; } if keycode.pressed(KeyCode::KeyU) { bloom.prefilter.threshold_softness += dt / 10.0; } bloom.prefilter.threshold_softness = bloom.prefilter.threshold_softness.clamp(0.0, 1.0); if keycode.pressed(KeyCode::KeyK) { bloom.scale.x -= dt * 2.0; } if keycode.pressed(KeyCode::KeyI) { bloom.scale.x += dt * 2.0; } bloom.scale.x = bloom.scale.x.clamp(0.0, 16.0); } None => { text.0 = "Bloom: Off (Toggle: Space)\n".to_string(); if keycode.just_pressed(KeyCode::Space) { commands.entity(camera_entity).insert(Bloom::default()); } } } text.push_str(&format!("(O) Tonemapping: {tonemapping:?}\n")); if keycode.just_pressed(KeyCode::KeyO) { commands .entity(camera_entity) .insert(next_tonemap(tonemapping)); } } /// Get the next Tonemapping algorithm fn next_tonemap(tonemapping: &Tonemapping) -> Tonemapping { match tonemapping { Tonemapping::None => Tonemapping::AcesFitted, Tonemapping::AcesFitted => Tonemapping::AgX, 
Tonemapping::AgX => Tonemapping::BlenderFilmic, Tonemapping::BlenderFilmic => Tonemapping::Reinhard, Tonemapping::Reinhard => Tonemapping::ReinhardLuminance, Tonemapping::ReinhardLuminance => Tonemapping::SomewhatBoringDisplayTransform, Tonemapping::SomewhatBoringDisplayTransform => Tonemapping::TonyMcMapface, Tonemapping::TonyMcMapface => Tonemapping::None, } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/2d_shapes.rs
examples/2d/2d_shapes.rs
//! Here we use shape primitives to build meshes in a 2D rendering context, making each mesh a certain color by giving that mesh's entity a material based off a [`Color`]. //! //! Meshes are better known for their use in 3D rendering, but we can use them in a 2D context too. Without a third dimension, the meshes we're building are flat – like paper on a table. These are still very useful for "vector-style" graphics, picking behavior, or as a foundation to build off of for where to apply a shader. //! //! A "shape definition" is not a mesh on its own. A circle can be defined with a radius, i.e. [`Circle::new(50.0)`][Circle::new], but rendering tends to happen with meshes built out of triangles. So we need to turn shape descriptions into meshes. //! //! Thankfully, we can add shape primitives directly to [`Assets<Mesh>`] because [`Mesh`] implements [`From`] for shape primitives and [`Assets<T>::add`] can be given any value that can be "turned into" `T`! //! //! We apply a material to the shape by first making a [`Color`] then calling [`Assets<ColorMaterial>::add`] with that color as its argument, which will create a material from that color through the same process [`Assets<Mesh>::add`] can take a shape primitive. //! //! Both the mesh and material need to be wrapped in their own "newtypes". The mesh and material are currently [`Handle<Mesh>`] and [`Handle<ColorMaterial>`] at the moment, which are not components. Handles are put behind "newtypes" to prevent ambiguity, as some entities might want to have handles to meshes (or images, or materials etc.) for different purposes! All we need to do to make them rendering-relevant components is wrap the mesh handle and the material handle in [`Mesh2d`] and [`MeshMaterial2d`] respectively. //! //! You can toggle wireframes with the space bar except on wasm. Wasm does not support //! `POLYGON_MODE_LINE` on the gpu. 
#[cfg(not(target_arch = "wasm32"))] use bevy::{ input::common_conditions::input_just_pressed, sprite_render::{Wireframe2dConfig, Wireframe2dPlugin}, }; use bevy::{input::common_conditions::input_toggle_active, prelude::*}; fn main() { let mut app = App::new(); app.add_plugins(( DefaultPlugins, #[cfg(not(target_arch = "wasm32"))] Wireframe2dPlugin::default(), )) .add_systems(Startup, setup); #[cfg(not(target_arch = "wasm32"))] app.add_systems( Update, toggle_wireframe.run_if(input_just_pressed(KeyCode::Space)), ); app.add_systems( Update, rotate.run_if(input_toggle_active(false, KeyCode::KeyR)), ); app.run(); } const X_EXTENT: f32 = 1000.; const Y_EXTENT: f32 = 150.; const THICKNESS: f32 = 5.0; fn setup( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ColorMaterial>>, ) { commands.spawn(Camera2d); let shapes = [ meshes.add(Circle::new(50.0)), meshes.add(CircularSector::new(50.0, 1.0)), meshes.add(CircularSegment::new(50.0, 1.25)), meshes.add(Ellipse::new(25.0, 50.0)), meshes.add(Annulus::new(25.0, 50.0)), meshes.add(Capsule2d::new(25.0, 50.0)), meshes.add(Rhombus::new(75.0, 100.0)), meshes.add(Rectangle::new(50.0, 100.0)), meshes.add(RegularPolygon::new(50.0, 6)), meshes.add(Triangle2d::new( Vec2::Y * 50.0, Vec2::new(-50.0, -50.0), Vec2::new(50.0, -50.0), )), meshes.add(Segment2d::new( Vec2::new(-50.0, 50.0), Vec2::new(50.0, -50.0), )), meshes.add(Polyline2d::new(vec![ Vec2::new(-50.0, 50.0), Vec2::new(0.0, -50.0), Vec2::new(50.0, 50.0), ])), ]; let num_shapes = shapes.len(); for (i, shape) in shapes.into_iter().enumerate() { // Distribute colors evenly across the rainbow. let color = Color::hsl(360. * i as f32 / num_shapes as f32, 0.95, 0.7); commands.spawn(( Mesh2d(shape), MeshMaterial2d(materials.add(color)), Transform::from_xyz( // Distribute shapes from -X_EXTENT/2 to +X_EXTENT/2. -X_EXTENT / 2. 
+ i as f32 / (num_shapes - 1) as f32 * X_EXTENT, Y_EXTENT / 2., 0.0, ), )); } let rings = [ meshes.add(Circle::new(50.0).to_ring(THICKNESS)), // this visually produces an arc segment but this is not technically accurate meshes.add(Ring::new( CircularSector::new(50.0, 1.0), CircularSector::new(45.0, 1.0), )), meshes.add(CircularSegment::new(50.0, 1.25).to_ring(THICKNESS)), meshes.add({ // This is an approximation; Ellipse does not implement Inset as concentric ellipses do not have parallel curves let outer = Ellipse::new(25.0, 50.0); let mut inner = outer; inner.half_size -= Vec2::splat(THICKNESS); Ring::new(outer, inner) }), // this is equivalent to the Annulus::new(25.0, 50.0) above meshes.add(Ring::new(Circle::new(50.0), Circle::new(25.0))), meshes.add(Capsule2d::new(25.0, 50.0).to_ring(THICKNESS)), meshes.add(Rhombus::new(75.0, 100.0).to_ring(THICKNESS)), meshes.add(Rectangle::new(50.0, 100.0).to_ring(THICKNESS)), meshes.add(RegularPolygon::new(50.0, 6).to_ring(THICKNESS)), meshes.add( Triangle2d::new( Vec2::Y * 50.0, Vec2::new(-50.0, -50.0), Vec2::new(50.0, -50.0), ) .to_ring(THICKNESS), ), ]; // Allow for 2 empty spaces let num_rings = rings.len() + 2; for (i, shape) in rings.into_iter().enumerate() { // Distribute colors evenly across the rainbow. let color = Color::hsl(360. * i as f32 / num_rings as f32, 0.95, 0.7); commands.spawn(( Mesh2d(shape), MeshMaterial2d(materials.add(color)), Transform::from_xyz( // Distribute shapes from -X_EXTENT/2 to +X_EXTENT/2. -X_EXTENT / 2. 
+ i as f32 / (num_rings - 1) as f32 * X_EXTENT, -Y_EXTENT / 2., 0.0, ), )); } let mut text = "Press 'R' to pause/resume rotation".to_string(); #[cfg(not(target_arch = "wasm32"))] text.push_str("\nPress 'Space' to toggle wireframes"); commands.spawn(( Text::new(text), Node { position_type: PositionType::Absolute, top: px(12), left: px(12), ..default() }, )); } #[cfg(not(target_arch = "wasm32"))] fn toggle_wireframe(mut wireframe_config: ResMut<Wireframe2dConfig>) { wireframe_config.global = !wireframe_config.global; } fn rotate(mut query: Query<&mut Transform, With<Mesh2d>>, time: Res<Time>) { for mut transform in &mut query { transform.rotate_z(time.delta_secs() / 2.0); } }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/pixel_grid_snap.rs
examples/2d/pixel_grid_snap.rs
//! Shows how to create graphics that snap to the pixel grid by rendering to a texture in 2D use bevy::{ camera::visibility::RenderLayers, camera::RenderTarget, color::palettes::css::GRAY, prelude::*, render::render_resource::{ Extent3d, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, }, window::WindowResized, }; /// In-game resolution width. const RES_WIDTH: u32 = 160; /// In-game resolution height. const RES_HEIGHT: u32 = 90; /// Default render layers for pixel-perfect rendering. /// You can skip adding this component, as this is the default. const PIXEL_PERFECT_LAYERS: RenderLayers = RenderLayers::layer(0); /// Render layers for high-resolution rendering. const HIGH_RES_LAYERS: RenderLayers = RenderLayers::layer(1); fn main() { App::new() .add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest())) .add_systems(Startup, (setup_camera, setup_sprite, setup_mesh)) .add_systems(Update, (rotate, fit_canvas)) .run(); } /// Low-resolution texture that contains the pixel-perfect world. /// Canvas itself is rendered to the high-resolution world. #[derive(Component)] struct Canvas; /// Camera that renders the pixel-perfect world to the [`Canvas`]. #[derive(Component)] struct InGameCamera; /// Camera that renders the [`Canvas`] (and other graphics on [`HIGH_RES_LAYERS`]) to the screen. #[derive(Component)] struct OuterCamera; #[derive(Component)] struct Rotate; fn setup_sprite(mut commands: Commands, asset_server: Res<AssetServer>) { // The sample sprite that will be rendered to the pixel-perfect canvas commands.spawn(( Sprite::from_image(asset_server.load("pixel/bevy_pixel_dark.png")), Transform::from_xyz(-45., 20., 2.), Rotate, PIXEL_PERFECT_LAYERS, )); // The sample sprite that will be rendered to the high-res "outer world" commands.spawn(( Sprite::from_image(asset_server.load("pixel/bevy_pixel_light.png")), Transform::from_xyz(-45., -20., 2.), Rotate, HIGH_RES_LAYERS, )); } /// Spawns a capsule mesh on the pixel-perfect layer. 
fn setup_mesh( mut commands: Commands, mut meshes: ResMut<Assets<Mesh>>, mut materials: ResMut<Assets<ColorMaterial>>, ) { commands.spawn(( Mesh2d(meshes.add(Capsule2d::default())), MeshMaterial2d(materials.add(Color::BLACK)), Transform::from_xyz(25., 0., 2.).with_scale(Vec3::splat(32.)), Rotate, PIXEL_PERFECT_LAYERS, )); } fn setup_camera(mut commands: Commands, mut images: ResMut<Assets<Image>>) { let canvas_size = Extent3d { width: RES_WIDTH, height: RES_HEIGHT, ..default() }; // This Image serves as a canvas representing the low-resolution game screen let mut canvas = Image { texture_descriptor: TextureDescriptor { label: None, size: canvas_size, dimension: TextureDimension::D2, format: TextureFormat::Bgra8UnormSrgb, mip_level_count: 1, sample_count: 1, usage: TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST | TextureUsages::RENDER_ATTACHMENT, view_formats: &[], }, ..default() }; // Fill image.data with zeroes canvas.resize(canvas_size); let image_handle = images.add(canvas); // This camera renders whatever is on `PIXEL_PERFECT_LAYERS` to the canvas commands.spawn(( Camera2d, Camera { // Render before the "main pass" camera order: -1, clear_color: ClearColorConfig::Custom(GRAY.into()), ..default() }, RenderTarget::Image(image_handle.clone().into()), Msaa::Off, InGameCamera, PIXEL_PERFECT_LAYERS, )); // Spawn the canvas commands.spawn((Sprite::from_image(image_handle), Canvas, HIGH_RES_LAYERS)); // The "outer" camera renders whatever is on `HIGH_RES_LAYERS` to the screen. // here, the canvas and one of the sample sprites will be rendered by this camera commands.spawn((Camera2d, Msaa::Off, OuterCamera, HIGH_RES_LAYERS)); } /// Rotates entities to demonstrate grid snapping. fn rotate(time: Res<Time>, mut transforms: Query<&mut Transform, With<Rotate>>) { for mut transform in &mut transforms { let dt = time.delta_secs(); transform.rotate_z(dt); } } /// Scales camera projection to fit the window (integer multiples only). 
fn fit_canvas(
    mut resize_messages: MessageReader<WindowResized>,
    mut projection: Single<&mut Projection, With<OuterCamera>>,
) {
    // Only orthographic projections can be integer-scaled this way.
    let Projection::Orthographic(projection) = &mut **projection else {
        return;
    };
    for window_resized in resize_messages.read() {
        let h_scale = window_resized.width / RES_WIDTH as f32;
        let v_scale = window_resized.height / RES_HEIGHT as f32;
        // `.max(1.)` guards against a zero integer factor when the window is
        // resized smaller than the 160x90 in-game resolution; without it,
        // `round()` can yield 0 and `1. / 0.` makes the scale infinite.
        projection.scale = 1. / h_scale.min(v_scale).round().max(1.);
    }
}
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/sprite_flipping.rs
examples/2d/sprite_flipping.rs
//! Displays a single [`Sprite`], created from an image, but flipped on one axis. use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins) .add_systems(Startup, setup) .run(); } fn setup(mut commands: Commands, asset_server: Res<AssetServer>) { commands.spawn(Camera2d); commands.spawn(Sprite { image: asset_server.load("branding/bevy_bird_dark.png"), // Flip the logo to the left flip_x: true, // And don't flip it upside-down ( the default ) flip_y: false, ..Default::default() }); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/2d_viewport_to_world.rs
examples/2d/2d_viewport_to_world.rs
//! This example demonstrates how to use the `Camera::viewport_to_world_2d` method with a dynamic viewport and camera.

use bevy::{
    camera::Viewport,
    color::palettes::{
        basic::WHITE,
        css::{GREEN, RED},
    },
    math::ops::powf,
    prelude::*,
};

fn main() {
    App::new()
        .add_plugins(DefaultPlugins)
        .add_systems(Startup, setup)
        .add_systems(FixedUpdate, controls)
        // Run after transform propagation so the gizmos use this frame's final camera pose.
        .add_systems(PostUpdate, draw_cursor.after(TransformSystems::Propagate))
        .run();
}

/// Draws a circle at the cursor's world position, plus a second circle that
/// round-trips that position through `world_to_viewport` and back through
/// `viewport_to_world_2d` as a sanity check — the two circles should coincide.
fn draw_cursor(
    camera_query: Single<(&Camera, &GlobalTransform)>,
    window: Single<&Window>,
    mut gizmos: Gizmos,
) {
    let (camera, camera_transform) = *camera_query;

    if let Some(cursor_position) = window.cursor_position()
        // Calculate a world position based on the cursor's position.
        && let Ok(world_pos) = camera.viewport_to_world_2d(camera_transform, cursor_position)
        // To test Camera::world_to_viewport, convert result back to viewport space and then back to world space.
        && let Ok(viewport_check) =
            camera.world_to_viewport(camera_transform, world_pos.extend(0.0))
        && let Ok(world_check) =
            camera.viewport_to_world_2d(camera_transform, viewport_check.xy())
    {
        gizmos.circle_2d(world_pos, 10., WHITE);
        // Should be the same as world_pos
        gizmos.circle_2d(world_check, 8., RED);
    }
}

/// Keyboard controls: arrow keys move the camera, comma/period zoom, WASD moves
/// the viewport rectangle, IJKL resizes it.
///
/// NOTE(review): the clamping below mixes `saturating_sub` with plain `UVec2`
/// subtraction (`window_size - viewport.physical_size` and
/// `window_size - viewport.physical_position`). The reset at the top of the
/// viewport branch appears to keep the operands in range, but the statements
/// are order-dependent — confirm before reordering any of them.
fn controls(
    camera_query: Single<(&mut Camera, &mut Transform, &mut Projection)>,
    window: Single<&Window>,
    input: Res<ButtonInput<KeyCode>>,
    time: Res<Time<Fixed>>,
) {
    let (mut camera, mut transform, mut projection) = camera_query.into_inner();
    // Per-tick speed in world units (float) and in physical pixels (integer).
    let fspeed = 600.0 * time.delta_secs();
    let uspeed = fspeed as u32;
    let window_size = window.resolution.physical_size();

    // Camera movement controls
    if input.pressed(KeyCode::ArrowUp) {
        transform.translation.y += fspeed;
    }
    if input.pressed(KeyCode::ArrowDown) {
        transform.translation.y -= fspeed;
    }
    if input.pressed(KeyCode::ArrowLeft) {
        transform.translation.x -= fspeed;
    }
    if input.pressed(KeyCode::ArrowRight) {
        transform.translation.x += fspeed;
    }

    // Camera zoom controls (exponential so zoom speed is frame-rate independent)
    if let Projection::Orthographic(projection2d) = &mut *projection {
        if input.pressed(KeyCode::Comma) {
            projection2d.scale *= powf(4.0f32, time.delta_secs());
        }

        if input.pressed(KeyCode::Period) {
            projection2d.scale *= powf(0.25f32, time.delta_secs());
        }
    }

    if let Some(viewport) = camera.viewport.as_mut() {
        // Reset viewport size on window resize
        if viewport.physical_size.x > window_size.x || viewport.physical_size.y > window_size.y {
            viewport.physical_size = (window_size.as_vec2() * 0.75).as_uvec2();
        }

        // Viewport movement controls (saturating so the position can't underflow past 0)
        if input.pressed(KeyCode::KeyW) {
            viewport.physical_position.y = viewport.physical_position.y.saturating_sub(uspeed);
        }
        if input.pressed(KeyCode::KeyS) {
            viewport.physical_position.y += uspeed;
        }
        if input.pressed(KeyCode::KeyA) {
            viewport.physical_position.x = viewport.physical_position.x.saturating_sub(uspeed);
        }
        if input.pressed(KeyCode::KeyD) {
            viewport.physical_position.x += uspeed;
        }

        // Bound viewport position so it doesn't go off-screen
        viewport.physical_position = viewport
            .physical_position
            .min(window_size - viewport.physical_size);

        // Viewport size controls
        if input.pressed(KeyCode::KeyI) {
            viewport.physical_size.y = viewport.physical_size.y.saturating_sub(uspeed);
        }
        if input.pressed(KeyCode::KeyK) {
            viewport.physical_size.y += uspeed;
        }
        if input.pressed(KeyCode::KeyJ) {
            viewport.physical_size.x = viewport.physical_size.x.saturating_sub(uspeed);
        }
        if input.pressed(KeyCode::KeyL) {
            viewport.physical_size.x += uspeed;
        }

        // Bound viewport size so it doesn't go off-screen (and keep a 20x20 minimum)
        viewport.physical_size = viewport
            .physical_size
            .min(window_size - viewport.physical_position)
            .max(UVec2::new(20, 20));
    }
}

/// Spawns the camera (with a centered 75%-of-window viewport), the help text,
/// a small foreground rectangle, and a large dark background quad.
fn setup(
    mut commands: Commands,
    mut meshes: ResMut<Assets<Mesh>>,
    mut materials: ResMut<Assets<ColorMaterial>>,
    window: Single<&Window>,
) {
    let window_size = window.resolution.physical_size().as_vec2();

    // Initialize centered, non-window-filling viewport
    commands.spawn((
        Camera2d,
        Camera {
            viewport: Some(Viewport {
                physical_position: (window_size * 0.125).as_uvec2(),
                physical_size: (window_size * 0.75).as_uvec2(),
                ..default()
            }),
            ..default()
        },
    ));

    // Create a minimal UI explaining how to interact with the example
    commands.spawn((
        Text::new(
            "Move the mouse to see the circle follow your cursor.\n\
            Use the arrow keys to move the camera.\n\
            Use the comma and period keys to zoom in and out.\n\
            Use the WASD keys to move the viewport.\n\
            Use the IJKL keys to resize the viewport.",
        ),
        Node {
            position_type: PositionType::Absolute,
            top: px(12),
            left: px(12),
            ..default()
        },
    ));

    // Add mesh to make camera movement visible
    commands.spawn((
        Mesh2d(meshes.add(Rectangle::new(40.0, 20.0))),
        MeshMaterial2d(materials.add(Color::from(GREEN))),
    ));

    // Add background to visualize viewport bounds
    commands.spawn((
        Mesh2d(meshes.add(Rectangle::new(50000.0, 50000.0))),
        MeshMaterial2d(materials.add(Color::linear_rgb(0.01, 0.01, 0.01))),
        Transform::from_translation(Vec3::new(0.0, 0.0, -200.0)),
    ));
}
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false
bevyengine/bevy
https://github.com/bevyengine/bevy/blob/51a6fedb06a022ab5d39e099413caa882e1b022d/examples/2d/sprite_sheet.rs
examples/2d/sprite_sheet.rs
//! Renders an animated sprite by loading all animation frames from a single image (a sprite sheet) //! into a texture atlas, and changing the displayed image periodically. use bevy::prelude::*; fn main() { App::new() .add_plugins(DefaultPlugins.set(ImagePlugin::default_nearest())) // prevents blurry sprites .add_systems(Startup, setup) .add_systems(Update, animate_sprite) .run(); } #[derive(Component)] struct AnimationIndices { first: usize, last: usize, } #[derive(Component, Deref, DerefMut)] struct AnimationTimer(Timer); fn animate_sprite( time: Res<Time>, mut query: Query<(&AnimationIndices, &mut AnimationTimer, &mut Sprite)>, ) { for (indices, mut timer, mut sprite) in &mut query { timer.tick(time.delta()); if timer.just_finished() && let Some(atlas) = &mut sprite.texture_atlas { atlas.index = if atlas.index == indices.last { indices.first } else { atlas.index + 1 }; } } } fn setup( mut commands: Commands, asset_server: Res<AssetServer>, mut texture_atlas_layouts: ResMut<Assets<TextureAtlasLayout>>, ) { let texture = asset_server.load("textures/rpg/chars/gabe/gabe-idle-run.png"); let layout = TextureAtlasLayout::from_grid(UVec2::splat(24), 7, 1, None, None); let texture_atlas_layout = texture_atlas_layouts.add(layout); // Use only the subset of sprites in the sheet that make up the run animation let animation_indices = AnimationIndices { first: 1, last: 6 }; commands.spawn(Camera2d); commands.spawn(( Sprite::from_atlas_image( texture, TextureAtlas { layout: texture_atlas_layout, index: animation_indices.first, }, ), Transform::from_scale(Vec3::splat(6.0)), animation_indices, AnimationTimer(Timer::from_seconds(0.1, TimerMode::Repeating)), )); }
rust
Apache-2.0
51a6fedb06a022ab5d39e099413caa882e1b022d
2026-01-04T15:31:59.438636Z
false