117 files changed, 2756 insertions(+), 1779 deletions(-)
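One easy-to-miss API correction sits among the renames: the core/engine.cpp and core/engine.h hunks below change Engine::get_target_fps() to return int, matching its int setter. A minimal caller-side sketch of the corrected pair (illustrative only, not part of the commit):

    // Sketch of the post-change signatures from the core/engine.h hunk below.
    #include "core/engine.h"

    int current_fps_cap() {
        Engine *engine = Engine::get_singleton();
        // The setter already took an int and clamps non-positive values to 0 ("uncapped").
        engine->set_target_fps(60);
        // get_target_fps() is now declared as returning int rather than float.
        return engine->get_target_fps();
    }
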
diff --git a/.gitignore b/.gitignore index 9096ff228a..19490b9878 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ project.properties platform/android/java/lib/.cxx/ platform/android/java/libs/* platform/android/java/app/libs/* +platform/android/java/lib/.cxx/* # General c++ generated files *.lib diff --git a/.travis.yml b/.travis.yml index 14ee95e77e..8cfd7a1a7f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -55,17 +55,15 @@ matrix: packages: - *linux_deps -# TODO: Android support - -# - name: Android export template (release_debug, Clang) -# stage: build -# env: PLATFORM=android TOOLS=no TARGET=release_debug CACHE_NAME=${PLATFORM}-clang EXTRA_ARGS="warnings=extra werror=yes" -# os: linux -# compiler: clang -# addons: -# apt: -# packages: -# - openjdk-8-jdk + - name: Android export template (release_debug, Clang) + stage: build + env: PLATFORM=android TOOLS=no TARGET=release_debug CACHE_NAME=${PLATFORM}-clang EXTRA_ARGS="warnings=extra werror=yes" + os: linux + compiler: clang + addons: + apt: + packages: + - openjdk-8-jdk - name: macOS editor (debug, Clang) stage: build @@ -92,21 +90,6 @@ matrix: # packages: # - scons -# TODO: Dummy/Offscreen rasterizer - -# - name: Linux headless editor (release_debug, GCC 9, testing project exporting and script running) -# stage: build -# env: PLATFORM=server TOOLS=yes TARGET=release_debug CACHE_NAME=${PLATFORM}-tools-gcc-9 MATRIX_EVAL="CC=gcc-9 && CXX=g++-9" EXTRA_ARGS="warnings=extra werror=yes" TEST_PROJECT=yes -# os: linux -# compiler: gcc-9 -# addons: -# apt: -# sources: -# - sourceline: "ppa:ubuntu-toolchain-r/test" -# packages: -# - *gcc9_deps -# - *linux_deps - - name: Linux export template (release_debug, GCC 7, without 3D support) stage: build env: PLATFORM=linuxbsd TOOLS=no TARGET=release_debug CACHE_NAME=${PLATFORM}-gcc-7 EXTRA_ARGS="disable_3d=yes" diff --git a/core/engine.cpp b/core/engine.cpp index 36987eab31..5361e09a8a 100644 --- a/core/engine.cpp +++ b/core/engine.cpp @@ -60,7 +60,7 @@ void Engine::set_target_fps(int p_fps) { _target_fps = p_fps > 0 ? p_fps : 0; } -float Engine::get_target_fps() const { +int Engine::get_target_fps() const { return _target_fps; } diff --git a/core/engine.h b/core/engine.h index 4cfdeffa82..8512779d4c 100644 --- a/core/engine.h +++ b/core/engine.h @@ -86,7 +86,7 @@ public: float get_physics_jitter_fix() const; virtual void set_target_fps(int p_fps); - virtual float get_target_fps() const; + virtual int get_target_fps() const; virtual float get_frames_per_second() const { return _fps; } diff --git a/core/undo_redo.cpp b/core/undo_redo.cpp index 8bd5a4915d..62ad3e9f98 100644 --- a/core/undo_redo.cpp +++ b/core/undo_redo.cpp @@ -511,8 +511,7 @@ void UndoRedo::_bind_methods() { ClassDB::bind_method(D_METHOD("create_action", "name", "merge_mode"), &UndoRedo::create_action, DEFVAL(MERGE_DISABLE)); ClassDB::bind_method(D_METHOD("commit_action"), &UndoRedo::commit_action); - // FIXME: Typo in "commiting", fix in 4.0 when breaking compat. - ClassDB::bind_method(D_METHOD("is_commiting_action"), &UndoRedo::is_committing_action); + ClassDB::bind_method(D_METHOD("is_committing_action"), &UndoRedo::is_committing_action); { MethodInfo mi; diff --git a/doc/classes/@GlobalScope.xml b/doc/classes/@GlobalScope.xml index 8c6821eaac..f462aa989d 100644 --- a/doc/classes/@GlobalScope.xml +++ b/doc/classes/@GlobalScope.xml @@ -12,8 +12,8 @@ <methods> </methods> <members> - <member name="ARVRServer" type="ARVRServer" setter="" getter=""> - The [ARVRServer] singleton. 
+ <member name="XRServer" type="XRServer" setter="" getter=""> + The [XRServer] singleton. </member> <member name="AudioServer" type="AudioServer" setter="" getter=""> The [AudioServer] singleton. diff --git a/doc/classes/CameraFeed.xml b/doc/classes/CameraFeed.xml index 3232f5970c..4fc124592f 100644 --- a/doc/classes/CameraFeed.xml +++ b/doc/classes/CameraFeed.xml @@ -4,7 +4,7 @@ A camera feed gives you access to a single physical camera attached to your device. </brief_description> <description> - A camera feed gives you access to a single physical camera attached to your device. When enabled, Godot will start capturing frames from the camera which can then be used. + A camera feed gives you access to a single physical camera attached to your device. When enabled, Godot will start capturing frames from the camera which can then be used. See also [CameraServer]. [b]Note:[/b] Many cameras will return YCbCr images which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background. </description> <tutorials> diff --git a/doc/classes/CameraServer.xml b/doc/classes/CameraServer.xml index 82d1faf716..e00dc031dc 100644 --- a/doc/classes/CameraServer.xml +++ b/doc/classes/CameraServer.xml @@ -6,6 +6,7 @@ <description> The [CameraServer] keeps track of different cameras accessible in Godot. These are external cameras such as webcams or the cameras on your phone. It is notably used to provide AR modules with a video feed from the camera. + [b]Note:[/b] This class is currently only implemented on macOS and iOS. On other platforms, no [CameraFeed]s will be available. </description> <tutorials> </tutorials> @@ -16,7 +17,7 @@ <argument index="0" name="feed" type="CameraFeed"> </argument> <description> - Adds a camera feed to the camera server. + Adds the camera [code]feed[/code] to the camera server. </description> </method> <method name="feeds"> @@ -32,7 +33,7 @@ <argument index="0" name="index" type="int"> </argument> <description> - Returns the [CameraFeed] with this id. + Returns the [CameraFeed] corresponding to the camera with the given [code]index[/code]. </description> </method> <method name="get_feed_count"> @@ -48,7 +49,7 @@ <argument index="0" name="feed" type="CameraFeed"> </argument> <description> - Removes a [CameraFeed]. + Removes the specified camera [code]feed[/code]. </description> </method> </methods> @@ -57,14 +58,14 @@ <argument index="0" name="id" type="int"> </argument> <description> - Emitted when a [CameraFeed] is added (e.g. webcam is plugged in). + Emitted when a [CameraFeed] is added (e.g. a webcam is plugged in). </description> </signal> <signal name="camera_feed_removed"> <argument index="0" name="id" type="int"> </argument> <description> - Emitted when a [CameraFeed] is removed (e.g. webcam is unplugged). + Emitted when a [CameraFeed] is removed (e.g. a webcam is unplugged). </description> </signal> </signals> @@ -73,7 +74,7 @@ The RGBA camera image. </constant> <constant name="FEED_YCBCR_IMAGE" value="0" enum="FeedImage"> - The YCbCr camera image. + The [url=https://en.wikipedia.org/wiki/YCbCr]YCbCr[/url] camera image. </constant> <constant name="FEED_Y_IMAGE" value="0" enum="FeedImage"> The Y component camera image. 
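The CameraServer hunk above now documents that feeds are only implemented on macOS and iOS. A short sketch of enumerating feeds with the methods documented above (illustrative only; CameraFeed::get_name() is assumed from the existing class and is not part of this diff):

    // Walks the feed list using the CameraServer API documented above.
    #include "core/print_string.h"
    #include "servers/camera/camera_feed.h"
    #include "servers/camera_server.h"

    void list_camera_feeds() {
        CameraServer *server = CameraServer::get_singleton();
        // On platforms without an implementation, get_feed_count() is simply 0.
        for (int i = 0; i < server->get_feed_count(); i++) {
            Ref<CameraFeed> feed = server->get_feed(i);
            print_line(feed->get_name()); // assumed accessor, see note above
        }
    }
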
diff --git a/doc/classes/Control.xml b/doc/classes/Control.xml index 85a75fda37..0c8d42021a 100644 --- a/doc/classes/Control.xml +++ b/doc/classes/Control.xml @@ -838,7 +838,7 @@ Controls whether the control will be able to receive mouse button input events through [method _gui_input] and how these events should be handled. Also controls whether the control can receive the [signal mouse_entered], and [signal mouse_exited] signals. See the constants to learn what each does. </member> <member name="rect_clip_content" type="bool" setter="set_clip_contents" getter="is_clipping_contents" default="false"> - Enables whether rendering of children should be clipped to this control's rectangle. If [code]true[/code], parts of a child which would be visibly outside of this control's rectangle will not be rendered. + Enables whether rendering of [CanvasItem] based children should be clipped to this control's rectangle. If [code]true[/code], parts of a child which would be visibly outside of this control's rectangle will not be rendered. </member> <member name="rect_global_position" type="Vector2" setter="_set_global_position" getter="get_global_position"> The node's global position, relative to the world (usually to the top-left corner of the window). diff --git a/doc/classes/RenderingServer.xml b/doc/classes/RenderingServer.xml index d2d13fe406..aa393877b2 100644 --- a/doc/classes/RenderingServer.xml +++ b/doc/classes/RenderingServer.xml @@ -3067,15 +3067,15 @@ Sets when the viewport should be updated. See [enum ViewportUpdateMode] constants for options. </description> </method> - <method name="viewport_set_use_arvr"> + <method name="viewport_set_use_xr"> <return type="void"> </return> <argument index="0" name="viewport" type="RID"> </argument> - <argument index="1" name="use_arvr" type="bool"> + <argument index="1" name="use_xr" type="bool"> </argument> <description> - If [code]true[/code], the viewport uses augmented or virtual reality technologies. See [ARVRInterface]. + If [code]true[/code], the viewport uses augmented or virtual reality technologies. See [XRInterface]. </description> </method> </methods> diff --git a/doc/classes/SubViewport.xml b/doc/classes/SubViewport.xml index dc3d748496..e877050bf8 100644 --- a/doc/classes/SubViewport.xml +++ b/doc/classes/SubViewport.xml @@ -9,7 +9,7 @@ <methods> </methods> <members> - <member name="arvr" type="bool" setter="set_use_arvr" getter="is_using_arvr" default="false"> + <member name="xr" type="bool" setter="set_use_xr" getter="is_using_xr" default="false"> If [code]true[/code], the sub-viewport will be used in AR/VR process. </member> <member name="render_target_clear_mode" type="int" setter="set_clear_mode" getter="get_clear_mode" enum="SubViewport.ClearMode" default="0"> diff --git a/doc/classes/UndoRedo.xml b/doc/classes/UndoRedo.xml index 766ebf7e32..2cc3e974e2 100644 --- a/doc/classes/UndoRedo.xml +++ b/doc/classes/UndoRedo.xml @@ -155,7 +155,7 @@ Returns [code]true[/code] if an "undo" action is available. 
</description> </method> - <method name="is_commiting_action" qualifiers="const"> + <method name="is_committing_action" qualifiers="const"> <return type="bool"> </return> <description> diff --git a/doc/classes/ARVRAnchor.xml b/doc/classes/XRAnchor3D.xml index 82575ce7cb..a409c79230 100644 --- a/doc/classes/ARVRAnchor.xml +++ b/doc/classes/XRAnchor3D.xml @@ -1,10 +1,10 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRAnchor" inherits="Node3D" version="4.0"> +<class name="XRAnchor3D" inherits="Node3D" version="4.0"> <brief_description> An anchor point in AR space. </brief_description> <description> - The [ARVRAnchor] point is a spatial node that maps a real world location identified by the AR platform to a position within the game world. For example, as long as plane detection in ARKit is on, ARKit will identify and update the position of planes (tables, floors, etc) and create anchors for them. + The [XRAnchor3D] point is a spatial node that maps a real world location identified by the AR platform to a position within the game world. For example, as long as plane detection in ARKit is on, ARKit will identify and update the position of planes (tables, floors, etc) and create anchors for them. This node is mapped to one of the anchors through its unique ID. When you receive a signal that a new anchor is available, you should add this node to your scene for that anchor. You can predefine nodes and set the ID; the nodes will simply remain on 0,0,0 until a plane is recognized. Keep in mind that, as long as plane detection is enabled, the size, placing and orientation of an anchor will be updated as the detection logic learns more about the real world out there especially if only part of the surface is in view. </description> @@ -29,7 +29,7 @@ <return type="Mesh"> </return> <description> - If provided by the [ARVRInterface], this returns a mesh object for the anchor. For an anchor, this can be a shape related to the object being tracked or it can be a mesh that provides topology related to the anchor and can be used to create shadows/reflections on surfaces or for generating collision shapes. + If provided by the [XRInterface], this returns a mesh object for the anchor. For an anchor, this can be a shape related to the object being tracked or it can be a mesh that provides topology related to the anchor and can be used to create shadows/reflections on surfaces or for generating collision shapes. </description> </method> <method name="get_plane" qualifiers="const"> diff --git a/doc/classes/ARVRCamera.xml b/doc/classes/XRCamera3D.xml index c97d5cf1d8..4d86e24daa 100644 --- a/doc/classes/ARVRCamera.xml +++ b/doc/classes/XRCamera3D.xml @@ -1,11 +1,11 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRCamera" inherits="Camera3D" version="4.0"> +<class name="XRCamera3D" inherits="Camera3D" version="4.0"> <brief_description> A camera node with a few overrules for AR/VR applied, such as location tracking. </brief_description> <description> This is a helper spatial node for our camera; note that, if stereoscopic rendering is applicable (VR-HMD), most of the camera properties are ignored, as the HMD information overrides them. The only properties that can be trusted are the near and far planes. - The position and orientation of this node is automatically updated by the ARVR Server to represent the location of the HMD if such tracking is available and can thus be used by game logic. 
Note that, in contrast to the ARVR Controller, the render thread has access to the most up-to-date tracking data of the HMD and the location of the ARVRCamera can lag a few milliseconds behind what is used for rendering as a result. + The position and orientation of this node is automatically updated by the XR Server to represent the location of the HMD if such tracking is available and can thus be used by game logic. Note that, in contrast to the XR Controller, the render thread has access to the most up-to-date tracking data of the HMD and the location of the XRCamera3D can lag a few milliseconds behind what is used for rendering as a result. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/vr/index.html</link> diff --git a/doc/classes/ARVRController.xml b/doc/classes/XRController3D.xml index 572b47ce6d..e4a06a80db 100644 --- a/doc/classes/ARVRController.xml +++ b/doc/classes/XRController3D.xml @@ -1,12 +1,12 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRController" inherits="Node3D" version="4.0"> +<class name="XRController3D" inherits="Node3D" version="4.0"> <brief_description> A spatial node representing a spatially-tracked controller. </brief_description> <description> This is a helper spatial node that is linked to the tracking of controllers. It also offers several handy passthroughs to the state of buttons and such on the controllers. - Controllers are linked by their ID. You can create controller nodes before the controllers are available. If your game always uses two controllers (one for each hand), you can predefine the controllers with ID 1 and 2; they will become active as soon as the controllers are identified. If you expect additional controllers to be used, you should react to the signals and add ARVRController nodes to your scene. - The position of the controller node is automatically updated by the [ARVRServer]. This makes this node ideal to add child nodes to visualize the controller. + Controllers are linked by their ID. You can create controller nodes before the controllers are available. If your game always uses two controllers (one for each hand), you can predefine the controllers with ID 1 and 2; they will become active as soon as the controllers are identified. If you expect additional controllers to be used, you should react to the signals and add XRController3D nodes to your scene. + The position of the controller node is automatically updated by the [XRServer]. This makes this node ideal to add child nodes to visualize the controller. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/vr/index.html</link> @@ -20,17 +20,17 @@ </description> </method> <method name="get_hand" qualifiers="const"> - <return type="int" enum="ARVRPositionalTracker.TrackerHand"> + <return type="int" enum="XRPositionalTracker.TrackerHand"> </return> <description> - Returns the hand holding this controller, if known. See [enum ARVRPositionalTracker.TrackerHand]. + Returns the hand holding this controller, if known. See [enum XRPositionalTracker.TrackerHand]. </description> </method> <method name="get_is_active" qualifiers="const"> <return type="bool"> </return> <description> - Returns [code]true[/code] if the bound controller is active. ARVR systems attempt to track active controllers. + Returns [code]true[/code] if the bound controller is active. XR systems attempt to track active controllers. 
</description> </method> <method name="get_joystick_axis" qualifiers="const"> @@ -46,14 +46,14 @@ <return type="int"> </return> <description> - Returns the ID of the joystick object bound to this. Every controller tracked by the [ARVRServer] that has buttons and axis will also be registered as a joystick within Godot. This means that all the normal joystick tracking and input mapping will work for buttons and axis found on the AR/VR controllers. This ID is purely offered as information so you can link up the controller with its joystick entry. + Returns the ID of the joystick object bound to this. Every controller tracked by the [XRServer] that has buttons and axis will also be registered as a joystick within Godot. This means that all the normal joystick tracking and input mapping will work for buttons and axis found on the AR/VR controllers. This ID is purely offered as information so you can link up the controller with its joystick entry. </description> </method> <method name="get_mesh" qualifiers="const"> <return type="Mesh"> </return> <description> - If provided by the [ARVRInterface], this returns a mesh associated with the controller. This can be used to visualize the controller. + If provided by the [XRInterface], this returns a mesh associated with the controller. This can be used to visualize the controller. </description> </method> <method name="is_button_pressed" qualifiers="const"> @@ -70,11 +70,11 @@ <member name="controller_id" type="int" setter="set_controller_id" getter="get_controller_id" default="1"> The controller's ID. A controller ID of 0 is unbound and will always result in an inactive node. Controller ID 1 is reserved for the first controller that identifies itself as the left-hand controller and ID 2 is reserved for the first controller that identifies itself as the right-hand controller. - For any other controller that the [ARVRServer] detects, we continue with controller ID 3. + For any other controller that the [XRServer] detects, we continue with controller ID 3. When a controller is turned off, its slot is freed. This ensures controllers will keep the same ID even when controllers with lower IDs are turned off. </member> <member name="rumble" type="float" setter="set_rumble" getter="get_rumble" default="0.0"> - The degree to which the controller vibrates. Ranges from [code]0.0[/code] to [code]1.0[/code] with precision [code].01[/code]. If changed, updates [member ARVRPositionalTracker.rumble] accordingly. + The degree to which the controller vibrates. Ranges from [code]0.0[/code] to [code]1.0[/code] with precision [code].01[/code]. If changed, updates [member XRPositionalTracker.rumble] accordingly. This is a useful property to animate if you want the controller to vibrate for a limited duration. </member> </members> diff --git a/doc/classes/ARVRInterface.xml b/doc/classes/XRInterface.xml index 0727bda668..1985010223 100644 --- a/doc/classes/ARVRInterface.xml +++ b/doc/classes/XRInterface.xml @@ -1,11 +1,11 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRInterface" inherits="Reference" version="4.0"> +<class name="XRInterface" inherits="Reference" version="4.0"> <brief_description> Base class for an AR/VR interface implementation. </brief_description> <description> - This class needs to be implemented to make an AR or VR platform available to Godot and these should be implemented as C++ modules or GDNative modules (note that for GDNative the subclass ARVRScriptInterface should be used). 
Part of the interface is exposed to GDScript so you can detect, enable and configure an AR or VR platform. - Interfaces should be written in such a way that simply enabling them will give us a working setup. You can query the available interfaces through [ARVRServer]. + This class needs to be implemented to make an AR or VR platform available to Godot and these should be implemented as C++ modules or GDNative modules (note that for GDNative the subclass XRScriptInterface should be used). Part of the interface is exposed to GDScript so you can detect, enable and configure an AR or VR platform. + Interfaces should be written in such a way that simply enabling them will give us a working setup. You can query the available interfaces through [XRServer]. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/vr/index.html</link> @@ -40,7 +40,7 @@ </description> </method> <method name="get_tracking_status" qualifiers="const"> - <return type="int" enum="ARVRInterface.Tracking_status"> + <return type="int" enum="XRInterface.Tracking_status"> </return> <description> If supported, returns the status of our tracking. This will allow you to provide feedback to the user whether there are issues with positional tracking. @@ -84,19 +84,19 @@ </member> </members> <constants> - <constant name="ARVR_NONE" value="0" enum="Capabilities"> - No ARVR capabilities. + <constant name="XR_NONE" value="0" enum="Capabilities"> + No XR capabilities. </constant> - <constant name="ARVR_MONO" value="1" enum="Capabilities"> + <constant name="XR_MONO" value="1" enum="Capabilities"> This interface can work with normal rendering output (non-HMD based AR). </constant> - <constant name="ARVR_STEREO" value="2" enum="Capabilities"> + <constant name="XR_STEREO" value="2" enum="Capabilities"> This interface supports stereoscopic rendering. </constant> - <constant name="ARVR_AR" value="4" enum="Capabilities"> + <constant name="XR_AR" value="4" enum="Capabilities"> This interface supports AR (video background and real world tracking). </constant> - <constant name="ARVR_EXTERNAL" value="8" enum="Capabilities"> + <constant name="XR_EXTERNAL" value="8" enum="Capabilities"> This interface outputs to an external device. If the main viewport is used, the on screen output is an unmodified buffer of either the left or right eye (stretched if the viewport size is not changed to the same aspect ratio of [method get_render_targetsize]). Using a separate viewport node frees up the main viewport for other purposes. </constant> <constant name="EYE_MONO" value="0" enum="Eyes"> @@ -108,19 +108,19 @@ <constant name="EYE_RIGHT" value="2" enum="Eyes"> Right eye output, this is mostly used internally when rendering the image for the right eye and obtaining positioning and projection information. </constant> - <constant name="ARVR_NORMAL_TRACKING" value="0" enum="Tracking_status"> + <constant name="XR_NORMAL_TRACKING" value="0" enum="Tracking_status"> Tracking is behaving as expected. </constant> - <constant name="ARVR_EXCESSIVE_MOTION" value="1" enum="Tracking_status"> + <constant name="XR_EXCESSIVE_MOTION" value="1" enum="Tracking_status"> Tracking is hindered by excessive motion (the player is moving faster than tracking can keep up). </constant> - <constant name="ARVR_INSUFFICIENT_FEATURES" value="2" enum="Tracking_status"> + <constant name="XR_INSUFFICIENT_FEATURES" value="2" enum="Tracking_status"> Tracking is hindered by insufficient features, it's too dark (for camera-based tracking), player is blocked, etc. 
</constant> - <constant name="ARVR_UNKNOWN_TRACKING" value="3" enum="Tracking_status"> + <constant name="XR_UNKNOWN_TRACKING" value="3" enum="Tracking_status"> We don't know the status of the tracking or this interface does not provide feedback. </constant> - <constant name="ARVR_NOT_TRACKING" value="4" enum="Tracking_status"> + <constant name="XR_NOT_TRACKING" value="4" enum="Tracking_status"> Tracking is not functional (camera not plugged in or obscured, lighthouses turned off, etc.). </constant> </constants> diff --git a/doc/classes/ARVROrigin.xml b/doc/classes/XROrigin3D.xml index a88a89c927..57cf673d30 100644 --- a/doc/classes/ARVROrigin.xml +++ b/doc/classes/XROrigin3D.xml @@ -1,13 +1,13 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVROrigin" inherits="Node3D" version="4.0"> +<class name="XROrigin3D" inherits="Node3D" version="4.0"> <brief_description> The origin point in AR/VR. </brief_description> <description> This is a special node within the AR/VR system that maps the physical location of the center of our tracking space to the virtual location within our game world. - There should be only one of these nodes in your scene and you must have one. All the ARVRCamera, ARVRController and ARVRAnchor nodes should be direct children of this node for spatial tracking to work correctly. + There should be only one of these nodes in your scene and you must have one. All the XRCamera3D, XRController3D and XRAnchor3D nodes should be direct children of this node for spatial tracking to work correctly. It is the position of this node that you update when your character needs to move through your game world while we're not moving in the real world. Movement in the real world is always in relation to this origin point. - For example, if your character is driving a car, the ARVROrigin node should be a child node of this car. Or, if you're implementing a teleport system to move your character, you should change the position of this node. + For example, if your character is driving a car, the XROrigin3D node should be a child node of this car. Or, if you're implementing a teleport system to move your character, you should change the position of this node. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/vr/index.html</link> @@ -17,7 +17,7 @@ <members> <member name="world_scale" type="float" setter="set_world_scale" getter="get_world_scale" default="1.0"> Allows you to adjust the scale to your game's units. Most AR/VR platforms assume a scale of 1 game world unit = 1 real world meter. - [b]Note:[/b] This method is a passthrough to the [ARVRServer] itself. + [b]Note:[/b] This method is a passthrough to the [XRServer] itself. </member> </members> <constants> diff --git a/doc/classes/ARVRPositionalTracker.xml b/doc/classes/XRPositionalTracker.xml index 640b721d37..2f7cc21703 100644 --- a/doc/classes/ARVRPositionalTracker.xml +++ b/doc/classes/XRPositionalTracker.xml @@ -1,19 +1,19 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRPositionalTracker" inherits="Object" version="4.0"> +<class name="XRPositionalTracker" inherits="Object" version="4.0"> <brief_description> A tracked object. </brief_description> <description> An instance of this object represents a device that is tracked, such as a controller or anchor point. HMDs aren't represented here as they are handled internally. 
- As controllers are turned on and the AR/VR interface detects them, instances of this object are automatically added to this list of active tracking objects accessible through the [ARVRServer]. - The [ARVRController] and [ARVRAnchor] both consume objects of this type and should be used in your project. The positional trackers are just under-the-hood objects that make this all work. These are mostly exposed so that GDNative-based interfaces can interact with them. + As controllers are turned on and the AR/VR interface detects them, instances of this object are automatically added to this list of active tracking objects accessible through the [XRServer]. + The [XRController3D] and [XRAnchor3D] both consume objects of this type and should be used in your project. The positional trackers are just under-the-hood objects that make this all work. These are mostly exposed so that GDNative-based interfaces can interact with them. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/vr/index.html</link> </tutorials> <methods> <method name="get_hand" qualifiers="const"> - <return type="int" enum="ARVRPositionalTracker.TrackerHand"> + <return type="int" enum="XRPositionalTracker.TrackerHand"> </return> <description> Returns the hand holding this tracker, if known. See [enum TrackerHand] constants. @@ -58,7 +58,7 @@ <return type="int"> </return> <description> - Returns the internal tracker ID. This uniquely identifies the tracker per tracker type and matches the ID you need to specify for nodes such as the [ARVRController] and [ARVRAnchor] nodes. + Returns the internal tracker ID. This uniquely identifies the tracker per tracker type and matches the ID you need to specify for nodes such as the [XRController3D] and [XRAnchor3D] nodes. </description> </method> <method name="get_tracks_orientation" qualifiers="const"> @@ -85,7 +85,7 @@ </description> </method> <method name="get_type" qualifiers="const"> - <return type="int" enum="ARVRServer.TrackerType"> + <return type="int" enum="XRServer.TrackerType"> </return> <description> Returns the tracker's type. diff --git a/doc/classes/ARVRServer.xml b/doc/classes/XRServer.xml index d8d069c048..5e6002aee3 100644 --- a/doc/classes/ARVRServer.xml +++ b/doc/classes/XRServer.xml @@ -1,5 +1,5 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRServer" inherits="Object" version="4.0"> +<class name="XRServer" inherits="Object" version="4.0"> <brief_description> Server for AR and VR features. </brief_description> @@ -13,7 +13,7 @@ <method name="center_on_hmd"> <return type="void"> </return> - <argument index="0" name="rotation_mode" type="int" enum="ARVRServer.RotationMode"> + <argument index="0" name="rotation_mode" type="int" enum="XRServer.RotationMode"> </argument> <argument index="1" name="keep_height" type="bool"> </argument> @@ -27,7 +27,7 @@ </description> </method> <method name="find_interface" qualifiers="const"> - <return type="ARVRInterface"> + <return type="XRInterface"> </return> <argument index="0" name="name" type="String"> </argument> @@ -43,7 +43,7 @@ </description> </method> <method name="get_interface" qualifiers="const"> - <return type="ARVRInterface"> + <return type="XRInterface"> </return> <argument index="0" name="idx" type="int"> </argument> @@ -69,7 +69,7 @@ <return type="int"> </return> <description> - Returns the absolute timestamp (in μs) of the last [ARVRServer] commit of the AR/VR eyes to [RenderingServer]. The value comes from an internal call to [method OS.get_ticks_usec]. 
+ Returns the absolute timestamp (in μs) of the last [XRServer] commit of the AR/VR eyes to [RenderingServer]. The value comes from an internal call to [method OS.get_ticks_usec]. </description> </method> <method name="get_last_frame_usec"> @@ -83,7 +83,7 @@ <return type="int"> </return> <description> - Returns the absolute timestamp (in μs) of the last [ARVRServer] process callback. The value comes from an internal call to [method OS.get_ticks_usec]. + Returns the absolute timestamp (in μs) of the last [XRServer] process callback. The value comes from an internal call to [method OS.get_ticks_usec]. </description> </method> <method name="get_reference_frame" qualifiers="const"> @@ -94,7 +94,7 @@ </description> </method> <method name="get_tracker" qualifiers="const"> - <return type="ARVRPositionalTracker"> + <return type="XRPositionalTracker"> </return> <argument index="0" name="idx" type="int"> </argument> @@ -111,8 +111,8 @@ </method> </methods> <members> - <member name="primary_interface" type="ARVRInterface" setter="set_primary_interface" getter="get_primary_interface"> - The primary [ARVRInterface] currently bound to the [ARVRServer]. + <member name="primary_interface" type="XRInterface" setter="set_primary_interface" getter="get_primary_interface"> + The primary [XRInterface] currently bound to the [XRServer]. </member> <member name="world_scale" type="float" setter="set_world_scale" getter="get_world_scale" default="1.0"> Allows you to adjust the scale to your game's units. Most AR/VR platforms assume a scale of 1 game world unit = 1 real world meter. @@ -141,7 +141,7 @@ <argument index="2" name="id" type="int"> </argument> <description> - Emitted when a new tracker has been added. If you don't use a fixed number of controllers or if you're using [ARVRAnchor]s for an AR solution, it is important to react to this signal to add the appropriate [ARVRController] or [ARVRAnchor] nodes related to this new tracker. + Emitted when a new tracker has been added. If you don't use a fixed number of controllers or if you're using [XRAnchor3D]s for an AR solution, it is important to react to this signal to add the appropriate [XRController3D] or [XRAnchor3D] nodes related to this new tracker. </description> </signal> <signal name="tracker_removed"> @@ -152,7 +152,7 @@ <argument index="2" name="id" type="int"> </argument> <description> - Emitted when a tracker is removed. You should remove any [ARVRController] or [ARVRAnchor] points if applicable. This is not mandatory, the nodes simply become inactive and will be made active again when a new tracker becomes available (i.e. a new controller is switched on that takes the place of the previous one). + Emitted when a tracker is removed. You should remove any [XRController3D] or [XRAnchor3D] points if applicable. This is not mandatory, the nodes simply become inactive and will be made active again when a new tracker becomes available (i.e. a new controller is switched on that takes the place of the previous one). 
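For context on when tracker_added and tracker_removed fire, the modules/arkit hunks further down register and remove trackers with the server directly. A condensed sketch of that pattern using the post-rename names (illustrative only; the function names here are the editor's own):

    // Adding a tracker is what makes XRServer emit tracker_added, so XRAnchor3D or
    // XRController3D nodes in the scene can bind to it (see the ARKit hunks below).
    #include "servers/xr/xr_positional_tracker.h"
    #include "servers/xr_server.h"

    XRPositionalTracker *register_example_anchor() {
        XRPositionalTracker *new_tracker = memnew(XRPositionalTracker);
        new_tracker->set_type(XRServer::TRACKER_ANCHOR);
        new_tracker->set_name("Anchor 0"); // placeholder; the ARKit code uses the anchor UUID here
        XRServer::get_singleton()->add_tracker(new_tracker); // emits tracker_added
        return new_tracker;
    }

    void unregister_example_anchor(XRPositionalTracker *p_tracker) {
        XRServer::get_singleton()->remove_tracker(p_tracker); // emits tracker_removed
        memdelete(p_tracker);
    }
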
</description> </signal> </signals> diff --git a/drivers/vulkan/SCsub b/drivers/vulkan/SCsub index 7ffdac27d5..91d0e42f80 100644 --- a/drivers/vulkan/SCsub +++ b/drivers/vulkan/SCsub @@ -4,7 +4,25 @@ Import("env") env.add_source_files(env.drivers_sources, "*.cpp") -if env["builtin_vulkan"]: +if env["platform"] == "android": + # Use NDK Vulkan headers + thirdparty_dir = env["ANDROID_NDK_ROOT"] + "/sources/third_party/vulkan/src" + thirdparty_includes = [ + thirdparty_dir, + thirdparty_dir + "/include", + thirdparty_dir + "/layers", + thirdparty_dir + "/layers/generated", + ] + env.Prepend(CPPPATH=thirdparty_includes) + + # Build Vulkan memory allocator + env_thirdparty = env.Clone() + env_thirdparty.disable_warnings() + + thirdparty_dir = "#thirdparty/vulkan" + vma_sources = [thirdparty_dir + "/android/vk_mem_alloc.cpp"] + env_thirdparty.add_source_files(env.drivers_sources, vma_sources) +elif env["builtin_vulkan"]: # Use bundled Vulkan headers thirdparty_dir = "#thirdparty/vulkan" env.Prepend(CPPPATH=[thirdparty_dir, thirdparty_dir + "/include", thirdparty_dir + "/loader"]) diff --git a/drivers/vulkan/rendering_device_vulkan.cpp b/drivers/vulkan/rendering_device_vulkan.cpp index 01da97cfd5..09f10ef8b1 100644 --- a/drivers/vulkan/rendering_device_vulkan.cpp +++ b/drivers/vulkan/rendering_device_vulkan.cpp @@ -1565,15 +1565,20 @@ RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const T image_create_info.pNext = nullptr; image_create_info.flags = 0; - VkImageFormatListCreateInfoKHR format_list_create_info; - Vector<VkFormat> allowed_formats; - +#ifndef _MSC_VER +#warning TODO check for support via RenderingDevice to enable on mobile when possible +#endif + // vkCreateImage fails with format list on Android (VK_ERROR_OUT_OF_HOST_MEMORY) +#ifndef ANDROID_ENABLED if (p_format.shareable_formats.size()) { image_create_info.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT; + + Vector<VkFormat> allowed_formats; for (int i = 0; i < p_format.shareable_formats.size(); i++) { allowed_formats.push_back(vulkan_formats[p_format.shareable_formats[i]]); } + VkImageFormatListCreateInfoKHR format_list_create_info; format_list_create_info.sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR; format_list_create_info.pNext = nullptr; format_list_create_info.viewFormatCount = allowed_formats.size(); @@ -1585,6 +1590,7 @@ RID RenderingDeviceVulkan::texture_create(const TextureFormat &p_format, const T ERR_FAIL_COND_V_MSG(p_view.format_override != DATA_FORMAT_MAX && p_format.shareable_formats.find(p_view.format_override) == -1, RID(), "If supplied a list of shareable formats, the current view format override must be present in the list"); } +#endif if (p_format.type == TEXTURE_TYPE_CUBE || p_format.type == TEXTURE_TYPE_CUBE_ARRAY) { image_create_info.flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT; } diff --git a/drivers/vulkan/rendering_device_vulkan.h b/drivers/vulkan/rendering_device_vulkan.h index a46b80e2f4..88a12c7e44 100644 --- a/drivers/vulkan/rendering_device_vulkan.h +++ b/drivers/vulkan/rendering_device_vulkan.h @@ -37,8 +37,10 @@ #include "servers/rendering/rendering_device.h" #ifdef DEBUG_ENABLED +#ifndef _DEBUG #define _DEBUG #endif +#endif #include "vk_mem_alloc.h" #include <vulkan/vulkan.h> //todo: diff --git a/drivers/vulkan/vulkan_context.h b/drivers/vulkan/vulkan_context.h index 2f10fbfdef..e587104e3c 100644 --- a/drivers/vulkan/vulkan_context.h +++ b/drivers/vulkan/vulkan_context.h @@ -45,8 +45,6 @@ class VulkanContext { FRAME_LAG = 2 }; - bool use_validation_layers; - 
VkInstance inst; VkSurfaceKHR surface; VkPhysicalDevice gpu; @@ -181,6 +179,8 @@ protected: bool buffers_prepared; + bool use_validation_layers; + public: VkDevice get_device(); VkPhysicalDevice get_physical_device(); diff --git a/editor/icons/ARVRAnchor.svg b/editor/icons/XRAnchor3D.svg index f1571b3fcc..f1571b3fcc 100644 --- a/editor/icons/ARVRAnchor.svg +++ b/editor/icons/XRAnchor3D.svg diff --git a/editor/icons/ARVRCamera.svg b/editor/icons/XRCamera3D.svg index f59a8c8b4a..f59a8c8b4a 100644 --- a/editor/icons/ARVRCamera.svg +++ b/editor/icons/XRCamera3D.svg diff --git a/editor/icons/ARVRController.svg b/editor/icons/XRController3D.svg index 40e5b8dce1..40e5b8dce1 100644 --- a/editor/icons/ARVRController.svg +++ b/editor/icons/XRController3D.svg diff --git a/editor/icons/ARVROrigin.svg b/editor/icons/XROrigin3D.svg index dbb93ba7a5..dbb93ba7a5 100644 --- a/editor/icons/ARVROrigin.svg +++ b/editor/icons/XROrigin3D.svg diff --git a/main/main.cpp b/main/main.cpp index a53e52e485..fb42f71a75 100644 --- a/main/main.cpp +++ b/main/main.cpp @@ -61,7 +61,6 @@ #include "scene/main/window.h" #include "scene/register_scene_types.h" #include "scene/resources/packed_scene.h" -#include "servers/arvr_server.h" #include "servers/audio_server.h" #include "servers/camera_server.h" #include "servers/display_server.h" @@ -72,6 +71,7 @@ #include "servers/register_server_types.h" #include "servers/rendering/rendering_server_raster.h" #include "servers/rendering/rendering_server_wrap_mt.h" +#include "servers/xr_server.h" #ifdef TOOLS_ENABLED #include "editor/doc_data.h" @@ -105,7 +105,7 @@ static AudioServer *audio_server = nullptr; static DisplayServer *display_server = nullptr; static RenderingServer *rendering_server = nullptr; static CameraServer *camera_server = nullptr; -static ARVRServer *arvr_server = nullptr; +static XRServer *xr_server = nullptr; static PhysicsServer3D *physics_server = nullptr; static PhysicsServer2D *physics_2d_server = nullptr; static NavigationServer3D *navigation_server = nullptr; @@ -478,6 +478,14 @@ Error Main::setup(const char *execpath, int argc, char *argv[], bool p_second_ph I = args.front(); while (I) { +#ifdef OSX_ENABLED + // Ignore the process serial number argument passed by macOS Gatekeeper. + // Otherwise, Godot would try to open a non-existent project on the first start and abort. + if (I->get().begins_with("-psn_")) { + I = I->next(); + continue; + } +#endif List<String>::Element *N = I->next(); @@ -1297,8 +1305,8 @@ Error Main::setup2(Thread::ID p_main_tid_override) { audio_server = memnew(AudioServer); audio_server->init(); - // also init our arvr_server from here - arvr_server = memnew(ARVRServer); + // also init our xr_server from here + xr_server = memnew(XRServer); register_core_singletons(); @@ -2276,9 +2284,9 @@ void Main::cleanup() { EditorNode::unregister_editor_types(); #endif - if (arvr_server) { + if (xr_server) { // cleanup now before we pull the rug from underneath... 
- memdelete(arvr_server); + memdelete(xr_server); } ImageLoader::cleanup(); diff --git a/misc/travis/android-tools-linux.sh b/misc/travis/android-tools-linux.sh index d0c123ee6c..4eeb54412c 100755 --- a/misc/travis/android-tools-linux.sh +++ b/misc/travis/android-tools-linux.sh @@ -24,12 +24,12 @@ ANDROID_SDK_URL=$ANDROID_BASE_URL/$ANDROID_SDK_FILENAME ANDROID_SDK_PATH=$GODOT_BUILD_TOOLS_PATH/$ANDROID_SDK_DIR ANDROID_SDK_SHA256=92ffee5a1d98d856634e8b71132e8a95d96c83a63fde1099be3d86df3106def9 -ANDROID_NDK_RELEASE=r20 +ANDROID_NDK_RELEASE=r21 ANDROID_NDK_DIR=android-ndk ANDROID_NDK_FILENAME=android-ndk-$ANDROID_NDK_RELEASE-linux-x86_64.zip ANDROID_NDK_URL=$ANDROID_BASE_URL/$ANDROID_NDK_FILENAME ANDROID_NDK_PATH=$GODOT_BUILD_TOOLS_PATH/$ANDROID_NDK_DIR -ANDROID_NDK_SHA1=8665fc84a1b1f0d6ab3b5fdd1e30200cc7b9adff +ANDROID_NDK_SHA1=afc9c0b9faad222898ac8168c78ad4ccac8a1b5c echo echo "Download and install Android development tools ..." diff --git a/modules/SCsub b/modules/SCsub index 1671b398e5..fb46c5f877 100644 --- a/modules/SCsub +++ b/modules/SCsub @@ -11,6 +11,7 @@ Export("env_modules") # Header with MODULE_*_ENABLED defines. env.CommandNoCache("modules_enabled.gen.h", Value(env.module_list), modules_builders.generate_modules_enabled) +vs_sources = [] # libmodule_<name>.a for each active module. for module in env.module_list: env.modules_sources = [] @@ -23,6 +24,8 @@ for module in env.module_list: lib = env_modules.add_library("module_%s" % module, env.modules_sources) env.Prepend(LIBS=[lib]) + if env["vsproj"]: + vs_sources += env.modules_sources # libmodules.a with only register_module_types. # Must be last so that all libmodule_<name>.a libraries are on the right side @@ -31,3 +34,5 @@ env.modules_sources = [] env_modules.add_source_files(env.modules_sources, "register_module_types.gen.cpp") lib = env_modules.add_library("modules", env.modules_sources) env.Prepend(LIBS=[lib]) +if env["vsproj"]: + env.modules_sources += vs_sources diff --git a/modules/arkit/arkit_interface.h b/modules/arkit/arkit_interface.h index 4f8f726816..1044f3cf6f 100644 --- a/modules/arkit/arkit_interface.h +++ b/modules/arkit/arkit_interface.h @@ -31,9 +31,9 @@ #ifndef ARKIT_INTERFACE_H #define ARKIT_INTERFACE_H -#include "servers/arvr/arvr_interface.h" -#include "servers/arvr/arvr_positional_tracker.h" #include "servers/camera/camera_feed.h" +#include "servers/xr/xr_interface.h" +#include "servers/xr/xr_positional_tracker.h" /** @author Bastiaan Olij <mux213@gmail.com> @@ -44,8 +44,8 @@ // forward declaration for some needed objects class ARKitShader; -class ARKitInterface : public ARVRInterface { - GDCLASS(ARKitInterface, ARVRInterface); +class ARKitInterface : public XRInterface { + GDCLASS(ARKitInterface, XRInterface); private: bool initialized; @@ -65,7 +65,7 @@ private: Vector<uint8_t> img_data[2]; struct anchor_map { - ARVRPositionalTracker *tracker; + XRPositionalTracker *tracker; unsigned char uuid[16]; }; @@ -73,7 +73,7 @@ private: unsigned int num_anchors; unsigned int max_anchors; anchor_map *anchors; - ARVRPositionalTracker *get_anchor_for_uuid(const unsigned char *p_uuid); + XRPositionalTracker *get_anchor_for_uuid(const unsigned char *p_uuid); void remove_anchor_for_uuid(const unsigned char *p_uuid); void remove_all_anchors(); @@ -108,9 +108,9 @@ public: virtual Size2 get_render_targetsize(); virtual bool is_stereo(); - virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform); - virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t 
p_aspect, real_t p_z_near, real_t p_z_far); - virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); + virtual Transform get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform); + virtual CameraMatrix get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); + virtual void commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); virtual void process(); diff --git a/modules/arkit/arkit_interface.mm b/modules/arkit/arkit_interface.mm index 031e1e115e..79f09e2a7e 100644 --- a/modules/arkit/arkit_interface.mm +++ b/modules/arkit/arkit_interface.mm @@ -158,7 +158,7 @@ StringName ARKitInterface::get_name() const { } int ARKitInterface::get_capabilities() const { - return ARKitInterface::ARVR_MONO + ARKitInterface::ARVR_AR; + return ARKitInterface::XR_MONO + ARKitInterface::XR_AR; } Array ARKitInterface::raycast(Vector2 p_screen_coord) { @@ -218,8 +218,8 @@ bool ARKitInterface::is_initialized() const { } bool ARKitInterface::initialize() { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, false); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, false); if (!initialized) { print_line("initializing ARKit"); @@ -244,7 +244,7 @@ bool ARKitInterface::initialize() { transform = Transform(); // make this our primary interface - arvr_server->set_primary_interface(this); + xr_server->set_primary_interface(this); // make sure we have our feed setup if (feed.is_null()) { @@ -270,10 +270,10 @@ bool ARKitInterface::initialize() { void ARKitInterface::uninitialize() { if (initialized) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - if (arvr_server != NULL) { + XRServer *xr_server = XRServer::get_singleton(); + if (xr_server != NULL) { // no longer our primary interface - arvr_server->clear_primary_interface_if(this); + xr_server->clear_primary_interface_if(this); } if (feed.is_valid()) { @@ -303,22 +303,22 @@ Size2 ARKitInterface::get_render_targetsize() { return target_size; } -Transform ARKitInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) { +Transform ARKitInterface::get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform) { _THREAD_SAFE_METHOD_ Transform transform_for_eye; - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, transform_for_eye); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, transform_for_eye); if (initialized) { - float world_scale = arvr_server->get_world_scale(); + float world_scale = xr_server->get_world_scale(); // just scale our origin point of our transform, note that we really shouldn't be using world_scale in ARKit but.... transform_for_eye = transform; transform_for_eye.origin *= world_scale; - transform_for_eye = p_cam_transform * arvr_server->get_reference_frame() * transform_for_eye; + transform_for_eye = p_cam_transform * xr_server->get_reference_frame() * transform_for_eye; } else { // huh? well just return what we got.... 
transform_for_eye = p_cam_transform; @@ -327,7 +327,7 @@ Transform ARKitInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, const return transform_for_eye; } -CameraMatrix ARKitInterface::get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { +CameraMatrix ARKitInterface::get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { // Remember our near and far, it will be used in process when we obtain our projection from our ARKit session. z_near = p_z_near; z_far = p_z_far; @@ -335,7 +335,7 @@ CameraMatrix ARKitInterface::get_projection_for_eye(ARVRInterface::Eyes p_eye, r return projection; } -void ARKitInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { +void ARKitInterface::commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { _THREAD_SAFE_METHOD_ // We must have a valid render target @@ -356,7 +356,7 @@ void ARKitInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_targ VSG::rasterizer->blit_render_target_to_screen(p_render_target, screen_rect, 0); } -ARVRPositionalTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char *p_uuid) { +XRPositionalTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char *p_uuid) { if (anchors == NULL) { num_anchors = 0; max_anchors = 10; @@ -377,8 +377,8 @@ ARVRPositionalTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char * ERR_FAIL_NULL_V(anchors, NULL); } - ARVRPositionalTracker *new_tracker = memnew(ARVRPositionalTracker); - new_tracker->set_type(ARVRServer::TRACKER_ANCHOR); + XRPositionalTracker *new_tracker = memnew(XRPositionalTracker); + new_tracker->set_type(XRServer::TRACKER_ANCHOR); char tracker_name[256]; sprintf(tracker_name, "Anchor %02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x", p_uuid[0], p_uuid[1], p_uuid[2], p_uuid[3], p_uuid[4], p_uuid[5], p_uuid[6], p_uuid[7], p_uuid[8], p_uuid[9], p_uuid[10], p_uuid[11], p_uuid[12], p_uuid[13], p_uuid[14], p_uuid[15]); @@ -388,7 +388,7 @@ ARVRPositionalTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char * new_tracker->set_name(name); // add our tracker - ARVRServer::get_singleton()->add_tracker(new_tracker); + XRServer::get_singleton()->add_tracker(new_tracker); anchors[num_anchors].tracker = new_tracker; memcpy(anchors[num_anchors].uuid, p_uuid, 16); num_anchors++; @@ -401,7 +401,7 @@ void ARKitInterface::remove_anchor_for_uuid(const unsigned char *p_uuid) { for (unsigned int i = 0; i < num_anchors; i++) { if (memcmp(anchors[i].uuid, p_uuid, 16) == 0) { // remove our tracker - ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker); + XRServer::get_singleton()->remove_tracker(anchors[i].tracker); memdelete(anchors[i].tracker); // bring remaining forward @@ -421,7 +421,7 @@ void ARKitInterface::remove_all_anchors() { if (anchors != NULL) { for (unsigned int i = 0; i < num_anchors; i++) { // remove our tracker - ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker); + XRServer::get_singleton()->remove_tracker(anchors[i].tracker); memdelete(anchors[i].tracker); }; @@ -582,16 +582,16 @@ void ARKitInterface::process() { // strangely enough we have to states, rolling them up into one if (camera.trackingState == ARTrackingStateNotAvailable) { // no tracking, would be good if we black out the screen or something... 
- tracking_state = ARVRInterface::ARVR_NOT_TRACKING; + tracking_state = XRInterface::XR_NOT_TRACKING; } else { if (camera.trackingState == ARTrackingStateNormal) { - tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING; + tracking_state = XRInterface::XR_NORMAL_TRACKING; } else if (camera.trackingStateReason == ARTrackingStateReasonExcessiveMotion) { - tracking_state = ARVRInterface::ARVR_EXCESSIVE_MOTION; + tracking_state = XRInterface::XR_EXCESSIVE_MOTION; } else if (camera.trackingStateReason == ARTrackingStateReasonInsufficientFeatures) { - tracking_state = ARVRInterface::ARVR_INSUFFICIENT_FEATURES; + tracking_state = XRInterface::XR_INSUFFICIENT_FEATURES; } else { - tracking_state = ARVRInterface::ARVR_UNKNOWN_TRACKING; + tracking_state = XRInterface::XR_UNKNOWN_TRACKING; } // copy our current frame transform @@ -665,7 +665,7 @@ void ARKitInterface::_add_or_update_anchor(void *p_anchor) { unsigned char uuid[16]; [anchor.identifier getUUIDBytes:uuid]; - ARVRPositionalTracker *tracker = get_anchor_for_uuid(uuid); + XRPositionalTracker *tracker = get_anchor_for_uuid(uuid); if (tracker != NULL) { // lets update our mesh! (using Arjens code as is for now) // we should also probably limit how often we do this... @@ -695,7 +695,7 @@ void ARKitInterface::_add_or_update_anchor(void *p_anchor) { } // Note, this also contains a scale factor which gives us an idea of the size of the anchor - // We may extract that in our ARVRAnchor class + // We may extract that in our XRAnchor class Basis b; matrix_float4x4 m44 = anchor.transform; b.elements[0].x = m44.columns[0][0]; diff --git a/modules/arkit/register_types.cpp b/modules/arkit/register_types.cpp index c78b35529b..91069ab364 100644 --- a/modules/arkit/register_types.cpp +++ b/modules/arkit/register_types.cpp @@ -37,7 +37,7 @@ void register_arkit_types() { Ref<ARKitInterface> arkit_interface; arkit_interface.instance(); - ARVRServer::get_singleton()->add_interface(arkit_interface); + XRServer::get_singleton()->add_interface(arkit_interface); } void unregister_arkit_types() { diff --git a/modules/gdnative/SCsub b/modules/gdnative/SCsub index a788175b07..cab05549d2 100644 --- a/modules/gdnative/SCsub +++ b/modules/gdnative/SCsub @@ -17,7 +17,7 @@ env_gdnative.Prepend(CPPPATH=["#modules/gdnative/include/"]) Export("env_gdnative") SConscript("net/SCsub") -SConscript("arvr/SCsub") +SConscript("xr/SCsub") SConscript("pluginscript/SCsub") SConscript("videodecoder/SCsub") diff --git a/modules/gdnative/config.py b/modules/gdnative/config.py index 37e25a46d4..4b997e4bfe 100644 --- a/modules/gdnative/config.py +++ b/modules/gdnative/config.py @@ -9,7 +9,7 @@ def configure(env): def get_doc_classes(): return [ "@NativeScript", - "ARVRInterfaceGDNative", + "XRInterfaceGDNative", "GDNative", "GDNativeLibrary", "MultiplayerPeerGDNative", diff --git a/modules/gdnative/doc_classes/ARVRInterfaceGDNative.xml b/modules/gdnative/doc_classes/ARVRInterfaceGDNative.xml deleted file mode 100644 index e8405b64a3..0000000000 --- a/modules/gdnative/doc_classes/ARVRInterfaceGDNative.xml +++ /dev/null @@ -1,15 +0,0 @@ -<?xml version="1.0" encoding="UTF-8" ?> -<class name="ARVRInterfaceGDNative" inherits="ARVRInterface" version="4.0"> - <brief_description> - GDNative wrapper for an ARVR interface. - </brief_description> - <description> - This is a wrapper class for GDNative implementations of the ARVR interface. To use a GDNative ARVR interface, simply instantiate this object and set your GDNative library containing the ARVR interface implementation. 
- </description> - <tutorials> - </tutorials> - <methods> - </methods> - <constants> - </constants> -</class> diff --git a/modules/gdnative/doc_classes/GDNativeLibrary.xml b/modules/gdnative/doc_classes/GDNativeLibrary.xml index 601e132d42..1aab864102 100644 --- a/modules/gdnative/doc_classes/GDNativeLibrary.xml +++ b/modules/gdnative/doc_classes/GDNativeLibrary.xml @@ -4,7 +4,7 @@ An external library containing functions or script classes to use in Godot. </brief_description> <description> - A GDNative library can implement [NativeScript]s, global functions to call with the [GDNative] class, or low-level engine extensions through interfaces such as [ARVRInterfaceGDNative]. The library must be compiled for each platform and architecture that the project will run on. + A GDNative library can implement [NativeScript]s, global functions to call with the [GDNative] class, or low-level engine extensions through interfaces such as [XRInterfaceGDNative]. The library must be compiled for each platform and architecture that the project will run on. </description> <tutorials> <link>https://docs.godotengine.org/en/latest/tutorials/plugins/gdnative/gdnative-c-example.html</link> diff --git a/modules/gdnative/doc_classes/XRInterfaceGDNative.xml b/modules/gdnative/doc_classes/XRInterfaceGDNative.xml new file mode 100644 index 0000000000..13de815793 --- /dev/null +++ b/modules/gdnative/doc_classes/XRInterfaceGDNative.xml @@ -0,0 +1,15 @@ +<?xml version="1.0" encoding="UTF-8" ?> +<class name="XRInterfaceGDNative" inherits="XRInterface" version="4.0"> + <brief_description> + GDNative wrapper for an XR interface. + </brief_description> + <description> + This is a wrapper class for GDNative implementations of the XR interface. To use a GDNative XR interface, simply instantiate this object and set your GDNative library containing the XR interface implementation. 
+ </description> + <tutorials> + </tutorials> + <methods> + </methods> + <constants> + </constants> +</class> diff --git a/modules/gdnative/gdnative_api.json b/modules/gdnative/gdnative_api.json index e1d6c0c867..9473a3d419 100644 --- a/modules/gdnative/gdnative_api.json +++ b/modules/gdnative/gdnative_api.json @@ -5935,8 +5935,8 @@ ] }, { - "name": "arvr", - "type": "ARVR", + "name": "xr", + "type": "XR", "version": { "major": 1, "minor": 1 @@ -5944,24 +5944,24 @@ "next": null, "api": [ { - "name": "godot_arvr_register_interface", + "name": "godot_xr_register_interface", "return_type": "void", "arguments": [ - ["const godot_arvr_interface_gdnative *", "p_interface"] + ["const godot_xr_interface_gdnative *", "p_interface"] ] }, { - "name": "godot_arvr_get_worldscale", + "name": "godot_xr_get_worldscale", "return_type": "godot_real", "arguments": [] }, { - "name": "godot_arvr_get_reference_frame", + "name": "godot_xr_get_reference_frame", "return_type": "godot_transform", "arguments": [] }, { - "name": "godot_arvr_blit", + "name": "godot_xr_blit", "return_type": "void", "arguments": [ ["godot_int", "p_eye"], @@ -5970,14 +5970,14 @@ ] }, { - "name": "godot_arvr_get_texid", + "name": "godot_xr_get_texid", "return_type": "godot_int", "arguments": [ ["godot_rid *", "p_render_target"] ] }, { - "name": "godot_arvr_add_controller", + "name": "godot_xr_add_controller", "return_type": "godot_int", "arguments": [ ["char *", "p_device_name"], @@ -5987,14 +5987,14 @@ ] }, { - "name": "godot_arvr_remove_controller", + "name": "godot_xr_remove_controller", "return_type": "void", "arguments": [ ["godot_int", "p_controller_id"] ] }, { - "name": "godot_arvr_set_controller_transform", + "name": "godot_xr_set_controller_transform", "return_type": "void", "arguments": [ ["godot_int", "p_controller_id"], @@ -6004,7 +6004,7 @@ ] }, { - "name": "godot_arvr_set_controller_button", + "name": "godot_xr_set_controller_button", "return_type": "void", "arguments": [ ["godot_int", "p_controller_id"], @@ -6013,7 +6013,7 @@ ] }, { - "name": "godot_arvr_set_controller_axis", + "name": "godot_xr_set_controller_axis", "return_type": "void", "arguments": [ ["godot_int", "p_controller_id"], @@ -6023,7 +6023,7 @@ ] }, { - "name": "godot_arvr_get_controller_rumble", + "name": "godot_xr_get_controller_rumble", "return_type": "godot_real", "arguments": [ ["godot_int", "p_controller_id"] diff --git a/modules/gdnative/gdnative_builders.py b/modules/gdnative/gdnative_builders.py index 2d84f93d87..620935795f 100644 --- a/modules/gdnative/gdnative_builders.py +++ b/modules/gdnative/gdnative_builders.py @@ -19,7 +19,7 @@ def _build_gdnative_api_struct_header(api): "", "#include <gdnative/gdnative.h>", "#include <android/godot_android.h>", - "#include <arvr/godot_arvr.h>", + "#include <xr/godot_xr.h>", "#include <nativescript/godot_nativescript.h>", "#include <net/godot_net.h>", "#include <pluginscript/godot_pluginscript.h>", diff --git a/modules/gdnative/include/arvr/godot_arvr.h b/modules/gdnative/include/xr/godot_xr.h index aaef31a855..22f7f021c4 100644 --- a/modules/gdnative/include/arvr/godot_arvr.h +++ b/modules/gdnative/include/xr/godot_xr.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* godot_arvr.h */ +/* godot_xr.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,8 +28,8 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#ifndef GODOT_NATIVEARVR_H -#define GODOT_NATIVEARVR_H +#ifndef GODOT_NATIVEXR_H +#define GODOT_NATIVEXR_H #include <gdnative/gdnative.h> @@ -61,32 +61,31 @@ typedef struct { void (*fill_projection_for_eye)(void *, godot_real *, godot_int, godot_real, godot_real, godot_real); void (*commit_for_eye)(void *, godot_int, godot_rid *, godot_rect2 *); void (*process)(void *); - // only in 1.1 onwards godot_int (*get_external_texture_for_eye)(void *, godot_int); void (*notification)(void *, godot_int); godot_int (*get_camera_feed_id)(void *); -} godot_arvr_interface_gdnative; +} godot_xr_interface_gdnative; -void GDAPI godot_arvr_register_interface(const godot_arvr_interface_gdnative *p_interface); +void GDAPI godot_xr_register_interface(const godot_xr_interface_gdnative *p_interface); -// helper functions to access ARVRServer data -godot_real GDAPI godot_arvr_get_worldscale(); -godot_transform GDAPI godot_arvr_get_reference_frame(); +// helper functions to access XRServer data +godot_real GDAPI godot_xr_get_worldscale(); +godot_transform GDAPI godot_xr_get_reference_frame(); // helper functions for rendering -void GDAPI godot_arvr_blit(godot_int p_eye, godot_rid *p_render_target, godot_rect2 *p_rect); -godot_int GDAPI godot_arvr_get_texid(godot_rid *p_render_target); +void GDAPI godot_xr_blit(godot_int p_eye, godot_rid *p_render_target, godot_rect2 *p_rect); +godot_int GDAPI godot_xr_get_texid(godot_rid *p_render_target); -// helper functions for updating ARVR controllers -godot_int GDAPI godot_arvr_add_controller(char *p_device_name, godot_int p_hand, godot_bool p_tracks_orientation, godot_bool p_tracks_position); -void GDAPI godot_arvr_remove_controller(godot_int p_controller_id); -void GDAPI godot_arvr_set_controller_transform(godot_int p_controller_id, godot_transform *p_transform, godot_bool p_tracks_orientation, godot_bool p_tracks_position); -void GDAPI godot_arvr_set_controller_button(godot_int p_controller_id, godot_int p_button, godot_bool p_is_pressed); -void GDAPI godot_arvr_set_controller_axis(godot_int p_controller_id, godot_int p_axis, godot_real p_value, godot_bool p_can_be_negative); -godot_real GDAPI godot_arvr_get_controller_rumble(godot_int p_controller_id); +// helper functions for updating XR controllers +godot_int GDAPI godot_xr_add_controller(char *p_device_name, godot_int p_hand, godot_bool p_tracks_orientation, godot_bool p_tracks_position); +void GDAPI godot_xr_remove_controller(godot_int p_controller_id); +void GDAPI godot_xr_set_controller_transform(godot_int p_controller_id, godot_transform *p_transform, godot_bool p_tracks_orientation, godot_bool p_tracks_position); +void GDAPI godot_xr_set_controller_button(godot_int p_controller_id, godot_int p_button, godot_bool p_is_pressed); +void GDAPI godot_xr_set_controller_axis(godot_int p_controller_id, godot_int p_axis, godot_real p_value, godot_bool p_can_be_negative); +godot_real GDAPI godot_xr_get_controller_rumble(godot_int p_controller_id); #ifdef __cplusplus } #endif -#endif /* !GODOT_NATIVEARVR_H */ +#endif /* !GODOT_NATIVEXR_H */ diff --git a/modules/gdnative/register_types.cpp b/modules/gdnative/register_types.cpp index 397a020689..67a286ee2e 100644 --- a/modules/gdnative/register_types.cpp +++ b/modules/gdnative/register_types.cpp @@ -34,11 +34,11 @@ #include "gdnative.h" -#include "arvr/register_types.h" #include "nativescript/register_types.h" #include "net/register_types.h" #include "pluginscript/register_types.h" #include 
"videodecoder/register_types.h" +#include "xr/register_types.h" #include "core/engine.h" #include "core/io/resource_loader.h" @@ -240,7 +240,7 @@ void register_gdnative_types() { GDNativeCallRegistry::singleton->register_native_call_type("standard_varcall", cb_standard_varcall); register_net_types(); - register_arvr_types(); + register_xr_types(); register_nativescript_types(); register_pluginscript_types(); register_videodecoder_types(); @@ -305,7 +305,7 @@ void unregister_gdnative_types() { unregister_videodecoder_types(); unregister_pluginscript_types(); unregister_nativescript_types(); - unregister_arvr_types(); + unregister_xr_types(); unregister_net_types(); memdelete(GDNativeCallRegistry::singleton); diff --git a/modules/gdnative/arvr/SCsub b/modules/gdnative/xr/SCsub index 0b2db3b504..0b2db3b504 100644 --- a/modules/gdnative/arvr/SCsub +++ b/modules/gdnative/xr/SCsub diff --git a/modules/gdnative/arvr/config.py b/modules/gdnative/xr/config.py index d22f9454ed..d22f9454ed 100644 --- a/modules/gdnative/arvr/config.py +++ b/modules/gdnative/xr/config.py diff --git a/modules/gdnative/arvr/register_types.cpp b/modules/gdnative/xr/register_types.cpp index 0f6e2bca1a..da3a7dc4b8 100644 --- a/modules/gdnative/arvr/register_types.cpp +++ b/modules/gdnative/xr/register_types.cpp @@ -29,11 +29,12 @@ /*************************************************************************/ #include "register_types.h" -#include "arvr_interface_gdnative.h" +#include "xr_interface_gdnative.h" -void register_arvr_types() { - ClassDB::register_class<ARVRInterfaceGDNative>(); +void register_xr_types() { + ClassDB::register_class<XRInterfaceGDNative>(); + ClassDB::add_compatibility_class("ARVRInterfaceGDNative", "XRInterfaceGDNative"); } -void unregister_arvr_types() { +void unregister_xr_types() { } diff --git a/modules/gdnative/arvr/register_types.h b/modules/gdnative/xr/register_types.h index b0de6f7c14..2501d28651 100644 --- a/modules/gdnative/arvr/register_types.h +++ b/modules/gdnative/xr/register_types.h @@ -28,10 +28,10 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_REGISTER_TYPES_H -#define ARVR_REGISTER_TYPES_H +#ifndef XR_REGISTER_TYPES_H +#define XR_REGISTER_TYPES_H -void register_arvr_types(); -void unregister_arvr_types(); +void register_xr_types(); +void unregister_xr_types(); -#endif // ARVR_REGISTER_TYPES_H +#endif // XR_REGISTER_TYPES_H diff --git a/modules/gdnative/arvr/arvr_interface_gdnative.cpp b/modules/gdnative/xr/xr_interface_gdnative.cpp index f14691027a..0451945139 100644 --- a/modules/gdnative/arvr/arvr_interface_gdnative.cpp +++ b/modules/gdnative/xr/xr_interface_gdnative.cpp @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_interface_gdnative.cpp */ +/* xr_interface_gdnative.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,17 +28,17 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#include "arvr_interface_gdnative.h" +#include "xr_interface_gdnative.h" #include "core/input/input_filter.h" -#include "servers/arvr/arvr_positional_tracker.h" #include "servers/rendering/rendering_server_globals.h" +#include "servers/xr/xr_positional_tracker.h" -void ARVRInterfaceGDNative::_bind_methods() { +void XRInterfaceGDNative::_bind_methods() { ADD_PROPERTY_DEFAULT("interface_is_initialized", false); ADD_PROPERTY_DEFAULT("ar_is_anchor_detection_enabled", false); } -ARVRInterfaceGDNative::ARVRInterfaceGDNative() { +XRInterfaceGDNative::XRInterfaceGDNative() { print_verbose("Construct gdnative interface\n"); // we won't have our data pointer until our library gets set @@ -47,7 +47,7 @@ ARVRInterfaceGDNative::ARVRInterfaceGDNative() { interface = nullptr; } -ARVRInterfaceGDNative::~ARVRInterfaceGDNative() { +XRInterfaceGDNative::~XRInterfaceGDNative() { print_verbose("Destruct gdnative interface\n"); if (interface != nullptr && is_initialized()) { @@ -58,7 +58,7 @@ ARVRInterfaceGDNative::~ARVRInterfaceGDNative() { cleanup(); } -void ARVRInterfaceGDNative::cleanup() { +void XRInterfaceGDNative::cleanup() { if (interface != nullptr) { interface->destructor(data); data = nullptr; @@ -66,7 +66,7 @@ void ARVRInterfaceGDNative::cleanup() { } } -void ARVRInterfaceGDNative::set_interface(const godot_arvr_interface_gdnative *p_interface) { +void XRInterfaceGDNative::set_interface(const godot_xr_interface_gdnative *p_interface) { // this should only be called once, just being paranoid.. if (interface) { cleanup(); @@ -79,7 +79,7 @@ void ARVRInterfaceGDNative::set_interface(const godot_arvr_interface_gdnative *p data = interface->constructor((godot_object *)this); } -StringName ARVRInterfaceGDNative::get_name() const { +StringName XRInterfaceGDNative::get_name() const { ERR_FAIL_COND_V(interface == nullptr, StringName()); @@ -92,7 +92,7 @@ StringName ARVRInterfaceGDNative::get_name() const { return name; } -int ARVRInterfaceGDNative::get_capabilities() const { +int XRInterfaceGDNative::get_capabilities() const { int capabilities; ERR_FAIL_COND_V(interface == nullptr, 0); // 0 = None @@ -102,32 +102,28 @@ int ARVRInterfaceGDNative::get_capabilities() const { return capabilities; } -bool ARVRInterfaceGDNative::get_anchor_detection_is_enabled() const { +bool XRInterfaceGDNative::get_anchor_detection_is_enabled() const { ERR_FAIL_COND_V(interface == nullptr, false); return interface->get_anchor_detection_is_enabled(data); } -void ARVRInterfaceGDNative::set_anchor_detection_is_enabled(bool p_enable) { +void XRInterfaceGDNative::set_anchor_detection_is_enabled(bool p_enable) { ERR_FAIL_COND(interface == nullptr); interface->set_anchor_detection_is_enabled(data, p_enable); } -int ARVRInterfaceGDNative::get_camera_feed_id() { +int XRInterfaceGDNative::get_camera_feed_id() { ERR_FAIL_COND_V(interface == nullptr, 0); - if ((interface->version.major > 1) || ((interface->version.major) == 1 && (interface->version.minor >= 1))) { - return (unsigned int)interface->get_camera_feed_id(data); - } else { - return 0; - } + return (unsigned int)interface->get_camera_feed_id(data); } -bool ARVRInterfaceGDNative::is_stereo() { +bool XRInterfaceGDNative::is_stereo() { bool stereo; ERR_FAIL_COND_V(interface == nullptr, false); @@ -137,14 +133,14 @@ bool ARVRInterfaceGDNative::is_stereo() { return stereo; } -bool ARVRInterfaceGDNative::is_initialized() const { +bool XRInterfaceGDNative::is_initialized() const { ERR_FAIL_COND_V(interface == 
nullptr, false); return interface->is_initialized(data); } -bool ARVRInterfaceGDNative::initialize() { +bool XRInterfaceGDNative::initialize() { ERR_FAIL_COND_V(interface == nullptr, false); bool initialized = interface->initialize(data); @@ -152,28 +148,28 @@ bool ARVRInterfaceGDNative::initialize() { if (initialized) { // if we successfully initialize our interface and we don't have a primary interface yet, this becomes our primary interface - ARVRServer *arvr_server = ARVRServer::get_singleton(); - if ((arvr_server != nullptr) && (arvr_server->get_primary_interface() == nullptr)) { - arvr_server->set_primary_interface(this); + XRServer *xr_server = XRServer::get_singleton(); + if ((xr_server != nullptr) && (xr_server->get_primary_interface() == nullptr)) { + xr_server->set_primary_interface(this); }; }; return initialized; } -void ARVRInterfaceGDNative::uninitialize() { +void XRInterfaceGDNative::uninitialize() { ERR_FAIL_COND(interface == nullptr); - ARVRServer *arvr_server = ARVRServer::get_singleton(); - if (arvr_server != nullptr) { + XRServer *xr_server = XRServer::get_singleton(); + if (xr_server != nullptr) { // Whatever happens, make sure this is no longer our primary interface - arvr_server->clear_primary_interface_if(this); + xr_server->clear_primary_interface_if(this); } interface->uninitialize(data); } -Size2 ARVRInterfaceGDNative::get_render_targetsize() { +Size2 XRInterfaceGDNative::get_render_targetsize() { ERR_FAIL_COND_V(interface == nullptr, Size2()); @@ -183,7 +179,7 @@ Size2 ARVRInterfaceGDNative::get_render_targetsize() { return *vec; } -Transform ARVRInterfaceGDNative::get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) { +Transform XRInterfaceGDNative::get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform) { Transform *ret; ERR_FAIL_COND_V(interface == nullptr, Transform()); @@ -195,7 +191,7 @@ Transform ARVRInterfaceGDNative::get_transform_for_eye(ARVRInterface::Eyes p_eye return *ret; } -CameraMatrix ARVRInterfaceGDNative::get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { +CameraMatrix XRInterfaceGDNative::get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { CameraMatrix cm; ERR_FAIL_COND_V(interface == nullptr, CameraMatrix()); @@ -205,37 +201,30 @@ CameraMatrix ARVRInterfaceGDNative::get_projection_for_eye(ARVRInterface::Eyes p return cm; } -unsigned int ARVRInterfaceGDNative::get_external_texture_for_eye(ARVRInterface::Eyes p_eye) { +unsigned int XRInterfaceGDNative::get_external_texture_for_eye(XRInterface::Eyes p_eye) { ERR_FAIL_COND_V(interface == nullptr, 0); - if ((interface->version.major > 1) || ((interface->version.major) == 1 && (interface->version.minor >= 1))) { - return (unsigned int)interface->get_external_texture_for_eye(data, (godot_int)p_eye); - } else { - return 0; - } + return (unsigned int)interface->get_external_texture_for_eye(data, (godot_int)p_eye); } -void ARVRInterfaceGDNative::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { +void XRInterfaceGDNative::commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { ERR_FAIL_COND(interface == nullptr); interface->commit_for_eye(data, (godot_int)p_eye, (godot_rid *)&p_render_target, (godot_rect2 *)&p_screen_rect); } -void ARVRInterfaceGDNative::process() { +void XRInterfaceGDNative::process() { ERR_FAIL_COND(interface == nullptr); interface->process(data); } -void 
ARVRInterfaceGDNative::notification(int p_what) { +void XRInterfaceGDNative::notification(int p_what) { ERR_FAIL_COND(interface == nullptr); - // this is only available in interfaces that implement 1.1 or later - if ((interface->version.major > 1) || ((interface->version.major == 1) && (interface->version.minor > 0))) { - interface->notification(data, p_what); - } + interface->notification(data, p_what); } ///////////////////////////////////////////////////////////////////////////////////// @@ -243,30 +232,30 @@ void ARVRInterfaceGDNative::notification(int p_what) { extern "C" { -void GDAPI godot_arvr_register_interface(const godot_arvr_interface_gdnative *p_interface) { - // If our major version is 0 or bigger then 10, we're likely looking at our constructor pointer from an older plugin - ERR_FAIL_COND_MSG((p_interface->version.major == 0) || (p_interface->version.major > 10), "GDNative ARVR interfaces build for Godot 3.0 are not supported."); +void GDAPI godot_xr_register_interface(const godot_xr_interface_gdnative *p_interface) { + // Must be on a version 4 plugin + ERR_FAIL_COND_MSG(p_interface->version.major < 4, "GDNative XR interfaces build for Godot 3.x are not supported."); - Ref<ARVRInterfaceGDNative> new_interface; + Ref<XRInterfaceGDNative> new_interface; new_interface.instance(); - new_interface->set_interface((const godot_arvr_interface_gdnative *)p_interface); - ARVRServer::get_singleton()->add_interface(new_interface); + new_interface->set_interface((const godot_xr_interface_gdnative *)p_interface); + XRServer::get_singleton()->add_interface(new_interface); } -godot_real GDAPI godot_arvr_get_worldscale() { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 1.0); +godot_real GDAPI godot_xr_get_worldscale() { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 1.0); - return arvr_server->get_world_scale(); + return xr_server->get_world_scale(); } -godot_transform GDAPI godot_arvr_get_reference_frame() { +godot_transform GDAPI godot_xr_get_reference_frame() { godot_transform reference_frame; Transform *reference_frame_ptr = (Transform *)&reference_frame; - ARVRServer *arvr_server = ARVRServer::get_singleton(); - if (arvr_server != nullptr) { - *reference_frame_ptr = arvr_server->get_reference_frame(); + XRServer *xr_server = XRServer::get_singleton(); + if (xr_server != nullptr) { + *reference_frame_ptr = xr_server->get_reference_frame(); } else { godot_transform_new_identity(&reference_frame); } @@ -274,17 +263,17 @@ godot_transform GDAPI godot_arvr_get_reference_frame() { return reference_frame; } -void GDAPI godot_arvr_blit(godot_int p_eye, godot_rid *p_render_target, godot_rect2 *p_rect) { +void GDAPI godot_xr_blit(godot_int p_eye, godot_rid *p_render_target, godot_rect2 *p_rect) { // blits out our texture as is, handy for preview display of one of the eyes that is already rendered with lens distortion on an external HMD - ARVRInterface::Eyes eye = (ARVRInterface::Eyes)p_eye; + XRInterface::Eyes eye = (XRInterface::Eyes)p_eye; #if 0 RID *render_target = (RID *)p_render_target; #endif Rect2 screen_rect = *(Rect2 *)p_rect; - if (eye == ARVRInterface::EYE_LEFT) { + if (eye == XRInterface::EYE_LEFT) { screen_rect.size.x /= 2.0; - } else if (p_eye == ARVRInterface::EYE_RIGHT) { + } else if (p_eye == XRInterface::EYE_RIGHT) { screen_rect.size.x /= 2.0; screen_rect.position.x += screen_rect.size.x; } @@ -296,7 +285,7 @@ void GDAPI godot_arvr_blit(godot_int p_eye, godot_rid *p_render_target, godot_re #endif } 
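Editor's note: the hunks above rename the plugin-facing registration entry point and tighten its version gate. The sketch below is hypothetical plugin-side code, not part of this patch; the constructor/destructor signatures are inferred from how xr_interface_gdnative.cpp invokes them, and every remaining callback in godot_xr_interface_gdnative must also be assigned before the engine starts querying the interface.

// Hypothetical plugin-side registration (illustrative sketch only).
#include <gdnative/gdnative.h>
#include <xr/godot_xr.h>

static void *my_xr_constructor(godot_object *p_instance) {
	// Allocate and return per-interface plugin state; nullptr keeps the sketch minimal.
	return nullptr;
}

static void my_xr_destructor(void *p_data) {
	// Free whatever the constructor allocated.
}

void my_plugin_register_xr_interface() {
	static godot_xr_interface_gdnative my_interface = {};
	my_interface.version.major = 4; // interfaces reporting a major version below 4 are now rejected
	my_interface.version.minor = 0;
	my_interface.constructor = &my_xr_constructor;
	my_interface.destructor = &my_xr_destructor;
	// ...assign get_name, initialize, commit_for_eye and the other callbacks here
	// before the interface is actually used by the engine.
	godot_xr_register_interface(&my_interface);
}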
-godot_int GDAPI godot_arvr_get_texid(godot_rid *p_render_target) { +godot_int GDAPI godot_xr_get_texid(godot_rid *p_render_target) { // In order to send off our textures to display on our hardware we need the opengl texture ID instead of the render target RID // This is a handy function to expose that. #if 0 @@ -313,20 +302,20 @@ godot_int GDAPI godot_arvr_get_texid(godot_rid *p_render_target) { return texid; } -godot_int GDAPI godot_arvr_add_controller(char *p_device_name, godot_int p_hand, godot_bool p_tracks_orientation, godot_bool p_tracks_position) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 0); +godot_int GDAPI godot_xr_add_controller(char *p_device_name, godot_int p_hand, godot_bool p_tracks_orientation, godot_bool p_tracks_position) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 0); InputFilter *input = InputFilter::get_singleton(); ERR_FAIL_NULL_V(input, 0); - ARVRPositionalTracker *new_tracker = memnew(ARVRPositionalTracker); + XRPositionalTracker *new_tracker = memnew(XRPositionalTracker); new_tracker->set_name(p_device_name); - new_tracker->set_type(ARVRServer::TRACKER_CONTROLLER); + new_tracker->set_type(XRServer::TRACKER_CONTROLLER); if (p_hand == 1) { - new_tracker->set_hand(ARVRPositionalTracker::TRACKER_LEFT_HAND); + new_tracker->set_hand(XRPositionalTracker::TRACKER_LEFT_HAND); } else if (p_hand == 2) { - new_tracker->set_hand(ARVRPositionalTracker::TRACKER_RIGHT_HAND); + new_tracker->set_hand(XRPositionalTracker::TRACKER_RIGHT_HAND); } // also register as joystick... @@ -346,20 +335,20 @@ godot_int GDAPI godot_arvr_add_controller(char *p_device_name, godot_int p_hand, } // add our tracker to our server and remember its pointer - arvr_server->add_tracker(new_tracker); + xr_server->add_tracker(new_tracker); // note, this ID is only unique within controllers! 
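	// Editor's note (not part of this patch): the id returned below is what a plugin
	// feeds back into the other controller helpers, e.g. (hypothetical plugin code):
	//   godot_int id = godot_xr_add_controller((char *)"left_hand", 1, true, true);
	//   godot_xr_set_controller_button(id, 0, true);
	//   godot_xr_remove_controller(id);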
return new_tracker->get_tracker_id(); } -void GDAPI godot_arvr_remove_controller(godot_int p_controller_id) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void GDAPI godot_xr_remove_controller(godot_int p_controller_id) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); InputFilter *input = InputFilter::get_singleton(); ERR_FAIL_NULL(input); - ARVRPositionalTracker *remove_tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, p_controller_id); + XRPositionalTracker *remove_tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, p_controller_id); if (remove_tracker != nullptr) { // unset our joystick if applicable int joyid = remove_tracker->get_joy_id(); @@ -369,16 +358,16 @@ void GDAPI godot_arvr_remove_controller(godot_int p_controller_id) { } // remove our tracker from our server - arvr_server->remove_tracker(remove_tracker); + xr_server->remove_tracker(remove_tracker); memdelete(remove_tracker); } } -void GDAPI godot_arvr_set_controller_transform(godot_int p_controller_id, godot_transform *p_transform, godot_bool p_tracks_orientation, godot_bool p_tracks_position) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void GDAPI godot_xr_set_controller_transform(godot_int p_controller_id, godot_transform *p_transform, godot_bool p_tracks_orientation, godot_bool p_tracks_position) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, p_controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, p_controller_id); if (tracker != nullptr) { Transform *transform = (Transform *)p_transform; if (p_tracks_orientation) { @@ -390,14 +379,14 @@ void GDAPI godot_arvr_set_controller_transform(godot_int p_controller_id, godot_ } } -void GDAPI godot_arvr_set_controller_button(godot_int p_controller_id, godot_int p_button, godot_bool p_is_pressed) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void GDAPI godot_xr_set_controller_button(godot_int p_controller_id, godot_int p_button, godot_bool p_is_pressed) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); InputFilter *input = InputFilter::get_singleton(); ERR_FAIL_NULL(input); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, p_controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, p_controller_id); if (tracker != nullptr) { int joyid = tracker->get_joy_id(); if (joyid != -1) { @@ -406,14 +395,14 @@ void GDAPI godot_arvr_set_controller_button(godot_int p_controller_id, godot_int } } -void GDAPI godot_arvr_set_controller_axis(godot_int p_controller_id, godot_int p_axis, godot_real p_value, godot_bool p_can_be_negative) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void GDAPI godot_xr_set_controller_axis(godot_int p_controller_id, godot_int p_axis, godot_real p_value, godot_bool p_can_be_negative) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); InputFilter *input = InputFilter::get_singleton(); ERR_FAIL_NULL(input); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, p_controller_id); + XRPositionalTracker *tracker = 
xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, p_controller_id); if (tracker != nullptr) { int joyid = tracker->get_joy_id(); if (joyid != -1) { @@ -425,11 +414,11 @@ void GDAPI godot_arvr_set_controller_axis(godot_int p_controller_id, godot_int p } } -godot_real GDAPI godot_arvr_get_controller_rumble(godot_int p_controller_id) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 0.0); +godot_real GDAPI godot_xr_get_controller_rumble(godot_int p_controller_id) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 0.0); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, p_controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, p_controller_id); if (tracker != nullptr) { return tracker->get_rumble(); } diff --git a/modules/gdnative/arvr/arvr_interface_gdnative.h b/modules/gdnative/xr/xr_interface_gdnative.h index e38eb435c6..64f1282a7e 100644 --- a/modules/gdnative/arvr/arvr_interface_gdnative.h +++ b/modules/gdnative/xr/xr_interface_gdnative.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_interface_gdnative.h */ +/* xr_interface_gdnative.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,11 +28,11 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_INTERFACE_GDNATIVE_H -#define ARVR_INTERFACE_GDNATIVE_H +#ifndef XR_INTERFACE_GDNATIVE_H +#define XR_INTERFACE_GDNATIVE_H #include "modules/gdnative/gdnative.h" -#include "servers/arvr/arvr_interface.h" +#include "servers/xr/xr_interface.h" /** @authors Hinsbart & Karroffel & Mux213 @@ -40,23 +40,23 @@ This subclass of our AR/VR interface forms a bridge to GDNative. 
*/ -class ARVRInterfaceGDNative : public ARVRInterface { - GDCLASS(ARVRInterfaceGDNative, ARVRInterface); +class XRInterfaceGDNative : public XRInterface { + GDCLASS(XRInterfaceGDNative, XRInterface); void cleanup(); protected: - const godot_arvr_interface_gdnative *interface; + const godot_xr_interface_gdnative *interface; void *data; static void _bind_methods(); public: /** general interface information **/ - ARVRInterfaceGDNative(); - ~ARVRInterfaceGDNative(); + XRInterfaceGDNative(); + ~XRInterfaceGDNative(); - void set_interface(const godot_arvr_interface_gdnative *p_interface); + void set_interface(const godot_xr_interface_gdnative *p_interface); virtual StringName get_name() const; virtual int get_capabilities() const; @@ -73,19 +73,19 @@ public: /** rendering and internal **/ virtual Size2 get_render_targetsize(); virtual bool is_stereo(); - virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform); + virtual Transform get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform); // we expose a Vector<float> version of this function to GDNative - Vector<float> _get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); + Vector<float> _get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); - // and a CameraMatrix version to ARVRServer - virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); + // and a CameraMatrix version to XRServer + virtual CameraMatrix get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); - virtual unsigned int get_external_texture_for_eye(ARVRInterface::Eyes p_eye); - virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); + virtual unsigned int get_external_texture_for_eye(XRInterface::Eyes p_eye); + virtual void commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); virtual void process(); virtual void notification(int p_what); }; -#endif // ARVR_INTERFACE_GDNATIVE_H +#endif // XR_INTERFACE_GDNATIVE_H diff --git a/modules/mobile_vr/doc_classes/MobileVRInterface.xml b/modules/mobile_vr/doc_classes/MobileVRInterface.xml index 7552abe61d..120535bd41 100644 --- a/modules/mobile_vr/doc_classes/MobileVRInterface.xml +++ b/modules/mobile_vr/doc_classes/MobileVRInterface.xml @@ -1,5 +1,5 @@ <?xml version="1.0" encoding="UTF-8" ?> -<class name="MobileVRInterface" inherits="ARVRInterface" version="4.0"> +<class name="MobileVRInterface" inherits="XRInterface" version="4.0"> <brief_description> Generic mobile VR implementation. </brief_description> @@ -8,9 +8,9 @@ Note that even though there is no positional tracking, the camera will assume the headset is at a height of 1.85 meters. You can change this by setting [member eye_height]. You can initialise this interface as follows: [codeblock] - var interface = ARVRServer.find_interface("Native mobile") + var interface = XRServer.find_interface("Native mobile") if interface and interface.initialize(): - get_viewport().arvr = true + get_viewport().xr = true [/codeblock] </description> <tutorials> @@ -25,7 +25,7 @@ The width of the display in centimeters. </member> <member name="eye_height" type="float" setter="set_eye_height" getter="get_eye_height" default="1.85"> - The height at which the camera is placed in relation to the ground (i.e. [ARVROrigin] node). 
+ The height at which the camera is placed in relation to the ground (i.e. [XROrigin3D] node). </member> <member name="iod" type="float" setter="set_iod" getter="get_iod" default="6.0"> The interocular distance, also known as the interpupillary distance. The distance between the pupils of the left and right eye. diff --git a/modules/mobile_vr/mobile_vr_interface.cpp b/modules/mobile_vr/mobile_vr_interface.cpp index 6b5a70435d..2f0a15f20b 100644 --- a/modules/mobile_vr/mobile_vr_interface.cpp +++ b/modules/mobile_vr/mobile_vr_interface.cpp @@ -39,7 +39,7 @@ StringName MobileVRInterface::get_name() const { }; int MobileVRInterface::get_capabilities() const { - return ARVRInterface::ARVR_STEREO; + return XRInterface::XR_STEREO; }; Vector3 MobileVRInterface::scale_magneto(const Vector3 &p_magnetometer) { @@ -165,7 +165,7 @@ void MobileVRInterface::set_position_from_sensors() { rotate.rotate(orientation.get_axis(2), gyro.z * delta_time); orientation = rotate * orientation; - tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING; + tracking_state = XRInterface::XR_NORMAL_TRACKING; }; ///@TODO improve this, the magnetometer is very fidgity sometimes flipping the axis for no apparent reason (probably a bug on my part) @@ -177,7 +177,7 @@ void MobileVRInterface::set_position_from_sensors() { transform_quat = transform_quat.slerp(acc_mag_quat, 0.1); orientation = Basis(transform_quat); - tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING; + tracking_state = XRInterface::XR_NORMAL_TRACKING; } else if (has_grav) { // use gravity vector to make sure down is down... // transform gravity into our world space @@ -297,8 +297,8 @@ bool MobileVRInterface::is_initialized() const { }; bool MobileVRInterface::initialize() { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, false); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, false); if (!initialized) { // reset our sensor data and orientation @@ -314,7 +314,7 @@ bool MobileVRInterface::initialize() { orientation = Basis(); // make this our primary interface - arvr_server->set_primary_interface(this); + xr_server->set_primary_interface(this); last_ticks = OS::get_singleton()->get_ticks_usec(); @@ -326,10 +326,10 @@ bool MobileVRInterface::initialize() { void MobileVRInterface::uninitialize() { if (initialized) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - if (arvr_server != nullptr) { + XRServer *xr_server = XRServer::get_singleton(); + if (xr_server != nullptr) { // no longer our primary interface - arvr_server->clear_primary_interface_if(this); + xr_server->clear_primary_interface_if(this); } initialized = false; @@ -348,22 +348,22 @@ Size2 MobileVRInterface::get_render_targetsize() { return target_size; }; -Transform MobileVRInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) { +Transform MobileVRInterface::get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform) { _THREAD_SAFE_METHOD_ Transform transform_for_eye; - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, transform_for_eye); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, transform_for_eye); if (initialized) { - float world_scale = arvr_server->get_world_scale(); + float world_scale = xr_server->get_world_scale(); // we don't need to check for the existence of our HMD, doesn't effect our values... 
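	// Editor's note: with the default iod of 6.0 (centimeters, per the class reference above),
	// the conversion noted below works out to 6.0 * 0.01 * 0.5 = 0.03 m, i.e. each eye sits
	// 3 cm from center before world_scale is applied.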
// note * 0.01 to convert cm to m and * 0.5 as we're moving half in each direction... - if (p_eye == ARVRInterface::EYE_LEFT) { + if (p_eye == XRInterface::EYE_LEFT) { transform_for_eye.origin.x = -(intraocular_dist * 0.01 * 0.5 * world_scale); - } else if (p_eye == ARVRInterface::EYE_RIGHT) { + } else if (p_eye == XRInterface::EYE_RIGHT) { transform_for_eye.origin.x = intraocular_dist * 0.01 * 0.5 * world_scale; } else { // for mono we don't reposition, we want our center position. @@ -374,7 +374,7 @@ Transform MobileVRInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, co hmd_transform.basis = orientation; hmd_transform.origin = Vector3(0.0, eye_height * world_scale, 0.0); - transform_for_eye = p_cam_transform * (arvr_server->get_reference_frame()) * hmd_transform * transform_for_eye; + transform_for_eye = p_cam_transform * (xr_server->get_reference_frame()) * hmd_transform * transform_for_eye; } else { // huh? well just return what we got.... transform_for_eye = p_cam_transform; @@ -383,12 +383,12 @@ Transform MobileVRInterface::get_transform_for_eye(ARVRInterface::Eyes p_eye, co return transform_for_eye; }; -CameraMatrix MobileVRInterface::get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { +CameraMatrix MobileVRInterface::get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { _THREAD_SAFE_METHOD_ CameraMatrix eye; - if (p_eye == ARVRInterface::EYE_MONO) { + if (p_eye == XRInterface::EYE_MONO) { ///@TODO for now hardcode some of this, what is really needed here is that this needs to be in sync with the real cameras properties // which probably means implementing a specific class for iOS and Android. For now this is purely here as an example. // Note also that if you use a normal viewport with AR/VR turned off you can still use the tracker output of this interface @@ -396,13 +396,13 @@ CameraMatrix MobileVRInterface::get_projection_for_eye(ARVRInterface::Eyes p_eye // This will make more sense when we implement ARkit on iOS (probably a separate interface). eye.set_perspective(60.0, p_aspect, p_z_near, p_z_far, false); } else { - eye.set_for_hmd(p_eye == ARVRInterface::EYE_LEFT ? 1 : 2, p_aspect, intraocular_dist, display_width, display_to_lens, oversample, p_z_near, p_z_far); + eye.set_for_hmd(p_eye == XRInterface::EYE_LEFT ? 
1 : 2, p_aspect, intraocular_dist, display_width, display_to_lens, oversample, p_z_near, p_z_far); }; return eye; }; -void MobileVRInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { +void MobileVRInterface::commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) { _THREAD_SAFE_METHOD_ // We must have a valid render target @@ -417,9 +417,9 @@ void MobileVRInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_t // we output half a screen dest.size.x *= 0.5; - if (p_eye == ARVRInterface::EYE_LEFT) { + if (p_eye == XRInterface::EYE_LEFT) { eye_center.x = ((-intraocular_dist / 2.0) + (display_width / 4.0)) / (display_width / 2.0); - } else if (p_eye == ARVRInterface::EYE_RIGHT) { + } else if (p_eye == XRInterface::EYE_RIGHT) { dest.position.x = dest.size.x; eye_center.x = ((intraocular_dist / 2.0) - (display_width / 4.0)) / (display_width / 2.0); } diff --git a/modules/mobile_vr/mobile_vr_interface.h b/modules/mobile_vr/mobile_vr_interface.h index c762c9b799..3a9ed1314a 100644 --- a/modules/mobile_vr/mobile_vr_interface.h +++ b/modules/mobile_vr/mobile_vr_interface.h @@ -31,8 +31,8 @@ #ifndef MOBILE_VR_INTERFACE_H #define MOBILE_VR_INTERFACE_H -#include "servers/arvr/arvr_interface.h" -#include "servers/arvr/arvr_positional_tracker.h" +#include "servers/xr/xr_interface.h" +#include "servers/xr/xr_positional_tracker.h" /** @author Bastiaan Olij <mux213@gmail.com> @@ -47,8 +47,8 @@ more advanced interfaces. */ -class MobileVRInterface : public ARVRInterface { - GDCLASS(MobileVRInterface, ARVRInterface); +class MobileVRInterface : public XRInterface { + GDCLASS(MobileVRInterface, XRInterface); private: bool initialized; @@ -137,9 +137,9 @@ public: virtual Size2 get_render_targetsize(); virtual bool is_stereo(); - virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform); - virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); - virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); + virtual Transform get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform); + virtual CameraMatrix get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far); + virtual void commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect); virtual void process(); virtual void notification(int p_what); diff --git a/modules/mobile_vr/register_types.cpp b/modules/mobile_vr/register_types.cpp index faf6c3b151..75638d47c4 100644 --- a/modules/mobile_vr/register_types.cpp +++ b/modules/mobile_vr/register_types.cpp @@ -37,7 +37,7 @@ void register_mobile_vr_types() { Ref<MobileVRInterface> mobile_vr; mobile_vr.instance(); - ARVRServer::get_singleton()->add_interface(mobile_vr); + XRServer::get_singleton()->add_interface(mobile_vr); } void unregister_mobile_vr_types() { diff --git a/platform/android/SCsub b/platform/android/SCsub index f39eb8b889..ec42bc42b5 100644 --- a/platform/android/SCsub +++ b/platform/android/SCsub @@ -17,8 +17,8 @@ android_files = [ "java_godot_io_wrapper.cpp", "jni_utils.cpp", "android_keys_utils.cpp", - "vulkan/vk_renderer_jni.cpp", - "plugin/godot_plugin_jni.cpp", + "display_server_android.cpp", + "vulkan/vulkan_context_android.cpp", ] env_android = env.Clone() diff --git a/platform/android/detect.py b/platform/android/detect.py index ed0643e3b3..6da1e5f3d6 
100644 --- a/platform/android/detect.py +++ b/platform/android/detect.py @@ -25,7 +25,7 @@ def get_opts(): return [ ("ANDROID_NDK_ROOT", "Path to the Android NDK", os.environ.get("ANDROID_NDK_ROOT", 0)), - ("ndk_platform", 'Target platform (android-<api>, e.g. "android-18")', "android-18"), + ("ndk_platform", 'Target platform (android-<api>, e.g. "android-24")', "android-24"), EnumVariable("android_arch", "Target architecture", "armv7", ("armv7", "arm64v8", "x86", "x86_64")), BoolVariable("android_neon", "Enable NEON support (armv7 only)", True), ] @@ -102,7 +102,7 @@ def configure(env): neon_text = "" if env["android_arch"] == "armv7" and env["android_neon"]: neon_text = " (with NEON)" - print("Building for Android (" + env["android_arch"] + ")" + neon_text) + print("Building for Android, platform " + env["ndk_platform"] + " (" + env["android_arch"] + ")" + neon_text) can_vectorize = True if env["android_arch"] == "x86": @@ -314,8 +314,8 @@ def configure(env): ) env.Prepend(CPPPATH=["#platform/android"]) - env.Append(CPPDEFINES=["ANDROID_ENABLED", "UNIX_ENABLED", "NO_FCNTL"]) - env.Append(LIBS=["OpenSLES", "EGL", "GLESv3", "GLESv2", "android", "log", "z", "dl"]) + env.Append(CPPDEFINES=["ANDROID_ENABLED", "UNIX_ENABLED", "VULKAN_ENABLED", "NO_FCNTL"]) + env.Append(LIBS=["OpenSLES", "EGL", "GLESv2", "vulkan", "android", "log", "z", "dl"]) # Return NDK version string in source.properties (adapted from the Chromium project). diff --git a/platform/android/display_server_android.cpp b/platform/android/display_server_android.cpp new file mode 100644 index 0000000000..9534387d35 --- /dev/null +++ b/platform/android/display_server_android.cpp @@ -0,0 +1,655 @@ +/*************************************************************************/ +/* display_server_android.cpp */ +/*************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/*************************************************************************/ +/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */ +/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ +/*************************************************************************/ + +#include "display_server_android.h" + +#include "android_keys_utils.h" +#include "core/project_settings.h" +#include "java_godot_io_wrapper.h" +#include "java_godot_wrapper.h" +#include "os_android.h" + +#if defined(OPENGL_ENABLED) +#include "drivers/gles2/rasterizer_gles2.h" +#endif +#if defined(VULKAN_ENABLED) +#include "drivers/vulkan/rendering_device_vulkan.h" +#include "platform/android/vulkan/vulkan_context_android.h" +#include "servers/rendering/rasterizer_rd/rasterizer_rd.h" +#endif + +DisplayServerAndroid *DisplayServerAndroid::get_singleton() { + return (DisplayServerAndroid *)DisplayServer::get_singleton(); +} + +bool DisplayServerAndroid::has_feature(Feature p_feature) const { + switch (p_feature) { + //case FEATURE_CONSOLE_WINDOW: + //case FEATURE_CURSOR_SHAPE: + //case FEATURE_CUSTOM_CURSOR_SHAPE: + //case FEATURE_GLOBAL_MENU: + //case FEATURE_HIDPI: + //case FEATURE_ICON: + //case FEATURE_IME: + //case FEATURE_MOUSE: + //case FEATURE_MOUSE_WARP: + //case FEATURE_NATIVE_DIALOG: + //case FEATURE_NATIVE_ICON: + //case FEATURE_NATIVE_VIDEO: + //case FEATURE_WINDOW_TRANSPARENCY: + case FEATURE_CLIPBOARD: + case FEATURE_KEEP_SCREEN_ON: + case FEATURE_ORIENTATION: + case FEATURE_TOUCHSCREEN: + case FEATURE_VIRTUAL_KEYBOARD: + return true; + default: + return false; + } +} + +String DisplayServerAndroid::get_name() const { + return "Android"; +} + +void DisplayServerAndroid::clipboard_set(const String &p_text) { + GodotJavaWrapper *godot_java = OS_Android::get_singleton()->get_godot_java(); + ERR_FAIL_COND(!godot_java); + + if (godot_java->has_set_clipboard()) { + godot_java->set_clipboard(p_text); + } else { + DisplayServer::clipboard_set(p_text); + } +} + +String DisplayServerAndroid::clipboard_get() const { + GodotJavaWrapper *godot_java = OS_Android::get_singleton()->get_godot_java(); + ERR_FAIL_COND_V(!godot_java, String()); + + if (godot_java->has_get_clipboard()) { + return godot_java->get_clipboard(); + } else { + return DisplayServer::clipboard_get(); + } +} + +void DisplayServerAndroid::screen_set_keep_on(bool p_enable) { + GodotJavaWrapper *godot_java = OS_Android::get_singleton()->get_godot_java(); + ERR_FAIL_COND(!godot_java); + + godot_java->set_keep_screen_on(p_enable); + keep_screen_on = p_enable; +} + +bool DisplayServerAndroid::screen_is_kept_on() const { + return keep_screen_on; +} + +void DisplayServerAndroid::screen_set_orientation(DisplayServer::ScreenOrientation p_orientation, int p_screen) { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND(!godot_io_java); + + godot_io_java->set_screen_orientation(p_orientation); +} + +DisplayServer::ScreenOrientation DisplayServerAndroid::screen_get_orientation(int p_screen) const { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND_V(!godot_io_java, SCREEN_LANDSCAPE); + + return (ScreenOrientation)godot_io_java->get_screen_orientation(); +} + +int DisplayServerAndroid::get_screen_count() const { + return 1; +} + +Point2i DisplayServerAndroid::screen_get_position(int p_screen) const { + return Point2i(0, 0); +} + +Size2i DisplayServerAndroid::screen_get_size(int p_screen) const { + return OS_Android::get_singleton()->get_display_size(); +} + +Rect2i DisplayServerAndroid::screen_get_usable_rect(int p_screen) const { + Size2i display_size = OS_Android::get_singleton()->get_display_size(); + return Rect2i(0, 0, display_size.width, 
display_size.height); +} + +int DisplayServerAndroid::screen_get_dpi(int p_screen) const { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND_V(!godot_io_java, 0); + + return godot_io_java->get_screen_dpi(); +} + +bool DisplayServerAndroid::screen_is_touchscreen(int p_screen) const { + return true; +} + +void DisplayServerAndroid::virtual_keyboard_show(const String &p_existing_text, const Rect2 &p_screen_rect, int p_max_length) { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND(!godot_io_java); + + if (godot_io_java->has_vk()) { + godot_io_java->show_vk(p_existing_text, p_max_length); + } else { + ERR_PRINT("Virtual keyboard not available"); + } +} + +void DisplayServerAndroid::virtual_keyboard_hide() { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND(!godot_io_java); + + if (godot_io_java->has_vk()) { + godot_io_java->hide_vk(); + } else { + ERR_PRINT("Virtual keyboard not available"); + } +} + +int DisplayServerAndroid::virtual_keyboard_get_height() const { + GodotIOJavaWrapper *godot_io_java = OS_Android::get_singleton()->get_godot_io_java(); + ERR_FAIL_COND_V(!godot_io_java, 0); + + return godot_io_java->get_vk_height(); +} + +void DisplayServerAndroid::window_set_window_event_callback(const Callable &p_callable, DisplayServer::WindowID p_window) { + window_event_callback = p_callable; +} + +void DisplayServerAndroid::window_set_input_event_callback(const Callable &p_callable, DisplayServer::WindowID p_window) { + input_event_callback = p_callable; +} + +void DisplayServerAndroid::window_set_input_text_callback(const Callable &p_callable, DisplayServer::WindowID p_window) { + input_text_callback = p_callable; +} + +void DisplayServerAndroid::window_set_rect_changed_callback(const Callable &p_callable, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +void DisplayServerAndroid::window_set_drop_files_callback(const Callable &p_callable, DisplayServer::WindowID p_window) { + // Not supported on Android. 
+} + +void DisplayServerAndroid::_window_callback(const Callable &p_callable, const Variant &p_arg) const { + if (!p_callable.is_null()) { + const Variant *argp = &p_arg; + Variant ret; + Callable::CallError ce; + p_callable.call((const Variant **)&argp, 1, ret, ce); + } +} + +void DisplayServerAndroid::send_window_event(DisplayServer::WindowEvent p_event) const { + _window_callback(window_event_callback, int(p_event)); +} + +void DisplayServerAndroid::send_input_event(const Ref<InputEvent> &p_event) const { + _window_callback(input_event_callback, p_event); +} + +void DisplayServerAndroid::send_input_text(const String &p_text) const { + _window_callback(input_text_callback, p_text); +} + +void DisplayServerAndroid::_dispatch_input_events(const Ref<InputEvent> &p_event) { + DisplayServerAndroid::get_singleton()->send_input_event(p_event); +} + +Vector<DisplayServer::WindowID> DisplayServerAndroid::get_window_list() const { + Vector<WindowID> ret; + ret.push_back(MAIN_WINDOW_ID); + return ret; +} + +DisplayServer::WindowID DisplayServerAndroid::get_window_at_screen_position(const Point2i &p_position) const { + return MAIN_WINDOW_ID; +} + +void DisplayServerAndroid::window_attach_instance_id(ObjectID p_instance, DisplayServer::WindowID p_window) { + window_attached_instance_id = p_instance; +} + +ObjectID DisplayServerAndroid::window_get_attached_instance_id(DisplayServer::WindowID p_window) const { + return window_attached_instance_id; +} + +void DisplayServerAndroid::window_set_title(const String &p_title, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +int DisplayServerAndroid::window_get_current_screen(DisplayServer::WindowID p_window) const { + return SCREEN_OF_MAIN_WINDOW; +} + +void DisplayServerAndroid::window_set_current_screen(int p_screen, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +Point2i DisplayServerAndroid::window_get_position(DisplayServer::WindowID p_window) const { + return Point2i(); +} + +void DisplayServerAndroid::window_set_position(const Point2i &p_position, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +void DisplayServerAndroid::window_set_transient(DisplayServer::WindowID p_window, DisplayServer::WindowID p_parent) { + // Not supported on Android. +} + +void DisplayServerAndroid::window_set_max_size(const Size2i p_size, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +Size2i DisplayServerAndroid::window_get_max_size(DisplayServer::WindowID p_window) const { + return Size2i(); +} + +void DisplayServerAndroid::window_set_min_size(const Size2i p_size, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +Size2i DisplayServerAndroid::window_get_min_size(DisplayServer::WindowID p_window) const { + return Size2i(); +} + +void DisplayServerAndroid::window_set_size(const Size2i p_size, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +Size2i DisplayServerAndroid::window_get_size(DisplayServer::WindowID p_window) const { + return OS_Android::get_singleton()->get_display_size(); +} + +Size2i DisplayServerAndroid::window_get_real_size(DisplayServer::WindowID p_window) const { + return OS_Android::get_singleton()->get_display_size(); +} + +void DisplayServerAndroid::window_set_mode(DisplayServer::WindowMode p_mode, DisplayServer::WindowID p_window) { + // Not supported on Android. 
+} + +DisplayServer::WindowMode DisplayServerAndroid::window_get_mode(DisplayServer::WindowID p_window) const { + return WINDOW_MODE_FULLSCREEN; +} + +bool DisplayServerAndroid::window_is_maximize_allowed(DisplayServer::WindowID p_window) const { + return false; +} + +void DisplayServerAndroid::window_set_flag(DisplayServer::WindowFlags p_flag, bool p_enabled, DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +bool DisplayServerAndroid::window_get_flag(DisplayServer::WindowFlags p_flag, DisplayServer::WindowID p_window) const { + return false; +} + +void DisplayServerAndroid::window_request_attention(DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +void DisplayServerAndroid::window_move_to_foreground(DisplayServer::WindowID p_window) { + // Not supported on Android. +} + +bool DisplayServerAndroid::window_can_draw(DisplayServer::WindowID p_window) const { + return true; +} + +bool DisplayServerAndroid::can_any_window_draw() const { + return true; +} + +void DisplayServerAndroid::alert(const String &p_alert, const String &p_title) { + GodotJavaWrapper *godot_java = OS_Android::get_singleton()->get_godot_java(); + ERR_FAIL_COND(!godot_java); + + godot_java->alert(p_alert, p_title); +} + +void DisplayServerAndroid::process_events() { + // Nothing to do +} + +Vector<String> DisplayServerAndroid::get_rendering_drivers_func() { + Vector<String> drivers; + +#ifdef OPENGL_ENABLED + drivers.push_back("opengl"); +#endif +#ifdef VULKAN_ENABLED + drivers.push_back("vulkan"); +#endif + + return drivers; +} + +DisplayServer *DisplayServerAndroid::create_func(const String &p_rendering_driver, DisplayServer::WindowMode p_mode, uint32_t p_flags, const Vector2i &p_resolution, Error &r_error) { + return memnew(DisplayServerAndroid(p_rendering_driver, p_mode, p_flags, p_resolution, r_error)); +} + +void DisplayServerAndroid::register_android_driver() { + register_create_function("android", create_func, get_rendering_drivers_func); +} + +DisplayServerAndroid::DisplayServerAndroid(const String &p_rendering_driver, DisplayServer::WindowMode p_mode, uint32_t p_flags, const Vector2i &p_resolution, Error &r_error) { + rendering_driver = p_rendering_driver; + + // TODO: rendering_driver is broken, change when different drivers are supported again + rendering_driver = "vulkan"; + + keep_screen_on = GLOBAL_GET("display/window/energy_saving/keep_screen_on"); + +#if defined(OPENGL_ENABLED) + if (rendering_driver == "opengl") { + bool gl_initialization_error = false; + + if (RasterizerGLES2::is_viable() == OK) { + RasterizerGLES2::register_config(); + RasterizerGLES2::make_current(); + } else { + gl_initialization_error = true; + } + + if (gl_initialization_error) { + OS::get_singleton()->alert("Your device does not support any of the supported OpenGL versions.\n" + "Please try updating your Android version.", + "Unable to initialize video driver"); + return; + } + } +#endif + +#if defined(VULKAN_ENABLED) + context_vulkan = nullptr; + rendering_device_vulkan = nullptr; + + if (rendering_driver == "vulkan") { + ANativeWindow *native_window = OS_Android::get_singleton()->get_native_window(); + ERR_FAIL_COND(!native_window); + + context_vulkan = memnew(VulkanContextAndroid); + if (context_vulkan->initialize() != OK) { + memdelete(context_vulkan); + context_vulkan = nullptr; + ERR_FAIL_MSG("Failed to initialize Vulkan context"); + } + + Size2i display_size = OS_Android::get_singleton()->get_display_size(); + if (context_vulkan->window_create(native_window, display_size.width, 
display_size.height) == -1) { + memdelete(context_vulkan); + context_vulkan = nullptr; + ERR_FAIL_MSG("Failed to create Vulkan window."); + } + + rendering_device_vulkan = memnew(RenderingDeviceVulkan); + rendering_device_vulkan->initialize(context_vulkan); + + RasterizerRD::make_current(); + } +#endif + + InputFilter::get_singleton()->set_event_dispatch_function(_dispatch_input_events); +} + +DisplayServerAndroid::~DisplayServerAndroid() { +#if defined(VULKAN_ENABLED) + if (rendering_driver == "vulkan") { + if (rendering_device_vulkan) { + rendering_device_vulkan->finalize(); + memdelete(rendering_device_vulkan); + } + + if (context_vulkan) { + memdelete(context_vulkan); + } + } +#endif +} + +void DisplayServerAndroid::process_joy_event(DisplayServerAndroid::JoypadEvent p_event) { + switch (p_event.type) { + case JOY_EVENT_BUTTON: + InputFilter::get_singleton()->joy_button(p_event.device, p_event.index, p_event.pressed); + break; + case JOY_EVENT_AXIS: + InputFilter::JoyAxis value; + value.min = -1; + value.value = p_event.value; + InputFilter::get_singleton()->joy_axis(p_event.device, p_event.index, value); + break; + case JOY_EVENT_HAT: + InputFilter::get_singleton()->joy_hat(p_event.device, p_event.hat); + break; + default: + return; + } +} + +void DisplayServerAndroid::process_key_event(int p_keycode, int p_scancode, int p_unicode_char, bool p_pressed) { + Ref<InputEventKey> ev; + ev.instance(); + int val = p_unicode_char; + int keycode = android_get_keysym(p_keycode); + int phy_keycode = android_get_keysym(p_scancode); + ev->set_keycode(keycode); + ev->set_physical_keycode(phy_keycode); + ev->set_unicode(val); + ev->set_pressed(p_pressed); + + if (val == '\n') { + ev->set_keycode(KEY_ENTER); + } else if (val == 61448) { + ev->set_keycode(KEY_BACKSPACE); + ev->set_unicode(KEY_BACKSPACE); + } else if (val == 61453) { + ev->set_keycode(KEY_ENTER); + ev->set_unicode(KEY_ENTER); + } else if (p_keycode == 4) { + OS_Android::get_singleton()->main_loop_request_go_back(); + } + + InputFilter::get_singleton()->parse_input_event(ev); +} + +void DisplayServerAndroid::process_touch(int p_what, int p_pointer, const Vector<DisplayServerAndroid::TouchPos> &p_points) { + switch (p_what) { + case 0: { //gesture begin + if (touch.size()) { + //end all if exist + for (int i = 0; i < touch.size(); i++) { + + Ref<InputEventScreenTouch> ev; + ev.instance(); + ev->set_index(touch[i].id); + ev->set_pressed(false); + ev->set_position(touch[i].pos); + InputFilter::get_singleton()->parse_input_event(ev); + } + } + + touch.resize(p_points.size()); + for (int i = 0; i < p_points.size(); i++) { + touch.write[i].id = p_points[i].id; + touch.write[i].pos = p_points[i].pos; + } + + //send touch + for (int i = 0; i < touch.size(); i++) { + + Ref<InputEventScreenTouch> ev; + ev.instance(); + ev->set_index(touch[i].id); + ev->set_pressed(true); + ev->set_position(touch[i].pos); + InputFilter::get_singleton()->parse_input_event(ev); + } + + } break; + case 1: { //motion + ERR_FAIL_COND(touch.size() != p_points.size()); + + for (int i = 0; i < touch.size(); i++) { + + int idx = -1; + for (int j = 0; j < p_points.size(); j++) { + + if (touch[i].id == p_points[j].id) { + idx = j; + break; + } + } + + ERR_CONTINUE(idx == -1); + + if (touch[i].pos == p_points[idx].pos) + continue; //no move unncesearily + + Ref<InputEventScreenDrag> ev; + ev.instance(); + ev->set_index(touch[i].id); + ev->set_position(p_points[idx].pos); + ev->set_relative(p_points[idx].pos - touch[i].pos); + 
InputFilter::get_singleton()->parse_input_event(ev); + touch.write[i].pos = p_points[idx].pos; + } + + } break; + case 2: { //release + if (touch.size()) { + //end all if exist + for (int i = 0; i < touch.size(); i++) { + + Ref<InputEventScreenTouch> ev; + ev.instance(); + ev->set_index(touch[i].id); + ev->set_pressed(false); + ev->set_position(touch[i].pos); + InputFilter::get_singleton()->parse_input_event(ev); + } + touch.clear(); + } + } break; + case 3: { // add touch + for (int i = 0; i < p_points.size(); i++) { + if (p_points[i].id == p_pointer) { + TouchPos tp = p_points[i]; + touch.push_back(tp); + + Ref<InputEventScreenTouch> ev; + ev.instance(); + + ev->set_index(tp.id); + ev->set_pressed(true); + ev->set_position(tp.pos); + InputFilter::get_singleton()->parse_input_event(ev); + + break; + } + } + } break; + case 4: { // remove touch + for (int i = 0; i < touch.size(); i++) { + if (touch[i].id == p_pointer) { + + Ref<InputEventScreenTouch> ev; + ev.instance(); + ev->set_index(touch[i].id); + ev->set_pressed(false); + ev->set_position(touch[i].pos); + InputFilter::get_singleton()->parse_input_event(ev); + touch.remove(i); + + break; + } + } + } break; + } +} + +void DisplayServerAndroid::process_hover(int p_type, Point2 p_pos) { + // https://developer.android.com/reference/android/view/MotionEvent.html#ACTION_HOVER_ENTER + switch (p_type) { + case 7: // hover move + case 9: // hover enter + case 10: { // hover exit + Ref<InputEventMouseMotion> ev; + ev.instance(); + ev->set_position(p_pos); + ev->set_global_position(p_pos); + ev->set_relative(p_pos - hover_prev_pos); + InputFilter::get_singleton()->parse_input_event(ev); + hover_prev_pos = p_pos; + } break; + } +} + +void DisplayServerAndroid::process_double_tap(Point2 p_pos) { + Ref<InputEventMouseButton> ev; + ev.instance(); + ev->set_position(p_pos); + ev->set_global_position(p_pos); + ev->set_pressed(false); + ev->set_doubleclick(true); + InputFilter::get_singleton()->parse_input_event(ev); +} + +void DisplayServerAndroid::process_scroll(Point2 p_pos) { + Ref<InputEventPanGesture> ev; + ev.instance(); + ev->set_position(p_pos); + ev->set_delta(p_pos - scroll_prev_pos); + InputFilter::get_singleton()->parse_input_event(ev); + scroll_prev_pos = p_pos; +} + +void DisplayServerAndroid::process_accelerometer(const Vector3 &p_accelerometer) { + InputFilter::get_singleton()->set_accelerometer(p_accelerometer); +} + +void DisplayServerAndroid::process_gravity(const Vector3 &p_gravity) { + InputFilter::get_singleton()->set_gravity(p_gravity); +} + +void DisplayServerAndroid::process_magnetometer(const Vector3 &p_magnetometer) { + InputFilter::get_singleton()->set_magnetometer(p_magnetometer); +} + +void DisplayServerAndroid::process_gyroscope(const Vector3 &p_gyroscope) { + InputFilter::get_singleton()->set_gyroscope(p_gyroscope); +} diff --git a/platform/android/display_server_android.h b/platform/android/display_server_android.h new file mode 100644 index 0000000000..2096ba68f1 --- /dev/null +++ b/platform/android/display_server_android.h @@ -0,0 +1,174 @@ +/*************************************************************************/ +/* display_server_android.h */ +/*************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/*************************************************************************/ +/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */ +/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). 
*/ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/*************************************************************************/ + +#ifndef DISPLAY_SERVER_ANDROID_H +#define DISPLAY_SERVER_ANDROID_H + +#include "servers/display_server.h" + +#if defined(VULKAN_ENABLED) +class VulkanContextAndroid; +class RenderingDeviceVulkan; +#endif + +class DisplayServerAndroid : public DisplayServer { +public: + struct TouchPos { + int id; + Point2 pos; + }; + + enum { + JOY_EVENT_BUTTON = 0, + JOY_EVENT_AXIS = 1, + JOY_EVENT_HAT = 2 + }; + + struct JoypadEvent { + + int device; + int type; + int index; + bool pressed; + float value; + int hat; + }; + +private: + String rendering_driver; + + bool keep_screen_on; + + Vector<TouchPos> touch; + Point2 hover_prev_pos; // needed to calculate the relative position on hover events + Point2 scroll_prev_pos; // needed to calculate the relative position on scroll events + +#if defined(VULKAN_ENABLED) + VulkanContextAndroid *context_vulkan; + RenderingDeviceVulkan *rendering_device_vulkan; +#endif + + ObjectID window_attached_instance_id; + + Callable window_event_callback; + Callable input_event_callback; + Callable input_text_callback; + + void _window_callback(const Callable &p_callable, const Variant &p_arg) const; + + static void _dispatch_input_events(const Ref<InputEvent> &p_event); + +public: + static DisplayServerAndroid *get_singleton(); + + virtual bool has_feature(Feature p_feature) const; + virtual String get_name() const; + + virtual void clipboard_set(const String &p_text); + virtual String clipboard_get() const; + + virtual void screen_set_keep_on(bool p_enable); + virtual bool screen_is_kept_on() const; + + virtual void screen_set_orientation(ScreenOrientation p_orientation, int p_screen = SCREEN_OF_MAIN_WINDOW); + virtual ScreenOrientation screen_get_orientation(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + + virtual int get_screen_count() const; + virtual Point2i screen_get_position(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + virtual Size2i screen_get_size(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + virtual Rect2i screen_get_usable_rect(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + virtual int screen_get_dpi(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + virtual bool screen_is_touchscreen(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + + virtual void virtual_keyboard_show(const String &p_existing_text, const Rect2 &p_screen_rect = Rect2(), int p_max_length 
= -1); + virtual void virtual_keyboard_hide(); + virtual int virtual_keyboard_get_height() const; + + virtual void window_set_window_event_callback(const Callable &p_callable, WindowID p_window = MAIN_WINDOW_ID); + virtual void window_set_input_event_callback(const Callable &p_callable, WindowID p_window = MAIN_WINDOW_ID); + virtual void window_set_input_text_callback(const Callable &p_callable, WindowID p_window = MAIN_WINDOW_ID); + virtual void window_set_rect_changed_callback(const Callable &p_callable, WindowID p_window = MAIN_WINDOW_ID); + virtual void window_set_drop_files_callback(const Callable &p_callable, WindowID p_window = MAIN_WINDOW_ID); + + void send_window_event(WindowEvent p_event) const; + void send_input_event(const Ref<InputEvent> &p_event) const; + void send_input_text(const String &p_text) const; + + virtual Vector<WindowID> get_window_list() const; + virtual WindowID get_window_at_screen_position(const Point2i &p_position) const; + virtual void window_attach_instance_id(ObjectID p_instance, WindowID p_window = MAIN_WINDOW_ID); + virtual ObjectID window_get_attached_instance_id(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_title(const String &p_title, WindowID p_window = MAIN_WINDOW_ID); + virtual int window_get_current_screen(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_current_screen(int p_screen, WindowID p_window = MAIN_WINDOW_ID); + virtual Point2i window_get_position(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_position(const Point2i &p_position, WindowID p_window = MAIN_WINDOW_ID); + virtual void window_set_transient(WindowID p_window, WindowID p_parent); + virtual void window_set_max_size(const Size2i p_size, WindowID p_window = MAIN_WINDOW_ID); + virtual Size2i window_get_max_size(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_min_size(const Size2i p_size, WindowID p_window = MAIN_WINDOW_ID); + virtual Size2i window_get_min_size(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_size(const Size2i p_size, WindowID p_window = MAIN_WINDOW_ID); + virtual Size2i window_get_size(WindowID p_window = MAIN_WINDOW_ID) const; + virtual Size2i window_get_real_size(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_mode(WindowMode p_mode, WindowID p_window = MAIN_WINDOW_ID); + virtual WindowMode window_get_mode(WindowID p_window = MAIN_WINDOW_ID) const; + virtual bool window_is_maximize_allowed(WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_set_flag(WindowFlags p_flag, bool p_enabled, WindowID p_window = MAIN_WINDOW_ID); + virtual bool window_get_flag(WindowFlags p_flag, WindowID p_window = MAIN_WINDOW_ID) const; + virtual void window_request_attention(WindowID p_window = MAIN_WINDOW_ID); + virtual void window_move_to_foreground(WindowID p_window = MAIN_WINDOW_ID); + virtual bool window_can_draw(WindowID p_window = MAIN_WINDOW_ID) const; + virtual bool can_any_window_draw() const; + + virtual void alert(const String &p_alert, const String &p_title); + + virtual void process_events(); + + void process_accelerometer(const Vector3 &p_accelerometer); + void process_gravity(const Vector3 &p_gravity); + void process_magnetometer(const Vector3 &p_magnetometer); + void process_gyroscope(const Vector3 &p_gyroscope); + void process_touch(int p_what, int p_pointer, const Vector<TouchPos> &p_points); + void process_hover(int p_type, Point2 p_pos); + void process_double_tap(Point2 p_pos); + void process_scroll(Point2 p_pos); + void 
process_joy_event(JoypadEvent p_event); + void process_key_event(int p_keycode, int p_scancode, int p_unicode_char, bool p_pressed); + + static DisplayServer *create_func(const String &p_rendering_driver, WindowMode p_mode, uint32_t p_flags, const Vector2i &p_resolution, Error &r_error); + static Vector<String> get_rendering_drivers_func(); + static void register_android_driver(); + + DisplayServerAndroid(const String &p_rendering_driver, WindowMode p_mode, uint32_t p_flags, const Vector2i &p_resolution, Error &r_error); + ~DisplayServerAndroid(); +}; + +#endif // DISPLAY_SERVER_ANDROID_H diff --git a/platform/android/java/lib/AndroidManifest.xml b/platform/android/java/lib/AndroidManifest.xml index b133585f99..fa39bc0f1d 100644 --- a/platform/android/java/lib/AndroidManifest.xml +++ b/platform/android/java/lib/AndroidManifest.xml @@ -13,7 +13,7 @@ <instrumentation android:icon="@mipmap/icon" android:label="@string/godot_project_name_string" - android:name=".GodotInstrumentation" + android:name="org.godotengine.godot.GodotInstrumentation" android:targetPackage="org.godotengine.godot" /> </manifest> diff --git a/platform/android/java/lib/build.gradle b/platform/android/java/lib/build.gradle index 062f91e08e..c69e19fbfa 100644 --- a/platform/android/java/lib/build.gradle +++ b/platform/android/java/lib/build.gradle @@ -1,4 +1,5 @@ apply plugin: 'com.android.library' +apply plugin: 'kotlin-android' dependencies { implementation libraries.supportCoreUtils diff --git a/platform/android/java/lib/src/org/godotengine/godot/Godot.java b/platform/android/java/lib/src/org/godotengine/godot/Godot.java index 1798a1df3a..bf0d1c6273 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/Godot.java +++ b/platform/android/java/lib/src/org/godotengine/godot/Godot.java @@ -153,7 +153,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe private String[] command_line; private boolean use_apk_expansion; - public GodotView mView; + public GodotRenderView mRenderView; private boolean godot_initialized = false; private SensorManager mSensorManager; @@ -213,34 +213,41 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe setContentView(layout); // GodotEditText layout - GodotEditText edittext = new GodotEditText(this); - edittext.setLayoutParams(new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT)); + GodotEditText editText = new GodotEditText(this); + editText.setLayoutParams(new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT)); // ...add to FrameLayout - layout.addView(edittext); + layout.addView(editText); - mView = new GodotView(this, xrMode, use_32_bits, use_debug_opengl); - layout.addView(mView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); - edittext.setView(mView); - io.setEdit(edittext); + GodotLib.setup(command_line); - mView.getViewTreeObserver().addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() { + final String videoDriver = GodotLib.getGlobal("rendering/quality/driver/driver_name"); + if (videoDriver.equals("Vulkan")) { + mRenderView = new GodotVulkanRenderView(this); + } else { + mRenderView = new GodotGLRenderView(this, xrMode, use_32_bits, use_debug_opengl); + } + + View view = mRenderView.getView(); + layout.addView(view, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); + editText.setView(mRenderView); + io.setEdit(editText); + + view.getViewTreeObserver().addOnGlobalLayoutListener(new 
ViewTreeObserver.OnGlobalLayoutListener() { @Override public void onGlobalLayout() { Point fullSize = new Point(); getWindowManager().getDefaultDisplay().getSize(fullSize); Rect gameSize = new Rect(); - mView.getWindowVisibleDisplayFrame(gameSize); + mRenderView.getView().getWindowVisibleDisplayFrame(gameSize); final int keyboardHeight = fullSize.y - gameSize.bottom; GodotLib.setVirtualKeyboardHeight(keyboardHeight); } }); - final String[] current_command_line = command_line; - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { - GodotLib.setup(current_command_line); // Must occur after GodotLib.setup has completed. for (GodotPlugin plugin : pluginRegistry.getAllPlugins()) { @@ -384,7 +391,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe */ @Keep private Surface getSurface() { - return mView.getHolder().getSurface(); + return mRenderView.getView().getHolder().getSurface(); } /** @@ -617,7 +624,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe } return; } - mView.onPause(); + mRenderView.onActivityPaused(); mSensorManager.unregisterListener(this); @@ -655,7 +662,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe return; } - mView.onResume(); + mRenderView.onActivityResumed(); mSensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_GAME); mSensorManager.registerListener(this, mGravity, SensorManager.SENSOR_DELAY_GAME); @@ -721,8 +728,8 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe final float z = adjustedValues[2]; final int typeOfSensor = event.sensor.getType(); - if (mView != null) { - mView.queueEvent(new Runnable() { + if (mRenderView != null) { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { if (typeOfSensor == Sensor.TYPE_ACCELEROMETER) { @@ -773,8 +780,8 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe } } - if (shouldQuit && mView != null) { - mView.queueEvent(new Runnable() { + if (shouldQuit && mRenderView != null) { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { GodotLib.back(); @@ -789,8 +796,8 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe * This must be called after the render thread has started. */ public final void runOnRenderThread(@NonNull Runnable action) { - if (mView != null) { - mView.queueEvent(action); + if (mRenderView != null) { + mRenderView.queueOnRenderThread(action); } } @@ -847,7 +854,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe if (evcount == 0) return true; - if (mView != null) { + if (mRenderView != null) { final int[] arr = new int[event.getPointerCount() * 3]; for (int i = 0; i < event.getPointerCount(); i++) { @@ -860,7 +867,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe //System.out.printf("gaction: %d\n",event.getAction()); final int action = event.getAction() & MotionEvent.ACTION_MASK; - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { switch (action) { @@ -911,7 +918,7 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe for (int i = cc.length; --i >= 0; cnt += cc[i] != 0 ? 
1 : 0) ; if (cnt == 0) return super.onKeyMultiple(inKeyCode, repeatCount, event); - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { // This method will be called on the rendering thread: public void run() { for (int i = 0, n = cc.length; i < n; i++) { @@ -1033,6 +1040,6 @@ public abstract class Godot extends FragmentActivity implements SensorEventListe progress.mOverallTotal)); } public void initInputDevices() { - mView.initInputDevices(); + mRenderView.initInputDevices(); } } diff --git a/platform/android/java/lib/src/org/godotengine/godot/GodotView.java b/platform/android/java/lib/src/org/godotengine/godot/GodotGLRenderView.java index 8d3c2ae319..9be93243b8 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/GodotView.java +++ b/platform/android/java/lib/src/org/godotengine/godot/GodotGLRenderView.java @@ -1,5 +1,5 @@ /*************************************************************************/ -/* GodotView.java */ +/* GodotGLRenderView.java */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -35,6 +35,7 @@ import android.opengl.GLSurfaceView; import android.view.GestureDetector; import android.view.KeyEvent; import android.view.MotionEvent; +import android.view.SurfaceView; import org.godotengine.godot.input.GodotGestureHandler; import org.godotengine.godot.input.GodotInputHandler; import org.godotengine.godot.utils.GLUtils; @@ -64,16 +65,14 @@ import org.godotengine.godot.xr.regular.RegularFallbackConfigChooser; * that matches it exactly (with regards to red/green/blue/alpha channels * bit depths). Failure to do so would result in an EGL_BAD_MATCH error. */ -public class GodotView extends GLSurfaceView { - - private static String TAG = GodotView.class.getSimpleName(); +public class GodotGLRenderView extends GLSurfaceView implements GodotRenderView { private final Godot activity; private final GodotInputHandler inputHandler; private final GestureDetector detector; private final GodotRenderer godotRenderer; - public GodotView(Godot activity, XRMode xrMode, boolean p_use_32_bits, boolean p_use_debug_opengl) { + public GodotGLRenderView(Godot activity, XRMode xrMode, boolean p_use_32_bits, boolean p_use_debug_opengl) { super(activity); GLUtils.use_32 = p_use_32_bits; GLUtils.use_debug_opengl = p_use_debug_opengl; @@ -85,10 +84,36 @@ public class GodotView extends GLSurfaceView { init(xrMode, false, 16, 0); } + @Override + public SurfaceView getView() { + return this; + } + + @Override public void initInputDevices() { this.inputHandler.initInputDevices(); } + @Override + public void queueOnRenderThread(Runnable event) { + queueEvent(event); + } + + @Override + public void onActivityPaused() { + onPause(); + } + + @Override + public void onActivityResumed() { + onResume(); + } + + @Override + public void onBackPressed() { + activity.onBackPressed(); + } + @SuppressLint("ClickableViewAccessibility") @Override public boolean onTouchEvent(MotionEvent event) { @@ -170,10 +195,6 @@ public class GodotView extends GLSurfaceView { setRenderer(godotRenderer); } - public void onBackPressed() { - activity.onBackPressed(); - } - @Override public void onResume() { super.onResume(); diff --git a/platform/android/java/lib/src/org/godotengine/godot/GodotIO.java b/platform/android/java/lib/src/org/godotengine/godot/GodotIO.java index 68ce40ba10..016a3a8d18 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/GodotIO.java +++ 
b/platform/android/java/lib/src/org/godotengine/godot/GodotIO.java @@ -53,8 +53,6 @@ public class GodotIO { Godot activity; GodotEditText edit; - MediaPlayer mediaPlayer; - final int SCREEN_LANDSCAPE = 0; final int SCREEN_PORTRAIT = 1; final int SCREEN_REVERSE_LANDSCAPE = 2; @@ -530,44 +528,14 @@ public class GodotIO { activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR); } break; } - }; - - public void setEdit(GodotEditText _edit) { - edit = _edit; - } - - public void playVideo(String p_path) { - Uri filePath = Uri.parse(p_path); - mediaPlayer = new MediaPlayer(); - - try { - mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC); - mediaPlayer.setDataSource(activity.getApplicationContext(), filePath); - mediaPlayer.prepare(); - mediaPlayer.start(); - } catch (IOException e) { - System.out.println("IOError while playing video"); - } } - public boolean isVideoPlaying() { - if (mediaPlayer != null) { - return mediaPlayer.isPlaying(); - } - return false; + public int getScreenOrientation() { + return activity.getRequestedOrientation(); } - public void pauseVideo() { - if (mediaPlayer != null) { - mediaPlayer.pause(); - } - } - - public void stopVideo() { - if (mediaPlayer != null) { - mediaPlayer.release(); - mediaPlayer = null; - } + public void setEdit(GodotEditText _edit) { + edit = _edit; } public static final int SYSTEM_DIR_DESKTOP = 0; diff --git a/platform/android/java/lib/src/org/godotengine/godot/GodotLib.java b/platform/android/java/lib/src/org/godotengine/godot/GodotLib.java index 89a65aea24..71fe822233 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/GodotLib.java +++ b/platform/android/java/lib/src/org/godotengine/godot/GodotLib.java @@ -32,6 +32,7 @@ package org.godotengine.godot; import android.app.Activity; import android.hardware.SensorEvent; +import android.view.Surface; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; @@ -72,11 +73,11 @@ public class GodotLib { public static native void resize(int width, int height); /** - * Invoked on the GL thread when the underlying Android surface is created or recreated. + * Invoked on the render thread when the underlying Android surface is created or recreated. + * @param p_surface * @param p_32_bits - * @see android.opengl.GLSurfaceView.Renderer#onSurfaceCreated(GL10, EGLConfig) */ - public static native void newcontext(boolean p_32_bits); + public static native void newcontext(Surface p_surface, boolean p_32_bits); /** * Forward {@link Activity#onBackPressed()} event from the main thread to the GL thread. diff --git a/platform/android/vulkan/vk_renderer_jni.h b/platform/android/java/lib/src/org/godotengine/godot/GodotRenderView.java index 017766fea2..170c433c9c 100644 --- a/platform/android/vulkan/vk_renderer_jni.h +++ b/platform/android/java/lib/src/org/godotengine/godot/GodotRenderView.java @@ -1,5 +1,5 @@ /*************************************************************************/ -/* vk_renderer_jni.h */ +/* GodotRenderView.java */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,19 +28,20 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#ifndef VK_RENDERER_JNI_H -#define VK_RENDERER_JNI_H +package org.godotengine.godot; -#include <android/log.h> -#include <jni.h> +import android.view.SurfaceView; -extern "C" { -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkSurfaceCreated(JNIEnv *env, jobject obj, jobject j_surface); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkSurfaceChanged(JNIEnv *env, jobject object, jobject j_surface, jint width, jint height); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkResume(JNIEnv *env, jobject obj); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkDrawFrame(JNIEnv *env, jobject obj); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkPause(JNIEnv *env, jobject obj); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkDestroy(JNIEnv *env, jobject obj); -} +public interface GodotRenderView { + + abstract public SurfaceView getView(); + + abstract public void initInputDevices(); -#endif // VK_RENDERER_JNI_H + abstract public void queueOnRenderThread(Runnable event); + + abstract public void onActivityPaused(); + abstract public void onActivityResumed(); + + abstract public void onBackPressed(); +} diff --git a/platform/android/java/lib/src/org/godotengine/godot/GodotRenderer.java b/platform/android/java/lib/src/org/godotengine/godot/GodotRenderer.java index ee9a2aee4f..3e5bb4a4c9 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/GodotRenderer.java +++ b/platform/android/java/lib/src/org/godotengine/godot/GodotRenderer.java @@ -70,7 +70,7 @@ class GodotRenderer implements GLSurfaceView.Renderer { } public void onSurfaceCreated(GL10 gl, EGLConfig config) { - GodotLib.newcontext(GLUtils.use_32); + GodotLib.newcontext(null, GLUtils.use_32); for (GodotPlugin plugin : pluginRegistry.getAllPlugins()) { plugin.onGLSurfaceCreated(gl, config); } diff --git a/platform/android/java/lib/src/org/godotengine/godot/GodotVulkanRenderView.java b/platform/android/java/lib/src/org/godotengine/godot/GodotVulkanRenderView.java new file mode 100644 index 0000000000..30197d5729 --- /dev/null +++ b/platform/android/java/lib/src/org/godotengine/godot/GodotVulkanRenderView.java @@ -0,0 +1,142 @@ +/*************************************************************************/ +/* GodotVulkanRenderView.java */ +/*************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/*************************************************************************/ +/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */ +/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. 
*/ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/*************************************************************************/ + +package org.godotengine.godot; + +import android.annotation.SuppressLint; +import android.view.GestureDetector; +import android.view.KeyEvent; +import android.view.MotionEvent; +import android.view.SurfaceView; +import org.godotengine.godot.input.GodotGestureHandler; +import org.godotengine.godot.input.GodotInputHandler; +import org.godotengine.godot.vulkan.VkRenderer; +import org.godotengine.godot.vulkan.VkSurfaceView; + +public class GodotVulkanRenderView extends VkSurfaceView implements GodotRenderView { + + private final Godot mActivity; + private final GodotInputHandler mInputHandler; + private final GestureDetector mGestureDetector; + private final VkRenderer mRenderer; + + public GodotVulkanRenderView(Godot activity) { + super(activity); + + mActivity = activity; + mInputHandler = new GodotInputHandler(this); + mGestureDetector = new GestureDetector(mActivity, new GodotGestureHandler(this)); + mRenderer = new VkRenderer(); + + setFocusableInTouchMode(true); + startRenderer(mRenderer); + } + + @Override + public SurfaceView getView() { + return this; + } + + @Override + public void initInputDevices() { + mInputHandler.initInputDevices(); + } + + @Override + public void queueOnRenderThread(Runnable event) { + queueOnVkThread(event); + } + + @Override + public void onActivityPaused() { + onPause(); + } + + @Override + public void onActivityResumed() { + onResume(); + } + + @Override + public void onBackPressed() { + mActivity.onBackPressed(); + } + + @SuppressLint("ClickableViewAccessibility") + @Override + public boolean onTouchEvent(MotionEvent event) { + super.onTouchEvent(event); + mGestureDetector.onTouchEvent(event); + return mActivity.gotTouchEvent(event); + } + + @Override + public boolean onKeyUp(final int keyCode, KeyEvent event) { + return mInputHandler.onKeyUp(keyCode, event) || super.onKeyUp(keyCode, event); + } + + @Override + public boolean onKeyDown(final int keyCode, KeyEvent event) { + return mInputHandler.onKeyDown(keyCode, event) || super.onKeyDown(keyCode, event); + } + + @Override + public boolean onGenericMotionEvent(MotionEvent event) { + return mInputHandler.onGenericMotionEvent(event) || super.onGenericMotionEvent(event); + } + + @Override + public void onResume() { + super.onResume(); + + queueOnVkThread(new Runnable() { + @Override + public void run() { + // Resume the renderer + mRenderer.onVkResume(); + GodotLib.focusin(); + } + }); + } + + @Override + public void onPause() { + super.onPause(); + + queueOnVkThread(new Runnable() { + @Override + public void run() { + GodotLib.focusout(); + // Pause the renderer + mRenderer.onVkPause(); + } + }); + } +} diff --git a/platform/android/java/lib/src/org/godotengine/godot/input/GodotEditText.java b/platform/android/java/lib/src/org/godotengine/godot/input/GodotEditText.java index e901b4b36d..92bb118e44 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/input/GodotEditText.java +++ 
b/platform/android/java/lib/src/org/godotengine/godot/input/GodotEditText.java @@ -51,7 +51,7 @@ public class GodotEditText extends EditText { // =========================================================== // Fields // =========================================================== - private GodotView mView; + private GodotRenderView mRenderView; private GodotTextInputWrapper mInputWrapper; private EditHandler sHandler = new EditHandler(this); private String mOriginText; @@ -76,22 +76,22 @@ public class GodotEditText extends EditText { // =========================================================== public GodotEditText(final Context context) { super(context); - this.initView(); + initView(); } public GodotEditText(final Context context, final AttributeSet attrs) { super(context, attrs); - this.initView(); + initView(); } public GodotEditText(final Context context, final AttributeSet attrs, final int defStyle) { super(context, attrs, defStyle); - this.initView(); + initView(); } protected void initView() { - this.setPadding(0, 0, 0, 0); - this.setImeOptions(EditorInfo.IME_FLAG_NO_EXTRACT_UI); + setPadding(0, 0, 0, 0); + setImeOptions(EditorInfo.IME_FLAG_NO_EXTRACT_UI); } private void handleMessage(final Message msg) { @@ -106,7 +106,7 @@ public class GodotEditText extends EditText { edit.mInputWrapper.setOriginText(text); edit.addTextChangedListener(edit.mInputWrapper); setMaxInputLength(edit, msg.arg1); - final InputMethodManager imm = (InputMethodManager)mView.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); + final InputMethodManager imm = (InputMethodManager)mRenderView.getView().getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.showSoftInput(edit, 0); } } break; @@ -115,9 +115,9 @@ public class GodotEditText extends EditText { GodotEditText edit = (GodotEditText)msg.obj; edit.removeTextChangedListener(mInputWrapper); - final InputMethodManager imm = (InputMethodManager)mView.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); + final InputMethodManager imm = (InputMethodManager)mRenderView.getView().getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(edit.getWindowToken(), 0); - edit.mView.requestFocus(); + edit.mRenderView.getView().requestFocus(); } break; } } @@ -135,12 +135,12 @@ public class GodotEditText extends EditText { // =========================================================== // Getter & Setter // =========================================================== - public void setView(final GodotView view) { - this.mView = view; + public void setView(final GodotRenderView view) { + mRenderView = view; if (mInputWrapper == null) - mInputWrapper = new GodotTextInputWrapper(mView, this); - this.setOnEditorActionListener(mInputWrapper); - view.requestFocus(); + mInputWrapper = new GodotTextInputWrapper(mRenderView, this); + setOnEditorActionListener(mInputWrapper); + view.getView().requestFocus(); } // =========================================================== @@ -152,7 +152,7 @@ public class GodotEditText extends EditText { /* Let GlSurfaceView get focus if back key is input. 
*/ if (keyCode == KeyEvent.KEYCODE_BACK) { - this.mView.requestFocus(); + mRenderView.getView().requestFocus(); } return true; @@ -162,7 +162,7 @@ public class GodotEditText extends EditText { // Methods // =========================================================== public void showKeyboard(String p_existing_text, int p_max_input_length) { - this.mOriginText = p_existing_text; + mOriginText = p_existing_text; final Message msg = new Message(); msg.what = HANDLER_OPEN_IME_KEYBOARD; diff --git a/platform/android/java/lib/src/org/godotengine/godot/input/GodotGestureHandler.java b/platform/android/java/lib/src/org/godotengine/godot/input/GodotGestureHandler.java index 1a38a9c3d2..b1e0f66373 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/input/GodotGestureHandler.java +++ b/platform/android/java/lib/src/org/godotengine/godot/input/GodotGestureHandler.java @@ -34,22 +34,22 @@ import android.util.Log; import android.view.GestureDetector; import android.view.MotionEvent; import org.godotengine.godot.GodotLib; -import org.godotengine.godot.GodotView; +import org.godotengine.godot.GodotRenderView; /** - * Handles gesture input related events for the {@link GodotView} view. + * Handles gesture input related events for the {@link GodotRenderView} view. * https://developer.android.com/reference/android/view/GestureDetector.SimpleOnGestureListener */ public class GodotGestureHandler extends GestureDetector.SimpleOnGestureListener { - private final GodotView godotView; + private final GodotRenderView mRenderView; - public GodotGestureHandler(GodotView godotView) { - this.godotView = godotView; + public GodotGestureHandler(GodotRenderView godotView) { + mRenderView = godotView; } private void queueEvent(Runnable task) { - godotView.queueEvent(task); + mRenderView.queueOnRenderThread(task); } @Override diff --git a/platform/android/java/lib/src/org/godotengine/godot/input/GodotInputHandler.java b/platform/android/java/lib/src/org/godotengine/godot/input/GodotInputHandler.java index e00ca86c41..0e4fc65119 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/input/GodotInputHandler.java +++ b/platform/android/java/lib/src/org/godotengine/godot/input/GodotInputHandler.java @@ -42,27 +42,27 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; import org.godotengine.godot.GodotLib; -import org.godotengine.godot.GodotView; +import org.godotengine.godot.GodotRenderView; import org.godotengine.godot.input.InputManagerCompat.InputDeviceListener; /** - * Handles input related events for the {@link GodotView} view. + * Handles input related events for the {@link GodotRenderView} view. 
*/ public class GodotInputHandler implements InputDeviceListener { - private final ArrayList<Joystick> joysticksDevices = new ArrayList<Joystick>(); + private final ArrayList<Joystick> mJoysticksDevices = new ArrayList<Joystick>(); - private final GodotView godotView; - private final InputManagerCompat inputManager; + private final GodotRenderView mRenderView; + private final InputManagerCompat mInputManager; - public GodotInputHandler(GodotView godotView) { - this.godotView = godotView; - this.inputManager = InputManagerCompat.Factory.getInputManager(godotView.getContext()); - this.inputManager.registerInputDeviceListener(this, null); + public GodotInputHandler(GodotRenderView godotView) { + mRenderView = godotView; + mInputManager = InputManagerCompat.Factory.getInputManager(mRenderView.getView().getContext()); + mInputManager.registerInputDeviceListener(this, null); } private void queueEvent(Runnable task) { - godotView.queueEvent(task); + mRenderView.queueOnRenderThread(task); } private boolean isKeyEvent_GameDevice(int source) { @@ -113,7 +113,7 @@ public class GodotInputHandler implements InputDeviceListener { public boolean onKeyDown(final int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK) { - godotView.onBackPressed(); + mRenderView.onBackPressed(); // press 'back' button should not terminate program //normal handle 'back' event in game logic return true; @@ -164,7 +164,7 @@ public class GodotInputHandler implements InputDeviceListener { // Check if the device exists if (device_id > -1) { - Joystick joy = joysticksDevices.get(device_id); + Joystick joy = mJoysticksDevices.get(device_id); for (int i = 0; i < joy.axes.size(); i++) { InputDevice.MotionRange range = joy.axes.get(i); @@ -208,11 +208,11 @@ public class GodotInputHandler implements InputDeviceListener { public void initInputDevices() { /* initially add input devices*/ - int[] deviceIds = inputManager.getInputDeviceIds(); + int[] deviceIds = mInputManager.getInputDeviceIds(); for (int deviceId : deviceIds) { - InputDevice device = inputManager.getInputDevice(deviceId); + InputDevice device = mInputManager.getInputDevice(deviceId); if (DEBUG) { - Log.v("GodotView", String.format("init() deviceId:%d, Name:%s\n", deviceId, device.getName())); + Log.v("GodotInputHandler", String.format("init() deviceId:%d, Name:%s\n", deviceId, device.getName())); } onInputDeviceAdded(deviceId); } @@ -224,13 +224,13 @@ public class GodotInputHandler implements InputDeviceListener { // Check if the device has not been already added if (id < 0) { - InputDevice device = inputManager.getInputDevice(deviceId); + InputDevice device = mInputManager.getInputDevice(deviceId); //device can be null if deviceId is not found if (device != null) { int sources = device.getSources(); if (((sources & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) || ((sources & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK)) { - id = joysticksDevices.size(); + id = mJoysticksDevices.size(); Joystick joy = new Joystick(); joy.device_id = deviceId; @@ -249,7 +249,7 @@ public class GodotInputHandler implements InputDeviceListener { } } - joysticksDevices.add(joy); + mJoysticksDevices.add(joy); final int device_id = id; final String name = joy.name; @@ -270,7 +270,7 @@ public class GodotInputHandler implements InputDeviceListener { // Check if the evice has not been already removed if (device_id > -1) { - joysticksDevices.remove(device_id); + mJoysticksDevices.remove(device_id); queueEvent(new Runnable() { @Override @@ -360,8 +360,8 @@ 
public class GodotInputHandler implements InputDeviceListener { } private int findJoystickDevice(int device_id) { - for (int i = 0; i < joysticksDevices.size(); i++) { - if (joysticksDevices.get(i).device_id == device_id) { + for (int i = 0; i < mJoysticksDevices.size(); i++) { + if (mJoysticksDevices.get(i).device_id == device_id) { return i; } } diff --git a/platform/android/java/lib/src/org/godotengine/godot/input/GodotTextInputWrapper.java b/platform/android/java/lib/src/org/godotengine/godot/input/GodotTextInputWrapper.java index 18f2d57661..e12ff266bf 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/input/GodotTextInputWrapper.java +++ b/platform/android/java/lib/src/org/godotengine/godot/input/GodotTextInputWrapper.java @@ -48,7 +48,7 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene // =========================================================== // Fields // =========================================================== - private final GodotView mView; + private final GodotRenderView mRenderView; private final GodotEditText mEdit; private String mOriginText; @@ -56,9 +56,9 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene // Constructors // =========================================================== - public GodotTextInputWrapper(final GodotView view, final GodotEditText edit) { - this.mView = view; - this.mEdit = edit; + public GodotTextInputWrapper(final GodotRenderView view, final GodotEditText edit) { + mRenderView = view; + mEdit = edit; } // =========================================================== @@ -66,13 +66,13 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene // =========================================================== private boolean isFullScreenEdit() { - final TextView textField = this.mEdit; + final TextView textField = mEdit; final InputMethodManager imm = (InputMethodManager)textField.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); return imm.isFullscreenMode(); } public void setOriginText(final String originText) { - this.mOriginText = originText; + mOriginText = originText; } // =========================================================== @@ -87,7 +87,7 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene public void beforeTextChanged(final CharSequence pCharSequence, final int start, final int count, final int after) { //Log.d(TAG, "beforeTextChanged(" + pCharSequence + ")start: " + start + ",count: " + count + ",after: " + after); - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { for (int i = 0; i < count; ++i) { @@ -106,7 +106,7 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene for (int i = start; i < start + count; ++i) { newChars[i - start] = pCharSequence.charAt(i); } - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void run() { for (int i = 0; i < count; ++i) { @@ -124,10 +124,10 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene @Override public boolean onEditorAction(final TextView pTextView, final int pActionID, final KeyEvent pKeyEvent) { - if (this.mEdit == pTextView && this.isFullScreenEdit()) { + if (mEdit == pTextView && isFullScreenEdit()) { final String characters = pKeyEvent.getCharacters(); - mView.queueEvent(new Runnable() { + mRenderView.queueOnRenderThread(new Runnable() { @Override public void 
run() { for (int i = 0; i < characters.length(); i++) { @@ -144,7 +144,7 @@ public class GodotTextInputWrapper implements TextWatcher, OnEditorActionListene GodotLib.key(KeyEvent.KEYCODE_ENTER, KeyEvent.KEYCODE_ENTER, 0, true); GodotLib.key(KeyEvent.KEYCODE_ENTER, KeyEvent.KEYCODE_ENTER, 0, false); - this.mView.requestFocus(); + mRenderView.getView().requestFocus(); return true; } return false; diff --git a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkRenderer.kt b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkRenderer.kt index 67faad8ddd..608ad48df9 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkRenderer.kt +++ b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkRenderer.kt @@ -33,6 +33,11 @@ package org.godotengine.godot.vulkan import android.view.Surface +import org.godotengine.godot.Godot +import org.godotengine.godot.GodotLib +import org.godotengine.godot.plugin.GodotPlugin +import org.godotengine.godot.plugin.GodotPluginRegistry + /** * Responsible to setting up and driving the Vulkan rendering logic. * @@ -48,52 +53,64 @@ import android.view.Surface */ internal class VkRenderer { + private val pluginRegistry: GodotPluginRegistry = GodotPluginRegistry.getPluginRegistry() + /** * Called when the surface is created and signals the beginning of rendering. */ fun onVkSurfaceCreated(surface: Surface) { - nativeOnVkSurfaceCreated(surface) + // TODO: properly implement surface re-creation: + // GodotLib.newcontext should be called here once it's done. + //GodotLib.newcontext(surface, false) + + for (plugin in pluginRegistry.getAllPlugins()) { + plugin.onVkSurfaceCreated(surface) + } } /** * Called after the surface is created and whenever its size changes. */ fun onVkSurfaceChanged(surface: Surface, width: Int, height: Int) { - nativeOnVkSurfaceChanged(surface, width, height) + GodotLib.resize(width, height) + + // TODO: properly implement surface re-creation: + // Update the native renderer instead of restarting the app. + // GodotLib.newcontext should not be called here once it's done. + GodotLib.newcontext(surface, false) + + for (plugin in pluginRegistry.getAllPlugins()) { + plugin.onVkSurfaceChanged(surface, width, height) + } } /** * Called to draw the current frame. */ fun onVkDrawFrame() { - nativeOnVkDrawFrame() + GodotLib.step() + for (plugin in pluginRegistry.getAllPlugins()) { + plugin.onVkDrawFrame() + } } /** * Called when the rendering thread is resumed. */ fun onVkResume() { - nativeOnVkResume() + GodotLib.onRendererResumed() } /** * Called when the rendering thread is paused. */ fun onVkPause() { - nativeOnVkPause() + GodotLib.onRendererPaused() } /** * Called when the rendering thread is destroyed and used as signal to tear down the Vulkan logic. 
*/ fun onVkDestroy() { - nativeOnVkDestroy() } - - private external fun nativeOnVkSurfaceCreated(surface: Surface) - private external fun nativeOnVkSurfaceChanged(surface: Surface, width: Int, height: Int) - private external fun nativeOnVkResume() - private external fun nativeOnVkDrawFrame() - private external fun nativeOnVkPause() - private external fun nativeOnVkDestroy() } diff --git a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkSurfaceView.kt b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkSurfaceView.kt index 1c594f3201..6b0e12b21a 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkSurfaceView.kt +++ b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkSurfaceView.kt @@ -49,7 +49,7 @@ import android.view.SurfaceView * UI thread. * </ul> */ -internal class VkSurfaceView(context: Context) : SurfaceView(context), SurfaceHolder.Callback { +open internal class VkSurfaceView(context: Context) : SurfaceView(context), SurfaceHolder.Callback { companion object { fun checkState(expression: Boolean, errorMessage: Any) { @@ -100,7 +100,7 @@ internal class VkSurfaceView(context: Context) : SurfaceView(context), SurfaceHo * * Must not be called before a [VkRenderer] has been set. */ - fun onResume() { + open fun onResume() { vkThread.onResume() } @@ -109,7 +109,7 @@ internal class VkSurfaceView(context: Context) : SurfaceView(context), SurfaceHo * * Must not be called before a [VkRenderer] has been set. */ - fun onPause() { + open fun onPause() { vkThread.onPause() } diff --git a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkThread.kt b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkThread.kt index 2e332840bf..7557c8aa22 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkThread.kt +++ b/platform/android/java/lib/src/org/godotengine/godot/vulkan/VkThread.kt @@ -219,9 +219,9 @@ internal class VkThread(private val vkSurfaceView: VkSurfaceView, private val vk vkRenderer.onVkDrawFrame() } } catch (ex: InterruptedException) { - Log.i(TAG, ex.message) + Log.i(TAG, "InterruptedException", ex) } catch (ex: IllegalStateException) { - Log.i(TAG, ex.message) + Log.i(TAG, "IllegalStateException", ex) } finally { threadExiting() } diff --git a/platform/android/java/lib/src/org/godotengine/godot/xr/regular/RegularContextFactory.java b/platform/android/java/lib/src/org/godotengine/godot/xr/regular/RegularContextFactory.java index f2b4c95a2c..31cf696195 100644 --- a/platform/android/java/lib/src/org/godotengine/godot/xr/regular/RegularContextFactory.java +++ b/platform/android/java/lib/src/org/godotengine/godot/xr/regular/RegularContextFactory.java @@ -51,7 +51,6 @@ public class RegularContextFactory implements GLSurfaceView.EGLContextFactory { private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) { - String driver_name = GodotLib.getGlobal("rendering/quality/driver/driver_name"); // FIXME: Add support for Vulkan. 
Log.w(TAG, "creating OpenGL ES 2.0 context :"); diff --git a/platform/android/java_godot_io_wrapper.cpp b/platform/android/java_godot_io_wrapper.cpp index 8d075f8e97..0da0bd6387 100644 --- a/platform/android/java_godot_io_wrapper.cpp +++ b/platform/android/java_godot_io_wrapper.cpp @@ -56,11 +56,8 @@ GodotIOJavaWrapper::GodotIOJavaWrapper(JNIEnv *p_env, jobject p_godot_io_instanc _show_keyboard = p_env->GetMethodID(cls, "showKeyboard", "(Ljava/lang/String;I)V"); _hide_keyboard = p_env->GetMethodID(cls, "hideKeyboard", "()V"); _set_screen_orientation = p_env->GetMethodID(cls, "setScreenOrientation", "(I)V"); + _get_screen_orientation = p_env->GetMethodID(cls, "getScreenOrientation", "()I"); _get_system_dir = p_env->GetMethodID(cls, "getSystemDir", "(I)Ljava/lang/String;"); - _play_video = p_env->GetMethodID(cls, "playVideo", "(Ljava/lang/String;)V"); - _is_video_playing = p_env->GetMethodID(cls, "isVideoPlaying", "()Z"); - _pause_video = p_env->GetMethodID(cls, "pauseVideo", "()V"); - _stop_video = p_env->GetMethodID(cls, "stopVideo", "()V"); } } @@ -157,40 +154,22 @@ void GodotIOJavaWrapper::set_screen_orientation(int p_orient) { } } -String GodotIOJavaWrapper::get_system_dir(int p_dir) { - if (_get_system_dir) { +int GodotIOJavaWrapper::get_screen_orientation() { + if (_get_screen_orientation) { JNIEnv *env = ThreadAndroid::get_env(); - jstring s = (jstring)env->CallObjectMethod(godot_io_instance, _get_system_dir, p_dir); - return jstring_to_string(s, env); + return env->CallIntMethod(godot_io_instance, _get_screen_orientation); } else { - return String("."); + return 0; } } -void GodotIOJavaWrapper::play_video(const String &p_path) { - // Why is this not here?!?! -} - -bool GodotIOJavaWrapper::is_video_playing() { - if (_is_video_playing) { +String GodotIOJavaWrapper::get_system_dir(int p_dir) { + if (_get_system_dir) { JNIEnv *env = ThreadAndroid::get_env(); - return env->CallBooleanMethod(godot_io_instance, _is_video_playing); + jstring s = (jstring)env->CallObjectMethod(godot_io_instance, _get_system_dir, p_dir); + return jstring_to_string(s, env); } else { - return false; - } -} - -void GodotIOJavaWrapper::pause_video() { - if (_pause_video) { - JNIEnv *env = ThreadAndroid::get_env(); - env->CallVoidMethod(godot_io_instance, _pause_video); - } -} - -void GodotIOJavaWrapper::stop_video() { - if (_stop_video) { - JNIEnv *env = ThreadAndroid::get_env(); - env->CallVoidMethod(godot_io_instance, _stop_video); + return String("."); } } diff --git a/platform/android/java_godot_io_wrapper.h b/platform/android/java_godot_io_wrapper.h index 7dfed52187..dbb3b564f6 100644 --- a/platform/android/java_godot_io_wrapper.h +++ b/platform/android/java_godot_io_wrapper.h @@ -54,11 +54,8 @@ private: jmethodID _show_keyboard = 0; jmethodID _hide_keyboard = 0; jmethodID _set_screen_orientation = 0; + jmethodID _get_screen_orientation = 0; jmethodID _get_system_dir = 0; - jmethodID _play_video = 0; - jmethodID _is_video_playing = 0; - jmethodID _pause_video = 0; - jmethodID _stop_video = 0; public: GodotIOJavaWrapper(JNIEnv *p_env, jobject p_godot_io_instance); @@ -78,11 +75,8 @@ public: int get_vk_height(); void set_vk_height(int p_height); void set_screen_orientation(int p_orient); + int get_screen_orientation(); String get_system_dir(int p_dir); - void play_video(const String &p_path); - bool is_video_playing(); - void pause_video(); - void stop_video(); }; #endif /* !JAVA_GODOT_IO_WRAPPER_H */ diff --git a/platform/android/java_godot_lib_jni.cpp b/platform/android/java_godot_lib_jni.cpp index 
9d44ab4619..c103e74222 100644 --- a/platform/android/java_godot_lib_jni.cpp +++ b/platform/android/java_godot_lib_jni.cpp @@ -34,13 +34,13 @@ #include "java_godot_wrapper.h" #include "android/asset_manager_jni.h" -#include "android_keys_utils.h" #include "api/java_class_wrapper.h" #include "audio_driver_jandroid.h" #include "core/engine.h" #include "core/input/input_filter.h" #include "core/project_settings.h" #include "dir_access_jandroid.h" +#include "display_server_android.h" #include "file_access_android.h" #include "file_access_jandroid.h" #include "jni_utils.h" @@ -52,6 +52,8 @@ #include <unistd.h> +#include <android/native_window_jni.h> + static JavaClassWrapper *java_class_wrapper = nullptr; static OS_Android *os_android = nullptr; static GodotJavaWrapper *godot_java = nullptr; @@ -165,17 +167,20 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_setup(JNIEnv *env, jc JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_resize(JNIEnv *env, jclass clazz, jint width, jint height) { if (os_android) - os_android->set_display_size(Size2(width, height)); + os_android->set_display_size(Size2i(width, height)); } -JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_newcontext(JNIEnv *env, jclass clazz, jboolean p_32_bits) { - +JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_newcontext(JNIEnv *env, jclass clazz, jobject p_surface, jboolean p_32_bits) { if (os_android) { if (step == 0) { // During startup os_android->set_context_is_16_bits(!p_32_bits); + if (p_surface) { + ANativeWindow *native_window = ANativeWindow_fromSurface(env, p_surface); + os_android->set_native_window(native_window); + } } else { - // GL context recreated because it was lost; restart app to let it reload everything + // Rendering context recreated because it was lost; restart app to let it reload everything os_android->main_loop_end(); godot_java->restart(env); step = -1; // Ensure no further steps are attempted @@ -195,7 +200,6 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_step(JNIEnv *env, jcl return; if (step == 0) { - // Since Godot is initialized on the UI thread, _main_thread_id was set to that thread's id, // but for Godot purposes, the main thread is the one running the game loop Main::setup2(Thread::get_caller_id()); @@ -213,10 +217,10 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_step(JNIEnv *env, jcl ++step; } - os_android->process_accelerometer(accelerometer); - os_android->process_gravity(gravity); - os_android->process_magnetometer(magnetometer); - os_android->process_gyroscope(gyroscope); + DisplayServerAndroid::get_singleton()->process_accelerometer(accelerometer); + DisplayServerAndroid::get_singleton()->process_gravity(gravity); + DisplayServerAndroid::get_singleton()->process_magnetometer(magnetometer); + DisplayServerAndroid::get_singleton()->process_gyroscope(gyroscope); if (os_android->main_loop_iterate()) { @@ -229,18 +233,18 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_touch(JNIEnv *env, jc if (step == 0) return; - Vector<OS_Android::TouchPos> points; + Vector<DisplayServerAndroid::TouchPos> points; for (int i = 0; i < count; i++) { jint p[3]; env->GetIntArrayRegion(positions, i * 3, 3, p); - OS_Android::TouchPos tp; + DisplayServerAndroid::TouchPos tp; tp.pos = Point2(p[1], p[2]); tp.id = p[0]; points.push_back(tp); } - os_android->process_touch(ev, pointer, points); + DisplayServerAndroid::get_singleton()->process_touch(ev, pointer, points); /* if (os_android) @@ -252,78 +256,78 @@ JNIEXPORT void JNICALL 
Java_org_godotengine_godot_GodotLib_hover(JNIEnv *env, jc if (step == 0) return; - os_android->process_hover(p_type, Point2(p_x, p_y)); + DisplayServerAndroid::get_singleton()->process_hover(p_type, Point2(p_x, p_y)); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_doubletap(JNIEnv *env, jclass clazz, jint p_x, jint p_y) { if (step == 0) return; - os_android->process_double_tap(Point2(p_x, p_y)); + DisplayServerAndroid::get_singleton()->process_double_tap(Point2(p_x, p_y)); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_scroll(JNIEnv *env, jclass clazz, jint p_x, jint p_y) { if (step == 0) return; - os_android->process_scroll(Point2(p_x, p_y)); + DisplayServerAndroid::get_singleton()->process_scroll(Point2(p_x, p_y)); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_joybutton(JNIEnv *env, jclass clazz, jint p_device, jint p_button, jboolean p_pressed) { if (step == 0) return; - OS_Android::JoypadEvent jevent; + DisplayServerAndroid::JoypadEvent jevent; jevent.device = p_device; - jevent.type = OS_Android::JOY_EVENT_BUTTON; + jevent.type = DisplayServerAndroid::JOY_EVENT_BUTTON; jevent.index = p_button; jevent.pressed = p_pressed; - os_android->process_joy_event(jevent); + DisplayServerAndroid::get_singleton()->process_joy_event(jevent); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_joyaxis(JNIEnv *env, jclass clazz, jint p_device, jint p_axis, jfloat p_value) { if (step == 0) return; - OS_Android::JoypadEvent jevent; + DisplayServerAndroid::JoypadEvent jevent; jevent.device = p_device; - jevent.type = OS_Android::JOY_EVENT_AXIS; + jevent.type = DisplayServerAndroid::JOY_EVENT_AXIS; jevent.index = p_axis; jevent.value = p_value; - os_android->process_joy_event(jevent); + DisplayServerAndroid::get_singleton()->process_joy_event(jevent); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_joyhat(JNIEnv *env, jclass clazz, jint p_device, jint p_hat_x, jint p_hat_y) { if (step == 0) return; - OS_Android::JoypadEvent jevent; + DisplayServerAndroid::JoypadEvent jevent; jevent.device = p_device; - jevent.type = OS_Android::JOY_EVENT_HAT; + jevent.type = DisplayServerAndroid::JOY_EVENT_HAT; int hat = 0; if (p_hat_x != 0) { if (p_hat_x < 0) - hat |= InputDefault::HAT_MASK_LEFT; + hat |= InputFilter::HAT_MASK_LEFT; else - hat |= InputDefault::HAT_MASK_RIGHT; + hat |= InputFilter::HAT_MASK_RIGHT; } if (p_hat_y != 0) { if (p_hat_y < 0) - hat |= InputDefault::HAT_MASK_UP; + hat |= InputFilter::HAT_MASK_UP; else - hat |= InputDefault::HAT_MASK_DOWN; + hat |= InputFilter::HAT_MASK_DOWN; } jevent.hat = hat; - os_android->process_joy_event(jevent); + DisplayServerAndroid::get_singleton()->process_joy_event(jevent); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_joyconnectionchanged(JNIEnv *env, jclass clazz, jint p_device, jboolean p_connected, jstring p_name) { if (os_android) { String name = jstring_to_string(p_name, env); - os_android->joy_connection_changed(p_device, p_connected, name); + InputFilter::get_singleton()->joy_connection_changed(p_device, p_connected, name); } } @@ -331,29 +335,7 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_key(JNIEnv *env, jcla if (step == 0) return; - Ref<InputEventKey> ievent; - ievent.instance(); - int val = p_unicode_char; - int keycode = android_get_keysym(p_keycode); - int phy_keycode = android_get_keysym(p_scancode); - ievent->set_keycode(keycode); - ievent->set_physical_keycode(phy_keycode); - ievent->set_unicode(val); - ievent->set_pressed(p_pressed); - - if (val == '\n') 
{ - ievent->set_keycode(KEY_ENTER); - } else if (val == 61448) { - ievent->set_keycode(KEY_BACKSPACE); - ievent->set_unicode(KEY_BACKSPACE); - } else if (val == 61453) { - ievent->set_keycode(KEY_ENTER); - ievent->set_unicode(KEY_ENTER); - } else if (p_keycode == 4) { - os_android->main_loop_request_go_back(); - } - - os_android->process_event(ievent); + DisplayServerAndroid::get_singleton()->process_key_event(p_keycode, p_scancode, p_unicode_char, p_pressed); } JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_accelerometer(JNIEnv *env, jclass clazz, jfloat x, jfloat y, jfloat z) { diff --git a/platform/android/java_godot_lib_jni.h b/platform/android/java_godot_lib_jni.h index a7a5970440..221d701e2b 100644 --- a/platform/android/java_godot_lib_jni.h +++ b/platform/android/java_godot_lib_jni.h @@ -41,7 +41,7 @@ JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_initialize(JNIEnv *en JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_ondestroy(JNIEnv *env, jclass clazz, jobject activity); JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_setup(JNIEnv *env, jclass clazz, jobjectArray p_cmdline); JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_resize(JNIEnv *env, jclass clazz, jint width, jint height); -JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_newcontext(JNIEnv *env, jclass clazz, jboolean p_32_bits); +JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_newcontext(JNIEnv *env, jclass clazz, jobject p_surface, jboolean p_32_bits); JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_step(JNIEnv *env, jclass clazz); JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_back(JNIEnv *env, jclass clazz); JNIEXPORT void JNICALL Java_org_godotengine_godot_GodotLib_touch(JNIEnv *env, jclass clazz, jint ev, jint pointer, jint count, jintArray positions); diff --git a/platform/android/os_android.cpp b/platform/android/os_android.cpp index 344377d673..760157595c 100644 --- a/platform/android/os_android.cpp +++ b/platform/android/os_android.cpp @@ -32,15 +32,11 @@ #include "core/io/file_access_buffered_fa.h" #include "core/project_settings.h" -#if defined(OPENGL_ENABLED) -#include "drivers/gles2/rasterizer_gles2.h" -#endif #include "drivers/unix/dir_access_unix.h" #include "drivers/unix/file_access_unix.h" #include "file_access_android.h" #include "main/main.h" -#include "servers/rendering/rendering_server_raster.h" -#include "servers/rendering/rendering_server_wrap_mt.h" +#include "platform/android/display_server_android.h" #include "dir_access_jandroid.h" #include "file_access_jandroid.h" @@ -60,29 +56,6 @@ public: virtual ~AndroidLogger() {} }; -int OS_Android::get_video_driver_count() const { - - return 2; -} - -const char *OS_Android::get_video_driver_name(int p_driver) const { - - switch (p_driver) { - case VIDEO_DRIVER_GLES2: - return "GLES2"; - } - ERR_FAIL_V_MSG(nullptr, "Invalid video driver index: " + itos(p_driver) + "."); -} -int OS_Android::get_audio_driver_count() const { - - return 1; -} - -const char *OS_Android::get_audio_driver_name(int p_driver) const { - - return "Android"; -} - void OS_Android::initialize_core() { OS_Unix::initialize_core(); @@ -110,71 +83,33 @@ void OS_Android::initialize_core() { NetSocketAndroid::make_default(); } -void OS_Android::set_opengl_extensions(const char *p_gl_extensions) { - - ERR_FAIL_COND(!p_gl_extensions); - gl_extensions = p_gl_extensions; -} - -int OS_Android::get_current_video_driver() const { - return video_driver_index; +void OS_Android::initialize() { + 
initialize_core(); } -Error OS_Android::initialize(const VideoMode &p_desired, int p_video_driver, int p_audio_driver) { +void OS_Android::initialize_joypads() { + InputFilter::get_singleton()->set_fallback_mapping(godot_java->get_input_fallback_mapping()); - // FIXME: Add Vulkan support. Readd fallback code from Vulkan to GLES2? - -#if defined(OPENGL_ENABLED) - if (video_driver_index == VIDEO_DRIVER_GLES2) { - bool gl_initialization_error = false; - - if (RasterizerGLES2::is_viable() == OK) { - RasterizerGLES2::register_config(); - RasterizerGLES2::make_current(); - } else { - gl_initialization_error = true; - } - - if (gl_initialization_error) { - OS::get_singleton()->alert("Your device does not support any of the supported OpenGL versions.\n" - "Please try updating your Android version.", - "Unable to initialize video driver"); - return ERR_UNAVAILABLE; - } - } -#endif - - video_driver_index = p_video_driver; - - rendering_server = memnew(RenderingServerRaster); - if (get_render_thread_mode() != RENDER_THREAD_UNSAFE) { - rendering_server = memnew(RenderingServerWrapMT(rendering_server, false)); - } - - rendering_server->init(); - - AudioDriverManager::initialize(p_audio_driver); - - input = memnew(InputDefault); - input->set_fallback_mapping(godot_java->get_input_fallback_mapping()); - - return OK; + // This queries/updates the currently connected devices/joypads. + godot_java->init_input_devices(); } void OS_Android::set_main_loop(MainLoop *p_main_loop) { - main_loop = p_main_loop; - input->set_main_loop(p_main_loop); } void OS_Android::delete_main_loop() { - - memdelete(main_loop); + if (main_loop) { + memdelete(main_loop); + main_loop = nullptr; + } } void OS_Android::finalize() { +} - memdelete(input); +OS_Android *OS_Android::get_singleton() { + return (OS_Android *)OS::get_singleton(); } GodotJavaWrapper *OS_Android::get_godot_java() { @@ -185,12 +120,6 @@ GodotIOJavaWrapper *OS_Android::get_godot_io_java() { return godot_io_java; } -void OS_Android::alert(const String &p_alert, const String &p_title) { - - //print("ALERT: %s\n", p_alert.utf8().get_data()); - godot_java->alert(p_alert, p_title); -} - bool OS_Android::request_permission(const String &p_name) { return godot_java->request_permission(p_name); @@ -212,63 +141,6 @@ Error OS_Android::open_dynamic_library(const String p_path, void *&p_library_han return OK; } -void OS_Android::set_mouse_show(bool p_show) { - - //android has no mouse... -} - -void OS_Android::set_mouse_grab(bool p_grab) { - - //it really has no mouse...! -} - -bool OS_Android::is_mouse_grab_enabled() const { - - //*sigh* technology has evolved so much since i was a kid.. - return false; -} - -Point2 OS_Android::get_mouse_position() const { - - return Point2(); -} - -int OS_Android::get_mouse_button_state() const { - - return 0; -} - -void OS_Android::set_window_title(const String &p_title) { - //This queries/updates the currently connected devices/joypads - //Set_window_title is called when initializing the main loop (main.cpp) - //therefore this place is found to be suitable (I found no better). 
- godot_java->init_input_devices(); -} - -void OS_Android::set_video_mode(const VideoMode &p_video_mode, int p_screen) { -} - -OS::VideoMode OS_Android::get_video_mode(int p_screen) const { - - return default_videomode; -} - -void OS_Android::get_fullscreen_mode_list(List<VideoMode> *p_list, int p_screen) const { - - p_list->push_back(default_videomode); -} - -void OS_Android::set_keep_screen_on(bool p_enabled) { - OS::set_keep_screen_on(p_enabled); - - godot_java->set_keep_screen_on(p_enabled); -} - -Size2 OS_Android::get_window_size() const { - - return Vector2(default_videomode.width, default_videomode.height); -} - String OS_Android::get_name() const { return "Android"; @@ -279,11 +151,6 @@ MainLoop *OS_Android::get_main_loop() const { return main_loop; } -bool OS_Android::can_draw() const { - - return true; //always? -} - void OS_Android::main_loop_begin() { if (main_loop) @@ -304,277 +171,17 @@ void OS_Android::main_loop_end() { } void OS_Android::main_loop_focusout() { - - if (main_loop) - main_loop->notification(NOTIFICATION_WM_FOCUS_OUT); + DisplayServerAndroid::get_singleton()->send_window_event(DisplayServer::WINDOW_EVENT_FOCUS_OUT); audio_driver_android.set_pause(true); } void OS_Android::main_loop_focusin() { - - if (main_loop) - main_loop->notification(NOTIFICATION_WM_FOCUS_IN); + DisplayServerAndroid::get_singleton()->send_window_event(DisplayServer::WINDOW_EVENT_FOCUS_IN); audio_driver_android.set_pause(false); } -void OS_Android::process_joy_event(OS_Android::JoypadEvent p_event) { - - switch (p_event.type) { - case JOY_EVENT_BUTTON: - input->joy_button(p_event.device, p_event.index, p_event.pressed); - break; - case JOY_EVENT_AXIS: - InputDefault::JoyAxis value; - value.min = -1; - value.value = p_event.value; - input->joy_axis(p_event.device, p_event.index, value); - break; - case JOY_EVENT_HAT: - input->joy_hat(p_event.device, p_event.hat); - break; - default: - return; - } -} - -void OS_Android::process_event(Ref<InputEvent> p_event) { - - input->parse_input_event(p_event); -} - -void OS_Android::process_touch(int p_what, int p_pointer, const Vector<TouchPos> &p_points) { - - switch (p_what) { - case 0: { //gesture begin - - if (touch.size()) { - //end all if exist - for (int i = 0; i < touch.size(); i++) { - - Ref<InputEventScreenTouch> ev; - ev.instance(); - ev->set_index(touch[i].id); - ev->set_pressed(false); - ev->set_position(touch[i].pos); - input->parse_input_event(ev); - } - } - - touch.resize(p_points.size()); - for (int i = 0; i < p_points.size(); i++) { - touch.write[i].id = p_points[i].id; - touch.write[i].pos = p_points[i].pos; - } - - //send touch - for (int i = 0; i < touch.size(); i++) { - - Ref<InputEventScreenTouch> ev; - ev.instance(); - ev->set_index(touch[i].id); - ev->set_pressed(true); - ev->set_position(touch[i].pos); - input->parse_input_event(ev); - } - - } break; - case 1: { //motion - - ERR_FAIL_COND(touch.size() != p_points.size()); - - for (int i = 0; i < touch.size(); i++) { - - int idx = -1; - for (int j = 0; j < p_points.size(); j++) { - - if (touch[i].id == p_points[j].id) { - idx = j; - break; - } - } - - ERR_CONTINUE(idx == -1); - - if (touch[i].pos == p_points[idx].pos) - continue; //no move unncesearily - - Ref<InputEventScreenDrag> ev; - ev.instance(); - ev->set_index(touch[i].id); - ev->set_position(p_points[idx].pos); - ev->set_relative(p_points[idx].pos - touch[i].pos); - input->parse_input_event(ev); - touch.write[i].pos = p_points[idx].pos; - } - - } break; - case 2: { //release - - if (touch.size()) { - //end all if exist - 
for (int i = 0; i < touch.size(); i++) { - - Ref<InputEventScreenTouch> ev; - ev.instance(); - ev->set_index(touch[i].id); - ev->set_pressed(false); - ev->set_position(touch[i].pos); - input->parse_input_event(ev); - } - touch.clear(); - } - } break; - case 3: { // add touch - - for (int i = 0; i < p_points.size(); i++) { - if (p_points[i].id == p_pointer) { - TouchPos tp = p_points[i]; - touch.push_back(tp); - - Ref<InputEventScreenTouch> ev; - ev.instance(); - - ev->set_index(tp.id); - ev->set_pressed(true); - ev->set_position(tp.pos); - input->parse_input_event(ev); - - break; - } - } - } break; - case 4: { // remove touch - - for (int i = 0; i < touch.size(); i++) { - if (touch[i].id == p_pointer) { - - Ref<InputEventScreenTouch> ev; - ev.instance(); - ev->set_index(touch[i].id); - ev->set_pressed(false); - ev->set_position(touch[i].pos); - input->parse_input_event(ev); - touch.remove(i); - - break; - } - } - } break; - } -} - -void OS_Android::process_hover(int p_type, Point2 p_pos) { - // https://developer.android.com/reference/android/view/MotionEvent.html#ACTION_HOVER_ENTER - switch (p_type) { - case 7: // hover move - case 9: // hover enter - case 10: { // hover exit - Ref<InputEventMouseMotion> ev; - ev.instance(); - ev->set_position(p_pos); - ev->set_global_position(p_pos); - ev->set_relative(p_pos - hover_prev_pos); - input->parse_input_event(ev); - hover_prev_pos = p_pos; - } break; - } -} - -void OS_Android::process_double_tap(Point2 p_pos) { - Ref<InputEventMouseButton> ev; - ev.instance(); - ev->set_position(p_pos); - ev->set_global_position(p_pos); - ev->set_pressed(false); - ev->set_doubleclick(true); - input->parse_input_event(ev); -} - -void OS_Android::process_scroll(Point2 p_pos) { - Ref<InputEventPanGesture> ev; - ev.instance(); - ev->set_position(p_pos); - ev->set_delta(p_pos - scroll_prev_pos); - input->parse_input_event(ev); - scroll_prev_pos = p_pos; -} - -void OS_Android::process_accelerometer(const Vector3 &p_accelerometer) { - - input->set_accelerometer(p_accelerometer); -} - -void OS_Android::process_gravity(const Vector3 &p_gravity) { - - input->set_gravity(p_gravity); -} - -void OS_Android::process_magnetometer(const Vector3 &p_magnetometer) { - - input->set_magnetometer(p_magnetometer); -} - -void OS_Android::process_gyroscope(const Vector3 &p_gyroscope) { - - input->set_gyroscope(p_gyroscope); -} - -bool OS_Android::has_touchscreen_ui_hint() const { - - return true; -} - -bool OS_Android::has_virtual_keyboard() const { - - return true; -} - -int OS_Android::get_virtual_keyboard_height() const { - return godot_io_java->get_vk_height(); - - // ERR_PRINT("Cannot obtain virtual keyboard height."); - // return 0; -} - -void OS_Android::show_virtual_keyboard(const String &p_existing_text, const Rect2 &p_screen_rect, int p_max_input_length) { - - if (godot_io_java->has_vk()) { - godot_io_java->show_vk(p_existing_text, p_max_input_length); - } else { - - ERR_PRINT("Virtual keyboard not available"); - }; -} - -void OS_Android::hide_virtual_keyboard() { - - if (godot_io_java->has_vk()) { - - godot_io_java->hide_vk(); - } else { - - ERR_PRINT("Virtual keyboard not available"); - }; -} - -void OS_Android::init_video_mode(int p_video_width, int p_video_height) { - - default_videomode.width = p_video_width; - default_videomode.height = p_video_height; - default_videomode.fullscreen = true; - default_videomode.resizable = false; -} - void OS_Android::main_loop_request_go_back() { - - if (main_loop) - main_loop->notification(NOTIFICATION_WM_GO_BACK_REQUEST); -} - -void 
OS_Android::set_display_size(Size2 p_size) { - - default_videomode.width = p_size.x; - default_videomode.height = p_size.y; + DisplayServerAndroid::get_singleton()->send_window_event(DisplayServer::WINDOW_EVENT_GO_BACK_REQUEST); } Error OS_Android::shell_open(String p_uri) { @@ -597,26 +204,6 @@ String OS_Android::get_locale() const { return OS_Unix::get_locale(); } -void OS_Android::set_clipboard(const String &p_text) { - - // DO we really need the fallback to OS_Unix here?! - if (godot_java->has_set_clipboard()) { - godot_java->set_clipboard(p_text); - } else { - OS_Unix::set_clipboard(p_text); - } -} - -String OS_Android::get_clipboard() const { - - // DO we really need the fallback to OS_Unix here?! - if (godot_java->has_get_clipboard()) { - return godot_java->get_clipboard(); - } - - return OS_Unix::get_clipboard(); -} - String OS_Android::get_model_name() const { String model = godot_io_java->get_model(); @@ -626,11 +213,6 @@ String OS_Android::get_model_name() const { return OS_Unix::get_model_name(); } -int OS_Android::get_screen_dpi(int p_screen) const { - - return godot_io_java->get_screen_dpi(); -} - String OS_Android::get_user_data_dir() const { if (data_dir_cache != String()) @@ -662,11 +244,6 @@ String OS_Android::get_user_data_dir() const { return "."; } -void OS_Android::set_screen_orientation(ScreenOrientation p_orientation) { - - godot_io_java->set_screen_orientation(p_orientation); -} - String OS_Android::get_unique_id() const { String unique_id = godot_io_java->get_unique_id(); @@ -676,50 +253,46 @@ String OS_Android::get_unique_id() const { return OS::get_unique_id(); } -Error OS_Android::native_video_play(String p_path, float p_volume, String p_audio_track, String p_subtitle_track) { - // FIXME: Add support for volume, audio and subtitle tracks - - godot_io_java->play_video(p_path); - return OK; -} - -bool OS_Android::native_video_is_playing() const { - - return godot_io_java->is_video_playing(); -} - -void OS_Android::native_video_pause() { - - godot_io_java->pause_video(); -} - String OS_Android::get_system_dir(SystemDir p_dir) const { return godot_io_java->get_system_dir(p_dir); } -void OS_Android::native_video_stop() { +void OS_Android::set_display_size(const Size2i &p_size) { + display_size = p_size; +} - godot_io_java->stop_video(); +Size2i OS_Android::get_display_size() const { + return display_size; } void OS_Android::set_context_is_16_bits(bool p_is_16) { - +#if defined(OPENGL_ENABLED) //use_16bits_fbo = p_is_16; //if (rasterizer) // rasterizer->set_force_16_bits_fbo(p_is_16); +#endif } -void OS_Android::joy_connection_changed(int p_device, bool p_connected, String p_name) { - return input->joy_connection_changed(p_device, p_connected, p_name, ""); +void OS_Android::set_opengl_extensions(const char *p_gl_extensions) { +#if defined(OPENGL_ENABLED) + ERR_FAIL_COND(!p_gl_extensions); + gl_extensions = p_gl_extensions; +#endif } -bool OS_Android::is_joy_known(int p_device) { - return input->is_joy_mapped(p_device); +void OS_Android::set_native_window(ANativeWindow *p_native_window) { +#if defined(VULKAN_ENABLED) + native_window = p_native_window; +#endif } -String OS_Android::get_joy_guid(int p_device) const { - return input->get_joy_guid_remapped(p_device); +ANativeWindow *OS_Android::get_native_window() const { +#if defined(VULKAN_ENABLED) + return native_window; +#else + return nullptr; +#endif } void OS_Android::vibrate_handheld(int p_duration_ms) { @@ -747,19 +320,21 @@ bool OS_Android::_check_internal_feature_support(const String &p_feature) { } 
OS_Android::OS_Android(GodotJavaWrapper *p_godot_java, GodotIOJavaWrapper *p_godot_io_java, bool p_use_apk_expansion) { + display_size.width = 800; + display_size.height = 600; use_apk_expansion = p_use_apk_expansion; - default_videomode.width = 800; - default_videomode.height = 600; - default_videomode.fullscreen = true; - default_videomode.resizable = false; main_loop = nullptr; + +#if defined(OPENGL_ENABLED) gl_extensions = nullptr; - //rasterizer = nullptr; use_gl2 = false; +#endif - rendering_server = nullptr; +#if defined(VULKAN_ENABLED) + native_window = nullptr; +#endif godot_java = p_godot_java; godot_io_java = p_godot_io_java; @@ -769,6 +344,8 @@ OS_Android::OS_Android(GodotJavaWrapper *p_godot_java, GodotIOJavaWrapper *p_god _set_logger(memnew(CompositeLogger(loggers))); AudioDriverManager::add_driver(&audio_driver_android); + + DisplayServerAndroid::register_android_driver(); } OS_Android::~OS_Android() { diff --git a/platform/android/os_android.h b/platform/android/os_android.h index 8a91412ef6..cac7efaa88 100644 --- a/platform/android/os_android.h +++ b/platform/android/os_android.h @@ -33,78 +33,45 @@ #include "audio_driver_jandroid.h" #include "audio_driver_opensl.h" -#include "core/input/input_filter.h" #include "core/os/main_loop.h" #include "drivers/unix/os_unix.h" #include "servers/audio_server.h" -#include "servers/rendering/rasterizer.h" class GodotJavaWrapper; class GodotIOJavaWrapper; -class OS_Android : public OS_Unix { -public: - struct TouchPos { - int id; - Point2 pos; - }; - - enum { - JOY_EVENT_BUTTON = 0, - JOY_EVENT_AXIS = 1, - JOY_EVENT_HAT = 2 - }; - - struct JoypadEvent { - - int device; - int type; - int index; - bool pressed; - float value; - int hat; - }; +struct ANativeWindow; +class OS_Android : public OS_Unix { private: - Vector<TouchPos> touch; - Point2 hover_prev_pos; // needed to calculate the relative position on hover events - Point2 scroll_prev_pos; // needed to calculate the relative position on scroll events + Size2i display_size; - bool use_gl2; bool use_apk_expansion; +#if defined(OPENGL_ENABLED) bool use_16bits_fbo; + const char *gl_extensions; +#endif - RenderingServer *rendering_server; +#if defined(VULKAN_ENABLED) + ANativeWindow *native_window; +#endif mutable String data_dir_cache; //AudioDriverAndroid audio_driver_android; AudioDriverOpenSL audio_driver_android; - const char *gl_extensions; - - InputDefault *input; - VideoMode default_videomode; MainLoop *main_loop; GodotJavaWrapper *godot_java; GodotIOJavaWrapper *godot_io_java; - int video_driver_index; - public: - // functions used by main to initialize/deinitialize the OS - virtual int get_video_driver_count() const; - virtual const char *get_video_driver_name(int p_driver) const; - - virtual int get_audio_driver_count() const; - virtual const char *get_audio_driver_name(int p_driver) const; - - virtual int get_current_video_driver() const; - virtual void initialize_core(); - virtual Error initialize(const VideoMode &p_desired, int p_video_driver, int p_audio_driver); + virtual void initialize(); + + virtual void initialize_joypads(); virtual void set_main_loop(MainLoop *p_main_loop); virtual void delete_main_loop(); @@ -113,37 +80,19 @@ public: typedef int64_t ProcessID; - static OS *get_singleton(); + static OS_Android *get_singleton(); GodotJavaWrapper *get_godot_java(); GodotIOJavaWrapper *get_godot_io_java(); - virtual void alert(const String &p_alert, const String &p_title = "ALERT!"); virtual bool request_permission(const String &p_name); virtual bool 
request_permissions(); virtual Vector<String> get_granted_permissions() const; virtual Error open_dynamic_library(const String p_path, void *&p_library_handle, bool p_also_set_library_path = false); - virtual void set_mouse_show(bool p_show); - virtual void set_mouse_grab(bool p_grab); - virtual bool is_mouse_grab_enabled() const; - virtual Point2 get_mouse_position() const; - virtual int get_mouse_button_state() const; - virtual void set_window_title(const String &p_title); - - virtual void set_video_mode(const VideoMode &p_video_mode, int p_screen = 0); - virtual VideoMode get_video_mode(int p_screen = 0) const; - virtual void get_fullscreen_mode_list(List<VideoMode> *p_list, int p_screen = 0) const; - - virtual void set_keep_screen_on(bool p_enabled); - - virtual Size2 get_window_size() const; - virtual String get_name() const; virtual MainLoop *get_main_loop() const; - virtual bool can_draw() const; - void main_loop_begin(); bool main_loop_iterate(); void main_loop_request_go_back(); @@ -151,53 +100,25 @@ public: void main_loop_focusout(); void main_loop_focusin(); - virtual bool has_touchscreen_ui_hint() const; - - virtual bool has_virtual_keyboard() const; - virtual void show_virtual_keyboard(const String &p_existing_text, const Rect2 &p_screen_rect = Rect2(), int p_max_input_length = -1); - virtual void hide_virtual_keyboard(); - virtual int get_virtual_keyboard_height() const; - - void set_opengl_extensions(const char *p_gl_extensions); - void set_display_size(Size2 p_size); + void set_display_size(const Size2i &p_size); + Size2i get_display_size() const; void set_context_is_16_bits(bool p_is_16); + void set_opengl_extensions(const char *p_gl_extensions); - virtual void set_screen_orientation(ScreenOrientation p_orientation); + void set_native_window(ANativeWindow *p_native_window); + ANativeWindow *get_native_window() const; virtual Error shell_open(String p_uri); virtual String get_user_data_dir() const; virtual String get_resource_dir() const; virtual String get_locale() const; - virtual void set_clipboard(const String &p_text); - virtual String get_clipboard() const; virtual String get_model_name() const; - virtual int get_screen_dpi(int p_screen = 0) const; virtual String get_unique_id() const; virtual String get_system_dir(SystemDir p_dir) const; - void process_accelerometer(const Vector3 &p_accelerometer); - void process_gravity(const Vector3 &p_gravity); - void process_magnetometer(const Vector3 &p_magnetometer); - void process_gyroscope(const Vector3 &p_gyroscope); - void process_touch(int p_what, int p_pointer, const Vector<TouchPos> &p_points); - void process_hover(int p_type, Point2 p_pos); - void process_double_tap(Point2 p_pos); - void process_scroll(Point2 p_pos); - void process_joy_event(JoypadEvent p_event); - void process_event(Ref<InputEvent> p_event); - void init_video_mode(int p_video_width, int p_video_height); - - virtual Error native_video_play(String p_path, float p_volume, String p_audio_track, String p_subtitle_track); - virtual bool native_video_is_playing() const; - virtual void native_video_pause(); - virtual void native_video_stop(); - - virtual bool is_joy_known(int p_device); - virtual String get_joy_guid(int p_device) const; - void joy_connection_changed(int p_device, bool p_connected, String p_name); void vibrate_handheld(int p_duration_ms); virtual bool _check_internal_feature_support(const String &p_feature); diff --git a/platform/android/vulkan/vulkan_context_android.cpp b/platform/android/vulkan/vulkan_context_android.cpp new file mode 100644 
index 0000000000..5fb7a83da4 --- /dev/null +++ b/platform/android/vulkan/vulkan_context_android.cpp @@ -0,0 +1,60 @@ +/*************************************************************************/ +/* vulkan_context_android.cpp */ +/*************************************************************************/ +/* This file is part of: */ +/* GODOT ENGINE */ +/* https://godotengine.org */ +/*************************************************************************/ +/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */ +/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */ +/* */ +/* Permission is hereby granted, free of charge, to any person obtaining */ +/* a copy of this software and associated documentation files (the */ +/* "Software"), to deal in the Software without restriction, including */ +/* without limitation the rights to use, copy, modify, merge, publish, */ +/* distribute, sublicense, and/or sell copies of the Software, and to */ +/* permit persons to whom the Software is furnished to do so, subject to */ +/* the following conditions: */ +/* */ +/* The above copyright notice and this permission notice shall be */ +/* included in all copies or substantial portions of the Software. */ +/* */ +/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ +/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ +/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ +/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ +/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ +/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ +/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ +/*************************************************************************/ + +#include "vulkan_context_android.h" +#include <vulkan/vulkan_android.h> + +const char *VulkanContextAndroid::_get_platform_surface_extension() const { + return VK_KHR_ANDROID_SURFACE_EXTENSION_NAME; +} + +int VulkanContextAndroid::window_create(ANativeWindow *p_window, int p_width, int p_height) { + VkAndroidSurfaceCreateInfoKHR createInfo; + createInfo.sType = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR; + createInfo.pNext = nullptr; + createInfo.flags = 0; + createInfo.window = p_window; + + VkSurfaceKHR surface; + VkResult err = vkCreateAndroidSurfaceKHR(_get_instance(), &createInfo, nullptr, &surface); + if (err != VK_SUCCESS) { + ERR_FAIL_V_MSG(-1, "vkCreateAndroidSurfaceKHR failed with error " + itos(err)); + } + + return _window_create(DisplayServer::MAIN_WINDOW_ID, surface, p_width, p_height); +} + +VulkanContextAndroid::VulkanContextAndroid() { + // TODO: fix validation layers + use_validation_layers = false; +} + +VulkanContextAndroid::~VulkanContextAndroid() { +} diff --git a/platform/android/vulkan/vk_renderer_jni.cpp b/platform/android/vulkan/vulkan_context_android.h index 3026e7daad..7e698ada4f 100644 --- a/platform/android/vulkan/vk_renderer_jni.cpp +++ b/platform/android/vulkan/vulkan_context_android.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* vk_renderer_jni.cpp */ +/* vulkan_context_android.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,31 +28,22 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#include "vk_renderer_jni.h" +#ifndef VULKAN_CONTEXT_ANDROID_H +#define VULKAN_CONTEXT_ANDROID_H -extern "C" { +#include "drivers/vulkan/vulkan_context.h" -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkSurfaceCreated(JNIEnv *env, jobject obj, jobject j_surface) { - // TODO: complete -} +struct ANativeWindow; -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkSurfaceChanged(JNIEnv *env, jobject object, jobject j_surface, jint width, jint height) { - // TODO: complete -} +class VulkanContextAndroid : public VulkanContext { -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkResume(JNIEnv *env, jobject obj) { - // TODO: complete -} + virtual const char *_get_platform_surface_extension() const; -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkDrawFrame(JNIEnv *env, jobject obj) { - // TODO: complete -} +public: + int window_create(ANativeWindow *p_window, int p_width, int p_height); -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkPause(JNIEnv *env, jobject obj) { - // TODO: complete -} + VulkanContextAndroid(); + ~VulkanContextAndroid(); +}; -JNIEXPORT void JNICALL Java_org_godotengine_godot_vulkan_VkRenderer_nativeOnVkDestroy(JNIEnv *env, jobject obj) { - // TODO: complete -} -} +#endif // VULKAN_CONTEXT_ANDROID_H diff --git a/platform/linuxbsd/display_server_x11.cpp b/platform/linuxbsd/display_server_x11.cpp index 47497eb95f..6049dbf4d6 100644 --- a/platform/linuxbsd/display_server_x11.cpp +++ b/platform/linuxbsd/display_server_x11.cpp @@ -254,10 +254,10 @@ bool DisplayServerX11::_refresh_device_info() { bool absolute_mode = false; int resolution_x = 0; int resolution_y = 0; - int range_min_x = 0; - int range_min_y = 0; - int range_max_x = 0; - int range_max_y = 0; + double range_min_x = 0; + double range_min_y = 0; + double range_max_x = 0; + double range_max_y = 0; int pressure_resolution = 0; int tilt_resolution_x = 0; int tilt_resolution_y = 0; diff --git a/scene/3d/light_3d.cpp b/scene/3d/light_3d.cpp index c822b70a4b..2455d46e43 100644 --- a/scene/3d/light_3d.cpp +++ b/scene/3d/light_3d.cpp @@ -216,6 +216,14 @@ bool Light3D::is_editor_only() const { } void Light3D::_validate_property(PropertyInfo &property) const { + + if (get_light_type() == RS::LIGHT_DIRECTIONAL && property.name == "light_size") { + property.usage = 0; + } + + if (get_light_type() != RS::LIGHT_DIRECTIONAL && property.name == "light_angular_distance") { + property.usage = 0; + } } void Light3D::_bind_methods() { @@ -251,6 +259,8 @@ void Light3D::_bind_methods() { ADD_PROPERTY(PropertyInfo(Variant::COLOR, "light_color", PROPERTY_HINT_COLOR_NO_ALPHA), "set_color", "get_color"); ADD_PROPERTYI(PropertyInfo(Variant::FLOAT, "light_energy", PROPERTY_HINT_RANGE, "0,16,0.01,or_greater"), "set_param", "get_param", PARAM_ENERGY); ADD_PROPERTYI(PropertyInfo(Variant::FLOAT, "light_indirect_energy", PROPERTY_HINT_RANGE, "0,16,0.01,or_greater"), "set_param", "get_param", PARAM_INDIRECT_ENERGY); + ADD_PROPERTYI(PropertyInfo(Variant::FLOAT, "light_size", PROPERTY_HINT_RANGE, "0,64,0.01,or_greater"), "set_param", "get_param", PARAM_SIZE); + ADD_PROPERTYI(PropertyInfo(Variant::FLOAT, "light_angular_distance", PROPERTY_HINT_RANGE, "0,90,0.01"), "set_param", "get_param", PARAM_SIZE); ADD_PROPERTY(PropertyInfo(Variant::BOOL, "light_negative"), "set_negative", "is_negative"); 
ADD_PROPERTYI(PropertyInfo(Variant::FLOAT, "light_specular", PROPERTY_HINT_RANGE, "0,1,0.01"), "set_param", "get_param", PARAM_SPECULAR); ADD_PROPERTY(PropertyInfo(Variant::INT, "light_bake_mode", PROPERTY_HINT_ENUM, "Disable,Indirect,All"), "set_bake_mode", "get_bake_mode"); @@ -315,6 +325,7 @@ Light3D::Light3D(RenderingServer::LightType p_type) { set_param(PARAM_INDIRECT_ENERGY, 1); set_param(PARAM_SPECULAR, 0.5); set_param(PARAM_RANGE, 5); + set_param(PARAM_SIZE, 0); set_param(PARAM_ATTENUATION, 1); set_param(PARAM_SPOT_ANGLE, 45); set_param(PARAM_SPOT_ATTENUATION, 1); diff --git a/scene/3d/light_3d.h b/scene/3d/light_3d.h index ad2a1d5a84..21810e03dd 100644 --- a/scene/3d/light_3d.h +++ b/scene/3d/light_3d.h @@ -46,6 +46,7 @@ public: PARAM_INDIRECT_ENERGY = RS::LIGHT_PARAM_INDIRECT_ENERGY, PARAM_SPECULAR = RS::LIGHT_PARAM_SPECULAR, PARAM_RANGE = RS::LIGHT_PARAM_RANGE, + PARAM_SIZE = RS::LIGHT_PARAM_SIZE, PARAM_ATTENUATION = RS::LIGHT_PARAM_ATTENUATION, PARAM_SPOT_ANGLE = RS::LIGHT_PARAM_SPOT_ANGLE, PARAM_SPOT_ATTENUATION = RS::LIGHT_PARAM_SPOT_ATTENUATION, diff --git a/scene/3d/arvr_nodes.cpp b/scene/3d/xr_nodes.cpp index 537c094ceb..0373114e7d 100644 --- a/scene/3d/arvr_nodes.cpp +++ b/scene/3d/xr_nodes.cpp @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_nodes.cpp */ +/* xr_nodes.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,25 +28,25 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#include "arvr_nodes.h" +#include "xr_nodes.h" #include "core/input/input_filter.h" -#include "servers/arvr/arvr_interface.h" -#include "servers/arvr_server.h" +#include "servers/xr/xr_interface.h" +#include "servers/xr_server.h" //////////////////////////////////////////////////////////////////////////////////////////////////// -void ARVRCamera::_notification(int p_what) { +void XRCamera3D::_notification(int p_what) { switch (p_what) { case NOTIFICATION_ENTER_TREE: { - // need to find our ARVROrigin parent and let it know we're its camera! - ARVROrigin *origin = Object::cast_to<ARVROrigin>(get_parent()); + // need to find our XROrigin3D parent and let it know we're its camera! + XROrigin3D *origin = Object::cast_to<XROrigin3D>(get_parent()); if (origin != nullptr) { origin->set_tracked_camera(this); } }; break; case NOTIFICATION_EXIT_TREE: { - // need to find our ARVROrigin parent and let it know we're no longer its camera! - ARVROrigin *origin = Object::cast_to<ARVROrigin>(get_parent()); + // need to find our XROrigin3D parent and let it know we're no longer its camera! + XROrigin3D *origin = Object::cast_to<XROrigin3D>(get_parent()); if (origin != nullptr) { origin->clear_tracked_camera_if(this); } @@ -54,26 +54,26 @@ void ARVRCamera::_notification(int p_what) { }; }; -String ARVRCamera::get_configuration_warning() const { +String XRCamera3D::get_configuration_warning() const { if (!is_visible() || !is_inside_tree()) return String(); - // must be child node of ARVROrigin! - ARVROrigin *origin = Object::cast_to<ARVROrigin>(get_parent()); + // must be child node of XROrigin3D! 
+ XROrigin3D *origin = Object::cast_to<XROrigin3D>(get_parent()); if (origin == nullptr) { - return TTR("ARVRCamera must have an ARVROrigin node as its parent."); + return TTR("XRCamera3D must have an XROrigin3D node as its parent."); }; return String(); }; -Vector3 ARVRCamera::project_local_ray_normal(const Point2 &p_pos) const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, Vector3()); +Vector3 XRCamera3D::project_local_ray_normal(const Point2 &p_pos) const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, Vector3()); - Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface(); - if (arvr_interface.is_null()) { + Ref<XRInterface> xr_interface = xr_server->get_primary_interface(); + if (xr_interface.is_null()) { // we might be in the editor or have VR turned off, just call superclass return Camera3D::project_local_ray_normal(p_pos); } @@ -84,20 +84,20 @@ Vector3 ARVRCamera::project_local_ray_normal(const Point2 &p_pos) const { Vector2 cpos = get_viewport()->get_camera_coords(p_pos); Vector3 ray; - CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); + CameraMatrix cm = xr_interface->get_projection_for_eye(XRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); Vector2 screen_he = cm.get_viewport_half_extents(); ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_he.x, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_he.y, -get_znear()).normalized(); return ray; }; -Point2 ARVRCamera::unproject_position(const Vector3 &p_pos) const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, Vector2()); +Point2 XRCamera3D::unproject_position(const Vector3 &p_pos) const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, Vector2()); - Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface(); - if (arvr_interface.is_null()) { + Ref<XRInterface> xr_interface = xr_server->get_primary_interface(); + if (xr_interface.is_null()) { // we might be in the editor or have VR turned off, just call superclass return Camera3D::unproject_position(p_pos); } @@ -106,7 +106,7 @@ Point2 ARVRCamera::unproject_position(const Vector3 &p_pos) const { Size2 viewport_size = get_viewport()->get_visible_rect().size; - CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); + CameraMatrix cm = xr_interface->get_projection_for_eye(XRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); Plane p(get_camera_transform().xform_inv(p_pos), 1.0); @@ -120,13 +120,13 @@ Point2 ARVRCamera::unproject_position(const Vector3 &p_pos) const { return res; }; -Vector3 ARVRCamera::project_position(const Point2 &p_point, float p_z_depth) const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, Vector3()); +Vector3 XRCamera3D::project_position(const Point2 &p_point, float p_z_depth) const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, Vector3()); - Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface(); - if (arvr_interface.is_null()) { + Ref<XRInterface> xr_interface = xr_server->get_primary_interface(); + if (xr_interface.is_null()) { 
// we might be in the editor or have VR turned off, just call superclass return Camera3D::project_position(p_point, p_z_depth); } @@ -135,7 +135,7 @@ Vector3 ARVRCamera::project_position(const Point2 &p_point, float p_z_depth) con Size2 viewport_size = get_viewport()->get_visible_rect().size; - CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); + CameraMatrix cm = xr_interface->get_projection_for_eye(XRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); Vector2 vp_he = cm.get_viewport_half_extents(); @@ -149,13 +149,13 @@ Vector3 ARVRCamera::project_position(const Point2 &p_point, float p_z_depth) con return get_camera_transform().xform(p); }; -Vector<Plane> ARVRCamera::get_frustum() const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, Vector<Plane>()); +Vector<Plane> XRCamera3D::get_frustum() const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, Vector<Plane>()); - Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface(); - if (arvr_interface.is_null()) { + Ref<XRInterface> xr_interface = xr_server->get_primary_interface(); + if (xr_interface.is_null()) { // we might be in the editor or have VR turned off, just call superclass return Camera3D::get_frustum(); } @@ -163,21 +163,21 @@ Vector<Plane> ARVRCamera::get_frustum() const { ERR_FAIL_COND_V(!is_inside_world(), Vector<Plane>()); Size2 viewport_size = get_viewport()->get_visible_rect().size; - CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); + CameraMatrix cm = xr_interface->get_projection_for_eye(XRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar()); return cm.get_projection_planes(get_camera_transform()); }; -ARVRCamera::ARVRCamera(){ +XRCamera3D::XRCamera3D(){ // nothing to do here yet for now.. }; -ARVRCamera::~ARVRCamera(){ +XRCamera3D::~XRCamera3D(){ // nothing to do here yet for now.. 
}; //////////////////////////////////////////////////////////////////////////////////////////////////// -void ARVRController::_notification(int p_what) { +void XRController3D::_notification(int p_what) { switch (p_what) { case NOTIFICATION_ENTER_TREE: { set_process_internal(true); @@ -186,12 +186,12 @@ void ARVRController::_notification(int p_what) { set_process_internal(false); }; break; case NOTIFICATION_INTERNAL_PROCESS: { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); // find the tracker for our controller - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker == nullptr) { // this controller is currently turned off is_active = false; @@ -236,49 +236,49 @@ void ARVRController::_notification(int p_what) { }; }; -void ARVRController::_bind_methods() { - ClassDB::bind_method(D_METHOD("set_controller_id", "controller_id"), &ARVRController::set_controller_id); - ClassDB::bind_method(D_METHOD("get_controller_id"), &ARVRController::get_controller_id); +void XRController3D::_bind_methods() { + ClassDB::bind_method(D_METHOD("set_controller_id", "controller_id"), &XRController3D::set_controller_id); + ClassDB::bind_method(D_METHOD("get_controller_id"), &XRController3D::get_controller_id); ADD_PROPERTY(PropertyInfo(Variant::INT, "controller_id", PROPERTY_HINT_RANGE, "0,32,1"), "set_controller_id", "get_controller_id"); - ClassDB::bind_method(D_METHOD("get_controller_name"), &ARVRController::get_controller_name); + ClassDB::bind_method(D_METHOD("get_controller_name"), &XRController3D::get_controller_name); // passthroughs to information about our related joystick - ClassDB::bind_method(D_METHOD("get_joystick_id"), &ARVRController::get_joystick_id); - ClassDB::bind_method(D_METHOD("is_button_pressed", "button"), &ARVRController::is_button_pressed); - ClassDB::bind_method(D_METHOD("get_joystick_axis", "axis"), &ARVRController::get_joystick_axis); + ClassDB::bind_method(D_METHOD("get_joystick_id"), &XRController3D::get_joystick_id); + ClassDB::bind_method(D_METHOD("is_button_pressed", "button"), &XRController3D::is_button_pressed); + ClassDB::bind_method(D_METHOD("get_joystick_axis", "axis"), &XRController3D::get_joystick_axis); - ClassDB::bind_method(D_METHOD("get_is_active"), &ARVRController::get_is_active); - ClassDB::bind_method(D_METHOD("get_hand"), &ARVRController::get_hand); + ClassDB::bind_method(D_METHOD("get_is_active"), &XRController3D::get_is_active); + ClassDB::bind_method(D_METHOD("get_hand"), &XRController3D::get_hand); - ClassDB::bind_method(D_METHOD("get_rumble"), &ARVRController::get_rumble); - ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &ARVRController::set_rumble); + ClassDB::bind_method(D_METHOD("get_rumble"), &XRController3D::get_rumble); + ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &XRController3D::set_rumble); ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "rumble", PROPERTY_HINT_RANGE, "0.0,1.0,0.01"), "set_rumble", "get_rumble"); ADD_PROPERTY_DEFAULT("rumble", 0.0); - ClassDB::bind_method(D_METHOD("get_mesh"), &ARVRController::get_mesh); + ClassDB::bind_method(D_METHOD("get_mesh"), &XRController3D::get_mesh); ADD_SIGNAL(MethodInfo("button_pressed", PropertyInfo(Variant::INT, "button"))); 
ADD_SIGNAL(MethodInfo("button_release", PropertyInfo(Variant::INT, "button"))); ADD_SIGNAL(MethodInfo("mesh_updated", PropertyInfo(Variant::OBJECT, "mesh", PROPERTY_HINT_RESOURCE_TYPE, "Mesh"))); }; -void ARVRController::set_controller_id(int p_controller_id) { +void XRController3D::set_controller_id(int p_controller_id) { // We don't check any bounds here, this controller may not yet be active and just be a place holder until it is. // Note that setting this to 0 means this node is not bound to a controller yet. controller_id = p_controller_id; update_configuration_warning(); }; -int ARVRController::get_controller_id(void) const { +int XRController3D::get_controller_id(void) const { return controller_id; }; -String ARVRController::get_controller_name(void) const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, String()); +String XRController3D::get_controller_name(void) const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, String()); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker == nullptr) { return String("Not connected"); }; @@ -286,12 +286,12 @@ String ARVRController::get_controller_name(void) const { return tracker->get_name(); }; -int ARVRController::get_joystick_id() const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 0); +int XRController3D::get_joystick_id() const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 0); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker == nullptr) { // No tracker? no joystick id... 
(0 is our first joystick) return -1; @@ -300,7 +300,7 @@ int ARVRController::get_joystick_id() const { return tracker->get_joy_id(); }; -bool ARVRController::is_button_pressed(int p_button) const { +bool XRController3D::is_button_pressed(int p_button) const { int joy_id = get_joystick_id(); if (joy_id == -1) { return false; @@ -309,7 +309,7 @@ bool ARVRController::is_button_pressed(int p_button) const { return InputFilter::get_singleton()->is_joy_button_pressed(joy_id, p_button); }; -float ARVRController::get_joystick_axis(int p_axis) const { +float XRController3D::get_joystick_axis(int p_axis) const { int joy_id = get_joystick_id(); if (joy_id == -1) { return 0.0; @@ -318,12 +318,12 @@ float ARVRController::get_joystick_axis(int p_axis) const { return InputFilter::get_singleton()->get_joy_axis(joy_id, p_axis); }; -real_t ARVRController::get_rumble() const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 0.0); +real_t XRController3D::get_rumble() const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 0.0); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker == nullptr) { return 0.0; }; @@ -331,46 +331,46 @@ real_t ARVRController::get_rumble() const { return tracker->get_rumble(); }; -void ARVRController::set_rumble(real_t p_rumble) { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void XRController3D::set_rumble(real_t p_rumble) { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker != nullptr) { tracker->set_rumble(p_rumble); }; }; -Ref<Mesh> ARVRController::get_mesh() const { +Ref<Mesh> XRController3D::get_mesh() const { return mesh; } -bool ARVRController::get_is_active() const { +bool XRController3D::get_is_active() const { return is_active; }; -ARVRPositionalTracker::TrackerHand ARVRController::get_hand() const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, ARVRPositionalTracker::TRACKER_HAND_UNKNOWN); +XRPositionalTracker::TrackerHand XRController3D::get_hand() const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, XRPositionalTracker::TRACKER_HAND_UNKNOWN); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_CONTROLLER, controller_id); if (tracker == nullptr) { - return ARVRPositionalTracker::TRACKER_HAND_UNKNOWN; + return XRPositionalTracker::TRACKER_HAND_UNKNOWN; }; return tracker->get_hand(); }; -String ARVRController::get_configuration_warning() const { +String XRController3D::get_configuration_warning() const { if (!is_visible() || !is_inside_tree()) return String(); - // must be child node of ARVROrigin! - ARVROrigin *origin = Object::cast_to<ARVROrigin>(get_parent()); + // must be child node of XROrigin! 
+ XROrigin3D *origin = Object::cast_to<XROrigin3D>(get_parent()); if (origin == nullptr) { - return TTR("ARVRController must have an ARVROrigin node as its parent."); + return TTR("XRController3D must have an XROrigin3D node as its parent."); }; if (controller_id == 0) { @@ -380,19 +380,19 @@ String ARVRController::get_configuration_warning() const { return String(); }; -ARVRController::ARVRController() { +XRController3D::XRController3D() { controller_id = 1; is_active = true; button_states = 0; }; -ARVRController::~ARVRController(){ +XRController3D::~XRController3D(){ // nothing to do here yet for now.. }; //////////////////////////////////////////////////////////////////////////////////////////////////// -void ARVRAnchor::_notification(int p_what) { +void XRAnchor3D::_notification(int p_what) { switch (p_what) { case NOTIFICATION_ENTER_TREE: { set_process_internal(true); @@ -401,12 +401,12 @@ void ARVRAnchor::_notification(int p_what) { set_process_internal(false); }; break; case NOTIFICATION_INTERNAL_PROCESS: { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); // find the tracker for our anchor - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_ANCHOR, anchor_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_ANCHOR, anchor_id); if (tracker == nullptr) { // this anchor is currently not available is_active = false; @@ -415,7 +415,7 @@ void ARVRAnchor::_notification(int p_what) { Transform transform; // we'll need our world_scale - real_t world_scale = arvr_server->get_world_scale(); + real_t world_scale = xr_server->get_world_scale(); // get our info from our tracker transform.basis = tracker->get_orientation(); @@ -427,7 +427,7 @@ void ARVRAnchor::_notification(int p_what) { transform.basis.orthonormalize(); // apply our reference frame and set our transform - set_transform(arvr_server->get_reference_frame() * transform); + set_transform(xr_server->get_reference_frame() * transform); // check for an updated mesh Ref<Mesh> trackerMesh = tracker->get_mesh(); @@ -442,43 +442,43 @@ void ARVRAnchor::_notification(int p_what) { }; }; -void ARVRAnchor::_bind_methods() { +void XRAnchor3D::_bind_methods() { - ClassDB::bind_method(D_METHOD("set_anchor_id", "anchor_id"), &ARVRAnchor::set_anchor_id); - ClassDB::bind_method(D_METHOD("get_anchor_id"), &ARVRAnchor::get_anchor_id); + ClassDB::bind_method(D_METHOD("set_anchor_id", "anchor_id"), &XRAnchor3D::set_anchor_id); + ClassDB::bind_method(D_METHOD("get_anchor_id"), &XRAnchor3D::get_anchor_id); ADD_PROPERTY(PropertyInfo(Variant::INT, "anchor_id", PROPERTY_HINT_RANGE, "0,32,1"), "set_anchor_id", "get_anchor_id"); - ClassDB::bind_method(D_METHOD("get_anchor_name"), &ARVRAnchor::get_anchor_name); + ClassDB::bind_method(D_METHOD("get_anchor_name"), &XRAnchor3D::get_anchor_name); - ClassDB::bind_method(D_METHOD("get_is_active"), &ARVRAnchor::get_is_active); - ClassDB::bind_method(D_METHOD("get_size"), &ARVRAnchor::get_size); + ClassDB::bind_method(D_METHOD("get_is_active"), &XRAnchor3D::get_is_active); + ClassDB::bind_method(D_METHOD("get_size"), &XRAnchor3D::get_size); - ClassDB::bind_method(D_METHOD("get_plane"), &ARVRAnchor::get_plane); + ClassDB::bind_method(D_METHOD("get_plane"), &XRAnchor3D::get_plane); - ClassDB::bind_method(D_METHOD("get_mesh"), &ARVRAnchor::get_mesh); + ClassDB::bind_method(D_METHOD("get_mesh"), 
&XRAnchor3D::get_mesh); ADD_SIGNAL(MethodInfo("mesh_updated", PropertyInfo(Variant::OBJECT, "mesh", PROPERTY_HINT_RESOURCE_TYPE, "Mesh"))); }; -void ARVRAnchor::set_anchor_id(int p_anchor_id) { +void XRAnchor3D::set_anchor_id(int p_anchor_id) { // We don't check any bounds here, this anchor may not yet be active and just be a place holder until it is. // Note that setting this to 0 means this node is not bound to an anchor yet. anchor_id = p_anchor_id; update_configuration_warning(); }; -int ARVRAnchor::get_anchor_id(void) const { +int XRAnchor3D::get_anchor_id(void) const { return anchor_id; }; -Vector3 ARVRAnchor::get_size() const { +Vector3 XRAnchor3D::get_size() const { return size; }; -String ARVRAnchor::get_anchor_name(void) const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, String()); +String XRAnchor3D::get_anchor_name(void) const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, String()); - ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_ANCHOR, anchor_id); + XRPositionalTracker *tracker = xr_server->find_by_type_and_id(XRServer::TRACKER_ANCHOR, anchor_id); if (tracker == nullptr) { return String("Not connected"); }; @@ -486,18 +486,18 @@ String ARVRAnchor::get_anchor_name(void) const { return tracker->get_name(); }; -bool ARVRAnchor::get_is_active() const { +bool XRAnchor3D::get_is_active() const { return is_active; }; -String ARVRAnchor::get_configuration_warning() const { +String XRAnchor3D::get_configuration_warning() const { if (!is_visible() || !is_inside_tree()) return String(); - // must be child node of ARVROrigin! - ARVROrigin *origin = Object::cast_to<ARVROrigin>(get_parent()); + // must be child node of XROrigin3D! + XROrigin3D *origin = Object::cast_to<XROrigin3D>(get_parent()); if (origin == nullptr) { - return TTR("ARVRAnchor must have an ARVROrigin node as its parent."); + return TTR("XRAnchor3D must have an XROrigin3D node as its parent."); }; if (anchor_id == 0) { @@ -507,7 +507,7 @@ String ARVRAnchor::get_configuration_warning() const { return String(); }; -Plane ARVRAnchor::get_plane() const { +Plane XRAnchor3D::get_plane() const { Vector3 location = get_translation(); Basis orientation = get_transform().basis; @@ -516,67 +516,67 @@ Plane ARVRAnchor::get_plane() const { return plane; }; -Ref<Mesh> ARVRAnchor::get_mesh() const { +Ref<Mesh> XRAnchor3D::get_mesh() const { return mesh; } -ARVRAnchor::ARVRAnchor() { +XRAnchor3D::XRAnchor3D() { anchor_id = 1; is_active = true; }; -ARVRAnchor::~ARVRAnchor(){ +XRAnchor3D::~XRAnchor3D(){ // nothing to do here yet for now.. 
}; //////////////////////////////////////////////////////////////////////////////////////////////////// -String ARVROrigin::get_configuration_warning() const { +String XROrigin3D::get_configuration_warning() const { if (!is_visible() || !is_inside_tree()) return String(); if (tracked_camera == nullptr) - return TTR("ARVROrigin requires an ARVRCamera child node."); + return TTR("XROrigin3D requires an XRCamera3D child node."); return String(); }; -void ARVROrigin::_bind_methods() { - ClassDB::bind_method(D_METHOD("set_world_scale", "world_scale"), &ARVROrigin::set_world_scale); - ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVROrigin::get_world_scale); +void XROrigin3D::_bind_methods() { + ClassDB::bind_method(D_METHOD("set_world_scale", "world_scale"), &XROrigin3D::set_world_scale); + ClassDB::bind_method(D_METHOD("get_world_scale"), &XROrigin3D::get_world_scale); ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "world_scale"), "set_world_scale", "get_world_scale"); }; -void ARVROrigin::set_tracked_camera(ARVRCamera *p_tracked_camera) { +void XROrigin3D::set_tracked_camera(XRCamera3D *p_tracked_camera) { tracked_camera = p_tracked_camera; }; -void ARVROrigin::clear_tracked_camera_if(ARVRCamera *p_tracked_camera) { +void XROrigin3D::clear_tracked_camera_if(XRCamera3D *p_tracked_camera) { if (tracked_camera == p_tracked_camera) { tracked_camera = nullptr; }; }; -float ARVROrigin::get_world_scale() const { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, 1.0); +float XROrigin3D::get_world_scale() const { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, 1.0); - return arvr_server->get_world_scale(); + return xr_server->get_world_scale(); }; -void ARVROrigin::set_world_scale(float p_world_scale) { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void XROrigin3D::set_world_scale(float p_world_scale) { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); - arvr_server->set_world_scale(p_world_scale); + xr_server->set_world_scale(p_world_scale); }; -void ARVROrigin::_notification(int p_what) { - // get our ARVRServer - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void XROrigin3D::_notification(int p_what) { + // get our XRServer + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); switch (p_what) { case NOTIFICATION_ENTER_TREE: { @@ -587,13 +587,13 @@ void ARVROrigin::_notification(int p_what) { }; break; case NOTIFICATION_INTERNAL_PROCESS: { // set our world origin to our node transform - arvr_server->set_world_origin(get_global_transform()); + xr_server->set_world_origin(get_global_transform()); // check if we have a primary interface - Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface(); - if (arvr_interface.is_valid() && tracked_camera != nullptr) { + Ref<XRInterface> xr_interface = xr_server->get_primary_interface(); + if (xr_interface.is_valid() && tracked_camera != nullptr) { // get our positioning transform for our headset - Transform t = arvr_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, Transform()); + Transform t = xr_interface->get_transform_for_eye(XRInterface::EYE_MONO, Transform()); // now apply this to our camera tracked_camera->set_transform(t); @@ -603,19 +603,19 @@ void ARVROrigin::_notification(int p_what) { break; }; - // send our notification to all 
active ARVR interfaces, they may need to react to it also - for (int i = 0; i < arvr_server->get_interface_count(); i++) { - Ref<ARVRInterface> interface = arvr_server->get_interface(i); + // send our notification to all active XE interfaces, they may need to react to it also + for (int i = 0; i < xr_server->get_interface_count(); i++) { + Ref<XRInterface> interface = xr_server->get_interface(i); if (interface.is_valid() && interface->is_initialized()) { interface->notification(p_what); } } }; -ARVROrigin::ARVROrigin() { +XROrigin3D::XROrigin3D() { tracked_camera = nullptr; }; -ARVROrigin::~ARVROrigin(){ +XROrigin3D::~XROrigin3D(){ // nothing to do here yet for now.. }; diff --git a/scene/3d/arvr_nodes.h b/scene/3d/xr_nodes.h index bc5df2e174..a2f16545d1 100644 --- a/scene/3d/arvr_nodes.h +++ b/scene/3d/xr_nodes.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_nodes.h */ +/* xr_nodes.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,24 +28,24 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_NODES_H -#define ARVR_NODES_H +#ifndef XR_NODES_H +#define XR_NODES_H #include "scene/3d/camera_3d.h" #include "scene/3d/node_3d.h" #include "scene/resources/mesh.h" -#include "servers/arvr/arvr_positional_tracker.h" +#include "servers/xr/xr_positional_tracker.h" /** @author Bastiaan Olij <mux213@gmail.com> **/ /* - ARVRCamera is a subclass of camera which will register itself with its parent ARVROrigin and as a result is automatically positioned + XRCamera is a subclass of camera which will register itself with its parent XROrigin and as a result is automatically positioned */ -class ARVRCamera : public Camera3D { +class XRCamera3D : public Camera3D { - GDCLASS(ARVRCamera, Camera3D); + GDCLASS(XRCamera3D, Camera3D); protected: void _notification(int p_what); @@ -58,19 +58,19 @@ public: virtual Vector3 project_position(const Point2 &p_point, float p_z_depth) const; virtual Vector<Plane> get_frustum() const; - ARVRCamera(); - ~ARVRCamera(); + XRCamera3D(); + ~XRCamera3D(); }; /* - ARVRController is a helper node that automatically updates its position based on tracker data. + XRController3D is a helper node that automatically updates its position based on tracker data. - It must be a child node of our ARVROrigin node + It must be a child node of our XROrigin node */ -class ARVRController : public Node3D { +class XRController3D : public Node3D { - GDCLASS(ARVRController, Node3D); + GDCLASS(XRController3D, Node3D); private: int controller_id; @@ -95,23 +95,23 @@ public: void set_rumble(real_t p_rumble); bool get_is_active() const; - ARVRPositionalTracker::TrackerHand get_hand() const; + XRPositionalTracker::TrackerHand get_hand() const; Ref<Mesh> get_mesh(void) const; String get_configuration_warning() const; - ARVRController(); - ~ARVRController(); + XRController3D(); + ~XRController3D(); }; /* - ARVRAnchor is a helper node that automatically updates its position based on anchor data, it represents a real world location. - It must be a child node of our ARVROrigin node + XRAnchor3D is a helper node that automatically updates its position based on anchor data, it represents a real world location. 
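Editorial aside on the renamed nodes declared in this header: at runtime XROrigin3D pushes its global transform to the XRServer as the world origin and positions the tracked XRCamera3D from the primary interface, as its _notification handler earlier in this patch does. A minimal sketch of that per-frame flow, using only calls visible in the diff (the wrapper function itself is hypothetical):

	void update_origin_and_camera(XROrigin3D *p_origin, XRCamera3D *p_camera) {
		XRServer *xr_server = XRServer::get_singleton();
		ERR_FAIL_NULL(xr_server);

		// The origin node's transform becomes the reference frame all trackers use.
		xr_server->set_world_origin(p_origin->get_global_transform());

		// The primary interface supplies the fresh head pose for this frame.
		Ref<XRInterface> xr_interface = xr_server->get_primary_interface();
		if (xr_interface.is_valid() && p_camera != nullptr) {
			Transform t = xr_interface->get_transform_for_eye(XRInterface::EYE_MONO, Transform());
			p_camera->set_transform(t);
		}
	}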
+ It must be a child node of our XROrigin3D node */ -class ARVRAnchor : public Node3D { - GDCLASS(ARVRAnchor, Node3D); +class XRAnchor3D : public Node3D { + GDCLASS(XRAnchor3D, Node3D); private: int anchor_id; @@ -137,24 +137,24 @@ public: String get_configuration_warning() const; - ARVRAnchor(); - ~ARVRAnchor(); + XRAnchor3D(); + ~XRAnchor3D(); }; /* - ARVROrigin is special spatial node that acts as our origin point mapping our real world center of our tracking volume into our virtual world. + XROrigin3D is special spatial node that acts as our origin point mapping our real world center of our tracking volume into our virtual world. It is this point that you will move around the world as the player 'moves while standing still', i.e. the player moves through teleporting or controller inputs as opposed to physically moving. Our camera and controllers will always be child nodes and thus place relative to this origin point. - This node will automatically locate any camera child nodes and update its position while our ARVRController node will handle tracked controllers. + This node will automatically locate any camera child nodes and update its position while our XRController3D node will handle tracked controllers. */ -class ARVROrigin : public Node3D { +class XROrigin3D : public Node3D { - GDCLASS(ARVROrigin, Node3D); + GDCLASS(XROrigin3D, Node3D); private: - ARVRCamera *tracked_camera; + XRCamera3D *tracked_camera; protected: void _notification(int p_what); @@ -163,14 +163,14 @@ protected: public: String get_configuration_warning() const; - void set_tracked_camera(ARVRCamera *p_tracked_camera); - void clear_tracked_camera_if(ARVRCamera *p_tracked_camera); + void set_tracked_camera(XRCamera3D *p_tracked_camera); + void clear_tracked_camera_if(XRCamera3D *p_tracked_camera); float get_world_scale() const; void set_world_scale(float p_world_scale); - ARVROrigin(); - ~ARVROrigin(); + XROrigin3D(); + ~XROrigin3D(); }; -#endif /* ARVR_NODES_H */ +#endif /* XR_NODES_H */ diff --git a/scene/gui/tree.cpp b/scene/gui/tree.cpp index a7acaae8df..509a52d36a 100644 --- a/scene/gui/tree.cpp +++ b/scene/gui/tree.cpp @@ -3045,13 +3045,6 @@ void Tree::_notification(int p_what) { draw_item(Point2(), draw_ofs, draw_size, root); } - int ofs = 0; - - for (int i = 0; i < (columns.size() - 1 - 1); i++) { - - ofs += get_column_width(i); - } - if (show_column_titles) { //title buttons diff --git a/scene/main/viewport.cpp b/scene/main/viewport.cpp index ed4bb05ac7..1cfc3b0260 100644 --- a/scene/main/viewport.cpp +++ b/scene/main/viewport.cpp @@ -3615,13 +3615,14 @@ Viewport::~Viewport() { ///////////////////////////////// -void SubViewport::set_use_arvr(bool p_use_arvr) { - arvr = p_use_arvr; +void SubViewport::set_use_xr(bool p_use_xr) { + xr = p_use_xr; - RS::get_singleton()->viewport_set_use_arvr(get_viewport_rid(), arvr); + RS::get_singleton()->viewport_set_use_xr(get_viewport_rid(), xr); } -bool SubViewport::is_using_arvr() { - return arvr; + +bool SubViewport::is_using_xr() { + return xr; } void SubViewport::set_size(const Size2i &p_size) { @@ -3701,8 +3702,8 @@ void SubViewport::_notification(int p_what) { } void SubViewport::_bind_methods() { - ClassDB::bind_method(D_METHOD("set_use_arvr", "use"), &SubViewport::set_use_arvr); - ClassDB::bind_method(D_METHOD("is_using_arvr"), &SubViewport::is_using_arvr); + ClassDB::bind_method(D_METHOD("set_use_xr", "use"), &SubViewport::set_use_xr); + ClassDB::bind_method(D_METHOD("is_using_xr"), &SubViewport::is_using_xr); ClassDB::bind_method(D_METHOD("set_size", "size"), 
&SubViewport::set_size); ClassDB::bind_method(D_METHOD("get_size"), &SubViewport::get_size); @@ -3719,7 +3720,7 @@ void SubViewport::_bind_methods() { ClassDB::bind_method(D_METHOD("set_clear_mode", "mode"), &SubViewport::set_clear_mode); ClassDB::bind_method(D_METHOD("get_clear_mode"), &SubViewport::get_clear_mode); - ADD_PROPERTY(PropertyInfo(Variant::BOOL, "arvr"), "set_use_arvr", "is_using_arvr"); + ADD_PROPERTY(PropertyInfo(Variant::BOOL, "xr"), "set_use_xr", "is_using_xr"); ADD_PROPERTY(PropertyInfo(Variant::VECTOR2, "size"), "set_size", "get_size"); ADD_PROPERTY(PropertyInfo(Variant::VECTOR2, "size_2d_override"), "set_size_2d_override", "get_size_2d_override"); ADD_PROPERTY(PropertyInfo(Variant::BOOL, "size_2d_override_stretch"), "set_size_2d_override_stretch", "is_size_2d_override_stretch_enabled"); @@ -3739,7 +3740,7 @@ void SubViewport::_bind_methods() { } SubViewport::SubViewport() { - arvr = false; + xr = false; size_2d_override_stretch = false; update_mode = UPDATE_WHEN_VISIBLE; clear_mode = CLEAR_MODE_ALWAYS; diff --git a/scene/main/viewport.h b/scene/main/viewport.h index 646ba3c3a1..ab3987d16d 100644 --- a/scene/main/viewport.h +++ b/scene/main/viewport.h @@ -590,7 +590,7 @@ public: private: UpdateMode update_mode; ClearMode clear_mode; - bool arvr; + bool xr; bool size_2d_override_stretch; protected: @@ -606,8 +606,8 @@ public: void set_size_2d_override(const Size2i &p_size); Size2i get_size_2d_override() const; - void set_use_arvr(bool p_use_arvr); - bool is_using_arvr(); + void set_use_xr(bool p_use_xr); + bool is_using_xr(); void set_size_2d_override_stretch(bool p_enable); bool is_size_2d_override_stretch_enabled() const; diff --git a/scene/register_scene_types.cpp b/scene/register_scene_types.cpp index 94162cbe4e..035d26b3e4 100644 --- a/scene/register_scene_types.cpp +++ b/scene/register_scene_types.cpp @@ -179,7 +179,6 @@ #ifndef _3D_DISABLED #include "scene/3d/area_3d.h" -#include "scene/3d/arvr_nodes.h" #include "scene/3d/audio_stream_player_3d.h" #include "scene/3d/baked_lightmap.h" #include "scene/3d/bone_attachment_3d.h" @@ -213,6 +212,7 @@ #include "scene/3d/vehicle_body_3d.h" #include "scene/3d/visibility_notifier_3d.h" #include "scene/3d/world_environment.h" +#include "scene/3d/xr_nodes.h" #include "scene/resources/environment.h" #include "scene/resources/mesh_library.h" #endif @@ -410,10 +410,10 @@ void register_scene_types() { ClassDB::register_class<Camera3D>(); ClassDB::register_class<ClippedCamera3D>(); ClassDB::register_class<Listener3D>(); - ClassDB::register_class<ARVRCamera>(); - ClassDB::register_class<ARVRController>(); - ClassDB::register_class<ARVRAnchor>(); - ClassDB::register_class<ARVROrigin>(); + ClassDB::register_class<XRCamera3D>(); + ClassDB::register_class<XRController3D>(); + ClassDB::register_class<XRAnchor3D>(); + ClassDB::register_class<XROrigin3D>(); ClassDB::register_class<MeshInstance3D>(); ClassDB::register_class<ImmediateGeometry3D>(); ClassDB::register_virtual_class<SpriteBase3D>(); @@ -853,6 +853,10 @@ void register_scene_types() { ClassDB::add_compatibility_class("World", "World3D"); ClassDB::add_compatibility_class("ProceduralSky", "Sky"); ClassDB::add_compatibility_class("PanoramaSky", "Sky"); + ClassDB::add_compatibility_class("ARVRCamera", "XRCamera3D"); + ClassDB::add_compatibility_class("ARVROrigin", "XROrigin3D"); + ClassDB::add_compatibility_class("ARVRController", "XRController3D"); + ClassDB::add_compatibility_class("ARVRAnchor", "XRAnchor3D"); #endif diff --git a/servers/SCsub b/servers/SCsub index 
7080a110da..121990f2e1 100644 --- a/servers/SCsub +++ b/servers/SCsub @@ -5,7 +5,7 @@ Import("env") env.servers_sources = [] env.add_source_files(env.servers_sources, "*.cpp") -SConscript("arvr/SCsub") +SConscript("xr/SCsub") SConscript("camera/SCsub") SConscript("physics_3d/SCsub") SConscript("physics_2d/SCsub") diff --git a/servers/display_server.h b/servers/display_server.h index 1956bcafca..93db7ef844 100644 --- a/servers/display_server.h +++ b/servers/display_server.h @@ -180,7 +180,7 @@ public: }; virtual void screen_set_orientation(ScreenOrientation p_orientation, int p_screen = SCREEN_OF_MAIN_WINDOW); - ScreenOrientation screen_get_orientation(int p_screen = SCREEN_OF_MAIN_WINDOW) const; + virtual ScreenOrientation screen_get_orientation(int p_screen = SCREEN_OF_MAIN_WINDOW) const; virtual void screen_set_keep_on(bool p_enable); //disable screensaver virtual bool screen_is_kept_on() const; diff --git a/servers/register_server_types.cpp b/servers/register_server_types.cpp index 64b48bea50..dadd26dade 100644 --- a/servers/register_server_types.cpp +++ b/servers/register_server_types.cpp @@ -33,9 +33,6 @@ #include "core/engine.h" #include "core/project_settings.h" -#include "arvr/arvr_interface.h" -#include "arvr/arvr_positional_tracker.h" -#include "arvr_server.h" #include "audio/audio_effect.h" #include "audio/audio_stream.h" #include "audio/effects/audio_effect_amplify.h" @@ -67,6 +64,9 @@ #include "physics_server_3d.h" #include "rendering_server.h" #include "servers/rendering/shader_types.h" +#include "xr/xr_interface.h" +#include "xr/xr_positional_tracker.h" +#include "xr_server.h" ShaderTypes *shader_types = nullptr; @@ -102,11 +102,15 @@ void register_server_types() { ClassDB::register_class<AudioServer>(); ClassDB::register_virtual_class<PhysicsServer3D>(); ClassDB::register_virtual_class<PhysicsServer2D>(); - ClassDB::register_class<ARVRServer>(); + ClassDB::register_class<XRServer>(); ClassDB::register_class<CameraServer>(); - ClassDB::register_virtual_class<ARVRInterface>(); - ClassDB::register_class<ARVRPositionalTracker>(); + ClassDB::register_virtual_class<XRInterface>(); + ClassDB::register_class<XRPositionalTracker>(); + + ClassDB::add_compatibility_class("ARVRServer", "XRServer"); + ClassDB::add_compatibility_class("ARVRInterface", "XRInterface"); + ClassDB::add_compatibility_class("ARVRPositionalTracker", "XRPositionalTracker"); ClassDB::register_virtual_class<AudioStream>(); ClassDB::register_virtual_class<AudioStreamPlayback>(); @@ -198,6 +202,6 @@ void register_server_singletons() { Engine::get_singleton()->add_singleton(Engine::Singleton("PhysicsServer3D", PhysicsServer3D::get_singleton())); Engine::get_singleton()->add_singleton(Engine::Singleton("NavigationServer2D", NavigationServer2D::get_singleton_mut())); Engine::get_singleton()->add_singleton(Engine::Singleton("NavigationServer3D", NavigationServer3D::get_singleton_mut())); - Engine::get_singleton()->add_singleton(Engine::Singleton("ARVRServer", ARVRServer::get_singleton())); + Engine::get_singleton()->add_singleton(Engine::Singleton("XRServer", XRServer::get_singleton())); Engine::get_singleton()->add_singleton(Engine::Singleton("CameraServer", CameraServer::get_singleton())); } diff --git a/servers/rendering/rasterizer.h b/servers/rendering/rasterizer.h index 5013b38f3f..cf0afe6097 100644 --- a/servers/rendering/rasterizer.h +++ b/servers/rendering/rasterizer.h @@ -231,7 +231,7 @@ public: virtual RID light_instance_create(RID p_light) = 0; virtual void light_instance_set_transform(RID p_light_instance, 
const Transform &p_transform) = 0; - virtual void light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale = 1.0) = 0; + virtual void light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale = 1.0, float p_range_begin = 0, const Vector2 &p_uv_scale = Vector2()) = 0; virtual void light_instance_mark_visible(RID p_light_instance) = 0; virtual bool light_instances_can_render_shadow_cube() const { return true; diff --git a/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.cpp b/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.cpp index 8d9b352a85..ec05c9e964 100644 --- a/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.cpp +++ b/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.cpp @@ -965,6 +965,7 @@ void RasterizerSceneHighEndRD::_setup_environment(RID p_environment, const Camer scene_state.ubo.shadow_filter_mode = shadow_filter_get(); scene_state.ubo.pancake_shadows = p_pancake_shadows; + scene_state.ubo.shadow_blocker_count = 16; scene_state.ubo.screen_pixel_size[0] = p_screen_pixel_size.x; scene_state.ubo.screen_pixel_size[1] = p_screen_pixel_size.y; @@ -1484,6 +1485,10 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig light_data.specular = storage->light_get_param(base, RS::LIGHT_PARAM_SPECULAR); light_data.mask = storage->light_get_cull_mask(base); + float size = storage->light_get_param(base, RS::LIGHT_PARAM_SIZE); + + light_data.size = 1.0 - Math::cos(Math::deg2rad(size)); //angle to cosine offset + Color shadow_col = storage->light_get_shadow_color(base).to_linear(); if (get_debug_draw_mode() == RS::VIEWPORT_DEBUG_DRAW_PSSM_SPLITS) { @@ -1551,12 +1556,44 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig light_data.shadow_normal_bias[j] = storage->light_get_param(base, RS::LIGHT_PARAM_SHADOW_NORMAL_BIAS) * light_instance_get_directional_shadow_texel_size(li, j); light_data.shadow_transmittance_bias[j] = storage->light_get_transmittance_bias(base) * bias_scale; light_data.shadow_transmittance_z_scale[j] = light_instance_get_shadow_range(li, j); + light_data.shadow_range_begin[j] = light_instance_get_shadow_range_begin(li, j); store_camera(shadow_mtx, light_data.shadow_matrices[j]); + + Vector2 uv_scale = light_instance_get_shadow_uv_scale(li, j); + uv_scale *= atlas_rect.size; //adapt to atlas size + switch (j) { + case 0: { + light_data.uv_scale1[0] = uv_scale.x; + light_data.uv_scale1[1] = uv_scale.y; + } break; + case 1: { + light_data.uv_scale2[0] = uv_scale.x; + light_data.uv_scale2[1] = uv_scale.y; + } break; + case 2: { + light_data.uv_scale3[0] = uv_scale.x; + light_data.uv_scale3[1] = uv_scale.y; + } break; + case 3: { + light_data.uv_scale4[0] = uv_scale.x; + light_data.uv_scale4[1] = uv_scale.y; + } break; + } } float fade_start = storage->light_get_param(base, RS::LIGHT_PARAM_SHADOW_FADE_START); light_data.fade_from = -light_data.shadow_split_offsets[3] * MIN(fade_start, 0.999); //using 1.0 would break smoothstep light_data.fade_to = -light_data.shadow_split_offsets[3]; + + float softshadow_angle = storage->light_get_param(base, RS::LIGHT_PARAM_SIZE); + if (softshadow_angle > 0.0) { + // I know tan(0) is 0, but let's not risk it with numerical precision. 
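A short worked example (editorial, not from the patch) of how a directional light's angular size feeds the two values set just above; the 0.5 degree figure is only an illustration, roughly the sun's angular diameter, and the Math helpers are the same ones the patch uses:

	float size_deg = 0.5; // illustrative angular diameter, in degrees
	// Wrap term passed to light_compute() as "A": offsets NdotL/NdotH so the
	// lit and specular areas grow slightly with the light's apparent size.
	float size_A = 1.0 - Math::cos(Math::deg2rad(size_deg)); // ~3.8e-5 for 0.5 deg
	// Penumbra growth per unit of depth behind the first blocker, used to
	// scale the PCSS search radius in the shadow map.
	float softshadow_angle = Math::tan(Math::deg2rad(size_deg)); // ~0.0087 for 0.5 deg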
+ // technically this will keep expanding until reaching the sun, but all we care + // is expand until we reach the radius of the near plane (there can't be more occluders than that) + light_data.softshadow_angle = Math::tan(Math::deg2rad(softshadow_angle)); + } else { + light_data.softshadow_angle = 0; + } } // Copy to SkyDirectionalLightData @@ -1619,6 +1656,10 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig light_data.direction[1] = direction.y; light_data.direction[2] = direction.z; + float size = storage->light_get_param(base, RS::LIGHT_PARAM_SIZE); + + light_data.size = size; + light_data.cone_attenuation_angle[0] = Math::make_half_float(storage->light_get_param(base, RS::LIGHT_PARAM_SPOT_ATTENUATION)); float spot_angle = storage->light_get_param(base, RS::LIGHT_PARAM_SPOT_ANGLE); light_data.cone_attenuation_angle[1] = Math::make_half_float(Math::cos(Math::deg2rad(spot_angle))); @@ -1646,6 +1687,7 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig shadow_texel_size *= light_instance_get_shadow_texel_size(li, p_shadow_atlas); light_data.shadow_normal_bias = storage->light_get_param(base, RS::LIGHT_PARAM_SHADOW_NORMAL_BIAS) * shadow_texel_size; + } else { //omni light_data.shadow_bias = storage->light_get_param(base, RS::LIGHT_PARAM_SHADOW_BIAS) * radius / 10.0; float shadow_texel_size = light_instance_get_shadow_texel_size(li, p_shadow_atlas); @@ -1656,18 +1698,31 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig Rect2 rect = light_instance_get_shadow_atlas_rect(li, p_shadow_atlas); - if (type == RS::LIGHT_OMNI) { + light_data.atlas_rect[0] = rect.position.x; + light_data.atlas_rect[1] = rect.position.y; + light_data.atlas_rect[2] = rect.size.width; + light_data.atlas_rect[3] = rect.size.height; - light_data.atlas_rect[0] = rect.position.x; - light_data.atlas_rect[1] = rect.position.y; - light_data.atlas_rect[2] = rect.size.width; - light_data.atlas_rect[3] = rect.size.height * 0.5; + if (type == RS::LIGHT_OMNI) { + light_data.atlas_rect[3] *= 0.5; //one paraboloid on top of another Transform proj = (p_camera_inverse_transform * light_transform).inverse(); store_transform(proj, light_data.shadow_matrix); + + if (size > 0.0) { + + light_data.soft_shadow_size = size; + } else { + light_data.soft_shadow_size = 0.0; + } + } else if (type == RS::LIGHT_SPOT) { + //used for clamping in this light type + light_data.atlas_rect[2] += light_data.atlas_rect[0]; + light_data.atlas_rect[3] += light_data.atlas_rect[1]; + Transform modelview = (p_camera_inverse_transform * light_transform).inverse(); CameraMatrix bias; bias.set_light_bias(); @@ -1676,6 +1731,14 @@ void RasterizerSceneHighEndRD::_setup_lights(RID *p_light_cull_result, int p_lig CameraMatrix shadow_mtx = rectm * bias * light_instance_get_shadow_camera(li, 0) * modelview; store_camera(shadow_mtx, light_data.shadow_matrix); + + if (size > 0.0) { + CameraMatrix cm = light_instance_get_shadow_camera(li, 0); + float half_np = cm.get_z_near() * Math::tan(Math::deg2rad(spot_angle)); + light_data.soft_shadow_size = (size * 0.5 / radius) / (half_np / cm.get_z_near()) * rect.size.width; + } else { + light_data.soft_shadow_size = 0.0; + } } } else { light_data.shadow_color_enabled[3] = 0; diff --git a/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.h b/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.h index b89de11bb4..b4f5d25afd 100644 --- a/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.h +++ 
b/servers/rendering/rasterizer_rd/rasterizer_scene_high_end_rd.h @@ -254,17 +254,19 @@ class RasterizerSceneHighEndRD : public RasterizerSceneRD { float position[3]; float inv_radius; float direction[3]; + float size; uint16_t attenuation_energy[2]; //16 bits attenuation, then energy uint8_t color_specular[4]; //rgb color, a specular (8 bit unorm) uint16_t cone_attenuation_angle[2]; // attenuation and angle, (16bit float) - uint32_t mask; uint8_t shadow_color_enabled[4]; //shadow rgb color, a>0.5 enabled (8bit unorm) float atlas_rect[4]; // in omni, used for atlas uv, in spot, used for projector uv float shadow_matrix[16]; float shadow_bias; float shadow_normal_bias; float transmittance_bias; - uint32_t pad; + float soft_shadow_size; + uint32_t mask; + uint32_t pad[3]; }; struct DirectionalLightData { @@ -272,9 +274,11 @@ class RasterizerSceneHighEndRD : public RasterizerSceneRD { float direction[3]; float energy; float color[3]; + float size; float specular; uint32_t mask; - uint32_t pad[3]; + float softshadow_angle; + uint32_t pad[1]; uint32_t blend_splits; uint32_t shadow_enabled; float fade_from; @@ -283,12 +287,17 @@ class RasterizerSceneHighEndRD : public RasterizerSceneRD { float shadow_normal_bias[4]; float shadow_transmittance_bias[4]; float shadow_transmittance_z_scale[4]; + float shadow_range_begin[4]; float shadow_split_offsets[4]; float shadow_matrices[4][16]; float shadow_color1[4]; float shadow_color2[4]; float shadow_color3[4]; float shadow_color4[4]; + float uv_scale1[2]; + float uv_scale2[2]; + float uv_scale3[2]; + float uv_scale4[2]; }; struct GIProbeData { @@ -343,6 +352,9 @@ class RasterizerSceneHighEndRD : public RasterizerSceneRD { uint32_t pancake_shadows; uint32_t shadow_filter_mode; + uint32_t shadow_blocker_count; + uint32_t shadow_pad[3]; + float ambient_light_color_energy[4]; float ambient_color_sky_mix; diff --git a/servers/rendering/rasterizer_rd/rasterizer_scene_rd.cpp b/servers/rendering/rasterizer_rd/rasterizer_scene_rd.cpp index deef34d71f..a0bbf8bd43 100644 --- a/servers/rendering/rasterizer_rd/rasterizer_scene_rd.cpp +++ b/servers/rendering/rasterizer_rd/rasterizer_scene_rd.cpp @@ -2032,7 +2032,7 @@ void RasterizerSceneRD::light_instance_set_transform(RID p_light_instance, const light_instance->transform = p_transform; } -void RasterizerSceneRD::light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale) { +void RasterizerSceneRD::light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale, float p_range_begin, const Vector2 &p_uv_scale) { LightInstance *light_instance = light_instance_owner.getornull(p_light_instance); ERR_FAIL_COND(!light_instance); @@ -2048,7 +2048,9 @@ void RasterizerSceneRD::light_instance_set_shadow_transform(RID p_light_instance light_instance->shadow_transform[p_pass].farplane = p_far; light_instance->shadow_transform[p_pass].split = p_split; light_instance->shadow_transform[p_pass].bias_scale = p_bias_scale; + light_instance->shadow_transform[p_pass].range_begin = p_range_begin; light_instance->shadow_transform[p_pass].shadow_texel_size = p_shadow_texel_size; + light_instance->shadow_transform[p_pass].uv_scale = p_uv_scale; } void RasterizerSceneRD::light_instance_mark_visible(RID p_light_instance) { diff --git 
a/servers/rendering/rasterizer_rd/rasterizer_scene_rd.h b/servers/rendering/rasterizer_rd/rasterizer_scene_rd.h index 94c7971ec3..3478c05fb1 100644 --- a/servers/rendering/rasterizer_rd/rasterizer_scene_rd.h +++ b/servers/rendering/rasterizer_rd/rasterizer_scene_rd.h @@ -573,7 +573,9 @@ private: float split; float bias_scale; float shadow_texel_size; + float range_begin; Rect2 atlas_rect; + Vector2 uv_scale; }; RS::LightType light_type = RS::LIGHT_DIRECTIONAL; @@ -883,7 +885,7 @@ public: RID light_instance_create(RID p_light); void light_instance_set_transform(RID p_light_instance, const Transform &p_transform); - void light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale = 1.0); + void light_instance_set_shadow_transform(RID p_light_instance, const CameraMatrix &p_projection, const Transform &p_transform, float p_far, float p_split, int p_pass, float p_shadow_texel_size, float p_bias_scale = 1.0, float p_range_begin = 0, const Vector2 &p_uv_scale = Vector2()); void light_instance_mark_visible(RID p_light_instance); _FORCE_INLINE_ RID light_instance_get_base_light(RID p_light_instance) { @@ -967,6 +969,17 @@ public: LightInstance *li = light_instance_owner.getornull(p_light_instance); return li->shadow_transform[p_index].farplane; } + _FORCE_INLINE_ float light_instance_get_shadow_range_begin(RID p_light_instance, int p_index) { + + LightInstance *li = light_instance_owner.getornull(p_light_instance); + return li->shadow_transform[p_index].range_begin; + } + + _FORCE_INLINE_ Vector2 light_instance_get_shadow_uv_scale(RID p_light_instance, int p_index) { + + LightInstance *li = light_instance_owner.getornull(p_light_instance); + return li->shadow_transform[p_index].uv_scale; + } _FORCE_INLINE_ Rect2 light_instance_get_directional_shadow_atlas_rect(RID p_light_instance, int p_index) { diff --git a/servers/rendering/rasterizer_rd/rasterizer_storage_rd.cpp b/servers/rendering/rasterizer_rd/rasterizer_storage_rd.cpp index 8c73cecec3..0b26ec1be6 100644 --- a/servers/rendering/rasterizer_rd/rasterizer_storage_rd.cpp +++ b/servers/rendering/rasterizer_rd/rasterizer_storage_rd.cpp @@ -3104,6 +3104,7 @@ RID RasterizerStorageRD::light_create(RS::LightType p_type) { light.param[RS::LIGHT_PARAM_INDIRECT_ENERGY] = 1.0; light.param[RS::LIGHT_PARAM_SPECULAR] = 0.5; light.param[RS::LIGHT_PARAM_RANGE] = 1.0; + light.param[RS::LIGHT_PARAM_SIZE] = 0.0; light.param[RS::LIGHT_PARAM_SPOT_ANGLE] = 45; light.param[RS::LIGHT_PARAM_SHADOW_MAX_DISTANCE] = 0; light.param[RS::LIGHT_PARAM_SHADOW_SPLIT_1_OFFSET] = 0.1; diff --git a/servers/rendering/rasterizer_rd/shaders/scene_high_end.glsl b/servers/rendering/rasterizer_rd/shaders/scene_high_end.glsl index 62ab188ddc..70ce8d61e4 100644 --- a/servers/rendering/rasterizer_rd/shaders/scene_high_end.glsl +++ b/servers/rendering/rasterizer_rd/shaders/scene_high_end.glsl @@ -441,7 +441,7 @@ vec3 F0(float metallic, float specular, vec3 albedo) { return mix(vec3(dielectric), albedo, vec3(metallic)); } -void light_compute(vec3 N, vec3 L, vec3 V, vec3 light_color, float attenuation, vec3 shadow_attenuation, vec3 diffuse_color, float roughness, float metallic, float specular, float specular_blob_intensity, +void light_compute(vec3 N, vec3 L, vec3 V, float A, vec3 light_color, float attenuation, vec3 shadow_attenuation, vec3 diffuse_color, float roughness, float metallic, float specular, float specular_blob_intensity, #ifdef 
LIGHT_BACKLIGHT_USED vec3 backlight, #endif @@ -481,7 +481,7 @@ LIGHT_SHADER_CODE /* clang-format on */ #else - float NdotL = dot(N, L); + float NdotL = min(A + dot(N, L), 1.0); float cNdotL = max(NdotL, 0.0); // clamped NdotL float NdotV = dot(N, V); float cNdotV = max(NdotV, 0.0); @@ -491,11 +491,11 @@ LIGHT_SHADER_CODE #endif #if defined(SPECULAR_BLINN) || defined(SPECULAR_SCHLICK_GGX) || defined(LIGHT_CLEARCOAT_USED) - float cNdotH = max(dot(N, H), 0.0); + float cNdotH = clamp(A + dot(N, H), 0.0, 1.0); #endif #if defined(DIFFUSE_BURLEY) || defined(SPECULAR_SCHLICK_GGX) || defined(LIGHT_CLEARCOAT_USED) - float cLdotH = max(dot(L, H), 0.0); + float cLdotH = clamp(A + dot(L, H), 0.0, 1.0); #endif if (metallic < 1.0) { @@ -613,7 +613,7 @@ LIGHT_SHADER_CODE #elif defined(SPECULAR_PHONG) vec3 R = normalize(-reflect(L, N)); - float cRdotV = max(0.0, dot(R, V)); + float cRdotV = clamp(A + dot(R, V), 0.0, 1.0); float shininess = exp2(15.0 * (1.0 - roughness) + 1.0) * 0.25; float phong = pow(cRdotV, shininess); phong *= (shininess + 8.0) * (1.0 / (8.0 * M_PI)); @@ -686,6 +686,24 @@ LIGHT_SHADER_CODE #ifndef USE_NO_SHADOWS +const vec2 shadow_poisson_disk[16] = vec2[]( + vec2(-0.94201624, -0.39906216), + vec2(0.94558609, -0.76890725), + vec2(-0.094184101, -0.92938870), + vec2(0.34495938, 0.29387760), + vec2(-0.91588581, 0.45771432), + vec2(-0.81544232, -0.87912464), + vec2(-0.38277543, 0.27676845), + vec2(0.97484398, 0.75648379), + vec2(0.44323325, -0.97511554), + vec2(0.53742981, -0.47373420), + vec2(-0.26496911, -0.41893023), + vec2(0.79197514, 0.19090188), + vec2(-0.24188840, 0.99706507), + vec2(-0.81409955, 0.91437590), + vec2(0.19984126, 0.78641367), + vec2(0.14383161, -0.14100790)); + float sample_shadow(texture2D shadow, vec2 shadow_pixel_size, vec4 coord) { vec2 pos = coord.xy; @@ -725,6 +743,51 @@ float sample_shadow(texture2D shadow, vec2 shadow_pixel_size, vec4 coord) { return 0; } +float sample_directional_soft_shadow(texture2D shadow, vec3 pssm_coord, vec2 tex_scale) { + + //find blocker + float blocker_count = 0.0; + float blocker_average = 0.0; + + mat2 poisson_rotate; + + { + float r = dot(vec2(gl_FragCoord.xy), vec2(131.234, 583.123)); + float sr = sin(r); + float cr = cos(r); + poisson_rotate = mat2(vec2(cr, -sr), vec2(sr, cr)); + } + + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + vec2 suv = pssm_coord.xy + (poisson_rotate * shadow_poisson_disk[i]) * tex_scale; + float d = textureLod(sampler2D(shadow, material_samplers[SAMPLER_LINEAR_CLAMP]), suv, 0.0).r; + if (d < pssm_coord.z) { + blocker_average += d; + blocker_count += 1.0; + } + } + + if (blocker_count > 0.0) { + + //blockers found, do soft shadow + blocker_average /= blocker_count; + float penumbra = (pssm_coord.z - blocker_average) / blocker_average; + tex_scale *= penumbra; + + float s = 0.0; + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + vec2 suv = pssm_coord.xy + (poisson_rotate * shadow_poisson_disk[i]) * tex_scale; + s += textureProj(sampler2DShadow(shadow, shadow_sampler), vec4(suv, pssm_coord.z, 1.0)); + } + + return s / float(scene_data.shadow_blocker_count); + + } else { + //no blockers found, so no shadow + return 1.0; + } +} + #endif //USE_NO_SHADOWS void light_process_omni(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 albedo, float roughness, float metallic, float specular, float p_blob_intensity, @@ -760,6 +823,13 @@ void light_process_omni(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a vec3 shadow_attenuation = vec3(1.0); vec4 color_specular = 
unpackUnorm4x8(lights.data[idx].color_specular); color_specular.rgb *= attenuation_energy.y; + float size_A = 0.0; + + if (lights.data[idx].size > 0.0) { + + float t = lights.data[idx].size / max(0.001, light_length); + size_A = max(0.0, 1.0 - 1 / sqrt(1 + t * t)); + } #ifdef LIGHT_TRANSMITTANCE_USED float transmittance_z = transmittance_depth; //no transmittance by default @@ -773,7 +843,7 @@ void light_process_omni(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a vec4 v = vec4(vertex, 1.0); vec4 splane = (lights.data[idx].shadow_matrix * v); - float shadow_len = length(splane.xyz); + float shadow_len = length(splane.xyz); //need to remember shadow len from here { vec3 nofs = normal_interp * lights.data[idx].shadow_normal_bias / lights.data[idx].inv_radius; @@ -782,26 +852,126 @@ void light_process_omni(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a splane = (lights.data[idx].shadow_matrix * v); } - splane.xyz = normalize(splane.xyz); - vec4 clamp_rect = lights.data[idx].atlas_rect; + float shadow; + + if (lights.data[idx].soft_shadow_size > 0.0) { + //soft shadow + + //find blocker + + float blocker_count = 0.0; + float blocker_average = 0.0; + + mat2 poisson_rotate; + + { + float r = dot(vec2(gl_FragCoord.xy), vec2(131.234, 583.123)); + float sr = sin(r); + float cr = cos(r); + poisson_rotate = mat2(vec2(cr, -sr), vec2(sr, cr)); + } + + vec3 normal = normalize(splane.xyz); + vec3 v0 = abs(normal.z) < 0.999 ? vec3(0.0, 0.0, 1.0) : vec3(0.0, 1.0, 0.0); + vec3 tangent = normalize(cross(v0, normal)); + vec3 bitangent = normalize(cross(tangent, normal)); + float z_norm = shadow_len * lights.data[idx].inv_radius; + + tangent *= lights.data[idx].soft_shadow_size; + bitangent *= lights.data[idx].soft_shadow_size; - if (splane.z >= 0.0) { + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + vec2 poisson = (poisson_rotate * shadow_poisson_disk[i]); + vec3 pos = splane.xyz + tangent * poisson.x + bitangent * poisson.y; - splane.z += 1.0; + pos = normalize(pos); + vec4 uv_rect = lights.data[idx].atlas_rect; - clamp_rect.y += clamp_rect.w; + if (pos.z >= 0.0) { + pos.z += 1.0; + uv_rect.y += uv_rect.w; + } else { + + pos.z = 1.0 - pos.z; + } + + pos.xy /= pos.z; + + pos.xy = pos.xy * 0.5 + 0.5; + pos.xy = uv_rect.xy + pos.xy * uv_rect.zw; + + float d = textureLod(sampler2D(shadow_atlas, material_samplers[SAMPLER_LINEAR_CLAMP]), pos.xy, 0.0).r; + if (d < z_norm) { + blocker_average += d; + blocker_count += 1.0; + } + } + + if (blocker_count > 0.0) { + + //blockers found, do soft shadow + blocker_average /= blocker_count; + float penumbra = (z_norm - blocker_average) / blocker_average; + tangent *= penumbra; + bitangent *= penumbra; + + z_norm -= lights.data[idx].inv_radius * lights.data[idx].shadow_bias; + + shadow = 0.0; + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + + vec2 poisson = (poisson_rotate * shadow_poisson_disk[i]); + vec3 pos = splane.xyz + tangent * poisson.x + bitangent * poisson.y; + + pos = normalize(pos); + vec4 uv_rect = lights.data[idx].atlas_rect; + + if (pos.z >= 0.0) { + + pos.z += 1.0; + uv_rect.y += uv_rect.w; + } else { + + pos.z = 1.0 - pos.z; + } + + pos.xy /= pos.z; + + pos.xy = pos.xy * 0.5 + 0.5; + pos.xy = uv_rect.xy + pos.xy * uv_rect.zw; + shadow += textureProj(sampler2DShadow(shadow_atlas, shadow_sampler), vec4(pos.xy, z_norm, 1.0)); + } + + shadow /= float(scene_data.shadow_blocker_count); + + } else { + //no blockers found, so no shadow + shadow = 1.0; + } } else { - splane.z = 1.0 - splane.z; - } - splane.xy /= 
splane.z; + splane.xyz = normalize(splane.xyz); + vec4 clamp_rect = lights.data[idx].atlas_rect; + + if (splane.z >= 0.0) { + + splane.z += 1.0; + + clamp_rect.y += clamp_rect.w; + + } else { + splane.z = 1.0 - splane.z; + } + + splane.xy /= splane.z; - splane.xy = splane.xy * 0.5 + 0.5; - splane.z = (shadow_len - lights.data[idx].shadow_bias) * lights.data[idx].inv_radius; - splane.xy = clamp_rect.xy + splane.xy * clamp_rect.zw; - splane.w = 1.0; //needed? i think it should be 1 already - float shadow = sample_shadow(shadow_atlas, scene_data.shadow_atlas_pixel_size, splane); + splane.xy = splane.xy * 0.5 + 0.5; + splane.z = (shadow_len - lights.data[idx].shadow_bias) * lights.data[idx].inv_radius; + splane.xy = clamp_rect.xy + splane.xy * clamp_rect.zw; + splane.w = 1.0; //needed? i think it should be 1 already + shadow = sample_shadow(shadow_atlas, scene_data.shadow_atlas_pixel_size, splane); + } #ifdef LIGHT_TRANSMITTANCE_USED { @@ -836,7 +1006,7 @@ void light_process_omni(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a } #endif //USE_NO_SHADOWS - light_compute(normal, normalize(light_rel_vec), eye_vec, color_specular.rgb, light_attenuation, shadow_attenuation, albedo, roughness, metallic, specular, color_specular.a * p_blob_intensity, + light_compute(normal, normalize(light_rel_vec), eye_vec, size_A, color_specular.rgb, light_attenuation, shadow_attenuation, albedo, roughness, metallic, specular, color_specular.a * p_blob_intensity, #ifdef LIGHT_BACKLIGHT_USED backlight, #endif @@ -903,6 +1073,13 @@ void light_process_spot(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a vec4 color_specular = unpackUnorm4x8(lights.data[idx].color_specular); color_specular.rgb *= attenuation_energy.y; + float size_A = 0.0; + + if (lights.data[idx].size > 0.0) { + + float t = lights.data[idx].size / max(0.001, light_length); + size_A = max(0.0, 1.0 - 1 / sqrt(1 + t * t)); + } /* if (lights.data[idx].atlas_rect!=vec4(0.0)) { //use projector texture @@ -920,22 +1097,82 @@ void light_process_spot(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a v.xyz -= spot_dir * lights.data[idx].shadow_bias; - float depth_bias_scale = 1.0 / (max(0.0001, dot(spot_dir, -light_rel_vec) * lights.data[idx].inv_radius)); //the closer to the light origin, the more you have to offset to reach 1px in the map + float z_norm = dot(spot_dir, -light_rel_vec) * lights.data[idx].inv_radius; + + float depth_bias_scale = 1.0 / (max(0.0001, z_norm)); //the closer to the light origin, the more you have to offset to reach 1px in the map vec3 normal_bias = normalize(normal_interp) * (1.0 - max(0.0, dot(spot_dir, -normalize(normal_interp)))) * lights.data[idx].shadow_normal_bias * depth_bias_scale; normal_bias -= spot_dir * dot(spot_dir, normal_bias); //only XY, no Z v.xyz += normal_bias; + //adjust with bias + z_norm = dot(spot_dir, v.xyz - lights.data[idx].position) * lights.data[idx].inv_radius; + + float shadow; + vec4 splane = (lights.data[idx].shadow_matrix * v); splane /= splane.w; - splane.z = dot(spot_dir, v.xyz - lights.data[idx].position) * lights.data[idx].inv_radius; - float shadow = sample_shadow(shadow_atlas, scene_data.shadow_atlas_pixel_size, splane); + + if (lights.data[idx].soft_shadow_size > 0.0) { + //soft shadow + + //find blocker + + float blocker_count = 0.0; + float blocker_average = 0.0; + + mat2 poisson_rotate; + + { + float r = dot(vec2(gl_FragCoord.xy), vec2(131.234, 583.123)); + float sr = sin(r); + float cr = cos(r); + poisson_rotate = mat2(vec2(cr, -sr), vec2(sr, cr)); + } + + float 
uv_size = lights.data[idx].soft_shadow_size * z_norm; + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + vec2 suv = splane.xy + (poisson_rotate * shadow_poisson_disk[i]) * uv_size; + suv = clamp(suv, lights.data[idx].atlas_rect.xy, lights.data[idx].atlas_rect.zw); + float d = textureLod(sampler2D(shadow_atlas, material_samplers[SAMPLER_LINEAR_CLAMP]), suv, 0.0).r; + if (d < z_norm) { + blocker_average += d; + blocker_count += 1.0; + } + } + + if (blocker_count > 0.0) { + + //blockers found, do soft shadow + blocker_average /= blocker_count; + float penumbra = (z_norm - blocker_average) / blocker_average; + uv_size *= penumbra; + + shadow = 0.0; + for (uint i = 0; i < scene_data.shadow_blocker_count; i++) { + vec2 suv = splane.xy + (poisson_rotate * shadow_poisson_disk[i]) * uv_size; + suv = clamp(suv, lights.data[idx].atlas_rect.xy, lights.data[idx].atlas_rect.zw); + shadow += textureProj(sampler2DShadow(shadow_atlas, shadow_sampler), vec4(suv, z_norm, 1.0)); + } + + shadow /= float(scene_data.shadow_blocker_count); + + } else { + //no blockers found, so no shadow + shadow = 1.0; + } + + } else { + //hard shadow + splane.z = z_norm; + shadow = sample_shadow(shadow_atlas, scene_data.shadow_atlas_pixel_size, splane); + } shadow_attenuation = mix(shadow_color_enabled.rgb, vec3(1.0), shadow); #ifdef LIGHT_TRANSMITTANCE_USED { - splane = (lights.data[idx].shadow_matrix * vec4(vertex - normalize(normal_interp) * lights.data[idx].transmittance_bias, 1.0)); + vec4 splane = (lights.data[idx].shadow_matrix * vec4(vertex - normalize(normal_interp) * lights.data[idx].transmittance_bias, 1.0)); splane /= splane.w; float shadow_z = textureLod(sampler2D(shadow_atlas, material_samplers[SAMPLER_LINEAR_CLAMP]), splane.xy, 0.0).r; @@ -950,7 +1187,7 @@ void light_process_spot(uint idx, vec3 vertex, vec3 eye_vec, vec3 normal, vec3 a #endif //USE_NO_SHADOWS - light_compute(normal, normalize(light_rel_vec), eye_vec, color_specular.rgb, light_attenuation, shadow_attenuation, albedo, roughness, metallic, specular, color_specular.a * p_blob_intensity, + light_compute(normal, normalize(light_rel_vec), eye_vec, size_A, color_specular.rgb, light_attenuation, shadow_attenuation, albedo, roughness, metallic, specular, color_specular.a * p_blob_intensity, #ifdef LIGHT_BACKLIGHT_USED backlight, #endif @@ -1636,13 +1873,28 @@ FRAGMENT_SHADER_CODE normal_bias -= light_dir * dot(light_dir, normal_bias); \ m_var.xyz += normal_bias; + float shadow = 0.0; + if (depth_z < directional_lights.data[i].shadow_split_offsets.x) { vec4 v = vec4(vertex, 1.0); BIAS_FUNC(v, 0) pssm_coord = (directional_lights.data[i].shadow_matrix1 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.x; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale1 * test_radius; + shadow = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + shadow_color = directional_lights.data[i].shadow_color1.rgb; + #ifdef LIGHT_TRANSMITTANCE_USED { vec4 trans_vertex = vec4(vertex - normalize(normal_interp) * directional_lights.data[i].shadow_transmittance_bias.x, 1.0); @@ -1663,6 +1915,18 @@ FRAGMENT_SHADER_CODE BIAS_FUNC(v, 1) 
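Stepping back from the diff for a moment: the omni, spot, and directional soft-shadow paths added here all follow the same two-pass PCSS shape — average the depths of poisson-disk samples that land in front of the receiver, derive a penumbra from that average, then filter the shadow test over the widened radius. A minimal C++ sketch of that shape with hypothetical names, plain arrays standing in for the shadow-map reads:

	#include <cstddef>

	// Pass 1: estimate the average blocker depth; the caller then runs pass 2,
	// filtering the shadow comparison over the radius returned here.
	float pcss_filter_radius(const float *sample_depths, size_t sample_count,
			float receiver_depth, float light_size_uv) {
		float blocker_sum = 0.0f;
		size_t blocker_count = 0;
		for (size_t i = 0; i < sample_count; i++) {
			if (sample_depths[i] < receiver_depth) { // sample is closer to the light: a blocker
				blocker_sum += sample_depths[i];
				blocker_count++;
			}
		}
		if (blocker_count == 0) {
			return 0.0f; // no blockers found: fully lit, no filtering needed
		}
		float blocker_average = blocker_sum / float(blocker_count);
		float penumbra = (receiver_depth - blocker_average) / blocker_average;
		return light_size_uv * penumbra; // radius for the second, filtering pass
	}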
pssm_coord = (directional_lights.data[i].shadow_matrix2 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.y; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale2 * test_radius; + shadow = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + shadow_color = directional_lights.data[i].shadow_color2.rgb; #ifdef LIGHT_TRANSMITTANCE_USED { @@ -1684,6 +1948,18 @@ FRAGMENT_SHADER_CODE BIAS_FUNC(v, 2) pssm_coord = (directional_lights.data[i].shadow_matrix3 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.z; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale3 * test_radius; + shadow = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + shadow_color = directional_lights.data[i].shadow_color3.rgb; #ifdef LIGHT_TRANSMITTANCE_USED { @@ -1706,7 +1982,20 @@ FRAGMENT_SHADER_CODE BIAS_FUNC(v, 3) pssm_coord = (directional_lights.data[i].shadow_matrix4 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.w; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale4 * test_radius; + shadow = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + shadow_color = directional_lights.data[i].shadow_color4.rgb; + #ifdef LIGHT_TRANSMITTANCE_USED { vec4 trans_vertex = vec4(vertex - normalize(normal_interp) * directional_lights.data[i].shadow_transmittance_bias.w, 1.0); @@ -1722,40 +2011,72 @@ FRAGMENT_SHADER_CODE #endif } - pssm_coord /= pssm_coord.w; - - float shadow = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); - if (directional_lights.data[i].blend_splits) { vec3 shadow_color_blend = vec3(0.0); float pssm_blend; + float shadow2; if (depth_z < directional_lights.data[i].shadow_split_offsets.x) { vec4 v = vec4(vertex, 1.0); BIAS_FUNC(v, 1) pssm_coord = (directional_lights.data[i].shadow_matrix2 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.y; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale2 * test_radius; + shadow2 = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else 
{ + shadow2 = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + pssm_blend = smoothstep(0.0, directional_lights.data[i].shadow_split_offsets.x, depth_z); shadow_color_blend = directional_lights.data[i].shadow_color2.rgb; } else if (depth_z < directional_lights.data[i].shadow_split_offsets.y) { vec4 v = vec4(vertex, 1.0); BIAS_FUNC(v, 2) pssm_coord = (directional_lights.data[i].shadow_matrix3 * v); + pssm_coord /= pssm_coord.w; + + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.z; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale3 * test_radius; + shadow2 = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow2 = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + pssm_blend = smoothstep(directional_lights.data[i].shadow_split_offsets.x, directional_lights.data[i].shadow_split_offsets.y, depth_z); + shadow_color_blend = directional_lights.data[i].shadow_color3.rgb; } else if (depth_z < directional_lights.data[i].shadow_split_offsets.z) { vec4 v = vec4(vertex, 1.0); BIAS_FUNC(v, 3) pssm_coord = (directional_lights.data[i].shadow_matrix4 * v); + pssm_coord /= pssm_coord.w; + if (directional_lights.data[i].softshadow_angle > 0) { + float range_pos = dot(directional_lights.data[i].direction, v.xyz); + float range_begin = directional_lights.data[i].shadow_range_begin.w; + float test_radius = (range_pos - range_begin) * directional_lights.data[i].softshadow_angle; + vec2 tex_scale = directional_lights.data[i].uv_scale4 * test_radius; + shadow2 = sample_directional_soft_shadow(directional_shadow_atlas, pssm_coord.xyz, tex_scale); + } else { + shadow2 = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); + } + pssm_blend = smoothstep(directional_lights.data[i].shadow_split_offsets.y, directional_lights.data[i].shadow_split_offsets.z, depth_z); shadow_color_blend = directional_lights.data[i].shadow_color4.rgb; } else { pssm_blend = 0.0; //if no blend, same coord will be used (divide by z will result in same value, and already cached) } - pssm_coord /= pssm_coord.w; + pssm_blend = sqrt(pssm_blend); - float shadow2 = sample_shadow(directional_shadow_atlas, scene_data.directional_shadow_pixel_size, pssm_coord); shadow = mix(shadow, shadow2, pssm_blend); shadow_color = mix(shadow_color, shadow_color_blend, pssm_blend); } @@ -1767,7 +2088,7 @@ FRAGMENT_SHADER_CODE #undef BIAS_FUNC } - light_compute(normal, directional_lights.data[i].direction, normalize(view), directional_lights.data[i].color * directional_lights.data[i].energy, 1.0, shadow_attenuation, albedo, roughness, metallic, specular, directional_lights.data[i].specular * specular_blob_intensity, + light_compute(normal, directional_lights.data[i].direction, normalize(view), directional_lights.data[i].size, directional_lights.data[i].color * directional_lights.data[i].energy, 1.0, shadow_attenuation, albedo, roughness, metallic, specular, directional_lights.data[i].specular * specular_blob_intensity, #ifdef LIGHT_BACKLIGHT_USED backlight, #endif diff --git a/servers/rendering/rasterizer_rd/shaders/scene_high_end_inc.glsl b/servers/rendering/rasterizer_rd/shaders/scene_high_end_inc.glsl index e3f1e650ed..59f326bc9b 
100644 --- a/servers/rendering/rasterizer_rd/shaders/scene_high_end_inc.glsl +++ b/servers/rendering/rasterizer_rd/shaders/scene_high_end_inc.glsl @@ -47,6 +47,11 @@ layout(set = 0, binding = 3, std140) uniform SceneData { bool pancake_shadows; uint shadow_filter_mode; + uint shadow_blocker_count; + uint shadow_pad0; + uint shadow_pad1; + uint shadow_pad2; + vec4 ambient_light_color_energy; float ambient_color_sky_mix; @@ -141,17 +146,19 @@ struct LightData { //this structure needs to be as packed as possible vec3 position; float inv_radius; vec3 direction; + float size; uint attenuation_energy; //attenuation uint color_specular; //rgb color, a specular (8 bit unorm) uint cone_attenuation_angle; // attenuation and angle, (16bit float) - uint mask; uint shadow_color_enabled; //shadow rgb color, a>0.5 enabled (8bit unorm) vec4 atlas_rect; // used for spot mat4 shadow_matrix; float shadow_bias; float shadow_normal_bias; float transmittance_bias; - uint pad; + float soft_shadow_size; // for spot, it's the size in uv coordinates of the light, for omni it's the span angle + uint mask; + uint pad[3]; }; layout(set = 0, binding = 5, std430) buffer Lights { @@ -180,11 +187,11 @@ struct DirectionalLightData { vec3 direction; float energy; vec3 color; + float size; float specular; uint mask; - uint pad0; + float softshadow_angle; uint pad1; - uint pad2; bool blend_splits; bool shadow_enabled; float fade_from; @@ -193,6 +200,7 @@ struct DirectionalLightData { vec4 shadow_normal_bias; vec4 shadow_transmittance_bias; vec4 shadow_transmittance_z_scale; + vec4 shadow_range_begin; vec4 shadow_split_offsets; mat4 shadow_matrix1; mat4 shadow_matrix2; @@ -202,6 +210,10 @@ struct DirectionalLightData { vec4 shadow_color2; vec4 shadow_color3; vec4 shadow_color4; + vec2 uv_scale1; + vec2 uv_scale2; + vec2 uv_scale3; + vec2 uv_scale4; }; layout(set = 0, binding = 7, std140) uniform DirectionalLights { diff --git a/servers/rendering/rendering_server_raster.h b/servers/rendering/rendering_server_raster.h index 1162946796..1b9755397a 100644 --- a/servers/rendering/rendering_server_raster.h +++ b/servers/rendering/rendering_server_raster.h @@ -456,7 +456,7 @@ public: BIND0R(RID, viewport_create) - BIND2(viewport_set_use_arvr, RID, bool) + BIND2(viewport_set_use_xr, RID, bool) BIND3(viewport_set_size, RID, int, int) BIND2(viewport_set_active, RID, bool) diff --git a/servers/rendering/rendering_server_scene.cpp b/servers/rendering/rendering_server_scene.cpp index a367d4522c..d66708587a 100644 --- a/servers/rendering/rendering_server_scene.cpp +++ b/servers/rendering/rendering_server_scene.cpp @@ -1499,7 +1499,9 @@ bool RenderingServerScene::_light_instance_update_shadow(Instance *p_instance, c if (j == 0 || d_z > z_max) z_max = d_z; } + real_t radius = 0; + real_t soft_shadow_expand = 0; Vector3 center; { @@ -1528,12 +1530,30 @@ bool RenderingServerScene::_light_instance_update_shadow(Instance *p_instance, c bias_scale = radius / first_radius; } - x_max_cam = x_vec.dot(center) + radius; - x_min_cam = x_vec.dot(center) - radius; - y_max_cam = y_vec.dot(center) + radius; - y_min_cam = y_vec.dot(center) - radius; z_min_cam = z_vec.dot(center) - radius; + { + + float soft_shadow_angle = RSG::storage->light_get_param(p_instance->base, RS::LIGHT_PARAM_SIZE); + + if (soft_shadow_angle > 0.0 && pancake_size > 0.0) { + + float z_range = (z_vec.dot(center) + radius + pancake_size) - z_min_cam; + soft_shadow_expand = Math::tan(Math::deg2rad(soft_shadow_angle)) * z_range; + + x_max += soft_shadow_expand; + y_max += 
soft_shadow_expand; + + x_min -= soft_shadow_expand; + y_min -= soft_shadow_expand; + } + } + + x_max_cam = x_vec.dot(center) + radius + soft_shadow_expand; + x_min_cam = x_vec.dot(center) - radius - soft_shadow_expand; + y_max_cam = y_vec.dot(center) + radius + soft_shadow_expand; + y_min_cam = y_vec.dot(center) - radius - soft_shadow_expand; + if (depth_range_mode == RS::LIGHT_DIRECTIONAL_SHADOW_DEPTH_RANGE_STABLE) { //this trick here is what stabilizes the shadow (make potential jaggies to not move) //at the cost of some wasted resolution. Still the quality increase is very well worth it @@ -1588,8 +1608,9 @@ bool RenderingServerScene::_light_instance_update_shadow(Instance *p_instance, c } } - if (cull_max > z_max) + if (cull_max > z_max) { z_max = cull_max; + } if (pancake_size > 0) { z_max = z_vec.dot(center) + radius + pancake_size; @@ -1677,11 +1698,19 @@ bool RenderingServerScene::_light_instance_update_shadow(Instance *p_instance, c ortho_camera.set_orthogonal(-half_x, half_x, -half_y, half_y, 0, (z_max - z_min_cam)); + Vector2 uv_scale(1.0 / (x_max_cam - x_min_cam), 1.0 / (y_max_cam - y_min_cam)); + Transform ortho_transform; ortho_transform.basis = transform.basis; ortho_transform.origin = x_vec * (x_min_cam + half_x) + y_vec * (y_min_cam + half_y) + z_vec * z_max; - RSG::scene_render->light_instance_set_shadow_transform(light->instance, ortho_camera, ortho_transform, z_max - z_min_cam, distances[i + 1], i, radius * 2.0 / texture_size, bias_scale * aspect_bias_scale * min_distance_bias_scale); + { + Vector3 max_in_view = p_cam_transform.affine_inverse().xform(z_vec * cull_max); + Vector3 dir_in_view = p_cam_transform.xform_inv(z_vec).normalized(); + cull_max = dir_in_view.dot(max_in_view); + } + + RSG::scene_render->light_instance_set_shadow_transform(light->instance, ortho_camera, ortho_transform, z_max - z_min_cam, distances[i + 1], i, radius * 2.0 / texture_size, bias_scale * aspect_bias_scale * min_distance_bias_scale, z_max, uv_scale); } RSG::scene_render->render_shadow(light->instance, p_shadow_atlas, i, (RasterizerScene::InstanceBase **)instance_shadow_cull_result, cull_count); @@ -1883,7 +1912,7 @@ void RenderingServerScene::render_camera(RID p_render_buffers, RID p_camera, RID #endif } -void RenderingServerScene::render_camera(RID p_render_buffers, Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas) { +void RenderingServerScene::render_camera(RID p_render_buffers, Ref<XRInterface> &p_interface, XRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas) { // render for AR/VR interface Camera *camera = camera_owner.getornull(p_camera); @@ -1895,16 +1924,14 @@ void RenderingServerScene::render_camera(RID p_render_buffers, Ref<ARVRInterface // We also ignore our camera position, it will have been positioned with a slightly old tracking position. // Instead we take our origin point and have our ar/vr interface add fresh tracking data! Whoohoo! - Transform world_origin = ARVRServer::get_singleton()->get_world_origin(); + Transform world_origin = XRServer::get_singleton()->get_world_origin(); Transform cam_transform = p_interface->get_transform_for_eye(p_eye, world_origin); // For stereo render we only prepare for our left eye and then reuse the outcome for our right eye - if (p_eye == ARVRInterface::EYE_LEFT) { - ///@TODO possibly move responsibility for this into our ARVRServer or ARVRInterface? 
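An editorial sketch of the CPU/shader handshake set up just above: each directional cascade stores uv_scale = 1 / (cascade extent in world units), later scaled into its atlas rect, and the fragment shader turns the receiver's depth past shadow_range_begin into a sampling radius in shadow-map UVs. The function names below are illustrative only:

	// World-space penumbra radius at the receiver: distance travelled past the
	// cascade's range_begin, times tan(the light's angular size).
	float penumbra_radius_world(float softshadow_angle_tan, float range_pos, float range_begin) {
		return (range_pos - range_begin) * softshadow_angle_tan;
	}

	// Converted to shadow-map UV units with the per-cascade uv_scale
	// (1.0 / cascade world extent) packed per split by the patch.
	float penumbra_radius_uv(float radius_world, float uv_scale) {
		return radius_world * uv_scale;
	}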
- + if (p_eye == XRInterface::EYE_LEFT) { // Center our transform, we assume basis is equal. Transform mono_transform = cam_transform; - Transform right_transform = p_interface->get_transform_for_eye(ARVRInterface::EYE_RIGHT, world_origin); + Transform right_transform = p_interface->get_transform_for_eye(XRInterface::EYE_RIGHT, world_origin); mono_transform.origin += right_transform.origin; mono_transform.origin *= 0.5; @@ -1958,7 +1985,7 @@ void RenderingServerScene::render_camera(RID p_render_buffers, Ref<ARVRInterface // now prepare our scene with our adjusted transform projection matrix _prepare_scene(mono_transform, combined_matrix, false, false, camera->env, camera->effects, camera->visible_layers, p_scenario, p_shadow_atlas, RID()); - } else if (p_eye == ARVRInterface::EYE_MONO) { + } else if (p_eye == XRInterface::EYE_MONO) { // For mono render, prepare as per usual _prepare_scene(cam_transform, camera_matrix, false, false, camera->env, camera->effects, camera->visible_layers, p_scenario, p_shadow_atlas, RID()); } diff --git a/servers/rendering/rendering_server_scene.h b/servers/rendering/rendering_server_scene.h index 80f226e1cb..0970fed6c4 100644 --- a/servers/rendering/rendering_server_scene.h +++ b/servers/rendering/rendering_server_scene.h @@ -39,7 +39,7 @@ #include "core/os/thread.h" #include "core/rid_owner.h" #include "core/self_list.h" -#include "servers/arvr/arvr_interface.h" +#include "servers/xr/xr_interface.h" class RenderingServerScene { public: @@ -426,7 +426,7 @@ public: void render_empty_scene(RID p_render_buffers, RID p_scenario, RID p_shadow_atlas); void render_camera(RID p_render_buffers, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas); - void render_camera(RID p_render_buffers, Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas); + void render_camera(RID p_render_buffers, Ref<XRInterface> &p_interface, XRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas); void update_dirty_instances(); void render_probes(); diff --git a/servers/rendering/rendering_server_viewport.cpp b/servers/rendering/rendering_server_viewport.cpp index aa65101ddf..87dcb772bc 100644 --- a/servers/rendering/rendering_server_viewport.cpp +++ b/servers/rendering/rendering_server_viewport.cpp @@ -62,24 +62,24 @@ static Transform2D _canvas_get_transform(RenderingServerViewport::Viewport *p_vi return xf; } -void RenderingServerViewport::_draw_3d(Viewport *p_viewport, ARVRInterface::Eyes p_eye) { +void RenderingServerViewport::_draw_3d(Viewport *p_viewport, XRInterface::Eyes p_eye) { RENDER_TIMESTAMP(">Begin Rendering 3D Scene"); - Ref<ARVRInterface> arvr_interface; - if (ARVRServer::get_singleton() != nullptr) { - arvr_interface = ARVRServer::get_singleton()->get_primary_interface(); + Ref<XRInterface> xr_interface; + if (XRServer::get_singleton() != nullptr) { + xr_interface = XRServer::get_singleton()->get_primary_interface(); } - if (p_viewport->use_arvr && arvr_interface.is_valid()) { - RSG::scene->render_camera(p_viewport->render_buffers, arvr_interface, p_eye, p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas); + if (p_viewport->use_xr && xr_interface.is_valid()) { + RSG::scene->render_camera(p_viewport->render_buffers, xr_interface, p_eye, p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas); } else { RSG::scene->render_camera(p_viewport->render_buffers, p_viewport->camera, 
p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas); } RENDER_TIMESTAMP("<End Rendering 3D Scene"); } -void RenderingServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye) { +void RenderingServerViewport::_draw_viewport(Viewport *p_viewport, XRInterface::Eyes p_eye) { /* Camera should always be BEFORE any other 3D */ @@ -293,17 +293,15 @@ void RenderingServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface void RenderingServerViewport::draw_viewports() { -#if 0 - // get our arvr interface in case we need it - Ref<ARVRInterface> arvr_interface; + // get our xr interface in case we need it + Ref<XRInterface> xr_interface; - if (ARVRServer::get_singleton() != nullptr) { - arvr_interface = ARVRServer::get_singleton()->get_primary_interface(); + if (XRServer::get_singleton() != nullptr) { + xr_interface = XRServer::get_singleton()->get_primary_interface(); // process all our active interfaces - ARVRServer::get_singleton()->_process(); + XRServer::get_singleton()->_process(); } -#endif if (Engine::get_singleton()->is_editor_hint()) { set_default_clear_color(GLOBAL_GET("rendering/environment/default_clear_color")); @@ -367,38 +365,41 @@ void RenderingServerViewport::draw_viewports() { RSG::storage->render_target_set_as_unused(vp->render_target); #if 0 - if (vp->use_arvr && arvr_interface.is_valid()) { + // TODO fix up this code after we change our commit_for_eye to accept our new render targets + + if (vp->use_xr && xr_interface.is_valid()) { // override our size, make sure it matches our required size - vp->size = arvr_interface->get_render_targetsize(); + vp->size = xr_interface->get_render_targetsize(); RSG::storage->render_target_set_size(vp->render_target, vp->size.x, vp->size.y); // render mono or left eye first - ARVRInterface::Eyes leftOrMono = arvr_interface->is_stereo() ? ARVRInterface::EYE_LEFT : ARVRInterface::EYE_MONO; + XRInterface::Eyes leftOrMono = xr_interface->is_stereo() ? 
XRInterface::EYE_LEFT : XRInterface::EYE_MONO; // check for an external texture destination for our left eye/mono - RSG::storage->render_target_set_external_texture(vp->render_target, arvr_interface->get_external_texture_for_eye(leftOrMono)); + // TODO investigate how we're going to make external textures work + RSG::storage->render_target_set_external_texture(vp->render_target, xr_interface->get_external_texture_for_eye(leftOrMono)); // set our render target as current RSG::rasterizer->set_current_render_target(vp->render_target); // and draw left eye/mono _draw_viewport(vp, leftOrMono); - arvr_interface->commit_for_eye(leftOrMono, vp->render_target, vp->viewport_to_screen_rect); + xr_interface->commit_for_eye(leftOrMono, vp->render_target, vp->viewport_to_screen_rect); // render right eye - if (leftOrMono == ARVRInterface::EYE_LEFT) { + if (leftOrMono == XRInterface::EYE_LEFT) { // check for an external texture destination for our right eye - RSG::storage->render_target_set_external_texture(vp->render_target, arvr_interface->get_external_texture_for_eye(ARVRInterface::EYE_RIGHT)); + RSG::storage->render_target_set_external_texture(vp->render_target, xr_interface->get_external_texture_for_eye(XRInterface::EYE_RIGHT)); // commit for eye may have changed the render target RSG::rasterizer->set_current_render_target(vp->render_target); - _draw_viewport(vp, ARVRInterface::EYE_RIGHT); - arvr_interface->commit_for_eye(ARVRInterface::EYE_RIGHT, vp->render_target, vp->viewport_to_screen_rect); + _draw_viewport(vp, XRInterface::EYE_RIGHT); + xr_interface->commit_for_eye(XRInterface::EYE_RIGHT, vp->render_target, vp->viewport_to_screen_rect); } // and for our frame timing, mark when we've finished committing our eyes - ARVRServer::get_singleton()->_mark_commit(); + XRServer::get_singleton()->_mark_commit(); } else { #endif { @@ -470,11 +471,11 @@ RID RenderingServerViewport::viewport_create() { return rid; } -void RenderingServerViewport::viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) { +void RenderingServerViewport::viewport_set_use_xr(RID p_viewport, bool p_use_xr) { Viewport *viewport = viewport_owner.getornull(p_viewport); ERR_FAIL_COND(!viewport); - viewport->use_arvr = p_use_arvr; + viewport->use_xr = p_use_xr; } void RenderingServerViewport::viewport_set_size(RID p_viewport, int p_width, int p_height) { diff --git a/servers/rendering/rendering_server_viewport.h b/servers/rendering/rendering_server_viewport.h index f574c58d96..71d8408ed1 100644 --- a/servers/rendering/rendering_server_viewport.h +++ b/servers/rendering/rendering_server_viewport.h @@ -34,8 +34,8 @@ #include "core/rid_owner.h" #include "core/self_list.h" #include "rasterizer.h" -#include "servers/arvr/arvr_interface.h" #include "servers/rendering_server.h" +#include "servers/xr/xr_interface.h" class RenderingServerViewport { public: @@ -47,7 +47,7 @@ public: RID self; RID parent; - bool use_arvr; /* use arvr interface to override camera positioning and projection matrices and control output */ + bool use_xr; /* use xr interface to override camera positioning and projection matrices and control output */ Size2i size; RID camera; @@ -127,7 +127,7 @@ public: for (int i = 0; i < RS::VIEWPORT_RENDER_INFO_MAX; i++) { render_info[i] = 0; } - use_arvr = false; + use_xr = false; } }; @@ -152,13 +152,13 @@ public: Vector<Viewport *> active_viewports; private: - void _draw_3d(Viewport *p_viewport, ARVRInterface::Eyes p_eye); - void _draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye = ARVRInterface::EYE_MONO); + void 
_draw_3d(Viewport *p_viewport, XRInterface::Eyes p_eye); + void _draw_viewport(Viewport *p_viewport, XRInterface::Eyes p_eye = XRInterface::EYE_MONO); public: RID viewport_create(); - void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr); + void viewport_set_use_xr(RID p_viewport, bool p_use_xr); void viewport_set_size(RID p_viewport, int p_width, int p_height); diff --git a/servers/rendering/rendering_server_wrap_mt.h b/servers/rendering/rendering_server_wrap_mt.h index a3077980ce..9a98841b2c 100644 --- a/servers/rendering/rendering_server_wrap_mt.h +++ b/servers/rendering/rendering_server_wrap_mt.h @@ -370,7 +370,7 @@ public: FUNCRID(viewport) - FUNC2(viewport_set_use_arvr, RID, bool) + FUNC2(viewport_set_use_xr, RID, bool) FUNC3(viewport_set_size, RID, int, int) diff --git a/servers/rendering_server.cpp b/servers/rendering_server.cpp index d492586ce4..0a1b7b98e4 100644 --- a/servers/rendering_server.cpp +++ b/servers/rendering_server.cpp @@ -1766,7 +1766,7 @@ void RenderingServer::_bind_methods() { ClassDB::bind_method(D_METHOD("camera_set_use_vertical_aspect", "camera", "enable"), &RenderingServer::camera_set_use_vertical_aspect); ClassDB::bind_method(D_METHOD("viewport_create"), &RenderingServer::viewport_create); - ClassDB::bind_method(D_METHOD("viewport_set_use_arvr", "viewport", "use_arvr"), &RenderingServer::viewport_set_use_arvr); + ClassDB::bind_method(D_METHOD("viewport_set_use_xr", "viewport", "use_xr"), &RenderingServer::viewport_set_use_xr); ClassDB::bind_method(D_METHOD("viewport_set_size", "viewport", "width", "height"), &RenderingServer::viewport_set_size); ClassDB::bind_method(D_METHOD("viewport_set_active", "viewport", "active"), &RenderingServer::viewport_set_active); ClassDB::bind_method(D_METHOD("viewport_set_parent_viewport", "viewport", "parent_viewport"), &RenderingServer::viewport_set_parent_viewport); diff --git a/servers/rendering_server.h b/servers/rendering_server.h index ddae78cb1f..1907660dd7 100644 --- a/servers/rendering_server.h +++ b/servers/rendering_server.h @@ -382,6 +382,7 @@ public: LIGHT_PARAM_INDIRECT_ENERGY, LIGHT_PARAM_SPECULAR, LIGHT_PARAM_RANGE, + LIGHT_PARAM_SIZE, LIGHT_PARAM_ATTENUATION, LIGHT_PARAM_SPOT_ANGLE, LIGHT_PARAM_SPOT_ATTENUATION, @@ -580,7 +581,7 @@ public: virtual RID viewport_create() = 0; - virtual void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) = 0; + virtual void viewport_set_use_xr(RID p_viewport, bool p_use_xr) = 0; virtual void viewport_set_size(RID p_viewport, int p_width, int p_height) = 0; virtual void viewport_set_active(RID p_viewport, bool p_active) = 0; virtual void viewport_set_parent_viewport(RID p_viewport, RID p_parent_viewport) = 0; diff --git a/servers/arvr/SCsub b/servers/xr/SCsub index 86681f9c74..86681f9c74 100644 --- a/servers/arvr/SCsub +++ b/servers/xr/SCsub diff --git a/servers/arvr/arvr_interface.cpp b/servers/xr/xr_interface.cpp index 577b4cdd8a..c1233ae810 100644 --- a/servers/arvr/arvr_interface.cpp +++ b/servers/xr/xr_interface.cpp @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_interface.cpp */ +/* xr_interface.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,24 +28,24 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#include "arvr_interface.h" +#include "xr_interface.h" -void ARVRInterface::_bind_methods() { - ClassDB::bind_method(D_METHOD("get_name"), &ARVRInterface::get_name); - ClassDB::bind_method(D_METHOD("get_capabilities"), &ARVRInterface::get_capabilities); +void XRInterface::_bind_methods() { + ClassDB::bind_method(D_METHOD("get_name"), &XRInterface::get_name); + ClassDB::bind_method(D_METHOD("get_capabilities"), &XRInterface::get_capabilities); - ClassDB::bind_method(D_METHOD("is_primary"), &ARVRInterface::is_primary); - ClassDB::bind_method(D_METHOD("set_is_primary", "enable"), &ARVRInterface::set_is_primary); + ClassDB::bind_method(D_METHOD("is_primary"), &XRInterface::is_primary); + ClassDB::bind_method(D_METHOD("set_is_primary", "enable"), &XRInterface::set_is_primary); - ClassDB::bind_method(D_METHOD("is_initialized"), &ARVRInterface::is_initialized); - ClassDB::bind_method(D_METHOD("set_is_initialized", "initialized"), &ARVRInterface::set_is_initialized); - ClassDB::bind_method(D_METHOD("initialize"), &ARVRInterface::initialize); - ClassDB::bind_method(D_METHOD("uninitialize"), &ARVRInterface::uninitialize); + ClassDB::bind_method(D_METHOD("is_initialized"), &XRInterface::is_initialized); + ClassDB::bind_method(D_METHOD("set_is_initialized", "initialized"), &XRInterface::set_is_initialized); + ClassDB::bind_method(D_METHOD("initialize"), &XRInterface::initialize); + ClassDB::bind_method(D_METHOD("uninitialize"), &XRInterface::uninitialize); - ClassDB::bind_method(D_METHOD("get_tracking_status"), &ARVRInterface::get_tracking_status); + ClassDB::bind_method(D_METHOD("get_tracking_status"), &XRInterface::get_tracking_status); - ClassDB::bind_method(D_METHOD("get_render_targetsize"), &ARVRInterface::get_render_targetsize); - ClassDB::bind_method(D_METHOD("is_stereo"), &ARVRInterface::is_stereo); + ClassDB::bind_method(D_METHOD("get_render_targetsize"), &XRInterface::get_render_targetsize); + ClassDB::bind_method(D_METHOD("is_stereo"), &XRInterface::is_stereo); ADD_GROUP("Interface", "interface_"); ADD_PROPERTY(PropertyInfo(Variant::BOOL, "interface_is_primary"), "set_is_primary", "is_primary"); @@ -54,55 +54,55 @@ void ARVRInterface::_bind_methods() { // we don't have any properties specific to VR yet.... // but we do have properties specific to AR.... 
- ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &ARVRInterface::get_anchor_detection_is_enabled); - ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &ARVRInterface::set_anchor_detection_is_enabled); - ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &ARVRInterface::get_camera_feed_id); + ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &XRInterface::get_anchor_detection_is_enabled); + ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &XRInterface::set_anchor_detection_is_enabled); + ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &XRInterface::get_camera_feed_id); ADD_GROUP("AR", "ar_"); ADD_PROPERTY(PropertyInfo(Variant::BOOL, "ar_is_anchor_detection_enabled"), "set_anchor_detection_is_enabled", "get_anchor_detection_is_enabled"); - BIND_ENUM_CONSTANT(ARVR_NONE); - BIND_ENUM_CONSTANT(ARVR_MONO); - BIND_ENUM_CONSTANT(ARVR_STEREO); - BIND_ENUM_CONSTANT(ARVR_AR); - BIND_ENUM_CONSTANT(ARVR_EXTERNAL); + BIND_ENUM_CONSTANT(XR_NONE); + BIND_ENUM_CONSTANT(XR_MONO); + BIND_ENUM_CONSTANT(XR_STEREO); + BIND_ENUM_CONSTANT(XR_AR); + BIND_ENUM_CONSTANT(XR_EXTERNAL); BIND_ENUM_CONSTANT(EYE_MONO); BIND_ENUM_CONSTANT(EYE_LEFT); BIND_ENUM_CONSTANT(EYE_RIGHT); - BIND_ENUM_CONSTANT(ARVR_NORMAL_TRACKING); - BIND_ENUM_CONSTANT(ARVR_EXCESSIVE_MOTION); - BIND_ENUM_CONSTANT(ARVR_INSUFFICIENT_FEATURES); - BIND_ENUM_CONSTANT(ARVR_UNKNOWN_TRACKING); - BIND_ENUM_CONSTANT(ARVR_NOT_TRACKING); + BIND_ENUM_CONSTANT(XR_NORMAL_TRACKING); + BIND_ENUM_CONSTANT(XR_EXCESSIVE_MOTION); + BIND_ENUM_CONSTANT(XR_INSUFFICIENT_FEATURES); + BIND_ENUM_CONSTANT(XR_UNKNOWN_TRACKING); + BIND_ENUM_CONSTANT(XR_NOT_TRACKING); }; -StringName ARVRInterface::get_name() const { +StringName XRInterface::get_name() const { return "Unknown"; }; -bool ARVRInterface::is_primary() { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, false); +bool XRInterface::is_primary() { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, false); - return arvr_server->get_primary_interface() == this; + return xr_server->get_primary_interface() == this; }; -void ARVRInterface::set_is_primary(bool p_is_primary) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void XRInterface::set_is_primary(bool p_is_primary) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); if (p_is_primary) { ERR_FAIL_COND(!is_initialized()); - arvr_server->set_primary_interface(this); + xr_server->set_primary_interface(this); } else { - arvr_server->clear_primary_interface_if(this); + xr_server->clear_primary_interface_if(this); }; }; -void ARVRInterface::set_is_initialized(bool p_initialized) { +void XRInterface::set_is_initialized(bool p_initialized) { if (p_initialized) { if (!is_initialized()) { initialize(); @@ -114,31 +114,31 @@ void ARVRInterface::set_is_initialized(bool p_initialized) { }; }; -ARVRInterface::Tracking_status ARVRInterface::get_tracking_status() const { +XRInterface::Tracking_status XRInterface::get_tracking_status() const { return tracking_state; }; -ARVRInterface::ARVRInterface() { - tracking_state = ARVR_UNKNOWN_TRACKING; +XRInterface::XRInterface() { + tracking_state = XR_UNKNOWN_TRACKING; }; -ARVRInterface::~ARVRInterface(){}; +XRInterface::~XRInterface(){}; // optional render to external texture which enhances performance on those platforms that require us to submit our end result into special textures. 
-unsigned int ARVRInterface::get_external_texture_for_eye(ARVRInterface::Eyes p_eye) { +unsigned int XRInterface::get_external_texture_for_eye(XRInterface::Eyes p_eye) { return 0; }; /** these will only be implemented on AR interfaces, so we want dummies for VR **/ -bool ARVRInterface::get_anchor_detection_is_enabled() const { +bool XRInterface::get_anchor_detection_is_enabled() const { return false; }; -void ARVRInterface::set_anchor_detection_is_enabled(bool p_enable){ +void XRInterface::set_anchor_detection_is_enabled(bool p_enable){ // don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc. }; -int ARVRInterface::get_camera_feed_id() { +int XRInterface::get_camera_feed_id() { // don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc. return 0; diff --git a/servers/arvr/arvr_interface.h b/servers/xr/xr_interface.h index 861061cbf5..99fcef7925 100644 --- a/servers/arvr/arvr_interface.h +++ b/servers/xr/xr_interface.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_interface.h */ +/* xr_interface.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,18 +28,18 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_INTERFACE_H -#define ARVR_INTERFACE_H +#ifndef XR_INTERFACE_H +#define XR_INTERFACE_H #include "core/math/camera_matrix.h" #include "core/os/thread_safe.h" #include "scene/main/window.h" -#include "servers/arvr_server.h" +#include "servers/xr_server.h" /** @author Bastiaan Olij <mux213@gmail.com> - The ARVR interface is a template class ontop of which we build interface to different AR, VR and tracking SDKs. + The XR interface is a template class ontop of which we build interface to different AR, VR and tracking SDKs. The idea is that we subclass this class, implement the logic, and then instantiate a singleton of each interface when Godot starts. These instances do not initialize themselves but register themselves with the AR/VR server. @@ -48,16 +48,16 @@ Note that we may make this into a fully instantiable class for GDNative support. 
*/ -class ARVRInterface : public Reference { - GDCLASS(ARVRInterface, Reference); +class XRInterface : public Reference { + GDCLASS(XRInterface, Reference); public: enum Capabilities { /* purely meta data, provides some info about what this interface supports */ - ARVR_NONE = 0, /* no capabilities */ - ARVR_MONO = 1, /* can be used with mono output */ - ARVR_STEREO = 2, /* can be used with stereo output */ - ARVR_AR = 4, /* offers a camera feed for AR */ - ARVR_EXTERNAL = 8 /* renders to external device */ + XR_NONE = 0, /* no capabilities */ + XR_MONO = 1, /* can be used with mono output */ + XR_STEREO = 2, /* can be used with stereo output */ + XR_AR = 4, /* offers a camera feed for AR */ + XR_EXTERNAL = 8 /* renders to external device */ }; enum Eyes { @@ -67,11 +67,11 @@ public: }; enum Tracking_status { /* tracking status currently based on AR but we can start doing more with this for VR as well */ - ARVR_NORMAL_TRACKING, - ARVR_EXCESSIVE_MOTION, - ARVR_INSUFFICIENT_FEATURES, - ARVR_UNKNOWN_TRACKING, - ARVR_NOT_TRACKING + XR_NORMAL_TRACKING, + XR_EXCESSIVE_MOTION, + XR_INSUFFICIENT_FEATURES, + XR_UNKNOWN_TRACKING, + XR_NOT_TRACKING }; protected: @@ -107,20 +107,20 @@ public: virtual Size2 get_render_targetsize() = 0; /* returns the recommended render target size per eye for this device */ virtual bool is_stereo() = 0; /* returns true if this interface requires stereo rendering (for VR HMDs) or mono rendering (for mobile AR) */ - virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eyes camera transform, also implement EYE_MONO */ - virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) = 0; /* get each eyes projection matrix */ - virtual unsigned int get_external_texture_for_eye(ARVRInterface::Eyes p_eye); /* if applicable return external texture to render to */ - virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */ + virtual Transform get_transform_for_eye(XRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eyes camera transform, also implement EYE_MONO */ + virtual CameraMatrix get_projection_for_eye(XRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) = 0; /* get each eyes projection matrix */ + virtual unsigned int get_external_texture_for_eye(XRInterface::Eyes p_eye); /* if applicable return external texture to render to */ + virtual void commit_for_eye(XRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */ virtual void process() = 0; virtual void notification(int p_what) = 0; - ARVRInterface(); - ~ARVRInterface(); + XRInterface(); + ~XRInterface(); }; -VARIANT_ENUM_CAST(ARVRInterface::Capabilities); -VARIANT_ENUM_CAST(ARVRInterface::Eyes); -VARIANT_ENUM_CAST(ARVRInterface::Tracking_status); +VARIANT_ENUM_CAST(XRInterface::Capabilities); +VARIANT_ENUM_CAST(XRInterface::Eyes); +VARIANT_ENUM_CAST(XRInterface::Tracking_status); #endif diff --git a/servers/arvr/arvr_positional_tracker.cpp b/servers/xr/xr_positional_tracker.cpp index dabeb7b86f..808b0a608f 100644 --- a/servers/arvr/arvr_positional_tracker.cpp +++ b/servers/xr/xr_positional_tracker.cpp @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_positional_tracker.cpp */ +/* xr_positional_tracker.cpp */ 
/*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,192 +28,192 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#include "arvr_positional_tracker.h" +#include "xr_positional_tracker.h" #include "core/input/input_filter.h" -void ARVRPositionalTracker::_bind_methods() { +void XRPositionalTracker::_bind_methods() { BIND_ENUM_CONSTANT(TRACKER_HAND_UNKNOWN); BIND_ENUM_CONSTANT(TRACKER_LEFT_HAND); BIND_ENUM_CONSTANT(TRACKER_RIGHT_HAND); // this class is read only from GDScript, so we only have access to getters.. - ClassDB::bind_method(D_METHOD("get_type"), &ARVRPositionalTracker::get_type); - ClassDB::bind_method(D_METHOD("get_tracker_id"), &ARVRPositionalTracker::get_tracker_id); - ClassDB::bind_method(D_METHOD("get_name"), &ARVRPositionalTracker::get_name); - ClassDB::bind_method(D_METHOD("get_joy_id"), &ARVRPositionalTracker::get_joy_id); - ClassDB::bind_method(D_METHOD("get_tracks_orientation"), &ARVRPositionalTracker::get_tracks_orientation); - ClassDB::bind_method(D_METHOD("get_orientation"), &ARVRPositionalTracker::get_orientation); - ClassDB::bind_method(D_METHOD("get_tracks_position"), &ARVRPositionalTracker::get_tracks_position); - ClassDB::bind_method(D_METHOD("get_position"), &ARVRPositionalTracker::get_position); - ClassDB::bind_method(D_METHOD("get_hand"), &ARVRPositionalTracker::get_hand); - ClassDB::bind_method(D_METHOD("get_transform", "adjust_by_reference_frame"), &ARVRPositionalTracker::get_transform); - ClassDB::bind_method(D_METHOD("get_mesh"), &ARVRPositionalTracker::get_mesh); + ClassDB::bind_method(D_METHOD("get_type"), &XRPositionalTracker::get_type); + ClassDB::bind_method(D_METHOD("get_tracker_id"), &XRPositionalTracker::get_tracker_id); + ClassDB::bind_method(D_METHOD("get_name"), &XRPositionalTracker::get_name); + ClassDB::bind_method(D_METHOD("get_joy_id"), &XRPositionalTracker::get_joy_id); + ClassDB::bind_method(D_METHOD("get_tracks_orientation"), &XRPositionalTracker::get_tracks_orientation); + ClassDB::bind_method(D_METHOD("get_orientation"), &XRPositionalTracker::get_orientation); + ClassDB::bind_method(D_METHOD("get_tracks_position"), &XRPositionalTracker::get_tracks_position); + ClassDB::bind_method(D_METHOD("get_position"), &XRPositionalTracker::get_position); + ClassDB::bind_method(D_METHOD("get_hand"), &XRPositionalTracker::get_hand); + ClassDB::bind_method(D_METHOD("get_transform", "adjust_by_reference_frame"), &XRPositionalTracker::get_transform); + ClassDB::bind_method(D_METHOD("get_mesh"), &XRPositionalTracker::get_mesh); // these functions we don't want to expose to normal users but do need to be callable from GDNative - ClassDB::bind_method(D_METHOD("_set_type", "type"), &ARVRPositionalTracker::set_type); - ClassDB::bind_method(D_METHOD("_set_name", "name"), &ARVRPositionalTracker::set_name); - ClassDB::bind_method(D_METHOD("_set_joy_id", "joy_id"), &ARVRPositionalTracker::set_joy_id); - ClassDB::bind_method(D_METHOD("_set_orientation", "orientation"), &ARVRPositionalTracker::set_orientation); - ClassDB::bind_method(D_METHOD("_set_rw_position", "rw_position"), &ARVRPositionalTracker::set_rw_position); - ClassDB::bind_method(D_METHOD("_set_mesh", "mesh"), &ARVRPositionalTracker::set_mesh); - ClassDB::bind_method(D_METHOD("get_rumble"), &ARVRPositionalTracker::get_rumble); - ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &ARVRPositionalTracker::set_rumble); + 
ClassDB::bind_method(D_METHOD("_set_type", "type"), &XRPositionalTracker::set_type); + ClassDB::bind_method(D_METHOD("_set_name", "name"), &XRPositionalTracker::set_name); + ClassDB::bind_method(D_METHOD("_set_joy_id", "joy_id"), &XRPositionalTracker::set_joy_id); + ClassDB::bind_method(D_METHOD("_set_orientation", "orientation"), &XRPositionalTracker::set_orientation); + ClassDB::bind_method(D_METHOD("_set_rw_position", "rw_position"), &XRPositionalTracker::set_rw_position); + ClassDB::bind_method(D_METHOD("_set_mesh", "mesh"), &XRPositionalTracker::set_mesh); + ClassDB::bind_method(D_METHOD("get_rumble"), &XRPositionalTracker::get_rumble); + ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &XRPositionalTracker::set_rumble); ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "rumble"), "set_rumble", "get_rumble"); }; -void ARVRPositionalTracker::set_type(ARVRServer::TrackerType p_type) { +void XRPositionalTracker::set_type(XRServer::TrackerType p_type) { if (type != p_type) { type = p_type; - hand = ARVRPositionalTracker::TRACKER_HAND_UNKNOWN; + hand = XRPositionalTracker::TRACKER_HAND_UNKNOWN; - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); // get a tracker id for our type // note if this is a controller this will be 3 or higher but we may change it later. - tracker_id = arvr_server->get_free_tracker_id_for_type(p_type); + tracker_id = xr_server->get_free_tracker_id_for_type(p_type); }; }; -ARVRServer::TrackerType ARVRPositionalTracker::get_type() const { +XRServer::TrackerType XRPositionalTracker::get_type() const { return type; }; -void ARVRPositionalTracker::set_name(const String &p_name) { +void XRPositionalTracker::set_name(const String &p_name) { name = p_name; }; -StringName ARVRPositionalTracker::get_name() const { +StringName XRPositionalTracker::get_name() const { return name; }; -int ARVRPositionalTracker::get_tracker_id() const { +int XRPositionalTracker::get_tracker_id() const { return tracker_id; }; -void ARVRPositionalTracker::set_joy_id(int p_joy_id) { +void XRPositionalTracker::set_joy_id(int p_joy_id) { joy_id = p_joy_id; }; -int ARVRPositionalTracker::get_joy_id() const { +int XRPositionalTracker::get_joy_id() const { return joy_id; }; -bool ARVRPositionalTracker::get_tracks_orientation() const { +bool XRPositionalTracker::get_tracks_orientation() const { return tracks_orientation; }; -void ARVRPositionalTracker::set_orientation(const Basis &p_orientation) { +void XRPositionalTracker::set_orientation(const Basis &p_orientation) { _THREAD_SAFE_METHOD_ tracks_orientation = true; // obviously we have this orientation = p_orientation; }; -Basis ARVRPositionalTracker::get_orientation() const { +Basis XRPositionalTracker::get_orientation() const { _THREAD_SAFE_METHOD_ return orientation; }; -bool ARVRPositionalTracker::get_tracks_position() const { +bool XRPositionalTracker::get_tracks_position() const { return tracks_position; }; -void ARVRPositionalTracker::set_position(const Vector3 &p_position) { +void XRPositionalTracker::set_position(const Vector3 &p_position) { _THREAD_SAFE_METHOD_ - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); - real_t world_scale = arvr_server->get_world_scale(); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); + real_t world_scale = xr_server->get_world_scale(); ERR_FAIL_COND(world_scale == 0); tracks_position = true; // obviously we have this rw_position = 
p_position / world_scale; }; -Vector3 ARVRPositionalTracker::get_position() const { +Vector3 XRPositionalTracker::get_position() const { _THREAD_SAFE_METHOD_ - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, rw_position); - real_t world_scale = arvr_server->get_world_scale(); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, rw_position); + real_t world_scale = xr_server->get_world_scale(); return rw_position * world_scale; }; -void ARVRPositionalTracker::set_rw_position(const Vector3 &p_rw_position) { +void XRPositionalTracker::set_rw_position(const Vector3 &p_rw_position) { _THREAD_SAFE_METHOD_ tracks_position = true; // obviously we have this rw_position = p_rw_position; }; -Vector3 ARVRPositionalTracker::get_rw_position() const { +Vector3 XRPositionalTracker::get_rw_position() const { _THREAD_SAFE_METHOD_ return rw_position; }; -void ARVRPositionalTracker::set_mesh(const Ref<Mesh> &p_mesh) { +void XRPositionalTracker::set_mesh(const Ref<Mesh> &p_mesh) { _THREAD_SAFE_METHOD_ mesh = p_mesh; }; -Ref<Mesh> ARVRPositionalTracker::get_mesh() const { +Ref<Mesh> XRPositionalTracker::get_mesh() const { _THREAD_SAFE_METHOD_ return mesh; }; -ARVRPositionalTracker::TrackerHand ARVRPositionalTracker::get_hand() const { +XRPositionalTracker::TrackerHand XRPositionalTracker::get_hand() const { return hand; }; -void ARVRPositionalTracker::set_hand(const ARVRPositionalTracker::TrackerHand p_hand) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL(arvr_server); +void XRPositionalTracker::set_hand(const XRPositionalTracker::TrackerHand p_hand) { + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL(xr_server); if (hand != p_hand) { // we can only set this if we've previously set this to be a controller!! 
- ERR_FAIL_COND((type != ARVRServer::TRACKER_CONTROLLER) && (p_hand != ARVRPositionalTracker::TRACKER_HAND_UNKNOWN)); + ERR_FAIL_COND((type != XRServer::TRACKER_CONTROLLER) && (p_hand != XRPositionalTracker::TRACKER_HAND_UNKNOWN)); hand = p_hand; - if (hand == ARVRPositionalTracker::TRACKER_LEFT_HAND) { - if (!arvr_server->is_tracker_id_in_use_for_type(type, 1)) { + if (hand == XRPositionalTracker::TRACKER_LEFT_HAND) { + if (!xr_server->is_tracker_id_in_use_for_type(type, 1)) { tracker_id = 1; }; - } else if (hand == ARVRPositionalTracker::TRACKER_RIGHT_HAND) { - if (!arvr_server->is_tracker_id_in_use_for_type(type, 2)) { + } else if (hand == XRPositionalTracker::TRACKER_RIGHT_HAND) { + if (!xr_server->is_tracker_id_in_use_for_type(type, 2)) { tracker_id = 2; }; }; }; }; -Transform ARVRPositionalTracker::get_transform(bool p_adjust_by_reference_frame) const { +Transform XRPositionalTracker::get_transform(bool p_adjust_by_reference_frame) const { Transform new_transform; new_transform.basis = get_orientation(); new_transform.origin = get_position(); if (p_adjust_by_reference_frame) { - ARVRServer *arvr_server = ARVRServer::get_singleton(); - ERR_FAIL_NULL_V(arvr_server, new_transform); + XRServer *xr_server = XRServer::get_singleton(); + ERR_FAIL_NULL_V(xr_server, new_transform); - new_transform = arvr_server->get_reference_frame() * new_transform; + new_transform = xr_server->get_reference_frame() * new_transform; }; return new_transform; }; -real_t ARVRPositionalTracker::get_rumble() const { +real_t XRPositionalTracker::get_rumble() const { return rumble; }; -void ARVRPositionalTracker::set_rumble(real_t p_rumble) { +void XRPositionalTracker::set_rumble(real_t p_rumble) { if (p_rumble > 0.0) { rumble = p_rumble; } else { @@ -221,8 +221,8 @@ void ARVRPositionalTracker::set_rumble(real_t p_rumble) { }; }; -ARVRPositionalTracker::ARVRPositionalTracker() { - type = ARVRServer::TRACKER_UNKNOWN; +XRPositionalTracker::XRPositionalTracker() { + type = XRServer::TRACKER_UNKNOWN; name = "Unknown"; joy_id = -1; tracker_id = 0; @@ -232,6 +232,6 @@ ARVRPositionalTracker::ARVRPositionalTracker() { rumble = 0.0; }; -ARVRPositionalTracker::~ARVRPositionalTracker(){ +XRPositionalTracker::~XRPositionalTracker(){ }; diff --git a/servers/arvr/arvr_positional_tracker.h b/servers/xr/xr_positional_tracker.h index 03c6b33ffe..d9d1f909e9 100644 --- a/servers/arvr/arvr_positional_tracker.h +++ b/servers/xr/xr_positional_tracker.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_positional_tracker.h */ +/* xr_positional_tracker.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,12 +28,12 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_POSITIONAL_TRACKER_H -#define ARVR_POSITIONAL_TRACKER_H +#ifndef XR_POSITIONAL_TRACKER_H +#define XR_POSITIONAL_TRACKER_H #include "core/os/thread_safe.h" #include "scene/resources/mesh.h" -#include "servers/arvr_server.h" +#include "servers/xr_server.h" /** @author Bastiaan Olij <mux213@gmail.com> @@ -43,8 +43,8 @@ This is where potentially additional AR/VR interfaces may be active as there are AR/VR SDKs that solely deal with positional tracking. 
*/ -class ARVRPositionalTracker : public Object { - GDCLASS(ARVRPositionalTracker, Object); +class XRPositionalTracker : public Object { + GDCLASS(XRPositionalTracker, Object); _THREAD_SAFE_CLASS_ public: @@ -55,7 +55,7 @@ public: }; private: - ARVRServer::TrackerType type; // type of tracker + XRServer::TrackerType type; // type of tracker StringName name; // (unique) name of the tracker int tracker_id; // tracker index id that is unique per type int joy_id; // if we also have a related joystick entity, the id of the joystick @@ -65,14 +65,14 @@ private: Vector3 rw_position; // our position "in the real world, so without world_scale applied" Ref<Mesh> mesh; // when available, a mesh that can be used to render this tracker TrackerHand hand; // if known, the hand this tracker is held in - real_t rumble; // rumble strength, 0.0 is off, 1.0 is maximum, note that we only record here, arvr_interface is responsible for execution + real_t rumble; // rumble strength, 0.0 is off, 1.0 is maximum, note that we only record here, xr_interface is responsible for execution protected: static void _bind_methods(); public: - void set_type(ARVRServer::TrackerType p_type); - ARVRServer::TrackerType get_type() const; + void set_type(XRServer::TrackerType p_type); + XRServer::TrackerType get_type() const; void set_name(const String &p_name); StringName get_name() const; int get_tracker_id() const; @@ -86,8 +86,8 @@ public: Vector3 get_position() const; // get position with world_scale applied void set_rw_position(const Vector3 &p_rw_position); Vector3 get_rw_position() const; - ARVRPositionalTracker::TrackerHand get_hand() const; - void set_hand(const ARVRPositionalTracker::TrackerHand p_hand); + XRPositionalTracker::TrackerHand get_hand() const; + void set_hand(const XRPositionalTracker::TrackerHand p_hand); real_t get_rumble() const; void set_rumble(real_t p_rumble); void set_mesh(const Ref<Mesh> &p_mesh); @@ -95,10 +95,10 @@ public: Transform get_transform(bool p_adjust_by_reference_frame) const; - ARVRPositionalTracker(); - ~ARVRPositionalTracker(); + XRPositionalTracker(); + ~XRPositionalTracker(); }; -VARIANT_ENUM_CAST(ARVRPositionalTracker::TrackerHand); +VARIANT_ENUM_CAST(XRPositionalTracker::TrackerHand); #endif diff --git a/servers/arvr_server.cpp b/servers/xr_server.cpp index f5597d8974..a93b99025f 100644 --- a/servers/arvr_server.cpp +++ b/servers/xr_server.cpp @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_server.cpp */ +/* xr_server.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,41 +28,41 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ -#include "arvr_server.h" -#include "arvr/arvr_interface.h" -#include "arvr/arvr_positional_tracker.h" +#include "xr_server.h" #include "core/project_settings.h" +#include "xr/xr_interface.h" +#include "xr/xr_positional_tracker.h" -ARVRServer *ARVRServer::singleton = nullptr; +XRServer *XRServer::singleton = nullptr; -ARVRServer *ARVRServer::get_singleton() { +XRServer *XRServer::get_singleton() { return singleton; }; -void ARVRServer::_bind_methods() { - ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVRServer::get_world_scale); - ClassDB::bind_method(D_METHOD("set_world_scale"), &ARVRServer::set_world_scale); - ClassDB::bind_method(D_METHOD("get_reference_frame"), &ARVRServer::get_reference_frame); - ClassDB::bind_method(D_METHOD("center_on_hmd", "rotation_mode", "keep_height"), &ARVRServer::center_on_hmd); - ClassDB::bind_method(D_METHOD("get_hmd_transform"), &ARVRServer::get_hmd_transform); +void XRServer::_bind_methods() { + ClassDB::bind_method(D_METHOD("get_world_scale"), &XRServer::get_world_scale); + ClassDB::bind_method(D_METHOD("set_world_scale"), &XRServer::set_world_scale); + ClassDB::bind_method(D_METHOD("get_reference_frame"), &XRServer::get_reference_frame); + ClassDB::bind_method(D_METHOD("center_on_hmd", "rotation_mode", "keep_height"), &XRServer::center_on_hmd); + ClassDB::bind_method(D_METHOD("get_hmd_transform"), &XRServer::get_hmd_transform); ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "world_scale"), "set_world_scale", "get_world_scale"); - ClassDB::bind_method(D_METHOD("get_interface_count"), &ARVRServer::get_interface_count); - ClassDB::bind_method(D_METHOD("get_interface", "idx"), &ARVRServer::get_interface); - ClassDB::bind_method(D_METHOD("get_interfaces"), &ARVRServer::get_interfaces); - ClassDB::bind_method(D_METHOD("find_interface", "name"), &ARVRServer::find_interface); - ClassDB::bind_method(D_METHOD("get_tracker_count"), &ARVRServer::get_tracker_count); - ClassDB::bind_method(D_METHOD("get_tracker", "idx"), &ARVRServer::get_tracker); + ClassDB::bind_method(D_METHOD("get_interface_count"), &XRServer::get_interface_count); + ClassDB::bind_method(D_METHOD("get_interface", "idx"), &XRServer::get_interface); + ClassDB::bind_method(D_METHOD("get_interfaces"), &XRServer::get_interfaces); + ClassDB::bind_method(D_METHOD("find_interface", "name"), &XRServer::find_interface); + ClassDB::bind_method(D_METHOD("get_tracker_count"), &XRServer::get_tracker_count); + ClassDB::bind_method(D_METHOD("get_tracker", "idx"), &XRServer::get_tracker); - ClassDB::bind_method(D_METHOD("get_primary_interface"), &ARVRServer::get_primary_interface); - ClassDB::bind_method(D_METHOD("set_primary_interface", "interface"), &ARVRServer::set_primary_interface); + ClassDB::bind_method(D_METHOD("get_primary_interface"), &XRServer::get_primary_interface); + ClassDB::bind_method(D_METHOD("set_primary_interface", "interface"), &XRServer::set_primary_interface); ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "primary_interface"), "set_primary_interface", "get_primary_interface"); - ClassDB::bind_method(D_METHOD("get_last_process_usec"), &ARVRServer::get_last_process_usec); - ClassDB::bind_method(D_METHOD("get_last_commit_usec"), &ARVRServer::get_last_commit_usec); - ClassDB::bind_method(D_METHOD("get_last_frame_usec"), &ARVRServer::get_last_frame_usec); + ClassDB::bind_method(D_METHOD("get_last_process_usec"), &XRServer::get_last_process_usec); + ClassDB::bind_method(D_METHOD("get_last_commit_usec"), 
&XRServer::get_last_commit_usec); + ClassDB::bind_method(D_METHOD("get_last_frame_usec"), &XRServer::get_last_frame_usec); BIND_ENUM_CONSTANT(TRACKER_CONTROLLER); BIND_ENUM_CONSTANT(TRACKER_BASESTATION); @@ -82,11 +82,11 @@ void ARVRServer::_bind_methods() { ADD_SIGNAL(MethodInfo("tracker_removed", PropertyInfo(Variant::STRING_NAME, "tracker_name"), PropertyInfo(Variant::INT, "type"), PropertyInfo(Variant::INT, "id"))); }; -real_t ARVRServer::get_world_scale() const { +real_t XRServer::get_world_scale() const { return world_scale; }; -void ARVRServer::set_world_scale(real_t p_world_scale) { +void XRServer::set_world_scale(real_t p_world_scale) { if (p_world_scale < 0.01) { p_world_scale = 0.01; } else if (p_world_scale > 1000.0) { @@ -96,25 +96,25 @@ void ARVRServer::set_world_scale(real_t p_world_scale) { world_scale = p_world_scale; }; -Transform ARVRServer::get_world_origin() const { +Transform XRServer::get_world_origin() const { return world_origin; }; -void ARVRServer::set_world_origin(const Transform &p_world_origin) { +void XRServer::set_world_origin(const Transform &p_world_origin) { world_origin = p_world_origin; }; -Transform ARVRServer::get_reference_frame() const { +Transform XRServer::get_reference_frame() const { return reference_frame; }; -void ARVRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) { +void XRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) { if (primary_interface != nullptr) { // clear our current reference frame or we'll end up double adjusting it reference_frame = Transform(); // requesting our EYE_MONO transform should return our current HMD position - Transform new_reference_frame = primary_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, Transform()); + Transform new_reference_frame = primary_interface->get_transform_for_eye(XRInterface::EYE_MONO, Transform()); // remove our tilt if (p_rotation_mode == 1) { @@ -140,15 +140,15 @@ void ARVRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) }; }; -Transform ARVRServer::get_hmd_transform() { +Transform XRServer::get_hmd_transform() { Transform hmd_transform; if (primary_interface != nullptr) { - hmd_transform = primary_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, hmd_transform); + hmd_transform = primary_interface->get_transform_for_eye(XRInterface::EYE_MONO, hmd_transform); }; return hmd_transform; }; -void ARVRServer::add_interface(const Ref<ARVRInterface> &p_interface) { +void XRServer::add_interface(const Ref<XRInterface> &p_interface) { ERR_FAIL_COND(p_interface.is_null()); for (int i = 0; i < interfaces.size(); i++) { @@ -163,7 +163,7 @@ void ARVRServer::add_interface(const Ref<ARVRInterface> &p_interface) { emit_signal("interface_added", p_interface->get_name()); }; -void ARVRServer::remove_interface(const Ref<ARVRInterface> &p_interface) { +void XRServer::remove_interface(const Ref<XRInterface> &p_interface) { ERR_FAIL_COND(p_interface.is_null()); int idx = -1; @@ -178,23 +178,23 @@ void ARVRServer::remove_interface(const Ref<ARVRInterface> &p_interface) { ERR_FAIL_COND(idx == -1); - print_verbose("ARVR: Removed interface" + p_interface->get_name()); + print_verbose("XR: Removed interface" + p_interface->get_name()); emit_signal("interface_removed", p_interface->get_name()); interfaces.remove(idx); }; -int ARVRServer::get_interface_count() const { +int XRServer::get_interface_count() const { return interfaces.size(); }; -Ref<ARVRInterface> ARVRServer::get_interface(int p_index) const { +Ref<XRInterface> 
XRServer::get_interface(int p_index) const { ERR_FAIL_INDEX_V(p_index, interfaces.size(), nullptr); return interfaces[p_index]; }; -Ref<ARVRInterface> ARVRServer::find_interface(const String &p_name) const { +Ref<XRInterface> XRServer::find_interface(const String &p_name) const { int idx = -1; for (int i = 0; i < interfaces.size(); i++) { @@ -210,7 +210,7 @@ Ref<ARVRInterface> ARVRServer::find_interface(const String &p_name) const { return interfaces[idx]; }; -Array ARVRServer::get_interfaces() const { +Array XRServer::get_interfaces() const { Array ret; for (int i = 0; i < interfaces.size(); i++) { @@ -238,7 +238,7 @@ Array ARVRServer::get_interfaces() const { - using this approach the shield disappears or is no longer tracked, but the gun stays firmly in your right hand because that is still controller 2, further more, if controller 1 is replaced the shield will return. */ -bool ARVRServer::is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const { +bool XRServer::is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const { for (int i = 0; i < trackers.size(); i++) { if (trackers[i]->get_type() == p_tracker_type && trackers[i]->get_tracker_id() == p_tracker_id) { return true; @@ -249,13 +249,13 @@ bool ARVRServer::is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p return false; }; -int ARVRServer::get_free_tracker_id_for_type(TrackerType p_tracker_type) { +int XRServer::get_free_tracker_id_for_type(TrackerType p_tracker_type) { // We start checking at 1, 0 means that it's not a controller.. // Note that for controller we reserve: // - 1 for the left hand controller and // - 2 for the right hand controller // so we start at 3 :) - int tracker_id = p_tracker_type == ARVRServer::TRACKER_CONTROLLER ? 3 : 1; + int tracker_id = p_tracker_type == XRServer::TRACKER_CONTROLLER ? 
3 : 1; while (is_tracker_id_in_use_for_type(p_tracker_type, tracker_id)) { // try the next one @@ -265,14 +265,14 @@ int ARVRServer::get_free_tracker_id_for_type(TrackerType p_tracker_type) { return tracker_id; }; -void ARVRServer::add_tracker(ARVRPositionalTracker *p_tracker) { +void XRServer::add_tracker(XRPositionalTracker *p_tracker) { ERR_FAIL_NULL(p_tracker); trackers.push_back(p_tracker); emit_signal("tracker_added", p_tracker->get_name(), p_tracker->get_type(), p_tracker->get_tracker_id()); }; -void ARVRServer::remove_tracker(ARVRPositionalTracker *p_tracker) { +void XRServer::remove_tracker(XRPositionalTracker *p_tracker) { ERR_FAIL_NULL(p_tracker); int idx = -1; @@ -291,17 +291,17 @@ void ARVRServer::remove_tracker(ARVRPositionalTracker *p_tracker) { trackers.remove(idx); }; -int ARVRServer::get_tracker_count() const { +int XRServer::get_tracker_count() const { return trackers.size(); }; -ARVRPositionalTracker *ARVRServer::get_tracker(int p_index) const { +XRPositionalTracker *XRServer::get_tracker(int p_index) const { ERR_FAIL_INDEX_V(p_index, trackers.size(), nullptr); return trackers[p_index]; }; -ARVRPositionalTracker *ARVRServer::find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const { +XRPositionalTracker *XRServer::find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const { ERR_FAIL_COND_V(p_tracker_id == 0, nullptr); for (int i = 0; i < trackers.size(); i++) { @@ -313,36 +313,36 @@ ARVRPositionalTracker *ARVRServer::find_by_type_and_id(TrackerType p_tracker_typ return nullptr; }; -Ref<ARVRInterface> ARVRServer::get_primary_interface() const { +Ref<XRInterface> XRServer::get_primary_interface() const { return primary_interface; }; -void ARVRServer::set_primary_interface(const Ref<ARVRInterface> &p_primary_interface) { +void XRServer::set_primary_interface(const Ref<XRInterface> &p_primary_interface) { primary_interface = p_primary_interface; - print_verbose("ARVR: Primary interface set to: " + primary_interface->get_name()); + print_verbose("XR: Primary interface set to: " + primary_interface->get_name()); }; -void ARVRServer::clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface) { +void XRServer::clear_primary_interface_if(const Ref<XRInterface> &p_primary_interface) { if (primary_interface == p_primary_interface) { - print_verbose("ARVR: Clearing primary interface"); + print_verbose("XR: Clearing primary interface"); primary_interface.unref(); }; }; -uint64_t ARVRServer::get_last_process_usec() { +uint64_t XRServer::get_last_process_usec() { return last_process_usec; }; -uint64_t ARVRServer::get_last_commit_usec() { +uint64_t XRServer::get_last_commit_usec() { return last_commit_usec; }; -uint64_t ARVRServer::get_last_frame_usec() { +uint64_t XRServer::get_last_frame_usec() { return last_frame_usec; }; -void ARVRServer::_process() { +void XRServer::_process() { /* called from rendering_server_viewport.draw_viewports right before we start drawing our viewports */ /* mark for our frame timing */ @@ -358,7 +358,7 @@ void ARVRServer::_process() { }; }; -void ARVRServer::_mark_commit() { +void XRServer::_mark_commit() { /* time this */ last_commit_usec = OS::get_singleton()->get_ticks_usec(); @@ -366,12 +366,12 @@ void ARVRServer::_mark_commit() { last_frame_usec = last_commit_usec - last_process_usec; }; -ARVRServer::ARVRServer() { +XRServer::XRServer() { singleton = this; world_scale = 1.0; }; -ARVRServer::~ARVRServer() { +XRServer::~XRServer() { primary_interface.unref(); while (interfaces.size() > 0) { diff --git 
a/servers/arvr_server.h b/servers/xr_server.h index ab2f0d721b..e04c7b3592 100644 --- a/servers/arvr_server.h +++ b/servers/xr_server.h @@ -1,5 +1,5 @@ /*************************************************************************/ -/* arvr_server.h */ +/* xr_server.h */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ @@ -28,8 +28,8 @@ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ /*************************************************************************/ -#ifndef ARVR_SERVER_H -#define ARVR_SERVER_H +#ifndef XR_SERVER_H +#define XR_SERVER_H #include "core/os/os.h" #include "core/os/thread_safe.h" @@ -37,16 +37,16 @@ #include "core/rid.h" #include "core/variant.h" -class ARVRInterface; -class ARVRPositionalTracker; +class XRInterface; +class XRPositionalTracker; /** @author Bastiaan Olij <mux213@gmail.com> - The ARVR server is a singleton object that gives access to the various + The XR server is a singleton object that gives access to the various objects and SDKs that are available on the system. Because there can be multiple SDKs active this is exposed as an array - and our ARVR server object acts as a pass through + and our XR server object acts as a pass through Also each positioning tracker is accessible from here. I've added some additional info into this header file that should move @@ -54,8 +54,8 @@ class ARVRPositionalTracker; or as a separate PR once this has been merged into the master branch. **/ -class ARVRServer : public Object { - GDCLASS(ARVRServer, Object); +class XRServer : public Object { + GDCLASS(XRServer, Object); _THREAD_SAFE_CLASS_ public: @@ -76,10 +76,10 @@ public: }; private: - Vector<Ref<ARVRInterface>> interfaces; - Vector<ARVRPositionalTracker *> trackers; + Vector<Ref<XRInterface>> interfaces; + Vector<XRPositionalTracker *> trackers; - Ref<ARVRInterface> primary_interface; /* we'll identify one interface as primary, this will be used by our viewports */ + Ref<XRInterface> primary_interface; /* we'll identify one interface as primary, this will be used by our viewports */ real_t world_scale; /* scale by which we multiply our tracker positions */ Transform world_origin; /* our world origin point, maps a location in our virtual world to the origin point in our real world tracking volume */ @@ -90,12 +90,12 @@ private: uint64_t last_frame_usec; /* time it took between process and committing, we should probably average this over the last x frames */ protected: - static ARVRServer *singleton; + static XRServer *singleton; static void _bind_methods(); public: - static ARVRServer *get_singleton(); + static XRServer *get_singleton(); /* World scale allows you to specify a scale factor that is applied to all positioning vectors in our VR world in essence scaling up, or scaling down the world. @@ -105,7 +105,7 @@ public: Most VR platforms, and our assumption, is that 1 unit in our virtual world equates to 1 meter in the real mode. This scale basically effects the unit size relationship to real world size. - I may remove access to this property in GDScript in favour of exposing it on the ARVROrigin node + I may remove access to this property in GDScript in favour of exposing it on the XROrigin3D node */ real_t get_world_scale() const; void set_world_scale(real_t p_world_scale); @@ -116,7 +116,7 @@ public: actions be it straffing, teleporting, etc. Movement of the player by moving through the physical space is always tracked in relation to this point. 
- Note that the ARVROrigin spatial node in your scene automatically updates this property and it should be used instead of + Note that the XROrigin3D spatial node in your scene automatically updates this property and it should be used instead of direct access to this property and it therefore is not available in GDScript Note: this should not be used in AR and should be ignored by an AR based interface as it would throw what you're looking at in the real world @@ -146,20 +146,20 @@ public: /* Interfaces are objects that 'glue' Godot to an AR or VR SDK such as the Oculus SDK, OpenVR, OpenHMD, etc. */ - void add_interface(const Ref<ARVRInterface> &p_interface); - void remove_interface(const Ref<ARVRInterface> &p_interface); + void add_interface(const Ref<XRInterface> &p_interface); + void remove_interface(const Ref<XRInterface> &p_interface); int get_interface_count() const; - Ref<ARVRInterface> get_interface(int p_index) const; - Ref<ARVRInterface> find_interface(const String &p_name) const; + Ref<XRInterface> get_interface(int p_index) const; + Ref<XRInterface> find_interface(const String &p_name) const; Array get_interfaces() const; /* note, more then one interface can technically be active, especially on mobile, but only one interface is used for rendering. This interface identifies itself by calling set_primary_interface when it is initialized */ - Ref<ARVRInterface> get_primary_interface() const; - void set_primary_interface(const Ref<ARVRInterface> &p_primary_interface); - void clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface); /* this is automatically called if an interface destructs */ + Ref<XRInterface> get_primary_interface() const; + void set_primary_interface(const Ref<XRInterface> &p_primary_interface); + void clear_primary_interface_if(const Ref<XRInterface> &p_primary_interface); /* this is automatically called if an interface destructs */ /* Our trackers are objects that expose the orientation and position of physical devices such as controller, anchor points, etc. 
@@ -167,11 +167,11 @@ public: */ bool is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const; int get_free_tracker_id_for_type(TrackerType p_tracker_type); - void add_tracker(ARVRPositionalTracker *p_tracker); - void remove_tracker(ARVRPositionalTracker *p_tracker); + void add_tracker(XRPositionalTracker *p_tracker); + void remove_tracker(XRPositionalTracker *p_tracker); int get_tracker_count() const; - ARVRPositionalTracker *get_tracker(int p_index) const; - ARVRPositionalTracker *find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const; + XRPositionalTracker *get_tracker(int p_index) const; + XRPositionalTracker *find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const; uint64_t get_last_process_usec(); uint64_t get_last_commit_usec(); @@ -180,13 +180,13 @@ public: void _process(); void _mark_commit(); - ARVRServer(); - ~ARVRServer(); + XRServer(); + ~XRServer(); }; -#define ARVR ARVRServer +#define XR XRServer -VARIANT_ENUM_CAST(ARVRServer::TrackerType); -VARIANT_ENUM_CAST(ARVRServer::RotationMode); +VARIANT_ENUM_CAST(XRServer::TrackerType); +VARIANT_ENUM_CAST(XRServer::RotationMode); #endif diff --git a/thirdparty/vulkan/android/vk_mem_alloc.cpp b/thirdparty/vulkan/android/vk_mem_alloc.cpp new file mode 100644 index 0000000000..a28454cf6e --- /dev/null +++ b/thirdparty/vulkan/android/vk_mem_alloc.cpp @@ -0,0 +1,8 @@ +#define VMA_IMPLEMENTATION +#ifdef DEBUG_ENABLED +#ifndef _DEBUG +#define _DEBUG +#endif +#endif +// Include memory allocator from Android NDK +#include <vk_mem_alloc.h> |
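
A minimal standalone sketch of the directional soft-shadow frustum expansion introduced in the rendering_server_scene.cpp hunk above. This is plain C++ with made-up sample values, not Godot code: the new LIGHT_PARAM_SIZE angle is turned into an expansion distance with tan(angle) * depth range, added symmetrically to the orthographic extents, and the per-split uv_scale is the reciprocal of the resulting extent (the values later stored in the new uv_scale1..4 fields of DirectionalLightData). All numeric inputs below are hypothetical.

// Standalone sketch mirroring the soft-shadow expansion added to
// _light_instance_update_shadow(). Plain C++ with sample values; the real
// code uses Godot's math types (Vector2/Vector3, Math::tan, Math::deg2rad).
#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;

    // Hypothetical per-split quantities projected onto the light's axes.
    double radius = 12.0;        // bounding radius of the split, world units
    double center_x = 3.0;       // x_vec.dot(center)
    double center_y = -1.0;      // y_vec.dot(center)
    double center_z = 25.0;      // z_vec.dot(center)
    double pancake_size = 10.0;  // shadow pancaking distance
    double z_min_cam = center_z - radius;

    // New in this change: the light's angular size in degrees (LIGHT_PARAM_SIZE).
    double soft_shadow_angle_deg = 1.5;

    double soft_shadow_expand = 0.0;
    if (soft_shadow_angle_deg > 0.0 && pancake_size > 0.0) {
        // A deeper shadow frustum lets the penumbra spread further sideways,
        // so expand by tan(angle) * z_range, as in the diff.
        double z_range = (center_z + radius + pancake_size) - z_min_cam;
        soft_shadow_expand = std::tan(soft_shadow_angle_deg * kPi / 180.0) * z_range;
    }

    // Expanded orthographic extents, mirroring x_max_cam / x_min_cam / etc.
    double x_max_cam = center_x + radius + soft_shadow_expand;
    double x_min_cam = center_x - radius - soft_shadow_expand;
    double y_max_cam = center_y + radius + soft_shadow_expand;
    double y_min_cam = center_y - radius - soft_shadow_expand;

    // uv_scale passed to light_instance_set_shadow_transform(): the reciprocal
    // of the expanded extent on each axis.
    double uv_scale_x = 1.0 / (x_max_cam - x_min_cam);
    double uv_scale_y = 1.0 / (y_max_cam - y_min_cam);

    std::printf("expand = %.3f, x extents [%.3f, %.3f], uv_scale = (%.5f, %.5f)\n",
                soft_shadow_expand, x_min_cam, x_max_cam, uv_scale_x, uv_scale_y);
    return 0;
}

With a zero angle the expansion collapses to zero and the extents reduce to the previous center +/- radius behavior, which is why existing hard-shadow setups are unaffected by the new parameter.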