<?xml version="1.0" encoding="UTF-8" ?>
<class name="XRInterface" inherits="RefCounted" version="4.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="../class.xsd">
<brief_description>
Base class for an XR interface implementation.
</brief_description>
<description>
This class needs to be implemented to make an AR or VR platform available to Godot; such implementations should be provided as C++ modules or GDExtension modules. Part of the interface is exposed to GDScript so you can detect, enable and configure an AR or VR platform.
Interfaces should be written in such a way that simply enabling them gives you a working setup. You can query the available interfaces through [XRServer].
</description>
<tutorials>
<link title="XR documentation index">$DOCS_URL/tutorials/xr/index.html</link>
</tutorials>
<methods>
<method name="get_camera_feed_id">
<return type="int" />
<description>
If this is an AR interface that requires displaying a camera feed as the background, this method returns the feed ID in the [CameraServer] for this interface.
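For example, you could look up the matching [CameraFeed] through the [CameraServer]. This is a sketch only; it assumes [code]xr_interface[/code] holds an initialized AR interface that actually provides a camera feed:
[codeblock]
# Find the CameraFeed that belongs to this AR interface, if any.
var feed_id = xr_interface.get_camera_feed_id()
for feed in CameraServer.feeds():
    if feed.get_id() == feed_id:
        print("Camera feed for this interface: ", feed.get_name())
[/codeblock]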
</description>
</method>
<method name="get_capabilities" qualifiers="const">
<return type="int" />
<description>
Returns a combination of [enum Capabilities] flags providing information about the capabilities of this interface.
</description>
</method>
<method name="get_name" qualifiers="const">
<return type="StringName" />
<description>
Returns the name of this interface (OpenXR, OpenVR, OpenHMD, ARKit, etc.).
</description>
</method>
<method name="get_play_area" qualifiers="const">
<return type="PackedVector3Array" />
<description>
Returns an array of vectors that denotes the physical play area mapped to the virtual space around the [XROrigin3D] point. The points form a convex polygon that can be used to react to or visualize the play area. This returns an empty array if this feature is not supported or if the information is not yet available.
</description>
</method>
<method name="get_projection_for_view">
<return type="Projection" />
<param index="0" name="view" type="int" />
<param index="1" name="aspect" type="float" />
<param index="2" name="near" type="float" />
<param index="3" name="far" type="float" />
<description>
Returns the projection matrix for a view/eye.
</description>
</method>
<method name="get_render_target_size">
<return type="Vector2" />
<description>
Returns the resolution at which we should render our intermediate results before things like lens distortion are applied by the VR platform.
</description>
</method>
<method name="get_supported_environment_blend_modes">
<return type="Array" />
<description>
Returns an array of supported environment blend modes, see [enum XRInterface.EnvironmentBlendMode].
</description>
</method>
<method name="get_tracking_status" qualifiers="const">
<return type="int" enum="XRInterface.TrackingStatus" />
<description>
If supported, returns the status of our tracking. This allows you to provide feedback to the user on whether there are issues with positional tracking.
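For example, you could poll the status every frame and warn the player when tracking degrades. This is a sketch only; it assumes [code]xr_interface[/code] holds the active interface:
[codeblock]
func _process(_delta):
    # Warn the player whenever positional tracking is not behaving as expected.
    var status = xr_interface.get_tracking_status()
    if status == XRInterface.XR_EXCESSIVE_MOTION:
        print("Please slow down, tracking can't keep up.")
    elif status == XRInterface.XR_INSUFFICIENT_FEATURES:
        print("Tracking is degraded, the environment may be too dark or featureless.")
[/codeblock]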
</description>
</method>
<method name="get_transform_for_view">
<return type="Transform3D" />
<param index="0" name="view" type="int" />
<param index="1" name="cam_transform" type="Transform3D" />
<description>
Returns the transform for a view/eye.
[param view] is the view/eye index.
[param cam_transform] is the transform that maps device coordinates to scene coordinates, typically the [member Node3D.global_transform] of the current [XROrigin3D].
</description>
</method>
<method name="get_view_count">
<return type="int" />
<description>
Returns the number of views that need to be rendered for this device. 1 for Monoscopic, 2 for Stereoscopic.
</description>
</method>
<method name="initialize">
<return type="bool" />
<description>
Call this to initialize this interface. The first interface that is initialized is identified as the primary interface and it will be used for rendering output.
After initializing the interface you want to use, you then need to enable the AR/VR mode of a viewport and rendering should commence.
[b]Note:[/b] You must enable the XR mode on the main viewport for any device that uses the main output of Godot, such as for mobile VR.
If you do this for a platform that handles its own output (such as OpenVR), Godot will show just one eye without distortion on screen. Alternatively, you can add a separate viewport node to your scene and enable AR/VR on that viewport. It will be used to output to the HMD, leaving you free to do anything you like in the main window, such as using a separate camera as a spectator camera or rendering something completely different.
While currently not used, you can activate additional interfaces. You may wish to do this if you want to track controllers from other platforms. However, at this point in time only one interface can render to an HMD.
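A typical setup could look like the sketch below. The interface name [code]"OpenXR"[/code] is only an example; use whichever interface your project targets:
[codeblock]
func _ready():
    var xr_interface: XRInterface = XRServer.find_interface("OpenXR")
    if xr_interface and xr_interface.initialize():
        # The first interface that initializes becomes the primary interface.
        # Enable XR on the viewport that should output to the device.
        get_viewport().use_xr = true
    else:
        print("Failed to initialize the XR interface.")
[/codeblock]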
</description>
</method>
<method name="is_initialized" qualifiers="const">
<return type="bool" />
<description>
Returns [code]true[/code] if this interface has been initialized.
</description>
</method>
<method name="is_passthrough_enabled">
<return type="bool" />
<description>
Returns [code]true[/code] if passthrough is enabled.
</description>
</method>
<method name="is_passthrough_supported">
<return type="bool" />
<description>
Returns [code]true[/code] if this interface supports passthrough.
</description>
</method>
<method name="set_environment_blend_mode">
<return type="bool" />
<param index="0" name="mode" type="int" enum="XRInterface.EnvironmentBlendMode" />
<description>
Sets the active environment blend mode.
[param mode] is the [enum XRInterface.EnvironmentBlendMode] that will be applied, starting with the next frame.
[b]Note:[/b] Not all runtimes support all environment blend modes, so it is important to check this at startup. For example:
[codeblock]
func _ready():
    var xr_interface: XRInterface = XRServer.find_interface("OpenXR")
    if xr_interface and xr_interface.is_initialized():
        var vp: Viewport = get_viewport()
        vp.use_xr = true
        var acceptable_modes = [XRInterface.XR_ENV_BLEND_MODE_OPAQUE, XRInterface.XR_ENV_BLEND_MODE_ADDITIVE]
        var modes = xr_interface.get_supported_environment_blend_modes()
        for mode in acceptable_modes:
            if mode in modes:
                xr_interface.set_environment_blend_mode(mode)
                break
[/codeblock]
</description>
</method>
<method name="set_play_area_mode">
<return type="bool" />
<param index="0" name="mode" type="int" enum="XRInterface.PlayAreaMode" />
<description>
Sets the active play area mode. Returns [code]false[/code] if the mode can't be used with this interface.
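For example, you could request room scale tracking and fall back to a seated setup if it isn't available. This is a sketch only; it assumes [code]xr_interface[/code] holds your initialized interface:
[codeblock]
# Prefer room scale, fall back to a seated play area if unsupported.
if xr_interface.supports_play_area_mode(XRInterface.XR_PLAY_AREA_ROOMSCALE):
    xr_interface.set_play_area_mode(XRInterface.XR_PLAY_AREA_ROOMSCALE)
elif xr_interface.supports_play_area_mode(XRInterface.XR_PLAY_AREA_SITTING):
    xr_interface.set_play_area_mode(XRInterface.XR_PLAY_AREA_SITTING)
[/codeblock]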
</description>
</method>
<method name="start_passthrough">
<return type="bool" />
<description>
Starts passthrough. Returns [code]false[/code] if passthrough couldn't be started.
[b]Note:[/b] The viewport used for XR must have a transparent background, otherwise passthrough may not properly render.
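A minimal sketch for enabling passthrough, assuming [code]xr_interface[/code] holds your initialized interface and XR is already enabled on the viewport:
[codeblock]
if xr_interface.is_passthrough_supported():
    # Passthrough needs a transparent background to show through.
    get_viewport().transparent_bg = true
    if not xr_interface.start_passthrough():
        print("Unable to start passthrough.")
[/codeblock]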
</description>
</method>
<method name="stop_passthrough">
<return type="void" />
<description>
Stops passthrough.
</description>
</method>
<method name="supports_play_area_mode">
<return type="bool" />
<param index="0" name="mode" type="int" enum="XRInterface.PlayAreaMode" />
<description>
Call this to find out if a given play area mode is supported by this interface.
</description>
</method>
<method name="trigger_haptic_pulse">
<return type="void" />
<param index="0" name="action_name" type="String" />
<param index="1" name="tracker_name" type="StringName" />
<param index="2" name="frequency" type="float" />
<param index="3" name="amplitude" type="float" />
<param index="4" name="duration_sec" type="float" />
<param index="5" name="delay_sec" type="float" />
<description>
Triggers a haptic pulse on a device associated with this interface.
[param action_name] is the name of the action for this pulse.
[param tracker_name] is optional and can be used to direct the pulse to a specific device provided that device is bound to this haptic.
[param frequency] is the frequency of the pulse; set this to [code]0.0[/code] to have the system use a default frequency.
[param amplitude] is the strength of the pulse, between [code]0.0[/code] and [code]1.0[/code].
[param duration_sec] is the duration of the pulse in seconds.
[param delay_sec] is a delay in seconds before the pulse is given.
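For example, with the default OpenXR action map, which exposes a [code]haptic[/code] output action and trackers named [code]left_hand[/code] and [code]right_hand[/code] (both of these names depend on your action map and runtime):
[codeblock]
# Vibrate the left controller for 0.1 seconds at half strength,
# using the runtime's default frequency (0.0) and no delay.
xr_interface.trigger_haptic_pulse("haptic", "left_hand", 0.0, 0.5, 0.1, 0.0)
[/codeblock]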
</description>
</method>
<method name="uninitialize">
<return type="void" />
<description>
Turns the interface off.
</description>
</method>
</methods>
<members>
<member name="ar_is_anchor_detection_enabled" type="bool" setter="set_anchor_detection_is_enabled" getter="get_anchor_detection_is_enabled" default="false">
On an AR interface, [code]true[/code] if anchor detection is enabled.
</member>
<member name="interface_is_primary" type="bool" setter="set_primary" getter="is_primary" default="false">
[code]true[/code] if this is the primary interface.
</member>
<member name="xr_play_area_mode" type="int" setter="set_play_area_mode" getter="get_play_area_mode" enum="XRInterface.PlayAreaMode" default="0">
The play area mode for this interface.
</member>
</members>
<signals>
<signal name="play_area_changed">
<param index="0" name="mode" type="int" />
<description>
Emitted when the play area is changed. This can be a result of the player resetting the boundary or entering a new play area, the player changing the play area mode, the world scale changing or the player resetting their headset orientation.
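For example, you could connect to this signal to refresh a visualization of the boundary. This is a sketch only; it assumes [code]xr_interface[/code] holds the active interface and [code]_update_boundary[/code] is your own drawing routine:
[codeblock]
func _ready():
    xr_interface.play_area_changed.connect(_on_play_area_changed)

func _on_play_area_changed(_mode):
    var boundary: PackedVector3Array = xr_interface.get_play_area()
    if not boundary.is_empty():
        _update_boundary(boundary) # Hypothetical helper that redraws the boundary.
[/codeblock]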
</description>
</signal>
</signals>
<constants>
<constant name="XR_NONE" value="0" enum="Capabilities">
No XR capabilities.
</constant>
<constant name="XR_MONO" value="1" enum="Capabilities">
This interface can work with normal rendering output (non-HMD based AR).
</constant>
<constant name="XR_STEREO" value="2" enum="Capabilities">
This interface supports stereoscopic rendering.
</constant>
<constant name="XR_QUAD" value="4" enum="Capabilities">
This interface supports quad rendering (not yet supported by Godot).
</constant>
<constant name="XR_VR" value="8" enum="Capabilities">
This interface supports VR.
</constant>
<constant name="XR_AR" value="16" enum="Capabilities">
This interface supports AR (video background and real world tracking).
</constant>
<constant name="XR_EXTERNAL" value="32" enum="Capabilities">
This interface outputs to an external device. If the main viewport is used, the on screen output is an unmodified buffer of either the left or right eye (stretched if the viewport size is not changed to the same aspect ratio of [method get_render_target_size]). Using a separate viewport node frees up the main viewport for other purposes.
</constant>
<constant name="XR_NORMAL_TRACKING" value="0" enum="TrackingStatus">
Tracking is behaving as expected.
</constant>
<constant name="XR_EXCESSIVE_MOTION" value="1" enum="TrackingStatus">
Tracking is hindered by excessive motion (the player is moving faster than tracking can keep up).
</constant>
<constant name="XR_INSUFFICIENT_FEATURES" value="2" enum="TrackingStatus">
Tracking is hindered by insufficient features; it's too dark (for camera-based tracking), the player is blocked, etc.
</constant>
<constant name="XR_UNKNOWN_TRACKING" value="3" enum="TrackingStatus">
We don't know the status of the tracking or this interface does not provide feedback.
</constant>
<constant name="XR_NOT_TRACKING" value="4" enum="TrackingStatus">
Tracking is not functional (camera not plugged in or obscured, lighthouses turned off, etc.).
</constant>
<constant name="XR_PLAY_AREA_UNKNOWN" value="0" enum="PlayAreaMode">
Play area mode not set or not available.
</constant>
<constant name="XR_PLAY_AREA_3DOF" value="1" enum="PlayAreaMode">
Play area only supports orientation tracking, no positional tracking, area will center around player.
</constant>
<constant name="XR_PLAY_AREA_SITTING" value="2" enum="PlayAreaMode">
Player is in seated position, limited positional tracking, fixed guardian around player.
</constant>
<constant name="XR_PLAY_AREA_ROOMSCALE" value="3" enum="PlayAreaMode">
Player is free to move around, full positional tracking.
</constant>
<constant name="XR_PLAY_AREA_STAGE" value="4" enum="PlayAreaMode">
Same as [constant XR_PLAY_AREA_ROOMSCALE] but the origin point is fixed to the center of the physical space, with [code]XRServer.center_on_hmd[/code] disabled.
</constant>
<constant name="XR_ENV_BLEND_MODE_OPAQUE" value="0" enum="EnvironmentBlendMode">
Opaque blend mode. This is typically used for VR devices.
</constant>
<constant name="XR_ENV_BLEND_MODE_ADDITIVE" value="1" enum="EnvironmentBlendMode">
Additive blend mode. This is typically used for AR devices or VR devices with passthrough.
</constant>
<constant name="XR_ENV_BLEND_MODE_ALPHA_BLEND" value="2" enum="EnvironmentBlendMode">
Alpha blend mode. This is typically used for AR or VR devices with passthrough capabilities. The alpha channel controls how much of the passthrough is visible. Alpha of 0.0 means the passthrough is visible and this pixel works in ADDITIVE mode. Alpha of 1.0 means that the passthrough is not visible and this pixel works in OPAQUE mode.
</constant>
</constants>
</class>