-rw-r--r--  SConstruct | 9
-rw-r--r--  core/engine.cpp | 1
-rw-r--r--  doc/classes/@GlobalScope.xml | 5
-rw-r--r--  doc/classes/ARVRInterface.xml | 7
-rw-r--r--  doc/classes/AnimationNode.xml | 1
-rw-r--r--  doc/classes/AnimationNodeAdd2.xml | 3
-rw-r--r--  doc/classes/AnimationNodeAdd3.xml | 7
-rw-r--r--  doc/classes/AnimationNodeAnimation.xml | 3
-rw-r--r--  doc/classes/AnimationNodeBlend2.xml | 3
-rw-r--r--  doc/classes/AnimationNodeBlend3.xml | 7
-rw-r--r--  doc/classes/AnimationNodeBlendSpace1D.xml | 16
-rw-r--r--  doc/classes/AnimationNodeBlendSpace2D.xml | 28
-rw-r--r--  doc/classes/Bone2D.xml | 10
-rw-r--r--  doc/classes/CPUParticles.xml | 91
-rw-r--r--  doc/classes/CPUParticles2D.xml | 87
-rw-r--r--  doc/classes/CameraFeed.xml | 64
-rw-r--r--  doc/classes/CameraServer.xml | 83
-rw-r--r--  doc/classes/CameraTexture.xml | 25
-rw-r--r--  doc/classes/ClippedCamera.xml | 6
-rw-r--r--  doc/classes/Environment.xml | 8
-rw-r--r--  doc/classes/FileDialog.xml | 2
-rw-r--r--  doc/classes/MeshInstance2D.xml | 6
-rw-r--r--  doc/classes/MultiMeshInstance2D.xml | 6
-rw-r--r--  doc/classes/OS.xml | 3
-rw-r--r--  doc/classes/Particles.xml | 2
-rw-r--r--  doc/classes/Particles2D.xml | 4
-rw-r--r--  doc/classes/ParticlesMaterial.xml | 10
-rw-r--r--  doc/classes/VisualServer.xml | 13
-rw-r--r--  drivers/dummy/rasterizer_dummy.h | 2
-rw-r--r--  drivers/gles2/rasterizer_scene_gles2.cpp | 71
-rw-r--r--  drivers/gles2/rasterizer_scene_gles2.h | 4
-rw-r--r--  drivers/gles2/rasterizer_storage_gles2.cpp | 14
-rw-r--r--  drivers/gles2/rasterizer_storage_gles2.h | 1
-rw-r--r--  drivers/gles2/shaders/copy.glsl | 29
-rw-r--r--  drivers/gles3/rasterizer_scene_gles3.cpp | 68
-rw-r--r--  drivers/gles3/rasterizer_scene_gles3.h | 4
-rw-r--r--  drivers/gles3/rasterizer_storage_gles3.cpp | 9
-rw-r--r--  drivers/gles3/rasterizer_storage_gles3.h | 1
-rw-r--r--  drivers/gles3/shaders/copy.glsl | 36
-rw-r--r--  drivers/unix/net_socket_posix.cpp | 6
-rw-r--r--  drivers/wasapi/audio_driver_wasapi.cpp | 16
-rw-r--r--  drivers/windows/file_access_windows.cpp | 4
-rw-r--r--  editor/connections_dialog.cpp | 31
-rw-r--r--  editor/dependency_editor.cpp | 40
-rw-r--r--  editor/dependency_editor.h | 1
-rw-r--r--  editor/editor_fonts.cpp | 22
-rw-r--r--  editor/editor_help.cpp | 14
-rw-r--r--  editor/editor_settings.cpp | 23
-rw-r--r--  editor/import/editor_scene_importer_gltf.cpp | 13
-rw-r--r--  editor/plugins/script_text_editor.cpp | 37
-rw-r--r--  editor/project_manager.cpp | 6
-rw-r--r--  main/main.cpp | 1
-rw-r--r--  misc/dist/html/full-size.html | 9
-rw-r--r--  modules/enet/networked_multiplayer_enet.cpp | 1
-rw-r--r--  modules/gdnative/android/android_gdn.cpp | 29
-rw-r--r--  modules/gdnative/arvr/arvr_interface_gdnative.cpp | 11
-rw-r--r--  modules/gdnative/arvr/arvr_interface_gdnative.h | 1
-rw-r--r--  modules/gdnative/gdnative_api.json | 14
-rw-r--r--  modules/gdnative/include/android/godot_android.h | 2
-rw-r--r--  modules/gdnative/include/arvr/godot_arvr.h | 1
-rw-r--r--  modules/mono/editor/bindings_generator.cpp | 9
-rw-r--r--  modules/webrtc/config.py | 3
-rw-r--r--  modules/webrtc/doc_classes/WebRTCDataChannel.xml | 21
-rw-r--r--  modules/webrtc/doc_classes/WebRTCMultiplayer.xml | 85
-rw-r--r--  modules/webrtc/doc_classes/WebRTCPeerConnection.xml | 61
-rw-r--r--  modules/webrtc/register_types.cpp | 2
-rw-r--r--  modules/webrtc/webrtc_data_channel_js.cpp | 35
-rw-r--r--  modules/webrtc/webrtc_multiplayer.cpp | 384
-rw-r--r--  modules/webrtc/webrtc_multiplayer.h | 116
-rw-r--r--  platform/android/java/src/org/godotengine/godot/Godot.java | 26
-rw-r--r--  platform/android/java/src/org/godotengine/godot/GodotRenderer.java | 61
-rw-r--r--  platform/android/java/src/org/godotengine/godot/GodotView.java | 720
-rw-r--r--  platform/android/java/src/org/godotengine/godot/input/GodotInputHandler.java | 352
-rw-r--r--  platform/android/java/src/org/godotengine/godot/input/Joystick.java | 44
-rw-r--r--  platform/android/java/src/org/godotengine/godot/utils/GLUtils.java | 157
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/XRMode.java | 39
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/ovr/OvrConfigChooser.java | 112
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/ovr/OvrContextFactory.java | 58
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/ovr/OvrWindowSurfaceFactory.java | 60
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeConfigChooser.java | 151
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeContextFactory.java | 81
-rw-r--r--  platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeFallbackConfigChooser.java | 61
-rw-r--r--  platform/android/java_godot_wrapper.cpp | 20
-rw-r--r--  platform/android/java_godot_wrapper.h | 4
-rw-r--r--  platform/android/os_android.cpp | 6
-rw-r--r--  platform/android/os_android.h | 3
-rw-r--r--  platform/haiku/os_haiku.cpp | 4
-rw-r--r--  platform/haiku/os_haiku.h | 2
-rw-r--r--  platform/iphone/SCsub | 1
-rw-r--r--  platform/iphone/camera_ios.h | 47
-rw-r--r--  platform/iphone/camera_ios.mm | 429
-rw-r--r--  platform/iphone/detect.py | 1
-rw-r--r--  platform/iphone/os_iphone.cpp | 7
-rw-r--r--  platform/iphone/os_iphone.h | 3
-rw-r--r--  platform/javascript/audio_driver_javascript.cpp | 2
-rw-r--r--  platform/javascript/os_javascript.cpp | 25
-rw-r--r--  platform/javascript/os_javascript.h | 3
-rw-r--r--  platform/osx/SCsub | 1
-rw-r--r--  platform/osx/camera_osx.h | 47
-rw-r--r--  platform/osx/camera_osx.mm | 362
-rw-r--r--  platform/osx/detect.py | 2
-rw-r--r--  platform/osx/os_osx.h | 3
-rw-r--r--  platform/osx/os_osx.mm | 7
-rw-r--r--  platform/uwp/os_uwp.cpp | 6
-rw-r--r--  platform/uwp/os_uwp.h | 3
-rw-r--r--  platform/windows/SCsub | 1
-rw-r--r--  platform/windows/camera_win.cpp | 94
-rw-r--r--  platform/windows/camera_win.h | 46
-rw-r--r--  platform/windows/joypad_windows.cpp | 18
-rw-r--r--  platform/windows/os_windows.cpp | 28
-rw-r--r--  platform/windows/os_windows.h | 2
-rw-r--r--  platform/windows/power_windows.cpp | 2
-rw-r--r--  platform/windows/windows_terminal_logger.cpp | 4
-rw-r--r--  platform/x11/os_x11.cpp | 5
-rw-r--r--  platform/x11/os_x11.h | 3
-rw-r--r--  scene/2d/position_2d.cpp | 2
-rw-r--r--  scene/2d/position_2d.h | 2
-rw-r--r--  scene/3d/camera.cpp | 7
-rw-r--r--  scene/3d/camera.h | 2
-rw-r--r--  scene/animation/tween.cpp | 595
-rw-r--r--  scene/animation/tween.h | 1
-rw-r--r--  scene/gui/file_dialog.cpp | 23
-rw-r--r--  scene/gui/file_dialog.h | 1
-rw-r--r--  scene/gui/rich_text_label.cpp | 17
-rw-r--r--  scene/gui/text_edit.cpp | 2
-rw-r--r--  scene/register_scene_types.cpp | 1
-rw-r--r--  scene/resources/default_theme/default_theme.cpp | 3
-rw-r--r--  scene/resources/default_theme/icon_visibility.png | bin 0 -> 488 bytes
-rw-r--r--  scene/resources/default_theme/theme_data.h | 4
-rw-r--r--  scene/resources/environment.cpp | 22
-rw-r--r--  scene/resources/environment.h | 4
-rw-r--r--  scene/resources/texture.cpp | 105
-rw-r--r--  scene/resources/texture.h | 35
-rw-r--r--  servers/SCsub | 1
-rw-r--r--  servers/arvr/arvr_interface.cpp | 7
-rw-r--r--  servers/arvr/arvr_interface.h | 1
-rw-r--r--  servers/camera/SCsub | 7
-rw-r--r--  servers/camera/camera_feed.cpp | 266
-rw-r--r--  servers/camera/camera_feed.h | 115
-rw-r--r--  servers/camera_server.cpp | 169
-rw-r--r--  servers/camera_server.h | 96
-rw-r--r--  servers/register_server_types.cpp | 6
-rw-r--r--  servers/visual/rasterizer.h | 2
-rw-r--r--  servers/visual/visual_server_raster.h | 2
-rw-r--r--  servers/visual/visual_server_wrap_mt.h | 2
-rw-r--r--  servers/visual_server.cpp | 1
-rw-r--r--  servers/visual_server.h | 3
147 files changed, 5359 insertions(+), 1005 deletions(-)
diff --git a/SConstruct b/SConstruct
index 17cf779d4a..ce2566559c 100644
--- a/SConstruct
+++ b/SConstruct
@@ -125,7 +125,7 @@ opts.Add(BoolVariable('verbose', "Enable verbose output for the compilation", Fa
opts.Add(BoolVariable('progress', "Show a progress indicator during compilation", True))
opts.Add(EnumVariable('warnings', "Set the level of warnings emitted during compilation", 'all', ('extra', 'all', 'moderate', 'no')))
opts.Add(BoolVariable('werror', "Treat compiler warnings as errors. Depends on the level of warnings set with 'warnings'", False))
-opts.Add(BoolVariable('dev', "If yes, alias for verbose=yes warnings=all", False))
+opts.Add(BoolVariable('dev', "If yes, alias for verbose=yes warnings=extra werror=yes", False))
opts.Add('extra_suffix', "Custom extra suffix added to the base filename of all generated binary files", '')
opts.Add(BoolVariable('vsproj', "Generate a Visual Studio solution", False))
opts.Add(EnumVariable('macports_clang', "Build using Clang from MacPorts", 'no', ('no', '5.0', 'devel')))
@@ -254,8 +254,9 @@ if selected_platform in platform_list:
env = env_base.Clone()
if env['dev']:
- env["warnings"] = "all"
env['verbose'] = True
+ env['warnings'] = "extra"
+ env['werror'] = True
if env['vsproj']:
env.vs_incs = []
@@ -337,7 +338,6 @@ if selected_platform in platform_list:
if (env["warnings"] == 'extra'):
# FIXME: enable -Wclobbered once #26351 is fixed
- # FIXME: enable -Wduplicated-branches once #27594 is merged
# Note: enable -Wimplicit-fallthrough for Clang (already part of -Wextra for GCC)
# once we switch to C++11 or later (necessary for our FALLTHROUGH macro).
env.Append(CCFLAGS=['-Wall', '-Wextra', '-Wno-unused-parameter']
@@ -345,7 +345,8 @@ if selected_platform in platform_list:
env.Append(CXXFLAGS=['-Wctor-dtor-privacy', '-Wnon-virtual-dtor'])
if methods.using_gcc(env):
env.Append(CCFLAGS=['-Wno-clobbered', '-Walloc-zero',
- '-Wduplicated-cond', '-Wstringop-overflow=4', '-Wlogical-op'])
+ '-Wduplicated-branches', '-Wduplicated-cond',
+ '-Wstringop-overflow=4', '-Wlogical-op'])
env.Append(CXXFLAGS=['-Wnoexcept', '-Wplacement-new=1'])
version = methods.get_compiler_version(env)
if version != None and version[0] >= '9':
diff --git a/core/engine.cpp b/core/engine.cpp
index 9607dedb3c..50822244cf 100644
--- a/core/engine.cpp
+++ b/core/engine.cpp
@@ -38,6 +38,7 @@
void Engine::set_iterations_per_second(int p_ips) {
+ ERR_FAIL_COND(p_ips <= 0);
ips = p_ips;
}
int Engine::get_iterations_per_second() const {
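The engine.cpp hunk above adds an ERR_FAIL_COND guard so that set_iterations_per_second rejects non-positive values. A minimal C++ sketch of the caller-facing effect; the helper function name and the values are illustrative assumptions, not part of this patch:

    #include "core/engine.h"

    // Illustrative helper (not part of the patch).
    void configure_physics_rate() {
        // Valid: the physics tick rate must be strictly positive.
        Engine::get_singleton()->set_iterations_per_second(90);

        // With the new guard, a non-positive value fails with an error
        // and leaves the previously configured rate untouched.
        Engine::get_singleton()->set_iterations_per_second(0);
    }
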
diff --git a/doc/classes/@GlobalScope.xml b/doc/classes/@GlobalScope.xml
index eb612191e7..d36a545c56 100644
--- a/doc/classes/@GlobalScope.xml
+++ b/doc/classes/@GlobalScope.xml
@@ -18,6 +18,9 @@
<member name="AudioServer" type="AudioServer" setter="" getter="">
[AudioServer] singleton
</member>
+ <member name="CameraServer" type="CameraServer" setter="" getter="">
+ [CameraServer] singleton
+ </member>
<member name="ClassDB" type="ClassDB" setter="" getter="">
[ClassDB] singleton
</member>
@@ -1013,7 +1016,7 @@
Trigger on a VR controller
</constant>
<constant name="JOY_OCULUS_AX" value="7" enum="JoystickList">
- A button on the right Oculus Touch controller, X button on the left controller (also when used in OpenVR)
+ A button on the right Oculus Touch controller, X button on the left controller (also when used in OpenVR)
</constant>
<constant name="JOY_OCULUS_BY" value="1" enum="JoystickList">
B button on the right Oculus Touch controller, Y button on the left controller (also when used in OpenVR)
diff --git a/doc/classes/ARVRInterface.xml b/doc/classes/ARVRInterface.xml
index 11084fb98e..c286811b5d 100644
--- a/doc/classes/ARVRInterface.xml
+++ b/doc/classes/ARVRInterface.xml
@@ -10,6 +10,13 @@
<tutorials>
</tutorials>
<methods>
+ <method name="get_camera_feed_id">
+ <return type="int">
+ </return>
+ <description>
+ If this is an AR interface that requires displaying a camera feed as the background, this method returns the feed id in the [CameraServer] for this interface.
+ </description>
+ </method>
<method name="get_capabilities" qualifiers="const">
<return type="int">
</return>
diff --git a/doc/classes/AnimationNode.xml b/doc/classes/AnimationNode.xml
index 9854d4c27e..23f7a316d9 100644
--- a/doc/classes/AnimationNode.xml
+++ b/doc/classes/AnimationNode.xml
@@ -215,6 +215,7 @@
</signal>
<signal name="tree_changed">
<description>
+ Emitted by nodes that inherit from this class and that have an internal tree when one of their nodes changes. The nodes that emit this signal are [AnimationNodeBlendSpace1D], [AnimationNodeBlendSpace2D], [AnimationNodeStateMachine], and [AnimationNodeBlendTree].
</description>
</signal>
</signals>
diff --git a/doc/classes/AnimationNodeAdd2.xml b/doc/classes/AnimationNodeAdd2.xml
index ac2a28e396..890d6f8b49 100644
--- a/doc/classes/AnimationNodeAdd2.xml
+++ b/doc/classes/AnimationNodeAdd2.xml
@@ -1,8 +1,10 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeAdd2" inherits="AnimationNode" category="Core" version="3.2">
<brief_description>
+ Blends two animations additively inside of an [AnimationNodeBlendTree].
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree]. Blends two animations additively based on an amount value in the [code][0.0, 1.0][/code] range.
</description>
<tutorials>
</tutorials>
@@ -10,6 +12,7 @@
</methods>
<members>
<member name="sync" type="bool" setter="set_use_sync" getter="is_using_sync">
+ If [code]true[/code], sets the [code]optimization[/code] to [code]false[/code] when calling [method AnimationNode.blend_input], forcing the blended animations to update every frame.
</member>
</members>
<constants>
diff --git a/doc/classes/AnimationNodeAdd3.xml b/doc/classes/AnimationNodeAdd3.xml
index 9cfeb47378..4e36f0bae4 100644
--- a/doc/classes/AnimationNodeAdd3.xml
+++ b/doc/classes/AnimationNodeAdd3.xml
@@ -1,8 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeAdd3" inherits="AnimationNode" category="Core" version="3.2">
<brief_description>
+ Blends two of three animations additively inside of an [AnimationNodeBlendTree].
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree]. Blends two animations together additively out of three based on a value in the [code][-1.0, 1.0][/code] range.
+ This node has three inputs:
+ - The base animation to add to.
+ - A -add animation to blend with when the blend amount is in the [code][-1.0, 0.0][/code] range.
+ - A +add animation to blend with when the blend amount is in the [code][0.0, 1.0][/code] range.
</description>
<tutorials>
</tutorials>
@@ -10,6 +16,7 @@
</methods>
<members>
<member name="sync" type="bool" setter="set_use_sync" getter="is_using_sync">
+ If [code]true[/code], sets the [code]optimization[/code] to [code]false[/code] when calling [method AnimationNode.blend_input], forcing the blended animations to update every frame.
</member>
</members>
<constants>
diff --git a/doc/classes/AnimationNodeAnimation.xml b/doc/classes/AnimationNodeAnimation.xml
index e7d53fc7b3..420a702d38 100644
--- a/doc/classes/AnimationNodeAnimation.xml
+++ b/doc/classes/AnimationNodeAnimation.xml
@@ -1,8 +1,10 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeAnimation" inherits="AnimationRootNode" category="Core" version="3.2">
<brief_description>
+ Input animation to use in an [AnimationNodeBlendTree].
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree]. Only features one output, set using the [member animation] property. Use it as an input for [AnimationNode]s that blend animations together.
</description>
<tutorials>
</tutorials>
@@ -10,6 +12,7 @@
</methods>
<members>
<member name="animation" type="String" setter="set_animation" getter="get_animation">
+ Animation to use as an output. It is one of the animations provided by [member AnimationTree.anim_player].
</member>
</members>
<constants>
diff --git a/doc/classes/AnimationNodeBlend2.xml b/doc/classes/AnimationNodeBlend2.xml
index f15fa44f39..9358c5eeef 100644
--- a/doc/classes/AnimationNodeBlend2.xml
+++ b/doc/classes/AnimationNodeBlend2.xml
@@ -1,8 +1,10 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeBlend2" inherits="AnimationNode" category="Core" version="3.2">
<brief_description>
+ Blends two animations linearly inside of an [AnimationNodeBlendTree].
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree]. Blends two animations linearly based on an amount value in the [code][0.0, 1.0][/code] range.
</description>
<tutorials>
</tutorials>
@@ -10,6 +12,7 @@
</methods>
<members>
<member name="sync" type="bool" setter="set_use_sync" getter="is_using_sync">
+ If [code]true[/code], sets the [code]optimization[/code] to [code]false[/code] when calling [method AnimationNode.blend_input], forcing the blended animations to update every frame.
</member>
</members>
<constants>
diff --git a/doc/classes/AnimationNodeBlend3.xml b/doc/classes/AnimationNodeBlend3.xml
index 2f82eea041..9b9cf80968 100644
--- a/doc/classes/AnimationNodeBlend3.xml
+++ b/doc/classes/AnimationNodeBlend3.xml
@@ -1,8 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeBlend3" inherits="AnimationNode" category="Core" version="3.2">
<brief_description>
+ Blends two of three animations linearly inside of an [AnimationNodeBlendTree].
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree]. Blends two animations together linearly out of three based on a value in the [code][-1.0, 1.0][/code] range.
+ This node has three inputs:
+ - The base animation.
+ - A -blend animation to blend with when the blend amount is in the [code][-1.0, 0.0][/code] range.
+ - A +blend animation to blend with when the blend amount is in the [code][0.0, 1.0][/code] range.
</description>
<tutorials>
</tutorials>
@@ -10,6 +16,7 @@
</methods>
<members>
<member name="sync" type="bool" setter="set_use_sync" getter="is_using_sync">
+ If [code]true[/code], sets the [code]optimization[/code] to [code]false[/code] when calling [method AnimationNode.blend_input], forcing the blended animations to update every frame.
</member>
</members>
<constants>
diff --git a/doc/classes/AnimationNodeBlendSpace1D.xml b/doc/classes/AnimationNodeBlendSpace1D.xml
index b13aee277e..6fb5c6312b 100644
--- a/doc/classes/AnimationNodeBlendSpace1D.xml
+++ b/doc/classes/AnimationNodeBlendSpace1D.xml
@@ -1,8 +1,13 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeBlendSpace1D" inherits="AnimationRootNode" category="Core" version="3.2">
<brief_description>
+ Blends linearly between two of any number of [AnimationNode] of any type placed on a virtual axis.
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree].
+ This is a virtual axis on which you can add any type of [AnimationNode] using [method add_blend_point].
+ Outputs the linear blend of the two [code]AnimationNode[/code] closest to the node's current [code]value[/code].
+ You can set the extents of the axis using the [member min_space] and [member max_space].
</description>
<tutorials>
</tutorials>
@@ -17,12 +22,14 @@
<argument index="2" name="at_index" type="int" default="-1">
</argument>
<description>
+ Adds a new point that represents a [code]node[/code] on the virtual axis at a given position set by [code]pos[/code]. You can insert it at a specific index using the [code]at_index[/code] argument. If you use the default value for [code]at_index[/code], the point is inserted at the end of the blend points array.
</description>
</method>
<method name="get_blend_point_count" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the number of points on the blend axis.
</description>
</method>
<method name="get_blend_point_node" qualifiers="const">
@@ -31,6 +38,7 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Returns the [code]AnimationNode[/code] referenced by the point at index [code]point[/code].
</description>
</method>
<method name="get_blend_point_position" qualifiers="const">
@@ -39,6 +47,7 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Returns the position of the point at index [code]point[/code].
</description>
</method>
<method name="remove_blend_point">
@@ -47,6 +56,7 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Removes the point at index [code]point[/code] from the blend axis.
</description>
</method>
<method name="set_blend_point_node">
@@ -57,6 +67,7 @@
<argument index="1" name="node" type="AnimationRootNode">
</argument>
<description>
+ Changes the AnimationNode referenced by the point at index [code]point[/code].
</description>
</method>
<method name="set_blend_point_position">
@@ -67,17 +78,22 @@
<argument index="1" name="pos" type="float">
</argument>
<description>
+ Updates the position of the point at index [code]point[/code] on the blend axis.
</description>
</method>
</methods>
<members>
<member name="max_space" type="float" setter="set_max_space" getter="get_max_space">
+ The blend space's axis's upper limit for the points' position. See [method add_blend_point].
</member>
<member name="min_space" type="float" setter="set_min_space" getter="get_min_space">
+ The blend space's axis's lower limit for the points' position. See [method add_blend_point].
</member>
<member name="snap" type="float" setter="set_snap" getter="get_snap">
+ Position increment to snap to when moving a point on the axis.
</member>
<member name="value_label" type="String" setter="set_value_label" getter="get_value_label">
+ Label of the virtual axis of the blend space.
</member>
</members>
<constants>
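As a usage illustration for the AnimationNodeBlendSpace1D API documented above, here is a hedged engine-side C++ sketch; the animation names "walk" and "run" and the helper function are assumptions for illustration only:

    #include "scene/animation/animation_blend_space_1d.h"
    #include "scene/animation/animation_blend_tree.h"

    // Hypothetical helper: builds a one-dimensional locomotion blend axis.
    Ref<AnimationNodeBlendSpace1D> make_locomotion_axis() {
        Ref<AnimationNodeAnimation> walk;
        walk.instance();
        walk->set_animation("walk"); // clip provided by AnimationTree.anim_player

        Ref<AnimationNodeAnimation> run;
        run.instance();
        run->set_animation("run");

        Ref<AnimationNodeBlendSpace1D> space;
        space.instance();
        space->set_min_space(0.0); // lower limit of the virtual axis
        space->set_max_space(1.0); // upper limit of the virtual axis
        space->add_blend_point(walk, 0.0); // default at_index (-1) appends
        space->add_blend_point(run, 1.0);
        return space;
    }
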
diff --git a/doc/classes/AnimationNodeBlendSpace2D.xml b/doc/classes/AnimationNodeBlendSpace2D.xml
index 2ec5977301..74d4f1c60d 100644
--- a/doc/classes/AnimationNodeBlendSpace2D.xml
+++ b/doc/classes/AnimationNodeBlendSpace2D.xml
@@ -1,8 +1,12 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="AnimationNodeBlendSpace2D" inherits="AnimationRootNode" category="Core" version="3.2">
<brief_description>
+ Blends linearly between three [AnimationNode] of any type placed in a 2D space.
</brief_description>
<description>
+ A resource to add to an [AnimationNodeBlendTree].
+ This node allows you to blend linearly between three animations using a [Vector2] weight.
+ You can add vertices to the blend space with [method add_blend_point] and automatically triangulate it by setting [member auto_triangles] to [code]true[/code]. Otherwise, use [method add_triangle] and [method remove_triangle] to set up the blend space by hand.
</description>
<tutorials>
</tutorials>
@@ -17,6 +21,7 @@
<argument index="2" name="at_index" type="int" default="-1">
</argument>
<description>
+ Adds a new point that represents a [code]node[/code] at the position set by [code]pos[/code]. You can insert it at a specific index using the [code]at_index[/code] argument. If you use the default value for [code]at_index[/code], the point is inserted at the end of the blend points array.
</description>
</method>
<method name="add_triangle">
@@ -31,12 +36,14 @@
<argument index="3" name="at_index" type="int" default="-1">
</argument>
<description>
+ Creates a new triangle using three points [code]x[/code], [code]y[/code], and [code]z[/code]. Triangles can overlap. You can insert the triangle at a specific index using the [code]at_index[/code] argument. If you use the default value for [code]at_index[/code], the triangle is inserted at the end of the blend triangles array.
</description>
</method>
<method name="get_blend_point_count" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the number of points in the blend space.
</description>
</method>
<method name="get_blend_point_node" qualifiers="const">
@@ -45,6 +52,7 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Returns the [code]AnimationRootNode[/code] referenced by the point at index [code]point[/code].
</description>
</method>
<method name="get_blend_point_position" qualifiers="const">
@@ -53,12 +61,14 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Returns the position of the point at index [code]point[/code].
</description>
</method>
<method name="get_triangle_count" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the number of triangles in the blend space.
</description>
</method>
<method name="get_triangle_point">
@@ -69,6 +79,7 @@
<argument index="1" name="point" type="int">
</argument>
<description>
+ Returns the position of the point at index [code]point[/code] in the triangle of index [code]triangle[/code].
</description>
</method>
<method name="remove_blend_point">
@@ -77,6 +88,7 @@
<argument index="0" name="point" type="int">
</argument>
<description>
+ Removes the point at index [code]point[/code] from the blend space.
</description>
</method>
<method name="remove_triangle">
@@ -85,6 +97,7 @@
<argument index="0" name="triangle" type="int">
</argument>
<description>
+ Removes the triangle at index [code]triangle[/code] from the blend space.
</description>
</method>
<method name="set_blend_point_node">
@@ -95,6 +108,7 @@
<argument index="1" name="node" type="AnimationRootNode">
</argument>
<description>
+ Changes the AnimationNode referenced by the point at index [code]point[/code].
</description>
</method>
<method name="set_blend_point_position">
@@ -105,39 +119,49 @@
<argument index="1" name="pos" type="Vector2">
</argument>
<description>
+ Updates the position of the point at index [code]point[/code] in the blend space.
</description>
</method>
</methods>
<members>
<member name="auto_triangles" type="bool" setter="set_auto_triangles" getter="get_auto_triangles">
+ If [code]true[/code], the blend space is triangulated automatically. The mesh updates every time you add or remove points with [method add_blend_point] and [method remove_blend_point].
</member>
<member name="blend_mode" type="int" setter="set_blend_mode" getter="get_blend_mode" enum="AnimationNodeBlendSpace2D.BlendMode">
+ Controls the interpolation between animations. See [enum BlendMode] constants.
</member>
<member name="max_space" type="Vector2" setter="set_max_space" getter="get_max_space">
+ The blend space's X and Y axes' upper limit for the points' position. See [method add_blend_point].
</member>
<member name="min_space" type="Vector2" setter="set_min_space" getter="get_min_space">
+ The blend space's X and Y axes' lower limit for the points' position. See [method add_blend_point].
</member>
<member name="snap" type="Vector2" setter="set_snap" getter="get_snap">
+ Position increment to snap to when moving a point.
</member>
<member name="x_label" type="String" setter="set_x_label" getter="get_x_label">
+ Name of the blend space's X axis.
</member>
<member name="y_label" type="String" setter="set_y_label" getter="get_y_label">
+ Name of the blend space's Y axis.
</member>
</members>
<signals>
<signal name="triangles_updated">
<description>
+ Emitted every time the blend space's triangles are created, removed, or when one of their vertices changes position.
</description>
</signal>
</signals>
<constants>
<constant name="BLEND_MODE_INTERPOLATED" value="0" enum="BlendMode">
+ The interpolation between animations is linear.
</constant>
<constant name="BLEND_MODE_DISCRETE" value="1" enum="BlendMode">
- Useful for frame-by-frame 2D animations.
+ The blend space plays the animation of the node the blending position is closest to. Useful for frame-by-frame 2D animations.
</constant>
<constant name="BLEND_MODE_DISCRETE_CARRY" value="2" enum="BlendMode">
- Keep the current play position when switching between discrete animations.
+ Similar to [const BLEND_MODE_DISCRETE], but starts the new animation at the last animation's playback position.
</constant>
</constants>
</class>
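Similarly, for the AnimationNodeBlendSpace2D API documented above, a short hedged C++ sketch; the clip names and point positions are arbitrary examples, not part of this patch:

    #include "scene/animation/animation_blend_space_2d.h"
    #include "scene/animation/animation_blend_tree.h"

    // Hypothetical helper: builds a 2D blend space and lets it triangulate itself.
    Ref<AnimationNodeBlendSpace2D> make_strafe_space() {
        Ref<AnimationNodeBlendSpace2D> space;
        space.instance();
        space->set_auto_triangles(true); // retriangulates as points are added or removed

        const char *clips[4] = { "idle", "walk_forward", "strafe_left", "strafe_right" };
        const Vector2 positions[4] = { Vector2(0, 0), Vector2(0, 1), Vector2(-1, 0), Vector2(1, 0) };
        for (int i = 0; i < 4; i++) {
            Ref<AnimationNodeAnimation> clip;
            clip.instance();
            clip->set_animation(clips[i]);
            space->add_blend_point(clip, positions[i]); // default at_index (-1) appends
        }
        return space;
    }
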
diff --git a/doc/classes/Bone2D.xml b/doc/classes/Bone2D.xml
index 59f7bec889..757c6c6a34 100644
--- a/doc/classes/Bone2D.xml
+++ b/doc/classes/Bone2D.xml
@@ -1,8 +1,13 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="Bone2D" inherits="Node2D" category="Core" version="3.2">
<brief_description>
+ Joint used with [Skeleton2D] to control and animate other nodes.
</brief_description>
<description>
+ Use a hierarchy of [code]Bone2D[/code] bound to a [Skeleton2D] to control and animate other [Node2D] nodes.
+ You can use [code]Bone2D[/code] and [code]Skeleton2D[/code] nodes to animate 2D meshes created with the Polygon 2D UV editor.
+ Each bone has a [member rest] transform that you can reset to with [method apply_rest]. These rest poses are relative to the bone's parent.
+ In the editor, you can set the rest pose of an entire skeleton using a menu option. From code, you need to iterate over the bones to set their individual rest poses.
</description>
<tutorials>
</tutorials>
@@ -11,25 +16,30 @@
<return type="void">
</return>
<description>
+ Resets the node's transform to the [member rest] pose.
</description>
</method>
<method name="get_index_in_skeleton" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the node's index as part of the entire skeleton. See [Skeleton2D].
</description>
</method>
<method name="get_skeleton_rest" qualifiers="const">
<return type="Transform2D">
</return>
<description>
+ Returns the node's [member rest] [code]Transform2D[/code] if it doesn't have a parent, or its rest pose relative to its parent.
</description>
</method>
</methods>
<members>
<member name="default_length" type="float" setter="set_default_length" getter="get_default_length">
+ Length of the bone's representation drawn in the editor's viewport in pixels.
</member>
<member name="rest" type="Transform2D" setter="set_rest" getter="get_rest">
+ Rest transform of the bone. You can reset the node's transforms to this value using [method apply_rest].
</member>
</members>
<constants>
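The Bone2D description above notes that, from code, rest poses have to be handled bone by bone. A hedged C++ sketch of that iteration, assuming the bones are already registered with their Skeleton2D; the helper name is an assumption:

    #include "scene/2d/skeleton_2d.h"

    // Hypothetical helper: snaps every bone of a skeleton back to its rest pose.
    void apply_all_rest_poses(Skeleton2D *p_skeleton) {
        for (int i = 0; i < p_skeleton->get_bone_count(); i++) {
            Bone2D *bone = p_skeleton->get_bone(i);
            if (bone) {
                bone->apply_rest(); // resets the bone's transform to [member rest]
            }
        }
    }
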
diff --git a/doc/classes/CPUParticles.xml b/doc/classes/CPUParticles.xml
index 5458a87a9e..c9c92102f3 100644
--- a/doc/classes/CPUParticles.xml
+++ b/doc/classes/CPUParticles.xml
@@ -1,8 +1,11 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CPUParticles" inherits="GeometryInstance" category="Core" version="3.2">
<brief_description>
+ CPU-based 3D particle emitter.
</brief_description>
<description>
+ CPU-based 3D particle node used to create a variety of particle systems and effects.
+ See also [Particles], which provides the same functionality with hardware acceleration, but may not run on older devices.
</description>
<tutorials>
</tutorials>
@@ -13,56 +16,77 @@
<argument index="0" name="particles" type="Node">
</argument>
<description>
+ Sets this node's properties to match a given [Particles] node with an assigned [ParticlesMaterial].
</description>
</method>
<method name="restart">
<return type="void">
</return>
<description>
+ Restarts the particle emitter.
</description>
</method>
</methods>
<members>
<member name="amount" type="int" setter="set_amount" getter="get_amount">
+ Number of particles emitted in one emission cycle.
</member>
<member name="angle" type="float" setter="set_param" getter="get_param">
+ Initial rotation applied to each particle, in degrees.
</member>
<member name="angle_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's rotation will be animated along this [Curve].
</member>
<member name="angle_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Rotation randomness ratio. Default value: [code]0[/code].
</member>
<member name="angular_velocity" type="float" setter="set_param" getter="get_param">
+ Initial angular velocity applied to each particle. Sets the speed of rotation of the particle.
</member>
<member name="angular_velocity_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's angular velocity will vary along this [Curve].
</member>
<member name="angular_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Angular velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="anim_offset" type="float" setter="set_param" getter="get_param">
+ Particle animation offset.
</member>
<member name="anim_offset_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's animation offset will vary along this [Curve].
</member>
<member name="anim_offset_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Animation offset randomness ratio. Default value: [code]0[/code].
</member>
<member name="anim_speed" type="float" setter="set_param" getter="get_param">
+ Particle animation speed.
</member>
<member name="anim_speed_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's animation speed will vary along this [Curve].
</member>
<member name="anim_speed_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Animation speed randomness ratio. Default value: [code]0[/code].
</member>
<member name="color" type="Color" setter="set_color" getter="get_color">
+ Unused for 3D particles.
</member>
<member name="color_ramp" type="Gradient" setter="set_color_ramp" getter="get_color_ramp">
- Each particle's vertex color will vary along this [GradientTexture].
+ Unused for 3D particles.
</member>
<member name="damping" type="float" setter="set_param" getter="get_param">
+ The rate at which particles lose velocity.
</member>
<member name="damping_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Damping will vary along this [Curve].
</member>
<member name="damping_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Damping randomness ratio. Default value: [code]0[/code].
</member>
<member name="draw_order" type="int" setter="set_draw_order" getter="get_draw_order" enum="CPUParticles.DrawOrder">
+ Particle draw order. Uses [enum DrawOrder] values. Default value: [constant DRAW_ORDER_INDEX].
</member>
<member name="emission_box_extents" type="Vector3" setter="set_emission_box_extents" getter="get_emission_box_extents">
+ The box's extents if [member emission_shape] is set to [constant EMISSION_SHAPE_BOX].
</member>
<member name="emission_colors" type="PoolColorArray" setter="set_emission_colors" getter="get_emission_colors">
</member>
@@ -71,134 +95,199 @@
<member name="emission_points" type="PoolVector3Array" setter="set_emission_points" getter="get_emission_points">
</member>
<member name="emission_shape" type="int" setter="set_emission_shape" getter="get_emission_shape" enum="CPUParticles.EmissionShape">
+ Particles will be emitted inside this region. Use [enum EmissionShape] for values. Default value: [constant EMISSION_SHAPE_POINT].
</member>
<member name="emission_sphere_radius" type="float" setter="set_emission_sphere_radius" getter="get_emission_sphere_radius">
+ The sphere's radius if [member emission_shape] is set to [constant EMISSION_SHAPE_SPHERE].
</member>
<member name="emitting" type="bool" setter="set_emitting" getter="is_emitting">
+ If [code]true[/code], particles are being emitted. Default value: [code]true[/code].
</member>
<member name="explosiveness" type="float" setter="set_explosiveness_ratio" getter="get_explosiveness_ratio">
+ How rapidly particles in an emission cycle are emitted. If greater than [code]0[/code], there will be a gap in emissions before the next cycle begins. Default value: [code]0[/code].
</member>
<member name="fixed_fps" type="int" setter="set_fixed_fps" getter="get_fixed_fps">
+ The particle system's frame rate is fixed to a value. For instance, changing the value to 2 will make the particles render at 2 frames per second. Note this does not slow down the particle system itself.
</member>
<member name="flag_align_y" type="bool" setter="set_particle_flag" getter="get_particle_flag">
+ Align y-axis of particle with the direction of its velocity.
</member>
<member name="flag_disable_z" type="bool" setter="set_particle_flag" getter="get_particle_flag">
+ If [code]true[/code], particles will not move on the z axis. Default value: [code]false[/code].
</member>
<member name="flag_rotate_y" type="bool" setter="set_particle_flag" getter="get_particle_flag">
+ If [code]true[/code], particles rotate around y-axis by [member angle].
</member>
<member name="flatness" type="float" setter="set_flatness" getter="get_flatness">
+ Amount of [member spread] in Y/Z plane. A value of [code]1[/code] restricts particles to X/Z plane. Default [code]0[/code].
</member>
<member name="fract_delta" type="bool" setter="set_fractional_delta" getter="get_fractional_delta">
+ If [code]true[/code], results in fractional delta calculation, which has a smoother particle display effect. Default value: [code]true[/code].
</member>
<member name="gravity" type="Vector3" setter="set_gravity" getter="get_gravity">
+ Gravity applied to every particle. Default value: [code](0, -9.8, 0)[/code].
</member>
<member name="hue_variation" type="float" setter="set_param" getter="get_param">
+ Initial hue variation applied to each particle.
</member>
<member name="hue_variation_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's hue will vary along this [Curve].
</member>
<member name="hue_variation_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Hue variation randomness ratio. Default value: [code]0[/code].
</member>
<member name="initial_velocity" type="float" setter="set_param" getter="get_param">
+ Initial velocity magnitude for each particle. Direction comes from [member spread] and the node's orientation.
</member>
<member name="initial_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Initial velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="lifetime" type="float" setter="set_lifetime" getter="get_lifetime">
+ Amount of time each particle will exist. Default value: [code]1[/code].
</member>
<member name="linear_accel" type="float" setter="set_param" getter="get_param">
+ Linear acceleration applied to each particle in the direction of motion.
</member>
<member name="linear_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's linear acceleration will vary along this [Curve].
</member>
<member name="linear_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Linear acceleration randomness ratio. Default value: [code]0[/code].
</member>
<member name="local_coords" type="bool" setter="set_use_local_coordinates" getter="get_use_local_coordinates">
+ If [code]true[/code], particles use the parent node's coordinate space. If [code]false[/code], they use global coordinates. Default value: [code]true[/code].
</member>
<member name="mesh" type="Mesh" setter="set_mesh" getter="get_mesh">
+ The [Mesh] used for each particle. If [code]null[/code], particles will be spheres.
</member>
<member name="one_shot" type="bool" setter="set_one_shot" getter="get_one_shot">
+ If [code]true[/code], only one emission cycle occurs. If set [code]true[/code] during a cycle, emission will stop at the cycle's end. Default value: [code]false[/code].
</member>
<member name="orbit_velocity" type="float" setter="set_param" getter="get_param">
+ Orbital velocity applied to each particle. Makes the particles circle around origin in the local XY plane. Specified in number of full rotations around origin per second.
+ This property is only available when [member flag_disable_z] is [code]true[/code].
</member>
<member name="orbit_velocity_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's orbital velocity will vary along this [Curve].
</member>
<member name="orbit_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Orbital velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="preprocess" type="float" setter="set_pre_process_time" getter="get_pre_process_time">
+ Particle system starts as if it had already run for this many seconds.
</member>
<member name="radial_accel" type="float" setter="set_param" getter="get_param">
+ Radial acceleration applied to each particle. Makes particle accelerate away from origin.
</member>
<member name="radial_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's radial acceleration will vary along this [Curve].
</member>
<member name="radial_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Radial acceleration randomness ratio. Default value: [code]0[/code].
</member>
<member name="randomness" type="float" setter="set_randomness_ratio" getter="get_randomness_ratio">
+ Emission lifetime randomness ratio. Default value: [code]0[/code].
</member>
<member name="scale_amount" type="float" setter="set_param" getter="get_param">
+ Initial scale applied to each particle.
</member>
<member name="scale_amount_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's scale will vary along this [Curve].
</member>
<member name="scale_amount_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Scale randomness ratio. Default value: [code]0[/code].
</member>
<member name="speed_scale" type="float" setter="set_speed_scale" getter="get_speed_scale">
+ Particle system's running speed scaling ratio. Default value: [code]1[/code]. A value of [code]0[/code] can be used to pause the particles.
</member>
<member name="spread" type="float" setter="set_spread" getter="get_spread">
+ Each particle's initial direction ranges from [code]+spread[/code] to [code]-spread[/code] degrees. Default value: [code]45[/code].
</member>
<member name="tangential_accel" type="float" setter="set_param" getter="get_param">
+ Tangential acceleration applied to each particle. Tangential acceleration is perpendicular to the particle's velocity giving the particles a swirling motion.
</member>
<member name="tangential_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's tangential acceleration will vary along this [Curve].
</member>
<member name="tangential_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Tangential acceleration randomness ratio. Default value: [code]0[/code].
</member>
</members>
<constants>
<constant name="DRAW_ORDER_INDEX" value="0" enum="DrawOrder">
+ Particles are drawn in the order emitted.
</constant>
<constant name="DRAW_ORDER_LIFETIME" value="1" enum="DrawOrder">
+ Particles are drawn in order of remaining lifetime.
</constant>
<constant name="DRAW_ORDER_VIEW_DEPTH" value="2" enum="DrawOrder">
+ Particles are drawn in order of depth.
</constant>
<constant name="PARAM_INITIAL_LINEAR_VELOCITY" value="0" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set initial velocity properties.
</constant>
<constant name="PARAM_ANGULAR_VELOCITY" value="1" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set angular velocity properties.
</constant>
<constant name="PARAM_ORBIT_VELOCITY" value="2" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set orbital velocity properties.
</constant>
<constant name="PARAM_LINEAR_ACCEL" value="3" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set linear acceleration properties.
</constant>
<constant name="PARAM_RADIAL_ACCEL" value="4" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set radial acceleration properties.
</constant>
<constant name="PARAM_TANGENTIAL_ACCEL" value="5" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set tangential acceleration properties.
</constant>
<constant name="PARAM_DAMPING" value="6" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set damping properties.
</constant>
<constant name="PARAM_ANGLE" value="7" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set angle properties.
</constant>
<constant name="PARAM_SCALE" value="8" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set scale properties.
</constant>
<constant name="PARAM_HUE_VARIATION" value="9" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set hue variation properties.
</constant>
<constant name="PARAM_ANIM_SPEED" value="10" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set animation speed properties.
</constant>
<constant name="PARAM_ANIM_OFFSET" value="11" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set animation offset properties.
</constant>
<constant name="PARAM_MAX" value="12" enum="Parameter">
+ Represents the size of the [enum Parameter] enum.
</constant>
<constant name="FLAG_ALIGN_Y_TO_VELOCITY" value="0" enum="Flags">
+ Use with [method set_flag] to set [member flag_align_y].
</constant>
<constant name="FLAG_ROTATE_Y" value="1" enum="Flags">
+ Use with [method set_flag] to set [member flag_rotate_y].
</constant>
<constant name="FLAG_DISABLE_Z" value="2" enum="Flags">
+ Use with [method set_flag] to set [member flag_disable_z].
</constant>
<constant name="FLAG_MAX" value="3" enum="Flags">
+ Represents the size of the [enum Flags] enum.
</constant>
<constant name="EMISSION_SHAPE_POINT" value="0" enum="EmissionShape">
+ All particles will be emitted from a single point.
</constant>
<constant name="EMISSION_SHAPE_SPHERE" value="1" enum="EmissionShape">
+ Particles will be emitted in the volume of a sphere.
</constant>
<constant name="EMISSION_SHAPE_BOX" value="2" enum="EmissionShape">
+ Particles will be emitted in the volume of a box.
</constant>
<constant name="EMISSION_SHAPE_POINTS" value="3" enum="EmissionShape">
+ Particles will be emitted at a position chosen randomly among [member emission_points]. Particle color will be modulated by [member emission_colors].
</constant>
<constant name="EMISSION_SHAPE_DIRECTED_POINTS" value="4" enum="EmissionShape">
+ Particles will be emitted at a position chosen randomly among [member emission_points]. Particle velocity and rotation will be set based on [member emission_normals]. Particle color will be modulated by [member emission_colors].
</constant>
</constants>
</class>
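To round off the CPUParticles properties documented above, a minimal C++ configuration sketch; the helper name and all parameter values are illustrative assumptions, not defaults from this patch:

    #include "scene/3d/cpu_particles.h"

    // Hypothetical helper: configures a simple spherical spark emitter.
    CPUParticles *make_spark_emitter() {
        CPUParticles *particles = memnew(CPUParticles);
        particles->set_amount(64);    // particles per emission cycle
        particles->set_lifetime(1.5); // seconds each particle exists
        particles->set_one_shot(false);
        particles->set_emission_shape(CPUParticles::EMISSION_SHAPE_SPHERE);
        particles->set_emission_sphere_radius(0.25);
        particles->set_param(CPUParticles::PARAM_INITIAL_LINEAR_VELOCITY, 4.0);
        particles->set_param_randomness(CPUParticles::PARAM_INITIAL_LINEAR_VELOCITY, 0.3);
        particles->set_emitting(true);
        return particles;
    }
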
diff --git a/doc/classes/CPUParticles2D.xml b/doc/classes/CPUParticles2D.xml
index 17c68ccb87..4351a0b4d4 100644
--- a/doc/classes/CPUParticles2D.xml
+++ b/doc/classes/CPUParticles2D.xml
@@ -1,10 +1,14 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CPUParticles2D" inherits="Node2D" category="Core" version="3.2">
<brief_description>
+ CPU-based 2D particle emitter.
</brief_description>
<description>
+ CPU-based 2D particle node used to create a variety of particle systems and effects.
+ See also [Particles2D], which provides the same functionality with hardware acceleration, but may not run on older devices.
</description>
<tutorials>
+ <link>https://docs.godotengine.org/en/latest/tutorials/2d/particle_systems_2d.html</link>
</tutorials>
<methods>
<method name="convert_from_particles">
@@ -13,53 +17,74 @@
<argument index="0" name="particles" type="Node">
</argument>
<description>
+ Sets this node's properties to match a given [Particles2D] node with an assigned [ParticlesMaterial].
</description>
</method>
<method name="restart">
<return type="void">
</return>
<description>
+ Restarts the particle emitter.
</description>
</method>
</methods>
<members>
<member name="amount" type="int" setter="set_amount" getter="get_amount">
+ Number of particles emitted in one emission cycle.
</member>
<member name="angle" type="float" setter="set_param" getter="get_param">
+ Initial rotation applied to each particle, in degrees.
</member>
<member name="angle_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's rotation will be animated along this [Curve].
</member>
<member name="angle_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Rotation randomness ratio. Default value: [code]0[/code].
</member>
<member name="angular_velocity" type="float" setter="set_param" getter="get_param">
+ Initial angular velocity applied to each particle. Sets the speed of rotation of the particle.
</member>
<member name="angular_velocity_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's angular velocity will vary along this [Curve].
</member>
<member name="angular_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Angular velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="anim_offset" type="float" setter="set_param" getter="get_param">
+ Particle animation offset.
</member>
<member name="anim_offset_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's animation offset will vary along this [Curve].
</member>
<member name="anim_offset_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Animation offset randomness ratio. Default value: [code]0[/code].
</member>
<member name="anim_speed" type="float" setter="set_param" getter="get_param">
+ Particle animation speed.
</member>
<member name="anim_speed_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's animation speed will vary along this [Curve].
</member>
<member name="anim_speed_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Animation speed randomness ratio. Default value: [code]0[/code].
</member>
<member name="color" type="Color" setter="set_color" getter="get_color">
+ Each particle's initial color. If [member texture] is defined, it will be multiplied by this color.
</member>
<member name="color_ramp" type="Gradient" setter="set_color_ramp" getter="get_color_ramp">
+ Each particle's color will vary along this [Gradient].
</member>
<member name="damping" type="float" setter="set_param" getter="get_param">
+ The rate at which particles lose velocity.
</member>
<member name="damping_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Damping will vary along this [Curve].
</member>
<member name="damping_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Damping randomness ratio. Default value: [code]0[/code].
</member>
<member name="draw_order" type="int" setter="set_draw_order" getter="get_draw_order" enum="CPUParticles2D.DrawOrder">
+ Particle draw order. Uses [enum DrawOrder] values. Default value: [constant DRAW_ORDER_INDEX].
</member>
<member name="emission_colors" type="PoolColorArray" setter="set_emission_colors" getter="get_emission_colors">
</member>
@@ -68,132 +93,194 @@
<member name="emission_points" type="PoolVector2Array" setter="set_emission_points" getter="get_emission_points">
</member>
<member name="emission_rect_extents" type="Vector2" setter="set_emission_rect_extents" getter="get_emission_rect_extents">
+ The rectangle's extents if [member emission_shape] is set to [constant EMISSION_SHAPE_RECTANGLE].
</member>
<member name="emission_shape" type="int" setter="set_emission_shape" getter="get_emission_shape" enum="CPUParticles2D.EmissionShape">
+ Particles will be emitted inside this region. Use [enum EmissionShape] for values. Default value: [constant EMISSION_SHAPE_POINT].
</member>
<member name="emission_sphere_radius" type="float" setter="set_emission_sphere_radius" getter="get_emission_sphere_radius">
+ The circle's radius if [member emission_shape] is set to [constant EMISSION_SHAPE_CIRCLE].
</member>
<member name="emitting" type="bool" setter="set_emitting" getter="is_emitting">
+ If [code]true[/code], particles are being emitted. Default value: [code]true[/code].
</member>
<member name="explosiveness" type="float" setter="set_explosiveness_ratio" getter="get_explosiveness_ratio">
+ How rapidly particles in an emission cycle are emitted. If greater than [code]0[/code], there will be a gap in emissions before the next cycle begins. Default value: [code]0[/code].
</member>
<member name="fixed_fps" type="int" setter="set_fixed_fps" getter="get_fixed_fps">
+ The particle system's frame rate is fixed to a value. For instance, changing the value to 2 will make the particles render at 2 frames per second. Note this does not slow down the simulation of the particle system itself.
</member>
<member name="flag_align_y" type="bool" setter="set_particle_flag" getter="get_particle_flag">
+ Align y-axis of particle with the direction of its velocity.
</member>
<member name="flatness" type="float" setter="set_flatness" getter="get_flatness">
</member>
<member name="fract_delta" type="bool" setter="set_fractional_delta" getter="get_fractional_delta">
+ If [code]true[/code], results in fractional delta calculation, which has a smoother particle display effect. Default value: [code]true[/code].
</member>
<member name="gravity" type="Vector2" setter="set_gravity" getter="get_gravity">
+ Gravity applied to every particle. Default value: [code](0, 98)[/code].
</member>
<member name="hue_variation" type="float" setter="set_param" getter="get_param">
+ Initial hue variation applied to each particle.
</member>
<member name="hue_variation_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's hue will vary along this [Curve].
</member>
<member name="hue_variation_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Hue variation randomness ratio. Default value: [code]0[/code].
</member>
<member name="initial_velocity" type="float" setter="set_param" getter="get_param">
+ Initial velocity magnitude for each particle. Direction comes from [member spread] and the node's orientation.
</member>
<member name="initial_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Initial velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="lifetime" type="float" setter="set_lifetime" getter="get_lifetime">
+ Amount of time each particle will exist. Default value: [code]1[/code].
</member>
<member name="linear_accel" type="float" setter="set_param" getter="get_param">
+ Linear acceleration applied to each particle in the direction of motion.
</member>
<member name="linear_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's linear acceleration will vary along this [Curve].
</member>
<member name="linear_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Linear acceleration randomness ratio. Default value: [code]0[/code].
</member>
<member name="local_coords" type="bool" setter="set_use_local_coordinates" getter="get_use_local_coordinates">
+ If [code]true[/code], particles use the parent node's coordinate space. If [code]false[/code], they use global coordinates. Default value: [code]true[/code].
</member>
<member name="normalmap" type="Texture" setter="set_normalmap" getter="get_normalmap">
+ Normal map to be used for the [member texture] property.
</member>
<member name="one_shot" type="bool" setter="set_one_shot" getter="get_one_shot">
+ If [code]true[/code], only one emission cycle occurs. If set [code]true[/code] during a cycle, emission will stop at the cycle's end. Default value: [code]false[/code].
</member>
<member name="orbit_velocity" type="float" setter="set_param" getter="get_param">
+ Orbital velocity applied to each particle. Makes the particles circle around origin. Specified in number of full rotations around origin per second.
</member>
<member name="orbit_velocity_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's orbital velocity will vary along this [Curve].
</member>
<member name="orbit_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Orbital velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="preprocess" type="float" setter="set_pre_process_time" getter="get_pre_process_time">
+ Particle system starts as if it had already run for this many seconds.
</member>
<member name="radial_accel" type="float" setter="set_param" getter="get_param">
+ Radial acceleration applied to each particle. Makes particle accelerate away from origin.
</member>
<member name="radial_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's radial acceleration will vary along this [Curve].
</member>
<member name="radial_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Radial acceleration randomness ratio. Default value: [code]0[/code].
</member>
<member name="randomness" type="float" setter="set_randomness_ratio" getter="get_randomness_ratio">
+ Emission lifetime randomness ratio. Default value: [code]0[/code].
</member>
<member name="scale_amount" type="float" setter="set_param" getter="get_param">
+ Initial scale applied to each particle.
</member>
<member name="scale_amount_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's scale will vary along this [Curve].
</member>
<member name="scale_amount_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Scale randomness ratio. Default value: [code]0[/code].
</member>
<member name="speed_scale" type="float" setter="set_speed_scale" getter="get_speed_scale">
+ Particle system's running speed scaling ratio. Default value: [code]1[/code]. A value of [code]0[/code] can be used to pause the particles.
</member>
<member name="spread" type="float" setter="set_spread" getter="get_spread">
+ Each particle's initial direction range from [code]+spread[/code] to [code]-spread[/code] degrees. Default value: [code]45[/code].
</member>
<member name="tangential_accel" type="float" setter="set_param" getter="get_param">
+ Tangential acceleration applied to each particle. Tangential acceleration is perpendicular to the particle's velocity giving the particles a swirling motion.
</member>
<member name="tangential_accel_curve" type="Curve" setter="set_param_curve" getter="get_param_curve">
+ Each particle's tangential acceleration will vary along this [Curve].
</member>
<member name="tangential_accel_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
+ Tangential acceleration randomness ratio. Default value: [code]0[/code].
</member>
<member name="texture" type="Texture" setter="set_texture" getter="get_texture">
+ Particle texture. If [code]null[/code] particles will be squares.
</member>
</members>
<constants>
<constant name="DRAW_ORDER_INDEX" value="0" enum="DrawOrder">
+ Particles are drawn in the order emitted.
</constant>
<constant name="DRAW_ORDER_LIFETIME" value="1" enum="DrawOrder">
+ Particles are drawn in order of remaining lifetime.
</constant>
<constant name="PARAM_INITIAL_LINEAR_VELOCITY" value="0" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set initial velocity properties.
</constant>
<constant name="PARAM_ANGULAR_VELOCITY" value="1" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set angular velocity properties.
</constant>
<constant name="PARAM_ORBIT_VELOCITY" value="2" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set orbital velocity properties.
</constant>
<constant name="PARAM_LINEAR_ACCEL" value="3" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set linear acceleration properties.
</constant>
<constant name="PARAM_RADIAL_ACCEL" value="4" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set radial acceleration properties.
</constant>
<constant name="PARAM_TANGENTIAL_ACCEL" value="5" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set tangential acceleration properties.
</constant>
<constant name="PARAM_DAMPING" value="6" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set damping properties.
</constant>
<constant name="PARAM_ANGLE" value="7" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set angle properties.
</constant>
<constant name="PARAM_SCALE" value="8" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set scale properties.
</constant>
<constant name="PARAM_HUE_VARIATION" value="9" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set hue variation properties.
</constant>
<constant name="PARAM_ANIM_SPEED" value="10" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set animation speed properties.
</constant>
<constant name="PARAM_ANIM_OFFSET" value="11" enum="Parameter">
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set animation offset properties.
</constant>
<constant name="PARAM_MAX" value="12" enum="Parameter">
+ Represents the size of the [enum Parameter] enum.
</constant>
<constant name="FLAG_ALIGN_Y_TO_VELOCITY" value="0" enum="Flags">
+ Use with [method set_flag] to set [member flag_align_y].
</constant>
<constant name="FLAG_ROTATE_Y" value="1" enum="Flags">
+ Present for consistency with 3D particle nodes, not used in 2D.
</constant>
<constant name="FLAG_DISABLE_Z" value="2" enum="Flags">
+ Present for consistency with 3D particle nodes, not used in 2D.
</constant>
<constant name="FLAG_MAX" value="3" enum="Flags">
+ Represents the size of the [enum Flags] enum.
</constant>
<constant name="EMISSION_SHAPE_POINT" value="0" enum="EmissionShape">
+ All particles will be emitted from a single point.
</constant>
<constant name="EMISSION_SHAPE_CIRCLE" value="1" enum="EmissionShape">
+ Particles will be emitted on the perimeter of a circle.
</constant>
<constant name="EMISSION_SHAPE_RECTANGLE" value="2" enum="EmissionShape">
+ Particles will be emitted in the area of a rectangle.
</constant>
<constant name="EMISSION_SHAPE_POINTS" value="3" enum="EmissionShape">
+ Particles will be emitted at a position chosen randomly among [member emission_points]. Particle color will be modulated by [member emission_colors].
</constant>
<constant name="EMISSION_SHAPE_DIRECTED_POINTS" value="4" enum="EmissionShape">
+ Particles will be emitted at a position chosen randomly among [member emission_points]. Particle velocity and rotation will be set based on [member emission_normals]. Particle color will be modulated by [member emission_colors].
</constant>
</constants>
</class>
diff --git a/doc/classes/CameraFeed.xml b/doc/classes/CameraFeed.xml
new file mode 100644
index 0000000000..7f353f7a05
--- /dev/null
+++ b/doc/classes/CameraFeed.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<class name="CameraFeed" inherits="Reference" category="Core" version="3.2">
+ <brief_description>
+ A camera feed gives you access to a single physical camera attached to your device.
+ </brief_description>
+ <description>
+ A camera feed gives you access to a single physical camera attached to your device.
+ When enabled Godot will start capturing frames from the camera which can then be used. Do note that many cameras will return YCbCr images which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background.
+ </description>
+ <tutorials>
+ </tutorials>
+ <methods>
+ <method name="get_id" qualifiers="const">
+ <return type="int">
+ </return>
+ <description>
+ Get unique id for this feed
+ </description>
+ </method>
+ <method name="get_name" qualifiers="const">
+ <return type="String">
+ </return>
+ <description>
+ Get name of the camera
+ </description>
+ </method>
+ <method name="get_position" qualifiers="const">
+ <return type="int" enum="CameraFeed.FeedPosition">
+ </return>
+ <description>
+ Position of camera on the device.
+ </description>
+ </method>
+ </methods>
+ <members>
+ <member name="feed_is_active" type="bool" setter="set_active" getter="is_active">
+ </member>
+ <member name="feed_transform" type="Transform2D" setter="set_transform" getter="get_transform">
+ </member>
+ </members>
+ <constants>
+ <constant name="FEED_NOIMAGE" value="0" enum="FeedDataType">
+ No image set for the feed.
+ </constant>
+ <constant name="FEED_RGB" value="1" enum="FeedDataType">
+ Feed supplies RGB images.
+ </constant>
+ <constant name="FEED_YCbCr" value="2" enum="FeedDataType">
+ Feed supplies YCbCr images that need to be converted to RGB.
+ </constant>
+ <constant name="FEED_YCbCr_Sep" value="3" enum="FeedDataType">
+ Feed supplies separate Y and CbCr images that need to be combined and converted to RGB.
+ </constant>
+ <constant name="FEED_UNSPECIFIED" value="0" enum="FeedPosition">
+ Unspecified position.
+ </constant>
+ <constant name="FEED_FRONT" value="1" enum="FeedPosition">
+ Camera is mounted at the front of the device.
+ </constant>
+ <constant name="FEED_BACK" value="2" enum="FeedPosition">
+ Camera is moutned at the back of the device.
+ </constant>
+ </constants>
+</class>
diff --git a/doc/classes/CameraServer.xml b/doc/classes/CameraServer.xml
new file mode 100644
index 0000000000..bc76c306fb
--- /dev/null
+++ b/doc/classes/CameraServer.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<class name="CameraServer" inherits="Object" category="Core" version="3.2">
+ <brief_description>
+ Our camera server keeps track of different cameras accessible in Godot. These are external cameras such as webcams or the cameras on your phone.
+ </brief_description>
+ <description>
+ </description>
+ <tutorials>
+ </tutorials>
+ <methods>
+ <method name="add_feed">
+ <return type="void">
+ </return>
+ <argument index="0" name="feed" type="CameraFeed">
+ </argument>
+ <description>
+ Adds a camera feed to the camera server.
+ </description>
+ </method>
+ <method name="feeds">
+ <return type="Array">
+ </return>
+ <description>
+ Returns an array of [CameraFeed]s.
+ </description>
+ </method>
+ <method name="get_feed">
+ <return type="CameraFeed">
+ </return>
+ <argument index="0" name="index" type="int">
+ </argument>
+ <description>
+ Returns the [CameraFeed] with this id.
+ </description>
+ </method>
+ <method name="get_feed_count">
+ <return type="int">
+ </return>
+ <description>
+ Returns the number of [CameraFeed]s registered.
+ </description>
+ </method>
+ <method name="remove_feed">
+ <return type="void">
+ </return>
+ <argument index="0" name="feed" type="CameraFeed">
+ </argument>
+ <description>
+ Removes a [CameraFeed].
+ </description>
+ </method>
+ </methods>
+ <signals>
+ <signal name="camera_feed_added">
+ <argument index="0" name="id" type="int">
+ </argument>
+ <description>
+ Emitted when a [CameraFeed] is added (webcam is plugged in).
+ </description>
+ </signal>
+ <signal name="camera_feed_removed">
+ <argument index="0" name="id" type="int">
+ </argument>
+ <description>
+ Emitted when a [CameraFeed] is removed (webcam is removed).
+ </description>
+ </signal>
+ </signals>
+ <constants>
+ <constant name="FEED_RGBA_IMAGE" value="0" enum="FeedImage">
+ The RGBA camera image.
+ </constant>
+ <constant name="FEED_YCbCr_IMAGE" value="0" enum="FeedImage">
+ The YCbCr camera image.
+ </constant>
+ <constant name="FEED_Y_IMAGE" value="0" enum="FeedImage">
+ The Y component camera image.
+ </constant>
+ <constant name="FEED_CbCr_IMAGE" value="1" enum="FeedImage">
+ The CbCr component camera image.
+ </constant>
+ </constants>
+</class>
diff --git a/doc/classes/CameraTexture.xml b/doc/classes/CameraTexture.xml
new file mode 100644
index 0000000000..7477be7825
--- /dev/null
+++ b/doc/classes/CameraTexture.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<class name="CameraTexture" inherits="Texture" category="Core" version="3.2">
+ <brief_description>
+ This texture gives access to the camera texture provided by a [CameraFeed]. Note that many cameras supply YCbCr images which need to be converted in a shader.
+ </brief_description>
+ <description>
+ </description>
+ <tutorials>
+ </tutorials>
+ <methods>
+ </methods>
+ <members>
+ <member name="camera_feed_id" type="int" setter="set_camera_feed_id" getter="get_camera_feed_id">
+ Id of the [CameraFeed] for which we want to display the image.
+ </member>
+ <member name="camera_is_active" type="bool" setter="set_camera_active" getter="get_camera_active">
+ Convenience property that gives access to the active property of the [CameraFeed].
+ </member>
+ <member name="which_feed" type="int" setter="set_which_feed" getter="get_which_feed" enum="CameraServer.FeedImage">
+ Which image within the [CameraFeed] we want access to, important if the camera image is split in a Y and CbCr component.
+ </member>
+ </members>
+ <constants>
+ </constants>
+</class>
diff --git a/doc/classes/ClippedCamera.xml b/doc/classes/ClippedCamera.xml
index b7f158dd65..c6dcd6cd96 100644
--- a/doc/classes/ClippedCamera.xml
+++ b/doc/classes/ClippedCamera.xml
@@ -29,6 +29,12 @@
<description>
</description>
</method>
+ <method name="get_clip_offset" qualifiers="const">
+ <return type="float">
+ </return>
+ <description>
+ </description>
+ </method>
<method name="get_collision_mask_bit" qualifiers="const">
<return type="bool">
</return>
diff --git a/doc/classes/Environment.xml b/doc/classes/Environment.xml
index 2a4ab4ce0d..eb94447529 100644
--- a/doc/classes/Environment.xml
+++ b/doc/classes/Environment.xml
@@ -57,6 +57,9 @@
<member name="auto_exposure_speed" type="float" setter="set_tonemap_auto_exposure_speed" getter="get_tonemap_auto_exposure_speed">
Speed of the auto exposure effect. Affects the time needed for the camera to perform auto exposure.
</member>
+ <member name="background_camera_feed_id" type="int" setter="set_camera_feed_id" getter="get_camera_feed_id">
+ The id of the camera feed to show in the background.
+ </member>
<member name="background_canvas_max_layer" type="int" setter="set_canvas_max_layer" getter="get_canvas_max_layer">
Maximum layer id (if using Layer background mode).
</member>
@@ -266,7 +269,10 @@
<constant name="BG_CANVAS" value="4" enum="BGMode">
Display a [CanvasLayer] in the background.
</constant>
- <constant name="BG_MAX" value="6" enum="BGMode">
+ <constant name="BG_CAMERA_FEED" value="6" enum="BGMode">
+ Display a camera feed in the background.
+ </constant>
+ <constant name="BG_MAX" value="7" enum="BGMode">
Helper constant keeping track of the enum's size, has no direct usage in API calls.
</constant>
<constant name="GLOW_BLEND_MODE_ADDITIVE" value="0" enum="GlowBlendMode">
diff --git a/doc/classes/FileDialog.xml b/doc/classes/FileDialog.xml
index 953f364bdb..908c017ac9 100644
--- a/doc/classes/FileDialog.xml
+++ b/doc/classes/FileDialog.xml
@@ -138,5 +138,7 @@
</theme_item>
<theme_item name="reload" type="Texture">
</theme_item>
+ <theme_item name="toggle_hidden" type="Texture">
+ </theme_item>
</theme_items>
</class>
diff --git a/doc/classes/MeshInstance2D.xml b/doc/classes/MeshInstance2D.xml
index 39a733fdb3..4b38b9aa96 100644
--- a/doc/classes/MeshInstance2D.xml
+++ b/doc/classes/MeshInstance2D.xml
@@ -22,6 +22,12 @@
The [Texture] that will be used if using the default [CanvasItemMaterial]. Can be accessed as [code]TEXTURE[/code] in CanvasItem shader.
</member>
</members>
+ <signals>
+ <signal name="texture_changed">
+ <description>
+ </description>
+ </signal>
+ </signals>
<constants>
</constants>
</class>
diff --git a/doc/classes/MultiMeshInstance2D.xml b/doc/classes/MultiMeshInstance2D.xml
index 0bf243ee24..8509986c3c 100644
--- a/doc/classes/MultiMeshInstance2D.xml
+++ b/doc/classes/MultiMeshInstance2D.xml
@@ -22,6 +22,12 @@
The [Texture] that will be used if using the default [CanvasItemMaterial]. Can be accessed as [code]TEXTURE[/code] in CanvasItem shader.
</member>
</members>
+ <signals>
+ <signal name="texture_changed">
+ <description>
+ </description>
+ </signal>
+ </signals>
<constants>
</constants>
</class>
diff --git a/doc/classes/OS.xml b/doc/classes/OS.xml
index 2592bc6775..e994c24582 100644
--- a/doc/classes/OS.xml
+++ b/doc/classes/OS.xml
@@ -508,7 +508,8 @@
<argument index="0" name="tag_name" type="String">
</argument>
<description>
- Returns [code]true[/code] if the feature for the given feature tag is supported in the currently running instance, depending on platform, build etc. Can be used to check whether you're currently running a debug build, on a certain platform or arch, etc. See feature tags documentation.
+ Returns [code]true[/code] if the feature for the given feature tag is supported in the currently running instance, depending on platform, build etc. Can be used to check whether you're currently running a debug build, on a certain platform or arch, etc. Refer to the [url=https://docs.godotengine.org/en/latest/getting_started/workflow/export/feature_tags.html]Feature Tags[/url] documentation for more details.
+ Note that tag names are case-sensitive.
</description>
</method>
<method name="has_touchscreen_ui_hint" qualifiers="const">
diff --git a/doc/classes/Particles.xml b/doc/classes/Particles.xml
index 7820c63ad7..a29b621406 100644
--- a/doc/classes/Particles.xml
+++ b/doc/classes/Particles.xml
@@ -55,8 +55,10 @@
Time ratio between each emission. If [code]0[/code] particles are emitted continuously. If [code]1[/code] all particles are emitted simultaneously. Default value: [code]0[/code].
</member>
<member name="fixed_fps" type="int" setter="set_fixed_fps" getter="get_fixed_fps">
+ The particle system's frame rate is fixed to a value. For instance, changing the value to 2 will make the particles render at 2 frames per second. Note this does not slow down the simulation of the particle system itself.
</member>
<member name="fract_delta" type="bool" setter="set_fractional_delta" getter="get_fractional_delta">
+ If [code]true[/code], results in fractional delta calculation which has a smoother particles display effect. Default value: [code]true[/code].
</member>
<member name="lifetime" type="float" setter="set_lifetime" getter="get_lifetime">
Amount of time each particle will exist. Default value: [code]1[/code].
diff --git a/doc/classes/Particles2D.xml b/doc/classes/Particles2D.xml
index de4877b639..78114e985d 100644
--- a/doc/classes/Particles2D.xml
+++ b/doc/classes/Particles2D.xml
@@ -40,7 +40,7 @@
How rapidly particles in an emission cycle are emitted. If greater than [code]0[/code], there will be a gap in emissions before the next cycle begins. Default value: [code]0[/code].
</member>
<member name="fixed_fps" type="int" setter="set_fixed_fps" getter="get_fixed_fps">
- The particle system's frame rate is fixed to a value. For instance, changing the value to 2 will make the particles render at 2 frames per second. Note this does not slow down the particle system itself.
+ The particle system's frame rate is fixed to a value. For instance, changing the value to 2 will make the particles render at 2 frames per second. Note this does not slow down the simulation of the particle system itself.
</member>
<member name="fract_delta" type="bool" setter="set_fractional_delta" getter="get_fractional_delta">
If [code]true[/code], results in fractional delta calculation which has a smoother particles display effect. Default value: [code]true[/code]
@@ -52,7 +52,7 @@
If [code]true[/code], particles use the parent node's coordinate space. If [code]false[/code], they use global coordinates. Default value: [code]true[/code].
</member>
<member name="normal_map" type="Texture" setter="set_normal_map" getter="get_normal_map">
- Normal map to be used for the [code]texture[/code] property.
+ Normal map to be used for the [member texture] property.
</member>
<member name="one_shot" type="bool" setter="set_one_shot" getter="get_one_shot">
If [code]true[/code], only one emission cycle occurs. If set [code]true[/code] during a cycle, emission will stop at the cycle's end. Default value: [code]false[/code].
diff --git a/doc/classes/ParticlesMaterial.xml b/doc/classes/ParticlesMaterial.xml
index dd7a7cd151..cb06593bc2 100644
--- a/doc/classes/ParticlesMaterial.xml
+++ b/doc/classes/ParticlesMaterial.xml
@@ -100,7 +100,7 @@
Amount of [member spread] in Y/Z plane. A value of [code]1[/code] restricts particles to X/Z plane. Default [code]0[/code].
</member>
<member name="gravity" type="Vector3" setter="set_gravity" getter="get_gravity">
- Gravity applied to every particle. Default value: [code](0, 98, 0)[/code].
+ Gravity applied to every particle. Default value: [code](0, -9.8, 0)[/code].
</member>
<member name="hue_variation" type="float" setter="set_param" getter="get_param">
Initial hue variation applied to each particle.
@@ -112,13 +112,13 @@
Hue variation randomness ratio. Default value: [code]0[/code].
</member>
<member name="initial_velocity" type="float" setter="set_param" getter="get_param">
- Initial velocity magnitude for each particle. Direction comes from [member spread].
+ Initial velocity magnitude for each particle. Direction comes from [member spread] and the node's orientation.
</member>
<member name="initial_velocity_random" type="float" setter="set_param_randomness" getter="get_param_randomness">
Initial velocity randomness ratio. Default value: [code]0[/code].
</member>
<member name="linear_accel" type="float" setter="set_param" getter="get_param">
- Linear acceleration applied to each particle. Acceleration increases velocity magnitude each frame without affecting direction.
+ Linear acceleration applied to each particle in the direction of motion.
</member>
<member name="linear_accel_curve" type="Texture" setter="set_param_texture" getter="get_param_texture">
Each particle's linear acceleration will vary along this [CurveTexture].
@@ -184,7 +184,7 @@
Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set angular velocity properties.
</constant>
<constant name="PARAM_ORBIT_VELOCITY" value="2" enum="Parameter">
- Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set orbital_velocity properties.
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set orbital velocity properties.
</constant>
<constant name="PARAM_LINEAR_ACCEL" value="3" enum="Parameter">
Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set linear acceleration properties.
@@ -205,7 +205,7 @@
Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set scale properties.
</constant>
<constant name="PARAM_HUE_VARIATION" value="9" enum="Parameter">
- Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set hue_variation properties.
+ Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set hue variation properties.
</constant>
<constant name="PARAM_ANIM_SPEED" value="10" enum="Parameter">
Use with [method set_param], [method set_param_randomness], and [method set_param_texture] to set animation speed properties.
diff --git a/doc/classes/VisualServer.xml b/doc/classes/VisualServer.xml
index cb890cbc93..f9b668b38a 100644
--- a/doc/classes/VisualServer.xml
+++ b/doc/classes/VisualServer.xml
@@ -3361,6 +3361,17 @@
<description>
</description>
</method>
+ <method name="texture_bind">
+ <return type="void">
+ </return>
+ <argument index="0" name="texture" type="RID">
+ </argument>
+ <argument index="1" name="number" type="int">
+ </argument>
+ <description>
+ Binds the texture to a texture slot.
+ </description>
+ </method>
<method name="texture_create">
<return type="RID">
</return>
@@ -4429,7 +4440,7 @@
</constant>
<constant name="ENV_BG_KEEP" value="5" enum="EnvironmentBG">
</constant>
- <constant name="ENV_BG_MAX" value="6" enum="EnvironmentBG">
+ <constant name="ENV_BG_MAX" value="7" enum="EnvironmentBG">
</constant>
<constant name="ENV_DOF_BLUR_QUALITY_LOW" value="0" enum="EnvironmentDOFBlurQuality">
</constant>
diff --git a/drivers/dummy/rasterizer_dummy.h b/drivers/dummy/rasterizer_dummy.h
index db6a63a5e1..3deaef09e7 100644
--- a/drivers/dummy/rasterizer_dummy.h
+++ b/drivers/dummy/rasterizer_dummy.h
@@ -61,6 +61,7 @@ public:
void environment_set_bg_energy(RID p_env, float p_energy) {}
void environment_set_canvas_max_layer(RID p_env, int p_max_layer) {}
void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) {}
+ void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id){};
void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) {}
void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) {}
@@ -216,6 +217,7 @@ public:
uint32_t texture_get_height(RID p_texture) const { return 0; }
uint32_t texture_get_depth(RID p_texture) const { return 0; }
void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) {}
+ void texture_bind(RID p_texture, uint32_t p_texture_no) {}
void texture_set_path(RID p_texture, const String &p_path) {
DummyTexture *t = texture_owner.getornull(p_texture);
diff --git a/drivers/gles2/rasterizer_scene_gles2.cpp b/drivers/gles2/rasterizer_scene_gles2.cpp
index 0231bb5837..a07c6832dc 100644
--- a/drivers/gles2/rasterizer_scene_gles2.cpp
+++ b/drivers/gles2/rasterizer_scene_gles2.cpp
@@ -36,6 +36,7 @@
#include "core/project_settings.h"
#include "core/vmap.h"
#include "rasterizer_canvas_gles2.h"
+#include "servers/camera/camera_feed.h"
#include "servers/visual/visual_server_raster.h"
#ifndef GLES_OVER_GL
@@ -769,6 +770,13 @@ void RasterizerSceneGLES2::environment_set_ambient_light(RID p_env, const Color
env->ambient_sky_contribution = p_sky_contribution;
}
+void RasterizerSceneGLES2::environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) {
+ Environment *env = environment_owner.getornull(p_env);
+ ERR_FAIL_COND(!env);
+
+ env->camera_feed_id = p_camera_feed_id;
+}
+
void RasterizerSceneGLES2::environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality) {
Environment *env = environment_owner.getornull(p_env);
ERR_FAIL_COND(!env);
@@ -2843,6 +2851,7 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
// clear color
Color clear_color(0, 0, 0, 1);
+ Ref<CameraFeed> feed;
if (storage->frame.current_rt && storage->frame.current_rt->flags[RasterizerStorage::RENDER_TARGET_TRANSPARENT]) {
clear_color = Color(0, 0, 0, 0);
@@ -2855,6 +2864,9 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
} else if (env->bg_mode == VS::ENV_BG_CANVAS || env->bg_mode == VS::ENV_BG_COLOR || env->bg_mode == VS::ENV_BG_COLOR_SKY) {
clear_color = env->bg_color;
storage->frame.clear_request = false;
+ } else if (env->bg_mode == VS::ENV_BG_CAMERA_FEED) {
+ feed = CameraServer::get_singleton()->get_feed_by_id(env->camera_feed_id);
+ storage->frame.clear_request = false;
} else {
storage->frame.clear_request = false;
}
@@ -2891,7 +2903,66 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
env_radiance_tex = sky->radiance;
}
} break;
+ case VS::ENV_BG_CAMERA_FEED: {
+ if (feed.is_valid() && (feed->get_base_width() > 0) && (feed->get_base_height() > 0)) {
+ // copy our camera feed to our background
+
+ glDisable(GL_BLEND);
+ glDepthMask(GL_FALSE);
+ glDisable(GL_DEPTH_TEST);
+ glDisable(GL_CULL_FACE);
+
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_NO_ALPHA, true);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_DISPLAY_TRANSFORM, true);
+
+ if (feed->get_datatype() == CameraFeed::FEED_RGB) {
+ RID camera_RGBA = feed->get_texture(CameraServer::FEED_RGBA_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_RGBA, 0);
+
+ } else if (feed->get_datatype() == CameraFeed::FEED_YCbCr) {
+ RID camera_YCbCr = feed->get_texture(CameraServer::FEED_YCbCr_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_YCbCr, 0);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, true);
+
+ } else if (feed->get_datatype() == CameraFeed::FEED_YCbCr_Sep) {
+ RID camera_Y = feed->get_texture(CameraServer::FEED_Y_IMAGE);
+ RID camera_CbCr = feed->get_texture(CameraServer::FEED_CbCr_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_Y, 0);
+ VS::get_singleton()->texture_bind(camera_CbCr, 1);
+
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::SEP_CBCR_TEXTURE, true);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, true);
+ };
+
+ storage->shaders.copy.bind();
+ storage->shaders.copy.set_uniform(CopyShaderGLES2::DISPLAY_TRANSFORM, feed->get_transform());
+
+ storage->bind_quad_array();
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ glDisableVertexAttribArray(VS::ARRAY_VERTEX);
+ glDisableVertexAttribArray(VS::ARRAY_TEX_UV);
+ glBindBuffer(GL_ARRAY_BUFFER, 0);
+
+ // turn off everything used
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::SEP_CBCR_TEXTURE, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_NO_ALPHA, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_DISPLAY_TRANSFORM, false);
+
+ //restore
+ glEnable(GL_BLEND);
+ glDepthMask(GL_TRUE);
+ glEnable(GL_DEPTH_TEST);
+ glEnable(GL_CULL_FACE);
+ } else {
+ // don't have a feed, just show greenscreen :)
+ clear_color = Color(0.0, 1.0, 0.0, 1.0);
+ }
+ } break;
default: {
// FIXME: implement other background modes
} break;
diff --git a/drivers/gles2/rasterizer_scene_gles2.h b/drivers/gles2/rasterizer_scene_gles2.h
index d5a691d0b9..c95385eb24 100644
--- a/drivers/gles2/rasterizer_scene_gles2.h
+++ b/drivers/gles2/rasterizer_scene_gles2.h
@@ -354,6 +354,8 @@ public:
float bg_energy;
float sky_ambient;
+ int camera_feed_id;
+
Color ambient_color;
float ambient_energy;
float ambient_sky_contribution;
@@ -381,6 +383,7 @@ public:
sky_custom_fov(0.0),
bg_energy(1.0),
sky_ambient(0),
+ camera_feed_id(0),
ambient_energy(1.0),
ambient_sky_contribution(0.0),
canvas_max_layer(0),
@@ -413,6 +416,7 @@ public:
virtual void environment_set_bg_energy(RID p_env, float p_energy);
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer);
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0);
+ virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id);
virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
diff --git a/drivers/gles2/rasterizer_storage_gles2.cpp b/drivers/gles2/rasterizer_storage_gles2.cpp
index 7b5f193b2b..70f1173a7d 100644
--- a/drivers/gles2/rasterizer_storage_gles2.cpp
+++ b/drivers/gles2/rasterizer_storage_gles2.cpp
@@ -968,6 +968,15 @@ uint32_t RasterizerStorageGLES2::texture_get_texid(RID p_texture) const {
return texture->tex_id;
}
+void RasterizerStorageGLES2::texture_bind(RID p_texture, uint32_t p_texture_no) {
+ Texture *texture = texture_owner.getornull(p_texture);
+
+ ERR_FAIL_COND(!texture);
+
+ glActiveTexture(GL_TEXTURE0 + p_texture_no);
+ glBindTexture(texture->target, texture->tex_id);
+}
+
uint32_t RasterizerStorageGLES2::texture_get_width(RID p_texture) const {
Texture *texture = texture_owner.getornull(p_texture);
@@ -4688,6 +4697,7 @@ void RasterizerStorageGLES2::_render_target_allocate(RenderTarget *rt) {
/* BACK FBO */
/* For MSAA */
+#ifndef JAVASCRIPT_ENABLED
if (rt->msaa != VS::VIEWPORT_MSAA_DISABLED && config.multisample_supported) {
rt->multisample_active = true;
@@ -4743,7 +4753,9 @@ void RasterizerStorageGLES2::_render_target_allocate(RenderTarget *rt) {
glBindRenderbuffer(GL_RENDERBUFFER, 0);
- } else {
+ } else
+#endif
+ {
rt->multisample_active = false;
}
diff --git a/drivers/gles2/rasterizer_storage_gles2.h b/drivers/gles2/rasterizer_storage_gles2.h
index af57aa3d9b..d139697b86 100644
--- a/drivers/gles2/rasterizer_storage_gles2.h
+++ b/drivers/gles2/rasterizer_storage_gles2.h
@@ -350,6 +350,7 @@ public:
virtual uint32_t texture_get_height(RID p_texture) const;
virtual uint32_t texture_get_depth(RID p_texture) const;
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth);
+ virtual void texture_bind(RID p_texture, uint32_t p_texture_no);
virtual void texture_set_path(RID p_texture, const String &p_path);
virtual String texture_get_path(RID p_texture) const;
diff --git a/drivers/gles2/shaders/copy.glsl b/drivers/gles2/shaders/copy.glsl
index 931b3f3708..195db7c45f 100644
--- a/drivers/gles2/shaders/copy.glsl
+++ b/drivers/gles2/shaders/copy.glsl
@@ -28,8 +28,15 @@ varying vec2 uv_interp;
#endif
varying vec2 uv2_interp;
+// These definitions are here because the shader-wrapper builder does
+// not understand `#elif defined()`
+#ifdef USE_DISPLAY_TRANSFORM
+#endif
+
#ifdef USE_COPY_SECTION
uniform highp vec4 copy_section;
+#elif defined(USE_DISPLAY_TRANSFORM)
+uniform highp mat4 display_transform;
#endif
void main() {
@@ -48,6 +55,8 @@ void main() {
#ifdef USE_COPY_SECTION
uv_interp = copy_section.xy + uv_interp * copy_section.zw;
gl_Position.xy = (copy_section.xy + (gl_Position.xy * 0.5 + 0.5) * copy_section.zw) * 2.0 - 1.0;
+#elif defined(USE_DISPLAY_TRANSFORM)
+ uv_interp = (display_transform * vec4(uv_in, 1.0, 1.0)).xy;
#endif
}
@@ -88,6 +97,10 @@ uniform samplerCube source_cube; // texunit:0
uniform sampler2D source; // texunit:0
#endif
+#ifdef SEP_CBCR_TEXTURE
+uniform sampler2D CbCr; //texunit:1
+#endif
+
varying vec2 uv2_interp;
#ifdef USE_MULTIPLIER
@@ -145,10 +158,26 @@ void main() {
#elif defined(USE_CUBEMAP)
vec4 color = textureCube(source_cube, normalize(cube_interp));
+#elif defined(SEP_CBCR_TEXTURE)
+ vec4 color;
+ color.r = texture2D(source, uv_interp).r;
+ color.gb = texture2D(CbCr, uv_interp).rg - vec2(0.5, 0.5);
+ color.a = 1.0;
#else
vec4 color = texture2D(source, uv_interp);
#endif
+#ifdef YCBCR_TO_RGB
+ // YCbCr -> RGB conversion
+
+ // Using BT.601, which is the standard for SDTV is provided as a reference
+ color.rgb = mat3(
+ vec3(1.00000, 1.00000, 1.00000),
+ vec3(0.00000, -0.34413, 1.77200),
+ vec3(1.40200, -0.71414, 0.00000)) *
+ color.rgb;
+#endif
+
#ifdef USE_NO_ALPHA
color.a = 1.0;
#endif
diff --git a/drivers/gles3/rasterizer_scene_gles3.cpp b/drivers/gles3/rasterizer_scene_gles3.cpp
index 7e02f2f3f5..7acb8a22bc 100644
--- a/drivers/gles3/rasterizer_scene_gles3.cpp
+++ b/drivers/gles3/rasterizer_scene_gles3.cpp
@@ -34,6 +34,7 @@
#include "core/os/os.h"
#include "core/project_settings.h"
#include "rasterizer_canvas_gles3.h"
+#include "servers/camera/camera_feed.h"
#include "servers/visual/visual_server_raster.h"
#ifndef GLES_OVER_GL
@@ -830,6 +831,12 @@ void RasterizerSceneGLES3::environment_set_ambient_light(RID p_env, const Color
env->ambient_energy = p_energy;
env->ambient_sky_contribution = p_sky_contribution;
}
+void RasterizerSceneGLES3::environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) {
+ Environment *env = environment_owner.getornull(p_env);
+ ERR_FAIL_COND(!env);
+
+ env->camera_feed_id = p_camera_feed_id;
+}
void RasterizerSceneGLES3::environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality) {
@@ -4342,6 +4349,7 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
Color clear_color(0, 0, 0, 0);
RasterizerStorageGLES3::Sky *sky = NULL;
+ Ref<CameraFeed> feed;
GLuint env_radiance_tex = 0;
if (state.debug_draw == VS::VIEWPORT_DEBUG_DRAW_OVERDRAW) {
@@ -4376,6 +4384,9 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
clear_color = env->bg_color.to_linear();
storage->frame.clear_request = false;
+ } else if (env->bg_mode == VS::ENV_BG_CAMERA_FEED) {
+ feed = CameraServer::get_singleton()->get_feed_by_id(env->camera_feed_id);
+ storage->frame.clear_request = false;
} else {
storage->frame.clear_request = false;
}
@@ -4426,6 +4437,63 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
break;
+ case VS::ENV_BG_CAMERA_FEED:
+ if (feed.is_valid() && (feed->get_base_width() > 0) && (feed->get_base_height() > 0)) {
+ // copy our camera feed to our background
+
+ glDisable(GL_BLEND);
+ glDepthMask(GL_FALSE);
+ glDisable(GL_DEPTH_TEST);
+ glDisable(GL_CULL_FACE);
+
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::USE_DISPLAY_TRANSFORM, true);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::DISABLE_ALPHA, true);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::SRGB_TO_LINEAR, true);
+
+ if (feed->get_datatype() == CameraFeed::FEED_RGB) {
+ RID camera_RGBA = feed->get_texture(CameraServer::FEED_RGBA_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_RGBA, 0);
+ } else if (feed->get_datatype() == CameraFeed::FEED_YCbCr) {
+ RID camera_YCbCr = feed->get_texture(CameraServer::FEED_YCbCr_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_YCbCr, 0);
+
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, true);
+
+ } else if (feed->get_datatype() == CameraFeed::FEED_YCbCr_Sep) {
+ RID camera_Y = feed->get_texture(CameraServer::FEED_Y_IMAGE);
+ RID camera_CbCr = feed->get_texture(CameraServer::FEED_CbCr_IMAGE);
+
+ VS::get_singleton()->texture_bind(camera_Y, 0);
+ VS::get_singleton()->texture_bind(camera_CbCr, 1);
+
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::SEP_CBCR_TEXTURE, true);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, true);
+ };
+
+ storage->shaders.copy.bind();
+ storage->shaders.copy.set_uniform(CopyShaderGLES3::DISPLAY_TRANSFORM, feed->get_transform());
+
+ _copy_screen(true, true);
+
+ //turn off everything used
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::USE_DISPLAY_TRANSFORM, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::DISABLE_ALPHA, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::SRGB_TO_LINEAR, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::SEP_CBCR_TEXTURE, false);
+ storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, false);
+
+ //restore
+ glEnable(GL_BLEND);
+ glDepthMask(GL_TRUE);
+ glEnable(GL_DEPTH_TEST);
+ glEnable(GL_CULL_FACE);
+ } else {
+ // don't have a feed, just show greenscreen :)
+ clear_color = Color(0.0, 1.0, 0.0, 1.0);
+ }
+ break;
default: {
}
}
diff --git a/drivers/gles3/rasterizer_scene_gles3.h b/drivers/gles3/rasterizer_scene_gles3.h
index 59e23e5ac9..910f90edc2 100644
--- a/drivers/gles3/rasterizer_scene_gles3.h
+++ b/drivers/gles3/rasterizer_scene_gles3.h
@@ -376,6 +376,8 @@ public:
float bg_energy;
float sky_ambient;
+ int camera_feed_id;
+
Color ambient_color;
float ambient_energy;
float ambient_sky_contribution;
@@ -461,6 +463,7 @@ public:
sky_custom_fov(0.0),
bg_energy(1.0),
sky_ambient(0),
+ camera_feed_id(0),
ambient_energy(1.0),
ambient_sky_contribution(0.0),
canvas_max_layer(0),
@@ -542,6 +545,7 @@ public:
virtual void environment_set_bg_energy(RID p_env, float p_energy);
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer);
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0);
+ virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id);
virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
diff --git a/drivers/gles3/rasterizer_storage_gles3.cpp b/drivers/gles3/rasterizer_storage_gles3.cpp
index 01b85458c2..f8a3283869 100644
--- a/drivers/gles3/rasterizer_storage_gles3.cpp
+++ b/drivers/gles3/rasterizer_storage_gles3.cpp
@@ -1437,6 +1437,15 @@ uint32_t RasterizerStorageGLES3::texture_get_texid(RID p_texture) const {
return texture->tex_id;
}
+void RasterizerStorageGLES3::texture_bind(RID p_texture, uint32_t p_texture_no) {
+
+ Texture *texture = texture_owner.getornull(p_texture);
+
+ ERR_FAIL_COND(!texture);
+
+ glActiveTexture(GL_TEXTURE0 + p_texture_no);
+ glBindTexture(texture->target, texture->tex_id);
+}
uint32_t RasterizerStorageGLES3::texture_get_width(RID p_texture) const {
Texture *texture = texture_owner.get(p_texture);
diff --git a/drivers/gles3/rasterizer_storage_gles3.h b/drivers/gles3/rasterizer_storage_gles3.h
index 3d86558407..badb656e96 100644
--- a/drivers/gles3/rasterizer_storage_gles3.h
+++ b/drivers/gles3/rasterizer_storage_gles3.h
@@ -358,6 +358,7 @@ public:
virtual uint32_t texture_get_height(RID p_texture) const;
virtual uint32_t texture_get_depth(RID p_texture) const;
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth);
+ virtual void texture_bind(RID p_texture, uint32_t p_texture_no);
virtual void texture_set_path(RID p_texture, const String &p_path);
virtual String texture_get_path(RID p_texture) const;
diff --git a/drivers/gles3/shaders/copy.glsl b/drivers/gles3/shaders/copy.glsl
index e1a0813efc..232b9ce7c0 100644
--- a/drivers/gles3/shaders/copy.glsl
+++ b/drivers/gles3/shaders/copy.glsl
@@ -18,10 +18,19 @@ out vec2 uv_interp;
out vec2 uv2_interp;
+// These definitions are here because the shader-wrapper builder does
+// not understand `#elif defined()`
+#ifdef USE_DISPLAY_TRANSFORM
+#endif
+
#ifdef USE_COPY_SECTION
uniform vec4 copy_section;
+#elif defined(USE_DISPLAY_TRANSFORM)
+
+uniform highp mat4 display_transform;
+
#endif
void main() {
@@ -44,6 +53,9 @@ void main() {
uv_interp = copy_section.xy + uv_interp * copy_section.zw;
gl_Position.xy = (copy_section.xy + (gl_Position.xy * 0.5 + 0.5) * copy_section.zw) * 2.0 - 1.0;
+#elif defined(USE_DISPLAY_TRANSFORM)
+
+ uv_interp = (display_transform * vec4(uv_in, 1.0, 1.0)).xy;
#endif
}
@@ -73,6 +85,8 @@ uniform highp vec4 asym_proj;
#endif
#ifdef USE_TEXTURE2DARRAY
#endif
+#ifdef YCBCR_TO_SRGB
+#endif
#ifdef USE_CUBEMAP
uniform samplerCube source_cube; //texunit:0
@@ -84,6 +98,10 @@ uniform sampler2DArray source_2d_array; //texunit:0
uniform sampler2D source; //texunit:0
#endif
+#ifdef SEP_CBCR_TEXTURE
+uniform sampler2D CbCr; //texunit:1
+#endif
+
/* clang-format on */
#if defined(USE_TEXTURE3D) || defined(USE_TEXTURE2DARRAY)
@@ -166,14 +184,30 @@ void main() {
vec4 color = textureLod(source_3d, vec3(uv_interp, layer), 0.0);
#elif defined(USE_TEXTURE2DARRAY)
vec4 color = textureLod(source_2d_array, vec3(uv_interp, layer), 0.0);
+#elif defined(SEP_CBCR_TEXTURE)
+ vec4 color;
+ color.r = textureLod(source, uv_interp, 0.0).r;
+ color.gb = textureLod(CbCr, uv_interp, 0.0).rg - vec2(0.5, 0.5);
+ color.a = 1.0;
#else
vec4 color = textureLod(source, uv_interp, 0.0);
#endif
#ifdef LINEAR_TO_SRGB
- //regular Linear -> SRGB conversion
+ // regular Linear -> SRGB conversion
vec3 a = vec3(0.055);
color.rgb = mix((vec3(1.0) + a) * pow(color.rgb, vec3(1.0 / 2.4)) - a, 12.92 * color.rgb, lessThan(color.rgb, vec3(0.0031308)));
+
+#elif defined(YCBCR_TO_SRGB)
+
+ // YCbCr -> SRGB conversion
+ // Using BT.709 which is the standard for HDTV
+ color.rgb = mat3(
+ vec3(1.00000, 1.00000, 1.00000),
+ vec3(0.00000, -0.18732, 1.85560),
+ vec3(1.57481, -0.46813, 0.00000)) *
+ color.rgb;
+
#endif
#ifdef SRGB_TO_LINEAR
diff --git a/drivers/unix/net_socket_posix.cpp b/drivers/unix/net_socket_posix.cpp
index 37abccbe27..75d51c8503 100644
--- a/drivers/unix/net_socket_posix.cpp
+++ b/drivers/unix/net_socket_posix.cpp
@@ -306,7 +306,7 @@ Error NetSocketPosix::bind(IP_Address p_addr, uint16_t p_port) {
sockaddr_storage addr;
size_t addr_size = _set_addr_storage(&addr, p_addr, p_port, _ip_type);
- if (::bind(_sock, (struct sockaddr *)&addr, addr_size) == SOCK_EMPTY) {
+ if (::bind(_sock, (struct sockaddr *)&addr, addr_size) != 0) {
close();
ERR_FAIL_V(ERR_UNAVAILABLE);
}
@@ -317,7 +317,7 @@ Error NetSocketPosix::bind(IP_Address p_addr, uint16_t p_port) {
Error NetSocketPosix::listen(int p_max_pending) {
ERR_FAIL_COND_V(!is_open(), ERR_UNCONFIGURED);
- if (::listen(_sock, p_max_pending) == SOCK_EMPTY) {
+ if (::listen(_sock, p_max_pending) != 0) {
close();
ERR_FAIL_V(FAILED);
@@ -334,7 +334,7 @@ Error NetSocketPosix::connect_to_host(IP_Address p_host, uint16_t p_port) {
struct sockaddr_storage addr;
size_t addr_size = _set_addr_storage(&addr, p_host, p_port, _ip_type);
- if (::connect(_sock, (struct sockaddr *)&addr, addr_size) == SOCK_EMPTY) {
+ if (::connect(_sock, (struct sockaddr *)&addr, addr_size) != 0) {
NetError err = _get_socket_error();
diff --git a/drivers/wasapi/audio_driver_wasapi.cpp b/drivers/wasapi/audio_driver_wasapi.cpp
index c97849ef07..fea38ee95d 100644
--- a/drivers/wasapi/audio_driver_wasapi.cpp
+++ b/drivers/wasapi/audio_driver_wasapi.cpp
@@ -167,13 +167,13 @@ Error AudioDriverWASAPI::audio_device_init(AudioDeviceWASAPI *p_device, bool p_c
ERR_FAIL_COND_V(hr != S_OK, ERR_CANT_OPEN);
for (ULONG i = 0; i < count && !found; i++) {
- IMMDevice *device = NULL;
+ IMMDevice *tmp_device = NULL;
- hr = devices->Item(i, &device);
+ hr = devices->Item(i, &tmp_device);
ERR_BREAK(hr != S_OK);
IPropertyStore *props = NULL;
- hr = device->OpenPropertyStore(STGM_READ, &props);
+ hr = tmp_device->OpenPropertyStore(STGM_READ, &props);
ERR_BREAK(hr != S_OK);
PROPVARIANT propvar;
@@ -183,7 +183,7 @@ Error AudioDriverWASAPI::audio_device_init(AudioDeviceWASAPI *p_device, bool p_c
ERR_BREAK(hr != S_OK);
if (p_device->device_name == String(propvar.pwszVal)) {
- hr = device->GetId(&strId);
+ hr = tmp_device->GetId(&strId);
ERR_BREAK(hr != S_OK);
found = true;
@@ -191,7 +191,7 @@ Error AudioDriverWASAPI::audio_device_init(AudioDeviceWASAPI *p_device, bool p_c
PropVariantClear(&propvar);
props->Release();
- device->Release();
+ tmp_device->Release();
}
if (found) {
@@ -289,7 +289,7 @@ Error AudioDriverWASAPI::audio_device_init(AudioDeviceWASAPI *p_device, bool p_c
}
DWORD streamflags = 0;
- if (mix_rate != pwfex->nSamplesPerSec) {
+ if ((DWORD)mix_rate != pwfex->nSamplesPerSec) {
streamflags |= AUDCLNT_STREAMFLAGS_RATEADJUST;
pwfex->nSamplesPerSec = mix_rate;
pwfex->nAvgBytesPerSec = pwfex->nSamplesPerSec * pwfex->nChannels * (pwfex->wBitsPerSample / 8);
@@ -571,7 +571,7 @@ void AudioDriverWASAPI::thread_func(void *p_udata) {
if (ad->audio_output.active) {
ad->audio_server_process(ad->buffer_frames, ad->samples_in.ptrw());
} else {
- for (unsigned int i = 0; i < ad->samples_in.size(); i++) {
+ for (int i = 0; i < ad->samples_in.size(); i++) {
ad->samples_in.write[i] = 0;
}
}
@@ -699,7 +699,7 @@ void AudioDriverWASAPI::thread_func(void *p_udata) {
ERR_BREAK(hr != S_OK);
// fixme: Only works for floating point atm
- for (int j = 0; j < num_frames_available; j++) {
+ for (UINT32 j = 0; j < num_frames_available; j++) {
int32_t l, r;
if (flags & AUDCLNT_BUFFERFLAGS_SILENT) {
diff --git a/drivers/windows/file_access_windows.cpp b/drivers/windows/file_access_windows.cpp
index 31360d319f..646e744248 100644
--- a/drivers/windows/file_access_windows.cpp
+++ b/drivers/windows/file_access_windows.cpp
@@ -93,7 +93,7 @@ Error FileAccessWindows::_open(const String &p_path, int p_mode_flags) {
// a file using the wrong case (which *works* on Windows, but won't on other
// platforms).
if (p_mode_flags == READ) {
- WIN32_FIND_DATAW d = { 0 };
+ WIN32_FIND_DATAW d;
HANDLE f = FindFirstFileW(path.c_str(), &d);
if (f) {
String fname = d.cFileName;
@@ -302,7 +302,7 @@ void FileAccessWindows::store_buffer(const uint8_t *p_src, int p_length) {
}
prev_op = WRITE;
}
- ERR_FAIL_COND(fwrite(p_src, 1, p_length, f) != p_length);
+ ERR_FAIL_COND(fwrite(p_src, 1, p_length, f) != (size_t)p_length);
}
bool FileAccessWindows::file_exists(const String &p_name) {
diff --git a/editor/connections_dialog.cpp b/editor/connections_dialog.cpp
index 569ad0297e..6d3603f31b 100644
--- a/editor/connections_dialog.cpp
+++ b/editor/connections_dialog.cpp
@@ -363,7 +363,7 @@ ConnectDialog::ConnectDialog() {
tree->connect("node_selected", this, "_tree_node_selected");
tree->set_connect_to_script_mode(true);
- Node *mc = vbc_left->add_margin_child(TTR("Connect To Script:"), tree, true);
+ Node *mc = vbc_left->add_margin_child(TTR("Connect to Script:"), tree, true);
connect_to_label = Object::cast_to<Label>(vbc_left->get_child(mc->get_index() - 1));
error_label = memnew(Label);
@@ -413,7 +413,7 @@ ConnectDialog::ConnectDialog() {
vbc_right->add_margin_child(TTR("Extra Call Arguments:"), bind_editor, true);
HBoxContainer *dstm_hb = memnew(HBoxContainer);
- vbc_left->add_margin_child("Method to Create:", dstm_hb);
+ vbc_left->add_margin_child("Receiver Method:", dstm_hb);
dst_method = memnew(LineEdit);
dst_method->set_h_size_flags(SIZE_EXPAND_FILL);
@@ -489,8 +489,26 @@ void ConnectionsDock::_make_or_edit_connection() {
bool oshot = connect_dialog->get_oneshot();
cToMake.flags = CONNECT_PERSIST | (defer ? CONNECT_DEFERRED : 0) | (oshot ? CONNECT_ONESHOT : 0);
- // Conditions to add function, must have a script and must have a method.
- bool add_script_function = !target->get_script().is_null() && !ClassDB::has_method(target->get_class(), cToMake.method);
+ // Conditions to add function: must have a script and must not have the method already
+ // (in the class, the script itself, or inherited).
+ bool add_script_function = false;
+ Ref<Script> script = target->get_script();
+ if (!target->get_script().is_null() && !ClassDB::has_method(target->get_class(), cToMake.method)) {
+ // There is a chance that the method is inherited from another script.
+ bool found_inherited_function = false;
+ Ref<Script> inherited_script = script->get_base_script();
+ while (!inherited_script.is_null()) {
+ int line = inherited_script->get_language()->find_function(cToMake.method, inherited_script->get_source_code());
+ if (line != -1) {
+ found_inherited_function = true;
+ break;
+ }
+
+ inherited_script = inherited_script->get_base_script();
+ }
+
+ add_script_function = !found_inherited_function;
+ }
PoolStringArray script_function_args;
if (add_script_function) {
// Pick up args here before "it" is deleted by update_tree.
@@ -507,8 +525,7 @@ void ConnectionsDock::_make_or_edit_connection() {
_connect(cToMake);
}
- // IMPORTANT NOTE: _disconnect and _connect cause an update_tree,
- // which will delete the object "it" is pointing to.
+ // IMPORTANT NOTE: _disconnect and _connect cause an update_tree, which will delete the object "it" is pointing to.
it = NULL;
if (add_script_function) {
@@ -677,7 +694,7 @@ void ConnectionsDock::_open_connection_dialog(Connection cToEdit) {
if (src && dst) {
connect_dialog->set_title(TTR("Edit Connection:") + cToEdit.signal);
- connect_dialog->popup_centered_ratio();
+ connect_dialog->popup_centered();
connect_dialog->init(cToEdit, true);
}
}
diff --git a/editor/dependency_editor.cpp b/editor/dependency_editor.cpp
index cc9e2c12e8..9a049f3ae3 100644
--- a/editor/dependency_editor.cpp
+++ b/editor/dependency_editor.cpp
@@ -35,9 +35,6 @@
#include "editor_node.h"
#include "scene/gui/margin_container.h"
-void DependencyEditor::_notification(int p_what) {
-}
-
void DependencyEditor::_searched(const String &p_path) {
Map<String, String> dep_rename;
@@ -63,7 +60,7 @@ void DependencyEditor::_load_pressed(Object *p_item, int p_cell, int p_button) {
for (List<String>::Element *E = ext.front(); E; E = E->next()) {
search->add_filter("*" + E->get());
}
- search->popup_centered_ratio();
+ search->popup_centered_ratio(0.65); // So it doesn't completely cover the dialog below it.
}
void DependencyEditor::_fix_and_find(EditorFileSystemDirectory *efsd, Map<String, Map<String, String> > &candidates) {
@@ -222,7 +219,7 @@ void DependencyEditor::edit(const String &p_path) {
set_title(TTR("Dependencies For:") + " " + p_path.get_file());
_update_list();
- popup_centered_ratio();
+ popup_centered_ratio(0.7); // So it doesn't completely cover the dialog below it.
if (EditorNode::get_singleton()->is_scene_open(p_path)) {
EditorNode::get_singleton()->show_warning(vformat(TTR("Scene '%s' is currently being edited.\nChanges will only take effect when reloaded."), p_path.get_file()));
@@ -478,12 +475,13 @@ void DependencyRemoveDialog::show(const Vector<String> &p_folders, const Vector<
if (removed_deps.empty()) {
owners->hide();
text->set_text(TTR("Remove selected files from the project? (no undo)"));
- popup_centered_minsize(Size2(400, 100));
+ set_size(Size2());
+ popup_centered();
} else {
_build_removed_dependency_tree(removed_deps);
owners->show();
text->set_text(TTR("The files being removed are required by other resources in order for them to work.\nRemove them anyway? (no undo)"));
- popup_centered_minsize(Size2(500, 350));
+ popup_centered(Size2(500, 350));
}
EditorFileSystem::get_singleton()->scan_changes();
}
@@ -579,6 +577,8 @@ void DependencyRemoveDialog::_bind_methods() {
DependencyRemoveDialog::DependencyRemoveDialog() {
+ get_ok()->set_text(TTR("Remove"));
+
VBoxContainer *vb = memnew(VBoxContainer);
add_child(vb);
@@ -589,7 +589,6 @@ DependencyRemoveDialog::DependencyRemoveDialog() {
owners->set_hide_root(true);
vb->add_child(owners);
owners->set_v_size_flags(SIZE_EXPAND_FILL);
- get_ok()->set_text(TTR("Remove"));
}
//////////////
@@ -617,7 +616,7 @@ void DependencyErrorDialog::show(Mode p_mode, const String &p_for_file, const Ve
ti->set_icon(0, icon);
}
- popup_centered_minsize(Size2(500, 220));
+ popup_centered();
}
void DependencyErrorDialog::ok_pressed() {
@@ -646,7 +645,8 @@ DependencyErrorDialog::DependencyErrorDialog() {
files->set_hide_root(true);
vb->add_margin_child(TTR("Load failed due to missing dependencies:"), files, true);
files->set_v_size_flags(SIZE_EXPAND_FILL);
- files->set_custom_minimum_size(Size2(1, 200));
+
+ set_custom_minimum_size(Size2(500, 220));
get_ok()->set_text(TTR("Open Anyway"));
get_cancel()->set_text(TTR("Close"));
@@ -670,7 +670,7 @@ void OrphanResourcesDialog::ok_pressed() {
return;
delete_confirm->set_text(vformat(TTR("Permanently delete %d item(s)? (No undo!)"), paths.size()));
- delete_confirm->popup_centered_minsize();
+ delete_confirm->popup_centered_clamped(delete_confirm->get_minimum_size());
}
bool OrphanResourcesDialog::_fill_owners(EditorFileSystemDirectory *efsd, HashMap<String, int> &refs, TreeItem *p_parent) {
@@ -794,6 +794,15 @@ void OrphanResourcesDialog::_bind_methods() {
OrphanResourcesDialog::OrphanResourcesDialog() {
+ set_title(TTR("Orphan Resource Explorer"));
+ delete_confirm = memnew(ConfirmationDialog);
+ get_ok()->set_text(TTR("Delete"));
+ add_child(delete_confirm);
+ dep_edit = memnew(DependencyEditor);
+ add_child(dep_edit);
+ delete_confirm->connect("confirmed", this, "_delete_confirm");
+ set_hide_on_ok(false);
+
VBoxContainer *vbc = memnew(VBoxContainer);
add_child(vbc);
@@ -807,14 +816,5 @@ OrphanResourcesDialog::OrphanResourcesDialog() {
files->set_column_title(1, TTR("Owns"));
files->set_hide_root(true);
vbc->add_margin_child(TTR("Resources Without Explicit Ownership:"), files, true);
- set_title(TTR("Orphan Resource Explorer"));
- delete_confirm = memnew(ConfirmationDialog);
- delete_confirm->set_text(TTR("Delete selected files?"));
- get_ok()->set_text(TTR("Delete"));
- add_child(delete_confirm);
- dep_edit = memnew(DependencyEditor);
- add_child(dep_edit);
files->connect("button_pressed", this, "_button_pressed");
- delete_confirm->connect("confirmed", this, "_delete_confirm");
- set_hide_on_ok(false);
}
diff --git a/editor/dependency_editor.h b/editor/dependency_editor.h
index 23c3cc031c..22e28a4d26 100644
--- a/editor/dependency_editor.h
+++ b/editor/dependency_editor.h
@@ -63,7 +63,6 @@ class DependencyEditor : public AcceptDialog {
protected:
static void _bind_methods();
- void _notification(int p_what);
public:
void edit(const String &p_path);
diff --git a/editor/editor_fonts.cpp b/editor/editor_fonts.cpp
index cddabbc4e4..40ecffbb3b 100644
--- a/editor/editor_fonts.cpp
+++ b/editor/editor_fonts.cpp
@@ -204,9 +204,8 @@ void editor_register_fonts(Ref<Theme> p_theme) {
dfmono->set_antialiased(font_source_antialiased);
dfmono->set_hinting(font_source_hinting);
dfmono->set_font_ptr(_font_Hack_Regular, _font_Hack_Regular_size);
- //dfd->set_force_autohinter(true); //just looks better..i think?
- int default_font_size = int(EditorSettings::get_singleton()->get("interface/editor/main_font_size")) * EDSCALE;
+ int default_font_size = int(EDITOR_GET("interface/editor/main_font_size")) * EDSCALE;
// Default font
MAKE_DEFAULT_FONT(df, default_font_size);
@@ -220,15 +219,14 @@ void editor_register_fonts(Ref<Theme> p_theme) {
MAKE_BOLD_FONT(df_title, default_font_size + 2 * EDSCALE);
p_theme->set_font("title", "EditorFonts", df_title);
- // Doc font
- MAKE_BOLD_FONT(df_doc_title, int(EDITOR_DEF("text_editor/help/help_title_font_size", 23)) * EDSCALE);
-
- MAKE_DEFAULT_FONT(df_doc, int(EDITOR_DEF("text_editor/help/help_font_size", 15)) * EDSCALE);
-
+ // Documentation fonts
+ MAKE_DEFAULT_FONT(df_doc, int(EDITOR_GET("text_editor/help/help_font_size")) * EDSCALE);
+ MAKE_BOLD_FONT(df_doc_bold, int(EDITOR_GET("text_editor/help/help_font_size")) * EDSCALE);
+ MAKE_BOLD_FONT(df_doc_title, int(EDITOR_GET("text_editor/help/help_title_font_size")) * EDSCALE);
+ MAKE_SOURCE_FONT(df_doc_code, int(EDITOR_GET("text_editor/help/help_source_font_size")) * EDSCALE);
p_theme->set_font("doc", "EditorFonts", df_doc);
+ p_theme->set_font("doc_bold", "EditorFonts", df_doc_bold);
p_theme->set_font("doc_title", "EditorFonts", df_doc_title);
-
- MAKE_SOURCE_FONT(df_doc_code, int(EDITOR_GET("text_editor/help/help_source_font_size")) * EDSCALE);
p_theme->set_font("doc_source", "EditorFonts", df_doc_code);
// Ruler font
@@ -236,13 +234,13 @@ void editor_register_fonts(Ref<Theme> p_theme) {
p_theme->set_font("rulers", "EditorFonts", df_rulers);
// Code font
- MAKE_SOURCE_FONT(df_code, int(EditorSettings::get_singleton()->get("interface/editor/code_font_size")) * EDSCALE);
+ MAKE_SOURCE_FONT(df_code, int(EDITOR_GET("interface/editor/code_font_size")) * EDSCALE);
p_theme->set_font("source", "EditorFonts", df_code);
- MAKE_SOURCE_FONT(df_expression, (int(EditorSettings::get_singleton()->get("interface/editor/code_font_size")) - 1) * EDSCALE);
+ MAKE_SOURCE_FONT(df_expression, (int(EDITOR_GET("interface/editor/code_font_size")) - 1) * EDSCALE);
p_theme->set_font("expression", "EditorFonts", df_expression);
- MAKE_SOURCE_FONT(df_output_code, int(EDITOR_DEF("run/output/font_size", 13)) * EDSCALE);
+ MAKE_SOURCE_FONT(df_output_code, int(EDITOR_GET("run/output/font_size")) * EDSCALE);
p_theme->set_font("output_source", "EditorFonts", df_output_code);
MAKE_SOURCE_FONT(df_text_editor_status_code, default_font_size);
diff --git a/editor/editor_help.cpp b/editor/editor_help.cpp
index df25b08b4c..26d793cf65 100644
--- a/editor/editor_help.cpp
+++ b/editor/editor_help.cpp
@@ -325,6 +325,7 @@ void EditorHelp::_update_doc() {
DocData::ClassDoc cd = doc->class_list[edited_class]; //make a copy, so we can sort without worrying
Ref<Font> doc_font = get_font("doc", "EditorFonts");
+ Ref<Font> doc_bold_font = get_font("doc_bold", "EditorFonts");
Ref<Font> doc_title_font = get_font("doc_title", "EditorFonts");
Ref<Font> doc_code_font = get_font("doc_source", "EditorFonts");
String link_color_text = title_color.to_html(false);
@@ -1132,6 +1133,7 @@ static void _add_text_to_rt(const String &p_bbcode, RichTextLabel *p_rt) {
String base_path;
Ref<Font> doc_font = p_rt->get_font("doc", "EditorFonts");
+ Ref<Font> doc_bold_font = p_rt->get_font("doc_bold", "EditorFonts");
Ref<Font> doc_code_font = p_rt->get_font("doc_source", "EditorFonts");
Color font_color_hl = p_rt->get_color("headline_color", "EditorHelp");
Color link_color = p_rt->get_color("accent_color", "Editor").linear_interpolate(font_color_hl, 0.8);
@@ -1219,7 +1221,7 @@ static void _add_text_to_rt(const String &p_bbcode, RichTextLabel *p_rt) {
} else if (tag == "b") {
//use bold font
- p_rt->push_font(doc_code_font);
+ p_rt->push_font(doc_bold_font);
pos = brk_end + 1;
tag_stack.push_front(tag);
} else if (tag == "i") {
@@ -1237,13 +1239,13 @@ static void _add_text_to_rt(const String &p_bbcode, RichTextLabel *p_rt) {
tag_stack.push_front(tag);
} else if (tag == "center") {
- //use monospace font
+ //align to center
p_rt->push_align(RichTextLabel::ALIGN_CENTER);
pos = brk_end + 1;
tag_stack.push_front(tag);
} else if (tag == "br") {
- //use monospace font
+ //force a line break
p_rt->add_newline();
pos = brk_end + 1;
} else if (tag == "u") {
@@ -1254,14 +1256,13 @@ static void _add_text_to_rt(const String &p_bbcode, RichTextLabel *p_rt) {
tag_stack.push_front(tag);
} else if (tag == "s") {
- //use strikethrough (not supported underline instead)
- p_rt->push_underline();
+ //use strikethrough
+ p_rt->push_strikethrough();
pos = brk_end + 1;
tag_stack.push_front(tag);
} else if (tag == "url") {
- //use strikethrough (not supported underline instead)
int end = bbcode.find("[", brk_end);
if (end == -1)
end = bbcode.length();
@@ -1278,7 +1279,6 @@ static void _add_text_to_rt(const String &p_bbcode, RichTextLabel *p_rt) {
tag_stack.push_front("url");
} else if (tag == "img") {
- //use strikethrough (not supported underline instead)
int end = bbcode.find("[", brk_end);
if (end == -1)
end = bbcode.length();
diff --git a/editor/editor_settings.cpp b/editor/editor_settings.cpp
index a6746c6d25..58e3cc6fc1 100644
--- a/editor/editor_settings.cpp
+++ b/editor/editor_settings.cpp
@@ -320,19 +320,19 @@ void EditorSettings::_load_defaults(Ref<ConfigFile> p_extra_config) {
_initial_set("interface/editor/custom_display_scale", 1.0f);
hints["interface/editor/custom_display_scale"] = PropertyInfo(Variant::REAL, "interface/editor/custom_display_scale", PROPERTY_HINT_RANGE, "0.5,3,0.01", PROPERTY_USAGE_DEFAULT | PROPERTY_USAGE_RESTART_IF_CHANGED);
_initial_set("interface/editor/main_font_size", 14);
- hints["interface/editor/main_font_size"] = PropertyInfo(Variant::INT, "interface/editor/main_font_size", PROPERTY_HINT_RANGE, "10,40,1", PROPERTY_USAGE_DEFAULT | PROPERTY_USAGE_RESTART_IF_CHANGED);
- _initial_set("interface/editor/code_font_size", 14);
- hints["interface/editor/code_font_size"] = PropertyInfo(Variant::INT, "interface/editor/code_font_size", PROPERTY_HINT_RANGE, "8,96,1", PROPERTY_USAGE_DEFAULT);
+ hints["interface/editor/main_font_size"] = PropertyInfo(Variant::INT, "interface/editor/main_font_size", PROPERTY_HINT_RANGE, "8,48,1", PROPERTY_USAGE_DEFAULT | PROPERTY_USAGE_RESTART_IF_CHANGED);
_initial_set("interface/editor/main_font_antialiased", true);
- _initial_set("interface/editor/code_font_antialiased", true);
_initial_set("interface/editor/main_font_hinting", 2);
hints["interface/editor/main_font_hinting"] = PropertyInfo(Variant::INT, "interface/editor/main_font_hinting", PROPERTY_HINT_ENUM, "None,Light,Normal", PROPERTY_USAGE_DEFAULT);
- _initial_set("interface/editor/code_font_hinting", 2);
- hints["interface/editor/code_font_hinting"] = PropertyInfo(Variant::INT, "interface/editor/code_font_hinting", PROPERTY_HINT_ENUM, "None,Light,Normal", PROPERTY_USAGE_DEFAULT);
_initial_set("interface/editor/main_font", "");
hints["interface/editor/main_font"] = PropertyInfo(Variant::STRING, "interface/editor/main_font", PROPERTY_HINT_GLOBAL_FILE, "*.ttf,*.otf", PROPERTY_USAGE_DEFAULT);
_initial_set("interface/editor/main_font_bold", "");
hints["interface/editor/main_font_bold"] = PropertyInfo(Variant::STRING, "interface/editor/main_font_bold", PROPERTY_HINT_GLOBAL_FILE, "*.ttf,*.otf", PROPERTY_USAGE_DEFAULT);
+ _initial_set("interface/editor/code_font_size", 14);
+ hints["interface/editor/code_font_size"] = PropertyInfo(Variant::INT, "interface/editor/code_font_size", PROPERTY_HINT_RANGE, "8,48,1", PROPERTY_USAGE_DEFAULT);
+ _initial_set("interface/editor/code_font_antialiased", true);
+ _initial_set("interface/editor/code_font_hinting", 2);
+ hints["interface/editor/code_font_hinting"] = PropertyInfo(Variant::INT, "interface/editor/code_font_hinting", PROPERTY_HINT_ENUM, "None,Light,Normal", PROPERTY_USAGE_DEFAULT);
_initial_set("interface/editor/code_font", "");
hints["interface/editor/code_font"] = PropertyInfo(Variant::STRING, "interface/editor/code_font", PROPERTY_HINT_GLOBAL_FILE, "*.ttf,*.otf", PROPERTY_USAGE_DEFAULT);
_initial_set("interface/editor/dim_editor_on_dialog_popup", true);
@@ -491,8 +491,12 @@ void EditorSettings::_load_defaults(Ref<ConfigFile> p_extra_config) {
// Help
_initial_set("text_editor/help/show_help_index", true);
- _initial_set("text_editor/help_source_font_size", 14);
- hints["text_editor/help/help_source_font_size"] = PropertyInfo(Variant::REAL, "text_editor/help/help_source_font_size", PROPERTY_HINT_RANGE, "10, 50, 1");
+ _initial_set("text_editor/help/help_font_size", 15);
+ hints["text_editor/help/help_font_size"] = PropertyInfo(Variant::INT, "text_editor/help/help_font_size", PROPERTY_HINT_RANGE, "8,48,1");
+ _initial_set("text_editor/help/help_source_font_size", 14);
+ hints["text_editor/help/help_source_font_size"] = PropertyInfo(Variant::INT, "text_editor/help/help_source_font_size", PROPERTY_HINT_RANGE, "8,48,1");
+ _initial_set("text_editor/help/help_title_font_size", 23);
+ hints["text_editor/help/help_title_font_size"] = PropertyInfo(Variant::INT, "text_editor/help/help_title_font_size", PROPERTY_HINT_RANGE, "8,48,1");
/* Editors */
@@ -600,7 +604,8 @@ void EditorSettings::_load_defaults(Ref<ConfigFile> p_extra_config) {
_initial_set("run/auto_save/save_before_running", true);
// Output
- hints["run/output/font_size"] = PropertyInfo(Variant::INT, "run/output/font_size", PROPERTY_HINT_RANGE, "8,96,1", PROPERTY_USAGE_DEFAULT);
+ _initial_set("run/output/font_size", 13);
+ hints["run/output/font_size"] = PropertyInfo(Variant::INT, "run/output/font_size", PROPERTY_HINT_RANGE, "8,48,1");
_initial_set("run/output/always_clear_output_on_play", true);
_initial_set("run/output/always_open_output_on_play", true);
_initial_set("run/output/always_close_output_on_stop", false);
diff --git a/editor/import/editor_scene_importer_gltf.cpp b/editor/import/editor_scene_importer_gltf.cpp
index daa423e1d9..c97021433b 100644
--- a/editor/import/editor_scene_importer_gltf.cpp
+++ b/editor/import/editor_scene_importer_gltf.cpp
@@ -2090,22 +2090,23 @@ void EditorSceneImporterGLTF::_import_animation(GLTFState &state, AnimationPlaye
animation->add_track(Animation::TYPE_VALUE);
animation->track_set_path(track_idx, node_path);
- if (track.weight_tracks[i].interpolation <= GLTFAnimation::INTERP_STEP) {
- animation->track_set_interpolation_type(track_idx, track.weight_tracks[i].interpolation == GLTFAnimation::INTERP_STEP ? Animation::INTERPOLATION_NEAREST : Animation::INTERPOLATION_NEAREST);
+ // Only LINEAR and STEP (NEAREST) can be supported out of the box by Godot's Animation,
+ // the other modes have to be baked.
+ GLTFAnimation::Interpolation gltf_interp = track.weight_tracks[i].interpolation;
+ if (gltf_interp == GLTFAnimation::INTERP_LINEAR || gltf_interp == GLTFAnimation::INTERP_STEP) {
+ animation->track_set_interpolation_type(track_idx, gltf_interp == GLTFAnimation::INTERP_STEP ? Animation::INTERPOLATION_NEAREST : Animation::INTERPOLATION_LINEAR);
for (int j = 0; j < track.weight_tracks[i].times.size(); j++) {
float t = track.weight_tracks[i].times[j];
float w = track.weight_tracks[i].values[j];
animation->track_insert_key(track_idx, t, w);
}
} else {
- //must bake, apologies.
+ // CATMULLROMSPLINE or CUBIC_SPLINE have to be baked, apologies.
float increment = 1.0 / float(bake_fps);
float time = 0.0;
-
bool last = false;
while (true) {
-
- _interpolate_track<float>(track.weight_tracks[i].times, track.weight_tracks[i].values, time, track.weight_tracks[i].interpolation);
+ _interpolate_track<float>(track.weight_tracks[i].times, track.weight_tracks[i].values, time, gltf_interp);
if (last) {
break;
}
diff --git a/editor/plugins/script_text_editor.cpp b/editor/plugins/script_text_editor.cpp
index cea65bb9b4..c7a438948d 100644
--- a/editor/plugins/script_text_editor.cpp
+++ b/editor/plugins/script_text_editor.cpp
@@ -882,26 +882,29 @@ void ScriptTextEditor::_update_connected_methods() {
continue;
}
- int line = script->get_language()->find_function(connection.method, text_edit->get_text());
- if (line < 0) {
- // There is a chance that the method is inherited from another script.
- bool found_inherited_function = false;
- Ref<Script> inherited_script = script->get_base_script();
- while (!inherited_script.is_null()) {
- line = inherited_script->get_language()->find_function(connection.method, inherited_script->get_source_code());
- if (line != -1) {
- found_inherited_function = true;
- break;
+ if (!ClassDB::has_method(script->get_instance_base_type(), connection.method)) {
+
+ int line = script->get_language()->find_function(connection.method, text_edit->get_text());
+ if (line < 0) {
+ // There is a chance that the method is inherited from another script.
+ bool found_inherited_function = false;
+ Ref<Script> inherited_script = script->get_base_script();
+ while (!inherited_script.is_null()) {
+ line = inherited_script->get_language()->find_function(connection.method, inherited_script->get_source_code());
+ if (line != -1) {
+ found_inherited_function = true;
+ break;
+ }
+
+ inherited_script = inherited_script->get_base_script();
}
- inherited_script = inherited_script->get_base_script();
- }
-
- if (!found_inherited_function) {
- missing_connections.push_back(connection);
+ if (!found_inherited_function) {
+ missing_connections.push_back(connection);
+ }
+ } else {
+ text_edit->set_line_info_icon(line - 1, get_parent_control()->get_icon("Slot", "EditorIcons"), connection.method);
}
- } else {
- text_edit->set_line_info_icon(line - 1, get_parent_control()->get_icon("Slot", "EditorIcons"), connection.method);
}
}
}
diff --git a/editor/project_manager.cpp b/editor/project_manager.cpp
index 3fd497ed7b..9a3991bef1 100644
--- a/editor/project_manager.cpp
+++ b/editor/project_manager.cpp
@@ -294,13 +294,13 @@ private:
String sp = _test_path();
if (sp != "") {
- // set the project name to the select folder name
- if (project_name->get_text() == "") {
+ // If the project name is empty or default, infer the project name from the selected folder name
+ if (project_name->get_text() == "" || project_name->get_text() == TTR("New Game Project")) {
sp = sp.replace("\\", "/");
int lidx = sp.find_last("/");
if (lidx != -1) {
- sp = sp.substr(lidx + 1, sp.length());
+ sp = sp.substr(lidx + 1, sp.length()).capitalize();
}
if (sp == "" && mode == MODE_IMPORT)
sp = TTR("Imported Project");
diff --git a/main/main.cpp b/main/main.cpp
index 304bda0e62..61f54b1b0a 100644
--- a/main/main.cpp
+++ b/main/main.cpp
@@ -1020,6 +1020,7 @@ Error Main::setup(const char *execpath, int argc, char *argv[], bool p_second_ph
}
Engine::get_singleton()->set_iterations_per_second(GLOBAL_DEF("physics/common/physics_fps", 60));
+ ProjectSettings::get_singleton()->set_custom_property_info("physics/common/physics_fps", PropertyInfo(Variant::INT, "physics/common/physics_fps", PROPERTY_HINT_RANGE, "1,120,1,or_greater"));
Engine::get_singleton()->set_physics_jitter_fix(GLOBAL_DEF("physics/common/physics_jitter_fix", 0.5));
Engine::get_singleton()->set_target_fps(GLOBAL_DEF("debug/settings/fps/force_fps", 0));
ProjectSettings::get_singleton()->set_custom_property_info("debug/settings/fps/force_fps", PropertyInfo(Variant::INT, "debug/settings/fps/force_fps", PROPERTY_HINT_RANGE, "0,120,1,or_greater"));
diff --git a/misc/dist/html/full-size.html b/misc/dist/html/full-size.html
index 44b009524c..0e8a41a9fc 100644
--- a/misc/dist/html/full-size.html
+++ b/misc/dist/html/full-size.html
@@ -162,8 +162,13 @@ $GODOT_HEAD_INCLUDE
requestAnimationFrame(animate);
function adjustCanvasDimensions() {
- canvas.width = innerWidth;
- canvas.height = innerHeight;
+ var scale = window.devicePixelRatio || 1;
+ var width = window.innerWidth;
+ var height = window.innerHeight;
+ canvas.width = width * scale;
+ canvas.height = height * scale;
+ canvas.style.width = width + "px";
+ canvas.style.height = height + "px";
}
animationCallbacks.push(adjustCanvasDimensions);
adjustCanvasDimensions();
diff --git a/modules/enet/networked_multiplayer_enet.cpp b/modules/enet/networked_multiplayer_enet.cpp
index 1ff0395ba9..dcb4b7fd75 100644
--- a/modules/enet/networked_multiplayer_enet.cpp
+++ b/modules/enet/networked_multiplayer_enet.cpp
@@ -80,6 +80,7 @@ Error NetworkedMultiplayerENet::create_server(int p_port, int p_max_clients, int
ERR_FAIL_COND_V(p_out_bandwidth < 0, ERR_INVALID_PARAMETER);
ENetAddress address;
+ memset(&address, 0, sizeof(address));
#ifdef GODOT_ENET
if (bind_ip.is_wildcard()) {
diff --git a/modules/gdnative/android/android_gdn.cpp b/modules/gdnative/android/android_gdn.cpp
index 8657935602..624ef19dec 100644
--- a/modules/gdnative/android/android_gdn.cpp
+++ b/modules/gdnative/android/android_gdn.cpp
@@ -34,6 +34,8 @@
// These entry points are only for the android platform and are simple stubs in all others.
#ifdef __ANDROID__
+#include "platform/android/java_godot_wrapper.h"
+#include "platform/android/os_android.h"
#include "platform/android/thread_jandroid.h"
#else
#define JNIEnv void
@@ -54,20 +56,31 @@ JNIEnv *GDAPI godot_android_get_env() {
jobject GDAPI godot_android_get_activity() {
#ifdef __ANDROID__
- JNIEnv *env = ThreadAndroid::get_env();
-
- jclass activityThread = env->FindClass("android/app/ActivityThread");
- jmethodID currentActivityThread = env->GetStaticMethodID(activityThread, "currentActivityThread", "()Landroid/app/ActivityThread;");
- jobject at = env->CallStaticObjectMethod(activityThread, currentActivityThread);
- jmethodID getApplication = env->GetMethodID(activityThread, "getApplication", "()Landroid/app/Application;");
- jobject context = env->CallObjectMethod(at, getApplication);
+ OS_Android *os_android = (OS_Android *)OS::get_singleton();
+ return os_android->get_godot_java()->get_activity();
+#else
+ return NULL;
+#endif
+}
- return env->NewGlobalRef(context);
+jobject GDAPI godot_android_get_surface() {
+#ifdef __ANDROID__
+ OS_Android *os_android = (OS_Android *)OS::get_singleton();
+ return os_android->get_godot_java()->get_surface();
#else
return NULL;
#endif
}
+bool GDAPI godot_android_is_activity_resumed() {
+#ifdef __ANDROID__
+ OS_Android *os_android = (OS_Android *)OS::get_singleton();
+ return os_android->get_godot_java()->is_activity_resumed();
+#else
+ return false;
+#endif
+}
+
#ifdef __cplusplus
}
#endif \ No newline at end of file
diff --git a/modules/gdnative/arvr/arvr_interface_gdnative.cpp b/modules/gdnative/arvr/arvr_interface_gdnative.cpp
index c3f8688adb..da01f573ce 100644
--- a/modules/gdnative/arvr/arvr_interface_gdnative.cpp
+++ b/modules/gdnative/arvr/arvr_interface_gdnative.cpp
@@ -115,6 +115,17 @@ void ARVRInterfaceGDNative::set_anchor_detection_is_enabled(bool p_enable) {
interface->set_anchor_detection_is_enabled(data, p_enable);
}
+int ARVRInterfaceGDNative::get_camera_feed_id() {
+
+ ERR_FAIL_COND_V(interface == NULL, 0);
+
+ if ((interface->version.major > 1) || ((interface->version.major) == 1 && (interface->version.minor >= 1))) {
+ return (unsigned int)interface->get_camera_feed_id(data);
+ } else {
+ return 0;
+ }
+}
+
bool ARVRInterfaceGDNative::is_stereo() {
bool stereo;
diff --git a/modules/gdnative/arvr/arvr_interface_gdnative.h b/modules/gdnative/arvr/arvr_interface_gdnative.h
index 86396b067a..e0e5b67849 100644
--- a/modules/gdnative/arvr/arvr_interface_gdnative.h
+++ b/modules/gdnative/arvr/arvr_interface_gdnative.h
@@ -66,6 +66,7 @@ public:
/** specific to AR **/
virtual bool get_anchor_detection_is_enabled() const;
virtual void set_anchor_detection_is_enabled(bool p_enable);
+ virtual int get_camera_feed_id();
/** rendering and internal **/
virtual Size2 get_render_targetsize();
diff --git a/modules/gdnative/gdnative_api.json b/modules/gdnative/gdnative_api.json
index 52c989037e..6c12ee6534 100644
--- a/modules/gdnative/gdnative_api.json
+++ b/modules/gdnative/gdnative_api.json
@@ -6304,7 +6304,7 @@
"type": "ANDROID",
"version": {
"major": 1,
- "minor": 0
+ "minor": 1
},
"next": null,
"api": [
@@ -6319,6 +6319,18 @@
"return_type": "jobject",
"arguments": [
]
+ },
+ {
+ "name": "godot_android_get_surface",
+ "return_type": "jobject",
+ "arguments": [
+ ]
+ },
+ {
+ "name": "godot_android_is_activity_resumed",
+ "return_type": "bool",
+ "arguments": [
+ ]
}
]
},
diff --git a/modules/gdnative/include/android/godot_android.h b/modules/gdnative/include/android/godot_android.h
index 32e86838be..7063e1d2c5 100644
--- a/modules/gdnative/include/android/godot_android.h
+++ b/modules/gdnative/include/android/godot_android.h
@@ -46,6 +46,8 @@ extern "C" {
JNIEnv *GDAPI godot_android_get_env();
jobject GDAPI godot_android_get_activity();
+jobject GDAPI godot_android_get_surface();
+bool GDAPI godot_android_is_activity_resumed();
#ifdef __cplusplus
}
diff --git a/modules/gdnative/include/arvr/godot_arvr.h b/modules/gdnative/include/arvr/godot_arvr.h
index 657090fa70..d465bbde54 100644
--- a/modules/gdnative/include/arvr/godot_arvr.h
+++ b/modules/gdnative/include/arvr/godot_arvr.h
@@ -64,6 +64,7 @@ typedef struct {
// only in 1.1 onwards
godot_int (*get_external_texture_for_eye)(void *, godot_int);
void (*notification)(void *, godot_int);
+ godot_int (*get_camera_feed_id)(void *);
} godot_arvr_interface_gdnative;
void GDAPI godot_arvr_register_interface(const godot_arvr_interface_gdnative *p_interface);
diff --git a/modules/mono/editor/bindings_generator.cpp b/modules/mono/editor/bindings_generator.cpp
index cd7774e7a1..2d618f7891 100644
--- a/modules/mono/editor/bindings_generator.cpp
+++ b/modules/mono/editor/bindings_generator.cpp
@@ -2536,13 +2536,8 @@ void BindingsGenerator::_default_argument_from_variant(const Variant &p_val, Arg
switch (p_val.get_type()) {
case Variant::NIL:
- if (ClassDB::class_exists(r_iarg.type.cname)) {
- // Object type
- r_iarg.default_argument = "null";
- } else {
- // Variant
- r_iarg.default_argument = "null";
- }
+ // Either Object type or Variant
+ r_iarg.default_argument = "null";
break;
// Atomic types
case Variant::BOOL:
diff --git a/modules/webrtc/config.py b/modules/webrtc/config.py
index 2e3a18ad0e..48b4c33c5d 100644
--- a/modules/webrtc/config.py
+++ b/modules/webrtc/config.py
@@ -7,7 +7,8 @@ def configure(env):
def get_doc_classes():
return [
"WebRTCPeerConnection",
- "WebRTCDataChannel"
+ "WebRTCDataChannel",
+ "WebRTCMultiplayer"
]
def get_doc_path():
diff --git a/modules/webrtc/doc_classes/WebRTCDataChannel.xml b/modules/webrtc/doc_classes/WebRTCDataChannel.xml
index dcc14d4ddb..7cce97244d 100644
--- a/modules/webrtc/doc_classes/WebRTCDataChannel.xml
+++ b/modules/webrtc/doc_classes/WebRTCDataChannel.xml
@@ -11,85 +11,106 @@
<return type="void">
</return>
<description>
+ Closes this data channel, notifying the other peer.
</description>
</method>
<method name="get_id" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the id assigned to this channel during creation (or auto-assigned during negotiation).
+			If the channel is not negotiated out-of-band, the id will only be available after the connection is established (it will return [code]65535[/code] until then).
</description>
</method>
<method name="get_label" qualifiers="const">
<return type="String">
</return>
<description>
+ Returns the label assigned to this channel during creation.
</description>
</method>
<method name="get_max_packet_life_time" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the [code]maxPacketLifeTime[/code] value assigned to this channel during creation.
+ Will be [code]65535[/code] if not specified.
</description>
</method>
<method name="get_max_retransmits" qualifiers="const">
<return type="int">
</return>
<description>
+ Returns the [code]maxRetransmits[/code] value assigned to this channel during creation.
+ Will be [code]65535[/code] if not specified.
</description>
</method>
<method name="get_protocol" qualifiers="const">
<return type="String">
</return>
<description>
+ Returns the sub-protocol assigned to this channel during creation. An empty string if not specified.
</description>
</method>
<method name="get_ready_state" qualifiers="const">
<return type="int" enum="WebRTCDataChannel.ChannelState">
</return>
<description>
+ Returns the current state of this channel, see [enum WebRTCDataChannel.ChannelState].
</description>
</method>
<method name="is_negotiated" qualifiers="const">
<return type="bool">
</return>
<description>
+ Returns [code]true[/code] if this channel was created with out-of-band configuration.
</description>
</method>
<method name="is_ordered" qualifiers="const">
<return type="bool">
</return>
<description>
+ Returns [code]true[/code] if this channel was created with ordering enabled (default).
</description>
</method>
<method name="poll">
<return type="int" enum="Error">
</return>
<description>
+ Reserved, but not used for now.
</description>
</method>
<method name="was_string_packet" qualifiers="const">
<return type="bool">
</return>
<description>
+ Returns [code]true[/code] if the last received packet was transferred as text. See [member write_mode].
</description>
</method>
</methods>
<members>
<member name="write_mode" type="int" setter="set_write_mode" getter="get_write_mode" enum="WebRTCDataChannel.WriteMode">
+			The transfer mode to use when sending outgoing packets. Either text or binary.
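+			A minimal sketch of switching modes before sending (assuming [code]channel[/code] is an already open [WebRTCDataChannel] obtained elsewhere):
+			[codeblock]
+			# 'channel' is assumed to be an open WebRTCDataChannel created elsewhere.
+			channel.write_mode = WebRTCDataChannel.WRITE_MODE_TEXT
+			channel.put_packet("hello".to_utf8()) # Received as a string by non-Godot peers.
+			channel.write_mode = WebRTCDataChannel.WRITE_MODE_BINARY
+			channel.put_packet(var2bytes([1, 2, 3])) # Received as a binary buffer.
+			[/codeblock]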
</member>
</members>
<constants>
<constant name="WRITE_MODE_TEXT" value="0" enum="WriteMode">
+			Tells the channel to send data as text. An external peer (non-Godot) would receive it as a string.
</constant>
<constant name="WRITE_MODE_BINARY" value="1" enum="WriteMode">
+			Tells the channel to send data as binary. An external peer (non-Godot) would receive it as an array buffer or blob.
</constant>
<constant name="STATE_CONNECTING" value="0" enum="ChannelState">
+ The channel was created, but it's still trying to connect.
</constant>
<constant name="STATE_OPEN" value="1" enum="ChannelState">
+ The channel is currently open, and data can flow over it.
</constant>
<constant name="STATE_CLOSING" value="2" enum="ChannelState">
+			The channel is being closed; no new messages will be accepted, but those already in the queue will be flushed.
</constant>
<constant name="STATE_CLOSED" value="3" enum="ChannelState">
+			The channel was closed, or the connection failed.
</constant>
</constants>
</class>
diff --git a/modules/webrtc/doc_classes/WebRTCMultiplayer.xml b/modules/webrtc/doc_classes/WebRTCMultiplayer.xml
new file mode 100644
index 0000000000..2b0622fffa
--- /dev/null
+++ b/modules/webrtc/doc_classes/WebRTCMultiplayer.xml
@@ -0,0 +1,85 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<class name="WebRTCMultiplayer" inherits="NetworkedMultiplayerPeer" category="Core" version="3.2">
+ <brief_description>
+ A simple interface to create a peer-to-peer mesh network composed of [WebRTCPeerConnection] that is compatible with the [MultiplayerAPI].
+ </brief_description>
+ <description>
+ This class constructs a full mesh of [WebRTCPeerConnection] (one connection for each peer) that can be used as a [member MultiplayerAPI.network_peer].
+		You can add each [WebRTCPeerConnection] via [method add_peer] or remove them via [method remove_peer]. Peers must be added in the [constant WebRTCPeerConnection.STATE_NEW] state to allow this class to create the appropriate channels. This class will not create offers nor set descriptions; it will only poll them and notify you of connections and disconnections.
+		[signal NetworkedMultiplayerPeer.connection_succeeded] and [signal NetworkedMultiplayerPeer.server_disconnected] will not be emitted unless [code]server_compatibility[/code] is [code]true[/code] in [method initialize]. Besides that, data transfer works like in a [NetworkedMultiplayerPeer].
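+		A minimal sketch of a two-peer mesh (the ids below are illustrative; the exchange of offers, answers and ICE candidates is assumed to happen over your own signaling layer):
+		[codeblock]
+		var multiplayer_peer = WebRTCMultiplayer.new()
+		multiplayer_peer.initialize(2) # Our own id in the mesh (illustrative).
+		var connection = WebRTCPeerConnection.new()
+		multiplayer_peer.add_peer(connection, 3) # The remote peer we will connect to (illustrative id).
+		# ... exchange offer/answer/ICE candidates for 'connection' over your signaling channel ...
+		get_tree().network_peer = multiplayer_peer
+		[/codeblock]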
+ </description>
+ <tutorials>
+ </tutorials>
+ <methods>
+ <method name="add_peer">
+ <return type="int" enum="Error">
+ </return>
+ <argument index="0" name="peer" type="WebRTCPeerConnection">
+ </argument>
+ <argument index="1" name="peer_id" type="int">
+ </argument>
+ <argument index="2" name="unreliable_lifetime" type="int" default="1">
+ </argument>
+ <description>
+ Add a new peer to the mesh with the given [code]peer_id[/code]. The [WebRTCPeerConnection] must be in state [constant WebRTCPeerConnection.STATE_NEW].
+ Three channels will be created for reliable, unreliable, and ordered transport. The value of [code]unreliable_lifetime[/code] will be passed to the [code]maxPacketLifetime[/code] option when creating unreliable and ordered channels (see [method WebRTCPeerConnection.create_data_channel]).
+ </description>
+ </method>
+ <method name="close">
+ <return type="void">
+ </return>
+ <description>
+			Close all the added peer connections and channels, freeing all resources.
+ </description>
+ </method>
+ <method name="get_peer">
+ <return type="Dictionary">
+ </return>
+ <argument index="0" name="peer_id" type="int">
+ </argument>
+ <description>
+			Returns a dictionary representation of the peer with the given [code]peer_id[/code], containing three keys: [code]connection[/code], the [WebRTCPeerConnection] to this peer; [code]channels[/code], an array of three [WebRTCDataChannel]; and [code]connected[/code], a boolean indicating whether the peer connection is currently connected (all three channels are open).
+ </description>
+ </method>
+ <method name="get_peers">
+ <return type="Dictionary">
+ </return>
+ <description>
+			Returns a dictionary whose keys are the peer ids and whose values are the peer representations as in [method get_peer].
+ </description>
+ </method>
+ <method name="has_peer">
+ <return type="bool">
+ </return>
+ <argument index="0" name="peer_id" type="int">
+ </argument>
+ <description>
+ Returns [code]true[/code] if the given [code]peer_id[/code] is in the peers map (it might not be connected though).
+ </description>
+ </method>
+ <method name="initialize">
+ <return type="int" enum="Error">
+ </return>
+ <argument index="0" name="peer_id" type="int">
+ </argument>
+ <argument index="1" name="server_compatibility" type="bool" default="false">
+ </argument>
+ <description>
+ Initialize the multiplayer peer with the given [code]peer_id[/code] (must be between 1 and 2147483647).
+			If [code]server_compatibility[/code] is [code]false[/code] (default), the multiplayer peer will be immediately in state [constant NetworkedMultiplayerPeer.CONNECTION_CONNECTED] and [signal NetworkedMultiplayerPeer.connection_succeeded] will not be emitted.
+			If [code]server_compatibility[/code] is [code]true[/code], the peer will suppress all [signal NetworkedMultiplayerPeer.peer_connected] signals until a peer with id [constant NetworkedMultiplayerPeer.TARGET_PEER_SERVER] connects, and will then emit [signal NetworkedMultiplayerPeer.connection_succeeded]. After that, [signal NetworkedMultiplayerPeer.peer_connected] will be emitted for every already connected peer, and for any new peer that might connect. If the server peer disconnects after that, [signal NetworkedMultiplayerPeer.server_disconnected] will be emitted and the state will become [constant NetworkedMultiplayerPeer.CONNECTION_DISCONNECTED].
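+			A short sketch of the server-compatibility setup ([code]is_designated_server[/code] and [code]assigned_id[/code] are illustrative values coming from your own signaling layer):
+			[codeblock]
+			# 'is_designated_server' and 'assigned_id' come from your own signaling layer.
+			var multiplayer_peer = WebRTCMultiplayer.new()
+			if is_designated_server:
+			    multiplayer_peer.initialize(1, true) # Peer 1 acts as the server.
+			else:
+			    multiplayer_peer.initialize(assigned_id, true) # Stays CONNECTION_CONNECTING until peer 1 connects.
+			[/codeblock]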
+ </description>
+ </method>
+ <method name="remove_peer">
+ <return type="void">
+ </return>
+ <argument index="0" name="peer_id" type="int">
+ </argument>
+ <description>
+			Remove the peer with the given [code]peer_id[/code] from the mesh. If the peer was connected, and [signal NetworkedMultiplayerPeer.peer_connected] was emitted for it, then [signal NetworkedMultiplayerPeer.peer_disconnected] will be emitted.
+ </description>
+ </method>
+ </methods>
+ <constants>
+ </constants>
+</class>
diff --git a/modules/webrtc/doc_classes/WebRTCPeerConnection.xml b/modules/webrtc/doc_classes/WebRTCPeerConnection.xml
index 8b14c60deb..ae709877f4 100644
--- a/modules/webrtc/doc_classes/WebRTCPeerConnection.xml
+++ b/modules/webrtc/doc_classes/WebRTCPeerConnection.xml
@@ -1,8 +1,15 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="WebRTCPeerConnection" inherits="Reference" category="Core" version="3.2">
<brief_description>
+ Interface to a WebRTC peer connection.
</brief_description>
<description>
+ A WebRTC connection between the local computer and a remote peer. Provides an interface to connect, maintain and monitor the connection.
+		Setting up a WebRTC connection between two peers may seem a complex task, but it can be broken down into 3 main steps:
+		- The peer that wants to initiate the connection ([code]A[/code] from now on) creates an offer and sends it to the other peer ([code]B[/code] from now on).
+		- [code]B[/code] receives the offer, generates an answer, and sends it to [code]A[/code].
+		- [code]A[/code] and [code]B[/code] then generate and exchange ICE candidates with each other.
+ After these steps, the connection should become connected. Keep on reading or look into the tutorial for more information.
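+		A minimal local sketch of these steps, connecting two peers inside the same script (in a real application the offer, answer and candidates would travel through your own signaling server instead):
+		[codeblock]
+		extends Node
+
+		var p1 = WebRTCPeerConnection.new()
+		var p2 = WebRTCPeerConnection.new()
+		var channel # Keep the channel referenced so it is not closed.
+
+		func _ready():
+		    # Wire each peer's descriptions and candidates directly to the other peer,
+		    # standing in for a real signaling exchange.
+		    p1.connect("session_description_created", p1, "set_local_description")
+		    p1.connect("session_description_created", p2, "set_remote_description")
+		    p1.connect("ice_candidate_created", p2, "add_ice_candidate")
+		    p2.connect("session_description_created", p2, "set_local_description")
+		    p2.connect("session_description_created", p1, "set_remote_description")
+		    p2.connect("ice_candidate_created", p1, "add_ice_candidate")
+		    channel = p1.create_data_channel("chat") # At least one channel is needed before the offer.
+		    p1.create_offer()
+
+		func _process(delta):
+		    p1.poll()
+		    p2.poll()
+		[/codeblock]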
</description>
<tutorials>
</tutorials>
@@ -17,12 +24,14 @@
<argument index="2" name="name" type="String">
</argument>
<description>
+			Add an ICE candidate generated by a remote peer (and received over the signaling server). See [signal ice_candidate_created].
</description>
</method>
<method name="close">
<return type="void">
</return>
<description>
+			Close the peer connection and all data channels associated with it. Note that you cannot reuse this object for a new connection unless you call [method initialize].
</description>
</method>
<method name="create_data_channel">
@@ -35,18 +44,38 @@
}">
</argument>
<description>
+			Returns a new [WebRTCDataChannel] (or [code]null[/code] on failure) with the given [code]label[/code], optionally configured via the [code]options[/code] dictionary. This method can only be called when the connection is in state [constant STATE_NEW].
+			There are two ways to create a working data channel: either call [method create_data_channel] on only one of the peers and listen to [signal data_channel_received] on the other, or call [method create_data_channel] on both peers, with the same values, and the [code]negotiated[/code] option set to [code]true[/code].
+ Valid [code]options[/code] are:
+ [codeblock]
+ {
+			    "negotiated": true, # When set to true (default off), means the channel is negotiated out of band. "id" must be set too, and "data_channel_received" will not be emitted.
+			    "id": 1, # When "negotiated" is true this value must also be set, to the same value on both peers.
+
+			    # Only one of "maxRetransmits" and "maxPacketLifeTime" can be specified, not both. They make the channel unreliable (but also better for real-time transfer).
+			    "maxRetransmits": 1, # Specify the maximum number of attempts the peer will make to retransmit packets if they are not acknowledged.
+			    "maxPacketLifeTime": 100, # Specify the maximum amount of time before giving up retransmissions of unacknowledged packets (in milliseconds).
+			    "ordered": true, # When in unreliable mode (i.e. either "maxRetransmits" or "maxPacketLifeTime" is set), "ordered" (true by default) specifies whether packet ordering is to be enforced.
+
+ "protocol": "my-custom-protocol", # A custom sub-protocol string for this channel.
+ }
+ [/codeblock]
+			NOTE: You must keep a reference to channels created this way, or they will be closed.
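+			For example, a pre-negotiated channel created on both peers (a sketch; [code]peer[/code] is assumed to be a [WebRTCPeerConnection] still in [constant STATE_NEW], and both sides must use the same [code]id[/code]):
+			[codeblock]
+			# 'peer' is assumed to be a WebRTCPeerConnection in STATE_NEW; run this on both peers.
+			var chat = peer.create_data_channel("chat", {"negotiated": true, "id": 1})
+			# 'chat' must stay referenced (e.g. stored in a member variable) or it will be closed.
+			[/codeblock]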
</description>
</method>
<method name="create_offer">
<return type="int" enum="Error">
</return>
<description>
+ Creates a new SDP offer to start a WebRTC connection with a remote peer. At least one [WebRTCDataChannel] must have been created before calling this method.
+			If this function returns [code]OK[/code], [signal session_description_created] will be emitted when the session description is ready to be sent.
</description>
</method>
<method name="get_connection_state" qualifiers="const">
<return type="int" enum="WebRTCPeerConnection.ConnectionState">
</return>
<description>
+ Returns the connection state. See [enum ConnectionState].
</description>
</method>
<method name="initialize">
@@ -57,12 +86,29 @@
}">
</argument>
<description>
+ Re-initialize this peer connection, closing any previously active connection, and going back to state [constant STATE_NEW]. A dictionary of [code]options[/code] can be passed to configure the peer connection.
+ Valid [code]options[/code] are:
+ [codeblock]
+ {
+ "iceServers": [
+ {
+ "urls": [ "stun:stun.example.com:3478" ], # One or more STUN servers.
+ },
+ {
+ "urls": [ "turn:turn.example.com:3478" ], # One or more TURN servers.
+ "username": "a_username", # Optional username for the TURN server.
+ "credentials": "a_password", # Optional password for the TURN server.
+ }
+ ]
+ }
+ [/codeblock]
</description>
</method>
<method name="poll">
<return type="int" enum="Error">
</return>
<description>
+ Call this method frequently (e.g. in [method Node._process] or [method Node._physics_process]) to properly receive signals.
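+			A typical polling loop (a sketch, assuming this node keeps the connection in a [code]peer[/code] member):
+			[codeblock]
+			func _process(delta):
+			    peer.poll() # 'peer' is assumed to be stored by this node; polling lets the signals above be emitted.
+			[/codeblock]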
</description>
</method>
<method name="set_local_description">
@@ -73,6 +119,8 @@
<argument index="1" name="sdp" type="String">
</argument>
<description>
+ Sets the SDP description of the local peer. This should be called in response to [signal session_description_created].
+ If [code]type[/code] is [code]answer[/code] the peer will start emitting [signal ice_candidate_created].
</description>
</method>
<method name="set_remote_description">
@@ -83,6 +131,9 @@
<argument index="1" name="sdp" type="String">
</argument>
<description>
+ Sets the SDP description of the remote peer. This should be called with the values generated by a remote peer and received over the signaling server.
+ If [code]type[/code] is [code]offer[/code] the peer will emit [signal session_description_created] with the appropriate answer.
+ If [code]type[/code] is [code]answer[/code] the peer will start emitting [signal ice_candidate_created].
</description>
</method>
</methods>
@@ -91,6 +142,8 @@
<argument index="0" name="channel" type="Object">
</argument>
<description>
+ Emitted when a new in-band channel is received, i.e. when the channel was created with [code]negotiated: false[/code] (default).
+			The object will be an instance of [WebRTCDataChannel]. You must keep a reference to it, or it will be closed automatically. See [method create_data_channel].
</description>
</signal>
<signal name="ice_candidate_created">
@@ -101,6 +154,7 @@
<argument index="2" name="name" type="String">
</argument>
<description>
+ Emitted when a new ICE candidate has been created. The three parameters are meant to be passed to the remote peer over the signaling server.
</description>
</signal>
<signal name="session_description_created">
@@ -109,21 +163,28 @@
<argument index="1" name="sdp" type="String">
</argument>
<description>
+ Emitted after a successful call to [method create_offer] or [method set_remote_description] (when it generates an answer). The parameters are meant to be passed to [method set_local_description] on this object, and sent to the remote peer over the signaling server.
</description>
</signal>
</signals>
<constants>
<constant name="STATE_NEW" value="0" enum="ConnectionState">
+ The connection is new, data channels and an offer can be created in this state.
</constant>
<constant name="STATE_CONNECTING" value="1" enum="ConnectionState">
+			The peer is connecting, ICE is in progress, none of the transports have failed.
</constant>
<constant name="STATE_CONNECTED" value="2" enum="ConnectionState">
+ The peer is connected, all ICE transports are connected.
</constant>
<constant name="STATE_DISCONNECTED" value="3" enum="ConnectionState">
+ At least one ICE transport is disconnected.
</constant>
<constant name="STATE_FAILED" value="4" enum="ConnectionState">
+ One or more of the ICE transports failed.
</constant>
<constant name="STATE_CLOSED" value="5" enum="ConnectionState">
+ The peer connection is closed (after calling [method close] for example).
</constant>
</constants>
</class>
diff --git a/modules/webrtc/register_types.cpp b/modules/webrtc/register_types.cpp
index 44e072cc89..58b68d926b 100644
--- a/modules/webrtc/register_types.cpp
+++ b/modules/webrtc/register_types.cpp
@@ -40,6 +40,7 @@
#include "webrtc_data_channel_gdnative.h"
#include "webrtc_peer_connection_gdnative.h"
#endif
+#include "webrtc_multiplayer.h"
void register_webrtc_types() {
#ifdef JAVASCRIPT_ENABLED
@@ -54,6 +55,7 @@ void register_webrtc_types() {
ClassDB::register_class<WebRTCDataChannelGDNative>();
#endif
ClassDB::register_virtual_class<WebRTCDataChannel>();
+ ClassDB::register_class<WebRTCMultiplayer>();
}
void unregister_webrtc_types() {}
diff --git a/modules/webrtc/webrtc_data_channel_js.cpp b/modules/webrtc/webrtc_data_channel_js.cpp
index 2e7c64aa72..069918cc9c 100644
--- a/modules/webrtc/webrtc_data_channel_js.cpp
+++ b/modules/webrtc/webrtc_data_channel_js.cpp
@@ -68,7 +68,7 @@ void WebRTCDataChannelJS::_on_error() {
}
void WebRTCDataChannelJS::_on_message(uint8_t *p_data, uint32_t p_size, bool p_is_string) {
- if (in_buffer.space_left() < p_size + 5) {
+ if (in_buffer.space_left() < (int)(p_size + 5)) {
ERR_EXPLAIN("Buffer full! Dropping data");
ERR_FAIL();
}
@@ -205,30 +205,45 @@ String WebRTCDataChannelJS::get_label() const {
}
/* clang-format off */
-#define _JS_GET(PROP) \
+#define _JS_GET(PROP, DEF) \
EM_ASM_INT({ \
var dict = Module.IDHandler.get($0); \
if (!dict || !dict["channel"]) { \
- return 0; \
- }; \
- return dict["channel"].PROP; \
+ return DEF; \
+ } \
+ var out = dict["channel"].PROP; \
+ return out === null ? DEF : out; \
}, _js_id)
/* clang-format on */
bool WebRTCDataChannelJS::is_ordered() const {
- return _JS_GET(ordered);
+ return _JS_GET(ordered, true);
}
int WebRTCDataChannelJS::get_id() const {
- return _JS_GET(id);
+ return _JS_GET(id, 65535);
}
int WebRTCDataChannelJS::get_max_packet_life_time() const {
- return _JS_GET(maxPacketLifeTime);
+ // Can't use macro, webkit workaround.
+ /* clang-format off */
+ return EM_ASM_INT({
+ var dict = Module.IDHandler.get($0);
+ if (!dict || !dict["channel"]) {
+ return 65535;
+ }
+ if (dict["channel"].maxRetransmitTime !== undefined) {
+ // Guess someone didn't appreciate the standardization process.
+ return dict["channel"].maxRetransmitTime;
+ }
+ var out = dict["channel"].maxPacketLifeTime;
+ return out === null ? 65535 : out;
+ }, _js_id);
+ /* clang-format on */
}
int WebRTCDataChannelJS::get_max_retransmits() const {
- return _JS_GET(maxRetransmits);
+ return _JS_GET(maxRetransmits, 65535);
}
String WebRTCDataChannelJS::get_protocol() const {
@@ -236,7 +251,7 @@ String WebRTCDataChannelJS::get_protocol() const {
}
bool WebRTCDataChannelJS::is_negotiated() const {
- return _JS_GET(negotiated);
+ return _JS_GET(negotiated, false);
}
WebRTCDataChannelJS::WebRTCDataChannelJS() {
diff --git a/modules/webrtc/webrtc_multiplayer.cpp b/modules/webrtc/webrtc_multiplayer.cpp
new file mode 100644
index 0000000000..17dafff93a
--- /dev/null
+++ b/modules/webrtc/webrtc_multiplayer.cpp
@@ -0,0 +1,384 @@
+/*************************************************************************/
+/* webrtc_multiplayer.cpp */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#include "webrtc_multiplayer.h"
+
+#include "core/io/marshalls.h"
+#include "core/os/os.h"
+
+void WebRTCMultiplayer::_bind_methods() {
+ ClassDB::bind_method(D_METHOD("initialize", "peer_id", "server_compatibility"), &WebRTCMultiplayer::initialize, DEFVAL(false));
+ ClassDB::bind_method(D_METHOD("add_peer", "peer", "peer_id", "unreliable_lifetime"), &WebRTCMultiplayer::add_peer, DEFVAL(1));
+ ClassDB::bind_method(D_METHOD("remove_peer", "peer_id"), &WebRTCMultiplayer::remove_peer);
+ ClassDB::bind_method(D_METHOD("has_peer", "peer_id"), &WebRTCMultiplayer::has_peer);
+ ClassDB::bind_method(D_METHOD("get_peer", "peer_id"), &WebRTCMultiplayer::get_peer);
+ ClassDB::bind_method(D_METHOD("get_peers"), &WebRTCMultiplayer::get_peers);
+ ClassDB::bind_method(D_METHOD("close"), &WebRTCMultiplayer::close);
+}
+
+void WebRTCMultiplayer::set_transfer_mode(TransferMode p_mode) {
+ transfer_mode = p_mode;
+}
+
+NetworkedMultiplayerPeer::TransferMode WebRTCMultiplayer::get_transfer_mode() const {
+ return transfer_mode;
+}
+
+void WebRTCMultiplayer::set_target_peer(int p_peer_id) {
+ target_peer = p_peer_id;
+}
+
+// Returns the ID of the peer that sent the most recent packet.
+int WebRTCMultiplayer::get_packet_peer() const {
+ return next_packet_peer;
+}
+
+bool WebRTCMultiplayer::is_server() const {
+ return unique_id == TARGET_PEER_SERVER;
+}
+
+void WebRTCMultiplayer::poll() {
+ if (peer_map.size() == 0)
+ return;
+
+ List<int> remove;
+ List<int> add;
+ for (Map<int, Ref<ConnectedPeer> >::Element *E = peer_map.front(); E; E = E->next()) {
+ Ref<ConnectedPeer> peer = E->get();
+ peer->connection->poll();
+ // Check peer state
+ switch (peer->connection->get_connection_state()) {
+ case WebRTCPeerConnection::STATE_NEW:
+ case WebRTCPeerConnection::STATE_CONNECTING:
+ // Go to next peer, not ready yet.
+ continue;
+ case WebRTCPeerConnection::STATE_CONNECTED:
+ // Good to go, go ahead and check channel state.
+ break;
+ default:
+			default:
+				// Peer is closed or in error state. Go to next peer.
+ remove.push_back(E->key());
+ continue;
+ }
+ // Check channels state
+ int ready = 0;
+ for (List<Ref<WebRTCDataChannel> >::Element *C = peer->channels.front(); C && C->get().is_valid(); C = C->next()) {
+ Ref<WebRTCDataChannel> ch = C->get();
+ switch (ch->get_ready_state()) {
+ case WebRTCDataChannel::STATE_CONNECTING:
+ continue;
+ case WebRTCDataChannel::STATE_OPEN:
+ ready++;
+ continue;
+ default:
+ // Channel was closed or in error state, remove peer id.
+ remove.push_back(E->key());
+ }
+			// We got a closed channel; break out, the peer will be removed.
+ break;
+ }
+ // This peer has newly connected, and all channels are now open.
+ if (ready == peer->channels.size() && !peer->connected) {
+ peer->connected = true;
+ add.push_back(E->key());
+ }
+ }
+ // Remove disconnected peers
+ for (List<int>::Element *E = remove.front(); E; E = E->next()) {
+ remove_peer(E->get());
+ if (next_packet_peer == E->get())
+ next_packet_peer = 0;
+ }
+ // Signal newly connected peers
+ for (List<int>::Element *E = add.front(); E; E = E->next()) {
+ // Already connected to server: simply notify new peer.
+ // NOTE: Mesh is always connected.
+ if (connection_status == CONNECTION_CONNECTED)
+ emit_signal("peer_connected", E->get());
+
+		// Server emulation mode suppresses peer_connected until the server connects.
+ if (server_compat && E->get() == TARGET_PEER_SERVER) {
+ // Server connected.
+ connection_status = CONNECTION_CONNECTED;
+ emit_signal("peer_connected", TARGET_PEER_SERVER);
+ emit_signal("connection_succeeded");
+ // Notify of all previously connected peers
+ for (Map<int, Ref<ConnectedPeer> >::Element *F = peer_map.front(); F; F = F->next()) {
+ if (F->key() != 1 && F->get()->connected)
+ emit_signal("peer_connected", F->key());
+ }
+ break; // Because we already notified of all newly added peers.
+ }
+ }
+ // Fetch next packet
+ if (next_packet_peer == 0)
+ _find_next_peer();
+}
+
+void WebRTCMultiplayer::_find_next_peer() {
+ Map<int, Ref<ConnectedPeer> >::Element *E = peer_map.find(next_packet_peer);
+ if (E) E = E->next();
+ // After last.
+ while (E) {
+ for (List<Ref<WebRTCDataChannel> >::Element *F = E->get()->channels.front(); F; F = F->next()) {
+ if (F->get()->get_available_packet_count()) {
+ next_packet_peer = E->key();
+ return;
+ }
+ }
+ E = E->next();
+ }
+ E = peer_map.front();
+ // Before last
+ while (E) {
+ for (List<Ref<WebRTCDataChannel> >::Element *F = E->get()->channels.front(); F; F = F->next()) {
+ if (F->get()->get_available_packet_count()) {
+ next_packet_peer = E->key();
+ return;
+ }
+ }
+ if (E->key() == (int)next_packet_peer)
+ break;
+ E = E->next();
+ }
+ // No packet found
+ next_packet_peer = 0;
+}
+
+void WebRTCMultiplayer::set_refuse_new_connections(bool p_enable) {
+ refuse_connections = p_enable;
+}
+
+bool WebRTCMultiplayer::is_refusing_new_connections() const {
+ return refuse_connections;
+}
+
+NetworkedMultiplayerPeer::ConnectionStatus WebRTCMultiplayer::get_connection_status() const {
+ return connection_status;
+}
+
+Error WebRTCMultiplayer::initialize(int p_self_id, bool p_server_compat) {
+ ERR_FAIL_COND_V(p_self_id < 0 || p_self_id > ~(1 << 31), ERR_INVALID_PARAMETER);
+ unique_id = p_self_id;
+ server_compat = p_server_compat;
+
+ // Mesh and server are always connected
+ if (!server_compat || p_self_id == 1)
+ connection_status = CONNECTION_CONNECTED;
+ else
+ connection_status = CONNECTION_CONNECTING;
+ return OK;
+}
+
+int WebRTCMultiplayer::get_unique_id() const {
+ ERR_FAIL_COND_V(connection_status == CONNECTION_DISCONNECTED, 1);
+ return unique_id;
+}
+
+void WebRTCMultiplayer::_peer_to_dict(Ref<ConnectedPeer> p_connected_peer, Dictionary &r_dict) {
+ Array channels;
+ for (List<Ref<WebRTCDataChannel> >::Element *F = p_connected_peer->channels.front(); F; F = F->next()) {
+ channels.push_back(F->get());
+ }
+ r_dict["connection"] = p_connected_peer->connection;
+ r_dict["connected"] = p_connected_peer->connected;
+ r_dict["channels"] = channels;
+}
+
+bool WebRTCMultiplayer::has_peer(int p_peer_id) {
+ return peer_map.has(p_peer_id);
+}
+
+Dictionary WebRTCMultiplayer::get_peer(int p_peer_id) {
+ ERR_FAIL_COND_V(!peer_map.has(p_peer_id), Dictionary());
+ Dictionary out;
+ _peer_to_dict(peer_map[p_peer_id], out);
+ return out;
+}
+
+Dictionary WebRTCMultiplayer::get_peers() {
+ Dictionary out;
+ for (Map<int, Ref<ConnectedPeer> >::Element *E = peer_map.front(); E; E = E->next()) {
+ Dictionary d;
+ _peer_to_dict(E->get(), d);
+ out[E->key()] = d;
+ }
+ return out;
+}
+
+Error WebRTCMultiplayer::add_peer(Ref<WebRTCPeerConnection> p_peer, int p_peer_id, int p_unreliable_lifetime) {
+ ERR_FAIL_COND_V(p_peer_id < 0 || p_peer_id > ~(1 << 31), ERR_INVALID_PARAMETER);
+ ERR_FAIL_COND_V(p_unreliable_lifetime < 0, ERR_INVALID_PARAMETER);
+ ERR_FAIL_COND_V(refuse_connections, ERR_UNAUTHORIZED);
+ // Peer must be valid, and in new state (to create data channels)
+ ERR_FAIL_COND_V(!p_peer.is_valid(), ERR_INVALID_PARAMETER);
+ ERR_FAIL_COND_V(p_peer->get_connection_state() != WebRTCPeerConnection::STATE_NEW, ERR_INVALID_PARAMETER);
+
+ Ref<ConnectedPeer> peer = memnew(ConnectedPeer);
+ peer->connection = p_peer;
+
+ // Initialize data channels
+ Dictionary cfg;
+ cfg["negotiated"] = true;
+ cfg["ordered"] = true;
+
+ cfg["id"] = 1;
+ peer->channels[CH_RELIABLE] = p_peer->create_data_channel("reliable", cfg);
+ ERR_FAIL_COND_V(!peer->channels[CH_RELIABLE].is_valid(), FAILED);
+
+ cfg["id"] = 2;
+ cfg["maxPacketLifetime"] = p_unreliable_lifetime;
+ peer->channels[CH_ORDERED] = p_peer->create_data_channel("ordered", cfg);
+ ERR_FAIL_COND_V(!peer->channels[CH_ORDERED].is_valid(), FAILED);
+
+ cfg["id"] = 3;
+ cfg["ordered"] = false;
+ peer->channels[CH_UNRELIABLE] = p_peer->create_data_channel("unreliable", cfg);
+ ERR_FAIL_COND_V(!peer->channels[CH_UNRELIABLE].is_valid(), FAILED);
+
+ peer_map[p_peer_id] = peer; // add the new peer connection to the peer_map
+
+ return OK;
+}
+
+void WebRTCMultiplayer::remove_peer(int p_peer_id) {
+ ERR_FAIL_COND(!peer_map.has(p_peer_id));
+ Ref<ConnectedPeer> peer = peer_map[p_peer_id];
+ peer_map.erase(p_peer_id);
+ if (peer->connected) {
+ peer->connected = false;
+ emit_signal("peer_disconnected", p_peer_id);
+ if (server_compat && p_peer_id == TARGET_PEER_SERVER) {
+ emit_signal("server_disconnected");
+ connection_status = CONNECTION_DISCONNECTED;
+ }
+ }
+}
+
+Error WebRTCMultiplayer::get_packet(const uint8_t **r_buffer, int &r_buffer_size) {
+ // Peer not available
+ if (next_packet_peer == 0 || !peer_map.has(next_packet_peer)) {
+ _find_next_peer();
+ ERR_FAIL_V(ERR_UNAVAILABLE);
+ }
+ for (List<Ref<WebRTCDataChannel> >::Element *E = peer_map[next_packet_peer]->channels.front(); E; E = E->next()) {
+ if (E->get()->get_available_packet_count()) {
+ Error err = E->get()->get_packet(r_buffer, r_buffer_size);
+ _find_next_peer();
+ return err;
+ }
+ }
+ // Channels for that peer were empty. Bug?
+ _find_next_peer();
+ ERR_FAIL_V(ERR_BUG);
+}
+
+Error WebRTCMultiplayer::put_packet(const uint8_t *p_buffer, int p_buffer_size) {
+ ERR_FAIL_COND_V(connection_status == CONNECTION_DISCONNECTED, ERR_UNCONFIGURED);
+
+ int ch = CH_RELIABLE;
+ switch (transfer_mode) {
+ case TRANSFER_MODE_RELIABLE:
+ ch = CH_RELIABLE;
+ break;
+ case TRANSFER_MODE_UNRELIABLE_ORDERED:
+ ch = CH_ORDERED;
+ break;
+ case TRANSFER_MODE_UNRELIABLE:
+ ch = CH_UNRELIABLE;
+ break;
+ }
+
+ Map<int, Ref<ConnectedPeer> >::Element *E = NULL;
+
+ if (target_peer > 0) {
+
+ E = peer_map.find(target_peer);
+ if (!E) {
+ ERR_EXPLAIN("Invalid Target Peer: " + itos(target_peer));
+ ERR_FAIL_V(ERR_INVALID_PARAMETER);
+ }
+ ERR_FAIL_COND_V(E->value()->channels.size() <= ch, ERR_BUG);
+ ERR_FAIL_COND_V(!E->value()->channels[ch].is_valid(), ERR_BUG);
+ return E->value()->channels[ch]->put_packet(p_buffer, p_buffer_size);
+
+ } else {
+ int exclude = -target_peer;
+
+ for (Map<int, Ref<ConnectedPeer> >::Element *F = peer_map.front(); F; F = F->next()) {
+
+			// Exclude peer from broadcast. If target_peer == 0 then don't exclude any peers.
+ if (target_peer != 0 && F->key() == exclude)
+ continue;
+
+ ERR_CONTINUE(F->value()->channels.size() <= ch || !F->value()->channels[ch].is_valid());
+ F->value()->channels[ch]->put_packet(p_buffer, p_buffer_size);
+ }
+ }
+ return OK;
+}
+
+int WebRTCMultiplayer::get_available_packet_count() const {
+ if (next_packet_peer == 0)
+		return 0; // To be sure next call to get_packet works if size > 0.
+ int size = 0;
+ for (Map<int, Ref<ConnectedPeer> >::Element *E = peer_map.front(); E; E = E->next()) {
+ for (List<Ref<WebRTCDataChannel> >::Element *F = E->get()->channels.front(); F; F = F->next()) {
+ size += F->get()->get_available_packet_count();
+ }
+ }
+ return size;
+}
+
+int WebRTCMultiplayer::get_max_packet_size() const {
+ return 1200;
+}
+
+void WebRTCMultiplayer::close() {
+ peer_map.clear();
+ unique_id = 0;
+ next_packet_peer = 0;
+ target_peer = 0;
+ connection_status = CONNECTION_DISCONNECTED;
+}
+
+WebRTCMultiplayer::WebRTCMultiplayer() {
+ unique_id = 0;
+ next_packet_peer = 0;
+ target_peer = 0;
+ transfer_mode = TRANSFER_MODE_RELIABLE;
+ refuse_connections = false;
+ connection_status = CONNECTION_DISCONNECTED;
+ server_compat = false;
+}
+
+WebRTCMultiplayer::~WebRTCMultiplayer() {
+ close();
+}
diff --git a/modules/webrtc/webrtc_multiplayer.h b/modules/webrtc/webrtc_multiplayer.h
new file mode 100644
index 0000000000..82bbfd4f68
--- /dev/null
+++ b/modules/webrtc/webrtc_multiplayer.h
@@ -0,0 +1,116 @@
+/*************************************************************************/
+/* webrtc_multiplayer.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef WEBRTC_MULTIPLAYER_H
+#define WEBRTC_MULTIPLAYER_H
+
+#include "core/io/networked_multiplayer_peer.h"
+#include "webrtc_peer_connection.h"
+
+class WebRTCMultiplayer : public NetworkedMultiplayerPeer {
+
+ GDCLASS(WebRTCMultiplayer, NetworkedMultiplayerPeer);
+
+protected:
+ static void _bind_methods();
+
+private:
+ enum {
+ CH_RELIABLE = 0,
+ CH_ORDERED = 1,
+ CH_UNRELIABLE = 2,
+ CH_RESERVED_MAX = 3
+ };
+
+ class ConnectedPeer : public Reference {
+
+ public:
+ Ref<WebRTCPeerConnection> connection;
+ List<Ref<WebRTCDataChannel> > channels;
+ bool connected;
+
+ ConnectedPeer() {
+ connected = false;
+ for (int i = 0; i < CH_RESERVED_MAX; i++)
+ channels.push_front(Ref<WebRTCDataChannel>());
+ }
+ };
+
+ uint32_t unique_id;
+ int target_peer;
+ int client_count;
+ bool refuse_connections;
+ ConnectionStatus connection_status;
+ TransferMode transfer_mode;
+ int next_packet_peer;
+ bool server_compat;
+
+ Map<int, Ref<ConnectedPeer> > peer_map;
+
+ void _peer_to_dict(Ref<ConnectedPeer> p_connected_peer, Dictionary &r_dict);
+ void _find_next_peer();
+
+public:
+ WebRTCMultiplayer();
+ ~WebRTCMultiplayer();
+
+ Error initialize(int p_self_id, bool p_server_compat = false);
+ Error add_peer(Ref<WebRTCPeerConnection> p_peer, int p_peer_id, int p_unreliable_lifetime = 1);
+ void remove_peer(int p_peer_id);
+ bool has_peer(int p_peer_id);
+ Dictionary get_peer(int p_peer_id);
+ Dictionary get_peers();
+ void close();
+
+ // PacketPeer
+ Error get_packet(const uint8_t **r_buffer, int &r_buffer_size); ///< buffer is GONE after next get_packet
+ Error put_packet(const uint8_t *p_buffer, int p_buffer_size);
+ int get_available_packet_count() const;
+ int get_max_packet_size() const;
+
+ // NetworkedMultiplayerPeer
+ void set_transfer_mode(TransferMode p_mode);
+ TransferMode get_transfer_mode() const;
+ void set_target_peer(int p_peer_id);
+
+ int get_unique_id() const;
+ int get_packet_peer() const;
+
+ bool is_server() const;
+
+ void poll();
+
+ void set_refuse_new_connections(bool p_enable);
+ bool is_refusing_new_connections() const;
+
+ ConnectionStatus get_connection_status() const;
+};
+
+#endif
diff --git a/platform/android/java/src/org/godotengine/godot/Godot.java b/platform/android/java/src/org/godotengine/godot/Godot.java
index 0eeaf0701c..751e885118 100644
--- a/platform/android/java/src/org/godotengine/godot/Godot.java
+++ b/platform/android/java/src/org/godotengine/godot/Godot.java
@@ -30,8 +30,6 @@
package org.godotengine.godot;
-//import android.R;
-
import android.Manifest;
import android.app.Activity;
import android.app.ActivityManager;
@@ -64,6 +62,7 @@ import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.MotionEvent;
+import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
@@ -73,7 +72,6 @@ import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ProgressBar;
-import android.widget.RelativeLayout;
import android.widget.TextView;
import com.google.android.vending.expansion.downloader.DownloadProgressInfo;
import com.google.android.vending.expansion.downloader.DownloaderClientMarshaller;
@@ -94,6 +92,7 @@ import java.util.Locale;
import javax.microedition.khronos.opengles.GL10;
import org.godotengine.godot.input.GodotEditText;
import org.godotengine.godot.payments.PaymentsManager;
+import org.godotengine.godot.xr.XRMode;
public class Godot extends Activity implements SensorEventListener, IDownloaderClient {
@@ -120,6 +119,7 @@ public class Godot extends Activity implements SensorEventListener, IDownloaderC
private boolean use_immersive = false;
private boolean use_debug_opengl = false;
private boolean mStatePaused;
+ private boolean activityResumed;
private int mState;
static private Intent mCurrentIntent;
@@ -282,7 +282,7 @@ public class Godot extends Activity implements SensorEventListener, IDownloaderC
// ...add to FrameLayout
layout.addView(edittext);
- mView = new GodotView(getApplication(), io, use_gl3, use_32_bits, use_debug_opengl, this);
+ mView = new GodotView(this, XRMode.PANCAKE, use_gl3, use_32_bits, use_debug_opengl);
layout.addView(mView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
edittext.setView(mView);
io.setEdit(edittext);
@@ -402,6 +402,20 @@ public class Godot extends Activity implements SensorEventListener, IDownloaderC
}
}
+ /**
+ * Used by the native code (java_godot_wrapper.h) to check whether the activity is resumed or paused.
+ */
+ private boolean isActivityResumed() {
+ return activityResumed;
+ }
+
+ /**
+ * Used by the native code (java_godot_wrapper.h) to access the Android surface.
+ */
+ private Surface getSurface() {
+ return mView.getHolder().getSurface();
+ }
+
String expansion_pack_path;
private void initializeGodot() {
@@ -612,6 +626,8 @@ public class Godot extends Activity implements SensorEventListener, IDownloaderC
@Override
protected void onPause() {
super.onPause();
+ activityResumed = false;
+
if (!godot_initialized) {
if (null != mDownloaderClientStub) {
mDownloaderClientStub.disconnect(this);
@@ -687,6 +703,8 @@ public class Godot extends Activity implements SensorEventListener, IDownloaderC
singletons[i].onMainResume();
}
+
+ activityResumed = true;
}
public void UiChangeListener() {
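The GodotView constructor call patched above is where the activity now selects the rendering pipeline through the new XRMode enum. As a hedged sketch (not part of the patch), a build targeting the Oculus mobile SDK would only need to swap the mode constant; GodotView then installs the Ovr* EGL classes added later in this patch instead of the Pancake* ones:

    // Hypothetical variant of the patched constructor call, for an Oculus mobile build.
    mView = new GodotView(this, XRMode.OVR, use_gl3, use_32_bits, use_debug_opengl);
    layout.addView(mView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));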
diff --git a/platform/android/java/src/org/godotengine/godot/GodotRenderer.java b/platform/android/java/src/org/godotengine/godot/GodotRenderer.java
new file mode 100644
index 0000000000..8e3775c2a9
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/GodotRenderer.java
@@ -0,0 +1,61 @@
+/*************************************************************************/
+/* GodotRenderer.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot;
+
+import android.opengl.GLSurfaceView;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+import org.godotengine.godot.utils.GLUtils;
+
+/**
+ * Godot's renderer implementation.
+ */
+class GodotRenderer implements GLSurfaceView.Renderer {
+
+ public void onDrawFrame(GL10 gl) {
+ GodotLib.step();
+ for (int i = 0; i < Godot.singleton_count; i++) {
+ Godot.singletons[i].onGLDrawFrame(gl);
+ }
+ }
+
+ public void onSurfaceChanged(GL10 gl, int width, int height) {
+
+ GodotLib.resize(width, height);
+ for (int i = 0; i < Godot.singleton_count; i++) {
+ Godot.singletons[i].onGLSurfaceChanged(gl, width, height);
+ }
+ }
+
+ public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+ GodotLib.newcontext(GLUtils.use_32);
+ }
+}
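GodotRenderer is attached through GLSurfaceView.setRenderer() in the GodotView changes below, so onDrawFrame(), onSurfaceChanged() and onSurfaceCreated() all run on the GL thread. A minimal sketch of that contract, assuming a GLSurfaceView instance named view in the same package: work that must be serialized with these callbacks is posted through queueEvent(), the same pattern GodotInputHandler uses for joystick events.

    view.setRenderer(new GodotRenderer());
    view.queueEvent(new Runnable() {
        @Override
        public void run() {
            // Runs on the GL thread, between onDrawFrame() calls.
        }
    });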
diff --git a/platform/android/java/src/org/godotengine/godot/GodotView.java b/platform/android/java/src/org/godotengine/godot/GodotView.java
index ab28d9ec33..1c189a1579 100644
--- a/platform/android/java/src/org/godotengine/godot/GodotView.java
+++ b/platform/android/java/src/org/godotengine/godot/GodotView.java
@@ -30,28 +30,20 @@
package org.godotengine.godot;
import android.annotation.SuppressLint;
-import android.content.Context;
-import android.content.ContextWrapper;
import android.graphics.PixelFormat;
-import android.hardware.input.InputManager;
import android.opengl.GLSurfaceView;
-import android.util.AttributeSet;
-import android.util.Log;
-import android.view.InputDevice;
import android.view.KeyEvent;
import android.view.MotionEvent;
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.opengles.GL10;
-import org.godotengine.godot.input.InputManagerCompat;
-import org.godotengine.godot.input.InputManagerCompat.InputDeviceListener;
+import org.godotengine.godot.input.GodotInputHandler;
+import org.godotengine.godot.utils.GLUtils;
+import org.godotengine.godot.xr.XRMode;
+import org.godotengine.godot.xr.ovr.OvrConfigChooser;
+import org.godotengine.godot.xr.ovr.OvrContextFactory;
+import org.godotengine.godot.xr.ovr.OvrWindowSurfaceFactory;
+import org.godotengine.godot.xr.pancake.PancakeConfigChooser;
+import org.godotengine.godot.xr.pancake.PancakeContextFactory;
+import org.godotengine.godot.xr.pancake.PancakeFallbackConfigChooser;
+
/**
* A simple GLSurfaceView sub-class that demonstrate how to perform
* OpenGL ES 2.0 rendering into a GL Surface. Note the following important
@@ -70,57 +62,26 @@ import org.godotengine.godot.input.InputManagerCompat.InputDeviceListener;
* that matches it exactly (with regards to red/green/blue/alpha channels
* bit depths). Failure to do so would result in an EGL_BAD_MATCH error.
*/
-public class GodotView extends GLSurfaceView implements InputDeviceListener {
-
- private static String TAG = "GodotView";
- private static final boolean DEBUG = false;
- private Context ctx;
-
- private GodotIO io;
- private static boolean use_gl3 = false;
- private static boolean use_32 = false;
- private static boolean use_debug_opengl = false;
+public class GodotView extends GLSurfaceView {
- private Godot activity;
+ private static String TAG = GodotView.class.getSimpleName();
- private InputManagerCompat mInputManager;
- public GodotView(Context context, GodotIO p_io, boolean p_use_gl3, boolean p_use_32_bits, boolean p_use_debug_opengl, Godot p_activity) {
- super(context);
- ctx = context;
- io = p_io;
- use_gl3 = p_use_gl3;
- use_32 = p_use_32_bits;
- use_debug_opengl = p_use_debug_opengl;
+ private final Godot activity;
+ private final GodotInputHandler inputHandler;
- activity = p_activity;
-
- setPreserveEGLContextOnPause(true);
-
- mInputManager = InputManagerCompat.Factory.getInputManager(this.getContext());
- mInputManager.registerInputDeviceListener(this, null);
- init(false, 16, 0);
- }
-
- public GodotView(Context context) {
- super(context);
- ctx = context;
- }
+ public GodotView(Godot activity, XRMode xrMode, boolean p_use_gl3, boolean p_use_32_bits, boolean p_use_debug_opengl) {
+ super(activity);
+ GLUtils.use_gl3 = p_use_gl3;
+ GLUtils.use_32 = p_use_32_bits;
+ GLUtils.use_debug_opengl = p_use_debug_opengl;
- public GodotView(Context context, boolean translucent, int depth, int stencil) {
- super(context);
- init(translucent, depth, stencil);
+ this.activity = activity;
+ this.inputHandler = new GodotInputHandler(this);
+ init(xrMode, false, 16, 0);
}
public void initInputDevices() {
- /* initially add input devices*/
- int[] deviceIds = mInputManager.getInputDeviceIds();
- for (int deviceId : deviceIds) {
- InputDevice device = mInputManager.getInputDevice(deviceId);
- if (DEBUG) {
- Log.v("GodotView", String.format("init() deviceId:%d, Name:%s\n", deviceId, device.getName()));
- }
- onInputDeviceAdded(deviceId);
- }
+ this.inputHandler.initInputDevices();
}
@SuppressLint("ClickableViewAccessibility")
@@ -130,617 +91,80 @@ public class GodotView extends GLSurfaceView implements InputDeviceListener {
return activity.gotTouchEvent(event);
}
- public int get_godot_button(int keyCode) {
-
- int button;
- switch (keyCode) {
- case KeyEvent.KEYCODE_BUTTON_A: // Android A is SNES B
- button = 0;
- break;
- case KeyEvent.KEYCODE_BUTTON_B:
- button = 1;
- break;
- case KeyEvent.KEYCODE_BUTTON_X: // Android X is SNES Y
- button = 2;
- break;
- case KeyEvent.KEYCODE_BUTTON_Y:
- button = 3;
- break;
- case KeyEvent.KEYCODE_BUTTON_L1:
- button = 9;
- break;
- case KeyEvent.KEYCODE_BUTTON_L2:
- button = 15;
- break;
- case KeyEvent.KEYCODE_BUTTON_R1:
- button = 10;
- break;
- case KeyEvent.KEYCODE_BUTTON_R2:
- button = 16;
- break;
- case KeyEvent.KEYCODE_BUTTON_SELECT:
- button = 4;
- break;
- case KeyEvent.KEYCODE_BUTTON_START:
- button = 6;
- break;
- case KeyEvent.KEYCODE_BUTTON_THUMBL:
- button = 7;
- break;
- case KeyEvent.KEYCODE_BUTTON_THUMBR:
- button = 8;
- break;
- case KeyEvent.KEYCODE_DPAD_UP:
- button = 11;
- break;
- case KeyEvent.KEYCODE_DPAD_DOWN:
- button = 12;
- break;
- case KeyEvent.KEYCODE_DPAD_LEFT:
- button = 13;
- break;
- case KeyEvent.KEYCODE_DPAD_RIGHT:
- button = 14;
- break;
- case KeyEvent.KEYCODE_BUTTON_C:
- button = 17;
- break;
- case KeyEvent.KEYCODE_BUTTON_Z:
- button = 18;
- break;
-
- default:
- button = keyCode - KeyEvent.KEYCODE_BUTTON_1 + 20;
- break;
- }
- return button;
- };
-
- private static class joystick {
- public int device_id;
- public String name;
- public ArrayList<InputDevice.MotionRange> axes;
- public ArrayList<InputDevice.MotionRange> hats;
- }
-
- private static class RangeComparator implements Comparator<InputDevice.MotionRange> {
- @Override
- public int compare(InputDevice.MotionRange arg0, InputDevice.MotionRange arg1) {
- return arg0.getAxis() - arg1.getAxis();
- }
- }
-
- ArrayList<joystick> joy_devices = new ArrayList<joystick>();
-
- private int find_joy_device(int device_id) {
- for (int i = 0; i < joy_devices.size(); i++) {
- if (joy_devices.get(i).device_id == device_id) {
- return i;
- }
- }
-
- return -1;
- }
-
- @Override
- public void onInputDeviceAdded(int deviceId) {
- int id = find_joy_device(deviceId);
-
- // Check if the device has not been already added
- if (id < 0) {
- InputDevice device = mInputManager.getInputDevice(deviceId);
- //device can be null if deviceId is not found
- if (device != null) {
- int sources = device.getSources();
- if (((sources & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) ||
- ((sources & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK)) {
- id = joy_devices.size();
-
- joystick joy = new joystick();
- joy.device_id = deviceId;
- joy.name = device.getName();
- joy.axes = new ArrayList<InputDevice.MotionRange>();
- joy.hats = new ArrayList<InputDevice.MotionRange>();
-
- List<InputDevice.MotionRange> ranges = device.getMotionRanges();
- Collections.sort(ranges, new RangeComparator());
-
- for (InputDevice.MotionRange range : ranges) {
- if (range.getAxis() == MotionEvent.AXIS_HAT_X || range.getAxis() == MotionEvent.AXIS_HAT_Y) {
- joy.hats.add(range);
- } else {
- joy.axes.add(range);
- }
- }
-
- joy_devices.add(joy);
-
- final int device_id = id;
- final String name = joy.name;
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joyconnectionchanged(device_id, true, name);
- }
- });
- }
- }
- }
- }
-
- @Override
- public void onInputDeviceRemoved(int deviceId) {
- final int device_id = find_joy_device(deviceId);
-
- // Check if the evice has not been already removed
- if (device_id > -1) {
- joy_devices.remove(device_id);
-
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joyconnectionchanged(device_id, false, "");
- }
- });
- }
- }
-
- @Override
- public void onInputDeviceChanged(int deviceId) {
- onInputDeviceRemoved(deviceId);
- onInputDeviceAdded(deviceId);
- }
@Override
public boolean onKeyUp(final int keyCode, KeyEvent event) {
-
- if (keyCode == KeyEvent.KEYCODE_BACK) {
- return true;
- }
-
- if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
- return super.onKeyUp(keyCode, event);
- };
-
- int source = event.getSource();
- if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_DPAD) == InputDevice.SOURCE_DPAD || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) {
-
- final int button = get_godot_button(keyCode);
- final int device_id = find_joy_device(event.getDeviceId());
-
- // Check if the device exists
- if (device_id > -1) {
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joybutton(device_id, button, false);
- }
- });
- return true;
- }
- } else {
- final int chr = event.getUnicodeChar(0);
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.key(keyCode, chr, false);
- }
- });
- };
-
- return super.onKeyUp(keyCode, event);
- };
+ return inputHandler.onKeyUp(keyCode, event) || super.onKeyUp(keyCode, event);
+ }
@Override
public boolean onKeyDown(final int keyCode, KeyEvent event) {
-
- if (keyCode == KeyEvent.KEYCODE_BACK) {
- activity.onBackPressed();
- // press 'back' button should not terminate program
- //normal handle 'back' event in game logic
- return true;
- }
-
- if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
- return super.onKeyDown(keyCode, event);
- };
-
- int source = event.getSource();
- //Log.e(TAG, String.format("Key down! source %d, device %d, joystick %d, %d, %d", event.getDeviceId(), source, (source & InputDevice.SOURCE_JOYSTICK), (source & InputDevice.SOURCE_DPAD), (source & InputDevice.SOURCE_GAMEPAD)));
-
- if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_DPAD) == InputDevice.SOURCE_DPAD || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) {
-
- if (event.getRepeatCount() > 0) // ignore key echo
- return true;
-
- final int button = get_godot_button(keyCode);
- final int device_id = find_joy_device(event.getDeviceId());
-
- // Check if the device exists
- if (device_id > -1) {
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joybutton(device_id, button, true);
- }
- });
- return true;
- }
- } else {
- final int chr = event.getUnicodeChar(0);
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.key(keyCode, chr, true);
- }
- });
- };
-
- return super.onKeyDown(keyCode, event);
+ return inputHandler.onKeyDown(keyCode, event) || super.onKeyDown(keyCode, event);
}
@Override
public boolean onGenericMotionEvent(MotionEvent event) {
-
- if ((event.getSource() & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK && event.getAction() == MotionEvent.ACTION_MOVE) {
-
- final int device_id = find_joy_device(event.getDeviceId());
-
- // Check if the device exists
- if (device_id > -1) {
- joystick joy = joy_devices.get(device_id);
-
- for (int i = 0; i < joy.axes.size(); i++) {
- InputDevice.MotionRange range = joy.axes.get(i);
- final float value = (event.getAxisValue(range.getAxis()) - range.getMin()) / range.getRange() * 2.0f - 1.0f;
- final int idx = i;
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joyaxis(device_id, idx, value);
- }
- });
- }
-
- for (int i = 0; i < joy.hats.size(); i += 2) {
- final int hatX = Math.round(event.getAxisValue(joy.hats.get(i).getAxis()));
- final int hatY = Math.round(event.getAxisValue(joy.hats.get(i + 1).getAxis()));
- queueEvent(new Runnable() {
- @Override
- public void run() {
- GodotLib.joyhat(device_id, hatX, hatY);
- }
- });
- }
- return true;
- }
- };
-
- return super.onGenericMotionEvent(event);
- };
-
- private void init(boolean translucent, int depth, int stencil) {
-
- this.setFocusableInTouchMode(true);
- /* By default, GLSurfaceView() creates a RGB_565 opaque surface.
- * If we want a translucent one, we should change the surface's
- * format here, using PixelFormat.TRANSLUCENT for GL Surfaces
- * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
- */
- if (translucent) {
- this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
- }
-
- /* Setup the context factory for 2.0 rendering.
- * See ContextFactory class definition below
- */
- setEGLContextFactory(new ContextFactory());
-
- /* We need to choose an EGLConfig that matches the format of
- * our surface exactly. This is going to be done in our
- * custom config chooser. See ConfigChooser class definition
- * below.
- */
-
- if (use_32) {
- setEGLConfigChooser(translucent ?
- new FallbackConfigChooser(8, 8, 8, 8, 24, stencil, new ConfigChooser(8, 8, 8, 8, 16, stencil)) :
- new FallbackConfigChooser(8, 8, 8, 8, 24, stencil, new ConfigChooser(5, 6, 5, 0, 16, stencil)));
-
- } else {
- setEGLConfigChooser(translucent ?
- new ConfigChooser(8, 8, 8, 8, 16, stencil) :
- new ConfigChooser(5, 6, 5, 0, 16, stencil));
- }
-
- /* Set the renderer responsible for frame rendering */
- setRenderer(new Renderer());
- }
-
- private static final int _EGL_CONTEXT_FLAGS_KHR = 0x30FC;
- private static final int _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR = 0x00000001;
-
- private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
- private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
- public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
- String driver_name = GodotLib.getGlobal("rendering/quality/driver/driver_name");
- if (use_gl3 && !driver_name.equals("GLES3")) {
- use_gl3 = false;
- }
- if (use_gl3)
- Log.w(TAG, "creating OpenGL ES 3.0 context :");
- else
- Log.w(TAG, "creating OpenGL ES 2.0 context :");
-
- checkEglError("Before eglCreateContext", egl);
- EGLContext context;
- if (use_debug_opengl) {
- int[] attrib_list2 = { EGL_CONTEXT_CLIENT_VERSION, 2, _EGL_CONTEXT_FLAGS_KHR, _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, EGL10.EGL_NONE };
- int[] attrib_list3 = { EGL_CONTEXT_CLIENT_VERSION, 3, _EGL_CONTEXT_FLAGS_KHR, _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, EGL10.EGL_NONE };
- context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, use_gl3 ? attrib_list3 : attrib_list2);
- } else {
- int[] attrib_list2 = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
- int[] attrib_list3 = { EGL_CONTEXT_CLIENT_VERSION, 3, EGL10.EGL_NONE };
- context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, use_gl3 ? attrib_list3 : attrib_list2);
- }
- checkEglError("After eglCreateContext", egl);
- return context;
- }
-
- public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
- egl.eglDestroyContext(display, context);
- }
- }
-
- private static void checkEglError(String prompt, EGL10 egl) {
- int error;
- while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
- Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
- }
- }
- /* Fallback if 32bit View is not supported*/
- private static class FallbackConfigChooser extends ConfigChooser {
- private ConfigChooser fallback;
-
- public FallbackConfigChooser(int r, int g, int b, int a, int depth, int stencil, ConfigChooser fallback) {
- super(r, g, b, a, depth, stencil);
- this.fallback = fallback;
- }
-
- @Override
- public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
- EGLConfig ec = super.chooseConfig(egl, display, configs);
- if (ec == null) {
- Log.w(TAG, "Trying ConfigChooser fallback");
- ec = fallback.chooseConfig(egl, display, configs);
- use_32 = false;
- }
- return ec;
- }
+ return inputHandler.onGenericMotionEvent(event) || super.onGenericMotionEvent(event);
}
- private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
-
- public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
- mRedSize = r;
- mGreenSize = g;
- mBlueSize = b;
- mAlphaSize = a;
- mDepthSize = depth;
- mStencilSize = stencil;
- }
-
- /* This EGL config specification is used to specify 2.0 rendering.
- * We use a minimum size of 4 bits for red/green/blue, but will
- * perform actual matching in chooseConfig() below.
- */
- private static int EGL_OPENGL_ES2_BIT = 4;
- private static int[] s_configAttribs2 = {
- EGL10.EGL_RED_SIZE, 4,
- EGL10.EGL_GREEN_SIZE, 4,
- EGL10.EGL_BLUE_SIZE, 4,
- // EGL10.EGL_DEPTH_SIZE, 16,
- // EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL10.EGL_NONE
- };
- private static int[] s_configAttribs3 = {
- EGL10.EGL_RED_SIZE, 4,
- EGL10.EGL_GREEN_SIZE, 4,
- EGL10.EGL_BLUE_SIZE, 4,
- // EGL10.EGL_DEPTH_SIZE, 16,
- // EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, //apparently there is no EGL_OPENGL_ES3_BIT
- EGL10.EGL_NONE
- };
-
- public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
-
- /* Get the number of minimally matching EGL configurations
- */
- int[] num_config = new int[1];
- egl.eglChooseConfig(display, use_gl3 ? s_configAttribs3 : s_configAttribs2, null, 0, num_config);
-
- int numConfigs = num_config[0];
-
- if (numConfigs <= 0) {
- throw new IllegalArgumentException("No configs match configSpec");
- }
-
- /* Allocate then read the array of minimally matching EGL configs
- */
- EGLConfig[] configs = new EGLConfig[numConfigs];
- egl.eglChooseConfig(display, use_gl3 ? s_configAttribs3 : s_configAttribs2, configs, numConfigs, num_config);
+ private void init(XRMode xrMode, boolean translucent, int depth, int stencil) {
- if (DEBUG) {
- printConfigs(egl, display, configs);
- }
- /* Now return the "best" one
- */
- return chooseConfig(egl, display, configs);
- }
+ setPreserveEGLContextOnPause(true);
+ setFocusableInTouchMode(true);
+ switch (xrMode) {
- public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
- EGLConfig[] configs) {
- for (EGLConfig config : configs) {
- int d = findConfigAttrib(egl, display, config,
- EGL10.EGL_DEPTH_SIZE, 0);
- int s = findConfigAttrib(egl, display, config,
- EGL10.EGL_STENCIL_SIZE, 0);
+ case OVR:
+				// Replace the default EGL config chooser.
+ setEGLConfigChooser(new OvrConfigChooser());
- // We need at least mDepthSize and mStencilSize bits
- if (d < mDepthSize || s < mStencilSize)
- continue;
+ // Replace the default context factory.
+ setEGLContextFactory(new OvrContextFactory());
- // We want an *exact* match for red/green/blue/alpha
- int r = findConfigAttrib(egl, display, config,
- EGL10.EGL_RED_SIZE, 0);
- int g = findConfigAttrib(egl, display, config,
- EGL10.EGL_GREEN_SIZE, 0);
- int b = findConfigAttrib(egl, display, config,
- EGL10.EGL_BLUE_SIZE, 0);
- int a = findConfigAttrib(egl, display, config,
- EGL10.EGL_ALPHA_SIZE, 0);
+ // Replace the default window surface factory.
+ setEGLWindowSurfaceFactory(new OvrWindowSurfaceFactory());
+ break;
- if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
- return config;
- }
- return null;
- }
+ case PANCAKE:
+ default:
+ /* By default, GLSurfaceView() creates a RGB_565 opaque surface.
+ * If we want a translucent one, we should change the surface's
+ * format here, using PixelFormat.TRANSLUCENT for GL Surfaces
+ * is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
+ */
+ if (translucent) {
+ this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
+ }
- private int findConfigAttrib(EGL10 egl, EGLDisplay display,
- EGLConfig config, int attribute, int defaultValue) {
+				/* Set up the context factory for 2.0 rendering.
+				 * See the PancakeContextFactory class definition.
+				 */
+ setEGLContextFactory(new PancakeContextFactory());
- if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
- return mValue[0];
- }
- return defaultValue;
- }
+				/* We need to choose an EGLConfig that matches the format of
+				 * our surface exactly. This is done in our custom config
+				 * chooser; see the PancakeConfigChooser class definition.
+				 */
- private void printConfigs(EGL10 egl, EGLDisplay display,
- EGLConfig[] configs) {
- int numConfigs = configs.length;
- Log.w(TAG, String.format("%d configurations", numConfigs));
- for (int i = 0; i < numConfigs; i++) {
- Log.w(TAG, String.format("Configuration %d:\n", i));
- printConfig(egl, display, configs[i]);
- }
- }
+ if (GLUtils.use_32) {
+ setEGLConfigChooser(translucent ?
+ new PancakeFallbackConfigChooser(8, 8, 8, 8, 24, stencil,
+ new PancakeConfigChooser(8, 8, 8, 8, 16, stencil)) :
+ new PancakeFallbackConfigChooser(8, 8, 8, 8, 24, stencil,
+ new PancakeConfigChooser(5, 6, 5, 0, 16, stencil)));
- private void printConfig(EGL10 egl, EGLDisplay display,
- EGLConfig config) {
- int[] attributes = {
- EGL10.EGL_BUFFER_SIZE,
- EGL10.EGL_ALPHA_SIZE,
- EGL10.EGL_BLUE_SIZE,
- EGL10.EGL_GREEN_SIZE,
- EGL10.EGL_RED_SIZE,
- EGL10.EGL_DEPTH_SIZE,
- EGL10.EGL_STENCIL_SIZE,
- EGL10.EGL_CONFIG_CAVEAT,
- EGL10.EGL_CONFIG_ID,
- EGL10.EGL_LEVEL,
- EGL10.EGL_MAX_PBUFFER_HEIGHT,
- EGL10.EGL_MAX_PBUFFER_PIXELS,
- EGL10.EGL_MAX_PBUFFER_WIDTH,
- EGL10.EGL_NATIVE_RENDERABLE,
- EGL10.EGL_NATIVE_VISUAL_ID,
- EGL10.EGL_NATIVE_VISUAL_TYPE,
- 0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
- EGL10.EGL_SAMPLES,
- EGL10.EGL_SAMPLE_BUFFERS,
- EGL10.EGL_SURFACE_TYPE,
- EGL10.EGL_TRANSPARENT_TYPE,
- EGL10.EGL_TRANSPARENT_RED_VALUE,
- EGL10.EGL_TRANSPARENT_GREEN_VALUE,
- EGL10.EGL_TRANSPARENT_BLUE_VALUE,
- 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
- 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
- 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
- 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
- EGL10.EGL_LUMINANCE_SIZE,
- EGL10.EGL_ALPHA_MASK_SIZE,
- EGL10.EGL_COLOR_BUFFER_TYPE,
- EGL10.EGL_RENDERABLE_TYPE,
- 0x3042 // EGL10.EGL_CONFORMANT
- };
- String[] names = {
- "EGL_BUFFER_SIZE",
- "EGL_ALPHA_SIZE",
- "EGL_BLUE_SIZE",
- "EGL_GREEN_SIZE",
- "EGL_RED_SIZE",
- "EGL_DEPTH_SIZE",
- "EGL_STENCIL_SIZE",
- "EGL_CONFIG_CAVEAT",
- "EGL_CONFIG_ID",
- "EGL_LEVEL",
- "EGL_MAX_PBUFFER_HEIGHT",
- "EGL_MAX_PBUFFER_PIXELS",
- "EGL_MAX_PBUFFER_WIDTH",
- "EGL_NATIVE_RENDERABLE",
- "EGL_NATIVE_VISUAL_ID",
- "EGL_NATIVE_VISUAL_TYPE",
- "EGL_PRESERVED_RESOURCES",
- "EGL_SAMPLES",
- "EGL_SAMPLE_BUFFERS",
- "EGL_SURFACE_TYPE",
- "EGL_TRANSPARENT_TYPE",
- "EGL_TRANSPARENT_RED_VALUE",
- "EGL_TRANSPARENT_GREEN_VALUE",
- "EGL_TRANSPARENT_BLUE_VALUE",
- "EGL_BIND_TO_TEXTURE_RGB",
- "EGL_BIND_TO_TEXTURE_RGBA",
- "EGL_MIN_SWAP_INTERVAL",
- "EGL_MAX_SWAP_INTERVAL",
- "EGL_LUMINANCE_SIZE",
- "EGL_ALPHA_MASK_SIZE",
- "EGL_COLOR_BUFFER_TYPE",
- "EGL_RENDERABLE_TYPE",
- "EGL_CONFORMANT"
- };
- int[] value = new int[1];
- for (int i = 0; i < attributes.length; i++) {
- int attribute = attributes[i];
- String name = names[i];
- if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
- Log.w(TAG, String.format(" %s: %d\n", name, value[0]));
} else {
- // Log.w(TAG, String.format(" %s: failed\n", name));
- while (egl.eglGetError() != EGL10.EGL_SUCCESS)
- ;
+ setEGLConfigChooser(translucent ?
+ new PancakeConfigChooser(8, 8, 8, 8, 16, stencil) :
+ new PancakeConfigChooser(5, 6, 5, 0, 16, stencil));
}
- }
+ break;
}
- // Subclasses can adjust these values:
- protected int mRedSize;
- protected int mGreenSize;
- protected int mBlueSize;
- protected int mAlphaSize;
- protected int mDepthSize;
- protected int mStencilSize;
- private int[] mValue = new int[1];
+ /* Set the renderer responsible for frame rendering */
+ setRenderer(new GodotRenderer());
}
- private static class Renderer implements GLSurfaceView.Renderer {
-
- public void onDrawFrame(GL10 gl) {
- GodotLib.step();
- for (int i = 0; i < Godot.singleton_count; i++) {
- Godot.singletons[i].onGLDrawFrame(gl);
- }
- }
-
- public void onSurfaceChanged(GL10 gl, int width, int height) {
-
- GodotLib.resize(width, height);
- for (int i = 0; i < Godot.singleton_count; i++) {
- Godot.singletons[i].onGLSurfaceChanged(gl, width, height);
- }
- }
-
- public void onSurfaceCreated(GL10 gl, EGLConfig config) {
- GodotLib.newcontext(use_32);
- }
+ public void onBackPressed() {
+ activity.onBackPressed();
}
}
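For reference, a short sketch of how an embedding activity wires the refactored view, mirroring the Godot.java hunk earlier in this patch (layout, activity and the use_* flags are assumed to already exist); initInputDevices() is what registers already connected gamepads with GodotLib through the new GodotInputHandler.

    GodotView view = new GodotView(activity, XRMode.PANCAKE, use_gl3, use_32_bits, use_debug_opengl);
    layout.addView(view, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
    view.initInputDevices(); // delegates to GodotInputHandler.initInputDevices()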
diff --git a/platform/android/java/src/org/godotengine/godot/input/GodotInputHandler.java b/platform/android/java/src/org/godotengine/godot/input/GodotInputHandler.java
new file mode 100644
index 0000000000..d01f958123
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/input/GodotInputHandler.java
@@ -0,0 +1,352 @@
+/*************************************************************************/
+/* GodotInputHandler.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.input;
+
+import static org.godotengine.godot.utils.GLUtils.DEBUG;
+
+import android.util.Log;
+import android.view.InputDevice;
+import android.view.InputDevice.MotionRange;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import org.godotengine.godot.GodotLib;
+import org.godotengine.godot.GodotView;
+import org.godotengine.godot.input.InputManagerCompat.InputDeviceListener;
+
+/**
+ * Handles input-related events for the {@link GodotView} view.
+ */
+public class GodotInputHandler implements InputDeviceListener {
+
+ private final ArrayList<Joystick> joysticksDevices = new ArrayList<Joystick>();
+
+ private final GodotView godotView;
+ private final InputManagerCompat inputManager;
+
+ public GodotInputHandler(GodotView godotView) {
+ this.godotView = godotView;
+ this.inputManager = InputManagerCompat.Factory.getInputManager(godotView.getContext());
+ this.inputManager.registerInputDeviceListener(this, null);
+ }
+
+ private void queueEvent(Runnable task) {
+ godotView.queueEvent(task);
+ }
+
+ public boolean onKeyUp(final int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ return true;
+ }
+
+ if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
+ return false;
+		}
+
+ int source = event.getSource();
+ if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_DPAD) == InputDevice.SOURCE_DPAD || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) {
+
+ final int button = getGodotButton(keyCode);
+ final int device_id = findJoystickDevice(event.getDeviceId());
+
+ // Check if the device exists
+ if (device_id > -1) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joybutton(device_id, button, false);
+ }
+ });
+ return true;
+ }
+ } else {
+ final int chr = event.getUnicodeChar(0);
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.key(keyCode, chr, false);
+ }
+ });
+		}
+
+ return false;
+ }
+
+ public boolean onKeyDown(final int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ godotView.onBackPressed();
+			// Pressing the 'back' button should not terminate the program;
+			// the 'back' event is normally handled by the game logic.
+ return true;
+ }
+
+ if (keyCode == KeyEvent.KEYCODE_VOLUME_UP || keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
+ return false;
+		}
+
+ int source = event.getSource();
+ //Log.e(TAG, String.format("Key down! source %d, device %d, joystick %d, %d, %d", event.getDeviceId(), source, (source & InputDevice.SOURCE_JOYSTICK), (source & InputDevice.SOURCE_DPAD), (source & InputDevice.SOURCE_GAMEPAD)));
+
+ if ((source & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK || (source & InputDevice.SOURCE_DPAD) == InputDevice.SOURCE_DPAD || (source & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) {
+
+ if (event.getRepeatCount() > 0) // ignore key echo
+ return true;
+
+ final int button = getGodotButton(keyCode);
+ final int device_id = findJoystickDevice(event.getDeviceId());
+
+ // Check if the device exists
+ if (device_id > -1) {
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joybutton(device_id, button, true);
+ }
+ });
+ return true;
+ }
+ } else {
+ final int chr = event.getUnicodeChar(0);
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.key(keyCode, chr, true);
+ }
+ });
+		}
+
+ return false;
+ }
+
+ public boolean onGenericMotionEvent(MotionEvent event) {
+ if ((event.getSource() & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK && event.getAction() == MotionEvent.ACTION_MOVE) {
+
+ final int device_id = findJoystickDevice(event.getDeviceId());
+
+ // Check if the device exists
+ if (device_id > -1) {
+ Joystick joy = joysticksDevices.get(device_id);
+
+ for (int i = 0; i < joy.axes.size(); i++) {
+ InputDevice.MotionRange range = joy.axes.get(i);
+ final float value = (event.getAxisValue(range.getAxis()) - range.getMin()) / range.getRange() * 2.0f - 1.0f;
+ final int idx = i;
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joyaxis(device_id, idx, value);
+ }
+ });
+ }
+
+ for (int i = 0; i < joy.hats.size(); i += 2) {
+ final int hatX = Math.round(event.getAxisValue(joy.hats.get(i).getAxis()));
+ final int hatY = Math.round(event.getAxisValue(joy.hats.get(i + 1).getAxis()));
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joyhat(device_id, hatX, hatY);
+ }
+ });
+ }
+ return true;
+ }
+		}
+
+ return false;
+ }
+
+ public void initInputDevices() {
+		/* Initially add input devices. */
+ int[] deviceIds = inputManager.getInputDeviceIds();
+ for (int deviceId : deviceIds) {
+ InputDevice device = inputManager.getInputDevice(deviceId);
+ if (DEBUG) {
+ Log.v("GodotView", String.format("init() deviceId:%d, Name:%s\n", deviceId, device.getName()));
+ }
+ onInputDeviceAdded(deviceId);
+ }
+ }
+
+ @Override
+ public void onInputDeviceAdded(int deviceId) {
+ int id = findJoystickDevice(deviceId);
+
+		// Check if the device has not already been added
+ if (id < 0) {
+ InputDevice device = inputManager.getInputDevice(deviceId);
+			// The device can be null if the deviceId is not found.
+ if (device != null) {
+ int sources = device.getSources();
+ if (((sources & InputDevice.SOURCE_GAMEPAD) == InputDevice.SOURCE_GAMEPAD) ||
+ ((sources & InputDevice.SOURCE_JOYSTICK) == InputDevice.SOURCE_JOYSTICK)) {
+ id = joysticksDevices.size();
+
+ Joystick joy = new Joystick();
+ joy.device_id = deviceId;
+ joy.name = device.getName();
+ joy.axes = new ArrayList<InputDevice.MotionRange>();
+ joy.hats = new ArrayList<InputDevice.MotionRange>();
+
+ List<InputDevice.MotionRange> ranges = device.getMotionRanges();
+ Collections.sort(ranges, new RangeComparator());
+
+ for (InputDevice.MotionRange range : ranges) {
+ if (range.getAxis() == MotionEvent.AXIS_HAT_X || range.getAxis() == MotionEvent.AXIS_HAT_Y) {
+ joy.hats.add(range);
+ } else {
+ joy.axes.add(range);
+ }
+ }
+
+ joysticksDevices.add(joy);
+
+ final int device_id = id;
+ final String name = joy.name;
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joyconnectionchanged(device_id, true, name);
+ }
+ });
+ }
+ }
+ }
+ }
+
+ @Override
+ public void onInputDeviceRemoved(int deviceId) {
+ final int device_id = findJoystickDevice(deviceId);
+
+		// Check if the device has not already been removed
+ if (device_id > -1) {
+ joysticksDevices.remove(device_id);
+
+ queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ GodotLib.joyconnectionchanged(device_id, false, "");
+ }
+ });
+ }
+ }
+
+ @Override
+ public void onInputDeviceChanged(int deviceId) {
+ onInputDeviceRemoved(deviceId);
+ onInputDeviceAdded(deviceId);
+ }
+
+ private static class RangeComparator implements Comparator<MotionRange> {
+ @Override
+ public int compare(MotionRange arg0, MotionRange arg1) {
+ return arg0.getAxis() - arg1.getAxis();
+ }
+ }
+
+ public static int getGodotButton(int keyCode) {
+ int button;
+ switch (keyCode) {
+ case KeyEvent.KEYCODE_BUTTON_A: // Android A is SNES B
+ button = 0;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_B:
+ button = 1;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_X: // Android X is SNES Y
+ button = 2;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_Y:
+ button = 3;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_L1:
+ button = 9;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_L2:
+ button = 15;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_R1:
+ button = 10;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_R2:
+ button = 16;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_SELECT:
+ button = 4;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_START:
+ button = 6;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_THUMBL:
+ button = 7;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_THUMBR:
+ button = 8;
+ break;
+ case KeyEvent.KEYCODE_DPAD_UP:
+ button = 11;
+ break;
+ case KeyEvent.KEYCODE_DPAD_DOWN:
+ button = 12;
+ break;
+ case KeyEvent.KEYCODE_DPAD_LEFT:
+ button = 13;
+ break;
+ case KeyEvent.KEYCODE_DPAD_RIGHT:
+ button = 14;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_C:
+ button = 17;
+ break;
+ case KeyEvent.KEYCODE_BUTTON_Z:
+ button = 18;
+ break;
+
+ default:
+ button = keyCode - KeyEvent.KEYCODE_BUTTON_1 + 20;
+ break;
+ }
+ return button;
+ }
+
+ private int findJoystickDevice(int device_id) {
+ for (int i = 0; i < joysticksDevices.size(); i++) {
+ if (joysticksDevices.get(i).device_id == device_id) {
+ return i;
+ }
+ }
+
+ return -1;
+ }
+}
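A worked example of the button mapping above, as a sketch rather than patch content: Android's KEYCODE_BUTTON_R1 maps to Godot joystick button 10, while pad buttons without an explicit case fall through to the KEYCODE_BUTTON_1-relative offset.

    int r1 = GodotInputHandler.getGodotButton(KeyEvent.KEYCODE_BUTTON_R1); // 10
    int pad5 = GodotInputHandler.getGodotButton(KeyEvent.KEYCODE_BUTTON_5); // 192 - 188 + 20 = 24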
diff --git a/platform/android/java/src/org/godotengine/godot/input/Joystick.java b/platform/android/java/src/org/godotengine/godot/input/Joystick.java
new file mode 100644
index 0000000000..ff95bfb0c5
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/input/Joystick.java
@@ -0,0 +1,44 @@
+/*************************************************************************/
+/* Joystick.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.input;
+
+import android.view.InputDevice.MotionRange;
+import java.util.ArrayList;
+
+/**
+ * POJO class to represent a Joystick input device.
+ */
+class Joystick {
+ int device_id;
+ String name;
+ ArrayList<MotionRange> axes;
+ ArrayList<MotionRange> hats;
+}
diff --git a/platform/android/java/src/org/godotengine/godot/utils/GLUtils.java b/platform/android/java/src/org/godotengine/godot/utils/GLUtils.java
new file mode 100644
index 0000000000..6c95494f8b
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/utils/GLUtils.java
@@ -0,0 +1,157 @@
+/*************************************************************************/
+/* GLUtils.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.utils;
+
+import android.util.Log;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+/**
+ * Contains GL utility methods.
+ */
+public class GLUtils {
+
+ private static final String TAG = GLUtils.class.getSimpleName();
+
+ public static final boolean DEBUG = false;
+
+ public static boolean use_gl3 = false;
+ public static boolean use_32 = false;
+ public static boolean use_debug_opengl = false;
+
+ private static final String[] ATTRIBUTES_NAMES = new String[] {
+ "EGL_BUFFER_SIZE",
+ "EGL_ALPHA_SIZE",
+ "EGL_BLUE_SIZE",
+ "EGL_GREEN_SIZE",
+ "EGL_RED_SIZE",
+ "EGL_DEPTH_SIZE",
+ "EGL_STENCIL_SIZE",
+ "EGL_CONFIG_CAVEAT",
+ "EGL_CONFIG_ID",
+ "EGL_LEVEL",
+ "EGL_MAX_PBUFFER_HEIGHT",
+ "EGL_MAX_PBUFFER_PIXELS",
+ "EGL_MAX_PBUFFER_WIDTH",
+ "EGL_NATIVE_RENDERABLE",
+ "EGL_NATIVE_VISUAL_ID",
+ "EGL_NATIVE_VISUAL_TYPE",
+ "EGL_PRESERVED_RESOURCES",
+ "EGL_SAMPLES",
+ "EGL_SAMPLE_BUFFERS",
+ "EGL_SURFACE_TYPE",
+ "EGL_TRANSPARENT_TYPE",
+ "EGL_TRANSPARENT_RED_VALUE",
+ "EGL_TRANSPARENT_GREEN_VALUE",
+ "EGL_TRANSPARENT_BLUE_VALUE",
+ "EGL_BIND_TO_TEXTURE_RGB",
+ "EGL_BIND_TO_TEXTURE_RGBA",
+ "EGL_MIN_SWAP_INTERVAL",
+ "EGL_MAX_SWAP_INTERVAL",
+ "EGL_LUMINANCE_SIZE",
+ "EGL_ALPHA_MASK_SIZE",
+ "EGL_COLOR_BUFFER_TYPE",
+ "EGL_RENDERABLE_TYPE",
+ "EGL_CONFORMANT"
+ };
+
+ private static final int[] ATTRIBUTES = new int[] {
+ EGL10.EGL_BUFFER_SIZE,
+ EGL10.EGL_ALPHA_SIZE,
+ EGL10.EGL_BLUE_SIZE,
+ EGL10.EGL_GREEN_SIZE,
+ EGL10.EGL_RED_SIZE,
+ EGL10.EGL_DEPTH_SIZE,
+ EGL10.EGL_STENCIL_SIZE,
+ EGL10.EGL_CONFIG_CAVEAT,
+ EGL10.EGL_CONFIG_ID,
+ EGL10.EGL_LEVEL,
+ EGL10.EGL_MAX_PBUFFER_HEIGHT,
+ EGL10.EGL_MAX_PBUFFER_PIXELS,
+ EGL10.EGL_MAX_PBUFFER_WIDTH,
+ EGL10.EGL_NATIVE_RENDERABLE,
+ EGL10.EGL_NATIVE_VISUAL_ID,
+ EGL10.EGL_NATIVE_VISUAL_TYPE,
+ 0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+ EGL10.EGL_SAMPLES,
+ EGL10.EGL_SAMPLE_BUFFERS,
+ EGL10.EGL_SURFACE_TYPE,
+ EGL10.EGL_TRANSPARENT_TYPE,
+ EGL10.EGL_TRANSPARENT_RED_VALUE,
+ EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+ EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+ 0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+ 0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+ 0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+ 0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+ EGL10.EGL_LUMINANCE_SIZE,
+ EGL10.EGL_ALPHA_MASK_SIZE,
+ EGL10.EGL_COLOR_BUFFER_TYPE,
+ EGL10.EGL_RENDERABLE_TYPE,
+ 0x3042 // EGL10.EGL_CONFORMANT
+ };
+
+ private GLUtils() {}
+
+ public static void checkEglError(String tag, String prompt, EGL10 egl) {
+ int error;
+ while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
+ Log.e(tag, String.format("%s: EGL error: 0x%x", prompt, error));
+ }
+ }
+
+ public static void printConfigs(EGL10 egl, EGLDisplay display,
+ EGLConfig[] configs) {
+ int numConfigs = configs.length;
+ Log.v(TAG, String.format("%d configurations", numConfigs));
+ for (int i = 0; i < numConfigs; i++) {
+ Log.v(TAG, String.format("Configuration %d:\n", i));
+ printConfig(egl, display, configs[i]);
+ }
+ }
+
+ private static void printConfig(EGL10 egl, EGLDisplay display,
+ EGLConfig config) {
+ int[] value = new int[1];
+ for (int i = 0; i < ATTRIBUTES.length; i++) {
+ int attribute = ATTRIBUTES[i];
+ String name = ATTRIBUTES_NAMES[i];
+ if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
+ Log.i(TAG, String.format(" %s: %d\n", name, value[0]));
+ } else {
+ // Log.w(TAG, String.format(" %s: failed\n", name));
+ while (egl.eglGetError() != EGL10.EGL_SUCCESS)
+ ;
+ }
+ }
+ }
+}
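The helpers above are shared by the pancake and OVR EGL plumbing. A minimal usage sketch, assuming egl, display, configs and a local TAG string are in scope (for example inside an EGLConfigChooser or EGLContextFactory implementation):

    GLUtils.checkEglError(TAG, "Before eglCreateContext", egl); // drains and logs pending EGL errors
    if (GLUtils.DEBUG) {
        GLUtils.printConfigs(egl, display, configs); // dumps every attribute listed in ATTRIBUTES_NAMES
    }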
diff --git a/platform/android/java/src/org/godotengine/godot/xr/XRMode.java b/platform/android/java/src/org/godotengine/godot/xr/XRMode.java
new file mode 100644
index 0000000000..cbc8a1e902
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/XRMode.java
@@ -0,0 +1,39 @@
+/*************************************************************************/
+/* XRMode.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr;
+
+/**
+ * Available Godot XR modes.
+ */
+public enum XRMode {
+ PANCAKE, // Regular/flatscreen
+ OVR, // Oculus mobile VR SDK
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrConfigChooser.java b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrConfigChooser.java
new file mode 100644
index 0000000000..ff836a31ca
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrConfigChooser.java
@@ -0,0 +1,112 @@
+/*************************************************************************/
+/* OvrConfigChooser.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.ovr;
+
+import android.opengl.EGLExt;
+import android.opengl.GLSurfaceView;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+/**
+ * EGL config chooser for the Oculus Mobile VR SDK.
+ */
+public class OvrConfigChooser implements GLSurfaceView.EGLConfigChooser {
+
+ private static final int[] CONFIG_ATTRIBS = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8, // Need alpha for the multi-pass timewarp compositor
+ EGL10.EGL_DEPTH_SIZE, 0,
+ EGL10.EGL_STENCIL_SIZE, 0,
+ EGL10.EGL_SAMPLES, 0,
+ EGL10.EGL_NONE
+ };
+
+ @Override
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+ // Do NOT use eglChooseConfig, because the Android EGL code pushes in
+ // multisample flags in eglChooseConfig if the user has selected the "force 4x
+ // MSAA" option in settings, and that is completely wasted for our warp
+ // target.
+ int[] numConfig = new int[1];
+ if (!egl.eglGetConfigs(display, null, 0, numConfig)) {
+ throw new IllegalArgumentException("eglGetConfigs failed.");
+ }
+
+ int configsCount = numConfig[0];
+ if (configsCount <= 0) {
+ throw new IllegalArgumentException("No configs match configSpec");
+ }
+
+ EGLConfig[] configs = new EGLConfig[configsCount];
+ if (!egl.eglGetConfigs(display, configs, configsCount, numConfig)) {
+ throw new IllegalArgumentException("eglGetConfigs #2 failed.");
+ }
+
+ int[] value = new int[1];
+ for (EGLConfig config : configs) {
+ egl.eglGetConfigAttrib(display, config, EGL10.EGL_RENDERABLE_TYPE, value);
+ if ((value[0] & EGLExt.EGL_OPENGL_ES3_BIT_KHR) != EGLExt.EGL_OPENGL_ES3_BIT_KHR) {
+ continue;
+ }
+
+ // The pbuffer config also needs to be compatible with normal window rendering
+ // so it can share textures with the window context.
+ egl.eglGetConfigAttrib(display, config, EGL10.EGL_SURFACE_TYPE, value);
+ if ((value[0] & (EGL10.EGL_WINDOW_BIT | EGL10.EGL_PBUFFER_BIT)) != (EGL10.EGL_WINDOW_BIT | EGL10.EGL_PBUFFER_BIT)) {
+ continue;
+ }
+
+ // Check each attribute in CONFIG_ATTRIBS (which are the attributes we care about)
+ // and ensure the value in config matches.
+ int attribIndex = 0;
+ while (CONFIG_ATTRIBS[attribIndex] != EGL10.EGL_NONE) {
+ egl.eglGetConfigAttrib(display, config, CONFIG_ATTRIBS[attribIndex], value);
+ if (value[0] != CONFIG_ATTRIBS[attribIndex + 1]) {
+				// The attribute's value does not match this config's value.
+				// Start checking the next config.
+ break;
+ }
+
+ // Step by two because CONFIG_ATTRIBS is in key/value pairs.
+ attribIndex += 2;
+ }
+
+ if (CONFIG_ATTRIBS[attribIndex] == EGL10.EGL_NONE) {
+ // All relevant attributes match, set the config and stop checking the rest.
+ return config;
+ }
+ }
+ return null;
+ }
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrContextFactory.java b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrContextFactory.java
new file mode 100644
index 0000000000..5f6da8c672
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrContextFactory.java
@@ -0,0 +1,58 @@
+/*************************************************************************/
+/* OvrContextFactory.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.ovr;
+
+import android.opengl.EGL14;
+import android.opengl.GLSurfaceView;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+
+/**
+ * EGL Context factory for the Oculus mobile VR SDK.
+ */
+public class OvrContextFactory implements GLSurfaceView.EGLContextFactory {
+
+ private static final int[] CONTEXT_ATTRIBS = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, 3, EGL10.EGL_NONE
+ };
+
+ @Override
+ public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
+ return egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, CONTEXT_ATTRIBS);
+ }
+
+ @Override
+ public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
+ egl.eglDestroyContext(display, context);
+ }
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrWindowSurfaceFactory.java b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrWindowSurfaceFactory.java
new file mode 100644
index 0000000000..f1e38c35d8
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/ovr/OvrWindowSurfaceFactory.java
@@ -0,0 +1,60 @@
+/*************************************************************************/
+/* OvrWindowSurfaceFactory.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.ovr;
+
+import android.opengl.GLSurfaceView;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * EGL window surface factory for the Oculus mobile VR SDK.
+ */
+public class OvrWindowSurfaceFactory implements GLSurfaceView.EGLWindowSurfaceFactory {
+
+ private final static int[] SURFACE_ATTRIBS = {
+ EGL10.EGL_WIDTH, 16,
+ EGL10.EGL_HEIGHT, 16,
+ EGL10.EGL_NONE
+ };
+
+ @Override
+ public EGLSurface createWindowSurface(EGL10 egl, EGLDisplay display, EGLConfig config,
+ Object nativeWindow) {
+ return egl.eglCreatePbufferSurface(display, config, SURFACE_ATTRIBS);
+ }
+
+ @Override
+ public void destroySurface(EGL10 egl, EGLDisplay display, EGLSurface surface) {
+ egl.eglDestroySurface(display, surface);
+ }
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeConfigChooser.java b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeConfigChooser.java
new file mode 100644
index 0000000000..ac19a09e76
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeConfigChooser.java
@@ -0,0 +1,151 @@
+/*************************************************************************/
+/* PancakeConfigChooser.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.pancake;
+
+import android.opengl.GLSurfaceView;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+import org.godotengine.godot.utils.GLUtils;
+
+/**
+ * Used to select the egl config for pancake games.
+ */
+public class PancakeConfigChooser implements GLSurfaceView.EGLConfigChooser {
+
+ private static final String TAG = PancakeConfigChooser.class.getSimpleName();
+
+ private int[] mValue = new int[1];
+
+ /* This EGL config specification is used to specify 2.0 rendering.
+ * We use a minimum size of 4 bits for red/green/blue, but will
+ * perform actual matching in chooseConfig() below.
+ */
+ private static int EGL_OPENGL_ES2_BIT = 4;
+ private static int[] s_configAttribs2 = {
+ EGL10.EGL_RED_SIZE, 4,
+ EGL10.EGL_GREEN_SIZE, 4,
+ EGL10.EGL_BLUE_SIZE, 4,
+ // EGL10.EGL_DEPTH_SIZE, 16,
+ // EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ private static int[] s_configAttribs3 = {
+ EGL10.EGL_RED_SIZE, 4,
+ EGL10.EGL_GREEN_SIZE, 4,
+ EGL10.EGL_BLUE_SIZE, 4,
+ // EGL10.EGL_DEPTH_SIZE, 16,
+ // EGL10.EGL_STENCIL_SIZE, EGL10.EGL_DONT_CARE,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, //apparently there is no EGL_OPENGL_ES3_BIT
+ EGL10.EGL_NONE
+ };
+
+ public PancakeConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
+ mRedSize = r;
+ mGreenSize = g;
+ mBlueSize = b;
+ mAlphaSize = a;
+ mDepthSize = depth;
+ mStencilSize = stencil;
+ }
+
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+
+ /* Get the number of minimally matching EGL configurations
+ */
+ int[] num_config = new int[1];
+ egl.eglChooseConfig(display, GLUtils.use_gl3 ? s_configAttribs3 : s_configAttribs2, null, 0, num_config);
+
+ int numConfigs = num_config[0];
+
+ if (numConfigs <= 0) {
+ throw new IllegalArgumentException("No configs match configSpec");
+ }
+
+ /* Allocate then read the array of minimally matching EGL configs
+ */
+ EGLConfig[] configs = new EGLConfig[numConfigs];
+ egl.eglChooseConfig(display, GLUtils.use_gl3 ? s_configAttribs3 : s_configAttribs2, configs, numConfigs, num_config);
+
+ if (GLUtils.DEBUG) {
+ GLUtils.printConfigs(egl, display, configs);
+ }
+ /* Now return the "best" one
+ */
+ return chooseConfig(egl, display, configs);
+ }
+
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+ EGLConfig[] configs) {
+ for (EGLConfig config : configs) {
+ int d = findConfigAttrib(egl, display, config,
+ EGL10.EGL_DEPTH_SIZE, 0);
+ int s = findConfigAttrib(egl, display, config,
+ EGL10.EGL_STENCIL_SIZE, 0);
+
+ // We need at least mDepthSize and mStencilSize bits
+ if (d < mDepthSize || s < mStencilSize)
+ continue;
+
+ // We want an *exact* match for red/green/blue/alpha
+ int r = findConfigAttrib(egl, display, config,
+ EGL10.EGL_RED_SIZE, 0);
+ int g = findConfigAttrib(egl, display, config,
+ EGL10.EGL_GREEN_SIZE, 0);
+ int b = findConfigAttrib(egl, display, config,
+ EGL10.EGL_BLUE_SIZE, 0);
+ int a = findConfigAttrib(egl, display, config,
+ EGL10.EGL_ALPHA_SIZE, 0);
+
+ if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
+ return config;
+ }
+ return null;
+ }
+
+ private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+ EGLConfig config, int attribute, int defaultValue) {
+
+ if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+ return mValue[0];
+ }
+ return defaultValue;
+ }
+
+ // Subclasses can adjust these values:
+ protected int mRedSize;
+ protected int mGreenSize;
+ protected int mBlueSize;
+ protected int mAlphaSize;
+ protected int mDepthSize;
+ protected int mStencilSize;
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeContextFactory.java b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeContextFactory.java
new file mode 100644
index 0000000000..aca6ffdba6
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeContextFactory.java
@@ -0,0 +1,81 @@
+/*************************************************************************/
+/* PancakeContextFactory.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.pancake;
+
+import android.opengl.GLSurfaceView;
+import android.util.Log;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import org.godotengine.godot.GodotLib;
+import org.godotengine.godot.utils.GLUtils;
+
+/**
+ * Factory used to setup the opengl context for pancake games.
+ */
+public class PancakeContextFactory implements GLSurfaceView.EGLContextFactory {
+ private static final String TAG = PancakeContextFactory.class.getSimpleName();
+
+ private static final int _EGL_CONTEXT_FLAGS_KHR = 0x30FC;
+ private static final int _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR = 0x00000001;
+
+ private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
+ String driver_name = GodotLib.getGlobal("rendering/quality/driver/driver_name");
+ if (GLUtils.use_gl3 && !driver_name.equals("GLES3")) {
+ GLUtils.use_gl3 = false;
+ }
+ if (GLUtils.use_gl3)
+ Log.w(TAG, "creating OpenGL ES 3.0 context :");
+ else
+ Log.w(TAG, "creating OpenGL ES 2.0 context :");
+
+ GLUtils.checkEglError(TAG, "Before eglCreateContext", egl);
+ EGLContext context;
+ if (GLUtils.use_debug_opengl) {
+ int[] attrib_list2 = { EGL_CONTEXT_CLIENT_VERSION, 2, _EGL_CONTEXT_FLAGS_KHR, _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, EGL10.EGL_NONE };
+ int[] attrib_list3 = { EGL_CONTEXT_CLIENT_VERSION, 3, _EGL_CONTEXT_FLAGS_KHR, _EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR, EGL10.EGL_NONE };
+ context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, GLUtils.use_gl3 ? attrib_list3 : attrib_list2);
+ } else {
+ int[] attrib_list2 = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+ int[] attrib_list3 = { EGL_CONTEXT_CLIENT_VERSION, 3, EGL10.EGL_NONE };
+ context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, GLUtils.use_gl3 ? attrib_list3 : attrib_list2);
+ }
+ GLUtils.checkEglError(TAG, "After eglCreateContext", egl);
+ return context;
+ }
+
+ public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
+ egl.eglDestroyContext(display, context);
+ }
+}
diff --git a/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeFallbackConfigChooser.java b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeFallbackConfigChooser.java
new file mode 100644
index 0000000000..e19f218916
--- /dev/null
+++ b/platform/android/java/src/org/godotengine/godot/xr/pancake/PancakeFallbackConfigChooser.java
@@ -0,0 +1,61 @@
+/*************************************************************************/
+/* PancakeFallbackConfigChooser.java */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+package org.godotengine.godot.xr.pancake;
+
+import android.util.Log;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLDisplay;
+import org.godotengine.godot.utils.GLUtils;
+
+/* Fallback if 32bit View is not supported*/
+public class PancakeFallbackConfigChooser extends PancakeConfigChooser {
+
+ private static final String TAG = PancakeFallbackConfigChooser.class.getSimpleName();
+
+ private PancakeConfigChooser fallback;
+
+ public PancakeFallbackConfigChooser(int r, int g, int b, int a, int depth, int stencil, PancakeConfigChooser fallback) {
+ super(r, g, b, a, depth, stencil);
+ this.fallback = fallback;
+ }
+
+ @Override
+ public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
+ EGLConfig ec = super.chooseConfig(egl, display, configs);
+ if (ec == null) {
+ Log.w(TAG, "Trying ConfigChooser fallback");
+ ec = fallback.chooseConfig(egl, display, configs);
+ GLUtils.use_32 = false;
+ }
+ return ec;
+ }
+}
diff --git a/platform/android/java_godot_wrapper.cpp b/platform/android/java_godot_wrapper.cpp
index e92d4437b1..339b14974c 100644
--- a/platform/android/java_godot_wrapper.cpp
+++ b/platform/android/java_godot_wrapper.cpp
@@ -60,6 +60,8 @@ GodotJavaWrapper::GodotJavaWrapper(JNIEnv *p_env, jobject p_godot_instance) {
_set_clipboard = p_env->GetMethodID(cls, "setClipboard", "(Ljava/lang/String;)V");
_request_permission = p_env->GetMethodID(cls, "requestPermission", "(Ljava/lang/String;)Z");
_init_input_devices = p_env->GetMethodID(cls, "initInputDevices", "()V");
+ _get_surface = p_env->GetMethodID(cls, "getSurface", "()Landroid/view/Surface;");
+ _is_activity_resumed = p_env->GetMethodID(cls, "isActivityResumed", "()Z");
}
GodotJavaWrapper::~GodotJavaWrapper() {
@@ -191,3 +193,21 @@ void GodotJavaWrapper::init_input_devices() {
env->CallVoidMethod(godot_instance, _init_input_devices);
}
}
+
+jobject GodotJavaWrapper::get_surface() {
+ if (_get_surface) {
+ JNIEnv *env = ThreadAndroid::get_env();
+ return env->CallObjectMethod(godot_instance, _get_surface);
+ } else {
+ return NULL;
+ }
+}
+
+bool GodotJavaWrapper::is_activity_resumed() {
+ if (_is_activity_resumed) {
+ JNIEnv *env = ThreadAndroid::get_env();
+ return env->CallBooleanMethod(godot_instance, _is_activity_resumed);
+ } else {
+ return false;
+ }
+}
diff --git a/platform/android/java_godot_wrapper.h b/platform/android/java_godot_wrapper.h
index be4f109d8c..82c2a5d122 100644
--- a/platform/android/java_godot_wrapper.h
+++ b/platform/android/java_godot_wrapper.h
@@ -55,6 +55,8 @@ private:
jmethodID _set_clipboard = 0;
jmethodID _request_permission = 0;
jmethodID _init_input_devices = 0;
+ jmethodID _get_surface = 0;
+ jmethodID _is_activity_resumed = 0;
public:
GodotJavaWrapper(JNIEnv *p_env, jobject p_godot_instance);
@@ -78,6 +80,8 @@ public:
void set_clipboard(const String &p_text);
bool request_permission(const String &p_name);
void init_input_devices();
+ jobject get_surface();
+ bool is_activity_resumed();
};
#endif /* !JAVA_GODOT_WRAPPER_H */
diff --git a/platform/android/os_android.cpp b/platform/android/os_android.cpp
index f8076dfd2d..ebc319e57d 100644
--- a/platform/android/os_android.cpp
+++ b/platform/android/os_android.cpp
@@ -176,6 +176,9 @@ Error OS_Android::initialize(const VideoMode &p_desired, int p_video_driver, int
input = memnew(InputDefault);
input->set_fallback_mapping("Default Android Gamepad");
+ ///@TODO implement a subclass for Android and instantiate that instead
+ camera_server = memnew(CameraServer);
+
//power_manager = memnew(PowerAndroid);
return OK;
@@ -193,6 +196,9 @@ void OS_Android::delete_main_loop() {
}
void OS_Android::finalize() {
+
+ memdelete(camera_server);
+
memdelete(input);
}
diff --git a/platform/android/os_android.h b/platform/android/os_android.h
index 4dbc96f4da..e74d4cfd43 100644
--- a/platform/android/os_android.h
+++ b/platform/android/os_android.h
@@ -39,6 +39,7 @@
#include "main/input_default.h"
//#include "power_android.h"
#include "servers/audio_server.h"
+#include "servers/camera_server.h"
#include "servers/visual/rasterizer.h"
class GodotJavaWrapper;
@@ -77,6 +78,8 @@ private:
VisualServer *visual_server;
+ CameraServer *camera_server;
+
mutable String data_dir_cache;
//AudioDriverAndroid audio_driver_android;
diff --git a/platform/haiku/os_haiku.cpp b/platform/haiku/os_haiku.cpp
index 438b50053f..9c07535c85 100644
--- a/platform/haiku/os_haiku.cpp
+++ b/platform/haiku/os_haiku.cpp
@@ -133,6 +133,8 @@ Error OS_Haiku::initialize(const VideoMode &p_desired, int p_video_driver, int p
window->Show();
visual_server->init();
+ camera_server = memnew(CameraServer);
+
AudioDriverManager::initialize(p_audio_driver);
return OK;
@@ -148,6 +150,8 @@ void OS_Haiku::finalize() {
visual_server->finish();
memdelete(visual_server);
+ memdelete(camera_server);
+
memdelete(input);
#if defined(OPENGL_ENABLED)
diff --git a/platform/haiku/os_haiku.h b/platform/haiku/os_haiku.h
index e1d4cf8d87..70d78a1978 100644
--- a/platform/haiku/os_haiku.h
+++ b/platform/haiku/os_haiku.h
@@ -38,6 +38,7 @@
#include "haiku_direct_window.h"
#include "main/input_default.h"
#include "servers/audio_server.h"
+#include "servers/camera_server.h"
#include "servers/visual_server.h"
class OS_Haiku : public OS_Unix {
@@ -49,6 +50,7 @@ private:
VisualServer *visual_server;
VideoMode current_video_mode;
int video_driver_index;
+ CameraServer *camera_server;
#ifdef MEDIA_KIT_ENABLED
AudioDriverMediaKit driver_media_kit;
diff --git a/platform/iphone/SCsub b/platform/iphone/SCsub
index fa1b124561..85ba56165b 100644
--- a/platform/iphone/SCsub
+++ b/platform/iphone/SCsub
@@ -14,6 +14,7 @@ iphone_lib = [
'in_app_store.mm',
'icloud.mm',
'ios.mm',
+ 'camera_ios.mm',
]
env_ios = env.Clone()
diff --git a/platform/iphone/camera_ios.h b/platform/iphone/camera_ios.h
new file mode 100644
index 0000000000..e5d62af65d
--- /dev/null
+++ b/platform/iphone/camera_ios.h
@@ -0,0 +1,47 @@
+/*************************************************************************/
+/* camera_ios.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef CAMERAIOS_H
+#define CAMERAIOS_H
+
+///@TODO this is a near duplicate of CameraOSX, we should find a way to combine those to minimise code duplication!!!!
+// If you fix something here, make sure you fix it there as wel!
+
+#include "servers/camera_server.h"
+
+class CameraIOS : public CameraServer {
+public:
+ CameraIOS();
+ ~CameraIOS();
+
+ void update_feeds();
+};
+
+#endif /* CAMERAIOS_H */ \ No newline at end of file
diff --git a/platform/iphone/camera_ios.mm b/platform/iphone/camera_ios.mm
new file mode 100644
index 0000000000..3994e9366a
--- /dev/null
+++ b/platform/iphone/camera_ios.mm
@@ -0,0 +1,429 @@
+/*************************************************************************/
+/* camera_ios.mm */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+///@TODO this is a near duplicate of CameraOSX, we should find a way to combine those to minimise code duplication!!!!
+// If you fix something here, make sure you fix it there as wel!
+
+#include "camera_ios.h"
+#include "servers/camera/camera_feed.h"
+
+#import <AVFoundation/AVFoundation.h>
+#import <UIKit/UIKit.h>
+
+//////////////////////////////////////////////////////////////////////////
+// MyCaptureSession - This is a little helper class so we can capture our frames
+
+@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
+ Ref<CameraFeed> feed;
+ size_t width[2];
+ size_t height[2];
+ PoolVector<uint8_t> img_data[2];
+
+ AVCaptureDeviceInput *input;
+ AVCaptureVideoDataOutput *output;
+}
+
+@end
+
+@implementation MyCaptureSession
+
+- (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device {
+ if (self = [super init]) {
+ NSError *error;
+ feed = p_feed;
+ width[0] = 0;
+ height[0] = 0;
+ width[1] = 0;
+ height[1] = 0;
+
+ // prepare our device
+ [p_device lockForConfiguration:&error];
+
+ [p_device setFocusMode:AVCaptureFocusModeLocked];
+ [p_device setExposureMode:AVCaptureExposureModeLocked];
+ [p_device setWhiteBalanceMode:AVCaptureWhiteBalanceModeLocked];
+
+ [p_device unlockForConfiguration];
+
+ [self beginConfiguration];
+
+ // setup our capture
+ self.sessionPreset = AVCaptureSessionPreset1280x720;
+
+ input = [AVCaptureDeviceInput deviceInputWithDevice:p_device error:&error];
+ if (!input) {
+ print_line("Couldn't get input device for camera");
+ } else {
+ [self addInput:input];
+ }
+
+ output = [AVCaptureVideoDataOutput new];
+ if (!output) {
+ print_line("Couldn't get output device for camera");
+ } else {
+ NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
+ output.videoSettings = settings;
+
+ // discard if the data output queue is blocked (as we process the still image)
+ [output setAlwaysDiscardsLateVideoFrames:YES];
+
+ // now set ourselves as the delegate to receive new frames. Note that we're doing this on the main thread at the moment, we may need to change this..
+ [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
+
+ [self addOutput:output];
+ }
+
+ [self commitConfiguration];
+
+ // kick off our session..
+ [self startRunning];
+ };
+ return self;
+}
+
+- (void)cleanup {
+ // stop running
+ [self stopRunning];
+
+ // cleanup
+ [self beginConfiguration];
+
+ if (input) {
+ [self removeInput:input];
+ // don't release this
+ input = nil;
+ }
+
+ if (output) {
+ [self removeOutput:output];
+ [output setSampleBufferDelegate:nil queue:NULL];
+ [output release];
+ output = nil;
+ }
+
+ [self commitConfiguration];
+}
+
+- (void)dealloc {
+ // bye bye
+ [super dealloc];
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
+ // This gets called every time our camera has a new image for us to process.
+ // May need to investigate in a way to throttle this if we get more images then we're rendering frames..
+
+ // For now, version 1, we're just doing the bare minimum to make this work...
+
+ CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ // int width = CVPixelBufferGetWidth(pixelBuffer);
+ // int height = CVPixelBufferGetHeight(pixelBuffer);
+
+ // It says that we need to lock this on the documentation pages but it's not in the samples
+ // need to lock our base address so we can access our pixel buffers, better safe then sorry?
+ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ // get our buffers
+ unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
+ unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
+ if (dataY == NULL) {
+ print_line("Couldn't access Y pixel buffer data");
+ } else if (dataCbCr == NULL) {
+ print_line("Couldn't access CbCr pixel buffer data");
+ } else {
+ UIDeviceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
+ Ref<Image> img[2];
+
+ {
+ // do Y
+ int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
+ int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+ int _bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
+
+ if ((width[0] != new_width) || (height[0] != new_height)) {
+ // printf("Camera Y plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
+
+ width[0] = new_width;
+ height[0] = new_height;
+ img_data[0].resize(new_width * new_height);
+ }
+
+ PoolVector<uint8_t>::Write w = img_data[0].write();
+ memcpy(w.ptr(), dataY, new_width * new_height);
+
+ img[0].instance();
+ img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
+ }
+
+ {
+ // do CbCr
+ int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
+ int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
+ int bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
+
+ if ((width[1] != new_width) || (height[1] != new_height)) {
+ // printf("Camera CbCr plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
+
+ width[1] = new_width;
+ height[1] = new_height;
+ img_data[1].resize(2 * new_width * new_height);
+ }
+
+ PoolVector<uint8_t>::Write w = img_data[1].write();
+ memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);
+
+ ///TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
+ img[1].instance();
+ img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
+ }
+
+ // set our texture...
+ feed->set_YCbCr_imgs(img[0], img[1]);
+
+ // update our matrix to match the orientation, note, before changing anything
+ // here, be aware that the project orientation settings must match your xcode
+ // settings or this will go wrong!
+ Transform2D display_transform;
+ switch (orientation) {
+ case UIInterfaceOrientationPortrait: {
+ display_transform = Transform2D(0.0, -1.0, -1.0, 0.0, 1.0, 1.0);
+ } break;
+ case UIInterfaceOrientationLandscapeRight: {
+ display_transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
+ } break;
+ case UIInterfaceOrientationLandscapeLeft: {
+ display_transform = Transform2D(-1.0, 0.0, 0.0, 1.0, 1.0, 0.0);
+ } break;
+ default: {
+ display_transform = Transform2D(0.0, 1.0, 1.0, 0.0, 0.0, 0.0);
+ } break;
+ }
+
+ //TODO: this is correct for the camera on the back, I have a feeling this needs to be inversed for the camera on the front!
+ feed->set_transform(display_transform);
+ }
+
+ // and unlock
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+}
+
+@end
+
+//////////////////////////////////////////////////////////////////////////
+// CameraFeedIOS - Subclass for camera feeds in iOS
+
+class CameraFeedIOS : public CameraFeed {
+private:
+ bool is_arkit; // if true this feed is updated through ARKit (should only have one and not yet implemented)
+ AVCaptureDevice *device;
+ MyCaptureSession *capture_session;
+
+public:
+ bool get_is_arkit() const;
+ AVCaptureDevice *get_device() const;
+
+ CameraFeedIOS();
+ ~CameraFeedIOS();
+
+ void set_device(AVCaptureDevice *p_device);
+
+ bool activate_feed();
+ void deactivate_feed();
+};
+
+bool CameraFeedIOS::get_is_arkit() const {
+ return is_arkit;
+};
+
+AVCaptureDevice *CameraFeedIOS::get_device() const {
+ return device;
+};
+
+CameraFeedIOS::CameraFeedIOS() {
+ capture_session = NULL;
+ device = NULL;
+ transform = Transform2D(1.0, 0.0, 0.0, 1.0, 0.0, 0.0); /* should re-orientate this based on device orientation */
+};
+
+void CameraFeedIOS::set_device(AVCaptureDevice *p_device) {
+ device = p_device;
+ if (device == NULL) {
+ ///@TODO finish this!
+ is_arkit = true;
+ name = "ARKit";
+ position = CameraFeed::FEED_BACK;
+ } else {
+ is_arkit = false;
+ [device retain];
+
+ // get some info
+ NSString *device_name = p_device.localizedName;
+ name = device_name.UTF8String;
+ position = CameraFeed::FEED_UNSPECIFIED;
+ if ([p_device position] == AVCaptureDevicePositionBack) {
+ position = CameraFeed::FEED_BACK;
+ } else if ([p_device position] == AVCaptureDevicePositionFront) {
+ position = CameraFeed::FEED_FRONT;
+ };
+ };
+};
+
+CameraFeedIOS::~CameraFeedIOS() {
+ if (capture_session != NULL) {
+ [capture_session release];
+ capture_session = NULL;
+ };
+
+ if (device != NULL) {
+ [device release];
+ device = NULL;
+ };
+};
+
+bool CameraFeedIOS::activate_feed() {
+ if (is_arkit) {
+ ///@TODO to implement;
+ } else {
+ if (capture_session) {
+ // already recording!
+ } else {
+ // start camera capture
+ capture_session = [[MyCaptureSession alloc] initForFeed:this andDevice:device];
+ };
+ };
+
+ return true;
+};
+
+void CameraFeedIOS::deactivate_feed() {
+ // end camera capture if we have one
+ if (capture_session) {
+ [capture_session cleanup];
+ [capture_session release];
+ capture_session = NULL;
+ };
+};
+
+//////////////////////////////////////////////////////////////////////////
+// MyDeviceNotifications - This is a little helper class gets notifications
+// when devices are connected/disconnected
+
+@interface MyDeviceNotifications : NSObject {
+ CameraIOS *camera_server;
+}
+
+@end
+
+@implementation MyDeviceNotifications
+
+- (void)devices_changed:(NSNotification *)notification {
+ camera_server->update_feeds();
+}
+
+- (id)initForServer:(CameraIOS *)p_server {
+ if (self = [super init]) {
+ camera_server = p_server;
+
+ [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasConnectedNotification object:nil];
+ [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
+ };
+ return self;
+}
+
+- (void)dealloc {
+ // remove notifications
+ [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
+ [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
+
+ [super dealloc];
+}
+
+@end
+
+MyDeviceNotifications *device_notifications = nil;
+
+//////////////////////////////////////////////////////////////////////////
+// CameraIOS - Subclass for our camera server on iPhone
+
+void CameraIOS::update_feeds() {
+ // this way of doing things is deprecated but still works,
+ // rewrite to using AVCaptureDeviceDiscoverySession
+
+ AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:[NSArray arrayWithObjects:AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
+
+ // remove devices that are gone..
+ for (int i = feeds.size() - 1; i >= 0; i--) {
+ Ref<CameraFeedIOS> feed = (Ref<CameraFeedIOS>)feeds[i];
+
+ if (feed->get_is_arkit()) {
+ // ignore, this is our arkit entry
+ } else if (![session.devices containsObject:feed->get_device()]) {
+ // remove it from our array, this will also destroy it ;)
+ remove_feed(feed);
+ };
+ };
+
+ // add new devices..
+ for (AVCaptureDevice *device in session.devices) {
+ bool found = false;
+ for (int i = 0; i < feeds.size() && !found; i++) {
+ Ref<CameraFeedIOS> feed = (Ref<CameraFeedIOS>)feeds[i];
+ if (feed->get_device() == device) {
+ found = true;
+ };
+ };
+
+ if (!found) {
+ Ref<CameraFeedIOS> newfeed;
+ newfeed.instance();
+ newfeed->set_device(device);
+ add_feed(newfeed);
+ };
+ };
+};
+
+CameraIOS::CameraIOS() {
+ [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
+ completionHandler:^(BOOL granted) {
+ if (granted) {
+ // Find available cameras we have at this time
+ update_feeds();
+
+ // should only have one of these....
+ device_notifications = [[MyDeviceNotifications alloc] initForServer:this];
+ } else {
+ print_line("No access to cameras!");
+ }
+ }];
+};
+
+CameraIOS::~CameraIOS() {
+ [device_notifications release];
+};
diff --git a/platform/iphone/detect.py b/platform/iphone/detect.py
index d9f710e456..daf4f405fd 100644
--- a/platform/iphone/detect.py
+++ b/platform/iphone/detect.py
@@ -144,6 +144,7 @@ def configure(env):
'-framework', 'CoreAudio',
'-framework', 'CoreGraphics',
'-framework', 'CoreMedia',
+ '-framework', 'CoreVideo',
'-framework', 'CoreMotion',
'-framework', 'Foundation',
'-framework', 'GameController',
diff --git a/platform/iphone/os_iphone.cpp b/platform/iphone/os_iphone.cpp
index 6a65cadf09..f5fce66059 100644
--- a/platform/iphone/os_iphone.cpp
+++ b/platform/iphone/os_iphone.cpp
@@ -167,6 +167,8 @@ Error OSIPhone::initialize(const VideoMode &p_desired, int p_video_driver, int p
input = memnew(InputDefault);
+ camera_server = memnew(CameraIOS);
+
#ifdef GAME_CENTER_ENABLED
game_center = memnew(GameCenter);
Engine::get_singleton()->add_singleton(Engine::Singleton("GameCenter", game_center));
@@ -361,6 +363,11 @@ void OSIPhone::finalize() {
if (main_loop) // should not happen?
memdelete(main_loop);
+ if (camera_server) {
+ memdelete(camera_server);
+ camera_server = NULL;
+ }
+
visual_server->finish();
memdelete(visual_server);
// memdelete(rasterizer);
diff --git a/platform/iphone/os_iphone.h b/platform/iphone/os_iphone.h
index 017125209c..c16c29a858 100644
--- a/platform/iphone/os_iphone.h
+++ b/platform/iphone/os_iphone.h
@@ -37,6 +37,7 @@
#include "drivers/coreaudio/audio_driver_coreaudio.h"
#include "drivers/unix/os_unix.h"
+#include "camera_ios.h"
#include "game_center.h"
#include "icloud.h"
#include "in_app_store.h"
@@ -60,6 +61,8 @@ private:
AudioDriverCoreAudio audio_driver;
+ CameraServer *camera_server;
+
#ifdef GAME_CENTER_ENABLED
GameCenter *game_center;
#endif
diff --git a/platform/javascript/audio_driver_javascript.cpp b/platform/javascript/audio_driver_javascript.cpp
index 11104007e2..163826f828 100644
--- a/platform/javascript/audio_driver_javascript.cpp
+++ b/platform/javascript/audio_driver_javascript.cpp
@@ -99,7 +99,7 @@ Error AudioDriverJavaScript::init() {
return FAILED;
}
- if (!internal_buffer || memarr_len(internal_buffer) != buffer_length * channel_count) {
+ if (!internal_buffer || (int)memarr_len(internal_buffer) != buffer_length * channel_count) {
if (internal_buffer)
memdelete_arr(internal_buffer);
internal_buffer = memnew_arr(float, buffer_length *channel_count);
diff --git a/platform/javascript/os_javascript.cpp b/platform/javascript/os_javascript.cpp
index e7981bf576..d96ffc3a55 100644
--- a/platform/javascript/os_javascript.cpp
+++ b/platform/javascript/os_javascript.cpp
@@ -70,6 +70,20 @@ static bool is_canvas_focused() {
/* clang-format on */
}
+static Point2 correct_canvas_position(int x, int y) {
+ int canvas_width;
+ int canvas_height;
+ emscripten_get_canvas_element_size(NULL, &canvas_width, &canvas_height);
+
+ double element_width;
+ double element_height;
+ emscripten_get_element_css_size(NULL, &element_width, &element_height);
+
+ x = (int)(canvas_width / element_width * x);
+ y = (int)(canvas_height / element_height * y);
+ return Point2(x, y);
+}
+
static bool cursor_inside_canvas = true;
EM_BOOL OS_JavaScript::fullscreen_change_callback(int p_event_type, const EmscriptenFullscreenChangeEvent *p_event, void *p_user_data) {
@@ -285,7 +299,7 @@ EM_BOOL OS_JavaScript::mouse_button_callback(int p_event_type, const EmscriptenM
Ref<InputEventMouseButton> ev;
ev.instance();
ev->set_pressed(p_event_type == EMSCRIPTEN_EVENT_MOUSEDOWN);
- ev->set_position(Point2(p_event->canvasX, p_event->canvasY));
+ ev->set_position(correct_canvas_position(p_event->canvasX, p_event->canvasY));
ev->set_global_position(ev->get_position());
dom2godot_mod(p_event, ev);
switch (p_event->button) {
@@ -349,7 +363,7 @@ EM_BOOL OS_JavaScript::mousemove_callback(int p_event_type, const EmscriptenMous
OS_JavaScript *os = get_singleton();
int input_mask = os->input->get_mouse_button_mask();
- Point2 pos = Point2(p_event->canvasX, p_event->canvasY);
+ Point2 pos = correct_canvas_position(p_event->canvasX, p_event->canvasY);
// For motion outside the canvas, only read mouse movement if dragging
// started inside the canvas; imitating desktop app behaviour.
if (!cursor_inside_canvas && !input_mask)
@@ -666,7 +680,7 @@ EM_BOOL OS_JavaScript::touch_press_callback(int p_event_type, const EmscriptenTo
if (!touch.isChanged)
continue;
ev->set_index(touch.identifier);
- ev->set_position(Point2(touch.canvasX, touch.canvasY));
+ ev->set_position(correct_canvas_position(touch.canvasX, touch.canvasY));
os->touches[i] = ev->get_position();
ev->set_pressed(p_event_type == EMSCRIPTEN_EVENT_TOUCHSTART);
@@ -691,7 +705,7 @@ EM_BOOL OS_JavaScript::touchmove_callback(int p_event_type, const EmscriptenTouc
if (!touch.isChanged)
continue;
ev->set_index(touch.identifier);
- ev->set_position(Point2(touch.canvasX, touch.canvasY));
+ ev->set_position(correct_canvas_position(touch.canvasX, touch.canvasY));
Point2 &prev = os->touches[i];
ev->set_relative(ev->get_position() - prev);
prev = ev->get_position();
@@ -942,6 +956,8 @@ Error OS_JavaScript::initialize(const VideoMode &p_desired, int p_video_driver,
VisualServer *visual_server = memnew(VisualServerRaster());
input = memnew(InputDefault);
+ camera_server = memnew(CameraServer);
+
EMSCRIPTEN_RESULT result;
#define EM_CHECK(ev) \
if (result != EMSCRIPTEN_RESULT_SUCCESS) \
@@ -1076,6 +1092,7 @@ void OS_JavaScript::delete_main_loop() {
void OS_JavaScript::finalize() {
+ memdelete(camera_server);
memdelete(input);
}
diff --git a/platform/javascript/os_javascript.h b/platform/javascript/os_javascript.h
index 27b23a4673..9635465c0d 100644
--- a/platform/javascript/os_javascript.h
+++ b/platform/javascript/os_javascript.h
@@ -35,6 +35,7 @@
#include "drivers/unix/os_unix.h"
#include "main/input_default.h"
#include "servers/audio_server.h"
+#include "servers/camera_server.h"
#include "servers/visual/rasterizer.h"
#include <emscripten/html5.h>
@@ -65,6 +66,8 @@ class OS_JavaScript : public OS_Unix {
int64_t sync_wait_time;
int64_t last_sync_check_time;
+ CameraServer *camera_server;
+
static EM_BOOL fullscreen_change_callback(int p_event_type, const EmscriptenFullscreenChangeEvent *p_event, void *p_user_data);
static EM_BOOL keydown_callback(int p_event_type, const EmscriptenKeyboardEvent *p_event, void *p_user_data);
diff --git a/platform/osx/SCsub b/platform/osx/SCsub
index e15b4339a7..9620863b96 100644
--- a/platform/osx/SCsub
+++ b/platform/osx/SCsub
@@ -13,6 +13,7 @@ files = [
'dir_access_osx.mm',
'joypad_osx.cpp',
'power_osx.cpp',
+ 'camera_osx.mm',
]
prog = env.add_program('#bin/godot', files)
diff --git a/platform/osx/camera_osx.h b/platform/osx/camera_osx.h
new file mode 100644
index 0000000000..ed7a1f0a73
--- /dev/null
+++ b/platform/osx/camera_osx.h
@@ -0,0 +1,47 @@
+/*************************************************************************/
+/* camera_osx.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef CAMERAOSX_H
+#define CAMERAOSX_H
+
+///@TODO this is a near duplicate of CameraIOS, we should find a way to combine those to minimise code duplication!!!!
+// If you fix something here, make sure you fix it there as wel!
+
+#include "servers/camera_server.h"
+
+class CameraOSX : public CameraServer {
+public:
+ CameraOSX();
+ ~CameraOSX();
+
+ void update_feeds();
+};
+
+#endif /* CAMERAOSX_H */ \ No newline at end of file
diff --git a/platform/osx/camera_osx.mm b/platform/osx/camera_osx.mm
new file mode 100644
index 0000000000..34486c58b1
--- /dev/null
+++ b/platform/osx/camera_osx.mm
@@ -0,0 +1,362 @@
+/*************************************************************************/
+/* camera_osx.mm */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+///@TODO this is a near duplicate of CameraIOS, we should find a way to combine those to minimise code duplication!!!!
+// If you fix something here, make sure you fix it there as wel!
+
+#include "camera_osx.h"
+#include "servers/camera/camera_feed.h"
+#import <AVFoundation/AVFoundation.h>
+
+//////////////////////////////////////////////////////////////////////////
+// MyCaptureSession - This is a little helper class so we can capture our frames
+
+@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
+ Ref<CameraFeed> feed;
+ size_t width[2];
+ size_t height[2];
+ PoolVector<uint8_t> img_data[2];
+
+ AVCaptureDeviceInput *input;
+ AVCaptureVideoDataOutput *output;
+}
+
+@end
+
+@implementation MyCaptureSession
+
+- (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device {
+ if (self = [super init]) {
+ NSError *error;
+ feed = p_feed;
+ width[0] = 0;
+ height[0] = 0;
+ width[1] = 0;
+ height[1] = 0;
+
+ [self beginConfiguration];
+
+ input = [AVCaptureDeviceInput deviceInputWithDevice:p_device error:&error];
+ if (!input) {
+ print_line("Couldn't get input device for camera");
+ } else {
+ [self addInput:input];
+ }
+
+ output = [AVCaptureVideoDataOutput new];
+ if (!output) {
+ print_line("Couldn't get output device for camera");
+ } else {
+ NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
+ output.videoSettings = settings;
+
+ // discard if the data output queue is blocked (as we process the still image)
+ [output setAlwaysDiscardsLateVideoFrames:YES];
+
+ // now set ourselves as the delegate to receive new frames.
+ [output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
+
+ // this takes ownership
+ [self addOutput:output];
+ }
+
+ [self commitConfiguration];
+
+ // kick off our session..
+ [self startRunning];
+ };
+ return self;
+}
+
+- (void)cleanup {
+ // stop running
+ [self stopRunning];
+
+ // cleanup
+ [self beginConfiguration];
+
+ // remove input
+ if (input) {
+ [self removeInput:input];
+ // don't release this
+ input = NULL;
+ }
+
+ // free up our output
+ if (output) {
+ [self removeOutput:output];
+ [output setSampleBufferDelegate:nil queue:NULL];
+ [output release];
+ output = NULL;
+ }
+
+ [self commitConfiguration];
+}
+
+- (void)dealloc {
+ // bye bye
+ [super dealloc];
+}
+
+- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
+ // This gets called every time our camera has a new image for us to process.
+ // May need to investigate in a way to throttle this if we get more images then we're rendering frames..
+
+ // For now, version 1, we're just doing the bare minimum to make this work...
+ CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+ // int _width = CVPixelBufferGetWidth(pixelBuffer);
+ // int _height = CVPixelBufferGetHeight(pixelBuffer);
+
+ // It says that we need to lock this on the documentation pages but it's not in the samples
+ // need to lock our base address so we can access our pixel buffers, better safe then sorry?
+ CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+
+ // get our buffers
+ unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
+ unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
+ if (dataY == NULL) {
+ print_line("Couldn't access Y pixel buffer data");
+ } else if (dataCbCr == NULL) {
+ print_line("Couldn't access CbCr pixel buffer data");
+ } else {
+ Ref<Image> img[2];
+
+ {
+ // do Y
+ int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
+ int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
+
+ if ((width[0] != new_width) || (height[0] != new_height)) {
+ width[0] = new_width;
+ height[0] = new_height;
+ img_data[0].resize(new_width * new_height);
+ }
+
+ PoolVector<uint8_t>::Write w = img_data[0].write();
+ memcpy(w.ptr(), dataY, new_width * new_height);
+
+ img[0].instance();
+ img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
+ }
+
+ {
+ // do CbCr
+ int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
+ int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
+
+ if ((width[1] != new_width) || (height[1] != new_height)) {
+ width[1] = new_width;
+ height[1] = new_height;
+ img_data[1].resize(2 * new_width * new_height);
+ }
+
+ PoolVector<uint8_t>::Write w = img_data[1].write();
+ memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);
+
+ ///TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
+ img[1].instance();
+ img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
+ }
+
+ // set our texture...
+ feed->set_YCbCr_imgs(img[0], img[1]);
+ }
+
+ // and unlock
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
+}
+
+@end
+
+//////////////////////////////////////////////////////////////////////////
+// CameraFeedOSX - Subclass for camera feeds in OSX
+
+class CameraFeedOSX : public CameraFeed {
+private:
+ AVCaptureDevice *device;
+ MyCaptureSession *capture_session;
+
+public:
+ AVCaptureDevice *get_device() const;
+
+ CameraFeedOSX();
+ ~CameraFeedOSX();
+
+ void set_device(AVCaptureDevice *p_device);
+
+ bool activate_feed();
+ void deactivate_feed();
+};
+
+AVCaptureDevice *CameraFeedOSX::get_device() const {
+ return device;
+};
+
+CameraFeedOSX::CameraFeedOSX() {
+ device = NULL;
+ capture_session = NULL;
+};
+
+void CameraFeedOSX::set_device(AVCaptureDevice *p_device) {
+ device = p_device;
+ [device retain];
+
+ // get some info
+ NSString *device_name = p_device.localizedName;
+ name = device_name.UTF8String;
+ position = CameraFeed::FEED_UNSPECIFIED;
+ if ([p_device position] == AVCaptureDevicePositionBack) {
+ position = CameraFeed::FEED_BACK;
+ } else if ([p_device position] == AVCaptureDevicePositionFront) {
+ position = CameraFeed::FEED_FRONT;
+ };
+};
+
+CameraFeedOSX::~CameraFeedOSX() {
+ if (capture_session != NULL) {
+ [capture_session release];
+ capture_session = NULL;
+ };
+
+ if (device != NULL) {
+ [device release];
+ device = NULL;
+ };
+};
+
+bool CameraFeedOSX::activate_feed() {
+ if (capture_session) {
+ // already recording!
+ } else {
+ // start camera capture
+ capture_session = [[MyCaptureSession alloc] initForFeed:this andDevice:device];
+ };
+
+ return true;
+};
+
+void CameraFeedOSX::deactivate_feed() {
+ // end camera capture if we have one
+ if (capture_session) {
+ [capture_session cleanup];
+ [capture_session release];
+ capture_session = NULL;
+ };
+};
+
+//////////////////////////////////////////////////////////////////////////
+// MyDeviceNotifications - A little helper class that gets notifications
+// when devices are connected/disconnected
+
+@interface MyDeviceNotifications : NSObject {
+ CameraOSX *camera_server;
+}
+
+@end
+
+@implementation MyDeviceNotifications
+
+- (void)devices_changed:(NSNotification *)notification {
+ camera_server->update_feeds();
+}
+
+- (id)initForServer:(CameraOSX *)p_server {
+ if (self = [super init]) {
+ camera_server = p_server;
+
+ [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasConnectedNotification object:nil];
+ [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
+ };
+ return self;
+}
+
+- (void)dealloc {
+ // remove notifications
+ [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
+ [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
+
+ [super dealloc];
+}
+
+@end
+
+MyDeviceNotifications *device_notifications = nil;
+
+//////////////////////////////////////////////////////////////////////////
+// CameraOSX - Subclass for our camera server on OSX
+
+void CameraOSX::update_feeds() {
+ NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
+
+ // remove devices that are gone..
+ for (int i = feeds.size() - 1; i >= 0; i--) {
+ Ref<CameraFeedOSX> feed = (Ref<CameraFeedOSX>)feeds[i];
+
+ if (![devices containsObject:feed->get_device()]) {
+ // remove it from our array, this will also destroy it ;)
+ remove_feed(feed);
+ };
+ };
+
+ // add new devices..
+ for (AVCaptureDevice *device in devices) {
+ bool found = false;
+ for (int i = 0; i < feeds.size() && !found; i++) {
+ Ref<CameraFeedOSX> feed = (Ref<CameraFeedOSX>)feeds[i];
+ if (feed->get_device() == device) {
+ found = true;
+ };
+ };
+
+ if (!found) {
+ Ref<CameraFeedOSX> newfeed;
+ newfeed.instance();
+ newfeed->set_device(device);
+
+ // assume this is a display (user-facing) camera, so mirror the image
+ Transform2D transform = Transform2D(-1.0, 0.0, 0.0, -1.0, 1.0, 1.0);
+ newfeed->set_transform(transform);
+
+ add_feed(newfeed);
+ };
+ };
+};
+
+CameraOSX::CameraOSX() {
+ // Find available cameras we have at this time
+ update_feeds();
+
+ // should only have one of these....
+ device_notifications = [[MyDeviceNotifications alloc] initForServer:this];
+};
+
+CameraOSX::~CameraOSX() {
+ [device_notifications release];
+};
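The transform assigned to each new feed above, Transform2D(-1.0, 0.0, 0.0, -1.0, 1.0, 1.0), mirrors the image in both axes, i.e. it maps a normalized coordinate (u, v) to (1 - u, 1 - v), which is the orientation wanted for a user-facing display camera. A standalone sketch of that mapping (not part of the patch; it uses a hand-rolled stand-in, assuming Godot's six-argument Transform2D constructor takes (xx, xy, yx, yy, ox, oy)):

    #include <cstdio>

    // Minimal stand-in for a 2x3 affine transform: columns are the x axis,
    // the y axis and the origin.
    struct Xform2 {
        double xx, xy, yx, yy, ox, oy;
        void apply(double u, double v, double &out_u, double &out_v) const {
            out_u = xx * u + yx * v + ox;
            out_v = xy * u + yy * v + oy;
        }
    };

    int main() {
        Xform2 flip = { -1.0, 0.0, 0.0, -1.0, 1.0, 1.0 };
        double u, v;
        flip.apply(0.0, 0.0, u, v); // -> (1, 1): top-left maps to bottom-right
        std::printf("(0,0) -> (%.0f,%.0f)\n", u, v);
        flip.apply(1.0, 1.0, u, v); // -> (0, 0): bottom-right maps to top-left
        std::printf("(1,1) -> (%.0f,%.0f)\n", u, v);
        return 0;
    }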
diff --git a/platform/osx/detect.py b/platform/osx/detect.py
index 4c88f91d13..2175797dec 100644
--- a/platform/osx/detect.py
+++ b/platform/osx/detect.py
@@ -128,7 +128,7 @@ def configure(env):
env.Prepend(CPPPATH=['#platform/osx'])
env.Append(CPPFLAGS=['-DOSX_ENABLED', '-DUNIX_ENABLED', '-DGLES_ENABLED', '-DAPPLE_STYLE_KEYS', '-DCOREAUDIO_ENABLED', '-DCOREMIDI_ENABLED'])
- env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'OpenGL', '-framework', 'AGL', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-lz', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'CoreVideo'])
+ env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'OpenGL', '-framework', 'AGL', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-lz', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'AVFoundation', '-framework', 'CoreMedia', '-framework', 'CoreVideo'])
env.Append(LIBS=['pthread'])
env.Append(CCFLAGS=['-mmacosx-version-min=10.9'])
diff --git a/platform/osx/os_osx.h b/platform/osx/os_osx.h
index eed230ba89..09a9c0be4d 100644
--- a/platform/osx/os_osx.h
+++ b/platform/osx/os_osx.h
@@ -31,6 +31,7 @@
#ifndef OS_OSX_H
#define OS_OSX_H
+#include "camera_osx.h"
#include "core/os/input.h"
#include "crash_handler_osx.h"
#include "drivers/coreaudio/audio_driver_coreaudio.h"
@@ -73,6 +74,8 @@ public:
//Rasterizer *rasterizer;
VisualServer *visual_server;
+ CameraServer *camera_server;
+
List<String> args;
MainLoop *main_loop;
diff --git a/platform/osx/os_osx.mm b/platform/osx/os_osx.mm
index bdc17c7124..3ee6537180 100644
--- a/platform/osx/os_osx.mm
+++ b/platform/osx/os_osx.mm
@@ -1542,6 +1542,8 @@ Error OS_OSX::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
visual_server->init();
AudioDriverManager::initialize(p_audio_driver);
+ camera_server = memnew(CameraOSX);
+
input = memnew(InputDefault);
joypad_osx = memnew(JoypadOSX);
@@ -1573,6 +1575,11 @@ void OS_OSX::finalize() {
delete_main_loop();
+ if (camera_server) {
+ memdelete(camera_server);
+ camera_server = NULL;
+ }
+
memdelete(joypad_osx);
memdelete(input);
diff --git a/platform/uwp/os_uwp.cpp b/platform/uwp/os_uwp.cpp
index f9d22481dd..9d9be44ce5 100644
--- a/platform/uwp/os_uwp.cpp
+++ b/platform/uwp/os_uwp.cpp
@@ -302,6 +302,10 @@ Error OS_UWP::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
}
visual_server->init();
+
+ ///@TODO implement a subclass for UWP and instantiate that instead
+ camera_server = memnew(CameraServer);
+
input = memnew(InputDefault);
joypad = ref new JoypadUWP(input);
@@ -400,6 +404,8 @@ void OS_UWP::finalize() {
memdelete(input);
+ memdelete(camera_server);
+
joypad = nullptr;
}
diff --git a/platform/uwp/os_uwp.h b/platform/uwp/os_uwp.h
index 1bb68bc75d..b7a7248f19 100644
--- a/platform/uwp/os_uwp.h
+++ b/platform/uwp/os_uwp.h
@@ -41,6 +41,7 @@
#include "main/input_default.h"
#include "power_uwp.h"
#include "servers/audio_server.h"
+#include "servers/camera_server.h"
#include "servers/visual/rasterizer.h"
#include "servers/visual_server.h"
@@ -95,6 +96,8 @@ private:
VisualServer *visual_server;
int pressrc;
+ CameraServer *camera_server;
+
ContextEGL_UWP *gl_context;
Windows::UI::Core::CoreWindow ^ window;
diff --git a/platform/windows/SCsub b/platform/windows/SCsub
index 892d734734..8426ccbb89 100644
--- a/platform/windows/SCsub
+++ b/platform/windows/SCsub
@@ -8,6 +8,7 @@ import platform_windows_builders
common_win = [
"godot_windows.cpp",
+ "camera_win.cpp",
"context_gl_windows.cpp",
"crash_handler_windows.cpp",
"os_windows.cpp",
diff --git a/platform/windows/camera_win.cpp b/platform/windows/camera_win.cpp
new file mode 100644
index 0000000000..6eff2749f2
--- /dev/null
+++ b/platform/windows/camera_win.cpp
@@ -0,0 +1,94 @@
+/*************************************************************************/
+/* camera_win.cpp */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#include "camera_win.h"
+
+///@TODO sorry guys, I got about 80% through implementing this using DirectShow only to find out Microsoft deprecated half the API and its replacement is as confusing as they could make it
+// Joey suggested looking into libuvc, which offers a more direct route to webcams over USB and is very promising, but it wouldn't compile on Windows for me...
+// I've gutted the classes I implemented DirectShow in just to have a skeleton for someone to work on, mail me for more details or if you want a copy....
+
+//////////////////////////////////////////////////////////////////////////
+// CameraFeedWindows - Subclass for our camera feed on windows
+
+/// @TODO need to implement this
+
+class CameraFeedWindows : public CameraFeed {
+private:
+protected:
+public:
+ CameraFeedWindows();
+ virtual ~CameraFeedWindows();
+
+ bool activate_feed();
+ void deactivate_feed();
+};
+
+CameraFeedWindows::CameraFeedWindows(){
+ ///@TODO implement this, should store information about our available camera
+};
+
+CameraFeedWindows::~CameraFeedWindows() {
+ // make sure we stop recording if we are!
+ if (is_active()) {
+ deactivate_feed();
+ };
+
+ ///@TODO free up anything used by this
+};
+
+bool CameraFeedWindows::activate_feed() {
+ ///@TODO this should activate our camera and start the process of capturing frames
+
+ return true;
+};
+
+///@TODO we should probably have a callback method here that the camera API calls with new frames, which then calls back into the CameraServer to update our texture
+
+void CameraFeedWindows::deactivate_feed(){
+ ///@TODO this should deactivate our camera and stop the process of capturing frames
+};
+
+//////////////////////////////////////////////////////////////////////////
+// CameraWindows - Subclass for our camera server on windows
+
+void CameraWindows::add_active_cameras(){
+ ///@TODO scan through any active cameras and create CameraFeedWindows objects for them
+};
+
+CameraWindows::CameraWindows() {
+ // Find cameras active right now
+ add_active_cameras();
+
+ // need to add something that will react to devices being connected/removed...
+};
+
+CameraWindows::~CameraWindows(){
+
+};
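The constructor above leaves a note about reacting to devices being connected or removed. CameraOSX::update_feeds() earlier in this patch handles that by reconciling the current device list against the registered feeds, and a Windows backend could follow the same shape once device enumeration exists. A standalone, simplified illustration of that reconcile pattern (plain C++ with std containers, not engine code; names are illustrative only):

    #include <algorithm>
    #include <string>
    #include <vector>

    // Feeds and devices are represented by name strings purely for illustration.
    static void reconcile(std::vector<std::string> &feeds,
            const std::vector<std::string> &devices) {
        // Drop feeds whose device has disappeared.
        feeds.erase(std::remove_if(feeds.begin(), feeds.end(),
                            [&](const std::string &f) {
                                return std::find(devices.begin(), devices.end(), f) == devices.end();
                            }),
                feeds.end());
        // Add a feed for every device we don't know about yet.
        for (const std::string &d : devices) {
            if (std::find(feeds.begin(), feeds.end(), d) == feeds.end())
                feeds.push_back(d);
        }
    }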
diff --git a/platform/windows/camera_win.h b/platform/windows/camera_win.h
new file mode 100644
index 0000000000..61d22ac6b1
--- /dev/null
+++ b/platform/windows/camera_win.h
@@ -0,0 +1,46 @@
+/*************************************************************************/
+/* camera_win.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef CAMERAWIN_H
+#define CAMERAWIN_H
+
+#include "servers/camera/camera_feed.h"
+#include "servers/camera_server.h"
+
+class CameraWindows : public CameraServer {
+private:
+ void add_active_cameras();
+
+public:
+ CameraWindows();
+ ~CameraWindows();
+};
+
+#endif /* CAMERAWIN_H */
diff --git a/platform/windows/joypad_windows.cpp b/platform/windows/joypad_windows.cpp
index 5a399cdf90..432060f4fe 100644
--- a/platform/windows/joypad_windows.cpp
+++ b/platform/windows/joypad_windows.cpp
@@ -103,17 +103,17 @@ bool JoypadWindows::is_xinput_device(const GUID *p_guid) {
PRAWINPUTDEVICELIST dev_list = NULL;
unsigned int dev_list_count = 0;
- if (GetRawInputDeviceList(NULL, &dev_list_count, sizeof(RAWINPUTDEVICELIST)) == -1) {
+ if (GetRawInputDeviceList(NULL, &dev_list_count, sizeof(RAWINPUTDEVICELIST)) == (UINT)-1) {
return false;
}
dev_list = (PRAWINPUTDEVICELIST)malloc(sizeof(RAWINPUTDEVICELIST) * dev_list_count);
if (!dev_list) return false;
- if (GetRawInputDeviceList(dev_list, &dev_list_count, sizeof(RAWINPUTDEVICELIST)) == -1) {
+ if (GetRawInputDeviceList(dev_list, &dev_list_count, sizeof(RAWINPUTDEVICELIST)) == (UINT)-1) {
free(dev_list);
return false;
}
- for (int i = 0; i < dev_list_count; i++) {
+ for (unsigned int i = 0; i < dev_list_count; i++) {
RID_DEVICE_INFO rdi;
char dev_name[128];
@@ -334,9 +334,9 @@ void JoypadWindows::process_joypads() {
if (joy.state.dwPacketNumber != joy.last_packet) {
int button_mask = XINPUT_GAMEPAD_DPAD_UP;
- for (int i = 0; i <= 16; i++) {
+ for (int j = 0; j <= 16; j++) {
- input->joy_button(joy.id, i, joy.state.Gamepad.wButtons & button_mask);
+ input->joy_button(joy.id, j, joy.state.Gamepad.wButtons & button_mask);
button_mask = button_mask * 2;
}
@@ -406,7 +406,7 @@ void JoypadWindows::process_joypads() {
// on mingw, these constants are not constants
int count = 6;
- int axes[] = { DIJOFS_X, DIJOFS_Y, DIJOFS_Z, DIJOFS_RX, DIJOFS_RY, DIJOFS_RZ };
+ unsigned int axes[] = { DIJOFS_X, DIJOFS_Y, DIJOFS_Z, DIJOFS_RX, DIJOFS_RY, DIJOFS_RZ };
int values[] = { js.lX, js.lY, js.lZ, js.lRx, js.lRy, js.lRz };
for (int j = 0; j < joy->joy_axis.size(); j++) {
@@ -426,7 +426,11 @@ void JoypadWindows::post_hat(int p_device, DWORD p_dpad) {
int dpad_val = 0;
- if (p_dpad == -1) {
+ // Should be -1 when centered, but according to docs:
+ // "Some drivers report the centered position of the POV indicator as 65,535. Determine whether the indicator is centered as follows:
+ // BOOL POVCentered = (LOWORD(dwPOV) == 0xFFFF);"
+ // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/ee416628(v%3Dvs.85)#remarks
+ if (LOWORD(p_dpad) == 0xFFFF) {
dpad_val = InputDefault::HAT_MASK_CENTER;
}
if (p_dpad == 0) {
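The hat handling change above follows the DirectInput remark quoted in the new comment: a centered POV indicator may be reported as 0xFFFF in the low word rather than as -1. Reduced to a standalone check (sketch only, equivalent to the LOWORD(p_dpad) == 0xFFFF test used in post_hat()):

    static bool pov_centered(unsigned long dwPOV) {
        return (dwPOV & 0xFFFF) == 0xFFFF;
    }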
diff --git a/platform/windows/os_windows.cpp b/platform/windows/os_windows.cpp
index 4cd637a4b2..db56bf5c7e 100644
--- a/platform/windows/os_windows.cpp
+++ b/platform/windows/os_windows.cpp
@@ -94,6 +94,7 @@ static BOOL CALLBACK _MonitorEnumProcSize(HMONITOR hMonitor, HDC hdcMonitor, LPR
return TRUE;
}
+#ifdef DEBUG_ENABLED
static String format_error_message(DWORD id) {
LPWSTR messageBuffer = NULL;
@@ -106,6 +107,7 @@ static String format_error_message(DWORD id) {
return msg;
}
+#endif // DEBUG_ENABLED
extern HINSTANCE godot_hinstance;
@@ -555,6 +557,7 @@ LRESULT OS_Windows::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
break;
}
}
+ FALLTHROUGH;
case WM_MBUTTONDOWN:
case WM_MBUTTONUP:
case WM_RBUTTONDOWN:
@@ -583,7 +586,6 @@ LRESULT OS_Windows::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
case WM_MBUTTONDOWN: {
mb->set_pressed(true);
mb->set_button_index(3);
-
} break;
case WM_MBUTTONUP: {
mb->set_pressed(false);
@@ -598,19 +600,16 @@ LRESULT OS_Windows::WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
mb->set_button_index(2);
} break;
case WM_LBUTTONDBLCLK: {
-
mb->set_pressed(true);
mb->set_button_index(1);
mb->set_doubleclick(true);
} break;
case WM_RBUTTONDBLCLK: {
-
mb->set_pressed(true);
mb->set_button_index(2);
mb->set_doubleclick(true);
} break;
case WM_MBUTTONDBLCLK: {
-
mb->set_pressed(true);
mb->set_button_index(3);
mb->set_doubleclick(true);
@@ -1358,6 +1357,8 @@ Error OS_Windows::initialize(const VideoMode &p_desired, int p_video_driver, int
power_manager = memnew(PowerWindows);
+ camera_server = memnew(CameraWindows);
+
AudioDriverManager::initialize(p_audio_driver);
TRACKMOUSEEVENT tme;
@@ -1517,6 +1518,7 @@ void OS_Windows::finalize() {
memdelete(joypad);
memdelete(input);
+ memdelete(camera_server);
touch_state.clear();
visual_server->finish();
@@ -1816,11 +1818,11 @@ void OS_Windows::set_window_size(const Size2 p_size) {
// Don't let the mouse leave the window when resizing to a smaller resolution
if (mouse_mode == MOUSE_MODE_CONFINED) {
- RECT rect;
- GetClientRect(hWnd, &rect);
- ClientToScreen(hWnd, (POINT *)&rect.left);
- ClientToScreen(hWnd, (POINT *)&rect.right);
- ClipCursor(&rect);
+ RECT crect;
+ GetClientRect(hWnd, &crect);
+ ClientToScreen(hWnd, (POINT *)&crect.left);
+ ClientToScreen(hWnd, (POINT *)&crect.right);
+ ClipCursor(&crect);
}
}
void OS_Windows::set_window_fullscreen(bool p_enabled) {
@@ -2193,6 +2195,8 @@ uint64_t OS_Windows::get_unix_time() const {
FILETIME fep;
SystemTimeToFileTime(&ep, &fep);
+ // FIXME: dereferencing type-punned pointer will break strict-aliasing rules (GCC warning)
+ // https://docs.microsoft.com/en-us/windows/desktop/api/minwinbase/ns-minwinbase-filetime#remarks
return (*(uint64_t *)&ft - *(uint64_t *)&fep) / 10000000;
};
@@ -2378,7 +2382,7 @@ void OS_Windows::set_custom_mouse_cursor(const RES &p_cursor, CursorShape p_shap
}
// Finally, create the icon
- ICONINFO iconinfo = { 0 };
+ ICONINFO iconinfo;
iconinfo.fIcon = FALSE;
iconinfo.xHotspot = p_hotspot.x;
iconinfo.yHotspot = p_hotspot.y;
@@ -2531,9 +2535,9 @@ Error OS_Windows::execute(const String &p_path, const List<String> &p_arguments,
if (p_blocking) {
- DWORD ret = WaitForSingleObject(pi.pi.hProcess, INFINITE);
+ DWORD ret2 = WaitForSingleObject(pi.pi.hProcess, INFINITE);
if (r_exitcode)
- *r_exitcode = ret;
+ *r_exitcode = ret2;
CloseHandle(pi.pi.hProcess);
CloseHandle(pi.pi.hThread);
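The FIXME added in get_unix_time() flags the FILETIME-to-uint64_t casts as type punning. One conventional way to avoid that warning, should it ever be cleaned up, is to assemble the 64-bit tick count from the two DWORD halves explicitly (sketch only, not part of this patch; assumes <windows.h> and <cstdint> are available):

    static uint64_t filetime_to_u64(const FILETIME &ft) {
        return (uint64_t(ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
    }

    // get_unix_time() could then compute:
    //   (filetime_to_u64(ft) - filetime_to_u64(fep)) / 10000000;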
diff --git a/platform/windows/os_windows.h b/platform/windows/os_windows.h
index 0aacbcb9ff..b20d847baa 100644
--- a/platform/windows/os_windows.h
+++ b/platform/windows/os_windows.h
@@ -31,6 +31,7 @@
#ifndef OS_WINDOWS_H
#define OS_WINDOWS_H
+#include "camera_win.h"
#include "context_gl_windows.h"
#include "core/os/input.h"
#include "core/os/os.h"
@@ -108,6 +109,7 @@ class OS_Windows : public OS {
ContextGL_Windows *gl_context;
#endif
VisualServer *visual_server;
+ CameraWindows *camera_server;
int pressrc;
HDC hDC; // Private GDI Device Context
HINSTANCE hInstance; // Holds The Instance Of The Application
diff --git a/platform/windows/power_windows.cpp b/platform/windows/power_windows.cpp
index b96ae51132..0efd88c216 100644
--- a/platform/windows/power_windows.cpp
+++ b/platform/windows/power_windows.cpp
@@ -89,7 +89,7 @@ bool PowerWindows::GetPowerInfo_Windows() {
if (pct != 255) { /* 255 == unknown */
percent_left = (pct > 100) ? 100 : pct; /* clamp between 0%, 100% */
}
- if (secs != 0xFFFFFFFF) { /* ((DWORD)-1) == unknown */
+ if (secs != (int)0xFFFFFFFF) { /* ((DWORD)-1) == unknown */
nsecs_left = secs;
}
}
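The cast here is presumably about the comparison's signedness (the patch doesn't say): 0xFFFFFFFF is an unsigned literal, so comparing a plain int against it promotes the int to unsigned and triggers -Wsign-compare, while (int)0xFFFFFFFF keeps the comparison signed and equals -1 on two's-complement targets. A tiny illustration:

    int secs = -1;                            // "unknown" sentinel
    bool unknown = (secs == (int)0xFFFFFFFF); // true; comparison stays signed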
diff --git a/platform/windows/windows_terminal_logger.cpp b/platform/windows/windows_terminal_logger.cpp
index 7def419103..adbdafb07e 100644
--- a/platform/windows/windows_terminal_logger.cpp
+++ b/platform/windows/windows_terminal_logger.cpp
@@ -45,7 +45,7 @@ void WindowsTerminalLogger::logv(const char *p_format, va_list p_list, bool p_er
int len = vsnprintf(buf, BUFFER_SIZE, p_format, p_list);
if (len <= 0)
return;
- if (len >= BUFFER_SIZE)
+ if ((unsigned int)len >= BUFFER_SIZE)
len = BUFFER_SIZE; // Output is too big, will be truncated
buf[len] = 0;
@@ -154,4 +154,4 @@ void WindowsTerminalLogger::log_error(const char *p_function, const char *p_file
WindowsTerminalLogger::~WindowsTerminalLogger() {}
-#endif \ No newline at end of file
+#endif
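The clamp on len relies on the snprintf family's return-value convention: vsnprintf returns the length the formatted string would have had (excluding the terminator), so a value >= the buffer size means the output was truncated. A standalone illustration using snprintf for brevity (the logger's own buffer sizing differs slightly):

    #include <cstdio>

    int demo() {
        char buf[8];
        int len = std::snprintf(buf, sizeof(buf), "%s", "a long message");
        if (len >= (int)sizeof(buf))
            len = (int)sizeof(buf) - 1; // output was truncated to fit
        buf[len] = 0; // snprintf already terminates; this mirrors the logger's pattern
        return len;
    }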
diff --git a/platform/x11/os_x11.cpp b/platform/x11/os_x11.cpp
index 510f3d6114..146cd8bb66 100644
--- a/platform/x11/os_x11.cpp
+++ b/platform/x11/os_x11.cpp
@@ -583,6 +583,9 @@ Error OS_X11::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
AudioDriverManager::initialize(p_audio_driver);
+ ///@TODO implement a subclass for Linux and instantiate that instead
+ camera_server = memnew(CameraServer);
+
input = memnew(InputDefault);
window_has_focus = true; // Set focus to true at init
@@ -783,6 +786,8 @@ void OS_X11::finalize() {
memdelete(input);
+ memdelete(camera_server);
+
visual_server->finish();
memdelete(visual_server);
//memdelete(rasterizer);
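For the X11 TODO above (a proper Linux camera backend instead of the plain CameraServer), a natural starting point would be enumerating V4L2 device nodes. A hedged sketch of just that enumeration step (the future class and helper names are hypothetical; only plain POSIX calls are used):

    #include <dirent.h>
    #include <cstring>
    #include <string>
    #include <vector>

    // List /dev/video* nodes; a future CameraX11::update_feeds() could build
    // one feed per node it can successfully open and query.
    static std::vector<std::string> list_video_devices() {
        std::vector<std::string> paths;
        DIR *dir = opendir("/dev");
        if (!dir)
            return paths;
        while (struct dirent *e = readdir(dir)) {
            if (std::strncmp(e->d_name, "video", 5) == 0)
                paths.push_back(std::string("/dev/") + e->d_name);
        }
        closedir(dir);
        return paths;
    }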
diff --git a/platform/x11/os_x11.h b/platform/x11/os_x11.h
index ad35cdb4f9..eca617bd90 100644
--- a/platform/x11/os_x11.h
+++ b/platform/x11/os_x11.h
@@ -42,6 +42,7 @@
#include "main/input_default.h"
#include "power_x11.h"
#include "servers/audio_server.h"
+#include "servers/camera_server.h"
#include "servers/visual/rasterizer.h"
#include "servers/visual_server.h"
//#include "servers/visual/visual_server_wrap_mt.h"
@@ -146,6 +147,8 @@ class OS_X11 : public OS_Unix {
void get_key_modifier_state(unsigned int p_x11_state, Ref<InputEventWithModifiers> state);
void flush_mouse_motion();
+ CameraServer *camera_server;
+
MouseMode mouse_mode;
Point2i center;
diff --git a/scene/2d/position_2d.cpp b/scene/2d/position_2d.cpp
index 8bccf117fd..f0c46a5fb7 100644
--- a/scene/2d/position_2d.cpp
+++ b/scene/2d/position_2d.cpp
@@ -33,6 +33,8 @@
#include "core/engine.h"
#include "scene/resources/texture.h"
+const float DEFAULT_GIZMO_EXTENTS = 10.0;
+
void Position2D::_draw_cross() {
float extents = get_gizmo_extents();
diff --git a/scene/2d/position_2d.h b/scene/2d/position_2d.h
index dc9cc2df15..ec73c02d2b 100644
--- a/scene/2d/position_2d.h
+++ b/scene/2d/position_2d.h
@@ -37,8 +37,6 @@ class Position2D : public Node2D {
GDCLASS(Position2D, Node2D)
- const float DEFAULT_GIZMO_EXTENTS = 10.0;
-
void _draw_cross();
protected:
diff --git a/scene/3d/camera.cpp b/scene/3d/camera.cpp
index a00f2173c0..c7d6919a2b 100644
--- a/scene/3d/camera.cpp
+++ b/scene/3d/camera.cpp
@@ -869,6 +869,11 @@ void ClippedCamera::clear_exceptions() {
exclude.clear();
}
+float ClippedCamera::get_clip_offset() const {
+
+ return clip_offset;
+}
+
void ClippedCamera::set_clip_to_areas(bool p_clip) {
clip_to_areas = p_clip;
@@ -912,6 +917,8 @@ void ClippedCamera::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_clip_to_areas", "enable"), &ClippedCamera::set_clip_to_areas);
ClassDB::bind_method(D_METHOD("is_clip_to_areas_enabled"), &ClippedCamera::is_clip_to_areas_enabled);
+ ClassDB::bind_method(D_METHOD("get_clip_offset"), &ClippedCamera::get_clip_offset);
+
ClassDB::bind_method(D_METHOD("set_clip_to_bodies", "enable"), &ClippedCamera::set_clip_to_bodies);
ClassDB::bind_method(D_METHOD("is_clip_to_bodies_enabled"), &ClippedCamera::is_clip_to_bodies_enabled);
diff --git a/scene/3d/camera.h b/scene/3d/camera.h
index 1cd729199d..c0a4f77435 100644
--- a/scene/3d/camera.h
+++ b/scene/3d/camera.h
@@ -234,6 +234,8 @@ public:
void remove_exception(const Object *p_object);
void clear_exceptions();
+ float get_clip_offset() const;
+
ClippedCamera();
~ClippedCamera();
};
diff --git a/scene/animation/tween.cpp b/scene/animation/tween.cpp
index 23998183b8..c70e58564f 100644
--- a/scene/animation/tween.cpp
+++ b/scene/animation/tween.cpp
@@ -34,10 +34,14 @@
void Tween::_add_pending_command(StringName p_key, const Variant &p_arg1, const Variant &p_arg2, const Variant &p_arg3, const Variant &p_arg4, const Variant &p_arg5, const Variant &p_arg6, const Variant &p_arg7, const Variant &p_arg8, const Variant &p_arg9, const Variant &p_arg10) {
+ // Add a new pending command and reference it
pending_commands.push_back(PendingCommand());
PendingCommand &cmd = pending_commands.back()->get();
+ // Update the command with the target key
cmd.key = p_key;
+
+ // Determine command argument count
int &count = cmd.args;
if (p_arg10.get_type() != Variant::NIL)
count = 10;
@@ -59,6 +63,9 @@ void Tween::_add_pending_command(StringName p_key, const Variant &p_arg1, const
count = 2;
else if (p_arg1.get_type() != Variant::NIL)
count = 1;
+
+ // Add the specified arguments to the command
+ // TODO: Make this a switch statement?
if (count > 0)
cmd.arg[0] = p_arg1;
if (count > 1)
@@ -83,10 +90,14 @@ void Tween::_add_pending_command(StringName p_key, const Variant &p_arg1, const
void Tween::_process_pending_commands() {
+ // For each pending command...
for (List<PendingCommand>::Element *E = pending_commands.front(); E; E = E->next()) {
+ // Get the command
PendingCommand &cmd = E->get();
Variant::CallError err;
+
+ // Grab all of the arguments for the command
Variant *arg[10] = {
&cmd.arg[0],
&cmd.arg[1],
@@ -99,16 +110,20 @@ void Tween::_process_pending_commands() {
&cmd.arg[8],
&cmd.arg[9],
};
+
+ // Execute the command (and retrieve any errors)
this->call(cmd.key, (const Variant **)arg, cmd.args, err);
}
+
+ // Clear the pending commands
pending_commands.clear();
}
bool Tween::_set(const StringName &p_name, const Variant &p_value) {
+ // Set the correct attribute based on the given name
String name = p_name;
-
- if (name == "playback/speed" || name == "speed") { //bw compatibility
+ if (name == "playback/speed" || name == "speed") { // Backwards compatibility
set_speed_scale(p_value);
} else if (name == "playback/active") {
@@ -122,69 +137,78 @@ bool Tween::_set(const StringName &p_name, const Variant &p_value) {
bool Tween::_get(const StringName &p_name, Variant &r_ret) const {
+ // Get the correct attribute based on the given name
String name = p_name;
-
- if (name == "playback/speed") { //bw compatibility
-
+ if (name == "playback/speed") { // Backwards compatibility
r_ret = speed_scale;
- } else if (name == "playback/active") {
+ } else if (name == "playback/active") {
r_ret = is_active();
- } else if (name == "playback/repeat") {
+ } else if (name == "playback/repeat") {
r_ret = is_repeat();
}
-
return true;
}
void Tween::_get_property_list(List<PropertyInfo> *p_list) const {
-
+ // Add the property info for the Tween object
p_list->push_back(PropertyInfo(Variant::BOOL, "playback/active", PROPERTY_HINT_NONE, ""));
p_list->push_back(PropertyInfo(Variant::BOOL, "playback/repeat", PROPERTY_HINT_NONE, ""));
p_list->push_back(PropertyInfo(Variant::REAL, "playback/speed", PROPERTY_HINT_RANGE, "-64,64,0.01"));
}
void Tween::_notification(int p_what) {
-
+ // What notification did we receive?
switch (p_what) {
case NOTIFICATION_ENTER_TREE: {
-
+ // Are we not already active?
if (!is_active()) {
- //make sure that a previous process state was not saved
- //only process if "processing" is set
+ // Make sure that a previous process state was not saved
+ // Only process if "processing" is set
set_physics_process_internal(false);
set_process_internal(false);
}
} break;
- case NOTIFICATION_READY: {
+ case NOTIFICATION_READY: {
+ // Do nothing
} break;
+
case NOTIFICATION_INTERNAL_PROCESS: {
+ // Are we processing during physics time?
if (tween_process_mode == TWEEN_PROCESS_PHYSICS)
+ // Skip; this tween is set to process during physics frames, not idle frames
break;
+ // Should we update?
if (is_active())
+ // Update the tweens
_tween_process(get_process_delta_time());
} break;
- case NOTIFICATION_INTERNAL_PHYSICS_PROCESS: {
+ case NOTIFICATION_INTERNAL_PHYSICS_PROCESS: {
+ // Are we processing during 'regular' time?
if (tween_process_mode == TWEEN_PROCESS_IDLE)
+ // Skip; this tween is set to process during idle frames, not physics frames
break;
+ // Should we update?
if (is_active())
+ // Update the tweens
_tween_process(get_physics_process_delta_time());
} break;
- case NOTIFICATION_EXIT_TREE: {
+ case NOTIFICATION_EXIT_TREE: {
+ // We've left the tree. Stop all tweens
stop_all();
} break;
}
}
void Tween::_bind_methods() {
-
+ // Bind getters and setters
ClassDB::bind_method(D_METHOD("is_active"), &Tween::is_active);
ClassDB::bind_method(D_METHOD("set_active", "active"), &Tween::set_active);
@@ -197,6 +221,7 @@ void Tween::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_tween_process_mode", "mode"), &Tween::set_tween_process_mode);
ClassDB::bind_method(D_METHOD("get_tween_process_mode"), &Tween::get_tween_process_mode);
+ // Bind the various Tween control methods
ClassDB::bind_method(D_METHOD("start"), &Tween::start);
ClassDB::bind_method(D_METHOD("reset", "object", "key"), &Tween::reset, DEFVAL(""));
ClassDB::bind_method(D_METHOD("reset_all"), &Tween::reset_all);
@@ -211,6 +236,7 @@ void Tween::_bind_methods() {
ClassDB::bind_method(D_METHOD("tell"), &Tween::tell);
ClassDB::bind_method(D_METHOD("get_runtime"), &Tween::get_runtime);
+ // Bind interpolation and follow methods
ClassDB::bind_method(D_METHOD("interpolate_property", "object", "property", "initial_val", "final_val", "duration", "trans_type", "ease_type", "delay"), &Tween::interpolate_property, DEFVAL(0));
ClassDB::bind_method(D_METHOD("interpolate_method", "object", "method", "initial_val", "final_val", "duration", "trans_type", "ease_type", "delay"), &Tween::interpolate_method, DEFVAL(0));
ClassDB::bind_method(D_METHOD("interpolate_callback", "object", "duration", "callback", "arg1", "arg2", "arg3", "arg4", "arg5"), &Tween::interpolate_callback, DEFVAL(Variant()), DEFVAL(Variant()), DEFVAL(Variant()), DEFVAL(Variant()), DEFVAL(Variant()));
@@ -220,18 +246,22 @@ void Tween::_bind_methods() {
ClassDB::bind_method(D_METHOD("targeting_property", "object", "property", "initial", "initial_val", "final_val", "duration", "trans_type", "ease_type", "delay"), &Tween::targeting_property, DEFVAL(0));
ClassDB::bind_method(D_METHOD("targeting_method", "object", "method", "initial", "initial_method", "final_val", "duration", "trans_type", "ease_type", "delay"), &Tween::targeting_method, DEFVAL(0));
+ // Add the Tween signals
ADD_SIGNAL(MethodInfo("tween_started", PropertyInfo(Variant::OBJECT, "object"), PropertyInfo(Variant::NODE_PATH, "key")));
ADD_SIGNAL(MethodInfo("tween_step", PropertyInfo(Variant::OBJECT, "object"), PropertyInfo(Variant::NODE_PATH, "key"), PropertyInfo(Variant::REAL, "elapsed"), PropertyInfo(Variant::OBJECT, "value")));
ADD_SIGNAL(MethodInfo("tween_completed", PropertyInfo(Variant::OBJECT, "object"), PropertyInfo(Variant::NODE_PATH, "key")));
ADD_SIGNAL(MethodInfo("tween_all_completed"));
+ // Add the properties and tie them to the getters and setters
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "repeat"), "set_repeat", "is_repeat");
ADD_PROPERTY(PropertyInfo(Variant::INT, "playback_process_mode", PROPERTY_HINT_ENUM, "Physics,Idle"), "set_tween_process_mode", "get_tween_process_mode");
ADD_PROPERTY(PropertyInfo(Variant::REAL, "playback_speed", PROPERTY_HINT_RANGE, "-64,64,0.01"), "set_speed_scale", "get_speed_scale");
+ // Bind Idle vs Physics process
BIND_ENUM_CONSTANT(TWEEN_PROCESS_PHYSICS);
BIND_ENUM_CONSTANT(TWEEN_PROCESS_IDLE);
+ // Bind the Transition type constants
BIND_ENUM_CONSTANT(TRANS_LINEAR);
BIND_ENUM_CONSTANT(TRANS_SINE);
BIND_ENUM_CONSTANT(TRANS_QUINT);
@@ -244,6 +274,7 @@ void Tween::_bind_methods() {
BIND_ENUM_CONSTANT(TRANS_BOUNCE);
BIND_ENUM_CONSTANT(TRANS_BACK);
+ // Bind the easing constants
BIND_ENUM_CONSTANT(EASE_IN);
BIND_ENUM_CONSTANT(EASE_OUT);
BIND_ENUM_CONSTANT(EASE_IN_OUT);
@@ -252,27 +283,30 @@ void Tween::_bind_methods() {
Variant &Tween::_get_initial_val(InterpolateData &p_data) {
+ // What type of data are we interpolating?
switch (p_data.type) {
case INTER_PROPERTY:
case INTER_METHOD:
case FOLLOW_PROPERTY:
case FOLLOW_METHOD:
+ // Simply use the given initial value
return p_data.initial_val;
case TARGETING_PROPERTY:
case TARGETING_METHOD: {
-
+ // Get the object that is being targeted
Object *object = ObjectDB::get_instance(p_data.target_id);
ERR_FAIL_COND_V(object == NULL, p_data.initial_val);
+ // Are we targeting a property or a method?
static Variant initial_val;
if (p_data.type == TARGETING_PROPERTY) {
-
+ // Get the property from the target object
bool valid = false;
initial_val = object->get_indexed(p_data.target_key, &valid);
ERR_FAIL_COND_V(!valid, p_data.initial_val);
} else {
-
+ // Call the method and get the initial value from it
Variant::CallError error;
initial_val = object->call(p_data.target_key[0], NULL, 0, error);
ERR_FAIL_COND_V(error.error != Variant::CallError::CALL_OK, p_data.initial_val);
@@ -281,64 +315,75 @@ Variant &Tween::_get_initial_val(InterpolateData &p_data) {
}
case INTER_CALLBACK:
+ // Callback does not have a special initial value
break;
}
+ // If we've made it here, just return the delta value as the initial value
return p_data.delta_val;
}
Variant &Tween::_get_delta_val(InterpolateData &p_data) {
+ // What kind of data are we interpolating?
switch (p_data.type) {
case INTER_PROPERTY:
case INTER_METHOD:
+ // Simply return the given delta value
return p_data.delta_val;
case FOLLOW_PROPERTY:
case FOLLOW_METHOD: {
-
+ // We're following an object, so grab that instance
Object *target = ObjectDB::get_instance(p_data.target_id);
ERR_FAIL_COND_V(target == NULL, p_data.initial_val);
+ // We want to figure out the final value
Variant final_val;
-
if (p_data.type == FOLLOW_PROPERTY) {
-
+ // Read the property as-is
bool valid = false;
final_val = target->get_indexed(p_data.target_key, &valid);
ERR_FAIL_COND_V(!valid, p_data.initial_val);
} else {
-
+ // We're looking at a method. Call the method on the target object
Variant::CallError error;
final_val = target->call(p_data.target_key[0], NULL, 0, error);
ERR_FAIL_COND_V(error.error != Variant::CallError::CALL_OK, p_data.initial_val);
}
- // convert INT to REAL is better for interpolaters
+ // If we're looking at an INT value, instead convert it to a REAL
+ // This is better for interpolation
if (final_val.get_type() == Variant::INT) final_val = final_val.operator real_t();
+
+ // Calculate the delta based on the initial value and the final value
_calc_delta_val(p_data.initial_val, final_val, p_data.delta_val);
return p_data.delta_val;
}
case TARGETING_PROPERTY:
case TARGETING_METHOD: {
-
+ // Grab the initial value from the data to calculate delta
Variant initial_val = _get_initial_val(p_data);
- // convert INT to REAL is better for interpolaters
+
+ // If we're looking at an INT value, instead convert it to a REAL
+ // This is better for interpolation
if (initial_val.get_type() == Variant::INT) initial_val = initial_val.operator real_t();
- //_calc_delta_val(p_data.initial_val, p_data.final_val, p_data.delta_val);
+ // Calculate the delta based on the initial value and the final value
_calc_delta_val(initial_val, p_data.final_val, p_data.delta_val);
return p_data.delta_val;
}
case INTER_CALLBACK:
+ // Callbacks have no special delta
break;
}
+ // If we've made it here, use the initial value as the delta
return p_data.initial_val;
}
Variant Tween::_run_equation(InterpolateData &p_data) {
-
+ // Get the initial and delta values from the data
Variant &initial_val = _get_initial_val(p_data);
Variant &delta_val = _get_delta_val(p_data);
Variant result;
@@ -346,48 +391,59 @@ Variant Tween::_run_equation(InterpolateData &p_data) {
#define APPLY_EQUATION(element) \
r.element = _run_equation(p_data.trans_type, p_data.ease_type, p_data.elapsed - p_data.delay, i.element, d.element, p_data.duration);
+ // What type of data are we interpolating?
switch (initial_val.get_type()) {
case Variant::BOOL:
+ // Run the boolean specific equation (checking if it is at least 0.5)
result = (_run_equation(p_data.trans_type, p_data.ease_type, p_data.elapsed - p_data.delay, initial_val, delta_val, p_data.duration)) >= 0.5;
break;
case Variant::INT:
+ // Run the integer specific equation
result = (int)_run_equation(p_data.trans_type, p_data.ease_type, p_data.elapsed - p_data.delay, (int)initial_val, (int)delta_val, p_data.duration);
break;
case Variant::REAL:
+ // Run the REAL specific equation
result = _run_equation(p_data.trans_type, p_data.ease_type, p_data.elapsed - p_data.delay, (real_t)initial_val, (real_t)delta_val, p_data.duration);
break;
case Variant::VECTOR2: {
+ // Get vectors for initial and delta values
Vector2 i = initial_val;
Vector2 d = delta_val;
Vector2 r;
+ // Execute the equation and mutate the r vector
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(x);
APPLY_EQUATION(y);
-
result = r;
} break;
case Variant::VECTOR3: {
+ // Get vectors for initial and delta values
Vector3 i = initial_val;
Vector3 d = delta_val;
Vector3 r;
+ // Execute the equation and mutate the r vector
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(x);
APPLY_EQUATION(y);
APPLY_EQUATION(z);
-
result = r;
} break;
case Variant::BASIS: {
+ // Get the basis for initial and delta values
Basis i = initial_val;
Basis d = delta_val;
Basis r;
+ // Execute the equation on all the basis and mutate the r basis
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(elements[0][0]);
APPLY_EQUATION(elements[0][1]);
APPLY_EQUATION(elements[0][2]);
@@ -397,55 +453,63 @@ Variant Tween::_run_equation(InterpolateData &p_data) {
APPLY_EQUATION(elements[2][0]);
APPLY_EQUATION(elements[2][1]);
APPLY_EQUATION(elements[2][2]);
-
result = r;
} break;
case Variant::TRANSFORM2D: {
+ // Get the transforms for initial and delta values
Transform2D i = initial_val;
Transform2D d = delta_val;
Transform2D r;
+ // Execute the equation on the transforms and mutate the r transform
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(elements[0][0]);
APPLY_EQUATION(elements[0][1]);
APPLY_EQUATION(elements[1][0]);
APPLY_EQUATION(elements[1][1]);
APPLY_EQUATION(elements[2][0]);
APPLY_EQUATION(elements[2][1]);
-
result = r;
} break;
case Variant::QUAT: {
+ // Get the quaternions for the initial and delta values
Quat i = initial_val;
Quat d = delta_val;
Quat r;
+ // Execute the equation on the quaternion values and mutate the r quaternion
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(x);
APPLY_EQUATION(y);
APPLY_EQUATION(z);
APPLY_EQUATION(w);
-
result = r;
} break;
case Variant::AABB: {
+ // Get the AABB's for the initial and delta values
AABB i = initial_val;
AABB d = delta_val;
AABB r;
+ // Execute the equation for the position and size of the AABBs and mutate the r AABB
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(position.x);
APPLY_EQUATION(position.y);
APPLY_EQUATION(position.z);
APPLY_EQUATION(size.x);
APPLY_EQUATION(size.y);
APPLY_EQUATION(size.z);
-
result = r;
} break;
case Variant::TRANSFORM: {
+ // Get the transforms for the initial and delta values
Transform i = initial_val;
Transform d = delta_val;
Transform r;
+ // Execute the equation for each of the transforms and their origin and mutate the r transform
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(basis.elements[0][0]);
APPLY_EQUATION(basis.elements[0][1]);
APPLY_EQUATION(basis.elements[0][2]);
@@ -458,40 +522,45 @@ Variant Tween::_run_equation(InterpolateData &p_data) {
APPLY_EQUATION(origin.x);
APPLY_EQUATION(origin.y);
APPLY_EQUATION(origin.z);
-
result = r;
} break;
case Variant::COLOR: {
+ // Get the Color for initial and delta value
Color i = initial_val;
Color d = delta_val;
Color r;
+ // Apply the equation on the Color RGBA, and mutate the r color
+ // This uses the custom APPLY_EQUATION macro defined above
APPLY_EQUATION(r);
APPLY_EQUATION(g);
APPLY_EQUATION(b);
APPLY_EQUATION(a);
-
result = r;
} break;
default: {
+ // If unknown, just return the initial value
result = initial_val;
} break;
};
#undef APPLY_EQUATION
-
+ // Return the result that was computed
return result;
}
bool Tween::_apply_tween_value(InterpolateData &p_data, Variant &value) {
+ // Get the object we want to apply the new value to
Object *object = ObjectDB::get_instance(p_data.id);
ERR_FAIL_COND_V(object == NULL, false);
+ // What kind of data are we mutating?
switch (p_data.type) {
case INTER_PROPERTY:
case FOLLOW_PROPERTY:
case TARGETING_PROPERTY: {
+ // Simply set the property on the object
bool valid = false;
object->set_indexed(p_data.key, value, &valid);
return valid;
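The comments added around APPLY_EQUATION describe it as running the scalar easing equation once per component. Expanded by hand for the Vector2 branch, the pattern looks like the sketch below, where interp() is a stand-in for the scalar _run_equation(trans_type, ease_type, t, initial, delta, duration) overload (linear easing chosen only to keep the illustration self-contained, not what the engine does):

    struct Vec2 { float x, y; };

    // Stand-in for the scalar easing equation; the real one dispatches on
    // transition and ease type.
    static float interp(float t, float initial, float delta, float duration) {
        return initial + delta * (t / duration);
    }

    static Vec2 run_equation_vec2(float t, Vec2 i, Vec2 d, float duration) {
        Vec2 r;
        r.x = interp(t, i.x, d.x, duration); // APPLY_EQUATION(x)
        r.y = interp(t, i.y, d.y, duration); // APPLY_EQUATION(y)
        return r;
    }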
@@ -500,85 +569,112 @@ bool Tween::_apply_tween_value(InterpolateData &p_data, Variant &value) {
case INTER_METHOD:
case FOLLOW_METHOD:
case TARGETING_METHOD: {
+ // We want to call the method on the target object
Variant::CallError error;
+
+ // Do we have a non-nil value passed in?
if (value.get_type() != Variant::NIL) {
+ // Pass it as an argument to the function call
Variant *arg[1] = { &value };
object->call(p_data.key[0], (const Variant **)arg, 1, error);
} else {
+ // Don't pass any argument
object->call(p_data.key[0], NULL, 0, error);
}
+ // Did we get an error from the function call?
if (error.error == Variant::CallError::CALL_OK)
return true;
return false;
}
case INTER_CALLBACK:
+ // Nothing to apply for a callback
break;
};
+ // No issues found!
return true;
}
void Tween::_tween_process(float p_delta) {
-
+ // Process all of the pending commands
_process_pending_commands();
+ // If the scale is 0, make no progress on the tweens
if (speed_scale == 0)
return;
- p_delta *= speed_scale;
+ // Update the delta and whether we are pending an update
+ p_delta *= speed_scale;
pending_update++;
- // if repeat and all interpolates was finished then reset all interpolates
- bool all_finished = true;
- if (repeat) {
+ // Are we repeating the interpolations?
+ if (repeat) {
+ // For each interpolation...
+ bool repeats_finished = true;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the data from it
InterpolateData &data = E->get();
+ // Is not finished?
if (!data.finish) {
- all_finished = false;
+ // We aren't finished yet, no need to check the rest
+ repeats_finished = false;
break;
}
}
- if (all_finished)
+ // If we are all finished, we can reset all of the tweens
+ if (repeats_finished)
reset_all();
}
- all_finished = true;
+ // Are all of the tweens complete?
+ bool all_finished = true;
+
+ // For each tween we wish to interpolate...
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
+ // Get the data from it
InterpolateData &data = E->get();
+
+ // Track if we hit one that isn't finished yet
all_finished = all_finished && data.finish;
+ // Is the data not active or already finished? No need to go any further
if (!data.active || data.finish)
continue;
+ // Get the target object for this interpolation
Object *object = ObjectDB::get_instance(data.id);
if (object == NULL)
continue;
+ // Are we still delaying this tween?
bool prev_delaying = data.elapsed <= data.delay;
data.elapsed += p_delta;
if (data.elapsed < data.delay)
continue;
else if (prev_delaying) {
-
+ // We can apply the tween's value to the data and emit that the tween has started
_apply_tween_value(data, data.initial_val);
emit_signal("tween_started", object, NodePath(Vector<StringName>(), data.key, false));
}
+ // Are we at the end of the tween?
if (data.elapsed > (data.delay + data.duration)) {
-
+ // Set the elapsed time to the end and mark this one as finished
data.elapsed = data.delay + data.duration;
data.finish = true;
}
+ // Are we interpolating a callback?
if (data.type == INTER_CALLBACK) {
+ // Is the tween completed?
if (data.finish) {
+ // Are we calling this callback deferred or immediately?
if (data.call_deferred) {
-
+ // Run the deferred function callback, applying the correct number of arguments
switch (data.args) {
case 0:
object->call_deferred(data.key[0]);
@@ -600,6 +696,7 @@ void Tween::_tween_process(float p_delta) {
break;
}
} else {
+ // Call the function directly with the arguments
Variant::CallError error;
Variant *arg[5] = {
&data.arg[0],
@@ -612,23 +709,35 @@ void Tween::_tween_process(float p_delta) {
}
}
} else {
+ // We can apply the value directly
Variant result = _run_equation(data);
_apply_tween_value(data, result);
+
+ // Emit that the tween has taken a step
emit_signal("tween_step", object, NodePath(Vector<StringName>(), data.key, false), data.elapsed, result);
}
+ // Is the tween now finished?
if (data.finish) {
+ // Set it to the final value directly
_apply_tween_value(data, data.final_val);
+
+ // Mark the tween as completed and emit the signal
data.elapsed = 0;
emit_signal("tween_completed", object, NodePath(Vector<StringName>(), data.key, false));
- // not repeat mode, remove completed action
+
+ // If we are not repeating the tween, remove it
if (!repeat)
call_deferred("_remove_by_uid", data.uid);
- } else if (!repeat)
+ } else if (!repeat) {
+ // Check whether all tweens are finished
all_finished = all_finished && data.finish;
+ }
}
+ // One less update left to go
pending_update--;
+ // If all tweens are completed, we no longer need to be active
if (all_finished) {
set_active(false);
emit_signal("tween_all_completed");
@@ -636,76 +745,75 @@ void Tween::_tween_process(float p_delta) {
}
void Tween::set_tween_process_mode(TweenProcessMode p_mode) {
-
tween_process_mode = p_mode;
}
Tween::TweenProcessMode Tween::get_tween_process_mode() const {
-
return tween_process_mode;
}
bool Tween::is_active() const {
-
return is_processing_internal() || is_physics_processing_internal();
}
void Tween::set_active(bool p_active) {
-
+ // Do nothing if it's the same active mode that we currently are
if (is_active() == p_active)
return;
+ // Depending on physics or idle, set processing
switch (tween_process_mode) {
-
case TWEEN_PROCESS_IDLE: set_process_internal(p_active); break;
case TWEEN_PROCESS_PHYSICS: set_physics_process_internal(p_active); break;
}
}
bool Tween::is_repeat() const {
-
return repeat;
}
void Tween::set_repeat(bool p_repeat) {
-
repeat = p_repeat;
}
void Tween::set_speed_scale(float p_speed) {
-
speed_scale = p_speed;
}
float Tween::get_speed_scale() const {
-
return speed_scale;
}
bool Tween::start() {
+ // Are there any pending updates?
if (pending_update != 0) {
+ // Start the tweens after deferring
call_deferred("start");
return true;
}
+ // We want to be activated
set_active(true);
return true;
}
bool Tween::reset(Object *p_object, StringName p_key) {
-
+ // Find all interpolations that use the same object and target string
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the target object
InterpolateData &data = E->get();
Object *object = ObjectDB::get_instance(data.id);
if (object == NULL)
continue;
+ // Do we have the correct object and key?
if (object == p_object && (data.concatenated_key == p_key || p_key == "")) {
-
+ // Reset the tween to the initial state
data.elapsed = 0;
data.finish = false;
+
+ // Also apply the initial state if there isn't a delay
if (data.delay == 0)
_apply_tween_value(data, data.initial_val);
}
@@ -715,13 +823,15 @@ bool Tween::reset(Object *p_object, StringName p_key) {
}
bool Tween::reset_all() {
-
+ // Go through all interpolations
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the target data and set it back to the initial state
InterpolateData &data = E->get();
data.elapsed = 0;
data.finish = false;
+
+ // If there isn't a delay, apply the value to the object
if (data.delay == 0)
_apply_tween_value(data, data.initial_val);
}
@@ -730,15 +840,19 @@ bool Tween::reset_all() {
}
bool Tween::stop(Object *p_object, StringName p_key) {
-
+ // Find the tween that has the given target object and string key
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
+ // Get the object the tween is targeting
InterpolateData &data = E->get();
Object *object = ObjectDB::get_instance(data.id);
if (object == NULL)
continue;
+
+ // Is this the correct object and does it have the given key?
if (object == p_object && (data.concatenated_key == p_key || p_key == ""))
+ // Disable the tween
data.active = false;
}
pending_update--;
@@ -746,12 +860,13 @@ bool Tween::stop(Object *p_object, StringName p_key) {
}
bool Tween::stop_all() {
-
+ // We no longer need to be active since all tweens have been stopped
set_active(false);
+ // For each interpolation...
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Simply set it inactive
InterpolateData &data = E->get();
data.active = false;
}
@@ -760,16 +875,20 @@ bool Tween::stop_all() {
}
bool Tween::resume(Object *p_object, StringName p_key) {
-
+ // We need to be activated
+ // TODO: What if no tween is found??
set_active(true);
+ // Find the tween that uses the given target object and string key
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Grab the object
InterpolateData &data = E->get();
Object *object = ObjectDB::get_instance(data.id);
if (object == NULL)
continue;
+
+ // If the object and string key match, activate it
if (object == p_object && (data.concatenated_key == p_key || p_key == ""))
data.active = true;
}
@@ -778,12 +897,14 @@ bool Tween::resume(Object *p_object, StringName p_key) {
}
bool Tween::resume_all() {
-
+ // Set ourselves active so we can process tweens
+ // TODO: What if there are no tweens? We get set to active for no reason!
set_active(true);
+ // For each interpolation...
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Simply grab it and set it to active
InterpolateData &data = E->get();
data.active = true;
}
@@ -792,35 +913,46 @@ bool Tween::resume_all() {
}
bool Tween::remove(Object *p_object, StringName p_key) {
+ // If we are still updating, call this function again later
if (pending_update != 0) {
call_deferred("remove", p_object, p_key);
return true;
}
+
+ // For each interpolation...
List<List<InterpolateData>::Element *> for_removal;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the target object
InterpolateData &data = E->get();
Object *object = ObjectDB::get_instance(data.id);
if (object == NULL)
continue;
+
+ // If the target object and string key match, queue it for removal
if (object == p_object && (data.concatenated_key == p_key || p_key == "")) {
for_removal.push_back(E);
}
}
+
+ // For each interpolation we wish to remove...
for (List<List<InterpolateData>::Element *>::Element *E = for_removal.front(); E; E = E->next()) {
+ // Erase it
interpolates.erase(E->get());
}
return true;
}
void Tween::_remove_by_uid(int uid) {
+ // If we are still updating, call this function again later
if (pending_update != 0) {
call_deferred("_remove_by_uid", uid);
return;
}
+ // Find the interpolation that matches the given UID
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
if (uid == E->get().uid) {
+ // It matches, erase it and stop looking
E->erase();
break;
}
@@ -829,49 +961,61 @@ void Tween::_remove_by_uid(int uid) {
void Tween::_push_interpolate_data(InterpolateData &p_data) {
pending_update++;
+
+ // Add the new interpolation
p_data.uid = ++uid;
interpolates.push_back(p_data);
+
pending_update--;
}
bool Tween::remove_all() {
-
+ // If we are still updating, call this function again later
if (pending_update != 0) {
call_deferred("remove_all");
return true;
}
+ // We no longer need to be active
set_active(false);
+
+ // Clear out all interpolations and reset the uid
interpolates.clear();
uid = 0;
+
return true;
}
bool Tween::seek(real_t p_time) {
-
+ // Go through each interpolation...
pending_update++;
for (List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the target data
InterpolateData &data = E->get();
+ // Update the elapsed data to be set to the target time
data.elapsed = p_time;
- if (data.elapsed < data.delay) {
+ // Are we at the end?
+ if (data.elapsed < data.delay) {
+ // There is still time left to go
data.finish = false;
continue;
} else if (data.elapsed >= (data.delay + data.duration)) {
-
- data.finish = true;
+ // We are past the end of it, set the elapsed time to the end and mark as finished
data.elapsed = (data.delay + data.duration);
+ data.finish = true;
} else {
+ // We are not finished with this interpolation yet
data.finish = false;
}
+ // If we are a callback, do nothing special
if (data.type == INTER_CALLBACK) {
continue;
}
+ // Run the equation on the data and apply the value
Variant result = _run_equation(data);
-
_apply_tween_value(data, result);
}
pending_update--;
@@ -879,13 +1023,16 @@ bool Tween::seek(real_t p_time) {
}
real_t Tween::tell() const {
-
+ // We want to grab the position of the furthest along tween
pending_update++;
real_t pos = 0;
- for (const List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
+ // For each interpolation...
+ for (const List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
+ // Get the data and figure out if its position is further along than the previous ones
const InterpolateData &data = E->get();
if (data.elapsed > pos)
+ // Save it if so
pos = data.elapsed;
}
pending_update--;
@@ -893,55 +1040,63 @@ real_t Tween::tell() const {
}
real_t Tween::get_runtime() const {
-
+ // If the tween isn't moving, it'll last forever
if (speed_scale == 0) {
return INFINITY;
}
pending_update++;
+
+ // For each interpolation...
real_t runtime = 0;
for (const List<InterpolateData>::Element *E = interpolates.front(); E; E = E->next()) {
-
+ // Get the tween data and see if its runtime is greater than the previous tweens'
const InterpolateData &data = E->get();
real_t t = data.delay + data.duration;
if (t > runtime)
+ // This is the longest running tween
runtime = t;
}
pending_update--;
+ // Adjust the runtime for the current speed scale
return runtime / speed_scale;
}
bool Tween::_calc_delta_val(const Variant &p_initial_val, const Variant &p_final_val, Variant &p_delta_val) {
+ // Get the initial, final, and delta values
const Variant &initial_val = p_initial_val;
const Variant &final_val = p_final_val;
Variant &delta_val = p_delta_val;
+ // What kind of data are we interpolating?
switch (initial_val.get_type()) {
case Variant::BOOL:
- //delta_val = p_final_val;
- delta_val = (int)p_final_val - (int)p_initial_val;
- break;
-
+ // We'll treat booleans just like integers
case Variant::INT:
+ // Compute the integer delta
delta_val = (int)final_val - (int)initial_val;
break;
case Variant::REAL:
+ // Convert to REAL and find the delta
delta_val = (real_t)final_val - (real_t)initial_val;
break;
case Variant::VECTOR2:
+ // Convert to Vectors and find the delta
delta_val = final_val.operator Vector2() - initial_val.operator Vector2();
break;
case Variant::VECTOR3:
+ // Convert to Vectors and find the delta
delta_val = final_val.operator Vector3() - initial_val.operator Vector3();
break;
case Variant::BASIS: {
+ // Build a new basis which is the delta between the initial and final values
Basis i = initial_val;
Basis f = final_val;
delta_val = Basis(f.elements[0][0] - i.elements[0][0],
@@ -956,6 +1111,7 @@ bool Tween::_calc_delta_val(const Variant &p_initial_val, const Variant &p_final
} break;
case Variant::TRANSFORM2D: {
+ // Build a new transform which is the difference between the initial and final values
Transform2D i = initial_val;
Transform2D f = final_val;
Transform2D d = Transform2D();
@@ -967,15 +1123,21 @@ bool Tween::_calc_delta_val(const Variant &p_initial_val, const Variant &p_final
d[2][1] = f.elements[2][1] - i.elements[2][1];
delta_val = d;
} break;
+
case Variant::QUAT:
+ // Convert to quaternions and find the delta
delta_val = final_val.operator Quat() - initial_val.operator Quat();
break;
+
case Variant::AABB: {
+ // Build a new AABB and use the new position and sizes to make a delta
AABB i = initial_val;
AABB f = final_val;
delta_val = AABB(f.position - i.position, f.size - i.size);
} break;
+
case Variant::TRANSFORM: {
+ // Build a new transform which is the difference between the initial and final values
Transform i = initial_val;
Transform f = final_val;
Transform d;
@@ -994,124 +1156,157 @@ bool Tween::_calc_delta_val(const Variant &p_initial_val, const Variant &p_final
delta_val = d;
} break;
+
case Variant::COLOR: {
+ // Make a new color which is the difference between each the color's RGBA attributes
Color i = initial_val;
Color f = final_val;
delta_val = Color(f.r - i.r, f.g - i.g, f.b - i.b, f.a - i.a);
} break;
default:
+ // TODO: Should move away from a 'magic string'?
ERR_PRINT("Invalid param type, except(int/real/vector2/vector/matrix/matrix32/quat/aabb/transform/color)");
return false;
};
return true;
}
-bool Tween::interpolate_property(Object *p_object, NodePath p_property, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
- if (pending_update != 0) {
- _add_pending_command("interpolate_property", p_object, p_property, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
- return true;
- }
- p_property = p_property.get_as_property_path();
-
- if (p_initial_val.get_type() == Variant::NIL) p_initial_val = p_object->get_indexed(p_property.get_subnames());
-
- // convert INT to REAL is better for interpolaters
- if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
- if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
-
- ERR_FAIL_COND_V(p_object == NULL, false);
- ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
- ERR_FAIL_COND_V(p_initial_val.get_type() != p_final_val.get_type(), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
- ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
- ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
- ERR_FAIL_COND_V(p_delay < 0, false);
+bool Tween::_build_interpolation(InterpolateType p_interpolation_type, Object *p_object, NodePath *p_property, StringName *p_method, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
- bool prop_valid = false;
- p_object->get_indexed(p_property.get_subnames(), &prop_valid);
- ERR_FAIL_COND_V(!prop_valid, false);
+ // TODO: Add initialization+implementation for remaining interpolation types
+ // TODO: Fix this method's organization to take advantage of the type
+ // Make a new interpolation data
InterpolateData data;
data.active = true;
- data.type = INTER_PROPERTY;
+ data.type = p_interpolation_type;
data.finish = false;
data.elapsed = 0;
+ // Validate and apply interpolation data
+
+ // Give it the object
+ ERR_EXPLAIN("Invalid object provided to Tween!");
+ ERR_FAIL_COND_V(p_object == NULL, false); // Is the object real
+ ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false); // Is the object a valid instance?
data.id = p_object->get_instance_id();
- data.key = p_property.get_subnames();
- data.concatenated_key = p_property.get_concatenated_subnames();
+
+ // Validate the initial and final values
+ ERR_EXPLAIN("Initial value type does not match final value type!"); // TODO: Print both types to make debugging easier
+ ERR_FAIL_COND_V(p_initial_val.get_type() != p_final_val.get_type(), false); // Do the initial and final value types match?
data.initial_val = p_initial_val;
data.final_val = p_final_val;
+
+ // Check the Duration
+ ERR_EXPLAIN("Only non-negative duration values allowed in Tweens!");
+ ERR_FAIL_COND_V(p_duration < 0, false); // Is the tween duration non-negative
data.duration = p_duration;
+
+ // Tween Delay
+ ERR_EXPLAIN("Only non-negative delay values allowed in Tweens!");
+ ERR_FAIL_COND_V(p_delay < 0, false); // Is the delay non-negative?
+ data.delay = p_delay;
+
+ // Transition type
+ ERR_EXPLAIN("Invalid transition type provided to Tween");
+ ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false); // Is the transition type valid
data.trans_type = p_trans_type;
+
+ // Easing type
+ ERR_EXPLAIN("Invalid easing type provided to Tween");
+ ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false); // Is the easing type valid
data.ease_type = p_ease_type;
- data.delay = p_delay;
+ // Is the property defined?
+ if (p_property) {
+ // Check that the object actually contains the given property
+ bool prop_valid = false;
+ p_object->get_indexed(p_property->get_subnames(), &prop_valid);
+ ERR_EXPLAIN("Tween target object has no property named: " + p_property->get_concatenated_subnames());
+ ERR_FAIL_COND_V(!prop_valid, false);
+
+ data.key = p_property->get_subnames();
+ data.concatenated_key = p_property->get_concatenated_subnames();
+ }
+
+ // Is the method defined?
+ if (p_method) {
+ // Does the object even have the requested method?
+ ERR_EXPLAIN("Tween target object has no method named: " + *p_method); // TODO: Fix this error message
+ ERR_FAIL_COND_V(!p_object->has_method(*p_method), false);
+
+ data.key.push_back(*p_method);
+ data.concatenated_key = *p_method;
+ }
+
+ // Is there not a valid delta?
if (!_calc_delta_val(data.initial_val, data.final_val, data.delta_val))
return false;
+ // Add this interpolation to the total
_push_interpolate_data(data);
return true;
}
-bool Tween::interpolate_method(Object *p_object, StringName p_method, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+bool Tween::interpolate_property(Object *p_object, NodePath p_property, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+ // If we are busy updating, call this function again later
if (pending_update != 0) {
- _add_pending_command("interpolate_method", p_object, p_method, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
+ _add_pending_command("interpolate_property", p_object, p_property, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
return true;
}
- // convert INT to REAL is better for interpolaters
- if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
- if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
- ERR_FAIL_COND_V(p_object == NULL, false);
- ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
- ERR_FAIL_COND_V(p_initial_val.get_type() != p_final_val.get_type(), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
- ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
- ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
- ERR_FAIL_COND_V(p_delay < 0, false);
+ // Get the property from the node path
+ p_property = p_property.get_as_property_path();
- ERR_EXPLAIN("Object has no method named: %s" + p_method);
- ERR_FAIL_COND_V(!p_object->has_method(p_method), false);
+ // If no initial value given, grab the initial value from the object
+ // TODO: Is this documented? This is very useful and removes a lot of clutter from tweens!
+ if (p_initial_val.get_type() == Variant::NIL) p_initial_val = p_object->get_indexed(p_property.get_subnames());
- InterpolateData data;
- data.active = true;
- data.type = INTER_METHOD;
- data.finish = false;
- data.elapsed = 0;
+ // Convert any integers into REALs as they are better for interpolation
+ if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
+ if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
- data.id = p_object->get_instance_id();
- data.key.push_back(p_method);
- data.concatenated_key = p_method;
- data.initial_val = p_initial_val;
- data.final_val = p_final_val;
- data.duration = p_duration;
- data.trans_type = p_trans_type;
- data.ease_type = p_ease_type;
- data.delay = p_delay;
+ // Build the interpolation data
+ bool result = _build_interpolation(INTER_PROPERTY, p_object, &p_property, NULL, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
+ return result;
+}
- if (!_calc_delta_val(data.initial_val, data.final_val, data.delta_val))
- return false;
+bool Tween::interpolate_method(Object *p_object, StringName p_method, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+ // If we are busy updating, call this function again later
+ if (pending_update != 0) {
+ _add_pending_command("interpolate_method", p_object, p_method, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
+ return true;
+ }
- _push_interpolate_data(data);
- return true;
+ // Convert any integers into REALs as they are better for interpolation
+ if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
+ if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
+
+ // Build the interpolation data
+ bool result = _build_interpolation(INTER_METHOD, p_object, NULL, &p_method, p_initial_val, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
+ return result;
}
bool Tween::interpolate_callback(Object *p_object, real_t p_duration, String p_callback, VARIANT_ARG_DECLARE) {
-
+ // If we are already updating, call this function again later
if (pending_update != 0) {
_add_pending_command("interpolate_callback", p_object, p_duration, p_callback, p_arg1, p_arg2, p_arg3, p_arg4, p_arg5);
return true;
}
+ // Check that the target object is valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
+
+ // Duration cannot be negative
ERR_FAIL_COND_V(p_duration < 0, false);
+ // Check whether the object even has the callback
ERR_EXPLAIN("Object has no callback named: %s" + p_callback);
ERR_FAIL_COND_V(!p_object->has_method(p_callback), false);
+ // Build a new InterpolationData
InterpolateData data;
data.active = true;
data.type = INTER_CALLBACK;
@@ -1119,12 +1314,14 @@ bool Tween::interpolate_callback(Object *p_object, real_t p_duration, String p_c
data.call_deferred = false;
data.elapsed = 0;
+ // Give the data it's configuration
data.id = p_object->get_instance_id();
data.key.push_back(p_callback);
data.concatenated_key = p_callback;
data.duration = p_duration;
data.delay = 0;
+ // Add arguments to the interpolation
int args = 0;
if (p_arg5.get_type() != Variant::NIL)
args = 5;
@@ -1146,23 +1343,30 @@ bool Tween::interpolate_callback(Object *p_object, real_t p_duration, String p_c
data.arg[3] = p_arg4;
data.arg[4] = p_arg5;
+ // Add the new interpolation
_push_interpolate_data(data);
return true;
}
bool Tween::interpolate_deferred_callback(Object *p_object, real_t p_duration, String p_callback, VARIANT_ARG_DECLARE) {
-
+ // If we are already updating, call this function again later
if (pending_update != 0) {
_add_pending_command("interpolate_deferred_callback", p_object, p_duration, p_callback, p_arg1, p_arg2, p_arg3, p_arg4, p_arg5);
return true;
}
+
+ // Check that the target object is valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
+
+ // No negative durations allowed
ERR_FAIL_COND_V(p_duration < 0, false);
+ // Confirm the callback exists on the object
ERR_EXPLAIN("Object has no callback named: %s" + p_callback);
ERR_FAIL_COND_V(!p_object->has_method(p_callback), false);
+ // Create a new InterpolateData for the callback
InterpolateData data;
data.active = true;
data.type = INTER_CALLBACK;
@@ -1170,12 +1374,14 @@ bool Tween::interpolate_deferred_callback(Object *p_object, real_t p_duration, S
data.call_deferred = true;
data.elapsed = 0;
+ // Give the data it's configuration
data.id = p_object->get_instance_id();
data.key.push_back(p_callback);
data.concatenated_key = p_callback;
data.duration = p_duration;
data.delay = 0;
+ // Collect arguments for the callback
int args = 0;
if (p_arg5.get_type() != Variant::NIL)
args = 5;
@@ -1197,32 +1403,46 @@ bool Tween::interpolate_deferred_callback(Object *p_object, real_t p_duration, S
data.arg[3] = p_arg4;
data.arg[4] = p_arg5;
+ // Add the new interpolation
_push_interpolate_data(data);
return true;
}
bool Tween::follow_property(Object *p_object, NodePath p_property, Variant p_initial_val, Object *p_target, NodePath p_target_property, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+ // If we are already updating, call this function again later
if (pending_update != 0) {
_add_pending_command("follow_property", p_object, p_property, p_initial_val, p_target, p_target_property, p_duration, p_trans_type, p_ease_type, p_delay);
return true;
}
+
+ // Get the two properties from their paths
p_property = p_property.get_as_property_path();
p_target_property = p_target_property.get_as_property_path();
+ // If no initial value is given, grab it from the source object
+ // TODO: Is this documented? It's really helpful for decluttering tweens
if (p_initial_val.get_type() == Variant::NIL) p_initial_val = p_object->get_indexed(p_property.get_subnames());
- // convert INT to REAL is better for interpolaters
+ // Convert initial INT values to REAL as they are better for interpolation
if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
+ // Confirm the source and target objects are valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
ERR_FAIL_COND_V(p_target == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_target), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
+
+ // No negative durations
+ ERR_FAIL_COND_V(p_duration < 0, false);
+
+ // Ensure transition and easing types are valid
ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
+
+ // No negative delays
ERR_FAIL_COND_V(p_delay < 0, false);
+ // Confirm the source and target objects have the desired properties
bool prop_valid = false;
p_object->get_indexed(p_property.get_subnames(), &prop_valid);
ERR_FAIL_COND_V(!prop_valid, false);
@@ -1231,16 +1451,20 @@ bool Tween::follow_property(Object *p_object, NodePath p_property, Variant p_ini
Variant target_val = p_target->get_indexed(p_target_property.get_subnames(), &target_prop_valid);
ERR_FAIL_COND_V(!target_prop_valid, false);
- // convert INT to REAL is better for interpolaters
+ // Convert target INT to REAL since it is better for interpolation
if (target_val.get_type() == Variant::INT) target_val = target_val.operator real_t();
+
+ // Verify that the target value and initial value are the same type
ERR_FAIL_COND_V(target_val.get_type() != p_initial_val.get_type(), false);
+ // Create a new InterpolateData
InterpolateData data;
data.active = true;
data.type = FOLLOW_PROPERTY;
data.finish = false;
data.elapsed = 0;
+ // Give the InterpolateData it's configuration
data.id = p_object->get_instance_id();
data.key = p_property.get_subnames();
data.concatenated_key = p_property.get_concatenated_subnames();
@@ -1252,46 +1476,59 @@ bool Tween::follow_property(Object *p_object, NodePath p_property, Variant p_ini
data.ease_type = p_ease_type;
data.delay = p_delay;
+ // Add the interpolation
_push_interpolate_data(data);
return true;
}
bool Tween::follow_method(Object *p_object, StringName p_method, Variant p_initial_val, Object *p_target, StringName p_target_method, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+ // If we are currently updating, call this function again later
if (pending_update != 0) {
_add_pending_command("follow_method", p_object, p_method, p_initial_val, p_target, p_target_method, p_duration, p_trans_type, p_ease_type, p_delay);
return true;
}
- // convert INT to REAL is better for interpolaters
+ // Convert initial INT values to REAL as they are better for interpolation
if (p_initial_val.get_type() == Variant::INT) p_initial_val = p_initial_val.operator real_t();
+ // Verify the source and target objects are valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
ERR_FAIL_COND_V(p_target == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_target), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
+
+ // No negative durations
+ ERR_FAIL_COND_V(p_duration < 0, false);
+
+ // Ensure that the transition and ease types are valid
ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
+
+ // No negative delays
ERR_FAIL_COND_V(p_delay < 0, false);
+ // Confirm both objects have the target methods
ERR_EXPLAIN("Object has no method named: %s" + p_method);
ERR_FAIL_COND_V(!p_object->has_method(p_method), false);
ERR_EXPLAIN("Target has no method named: %s" + p_target_method);
ERR_FAIL_COND_V(!p_target->has_method(p_target_method), false);
+ // Call the method to get the target value
Variant::CallError error;
Variant target_val = p_target->call(p_target_method, NULL, 0, error);
ERR_FAIL_COND_V(error.error != Variant::CallError::CALL_OK, false);
- // convert INT to REAL is better for interpolaters
+ // Convert target INT values to REAL as they are better for interpolation
if (target_val.get_type() == Variant::INT) target_val = target_val.operator real_t();
ERR_FAIL_COND_V(target_val.get_type() != p_initial_val.get_type(), false);
+ // Make the new InterpolateData for the method follow
InterpolateData data;
data.active = true;
data.type = FOLLOW_METHOD;
data.finish = false;
data.elapsed = 0;
+ // Give the data it's configuration
data.id = p_object->get_instance_id();
data.key.push_back(p_method);
data.concatenated_key = p_method;
@@ -1303,31 +1540,41 @@ bool Tween::follow_method(Object *p_object, StringName p_method, Variant p_initi
data.ease_type = p_ease_type;
data.delay = p_delay;
+ // Add the new interpolation
_push_interpolate_data(data);
return true;
}
bool Tween::targeting_property(Object *p_object, NodePath p_property, Object *p_initial, NodePath p_initial_property, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
-
+ // If we are currently updating, call this function again later
if (pending_update != 0) {
_add_pending_command("targeting_property", p_object, p_property, p_initial, p_initial_property, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
return true;
}
+ // Grab the target property and the target property
p_property = p_property.get_as_property_path();
p_initial_property = p_initial_property.get_as_property_path();
- // convert INT to REAL is better for interpolaters
+ // Convert the initial INT values to REAL as they are better for Interpolation
if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
+ // Verify both objects are valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
ERR_FAIL_COND_V(p_initial == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_initial), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
+
+ // No negative durations
+ ERR_FAIL_COND_V(p_duration < 0, false);
+
+ // Ensure transition and easing types are valid
ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
+
+ // No negative delays
ERR_FAIL_COND_V(p_delay < 0, false);
+ // Ensure the initial and target properties exist on their objects
bool prop_valid = false;
p_object->get_indexed(p_property.get_subnames(), &prop_valid);
ERR_FAIL_COND_V(!prop_valid, false);
@@ -1336,16 +1583,18 @@ bool Tween::targeting_property(Object *p_object, NodePath p_property, Object *p_
Variant initial_val = p_initial->get_indexed(p_initial_property.get_subnames(), &initial_prop_valid);
ERR_FAIL_COND_V(!initial_prop_valid, false);
- // convert INT to REAL is better for interpolaters
+ // Convert the initial INT value to REAL as it is better for interpolation
if (initial_val.get_type() == Variant::INT) initial_val = initial_val.operator real_t();
ERR_FAIL_COND_V(initial_val.get_type() != p_final_val.get_type(), false);
+ // Build the InterpolateData object
InterpolateData data;
data.active = true;
data.type = TARGETING_PROPERTY;
data.finish = false;
data.elapsed = 0;
+ // Give the data it's configuration
data.id = p_object->get_instance_id();
data.key = p_property.get_subnames();
data.concatenated_key = p_property.get_concatenated_subnames();
@@ -1358,49 +1607,64 @@ bool Tween::targeting_property(Object *p_object, NodePath p_property, Object *p_
data.ease_type = p_ease_type;
data.delay = p_delay;
+ // Ensure there is a valid delta
if (!_calc_delta_val(data.initial_val, data.final_val, data.delta_val))
return false;
+ // Add the interpolation
_push_interpolate_data(data);
return true;
}
bool Tween::targeting_method(Object *p_object, StringName p_method, Object *p_initial, StringName p_initial_method, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay) {
+ // If we are currently updating, call this function again later
if (pending_update != 0) {
_add_pending_command("targeting_method", p_object, p_method, p_initial, p_initial_method, p_final_val, p_duration, p_trans_type, p_ease_type, p_delay);
return true;
}
- // convert INT to REAL is better for interpolaters
+
+ // Convert final INT values to REAL as they are better for interpolation
if (p_final_val.get_type() == Variant::INT) p_final_val = p_final_val.operator real_t();
+ // Make sure the given objects are valid
ERR_FAIL_COND_V(p_object == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_object), false);
ERR_FAIL_COND_V(p_initial == NULL, false);
ERR_FAIL_COND_V(!ObjectDB::instance_validate(p_initial), false);
- ERR_FAIL_COND_V(p_duration <= 0, false);
+
+ // No negative durations
+ ERR_FAIL_COND_V(p_duration < 0, false);
+
+ // Ensure transition and easing types are valid
ERR_FAIL_COND_V(p_trans_type < 0 || p_trans_type >= TRANS_COUNT, false);
ERR_FAIL_COND_V(p_ease_type < 0 || p_ease_type >= EASE_COUNT, false);
+
+ // No negative delays
ERR_FAIL_COND_V(p_delay < 0, false);
+ // Make sure both objects have the given method
ERR_EXPLAIN("Object has no method named: %s" + p_method);
ERR_FAIL_COND_V(!p_object->has_method(p_method), false);
ERR_EXPLAIN("Initial Object has no method named: %s" + p_initial_method);
ERR_FAIL_COND_V(!p_initial->has_method(p_initial_method), false);
+ // Call the method to get the initial value
Variant::CallError error;
Variant initial_val = p_initial->call(p_initial_method, NULL, 0, error);
ERR_FAIL_COND_V(error.error != Variant::CallError::CALL_OK, false);
- // convert INT to REAL is better for interpolaters
+ // Convert initial INT values to REAL as they aer better for interpolation
if (initial_val.get_type() == Variant::INT) initial_val = initial_val.operator real_t();
ERR_FAIL_COND_V(initial_val.get_type() != p_final_val.get_type(), false);
+ // Build the new InterpolateData object
InterpolateData data;
data.active = true;
data.type = TARGETING_METHOD;
data.finish = false;
data.elapsed = 0;
+ // Configure the data
data.id = p_object->get_instance_id();
data.key.push_back(p_method);
data.concatenated_key = p_method;
@@ -1413,16 +1677,17 @@ bool Tween::targeting_method(Object *p_object, StringName p_method, Object *p_in
data.ease_type = p_ease_type;
data.delay = p_delay;
+ // Ensure there is a valid delta
if (!_calc_delta_val(data.initial_val, data.final_val, data.delta_val))
return false;
+ // Add the interpolation
_push_interpolate_data(data);
return true;
}
Tween::Tween() {
-
- //String autoplay;
+ // Initialize tween attributes
tween_process_mode = TWEEN_PROCESS_IDLE;
repeat = false;
speed_scale = 1;
diff --git a/scene/animation/tween.h b/scene/animation/tween.h
index 6fe3bffdbe..64ce099ecd 100644
--- a/scene/animation/tween.h
+++ b/scene/animation/tween.h
@@ -135,6 +135,7 @@ private:
void _tween_process(float p_delta);
void _remove_by_uid(int uid);
void _push_interpolate_data(InterpolateData &p_data);
+ bool _build_interpolation(InterpolateType p_interpolation_type, Object *p_object, NodePath *p_property, StringName *p_method, Variant p_initial_val, Variant p_final_val, real_t p_duration, TransitionType p_trans_type, EaseType p_ease_type, real_t p_delay);
protected:
bool _set(const StringName &p_name, const Variant &p_value);
diff --git a/scene/gui/file_dialog.cpp b/scene/gui/file_dialog.cpp
index bdb1342019..bc8dcf0e82 100644
--- a/scene/gui/file_dialog.cpp
+++ b/scene/gui/file_dialog.cpp
@@ -48,8 +48,9 @@ void FileDialog::_notification(int p_what) {
if (p_what == NOTIFICATION_ENTER_TREE) {
- refresh->set_icon(get_icon("reload"));
dir_up->set_icon(get_icon("parent_folder"));
+ refresh->set_icon(get_icon("reload"));
+ show_hidden->set_icon(get_icon("toggle_hidden"));
}
if (p_what == NOTIFICATION_POPUP_HIDE) {
@@ -393,20 +394,19 @@ void FileDialog::update_file_list() {
List<String> files;
List<String> dirs;
- bool isdir;
- bool ishidden;
- bool show_hidden = show_hidden_files;
+ bool is_dir;
+ bool is_hidden;
String item;
- while ((item = dir_access->get_next(&isdir)) != "") {
+ while ((item = dir_access->get_next(&is_dir)) != "") {
if (item == "." || item == "..")
continue;
- ishidden = dir_access->current_is_hidden();
+ is_hidden = dir_access->current_is_hidden();
- if (show_hidden || !ishidden) {
- if (!isdir)
+ if (show_hidden_files || !is_hidden) {
+ if (!is_dir)
files.push_back(item);
else
dirs.push_back(item);
@@ -873,6 +873,13 @@ FileDialog::FileDialog() {
refresh->connect("pressed", this, "_update_file_list");
hbc->add_child(refresh);
+ show_hidden = memnew(ToolButton);
+ show_hidden->set_toggle_mode(true);
+ show_hidden->set_pressed(is_showing_hidden_files());
+ show_hidden->set_tooltip(RTR("Toggle Hidden Files"));
+ show_hidden->connect("toggled", this, "set_show_hidden_files");
+ hbc->add_child(show_hidden);
+
drives = memnew(OptionButton);
hbc->add_child(drives);
drives->connect("item_selected", this, "_select_drive");
diff --git a/scene/gui/file_dialog.h b/scene/gui/file_dialog.h
index 85edac0b12..9f7ea1a2f2 100644
--- a/scene/gui/file_dialog.h
+++ b/scene/gui/file_dialog.h
@@ -90,6 +90,7 @@ private:
ToolButton *dir_up;
ToolButton *refresh;
+ ToolButton *show_hidden;
Vector<String> filters;
diff --git a/scene/gui/rich_text_label.cpp b/scene/gui/rich_text_label.cpp
index c6b6d29384..8891f679ee 100644
--- a/scene/gui/rich_text_label.cpp
+++ b/scene/gui/rich_text_label.cpp
@@ -307,6 +307,13 @@ int RichTextLabel::_process_line(ItemFrame *p_frame, const Vector2 &p_ofs, int &
switch (it->type) {
+ case ITEM_ALIGN: {
+
+ ItemAlign *align_it = static_cast<ItemAlign *>(it);
+
+ align = align_it->align;
+
+ } break;
case ITEM_TEXT: {
ItemText *text = static_cast<ItemText *>(it);
@@ -1411,9 +1418,13 @@ void RichTextLabel::_add_item(Item *p_item, bool p_enter, bool p_ensure_newline)
if (p_enter)
current = p_item;
- if (p_ensure_newline && current_frame->lines[current_frame->lines.size() - 1].from) {
- _invalidate_current_line(current_frame);
- current_frame->lines.resize(current_frame->lines.size() + 1);
+ if (p_ensure_newline) {
+ Item *from = current_frame->lines[current_frame->lines.size() - 1].from;
+ // only create a new line for Item types that generate content/layout, ignore those that represent formatting/styling
+ if (from && from->type != ITEM_FONT && from->type != ITEM_COLOR && from->type != ITEM_UNDERLINE && from->type != ITEM_STRIKETHROUGH) {
+ _invalidate_current_line(current_frame);
+ current_frame->lines.resize(current_frame->lines.size() + 1);
+ }
}
if (current_frame->lines[current_frame->lines.size() - 1].from == NULL) {
diff --git a/scene/gui/text_edit.cpp b/scene/gui/text_edit.cpp
index 6b40ecfc6b..83ce71e959 100644
--- a/scene/gui/text_edit.cpp
+++ b/scene/gui/text_edit.cpp
@@ -4078,7 +4078,7 @@ void TextEdit::cursor_set_line(int p_row, bool p_adjust_viewport, bool p_can_be_
cursor.line = p_row;
int n_col = get_char_pos_for_line(cursor.last_fit_x, p_row, p_wrap_index);
- if (is_wrap_enabled() && p_wrap_index < times_line_wraps(p_row)) {
+ if (n_col != 0 && is_wrap_enabled() && p_wrap_index < times_line_wraps(p_row)) {
Vector<String> rows = get_wrap_rows_text(p_row);
int row_end_col = 0;
for (int i = 0; i < p_wrap_index + 1; i++) {
diff --git a/scene/register_scene_types.cpp b/scene/register_scene_types.cpp
index 5e4cf9c2ee..0423fcb5f0 100644
--- a/scene/register_scene_types.cpp
+++ b/scene/register_scene_types.cpp
@@ -646,6 +646,7 @@ void register_scene_types() {
ClassDB::register_class<GradientTexture>();
ClassDB::register_class<ProxyTexture>();
ClassDB::register_class<AnimatedTexture>();
+ ClassDB::register_class<CameraTexture>();
ClassDB::register_class<CubeMap>();
ClassDB::register_virtual_class<TextureLayered>();
ClassDB::register_class<Texture3D>();
diff --git a/scene/resources/default_theme/default_theme.cpp b/scene/resources/default_theme/default_theme.cpp
index e8359e3dfe..2c5dfc375c 100644
--- a/scene/resources/default_theme/default_theme.cpp
+++ b/scene/resources/default_theme/default_theme.cpp
@@ -541,8 +541,9 @@ void fill_default_theme(Ref<Theme> &theme, const Ref<Font> &default_font, const
// File Dialog
- theme->set_icon("reload", "FileDialog", make_icon(icon_reload_png));
theme->set_icon("parent_folder", "FileDialog", make_icon(icon_parent_folder_png));
+ theme->set_icon("reload", "FileDialog", make_icon(icon_reload_png));
+ theme->set_icon("toggle_hidden", "FileDialog", make_icon(icon_visibility_png));
// Popup
diff --git a/scene/resources/default_theme/icon_visibility.png b/scene/resources/default_theme/icon_visibility.png
new file mode 100644
index 0000000000..6402571f3e
--- /dev/null
+++ b/scene/resources/default_theme/icon_visibility.png
Binary files differ
diff --git a/scene/resources/default_theme/theme_data.h b/scene/resources/default_theme/theme_data.h
index 87b8afd6a3..5e13a6625a 100644
--- a/scene/resources/default_theme/theme_data.h
+++ b/scene/resources/default_theme/theme_data.h
@@ -194,6 +194,10 @@ static const unsigned char icon_stop_png[] = {
0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x8, 0x4, 0x0, 0x0, 0x0, 0xb5, 0xfa, 0x37, 0xea, 0x0, 0x0, 0x0, 0x1e, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x63, 0x20, 0x2, 0x7c, 0x60, 0x26, 0x28, 0xf3, 0xf0, 0x3f, 0x76, 0x8, 0x94, 0xa2, 0x97, 0x82, 0x51, 0x5, 0x84, 0x23, 0x8b, 0x30, 0x0, 0x0, 0x66, 0x60, 0x11, 0xdc, 0x92, 0xb3, 0xb7, 0xe7, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82
};
+static const unsigned char icon_visibility_png[] = {
+ 0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x8, 0x3, 0x0, 0x0, 0x0, 0x28, 0x2d, 0xf, 0x53, 0x0, 0x0, 0x0, 0x3, 0x73, 0x42, 0x49, 0x54, 0x8, 0x8, 0x8, 0xdb, 0xe1, 0x4f, 0xe0, 0x0, 0x0, 0x0, 0x9, 0x70, 0x48, 0x59, 0x73, 0x0, 0x0, 0xe, 0xc4, 0x0, 0x0, 0xe, 0xc4, 0x1, 0x95, 0x2b, 0xe, 0x1b, 0x0, 0x0, 0x0, 0x19, 0x74, 0x45, 0x58, 0x74, 0x53, 0x6f, 0x66, 0x74, 0x77, 0x61, 0x72, 0x65, 0x0, 0x77, 0x77, 0x77, 0x2e, 0x69, 0x6e, 0x6b, 0x73, 0x63, 0x61, 0x70, 0x65, 0x2e, 0x6f, 0x72, 0x67, 0x9b, 0xee, 0x3c, 0x1a, 0x0, 0x0, 0x0, 0x96, 0x50, 0x4c, 0x54, 0x45, 0x0, 0x0, 0x0, 0xdf, 0xdf, 0xdf, 0xe3, 0xe3, 0xe3, 0xe6, 0xe6, 0xe6, 0xd5, 0xd5, 0xd5, 0xd8, 0xd8, 0xd8, 0xdb, 0xdb, 0xdb, 0xdd, 0xdd, 0xdd, 0xe1, 0xe1, 0xe1, 0xe3, 0xe3, 0xe3, 0xe4, 0xe4, 0xe4, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xe0, 0xe0, 0xe0, 0xe1, 0xe1, 0xe1, 0xde, 0xde, 0xde, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xdf, 0xdf, 0xdf, 0xe0, 0xe0, 0xe0, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xde, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xdf, 0xe0, 0xe0, 0xe0, 0xde, 0xde, 0xde, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe1, 0xe1, 0xe1, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe1, 0xe1, 0xe1, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xe0, 0xb7, 0x7e, 0xd, 0xb6, 0x0, 0x0, 0x0, 0x32, 0x74, 0x52, 0x4e, 0x53, 0x0, 0x8, 0x9, 0xa, 0xc, 0xd, 0xe, 0xf, 0x11, 0x12, 0x13, 0x2e, 0x2f, 0x32, 0x33, 0x36, 0x37, 0x38, 0x48, 0x49, 0x4b, 0x50, 0x53, 0x55, 0x56, 0x6c, 0x6d, 0x6e, 0x70, 0x77, 0x79, 0x7b, 0x7c, 0xc5, 0xd7, 0xd8, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe1, 0xe2, 0xe3, 0xf0, 0xf2, 0xf3, 0xf4, 0xfe, 0x5e, 0x62, 0x1a, 0x26, 0x0, 0x0, 0x0, 0x86, 0x49, 0x44, 0x41, 0x54, 0x18, 0x19, 0x8d, 0xc1, 0xb, 0x16, 0x42, 0x40, 0x0, 0x86, 0xd1, 0x2f, 0xa2, 0x77, 0x2a, 0x85, 0xde, 0x91, 0x5e, 0x33, 0x8a, 0x7f, 0xff, 0x9b, 0xcb, 0x99, 0x63, 0x1, 0xee, 0xa5, 0x9f, 0xc1, 0x3e, 0x6f, 0xea, 0xfc, 0xe0, 0xd1, 0x99, 0xdd, 0xe5, 0x94, 0xb, 0x9c, 0xf9, 0x57, 0x26, 0x9, 0x82, 0x5d, 0xa1, 0xdf, 0x92, 0x96, 0xf7, 0x94, 0x99, 0xd0, 0x1a, 0x1b, 0x7d, 0x7c, 0xe0, 0x2c, 0x25, 0x6c, 0x2b, 0x1b, 0x93, 0x4a, 0x17, 0xa0, 0x94, 0x2, 0xac, 0x64, 0x8, 0xa5, 0x12, 0x78, 0x48, 0x1, 0x56, 0x32, 0x8c, 0xa4, 0x7, 0x70, 0x93, 0x76, 0xc4, 0xd6, 0x6c, 0xc8, 0xa4, 0x2b, 0x30, 0xac, 0x54, 0x8c, 0x69, 0x4d, 0xad, 0xcc, 0x90, 0xd6, 0xba, 0x91, 0x49, 0xc3, 0x30, 0xb3, 0x6a, 0x22, 0x9c, 0xd5, 0x5b, 0xce, 0x2b, 0xa2, 0xe3, 0x9f, 0xf2, 0xba, 0xce, 0x8f, 0x3e, 0xbd, 0xfc, 0x1, 0xdb, 0xf3, 0x10, 0xc5, 0x78, 0x85, 0x14, 0x89, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82
+};
+
static const unsigned char icon_zoom_less_png[] = {
0x89, 0x50, 0x4e, 0x47, 0xd, 0xa, 0x1a, 0xa, 0x0, 0x0, 0x0, 0xd, 0x49, 0x48, 0x44, 0x52, 0x0, 0x0, 0x0, 0x10, 0x0, 0x0, 0x0, 0x10, 0x8, 0x4, 0x0, 0x0, 0x0, 0xb5, 0xfa, 0x37, 0xea, 0x0, 0x0, 0x0, 0x13, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x63, 0x18, 0x31, 0xe0, 0xc1, 0x7f, 0x3c, 0x90, 0xb0, 0x82, 0x11, 0x2, 0x0, 0xbf, 0x57, 0x36, 0x25, 0x52, 0x24, 0x7b, 0x26, 0x0, 0x0, 0x0, 0x0, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82
};
diff --git a/scene/resources/environment.cpp b/scene/resources/environment.cpp
index 52dfffda5b..7c3867beaa 100644
--- a/scene/resources/environment.cpp
+++ b/scene/resources/environment.cpp
@@ -111,6 +111,11 @@ void Environment::set_ambient_light_sky_contribution(float p_energy) {
VS::get_singleton()->environment_set_ambient_light(environment, ambient_color, ambient_energy, ambient_sky_contribution);
}
+void Environment::set_camera_feed_id(int p_camera_feed_id) {
+ camera_feed_id = p_camera_feed_id;
+ VS::get_singleton()->environment_set_camera_feed_id(environment, camera_feed_id);
+};
+
Environment::BGMode Environment::get_background() const {
return bg_mode;
@@ -165,6 +170,10 @@ float Environment::get_ambient_light_sky_contribution() const {
return ambient_sky_contribution;
}
+int Environment::get_camera_feed_id(void) const {
+
+ return camera_feed_id;
+}
void Environment::set_tonemapper(ToneMapper p_tone_mapper) {
@@ -321,6 +330,12 @@ void Environment::_validate_property(PropertyInfo &property) const {
}
}
+ if (property.name == "background_camera_feed_id") {
+ if (bg_mode != BG_CAMERA_FEED) {
+ property.usage = PROPERTY_USAGE_NOEDITOR;
+ }
+ }
+
static const char *hide_prefixes[] = {
"fog_",
"auto_exposure_",
@@ -946,6 +961,7 @@ void Environment::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_ambient_light_color", "color"), &Environment::set_ambient_light_color);
ClassDB::bind_method(D_METHOD("set_ambient_light_energy", "energy"), &Environment::set_ambient_light_energy);
ClassDB::bind_method(D_METHOD("set_ambient_light_sky_contribution", "energy"), &Environment::set_ambient_light_sky_contribution);
+ ClassDB::bind_method(D_METHOD("set_camera_feed_id", "camera_feed_id"), &Environment::set_camera_feed_id);
ClassDB::bind_method(D_METHOD("get_background"), &Environment::get_background);
ClassDB::bind_method(D_METHOD("get_sky"), &Environment::get_sky);
@@ -959,9 +975,10 @@ void Environment::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_ambient_light_color"), &Environment::get_ambient_light_color);
ClassDB::bind_method(D_METHOD("get_ambient_light_energy"), &Environment::get_ambient_light_energy);
ClassDB::bind_method(D_METHOD("get_ambient_light_sky_contribution"), &Environment::get_ambient_light_sky_contribution);
+ ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &Environment::get_camera_feed_id);
ADD_GROUP("Background", "background_");
- ADD_PROPERTY(PropertyInfo(Variant::INT, "background_mode", PROPERTY_HINT_ENUM, "Clear Color,Custom Color,Sky,Color+Sky,Canvas,Keep"), "set_background", "get_background");
+ ADD_PROPERTY(PropertyInfo(Variant::INT, "background_mode", PROPERTY_HINT_ENUM, "Clear Color,Custom Color,Sky,Color+Sky,Canvas,Keep,Camera Feed"), "set_background", "get_background");
ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "background_sky", PROPERTY_HINT_RESOURCE_TYPE, "Sky"), "set_sky", "get_sky");
ADD_PROPERTY(PropertyInfo(Variant::REAL, "background_sky_custom_fov", PROPERTY_HINT_RANGE, "0,180,0.1"), "set_sky_custom_fov", "get_sky_custom_fov");
ADD_PROPERTY(PropertyInfo(Variant::BASIS, "background_sky_orientation"), "set_sky_orientation", "get_sky_orientation");
@@ -970,6 +987,7 @@ void Environment::_bind_methods() {
ADD_PROPERTY(PropertyInfo(Variant::COLOR, "background_color"), "set_bg_color", "get_bg_color");
ADD_PROPERTY(PropertyInfo(Variant::REAL, "background_energy", PROPERTY_HINT_RANGE, "0,16,0.01"), "set_bg_energy", "get_bg_energy");
ADD_PROPERTY(PropertyInfo(Variant::INT, "background_canvas_max_layer", PROPERTY_HINT_RANGE, "-1000,1000,1"), "set_canvas_max_layer", "get_canvas_max_layer");
+ ADD_PROPERTY(PropertyInfo(Variant::INT, "background_camera_feed_id", PROPERTY_HINT_RANGE, "1,10,1"), "set_camera_feed_id", "get_camera_feed_id");
ADD_GROUP("Ambient Light", "ambient_light_");
ADD_PROPERTY(PropertyInfo(Variant::COLOR, "ambient_light_color"), "set_ambient_light_color", "get_ambient_light_color");
ADD_PROPERTY(PropertyInfo(Variant::REAL, "ambient_light_energy", PROPERTY_HINT_RANGE, "0,16,0.01"), "set_ambient_light_energy", "get_ambient_light_energy");
@@ -1265,6 +1283,7 @@ void Environment::_bind_methods() {
BIND_ENUM_CONSTANT(BG_SKY);
BIND_ENUM_CONSTANT(BG_COLOR_SKY);
BIND_ENUM_CONSTANT(BG_CANVAS);
+ BIND_ENUM_CONSTANT(BG_CAMERA_FEED);
BIND_ENUM_CONSTANT(BG_MAX);
BIND_ENUM_CONSTANT(GLOW_BLEND_MODE_ADDITIVE);
@@ -1310,6 +1329,7 @@ Environment::Environment() :
ambient_energy = 1.0;
//ambient_sky_contribution = 1.0;
set_ambient_light_sky_contribution(1.0);
+ set_camera_feed_id(1);
tone_mapper = TONE_MAPPER_LINEAR;
tonemap_exposure = 1.0;
diff --git a/scene/resources/environment.h b/scene/resources/environment.h
index a54f13a88f..acce9c09a2 100644
--- a/scene/resources/environment.h
+++ b/scene/resources/environment.h
@@ -49,6 +49,7 @@ public:
BG_COLOR_SKY,
BG_CANVAS,
BG_KEEP,
+ BG_CAMERA_FEED,
BG_MAX
};
@@ -98,6 +99,7 @@ private:
Color ambient_color;
float ambient_energy;
float ambient_sky_contribution;
+ int camera_feed_id;
ToneMapper tone_mapper;
float tonemap_exposure;
@@ -192,6 +194,7 @@ public:
void set_ambient_light_color(const Color &p_color);
void set_ambient_light_energy(float p_energy);
void set_ambient_light_sky_contribution(float p_energy);
+ void set_camera_feed_id(int p_camera_feed_id);
BGMode get_background() const;
Ref<Sky> get_sky() const;
@@ -205,6 +208,7 @@ public:
Color get_ambient_light_color() const;
float get_ambient_light_energy() const;
float get_ambient_light_sky_contribution() const;
+ int get_camera_feed_id(void) const;
void set_tonemapper(ToneMapper p_tone_mapper);
ToneMapper get_tonemapper() const;
diff --git a/scene/resources/texture.cpp b/scene/resources/texture.cpp
index 503949fd60..6e0bc43296 100644
--- a/scene/resources/texture.cpp
+++ b/scene/resources/texture.cpp
@@ -36,6 +36,7 @@
#include "core/os/os.h"
#include "mesh.h"
#include "scene/resources/bit_map.h"
+#include "servers/camera/camera_feed.h"
Size2 Texture::get_size() const {
@@ -2498,3 +2499,107 @@ String ResourceFormatLoaderTextureLayered::get_resource_type(const String &p_pat
return "TextureArray";
return "";
}
+
+void CameraTexture::_bind_methods() {
+ ClassDB::bind_method(D_METHOD("set_camera_feed_id", "feed_id"), &CameraTexture::set_camera_feed_id);
+ ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &CameraTexture::get_camera_feed_id);
+
+ ClassDB::bind_method(D_METHOD("set_which_feed", "which_feed"), &CameraTexture::set_which_feed);
+ ClassDB::bind_method(D_METHOD("get_which_feed"), &CameraTexture::get_which_feed);
+
+ ClassDB::bind_method(D_METHOD("set_camera_active", "active"), &CameraTexture::set_camera_active);
+ ClassDB::bind_method(D_METHOD("get_camera_active"), &CameraTexture::get_camera_active);
+
+ ADD_PROPERTY(PropertyInfo(Variant::INT, "camera_feed_id"), "set_camera_feed_id", "get_camera_feed_id");
+ ADD_PROPERTY(PropertyInfo(Variant::INT, "which_feed"), "set_which_feed", "get_which_feed");
+ ADD_PROPERTY(PropertyInfo(Variant::BOOL, "camera_is_active"), "set_camera_active", "get_camera_active");
+}
+
+int CameraTexture::get_width() const {
+ Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
+ if (feed.is_valid()) {
+ return feed->get_base_width();
+ } else {
+ return 0;
+ }
+}
+
+int CameraTexture::get_height() const {
+ Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
+ if (feed.is_valid()) {
+ return feed->get_base_height();
+ } else {
+ return 0;
+ }
+}
+
+bool CameraTexture::has_alpha() const {
+ return false;
+}
+
+RID CameraTexture::get_rid() const {
+ Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
+ if (feed.is_valid()) {
+ return feed->get_texture(which_feed);
+ } else {
+ return RID();
+ }
+}
+
+void CameraTexture::set_flags(uint32_t p_flags) {
+ // not supported
+}
+
+uint32_t CameraTexture::get_flags() const {
+ // not supported
+ return 0;
+}
+
+Ref<Image> CameraTexture::get_data() const {
+ // not (yet) supported
+ return Ref<Image>();
+}
+
+void CameraTexture::set_camera_feed_id(int p_new_id) {
+ camera_feed_id = p_new_id;
+ _change_notify();
+}
+
+int CameraTexture::get_camera_feed_id() const {
+ return camera_feed_id;
+}
+
+void CameraTexture::set_which_feed(CameraServer::FeedImage p_which) {
+ which_feed = p_which;
+ _change_notify();
+}
+
+CameraServer::FeedImage CameraTexture::get_which_feed() const {
+ return which_feed;
+}
+
+void CameraTexture::set_camera_active(bool p_active) {
+ Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
+ if (feed.is_valid()) {
+ feed->set_active(p_active);
+ _change_notify();
+ }
+}
+
+bool CameraTexture::get_camera_active() const {
+ Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
+ if (feed.is_valid()) {
+ return feed->is_active();
+ } else {
+ return false;
+ }
+}
+
+CameraTexture::CameraTexture() {
+ camera_feed_id = 0;
+ which_feed = CameraServer::FEED_RGBA_IMAGE;
+}
+
+CameraTexture::~CameraTexture() {
+ // nothing to do here yet
+}
diff --git a/scene/resources/texture.h b/scene/resources/texture.h
index 58287b7593..021673f072 100644
--- a/scene/resources/texture.h
+++ b/scene/resources/texture.h
@@ -39,6 +39,7 @@
#include "core/resource.h"
#include "scene/resources/curve.h"
#include "scene/resources/gradient.h"
+#include "servers/camera_server.h"
#include "servers/visual_server.h"
/**
@@ -740,4 +741,38 @@ public:
~AnimatedTexture();
};
+class CameraTexture : public Texture {
+ GDCLASS(CameraTexture, Texture)
+
+private:
+ int camera_feed_id;
+ CameraServer::FeedImage which_feed;
+
+protected:
+ static void _bind_methods();
+
+public:
+ virtual int get_width() const;
+ virtual int get_height() const;
+ virtual RID get_rid() const;
+ virtual bool has_alpha() const;
+
+ virtual void set_flags(uint32_t p_flags);
+ virtual uint32_t get_flags() const;
+
+ virtual Ref<Image> get_data() const;
+
+ void set_camera_feed_id(int p_new_id);
+ int get_camera_feed_id() const;
+
+ void set_which_feed(CameraServer::FeedImage p_which);
+ CameraServer::FeedImage get_which_feed() const;
+
+ void set_camera_active(bool p_active);
+ bool get_camera_active() const;
+
+ CameraTexture();
+ ~CameraTexture();
+};
+
#endif
diff --git a/servers/SCsub b/servers/SCsub
index f4af347fe6..34ba70b8cb 100644
--- a/servers/SCsub
+++ b/servers/SCsub
@@ -6,6 +6,7 @@ env.servers_sources = []
env.add_source_files(env.servers_sources, "*.cpp")
SConscript('arvr/SCsub')
+SConscript('camera/SCsub')
SConscript('physics/SCsub')
SConscript('physics_2d/SCsub')
SConscript('visual/SCsub')
diff --git a/servers/arvr/arvr_interface.cpp b/servers/arvr/arvr_interface.cpp
index 686ad0ba9b..e1b7611354 100644
--- a/servers/arvr/arvr_interface.cpp
+++ b/servers/arvr/arvr_interface.cpp
@@ -56,6 +56,7 @@ void ARVRInterface::_bind_methods() {
// but we do have properties specific to AR....
ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &ARVRInterface::get_anchor_detection_is_enabled);
ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &ARVRInterface::set_anchor_detection_is_enabled);
+ ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &ARVRInterface::get_camera_feed_id);
ADD_GROUP("AR", "ar_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "ar_is_anchor_detection_enabled"), "set_anchor_detection_is_enabled", "get_anchor_detection_is_enabled");
@@ -136,3 +137,9 @@ bool ARVRInterface::get_anchor_detection_is_enabled() const {
void ARVRInterface::set_anchor_detection_is_enabled(bool p_enable){
// don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc.
};
+
+int ARVRInterface::get_camera_feed_id() {
+ // don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc.
+
+ return 0;
+};
diff --git a/servers/arvr/arvr_interface.h b/servers/arvr/arvr_interface.h
index 9ea59a3961..ffafa4fcf5 100644
--- a/servers/arvr/arvr_interface.h
+++ b/servers/arvr/arvr_interface.h
@@ -101,6 +101,7 @@ public:
/** specific to AR **/
virtual bool get_anchor_detection_is_enabled() const;
virtual void set_anchor_detection_is_enabled(bool p_enable);
+ virtual int get_camera_feed_id();
/** rendering and internal **/
diff --git a/servers/camera/SCsub b/servers/camera/SCsub
new file mode 100644
index 0000000000..ccc76e823f
--- /dev/null
+++ b/servers/camera/SCsub
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+
+Import('env')
+
+env.add_source_files(env.servers_sources, "*.cpp")
+
+Export('env')
diff --git a/servers/camera/camera_feed.cpp b/servers/camera/camera_feed.cpp
new file mode 100644
index 0000000000..b96e9b749f
--- /dev/null
+++ b/servers/camera/camera_feed.cpp
@@ -0,0 +1,266 @@
+/*************************************************************************/
+/* camera_feed.cpp */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#include "camera_feed.h"
+#include "servers/visual_server.h"
+
+void CameraFeed::_bind_methods() {
+ // The setters prefixed with _ are only exposed so we can have feeds through GDNative!
+ // They should not be called by the end user.
+
+ ClassDB::bind_method(D_METHOD("get_id"), &CameraFeed::get_id);
+ ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name);
+ ClassDB::bind_method(D_METHOD("_set_name", "name"), &CameraFeed::set_name);
+
+ ClassDB::bind_method(D_METHOD("is_active"), &CameraFeed::is_active);
+ ClassDB::bind_method(D_METHOD("set_active", "active"), &CameraFeed::set_active);
+
+ ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position);
+ ClassDB::bind_method(D_METHOD("_set_position", "position"), &CameraFeed::set_position);
+
+ // Note, for transform some feeds may override what the user sets (such as ARKit)
+ ClassDB::bind_method(D_METHOD("get_transform"), &CameraFeed::get_transform);
+ ClassDB::bind_method(D_METHOD("set_transform", "transform"), &CameraFeed::set_transform);
+
+ ClassDB::bind_method(D_METHOD("_set_RGB_img", "rgb_img"), &CameraFeed::set_RGB_img);
+ ClassDB::bind_method(D_METHOD("_set_YCbCr_img", "ycbcr_img"), &CameraFeed::set_YCbCr_img);
+ ClassDB::bind_method(D_METHOD("_set_YCbCr_imgs", "y_img", "cbcr_img"), &CameraFeed::set_YCbCr_imgs);
+ ClassDB::bind_method(D_METHOD("_allocate_texture", "width", "height", "format", "texture_type", "data_type"), &CameraFeed::allocate_texture);
+
+ ADD_GROUP("Feed", "feed_");
+ ADD_PROPERTY(PropertyInfo(Variant::BOOL, "feed_is_active"), "set_active", "is_active");
+ ADD_PROPERTY(PropertyInfo(Variant::TRANSFORM2D, "feed_transform"), "set_transform", "get_transform");
+
+ BIND_ENUM_CONSTANT(FEED_NOIMAGE);
+ BIND_ENUM_CONSTANT(FEED_RGB);
+ BIND_ENUM_CONSTANT(FEED_YCbCr);
+ BIND_ENUM_CONSTANT(FEED_YCbCr_Sep);
+
+ BIND_ENUM_CONSTANT(FEED_UNSPECIFIED);
+ BIND_ENUM_CONSTANT(FEED_FRONT);
+ BIND_ENUM_CONSTANT(FEED_BACK);
+}
+
+int CameraFeed::get_id() const {
+ return id;
+}
+
+bool CameraFeed::is_active() const {
+ return active;
+}
+
+void CameraFeed::set_active(bool p_is_active) {
+ if (p_is_active == active) {
+ // all good
+ } else if (p_is_active) {
+ // attempt to activate this feed
+ if (activate_feed()) {
+ print_line("Activate " + name);
+ active = true;
+ }
+ } else {
+ // just deactivate it
+ deactivate_feed();
+ print_line("Deactivate " + name);
+ active = false;
+ }
+}
+
+String CameraFeed::get_name() const {
+ return name;
+}
+
+void CameraFeed::set_name(String p_name) {
+ name = p_name;
+}
+
+int CameraFeed::get_base_width() const {
+ return base_width;
+}
+
+int CameraFeed::get_base_height() const {
+ return base_height;
+}
+
+CameraFeed::FeedDataType CameraFeed::get_datatype() const {
+ return datatype;
+}
+
+CameraFeed::FeedPosition CameraFeed::get_position() const {
+ return position;
+}
+
+void CameraFeed::set_position(CameraFeed::FeedPosition p_position) {
+ position = p_position;
+}
+
+Transform2D CameraFeed::get_transform() const {
+ return transform;
+}
+
+void CameraFeed::set_transform(const Transform2D &p_transform) {
+ transform = p_transform;
+}
+
+RID CameraFeed::get_texture(CameraServer::FeedImage p_which) {
+ return texture[p_which];
+}
+
+CameraFeed::CameraFeed() {
+ // initialize our feed
+ id = CameraServer::get_singleton()->get_free_id();
+ name = "???";
+ active = false;
+ datatype = CameraFeed::FEED_RGB;
+ position = CameraFeed::FEED_UNSPECIFIED;
+ transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
+
+ // create a texture object
+ VisualServer *vs = VisualServer::get_singleton();
+ texture[CameraServer::FEED_Y_IMAGE] = vs->texture_create(); // also used for RGBA
+ texture[CameraServer::FEED_CbCr_IMAGE] = vs->texture_create();
+}
+
+CameraFeed::CameraFeed(String p_name, FeedPosition p_position) {
+ // initialize our feed
+ id = CameraServer::get_singleton()->get_free_id();
+ base_width = 0;
+ base_height = 0;
+ name = p_name;
+ active = false;
+ datatype = CameraFeed::FEED_NOIMAGE;
+ position = p_position;
+ transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
+
+ // create a texture object
+ VisualServer *vs = VisualServer::get_singleton();
+ texture[CameraServer::FEED_Y_IMAGE] = vs->texture_create(); // also used for RGBA
+ texture[CameraServer::FEED_CbCr_IMAGE] = vs->texture_create();
+}
+
+CameraFeed::~CameraFeed() {
+ // Free our textures
+ VisualServer *vs = VisualServer::get_singleton();
+ vs->free(texture[CameraServer::FEED_Y_IMAGE]);
+ vs->free(texture[CameraServer::FEED_CbCr_IMAGE]);
+}
+
+void CameraFeed::set_RGB_img(Ref<Image> p_rgb_img) {
+ if (active) {
+ VisualServer *vs = VisualServer::get_singleton();
+
+ int new_width = p_rgb_img->get_width();
+ int new_height = p_rgb_img->get_height();
+
+ if ((base_width != new_width) || (base_height != new_height)) {
+ // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
+ base_width = new_width;
+ base_height = new_height;
+
+ vs->texture_allocate(texture[CameraServer::FEED_RGBA_IMAGE], new_width, new_height, 0, Image::FORMAT_RGB8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAGS_DEFAULT);
+ }
+
+ vs->texture_set_data(texture[CameraServer::FEED_RGBA_IMAGE], p_rgb_img);
+ datatype = CameraFeed::FEED_RGB;
+ }
+}
+
+void CameraFeed::set_YCbCr_img(Ref<Image> p_ycbcr_img) {
+ if (active) {
+ VisualServer *vs = VisualServer::get_singleton();
+
+ int new_width = p_ycbcr_img->get_width();
+ int new_height = p_ycbcr_img->get_height();
+
+ if ((base_width != new_width) || (base_height != new_height)) {
+ // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
+ base_width = new_width;
+ base_height = new_height;
+
+ vs->texture_allocate(texture[CameraServer::FEED_RGBA_IMAGE], new_width, new_height, 0, Image::FORMAT_RGB8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAGS_DEFAULT);
+ }
+
+ vs->texture_set_data(texture[CameraServer::FEED_RGBA_IMAGE], p_ycbcr_img);
+ datatype = CameraFeed::FEED_YCbCr;
+ }
+}
+
+void CameraFeed::set_YCbCr_imgs(Ref<Image> p_y_img, Ref<Image> p_cbcr_img) {
+ if (active) {
+ VisualServer *vs = VisualServer::get_singleton();
+
+ ///@TODO investigate whether we can use thirdparty/misc/yuv2rgb.h here to convert our YUV data to RGB, our shader approach is potentially faster though..
+ // Wondering about including that into multiple projects, may cause issues.
+ // That said, if we convert to RGB, we could enable using texture resources again...
+
+ int new_y_width = p_y_img->get_width();
+ int new_y_height = p_y_img->get_height();
+ int new_cbcr_width = p_cbcr_img->get_width();
+ int new_cbcr_height = p_cbcr_img->get_height();
+
+ if ((base_width != new_y_width) || (base_height != new_y_height)) {
+ // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
+ base_width = new_y_width;
+ base_height = new_y_height;
+
+ vs->texture_allocate(texture[CameraServer::FEED_Y_IMAGE], new_y_width, new_y_height, 0, Image::FORMAT_R8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAG_USED_FOR_STREAMING);
+
+ ///@TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
+ vs->texture_allocate(texture[CameraServer::FEED_CbCr_IMAGE], new_cbcr_width, new_cbcr_height, 0, Image::FORMAT_RG8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAG_USED_FOR_STREAMING);
+ }
+
+ vs->texture_set_data(texture[CameraServer::FEED_Y_IMAGE], p_y_img);
+ vs->texture_set_data(texture[CameraServer::FEED_CbCr_IMAGE], p_cbcr_img);
+ datatype = CameraFeed::FEED_YCbCr_Sep;
+ }
+}
+
+void CameraFeed::allocate_texture(int p_width, int p_height, Image::Format p_format, VisualServer::TextureType p_texture_type, FeedDataType p_data_type) {
+ VisualServer *vs = VisualServer::get_singleton();
+
+ if ((base_width != p_width) || (base_height != p_height)) {
+ // We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
+ base_width = p_width;
+ base_height = p_height;
+
+ vs->texture_allocate(texture[0], p_width, p_height, 0, p_format, p_texture_type, VS::TEXTURE_FLAGS_DEFAULT);
+ }
+
+ datatype = p_data_type;
+}
+
+bool CameraFeed::activate_feed() {
+ // nothing to do here
+ return true;
+}
+
+void CameraFeed::deactivate_feed() {
+ // nothing to do here
+}
diff --git a/servers/camera/camera_feed.h b/servers/camera/camera_feed.h
new file mode 100644
index 0000000000..aa8a78b2f6
--- /dev/null
+++ b/servers/camera/camera_feed.h
@@ -0,0 +1,115 @@
+/*************************************************************************/
+/* camera_feed.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef CAMERA_FEED_H
+#define CAMERA_FEED_H
+
+#include "core/image.h"
+#include "core/math/transform_2d.h"
+#include "servers/camera_server.h"
+#include "servers/visual_server.h"
+
+/**
+ @author Bastiaan Olij <mux213@gmail.com>
+
+ A camera feed represents a single camera on the device, exposed through the
+ CameraServer; its texture(s) can be used as the background for our environment.
+**/
+
+class CameraFeed : public Reference {
+ GDCLASS(CameraFeed, Reference);
+
+public:
+ enum FeedDataType {
+ FEED_NOIMAGE, // we don't have an image yet
+ FEED_RGB, // our texture will contain a normal RGB texture that can be used directly
+ FEED_YCbCr, // our texture will contain a YCbCr texture that needs to be converted to RGB before output
+ FEED_YCbCr_Sep // our camera is split into two textures, first plane contains Y data, second plane contains CbCr data
+ };
+
+ enum FeedPosition {
+ FEED_UNSPECIFIED, // we have no idea
+ FEED_FRONT, // this is a camera on the front of the device
+ FEED_BACK // this is a camera on the back of the device
+ };
+
+private:
+ int id; // unique id for this, for internal use in case feeds are removed
+ int base_width;
+ int base_height;
+
+protected:
+ String name; // name of our camera feed
+ FeedDataType datatype; // type of texture data stored
+ FeedPosition position; // position of camera on the device
+ Transform2D transform; // display transform
+
+ bool active; // only when active do we actually update the camera texture each frame
+ RID texture[CameraServer::FEED_IMAGES]; // texture images needed for this
+
+ static void _bind_methods();
+
+public:
+ int get_id() const;
+ bool is_active() const;
+ void set_active(bool p_is_active);
+
+ String get_name() const;
+ void set_name(String p_name);
+
+ int get_base_width() const;
+ int get_base_height() const;
+
+ FeedPosition get_position() const;
+ void set_position(FeedPosition p_position);
+
+ Transform2D get_transform() const;
+ void set_transform(const Transform2D &p_transform);
+
+ RID get_texture(CameraServer::FeedImage p_which);
+
+ CameraFeed();
+ CameraFeed(String p_name, FeedPosition p_position = CameraFeed::FEED_UNSPECIFIED);
+ virtual ~CameraFeed();
+
+ FeedDataType get_datatype() const;
+ void set_RGB_img(Ref<Image> p_rgb_img);
+ void set_YCbCr_img(Ref<Image> p_ycbcr_img);
+ void set_YCbCr_imgs(Ref<Image> p_y_img, Ref<Image> p_cbcr_img);
+ void allocate_texture(int p_width, int p_height, Image::Format p_format, VisualServer::TextureType p_texture_type, FeedDataType p_data_type);
+
+ virtual bool activate_feed();
+ virtual void deactivate_feed();
+};
+
+VARIANT_ENUM_CAST(CameraFeed::FeedDataType);
+VARIANT_ENUM_CAST(CameraFeed::FeedPosition);
+
+#endif /* !CAMERA_FEED_H */
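Publishing a feed is then just a matter of constructing one and handing it to the CameraServer singleton declared in the next file. A sketch using the constructor above; register_front_camera and the feed name are made up for the example:

    // Typically called from a platform driver when it discovers a camera;
    // the server emits "camera_feed_added" with the new feed's id.
    void register_front_camera() {
        Ref<CameraFeed> feed = memnew(CameraFeed("Front camera", CameraFeed::FEED_FRONT));
        CameraServer::get_singleton()->add_feed(feed);
    }

remove_feed() is the counterpart when the hardware disappears: it emits camera_feed_removed and the feed is destroyed once it is no longer referenced.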
diff --git a/servers/camera_server.cpp b/servers/camera_server.cpp
new file mode 100644
index 0000000000..f52d3decae
--- /dev/null
+++ b/servers/camera_server.cpp
@@ -0,0 +1,169 @@
+/*************************************************************************/
+/* camera_server.cpp */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#include "camera_server.h"
+#include "servers/camera/camera_feed.h"
+#include "visual_server.h"
+
+////////////////////////////////////////////////////////
+// CameraServer
+
+void CameraServer::_bind_methods() {
+ ClassDB::bind_method(D_METHOD("get_feed", "index"), &CameraServer::get_feed);
+ ClassDB::bind_method(D_METHOD("get_feed_count"), &CameraServer::get_feed_count);
+ ClassDB::bind_method(D_METHOD("feeds"), &CameraServer::get_feeds);
+
+ ClassDB::bind_method(D_METHOD("add_feed", "feed"), &CameraServer::add_feed);
+ ClassDB::bind_method(D_METHOD("remove_feed", "feed"), &CameraServer::remove_feed);
+
+ ADD_SIGNAL(MethodInfo("camera_feed_added", PropertyInfo(Variant::INT, "id")));
+ ADD_SIGNAL(MethodInfo("camera_feed_removed", PropertyInfo(Variant::INT, "id")));
+
+ BIND_ENUM_CONSTANT(FEED_RGBA_IMAGE);
+ BIND_ENUM_CONSTANT(FEED_YCbCr_IMAGE);
+ BIND_ENUM_CONSTANT(FEED_Y_IMAGE);
+ BIND_ENUM_CONSTANT(FEED_CbCr_IMAGE);
+};
+
+CameraServer *CameraServer::singleton = NULL;
+
+CameraServer *CameraServer::get_singleton() {
+ return singleton;
+};
+
+int CameraServer::get_free_id() {
+ bool id_exists = true;
+ int newid = 0;
+
+ // find a free id
+ while (id_exists) {
+ newid++;
+ id_exists = false;
+ for (int i = 0; i < feeds.size() && !id_exists; i++) {
+ if (feeds[i]->get_id() == newid) {
+ id_exists = true;
+ };
+ };
+ };
+
+ return newid;
+};
+
+int CameraServer::get_feed_index(int p_id) {
+ for (int i = 0; i < feeds.size(); i++) {
+ if (feeds[i]->get_id() == p_id) {
+ return i;
+ };
+ };
+
+ return -1;
+};
+
+Ref<CameraFeed> CameraServer::get_feed_by_id(int p_id) {
+ int index = get_feed_index(p_id);
+
+ if (index == -1) {
+ return NULL;
+ } else {
+ return feeds[index];
+ }
+};
+
+void CameraServer::add_feed(const Ref<CameraFeed> &p_feed) {
+ // add our feed
+ feeds.push_back(p_feed);
+
+// record for debugging
+#ifdef DEBUG_ENABLED
+ print_line("Registered camera " + p_feed->get_name() + " with id " + itos(p_feed->get_id()) + " position " + itos(p_feed->get_position()) + " at index " + itos(feeds.size() - 1));
+#endif
+
+ // let whoever is interested know
+ emit_signal("camera_feed_added", p_feed->get_id());
+};
+
+void CameraServer::remove_feed(const Ref<CameraFeed> &p_feed) {
+ for (int i = 0; i < feeds.size(); i++) {
+ if (feeds[i] == p_feed) {
+ int feed_id = p_feed->get_id();
+
+// record for debugging
+#ifdef DEBUG_ENABLED
+ print_line("Removed camera " + p_feed->get_name() + " with id " + itos(feed_id) + " position " + itos(p_feed->get_position()));
+#endif
+
+ // remove it from our array; if this leaves our feed unreferenced, it will be destroyed
+ feeds.remove(i);
+
+ // let whoever is interested know
+ emit_signal("camera_feed_removed", feed_id);
+ return;
+ };
+ };
+};
+
+Ref<CameraFeed> CameraServer::get_feed(int p_index) {
+ ERR_FAIL_INDEX_V(p_index, feeds.size(), NULL);
+
+ return feeds[p_index];
+};
+
+int CameraServer::get_feed_count() {
+ return feeds.size();
+};
+
+Array CameraServer::get_feeds() {
+ Array return_feeds;
+ int cc = get_feed_count();
+ return_feeds.resize(cc);
+
+ for (int i = 0; i < feeds.size(); i++) {
+ return_feeds[i] = get_feed(i);
+ };
+
+ return return_feeds;
+};
+
+RID CameraServer::feed_texture(int p_id, CameraServer::FeedImage p_texture) {
+ int index = get_feed_index(p_id);
+ ERR_FAIL_COND_V(index == -1, RID());
+
+ Ref<CameraFeed> feed = get_feed(index);
+
+ return feed->get_texture(p_texture);
+};
+
+CameraServer::CameraServer() {
+ singleton = this;
+};
+
+CameraServer::~CameraServer() {
+ singleton = NULL;
+};
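Consumers can walk the registered feeds and, for an active feed, look up the texture to sample via the id-based helpers. A short sketch against the API above; list_camera_feeds is illustrative only:

    // Print every registered feed and fetch the RGBA texture RID of active ones.
    void list_camera_feeds() {
        CameraServer *cs = CameraServer::get_singleton();
        for (int i = 0; i < cs->get_feed_count(); i++) {
            Ref<CameraFeed> feed = cs->get_feed(i);
            print_line(itos(feed->get_id()) + ": " + feed->get_name());

            if (feed->is_active()) {
                // this RID can be handed to whatever needs to sample the camera image
                RID rgba_texture = cs->feed_texture(feed->get_id(), CameraServer::FEED_RGBA_IMAGE);
            }
        }
    }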
diff --git a/servers/camera_server.h b/servers/camera_server.h
new file mode 100644
index 0000000000..c7716a3cff
--- /dev/null
+++ b/servers/camera_server.h
@@ -0,0 +1,96 @@
+/*************************************************************************/
+/* camera_server.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* http://www.godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef CAMERA_SERVER_H
+#define CAMERA_SERVER_H
+
+#include "core/object.h"
+#include "core/os/thread_safe.h"
+#include "core/reference.h"
+#include "core/rid.h"
+#include "core/variant.h"
+
+/**
+ @author Bastiaan Olij <mux213@gmail.com>
+
+ The camera server is a singleton object that gives access to the various
+ camera feeds that can be used as the background for our environment.
+**/
+
+class CameraFeed;
+
+class CameraServer : public Object {
+ GDCLASS(CameraServer, Object);
+ _THREAD_SAFE_CLASS_
+
+public:
+ enum FeedImage {
+ FEED_RGBA_IMAGE = 0,
+ FEED_YCbCr_IMAGE = 0,
+ FEED_Y_IMAGE = 0,
+ FEED_CbCr_IMAGE = 1,
+ FEED_IMAGES = 2
+ };
+
+private:
+protected:
+ Vector<Ref<CameraFeed> > feeds;
+
+ static CameraServer *singleton;
+
+ static void _bind_methods();
+
+public:
+ static CameraServer *get_singleton();
+
+ // Right now we identify our feed by its ID when it's used in the background.
+ // We may change this later to rely purely on CameraFeed objects or on names.
+ int get_free_id();
+ int get_feed_index(int p_id);
+ Ref<CameraFeed> get_feed_by_id(int p_id);
+
+ // add and remove feeds
+ void add_feed(const Ref<CameraFeed> &p_feed);
+ void remove_feed(const Ref<CameraFeed> &p_feed);
+
+ // get our feeds
+ Ref<CameraFeed> get_feed(int p_idx);
+ int get_feed_count();
+ Array get_feeds();
+
+ RID feed_texture(int p_id, FeedImage p_texture);
+
+ CameraServer();
+ ~CameraServer();
+};
+
+VARIANT_ENUM_CAST(CameraServer::FeedImage);
+
+#endif /* CAMERA_SERVER_H */
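Note that FEED_RGBA_IMAGE, FEED_YCbCr_IMAGE and FEED_Y_IMAGE all alias texture slot 0; only the two-plane FEED_YCbCr_Sep layout also uses slot 1. A renderer choosing which textures to bind from a feed's data type would therefore look roughly like the sketch below, where bind_camera_texture() stands in for whatever the rasterizer uses to bind a texture unit:

    // Sketch only: selects texture slots based on the feed's data type.
    void bind_feed_textures(Ref<CameraFeed> feed) {
        switch (feed->get_datatype()) {
            case CameraFeed::FEED_RGB:
            case CameraFeed::FEED_YCbCr:
                // single texture in slot 0; RGB is sampled as-is, YCbCr is converted in shader
                bind_camera_texture(feed->get_texture(CameraServer::FEED_RGBA_IMAGE));
                break;
            case CameraFeed::FEED_YCbCr_Sep:
                // two planes: Y in slot 0, interleaved CbCr in slot 1
                bind_camera_texture(feed->get_texture(CameraServer::FEED_Y_IMAGE));
                bind_camera_texture(feed->get_texture(CameraServer::FEED_CbCr_IMAGE));
                break;
            default:
                // FEED_NOIMAGE: nothing to bind yet
                break;
        }
    }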
diff --git a/servers/register_server_types.cpp b/servers/register_server_types.cpp
index f3394019f5..f7cec6a378 100644
--- a/servers/register_server_types.cpp
+++ b/servers/register_server_types.cpp
@@ -54,6 +54,8 @@
#include "audio/effects/audio_effect_stereo_enhance.h"
#include "audio/effects/audio_stream_generator.h"
#include "audio_server.h"
+#include "camera/camera_feed.h"
+#include "camera_server.h"
#include "core/script_debugger_remote.h"
#include "physics/physics_server_sw.h"
#include "physics_2d/physics_2d_server_sw.h"
@@ -114,6 +116,7 @@ void register_server_types() {
ClassDB::register_virtual_class<PhysicsServer>();
ClassDB::register_virtual_class<Physics2DServer>();
ClassDB::register_class<ARVRServer>();
+ ClassDB::register_class<CameraServer>();
shader_types = memnew(ShaderTypes);
@@ -169,6 +172,8 @@ void register_server_types() {
ClassDB::register_virtual_class<AudioEffectSpectrumAnalyzerInstance>();
}
+ ClassDB::register_class<CameraFeed>();
+
ClassDB::register_virtual_class<Physics2DDirectBodyState>();
ClassDB::register_virtual_class<Physics2DDirectSpaceState>();
ClassDB::register_virtual_class<Physics2DShapeQueryResult>();
@@ -208,4 +213,5 @@ void register_server_singletons() {
Engine::get_singleton()->add_singleton(Engine::Singleton("PhysicsServer", PhysicsServer::get_singleton()));
Engine::get_singleton()->add_singleton(Engine::Singleton("Physics2DServer", Physics2DServer::get_singleton()));
Engine::get_singleton()->add_singleton(Engine::Singleton("ARVRServer", ARVRServer::get_singleton()));
+ Engine::get_singleton()->add_singleton(Engine::Singleton("CameraServer", CameraServer::get_singleton()));
}
diff --git a/servers/visual/rasterizer.h b/servers/visual/rasterizer.h
index 77ab1b5548..31b468b50b 100644
--- a/servers/visual/rasterizer.h
+++ b/servers/visual/rasterizer.h
@@ -60,6 +60,7 @@ public:
virtual void environment_set_bg_energy(RID p_env, float p_energy) = 0;
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer) = 0;
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) = 0;
+ virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) = 0;
virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) = 0;
virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) = 0;
@@ -204,6 +205,7 @@ public:
virtual uint32_t texture_get_height(RID p_texture) const = 0;
virtual uint32_t texture_get_depth(RID p_texture) const = 0;
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) = 0;
+ virtual void texture_bind(RID p_texture, uint32_t p_texture_no) = 0;
virtual void texture_set_path(RID p_texture, const String &p_path) = 0;
virtual String texture_get_path(RID p_texture) const = 0;
diff --git a/servers/visual/visual_server_raster.h b/servers/visual/visual_server_raster.h
index 81010c32d3..f37d651dee 100644
--- a/servers/visual/visual_server_raster.h
+++ b/servers/visual/visual_server_raster.h
@@ -159,6 +159,7 @@ public:
BIND1RC(uint32_t, texture_get_height, RID)
BIND1RC(uint32_t, texture_get_depth, RID)
BIND4(texture_set_size_override, RID, int, int, int)
+ BIND2(texture_bind, RID, uint32_t)
BIND3(texture_set_detect_3d_callback, RID, TextureDetectCallback, void *)
BIND3(texture_set_detect_srgb_callback, RID, TextureDetectCallback, void *)
@@ -503,6 +504,7 @@ public:
BIND2(environment_set_bg_energy, RID, float)
BIND2(environment_set_canvas_max_layer, RID, int)
BIND4(environment_set_ambient_light, RID, const Color &, float, float)
+ BIND2(environment_set_camera_feed_id, RID, int)
BIND7(environment_set_ssr, RID, bool, int, float, float, float, bool)
BIND13(environment_set_ssao, RID, bool, float, float, float, float, float, float, float, const Color &, EnvironmentSSAOQuality, EnvironmentSSAOBlur, float)
diff --git a/servers/visual/visual_server_wrap_mt.h b/servers/visual/visual_server_wrap_mt.h
index d6da914c37..24e50eb99e 100644
--- a/servers/visual/visual_server_wrap_mt.h
+++ b/servers/visual/visual_server_wrap_mt.h
@@ -95,6 +95,7 @@ public:
FUNC1RC(uint32_t, texture_get_height, RID)
FUNC1RC(uint32_t, texture_get_depth, RID)
FUNC4(texture_set_size_override, RID, int, int, int)
+ FUNC2(texture_bind, RID, uint32_t)
FUNC3(texture_set_detect_3d_callback, RID, TextureDetectCallback, void *)
FUNC3(texture_set_detect_srgb_callback, RID, TextureDetectCallback, void *)
@@ -430,6 +431,7 @@ public:
FUNC2(environment_set_bg_energy, RID, float)
FUNC2(environment_set_canvas_max_layer, RID, int)
FUNC4(environment_set_ambient_light, RID, const Color &, float, float)
+ FUNC2(environment_set_camera_feed_id, RID, int)
FUNC7(environment_set_ssr, RID, bool, int, float, float, float, bool)
FUNC13(environment_set_ssao, RID, bool, float, float, float, float, float, float, float, const Color &, EnvironmentSSAOQuality, EnvironmentSSAOBlur, float)
diff --git a/servers/visual_server.cpp b/servers/visual_server.cpp
index 27288768eb..60be63fd24 100644
--- a/servers/visual_server.cpp
+++ b/servers/visual_server.cpp
@@ -1676,6 +1676,7 @@ void VisualServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("texture_set_path", "texture", "path"), &VisualServer::texture_set_path);
ClassDB::bind_method(D_METHOD("texture_get_path", "texture"), &VisualServer::texture_get_path);
ClassDB::bind_method(D_METHOD("texture_set_shrink_all_x2_on_set_data", "shrink"), &VisualServer::texture_set_shrink_all_x2_on_set_data);
+ ClassDB::bind_method(D_METHOD("texture_bind", "texture", "number"), &VisualServer::texture_bind);
ClassDB::bind_method(D_METHOD("texture_debug_usage"), &VisualServer::_texture_debug_usage_bind);
ClassDB::bind_method(D_METHOD("textures_keep_original", "enable"), &VisualServer::textures_keep_original);
diff --git a/servers/visual_server.h b/servers/visual_server.h
index 8be8c1c1f1..a84d395e3f 100644
--- a/servers/visual_server.h
+++ b/servers/visual_server.h
@@ -140,6 +140,7 @@ public:
virtual uint32_t texture_get_height(RID p_texture) const = 0;
virtual uint32_t texture_get_depth(RID p_texture) const = 0;
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) = 0;
+ virtual void texture_bind(RID p_texture, uint32_t p_texture_no) = 0;
virtual void texture_set_path(RID p_texture, const String &p_path) = 0;
virtual String texture_get_path(RID p_texture) const = 0;
@@ -707,6 +708,7 @@ public:
ENV_BG_COLOR_SKY,
ENV_BG_CANVAS,
ENV_BG_KEEP,
+ ENV_BG_CAMERA_FEED,
ENV_BG_MAX
};
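Together with environment_set_camera_feed_id below, the new background mode lets an environment show a live camera feed. A sketch of the two calls involved; use_feed_as_background is just for illustration:

    // `p_env` is an existing environment RID, `p_feed` a registered camera feed.
    void use_feed_as_background(RID p_env, const Ref<CameraFeed> &p_feed) {
        VisualServer *vs = VisualServer::get_singleton();
        vs->environment_set_background(p_env, VS::ENV_BG_CAMERA_FEED);
        vs->environment_set_camera_feed_id(p_env, p_feed->get_id());
    }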
@@ -718,6 +720,7 @@ public:
virtual void environment_set_bg_energy(RID p_env, float p_energy) = 0;
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer) = 0;
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) = 0;
+ virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) = 0;
//set default SSAO options
//set default SSR options