Diffstat (limited to 'servers/arvr')
-rw-r--r--   servers/arvr/arvr_interface.cpp            |  5
-rw-r--r--   servers/arvr/arvr_interface.h              |  1
-rw-r--r--   servers/arvr/arvr_positional_tracker.cpp   | 15
-rw-r--r--   servers/arvr/arvr_positional_tracker.h     |  7
4 files changed, 24 insertions, 4 deletions
diff --git a/servers/arvr/arvr_interface.cpp b/servers/arvr/arvr_interface.cpp
index 3e59daff6c..686ad0ba9b 100644
--- a/servers/arvr/arvr_interface.cpp
+++ b/servers/arvr/arvr_interface.cpp
@@ -123,6 +123,11 @@ ARVRInterface::ARVRInterface() {
 
 ARVRInterface::~ARVRInterface(){};
 
+// optional render to external texture which enhances performance on those platforms that require us to submit our end result into special textures.
+unsigned int ARVRInterface::get_external_texture_for_eye(ARVRInterface::Eyes p_eye) {
+	return 0;
+};
+
 /** these will only be implemented on AR interfaces, so we want dummies for VR **/
 bool ARVRInterface::get_anchor_detection_is_enabled() const {
 	return false;
diff --git a/servers/arvr/arvr_interface.h b/servers/arvr/arvr_interface.h
index 6908f3006a..8459a82388 100644
--- a/servers/arvr/arvr_interface.h
+++ b/servers/arvr/arvr_interface.h
@@ -108,6 +108,7 @@ public:
 	virtual bool is_stereo() = 0; /* returns true if this interface requires stereo rendering (for VR HMDs) or mono rendering (for mobile AR) */
 	virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eyes camera transform, also implement EYE_MONO */
 	virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) = 0; /* get each eyes projection matrix */
+	virtual unsigned int get_external_texture_for_eye(ARVRInterface::Eyes p_eye); /* if applicable return external texture to render to */
 	virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */
 
 	virtual void process() = 0;
diff --git a/servers/arvr/arvr_positional_tracker.cpp b/servers/arvr/arvr_positional_tracker.cpp
index b96e9596f3..aabe617a8a 100644
--- a/servers/arvr/arvr_positional_tracker.cpp
+++ b/servers/arvr/arvr_positional_tracker.cpp
@@ -46,6 +46,7 @@ void ARVRPositionalTracker::_bind_methods() {
 	ClassDB::bind_method(D_METHOD("get_position"), &ARVRPositionalTracker::get_position);
 	ClassDB::bind_method(D_METHOD("get_hand"), &ARVRPositionalTracker::get_hand);
 	ClassDB::bind_method(D_METHOD("get_transform", "adjust_by_reference_frame"), &ARVRPositionalTracker::get_transform);
+	ClassDB::bind_method(D_METHOD("get_mesh"), &ARVRPositionalTracker::get_mesh);
 
 	// these functions we don't want to expose to normal users but do need to be callable from GDNative
 	ClassDB::bind_method(D_METHOD("_set_type", "type"), &ARVRPositionalTracker::set_type);
@@ -53,7 +54,7 @@ void ARVRPositionalTracker::_bind_methods() {
 	ClassDB::bind_method(D_METHOD("_set_joy_id", "joy_id"), &ARVRPositionalTracker::set_joy_id);
 	ClassDB::bind_method(D_METHOD("_set_orientation", "orientation"), &ARVRPositionalTracker::set_orientation);
 	ClassDB::bind_method(D_METHOD("_set_rw_position", "rw_position"), &ARVRPositionalTracker::set_rw_position);
-
+	ClassDB::bind_method(D_METHOD("_set_mesh", "mesh"), &ARVRPositionalTracker::set_mesh);
 
 	ClassDB::bind_method(D_METHOD("get_rumble"), &ARVRPositionalTracker::get_rumble);
 	ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &ARVRPositionalTracker::set_rumble);
@@ -154,6 +155,18 @@ Vector3 ARVRPositionalTracker::get_rw_position() const {
 	return rw_position;
 };
 
+void ARVRPositionalTracker::set_mesh(const Ref<Mesh> &p_mesh) {
+	_THREAD_SAFE_METHOD_
+
+	mesh = p_mesh;
+};
+
+Ref<Mesh> ARVRPositionalTracker::get_mesh() const {
+	_THREAD_SAFE_METHOD_
+
+	return mesh;
+};
+
 ARVRPositionalTracker::TrackerHand ARVRPositionalTracker::get_hand() const {
 	return hand;
 };
diff --git a/servers/arvr/arvr_positional_tracker.h b/servers/arvr/arvr_positional_tracker.h
index 7cb9486f59..0d6a69540f 100644
--- a/servers/arvr/arvr_positional_tracker.h
+++ b/servers/arvr/arvr_positional_tracker.h
@@ -32,6 +32,7 @@
 #define ARVR_POSITIONAL_TRACKER_H
 
 #include "core/os/thread_safe.h"
+#include "scene/resources/mesh.h"
 #include "servers/arvr_server.h"
 
 /**
@@ -40,9 +41,6 @@
 	The positional tracker object as an object that represents the position and orientation of a tracked object like a controller or headset.
 	An AR/VR Interface will registered the trackers it manages with our AR/VR server and update its position and orientation.
 	This is where potentially additional AR/VR interfaces may be active as there are AR/VR SDKs that solely deal with positional tracking.
-
-	@TODO:
-	- create subclass of spatial node that uses one of our positional trackers to automatically determine its position
 */
 
 class ARVRPositionalTracker : public Object {
@@ -65,6 +63,7 @@ private:
 	Basis orientation; // our orientation
 	bool tracks_position; // do we track position?
 	Vector3 rw_position; // our position "in the real world, so without world_scale applied"
+	Ref<Mesh> mesh; // when available, a mesh that can be used to render this tracker
 	TrackerHand hand; // if known, the hand this tracker is held in
 	real_t rumble; // rumble strength, 0.0 is off, 1.0 is maximum, note that we only record here, arvr_interface is responsible for execution
 
@@ -91,6 +90,8 @@ public:
 	void set_hand(const ARVRPositionalTracker::TrackerHand p_hand);
 	real_t get_rumble() const;
 	void set_rumble(real_t p_rumble);
+	void set_mesh(const Ref<Mesh> &p_mesh);
+	Ref<Mesh> get_mesh() const;
 
 	Transform get_transform(bool p_adjust_by_reference_frame) const;
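
Editorial note (not part of the commit): the new get_external_texture_for_eye() hook lets an AR/VR driver hand the renderer a platform-owned texture to draw into directly instead of copying out of Godot's internal render target in commit_for_eye(); the base class returns 0, which keeps the existing path. A minimal sketch of how a driver-side interface might override it, where MyXRInterface and platform_swapchain_texture() are hypothetical placeholders and not part of this diff:

	// Hypothetical ARVRInterface subclass (sketch only); assumes the platform SDK
	// exposes a GL texture id per eye via a function like platform_swapchain_texture().
	class MyXRInterface : public ARVRInterface {
	public:
		virtual unsigned int get_external_texture_for_eye(ARVRInterface::Eyes p_eye) {
			// Return the platform-provided texture for this eye; returning 0 falls
			// back to Godot's internal render target, matching the base class default.
			return platform_swapchain_texture(p_eye);
		}
	};

The mesh accessors follow the same driver-facing pattern: an interface calls set_mesh() (callable from GDNative as _set_mesh) when it has a renderable model for a tracked controller, and scripts can read it back through the newly bound get_mesh().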