diff --git a/.github/workflows/ci_4_x.yml b/.github/workflows/ci_4_x.yml
new file mode 100644
index 0000000..22b2559
--- /dev/null
+++ b/.github/workflows/ci_4_x.yml
@@ -0,0 +1,41 @@
+name: Continuous integration 4.x
+on: [push, pull_request]
+
+jobs:
+  build:
+    name: Build (macOS)
+    runs-on: "macos-latest"
+
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          submodules: recursive
+
+      - name: Set up Python 3.x
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.x'
+          architecture: 'x64'
+
+      - name: Configure Python
+        run: |
+          python -c "import sys; print(sys.version)"
+          python -m pip install scons
+          python --version
+          scons --version
+
+      - name: Generate Headers
+        run: |
+          ./scripts/generate_headers.sh 4.0 || true
+
+      - name: Compile Plugins
+        run: |
+          ./scripts/release_xcframework.sh 4.0
+          ls -l bin/release
+
+      - uses: actions/upload-artifact@v4
+        with:
+          name: plugins
+          path: bin/release/*
+          retention-days: 4
+          if-no-files-found: error
diff --git a/godot b/godot
index 991bb6a..99d66b4 160000
--- a/godot
+++ b/godot
@@ -1 +1 @@
-Subproject commit 991bb6ac74ac8c09d7683041b50a8ced3a2defb1
+Subproject commit 99d66b4b22b505e5a9fd95bf3e4c7aa932680684
diff --git a/plugins/arkit/arkit_anchor_mesh.h b/plugins/arkit/arkit_anchor_mesh.h
new file mode 100644
index 0000000..42161fb
--- /dev/null
+++ b/plugins/arkit/arkit_anchor_mesh.h
@@ -0,0 +1,59 @@
+/*************************************************************************/
+/* arkit_anchor_mesh.h */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#ifndef ARKIT_ANCHOR_MESH_H
+#define ARKIT_ANCHOR_MESH_H
+
+#include "core/os/os.h"
+#include "core/version.h"
+#include "scene/resources/surface_tool.h"
+
+#include "servers/xr/xr_interface.h"
+#include "servers/xr/xr_positional_tracker.h"
+
+class ARKitAnchorMesh : public XRPositionalTracker {
+	GDCLASS(ARKitAnchorMesh, XRPositionalTracker);
+	_THREAD_SAFE_CLASS_
+
+private:
+	Ref<Mesh> mesh;
+
+protected:
+	static void _bind_methods();
+
+public:
+	void set_mesh(Ref<Mesh> mesh);
+	Ref<Mesh> get_mesh() const;
+
+	ARKitAnchorMesh();
+	~ARKitAnchorMesh();
+};
+
+#endif /* !ARKIT_ANCHOR_MESH_H */
\ No newline at end of file
diff --git a/plugins/arkit/arkit_anchor_mesh.mm b/plugins/arkit/arkit_anchor_mesh.mm
new file mode 100644
index 0000000..9f8eb81
--- /dev/null
+++ b/plugins/arkit/arkit_anchor_mesh.mm
@@ -0,0 +1,76 @@
+/*************************************************************************/
+/* arkit_anchor_mesh.mm */
+/*************************************************************************/
+/* This file is part of: */
+/* GODOT ENGINE */
+/* https://godotengine.org */
+/*************************************************************************/
+/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
+/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
+/* */
+/* Permission is hereby granted, free of charge, to any person obtaining */
+/* a copy of this software and associated documentation files (the */
+/* "Software"), to deal in the Software without restriction, including */
+/* without limitation the rights to use, copy, modify, merge, publish, */
+/* distribute, sublicense, and/or sell copies of the Software, and to */
+/* permit persons to whom the Software is furnished to do so, subject to */
+/* the following conditions: */
+/* */
+/* The above copyright notice and this permission notice shall be */
+/* included in all copies or substantial portions of the Software. */
+/* */
+/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
+/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
+/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
+/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
+/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
+/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
+/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
+/*************************************************************************/
+
+#include "core/os/os.h"
+#include "core/version.h"
+#include "scene/resources/surface_tool.h"
+
+#include "core/input/input.h"
+#include "servers/rendering/rendering_server_globals.h"
+
+#define GODOT_FOCUS_IN_NOTIFICATION DisplayServer::WINDOW_EVENT_FOCUS_IN
+#define GODOT_FOCUS_OUT_NOTIFICATION DisplayServer::WINDOW_EVENT_FOCUS_OUT
+
+#define GODOT_MAKE_THREAD_SAFE ;
+
+#define GODOT_AR_STATE_NOT_TRACKING XRInterface::XR_NOT_TRACKING
+#define GODOT_AR_STATE_NORMAL_TRACKING XRInterface::XR_NORMAL_TRACKING
+#define GODOT_AR_STATE_EXCESSIVE_MOTION XRInterface::XR_EXCESSIVE_MOTION
+#define GODOT_AR_STATE_INSUFFICIENT_FEATURES XRInterface::XR_INSUFFICIENT_FEATURES
+#define GODOT_AR_STATE_UNKNOWN_TRACKING XRInterface::XR_UNKNOWN_TRACKING
+
+#import
+#import
+
+#include
+
+#include "arkit_anchor_mesh.h"
+
+void ARKitAnchorMesh::set_mesh(Ref<Mesh> p_mesh) {
+	mesh = p_mesh;
+}
+
+Ref<Mesh> ARKitAnchorMesh::get_mesh() const {
+	return mesh;
+}
+
+void ARKitAnchorMesh::_bind_methods() {
+	ClassDB::bind_method(D_METHOD("set_mesh", "mesh"), &ARKitAnchorMesh::set_mesh);
+	ClassDB::bind_method(D_METHOD("get_mesh"), &ARKitAnchorMesh::get_mesh);
+	ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "mesh", PROPERTY_HINT_RESOURCE_TYPE, "Mesh"), "set_mesh", "get_mesh");
+}
+
+ARKitAnchorMesh::ARKitAnchorMesh(){
+	mesh = NULL;
+}
+
+ARKitAnchorMesh::~ARKitAnchorMesh(){
+
+}
\ No newline at end of file
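For context, a minimal sketch (not part of this diff) of how a Godot 4 module could read the plane geometry back off one of these anchor trackers once the session delegate has committed it through set_mesh(). The tracker name is supplied by the caller here; the actual names assigned to anchor trackers depend on how the interface registers them.

```cpp
// Hedged sketch: fetch an ARKit anchor tracker from the XRServer and read its mesh.
#include "servers/xr_server.h"

#include "arkit_anchor_mesh.h"

Ref<Mesh> get_anchor_plane_mesh(const StringName &p_tracker_name) {
	XRServer *xr_server = XRServer::get_singleton();
	ERR_FAIL_NULL_V(xr_server, Ref<Mesh>());

	Ref<XRPositionalTracker> tracker = xr_server->get_tracker(p_tracker_name);
	ARKitAnchorMesh *anchor = Object::cast_to<ARKitAnchorMesh>(tracker.ptr());
	if (anchor) {
		// Surface committed by the ARKit session delegate during plane detection.
		return anchor->get_mesh();
	}
	return Ref<Mesh>();
}
```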
diff --git a/plugins/arkit/arkit_interface.h b/plugins/arkit/arkit_interface.h
index 3745c97..11ec366 100644
--- a/plugins/arkit/arkit_interface.h
+++ b/plugins/arkit/arkit_interface.h
@@ -36,9 +36,10 @@
 #if VERSION_MAJOR == 4
 #include "servers/xr/xr_interface.h"
 #include "servers/xr/xr_positional_tracker.h"
+#include "arkit_anchor_mesh.h"
 
 typedef XRInterface GodotBaseARInterface;
-typedef XRPositionalTracker GodotARTracker;
+typedef ARKitAnchorMesh GodotARTracker;
 
 typedef Vector<uint8_t> GodotUInt8Vector;
@@ -82,9 +83,11 @@ class ARKitInterface : public GodotBaseARInterface {
 	bool light_estimation_is_enabled;
 	real_t ambient_intensity;
 	real_t ambient_color_temperature;
+	real_t exposure_offset;
 
-	Transform transform;
-	CameraMatrix projection;
+	Ref<XRPositionalTracker> m_head;
+	Transform3D transform;
+	Projection projection;
 	float eye_height, z_near, z_far;
 
 	Ref<CameraFeed> feed;
@@ -92,6 +95,10 @@
 	size_t image_height[2];
 	GodotUInt8Vector img_data[2];
 
+#if VERSION_MAJOR == 4
+	XRInterface::TrackingStatus tracking_state;
+#endif
+
 	struct anchor_map {
 		GodotARTracker *tracker;
 		unsigned char uuid[16];
@@ -111,37 +118,45 @@ public:
 	void start_session();
 	void stop_session();
-
-	bool get_anchor_detection_is_enabled() const GODOT_ARKIT_OVERRIDE;
-	void set_anchor_detection_is_enabled(bool p_enable) GODOT_ARKIT_OVERRIDE;
-	virtual int get_camera_feed_id() GODOT_ARKIT_OVERRIDE;
-
+
 	bool get_light_estimation_is_enabled() const;
 	void set_light_estimation_is_enabled(bool p_enable);
 
 	real_t get_ambient_intensity() const;
 	real_t get_ambient_color_temperature() const;
+	real_t get_exposure_offset() const;
 
 	/* while Godot has its own raycast logic this takes ARKits camera into account and hits on any ARAnchor */
 	Array raycast(Vector2 p_screen_coord);
 
-	virtual void notification(int p_what) GODOT_ARKIT_OVERRIDE;
-
 	virtual StringName get_name() const GODOT_ARKIT_OVERRIDE;
-	virtual int get_capabilities() const GODOT_ARKIT_OVERRIDE;
+	virtual uint32_t get_capabilities() const GODOT_ARKIT_OVERRIDE;
 
 	virtual bool is_initialized() const GODOT_ARKIT_OVERRIDE;
 	virtual bool initialize() GODOT_ARKIT_OVERRIDE;
 	virtual void uninitialize() GODOT_ARKIT_OVERRIDE;
+	virtual Dictionary get_system_info() GODOT_ARKIT_OVERRIDE;
+
+	/** input and output **/
+	virtual TrackingStatus get_tracking_status() const GODOT_ARKIT_OVERRIDE { return tracking_state; };
+
+	/** specific to AR **/
+	virtual bool get_anchor_detection_is_enabled() const GODOT_ARKIT_OVERRIDE;
+	virtual void set_anchor_detection_is_enabled(bool p_enable) GODOT_ARKIT_OVERRIDE;
+	virtual int get_camera_feed_id() GODOT_ARKIT_OVERRIDE;
 
-	virtual Size2 get_render_targetsize() GODOT_ARKIT_OVERRIDE;
-	virtual bool is_stereo() GODOT_ARKIT_OVERRIDE;
-	virtual Transform get_transform_for_eye(GodotBaseARInterface::Eyes p_eye, const Transform &p_cam_transform) GODOT_ARKIT_OVERRIDE;
-	virtual CameraMatrix get_projection_for_eye(GodotBaseARInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) GODOT_ARKIT_OVERRIDE;
-	virtual void commit_for_eye(GodotBaseARInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) GODOT_ARKIT_OVERRIDE;
+	/** rendering and internal **/
+	virtual Transform3D get_camera_transform() GODOT_ARKIT_OVERRIDE;
 
 	virtual void process() GODOT_ARKIT_OVERRIDE;
 
+	virtual Size2 get_render_target_size() GODOT_ARKIT_OVERRIDE;
+	virtual uint32_t get_view_count() GODOT_ARKIT_OVERRIDE;
+	virtual Transform3D get_transform_for_view(uint32_t p_view, const Transform3D &p_cam_transform) GODOT_ARKIT_OVERRIDE;
+	virtual Projection get_projection_for_view(uint32_t p_view, double p_aspect, double p_z_near, double p_z_far) GODOT_ARKIT_OVERRIDE;
+
+	virtual Vector<BlitToScreen> post_draw_viewport(RID p_render_target, const Rect2 &p_screen_rect) GODOT_ARKIT_OVERRIDE;
+
 	// called by delegate (void * because C++ and Obj-C don't always mix, should really change all platform/ios/*.cpp files to .mm)
 	void _add_or_update_anchor(GodotARAnchor *p_anchor);
 	void _remove_anchor(GodotARAnchor *p_anchor);
diff --git a/plugins/arkit/arkit_interface.mm b/plugins/arkit/arkit_interface.mm
index d7a1472..cef0653 100644
--- a/plugins/arkit/arkit_interface.mm
+++ b/plugins/arkit/arkit_interface.mm
@@ -69,6 +69,7 @@
 #include
 
+#include "arkit_anchor_mesh.h"
 #include "arkit_interface.h"
 #include "arkit_session_delegate.h"
@@ -94,12 +95,14 @@
 	configuration.lightEstimationEnabled = light_estimation_is_enabled;
 
 	if (plane_detection_is_enabled) {
+		print_line("Starting plane detection");
 		if (@available(iOS 11.3, *)) {
 			configuration.planeDetection = ARPlaneDetectionVertical | ARPlaneDetectionHorizontal;
 		} else {
 			configuration.planeDetection = ARPlaneDetectionHorizontal;
 		}
 	} else {
+		print_line("Plane detection is disabled");
 		configuration.planeDetection = 0;
 	}
@@ -129,25 +132,6 @@
 	}
 }
 
-void ARKitInterface::notification(int p_what) {
-	// TODO, this is not being called, need to find out why, possibly because this is not a node.
-	// in that case we need to find a way to get these notifications!
- switch (p_what) { - case GODOT_FOCUS_IN_NOTIFICATION: { - print_line("Focus in"); - - start_session(); - }; break; - case GODOT_FOCUS_OUT_NOTIFICATION: { - print_line("Focus out"); - - stop_session(); - }; break; - default: - break; - } -} - bool ARKitInterface::get_anchor_detection_is_enabled() const { return plane_detection_is_enabled; } @@ -194,11 +178,15 @@ return ambient_color_temperature; } +real_t ARKitInterface::get_exposure_offset() const { + return exposure_offset; +} + StringName ARKitInterface::get_name() const { return "ARKit"; } -int ARKitInterface::get_capabilities() const { +uint32_t ARKitInterface::get_capabilities() const { #if VERSION_MAJOR == 4 return ARKitInterface::XR_MONO + ARKitInterface::XR_AR; #else @@ -218,22 +206,39 @@ point.x = p_screen_coord.x / screen_size.x; point.y = p_screen_coord.y / screen_size.y; + UIInterfaceOrientation orientation = UIInterfaceOrientationUnknown; + + if (@available(iOS 13, *)) { + orientation = [UIApplication sharedApplication].delegate.window.windowScene.interfaceOrientation; + } else { + orientation = [[UIApplication sharedApplication] statusBarOrientation]; + } + + // This transform takes a point from image space to screen space + CGAffineTransform affine_transform = [ar_session.currentFrame displayTransformForOrientation:orientation viewportSize:CGSizeMake(screen_size.width, screen_size.height)]; + + // Invert the transformation, as hitTest expects the point to be in image space + affine_transform = CGAffineTransformInvert(affine_transform); + + // Transform the point to image space + point = CGPointApplyAffineTransform(point, affine_transform); + ///@TODO maybe give more options here, for now we're taking just ARAchors into account that were found during plane detection keeping their size into account NSArray *results = [ar_session.currentFrame hitTest:point types:ARHitTestResultTypeExistingPlaneUsingExtent]; for (ARHitTestResult *result in results) { - Transform transform; + Transform3D transform; matrix_float4x4 m44 = result.worldTransform; - transform.basis.elements[0].x = m44.columns[0][0]; - transform.basis.elements[1].x = m44.columns[0][1]; - transform.basis.elements[2].x = m44.columns[0][2]; - transform.basis.elements[0].y = m44.columns[1][0]; - transform.basis.elements[1].y = m44.columns[1][1]; - transform.basis.elements[2].y = m44.columns[1][2]; - transform.basis.elements[0].z = m44.columns[2][0]; - transform.basis.elements[1].z = m44.columns[2][1]; - transform.basis.elements[2].z = m44.columns[2][2]; + transform.basis.rows[0].x = m44.columns[0][0]; + transform.basis.rows[1].x = m44.columns[0][1]; + transform.basis.rows[2].x = m44.columns[0][2]; + transform.basis.rows[0].y = m44.columns[1][0]; + transform.basis.rows[1].y = m44.columns[1][1]; + transform.basis.rows[2].y = m44.columns[1][2]; + transform.basis.rows[0].z = m44.columns[2][0]; + transform.basis.rows[1].z = m44.columns[2][1]; + transform.basis.rows[2].z = m44.columns[2][2]; transform.origin.x = m44.columns[3][0]; transform.origin.y = m44.columns[3][1]; transform.origin.z = m44.columns[3][2]; @@ -257,14 +262,17 @@ ClassDB::bind_method(D_METHOD("get_ambient_intensity"), &ARKitInterface::get_ambient_intensity); ClassDB::bind_method(D_METHOD("get_ambient_color_temperature"), &ARKitInterface::get_ambient_color_temperature); + ClassDB::bind_method(D_METHOD("get_exposure_offset"), &ARKitInterface::get_exposure_offset); ClassDB::bind_method(D_METHOD("raycast", "screen_coord"), &ARKitInterface::raycast); } +#if VERSION_MAJOR != 4 bool ARKitInterface::is_stereo() { // 
this is a mono device... return false; } +#endif bool ARKitInterface::is_initialized() const { return initialized; @@ -300,14 +308,18 @@ ar_session.delegate = ar_delegate; // reset our transform - transform = Transform(); + transform = Transform3D(); // make this our primary interface ar_server->set_primary_interface(this); // make sure we have our feed setup if (feed.is_null()) { - feed.instance(); +#if VERSION_MAJOR == 4 + feed.instantiate(); +#else + feed.instance(); +#endif feed->set_name("ARKit"); CameraServer *cs = CameraServer::get_singleton(); @@ -323,6 +335,13 @@ // Start our session... start_session(); } + + // The camera operates as a head and we need to create a tracker for that + m_head.instantiate(); + m_head->set_tracker_type(XRServer::TRACKER_HEAD); + m_head->set_tracker_name("head"); + m_head->set_tracker_desc("AR Device"); + ar_server->add_tracker(m_head); return true; } else { @@ -339,7 +358,7 @@ #endif if (ar_server != NULL) { // no longer our primary interface - ar_server->clear_primary_interface_if(this); + ar_server->set_primary_interface(nullptr); } if (feed.is_valid()) { @@ -352,6 +371,11 @@ remove_all_anchors(); + if (m_head.is_valid()) { + ar_server->remove_tracker(m_head); + m_head.unref(); + } + if (@available(iOS 11.0, *)) { ar_session = nil; } @@ -362,7 +386,12 @@ } } -Size2 ARKitInterface::get_render_targetsize() { +Dictionary ARKitInterface::get_system_info() { + Dictionary dict; + return dict; +} + +Size2 ARKitInterface::get_render_target_size() { GODOT_MAKE_THREAD_SAFE #if VERSION_MAJOR == 4 @@ -374,10 +403,18 @@ return target_size; } -Transform ARKitInterface::get_transform_for_eye(GodotBaseARInterface::Eyes p_eye, const Transform &p_cam_transform) { +uint32_t ARKitInterface::get_view_count() { + return 1; +} + +Transform3D ARKitInterface::get_camera_transform() { + return transform; +} + +Transform3D ARKitInterface::get_transform_for_view(uint32_t p_view, const Transform3D &p_cam_transform) { GODOT_MAKE_THREAD_SAFE - Transform transform_for_eye; + Transform3D transform_for_view; #if VERSION_MAJOR == 4 XRServer *ar_server = XRServer::get_singleton(); @@ -385,25 +422,25 @@ ARVRServer *ar_server = ARVRServer::get_singleton(); #endif - ERR_FAIL_NULL_V(ar_server, transform_for_eye); + ERR_FAIL_NULL_V(ar_server, transform_for_view); if (initialized) { float world_scale = ar_server->get_world_scale(); // just scale our origin point of our transform, note that we really shouldn't be using world_scale in ARKit but.... - transform_for_eye = transform; - transform_for_eye.origin *= world_scale; + transform_for_view = transform; + transform_for_view.origin *= world_scale; - transform_for_eye = p_cam_transform * ar_server->get_reference_frame() * transform_for_eye; + transform_for_view = p_cam_transform * ar_server->get_reference_frame() * transform_for_view; } else { // huh? well just return what we got.... - transform_for_eye = p_cam_transform; + transform_for_view = p_cam_transform; } - return transform_for_eye; + return transform_for_view; } -CameraMatrix ARKitInterface::get_projection_for_eye(GodotBaseARInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) { +Projection ARKitInterface::get_projection_for_view(uint32_t p_view, double p_aspect, double p_z_near, double p_z_far) { // Remember our near and far, it will be used in process when we obtain our projection from our ARKit session. 
 	z_near = p_z_near;
 	z_far = p_z_far;
@@ -411,30 +448,52 @@
 	return projection;
 }
 
-void ARKitInterface::commit_for_eye(GodotBaseARInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) {
+Vector<BlitToScreen> ARKitInterface::post_draw_viewport(RID p_render_target, const Rect2 &p_screen_rect) {
 	GODOT_MAKE_THREAD_SAFE
 
+	Vector<BlitToScreen> blit_to_screen;
+
 	// We must have a valid render target
-	ERR_FAIL_COND(!p_render_target.is_valid());
+	ERR_FAIL_COND_V(!p_render_target.is_valid(), blit_to_screen);
 
 	// Because we are rendering to our device we must use our main viewport!
-	ERR_FAIL_COND(p_screen_rect == Rect2());
-
-#if VERSION_MAJOR == 4
-#else
-	// get the size of our screen
-	Rect2 screen_rect = p_screen_rect;
-
-	// screen_rect.position.x += screen_rect.size.x;
-	// screen_rect.size.x = -screen_rect.size.x;
-	// screen_rect.position.y += screen_rect.size.y;
-	// screen_rect.size.y = -screen_rect.size.y;
+	ERR_FAIL_COND_V(p_screen_rect == Rect2(), blit_to_screen);
+
+	Rect2 src_rect(0.0f, 0.0f, 1.0f, 1.0f);
+	Rect2 dst_rect = p_screen_rect;
+	//Vector2 eye_center(((-intraocular_dist / 2.0) + (display_width / 4.0)) / (display_width / 2.0), 0.0);
+
+	//add_blit(p_render_target, src_rect, dst_rect);
+
+	if (p_screen_rect != Rect2()) {
+		BlitToScreen blit;
+
+		blit.render_target = p_render_target;
+		blit.multi_view.use_layer = true;
+		blit.multi_view.layer = 0;
+		blit.lens_distortion.apply = false;
+
+		Size2 render_size = get_render_target_size();
+		Rect2 dst_rect = p_screen_rect;
+		float new_height = dst_rect.size.x * (render_size.y / render_size.x);
+		if (new_height > dst_rect.size.y) {
+			dst_rect.position.y = (0.5 * dst_rect.size.y) - (0.5 * new_height);
+			dst_rect.size.y = new_height;
+		} else {
+			float new_width = dst_rect.size.y * (render_size.x / render_size.y);
+
+			dst_rect.position.x = (0.5 * dst_rect.size.x) - (0.5 * new_width);
+			dst_rect.size.x = new_width;
+		}
 
-	VSG::rasterizer->set_current_render_target(RID());
-	VSG::rasterizer->blit_render_target_to_screen(p_render_target, screen_rect, 0);
-#endif
+		blit.dst_rect = dst_rect;
+		blit_to_screen.push_back(blit);
+	}
+
+	return blit_to_screen;
 }
+
 GodotARTracker *ARKitInterface::get_anchor_for_uuid(const unsigned char *p_uuid) {
 	if (anchors == NULL) {
 		num_anchors = 0;
@@ -457,7 +516,7 @@
 	}
 
 #if VERSION_MAJOR == 4
-	XRPositionalTracker *new_tracker = memnew(XRPositionalTracker);
+	ARKitAnchorMesh *new_tracker = memnew(ARKitAnchorMesh);
 	new_tracker->set_tracker_type(XRServer::TRACKER_ANCHOR);
 #else
 	ARVRPositionalTracker *new_tracker = memnew(ARVRPositionalTracker);
@@ -498,8 +557,6 @@
 #else
 		ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker);
 #endif
-		memdelete(anchors[i].tracker);
-
 		// bring remaining forward
 		for (unsigned int j = i + 1; j < num_anchors; j++) {
 			anchors[j - 1] = anchors[j];
@@ -522,7 +579,6 @@
 #else
 			ARVRServer::get_singleton()->remove_tracker(anchors[i].tracker);
 #endif
-			memdelete(anchors[i].tracker);
 		};
 
 		free(anchors);
@@ -542,7 +598,7 @@
 		// only process if we have a new frame
 		last_timestamp = current_frame.timestamp;
 
-// get some info about our screen and orientation
+		// get some info about our screen and orientation
#if VERSION_MAJOR == 4
 		Size2 screen_size = DisplayServer::get_singleton()->screen_get_size();
 #else
@@ -624,8 +680,13 @@
 				}
 			}
 
-			img[0].instance();
-			img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
+#if VERSION_MAJOR == 4
+			img[0].instantiate();
+			img[0]->initialize_data(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
+#else
+			img[0].instance();
+ 
img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]); +#endif } { @@ -668,29 +729,26 @@ } } - img[1].instance(); - img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]); +#if VERSION_MAJOR == 4 + img[1].instantiate(); + img[1]->initialize_data(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]); +#else + img[1].instance(); + img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]); +#endif } // set our texture... +#if VERSION_MAJOR == 4 && VERSION_MINOR >= 4 + feed->set_ycbcr_images(img[0], img[1]); +#else feed->set_YCbCr_imgs(img[0], img[1]); +#endif // now build our transform to display this as a background image that matches our camera + // this transform takes a point from image space to screen space CGAffineTransform affine_transform = [current_frame displayTransformForOrientation:orientation viewportSize:CGSizeMake(screen_size.width, screen_size.height)]; - - // we need to invert this, probably row v.s. column notation - affine_transform = CGAffineTransformInvert(affine_transform); - - if (orientation != UIInterfaceOrientationPortrait) { - affine_transform.b = -affine_transform.b; - affine_transform.d = -affine_transform.d; - affine_transform.ty = 1.0 - affine_transform.ty; - } else { - affine_transform.c = -affine_transform.c; - affine_transform.a = -affine_transform.a; - affine_transform.tx = 1.0 - affine_transform.tx; - } - + Transform2D display_transform = Transform2D( affine_transform.a, affine_transform.b, affine_transform.c, affine_transform.d, @@ -709,12 +767,17 @@ ///@TODO it's there, but not there.. what to do with this... // https://developer.apple.com/documentation/arkit/arlightestimate?language=objc - // ambient_color_temperature = current_frame.lightEstimate.ambientColorTemperature; + ambient_color_temperature = current_frame.lightEstimate.ambientColorTemperature; } // Process our camera ARCamera *camera = current_frame.camera; + // Record camera exposure + if (@available(iOS 13, *)) { + exposure_offset = camera.exposureOffset; + } + // strangely enough we have to states, rolling them up into one if (camera.trackingState == ARTrackingStateNotAvailable) { // no tracking, would be good if we black out the screen or something... 
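As the comment in the hunk above notes, ARKit reports tracking through two fields, ARCamera.trackingState and ARCamera.trackingStateReason, which the interface folds into Godot's single TrackingStatus value (the GODOT_AR_STATE_* aliases defined at the top of this file). A sketch of that mapping, for illustration only; the plugin's actual branching continues in the hunks below and may differ in detail:

```cpp
// Hedged sketch: collapse ARKit's two tracking fields into one Godot TrackingStatus.
static XRInterface::TrackingStatus map_arkit_tracking(ARTrackingState p_state, ARTrackingStateReason p_reason) {
	if (p_state == ARTrackingStateNotAvailable) {
		return GODOT_AR_STATE_NOT_TRACKING; // no tracking at all
	}
	if (p_state == ARTrackingStateLimited) {
		switch (p_reason) {
			case ARTrackingStateReasonExcessiveMotion:
				return GODOT_AR_STATE_EXCESSIVE_MOTION;
			case ARTrackingStateReasonInsufficientFeatures:
				return GODOT_AR_STATE_INSUFFICIENT_FEATURES;
			default:
				return GODOT_AR_STATE_UNKNOWN_TRACKING; // initializing, relocalizing, ...
		}
	}
	return GODOT_AR_STATE_NORMAL_TRACKING; // ARTrackingStateNormal
}
```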
@@ -733,60 +796,66 @@ // copy our current frame transform matrix_float4x4 m44 = camera.transform; if (orientation == UIInterfaceOrientationLandscapeLeft) { - transform.basis.elements[0].x = m44.columns[0][0]; - transform.basis.elements[1].x = m44.columns[0][1]; - transform.basis.elements[2].x = m44.columns[0][2]; - transform.basis.elements[0].y = m44.columns[1][0]; - transform.basis.elements[1].y = m44.columns[1][1]; - transform.basis.elements[2].y = m44.columns[1][2]; + transform.basis.rows[0].x = -m44.columns[0][0]; + transform.basis.rows[1].x = -m44.columns[0][1]; + transform.basis.rows[2].x = -m44.columns[0][2]; + transform.basis.rows[0].y = -m44.columns[1][0]; + transform.basis.rows[1].y = -m44.columns[1][1]; + transform.basis.rows[2].y = -m44.columns[1][2]; } else if (orientation == UIInterfaceOrientationPortrait) { - transform.basis.elements[0].x = m44.columns[1][0]; - transform.basis.elements[1].x = m44.columns[1][1]; - transform.basis.elements[2].x = m44.columns[1][2]; - transform.basis.elements[0].y = -m44.columns[0][0]; - transform.basis.elements[1].y = -m44.columns[0][1]; - transform.basis.elements[2].y = -m44.columns[0][2]; + transform.basis.rows[0].x = m44.columns[1][0]; + transform.basis.rows[1].x = m44.columns[1][1]; + transform.basis.rows[2].x = m44.columns[1][2]; + transform.basis.rows[0].y = -m44.columns[0][0]; + transform.basis.rows[1].y = -m44.columns[0][1]; + transform.basis.rows[2].y = -m44.columns[0][2]; } else if (orientation == UIInterfaceOrientationLandscapeRight) { - transform.basis.elements[0].x = -m44.columns[0][0]; - transform.basis.elements[1].x = -m44.columns[0][1]; - transform.basis.elements[2].x = -m44.columns[0][2]; - transform.basis.elements[0].y = -m44.columns[1][0]; - transform.basis.elements[1].y = -m44.columns[1][1]; - transform.basis.elements[2].y = -m44.columns[1][2]; + transform.basis.rows[0].x = m44.columns[0][0]; + transform.basis.rows[1].x = m44.columns[0][1]; + transform.basis.rows[2].x = m44.columns[0][2]; + transform.basis.rows[0].y = m44.columns[1][0]; + transform.basis.rows[1].y = m44.columns[1][1]; + transform.basis.rows[2].y = m44.columns[1][2]; } else if (orientation == UIInterfaceOrientationPortraitUpsideDown) { - // this may not be correct - transform.basis.elements[0].x = m44.columns[1][0]; - transform.basis.elements[1].x = m44.columns[1][1]; - transform.basis.elements[2].x = m44.columns[1][2]; - transform.basis.elements[0].y = m44.columns[0][0]; - transform.basis.elements[1].y = m44.columns[0][1]; - transform.basis.elements[2].y = m44.columns[0][2]; + transform.basis.rows[0].x = -m44.columns[1][0]; + transform.basis.rows[1].x = -m44.columns[1][1]; + transform.basis.rows[2].x = -m44.columns[1][2]; + transform.basis.rows[0].y = m44.columns[0][0]; + transform.basis.rows[1].y = m44.columns[0][1]; + transform.basis.rows[2].y = m44.columns[0][2]; } - transform.basis.elements[0].z = m44.columns[2][0]; - transform.basis.elements[1].z = m44.columns[2][1]; - transform.basis.elements[2].z = m44.columns[2][2]; + + transform.basis.rows[0].z = m44.columns[2][0]; + transform.basis.rows[1].z = m44.columns[2][1]; + transform.basis.rows[2].z = m44.columns[2][2]; transform.origin.x = m44.columns[3][0]; transform.origin.y = m44.columns[3][1]; transform.origin.z = m44.columns[3][2]; + if (m_head.is_valid()) { + // Set our head position, note in real space, reference frame and world scale is applied later + m_head->set_pose("default", transform, Vector3(), Vector3(), XRPose::XR_TRACKING_CONFIDENCE_HIGH); + } + // copy our current frame projection, 
investigate using projectionMatrixWithViewportSize:orientation:zNear:zFar: so we can set our own near and far m44 = [camera projectionMatrixForOrientation:orientation viewportSize:CGSizeMake(screen_size.width, screen_size.height) zNear:z_near zFar:z_far]; - projection.matrix[0][0] = m44.columns[0][0]; - projection.matrix[1][0] = m44.columns[1][0]; - projection.matrix[2][0] = m44.columns[2][0]; - projection.matrix[3][0] = m44.columns[3][0]; - projection.matrix[0][1] = m44.columns[0][1]; - projection.matrix[1][1] = m44.columns[1][1]; - projection.matrix[2][1] = m44.columns[2][1]; - projection.matrix[3][1] = m44.columns[3][1]; - projection.matrix[0][2] = m44.columns[0][2]; - projection.matrix[1][2] = m44.columns[1][2]; - projection.matrix[2][2] = m44.columns[2][2]; - projection.matrix[3][2] = m44.columns[3][2]; - projection.matrix[0][3] = m44.columns[0][3]; - projection.matrix[1][3] = m44.columns[1][3]; - projection.matrix[2][3] = m44.columns[2][3]; - projection.matrix[3][3] = m44.columns[3][3]; + + projection.columns[0][0] = m44.columns[0][0]; + projection.columns[1][0] = m44.columns[1][0]; + projection.columns[2][0] = m44.columns[2][0]; + projection.columns[3][0] = m44.columns[3][0]; + projection.columns[0][1] = m44.columns[0][1]; + projection.columns[1][1] = m44.columns[1][1]; + projection.columns[2][1] = m44.columns[2][1]; + projection.columns[3][1] = m44.columns[3][1]; + projection.columns[0][2] = m44.columns[0][2]; + projection.columns[1][2] = m44.columns[1][2]; + projection.columns[2][2] = m44.columns[2][2]; + projection.columns[3][2] = m44.columns[3][2]; + projection.columns[0][3] = m44.columns[0][3]; + projection.columns[1][3] = m44.columns[1][3]; + projection.columns[2][3] = m44.columns[2][3]; + projection.columns[3][3] = m44.columns[3][3]; } } } @@ -803,7 +872,7 @@ [anchor.identifier getUUIDBytes:uuid]; #if VERSION_MAJOR == 4 - XRPositionalTracker *tracker = get_anchor_for_uuid(uuid); + ARKitAnchorMesh *tracker = get_anchor_for_uuid(uuid); #else ARVRPositionalTracker *tracker = get_anchor_for_uuid(uuid); #endif @@ -818,7 +887,11 @@ if (@available(iOS 11.3, *)) { if (planeAnchor.geometry.triangleCount > 0) { Ref surftool; - surftool.instance(); +#if VERSION_MAJOR == 4 + surftool.instantiate(); +#else + surftool.instance(); +#endif surftool->begin(Mesh::PRIMITIVE_TRIANGLES); for (int j = planeAnchor.geometry.triangleCount * 3 - 1; j >= 0; j--) { @@ -836,6 +909,7 @@ } surftool->generate_normals(); + tracker->set_mesh(surftool->commit()); } else { Ref nomesh; @@ -850,17 +924,24 @@ // We may extract that in our XRAnchor/ARVRAnchor class Basis b; matrix_float4x4 m44 = anchor.transform; - b.elements[0].x = m44.columns[0][0]; - b.elements[1].x = m44.columns[0][1]; - b.elements[2].x = m44.columns[0][2]; - b.elements[0].y = m44.columns[1][0]; - b.elements[1].y = m44.columns[1][1]; - b.elements[2].y = m44.columns[1][2]; - b.elements[0].z = m44.columns[2][0]; - b.elements[1].z = m44.columns[2][1]; - b.elements[2].z = m44.columns[2][2]; + b.rows[0].x = m44.columns[0][0]; + b.rows[1].x = m44.columns[0][1]; + b.rows[2].x = m44.columns[0][2]; + b.rows[0].y = m44.columns[1][0]; + b.rows[1].y = m44.columns[1][1]; + b.rows[2].y = m44.columns[1][2]; + b.rows[0].z = m44.columns[2][0]; + b.rows[1].z = m44.columns[2][1]; + b.rows[2].z = m44.columns[2][2]; +#if VERSION_MAJOR == 4 + Transform3D pose = Transform3D(b, Vector3(m44.columns[3][0], m44.columns[3][1], m44.columns[3][2])); + tracker->set_pose("default", pose, Vector3(), Vector3()); +#else tracker->set_orientation(b); 
 	tracker->set_rw_position(Vector3(m44.columns[3][0], m44.columns[3][1], m44.columns[3][2]));
+#endif
+
+	XRServer::get_singleton()->emit_signal(SNAME("tracker_updated"), tracker->get_tracker_name(), tracker->get_tracker_type());
 		}
 	}
 }
@@ -893,6 +974,7 @@
 	num_anchors = 0;
 	ambient_intensity = 1.0;
 	ambient_color_temperature = 1.0;
+	exposure_offset = 0.0;
 	image_width[0] = 0;
 	image_width[1] = 0;
 	image_height[0] = 0;
diff --git a/plugins/arkit/arkit_module.cpp b/plugins/arkit/arkit_module.cpp
index f1f6fb8..6da421d 100644
--- a/plugins/arkit/arkit_module.cpp
+++ b/plugins/arkit/arkit_module.cpp
@@ -33,15 +33,20 @@
 #include "arkit_interface.h"
 #include "core/version.h"
 
+#include "core/object/class_db.h"
+
 void register_arkit_types() {
 	// does it make sense to register the class?
 	Ref<ARKitInterface> arkit_interface;
-	arkit_interface.instance();
 
 #if VERSION_MAJOR == 4
+	arkit_interface.instantiate();
 	XRServer::get_singleton()->add_interface(arkit_interface);
+	//GDREGISTER_CLASS(ARKitAnchorMesh);
+	ClassDB::register_class<ARKitAnchorMesh>();
 #else
+	arkit_interface.instance();
 	ARVRServer::get_singleton()->add_interface(arkit_interface);
 #endif
 }
diff --git a/scripts/generate_headers.sh b/scripts/generate_headers.sh
index 5f46d27..ba451a4 100755
--- a/scripts/generate_headers.sh
+++ b/scripts/generate_headers.sh
@@ -5,5 +5,5 @@
 then
 	./../scripts/timeout scons platform=iphone target=release_debug
 else
 	cd ./godot && \
-	./../scripts/timeout scons platform=ios target=release_debug
+	./../scripts/timeout scons platform=ios target=template_debug
 fi
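With the interface registered under the name "ARKit" (see get_name() above), a project built against this plugin can locate and start it at runtime. A minimal sketch from C++/module code, assuming the caller owns the Viewport that should render through the interface; an equivalent can be written in GDScript via XRServer.find_interface():

```cpp
// Hedged sketch: locate and enable the ARKit interface at runtime.
#include "scene/main/viewport.h"
#include "servers/xr_server.h"

void start_arkit(Viewport *p_viewport) {
	Ref<XRInterface> arkit = XRServer::get_singleton()->find_interface("ARKit");
	ERR_FAIL_COND_MSG(arkit.is_null(), "ARKit interface not registered.");

	if (arkit->initialize()) {
		// Render this viewport through the AR interface; the camera feed is
		// blitted behind the scene by post_draw_viewport().
		p_viewport->set_use_xr(true);
	}
}
```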