Merge pull request #42459 from naithar/ios-warnings-3.2

[3.2] [iOS] Fix compilation warnings and deprecated API
Rémi Verschelde, 2020-10-01 19:12:23 +02:00 (committed by GitHub)
commit dbb1df2acc
20 changed files with 306 additions and 266 deletions

View file

@ -47,4 +47,4 @@ jobs:
env:
SCONS_CACHE: ${{github.workspace}}/.scons_cache/
run: |
scons -j2 verbose=yes warnings=all werror=no platform=iphone target=release tools=no
scons -j2 verbose=yes warnings=all werror=yes platform=iphone target=release tools=no

View file

@ -319,7 +319,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
OTHER_LDFLAGS = "$linker_flags";
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
@ -358,7 +358,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
OTHER_LDFLAGS = "$linker_flags";
SDKROOT = iphoneos;
TARGETED_DEVICE_FAMILY = "1,2";
@ -378,7 +378,7 @@
CONFIGURATION_BUILD_DIR = "$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)";
DEVELOPMENT_TEAM = $team_id;
INFOPLIST_FILE = "$binary/$binary-Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
@ -408,7 +408,7 @@
CONFIGURATION_BUILD_DIR = "$(BUILD_DIR)/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)";
DEVELOPMENT_TEAM = $team_id;
INFOPLIST_FILE = "$binary/$binary-Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 9.0;
IPHONEOS_DEPLOYMENT_TARGET = 10.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",

View file

@ -42,7 +42,9 @@
#include "arkit_session_delegate.h"
// just a dirty workaround for now, declare these as globals. I'll probably encapsulate ARSession and associated logic into an mm object and change ARKitInterface to a normal cpp object that consumes it.
API_AVAILABLE(ios(11.0))
ARSession *ar_session;
ARKitSessionDelegate *ar_delegate;
NSTimeInterval last_timestamp;
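For context, annotating a declaration with API_AVAILABLE lets a file keep an iOS 11-only type (here ARSession) while the deployment target stays at 10.0; every use of it then has to sit behind an @available check, otherwise the compiler emits an unguarded-availability warning, which the werror=yes CI change above would turn into a hard error. A minimal standalone sketch of that pattern, using a hypothetical example_session global rather than the real ar_session:

#import <ARKit/ARKit.h>

API_AVAILABLE(ios(11.0))
ARSession *example_session; // hypothetical global, mirrors ar_session above

static void example_start_session() {
    if (@available(iOS 11.0, *)) {
        example_session = [ARSession new]; // guarded, so no availability warning
    }
    // Touching example_session outside the guard would trigger an
    // unguarded-availability warning (an error once werror=yes is set).
}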
@ -55,22 +57,28 @@ void ARKitInterface::start_session() {
if (initialized) {
print_line("Starting ARKit session");
Class ARWorldTrackingConfigurationClass = NSClassFromString(@"ARWorldTrackingConfiguration");
ARWorldTrackingConfiguration *configuration = [ARWorldTrackingConfigurationClass new];
if (@available(iOS 11, *)) {
Class ARWorldTrackingConfigurationClass = NSClassFromString(@"ARWorldTrackingConfiguration");
ARWorldTrackingConfiguration *configuration = [ARWorldTrackingConfigurationClass new];
configuration.lightEstimationEnabled = light_estimation_is_enabled;
if (plane_detection_is_enabled) {
configuration.planeDetection = ARPlaneDetectionVertical | ARPlaneDetectionHorizontal;
} else {
configuration.planeDetection = 0;
configuration.lightEstimationEnabled = light_estimation_is_enabled;
if (plane_detection_is_enabled) {
if (@available(iOS 11.3, *)) {
configuration.planeDetection = ARPlaneDetectionVertical | ARPlaneDetectionHorizontal;
} else {
configuration.planeDetection = ARPlaneDetectionHorizontal;
}
} else {
configuration.planeDetection = 0;
}
// make sure our camera is on
if (feed.is_valid()) {
feed->set_active(true);
}
[ar_session runWithConfiguration:configuration];
}
// make sure our camera is on
if (feed.is_valid()) {
feed->set_active(true);
}
[ar_session runWithConfiguration:configuration];
}
}
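Availability guards nest when parts of an API arrived in different releases: ARKit itself needs iOS 11, while ARPlaneDetectionVertical only exists from 11.3, so the configuration falls back to horizontal-only detection in between. A condensed sketch of that shape, reusing the hypothetical guarded example_session global from above:

if (@available(iOS 11.0, *)) {
    ARWorldTrackingConfiguration *configuration = [ARWorldTrackingConfiguration new];
    configuration.lightEstimationEnabled = YES;
    if (@available(iOS 11.3, *)) {
        // Vertical plane detection was added in iOS 11.3.
        configuration.planeDetection = ARPlaneDetectionHorizontal | ARPlaneDetectionVertical;
    } else {
        configuration.planeDetection = ARPlaneDetectionHorizontal;
    }
    [example_session runWithConfiguration:configuration];
}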
@ -84,7 +92,9 @@ void ARKitInterface::stop_session() {
feed->set_active(false);
}
[ar_session pause];
if (@available(iOS 11.0, *)) {
[ar_session pause];
}
}
}
@ -162,37 +172,41 @@ int ARKitInterface::get_capabilities() const {
}
Array ARKitInterface::raycast(Vector2 p_screen_coord) {
Array arr;
Size2 screen_size = OS::get_singleton()->get_window_size();
CGPoint point;
point.x = p_screen_coord.x / screen_size.x;
point.y = p_screen_coord.y / screen_size.y;
if (@available(iOS 11, *)) {
Array arr;
Size2 screen_size = OS::get_singleton()->get_window_size();
CGPoint point;
point.x = p_screen_coord.x / screen_size.x;
point.y = p_screen_coord.y / screen_size.y;
///@TODO maybe give more options here, for now we're taking just ARAchors into account that were found during plane detection keeping their size into account
NSArray<ARHitTestResult *> *results = [ar_session.currentFrame hittest:point types:ARHitTestResultTypeExistingPlaneUsingExtent];
///@TODO maybe give more options here, for now we're taking just ARAchors into account that were found during plane detection keeping their size into account
NSArray<ARHitTestResult *> *results = [ar_session.currentFrame hitTest:point types:ARHitTestResultTypeExistingPlaneUsingExtent];
for (ARHitTestResult *result in results) {
Transform transform;
for (ARHitTestResult *result in results) {
Transform transform;
matrix_float4x4 m44 = result.worldTransform;
transform.basis.elements[0].x = m44.columns[0][0];
transform.basis.elements[1].x = m44.columns[0][1];
transform.basis.elements[2].x = m44.columns[0][2];
transform.basis.elements[0].y = m44.columns[1][0];
transform.basis.elements[1].y = m44.columns[1][1];
transform.basis.elements[2].y = m44.columns[1][2];
transform.basis.elements[0].z = m44.columns[2][0];
transform.basis.elements[1].z = m44.columns[2][1];
transform.basis.elements[2].z = m44.columns[2][2];
transform.origin.x = m44.columns[3][0];
transform.origin.y = m44.columns[3][1];
transform.origin.z = m44.columns[3][2];
matrix_float4x4 m44 = result.worldTransform;
transform.basis.elements[0].x = m44.columns[0][0];
transform.basis.elements[1].x = m44.columns[0][1];
transform.basis.elements[2].x = m44.columns[0][2];
transform.basis.elements[0].y = m44.columns[1][0];
transform.basis.elements[1].y = m44.columns[1][1];
transform.basis.elements[2].y = m44.columns[1][2];
transform.basis.elements[0].z = m44.columns[2][0];
transform.basis.elements[1].z = m44.columns[2][1];
transform.basis.elements[2].z = m44.columns[2][2];
transform.origin.x = m44.columns[3][0];
transform.origin.y = m44.columns[3][1];
transform.origin.z = m44.columns[3][2];
/* important, NOT scaled to world_scale !! */
arr.push_back(transform);
/* important, NOT scaled to world_scale !! */
arr.push_back(transform);
}
return arr;
} else {
return Array();
}
return arr;
}
void ARKitInterface::_bind_methods() {
@ -221,51 +235,55 @@ bool ARKitInterface::initialize() {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, false);
if (!initialized) {
print_line("initializing ARKit");
if (@available(iOS 11, *)) {
if (!initialized) {
print_line("initializing ARKit");
// create our ar session and delegate
Class ARSessionClass = NSClassFromString(@"ARSession");
if (ARSessionClass == Nil) {
void *arkit_handle = dlopen("/System/Library/Frameworks/ARKit.framework/ARKit", RTLD_NOW);
if (arkit_handle) {
ARSessionClass = NSClassFromString(@"ARSession");
} else {
print_line("ARKit init failed");
return false;
// create our ar session and delegate
Class ARSessionClass = NSClassFromString(@"ARSession");
if (ARSessionClass == Nil) {
void *arkit_handle = dlopen("/System/Library/Frameworks/ARKit.framework/ARKit", RTLD_NOW);
if (arkit_handle) {
ARSessionClass = NSClassFromString(@"ARSession");
} else {
print_line("ARKit init failed");
return false;
}
}
}
ar_session = [ARSessionClass new];
ar_delegate = [ARKitSessionDelegate new];
ar_delegate.arkit_interface = this;
ar_session.delegate = ar_delegate;
ar_session = [ARSessionClass new];
ar_delegate = [ARKitSessionDelegate new];
ar_delegate.arkit_interface = this;
ar_session.delegate = ar_delegate;
// reset our transform
transform = Transform();
// reset our transform
transform = Transform();
// make this our primary interface
arvr_server->set_primary_interface(this);
// make this our primary interface
arvr_server->set_primary_interface(this);
// make sure we have our feed setup
if (feed.is_null()) {
feed.instance();
feed->set_name("ARKit");
// make sure we have our feed setup
if (feed.is_null()) {
feed.instance();
feed->set_name("ARKit");
CameraServer *cs = CameraServer::get_singleton();
if (cs != NULL) {
cs->add_feed(feed);
CameraServer *cs = CameraServer::get_singleton();
if (cs != NULL) {
cs->add_feed(feed);
}
}
feed->set_active(true);
// yeah!
initialized = true;
// Start our session...
start_session();
}
feed->set_active(true);
// yeah!
initialized = true;
// Start our session...
start_session();
return true;
} else {
return false;
}
return true;
}
void ARKitInterface::uninitialize() {
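The initialize() hunk above resolves ARSession at runtime instead of hard-linking ARKit: if NSClassFromString comes back Nil, the framework is dlopen'ed and the lookup is retried, so devices without ARKit still start. Roughly, as a hypothetical helper (the name resolve_arkit_session_class is illustrative, not from the commit):

#include <dlfcn.h>
#import <Foundation/Foundation.h>

static Class resolve_arkit_session_class() {
    Class sessionClass = NSClassFromString(@"ARSession");
    if (sessionClass == Nil) {
        // ARKit is loaded lazily from its framework path.
        void *handle = dlopen("/System/Library/Frameworks/ARKit.framework/ARKit", RTLD_NOW);
        if (handle) {
            sessionClass = NSClassFromString(@"ARSession");
        }
    }
    return sessionClass; // Nil means ARKit is unavailable on this device.
}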
@ -286,9 +304,12 @@ void ARKitInterface::uninitialize() {
remove_all_anchors();
[ar_session release];
if (@available(iOS 11.0, *)) {
[ar_session release];
ar_session = NULL;
}
[ar_delegate release];
ar_session = NULL;
ar_delegate = NULL;
initialized = false;
session_was_started = false;
@ -444,7 +465,15 @@ void ARKitInterface::process() {
// get some info about our screen and orientation
Size2 screen_size = OS::get_singleton()->get_window_size();
UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
UIInterfaceOrientation orientation = UIInterfaceOrientationUnknown;
if (@available(iOS 13, *)) {
orientation = [UIApplication sharedApplication].delegate.window.windowScene.interfaceOrientation;
#if !defined(TARGET_OS_SIMULATOR) || !TARGET_OS_SIMULATOR
} else {
orientation = [[UIApplication sharedApplication] statusBarOrientation];
#endif
}
// Grab our camera image for our backbuffer
CVPixelBufferRef pixelBuffer = current_frame.capturedImage;
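statusBarOrientation is deprecated from iOS 13, where the orientation is exposed on the window's UIWindowScene instead; the hunk above branches on the runtime version and keeps the old call only for earlier systems. The lookup in isolation (assuming the app delegate exposes its window property, as AppDelegate does in this PR):

UIInterfaceOrientation orientation = UIInterfaceOrientationUnknown;
if (@available(iOS 13.0, *)) {
    // iOS 13+: orientation belongs to the scene that owns the window.
    orientation = [UIApplication sharedApplication].delegate.window.windowScene.interfaceOrientation;
} else {
    // Pre-13 fallback; statusBarOrientation is only deprecated from iOS 13.
    orientation = [[UIApplication sharedApplication] statusBarOrientation];
}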
@ -660,67 +689,76 @@ void ARKitInterface::process() {
void ARKitInterface::_add_or_update_anchor(void *p_anchor) {
_THREAD_SAFE_METHOD_
ARAnchor *anchor = (ARAnchor *)p_anchor;
if (@available(iOS 11.0, *)) {
ARAnchor *anchor = (ARAnchor *)p_anchor;
unsigned char uuid[16];
[anchor.identifier getUUIDBytes:uuid];
unsigned char uuid[16];
[anchor.identifier getUUIDBytes:uuid];
ARVRPositionalTracker *tracker = get_anchor_for_uuid(uuid);
if (tracker != NULL) {
// lets update our mesh! (using Arjens code as is for now)
// we should also probably limit how often we do this...
ARVRPositionalTracker *tracker = get_anchor_for_uuid(uuid);
if (tracker != NULL) {
// lets update our mesh! (using Arjens code as is for now)
// we should also probably limit how often we do this...
// can we safely cast this?
ARPlaneAnchor *planeAnchor = (ARPlaneAnchor *)anchor;
// can we safely cast this?
ARPlaneAnchor *planeAnchor = (ARPlaneAnchor *)anchor;
if (planeAnchor.geometry.triangleCount > 0) {
Ref<SurfaceTool> surftool;
surftool.instance();
surftool->begin(Mesh::PRIMITIVE_TRIANGLES);
if (@available(iOS 11.3, *)) {
if (planeAnchor.geometry.triangleCount > 0) {
Ref<SurfaceTool> surftool;
surftool.instance();
surftool->begin(Mesh::PRIMITIVE_TRIANGLES);
for (int j = planeAnchor.geometry.triangleCount * 3 - 1; j >= 0; j--) {
int16_t index = planeAnchor.geometry.triangleIndices[j];
simd_float3 vrtx = planeAnchor.geometry.vertices[index];
simd_float2 textcoord = planeAnchor.geometry.textureCoordinates[index];
surftool->add_uv(Vector2(textcoord[0], textcoord[1]));
surftool->add_color(Color(0.8, 0.8, 0.8));
surftool->add_vertex(Vector3(vrtx[0], vrtx[1], vrtx[2]));
for (int j = planeAnchor.geometry.triangleCount * 3 - 1; j >= 0; j--) {
int16_t index = planeAnchor.geometry.triangleIndices[j];
simd_float3 vrtx = planeAnchor.geometry.vertices[index];
simd_float2 textcoord = planeAnchor.geometry.textureCoordinates[index];
surftool->add_uv(Vector2(textcoord[0], textcoord[1]));
surftool->add_color(Color(0.8, 0.8, 0.8));
surftool->add_vertex(Vector3(vrtx[0], vrtx[1], vrtx[2]));
}
surftool->generate_normals();
tracker->set_mesh(surftool->commit());
} else {
Ref<Mesh> nomesh;
tracker->set_mesh(nomesh);
}
} else {
Ref<Mesh> nomesh;
tracker->set_mesh(nomesh);
}
surftool->generate_normals();
tracker->set_mesh(surftool->commit());
} else {
Ref<Mesh> nomesh;
tracker->set_mesh(nomesh);
// Note, this also contains a scale factor which gives us an idea of the size of the anchor
// We may extract that in our ARVRAnchor class
Basis b;
matrix_float4x4 m44 = anchor.transform;
b.elements[0].x = m44.columns[0][0];
b.elements[1].x = m44.columns[0][1];
b.elements[2].x = m44.columns[0][2];
b.elements[0].y = m44.columns[1][0];
b.elements[1].y = m44.columns[1][1];
b.elements[2].y = m44.columns[1][2];
b.elements[0].z = m44.columns[2][0];
b.elements[1].z = m44.columns[2][1];
b.elements[2].z = m44.columns[2][2];
tracker->set_orientation(b);
tracker->set_rw_position(Vector3(m44.columns[3][0], m44.columns[3][1], m44.columns[3][2]));
}
// Note, this also contains a scale factor which gives us an idea of the size of the anchor
// We may extract that in our ARVRAnchor class
Basis b;
matrix_float4x4 m44 = anchor.transform;
b.elements[0].x = m44.columns[0][0];
b.elements[1].x = m44.columns[0][1];
b.elements[2].x = m44.columns[0][2];
b.elements[0].y = m44.columns[1][0];
b.elements[1].y = m44.columns[1][1];
b.elements[2].y = m44.columns[1][2];
b.elements[0].z = m44.columns[2][0];
b.elements[1].z = m44.columns[2][1];
b.elements[2].z = m44.columns[2][2];
tracker->set_orientation(b);
tracker->set_rw_position(Vector3(m44.columns[3][0], m44.columns[3][1], m44.columns[3][2]));
}
}
void ARKitInterface::_remove_anchor(void *p_anchor) {
_THREAD_SAFE_METHOD_
ARAnchor *anchor = (ARAnchor *)p_anchor;
if (@available(iOS 11.0, *)) {
ARAnchor *anchor = (ARAnchor *)p_anchor;
unsigned char uuid[16];
[anchor.identifier getUUIDBytes:uuid];
unsigned char uuid[16];
[anchor.identifier getUUIDBytes:uuid];
remove_anchor_for_uuid(uuid);
remove_anchor_for_uuid(uuid);
}
}
ARKitInterface::ARKitInterface() {
@ -728,7 +766,9 @@ ARKitInterface::ARKitInterface() {
session_was_started = false;
plane_detection_is_enabled = false;
light_estimation_is_enabled = false;
ar_session = NULL;
if (@available(iOS 11.0, *)) {
ar_session = NULL;
}
z_near = 0.01;
z_far = 1000.0;
projection.set_perspective(60.0, 1.0, z_near, z_far, false);

View file

@ -42,9 +42,9 @@ class ARKitInterface;
@property(nonatomic) ARKitInterface *arkit_interface;
- (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors;
- (void)session:(ARSession *)session didRemoveAnchors:(NSArray<ARAnchor *> *)anchors;
- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<ARAnchor *> *)anchors;
- (void)session:(ARSession *)session didAddAnchors:(NSArray<ARAnchor *> *)anchors API_AVAILABLE(ios(11.0));
- (void)session:(ARSession *)session didRemoveAnchors:(NSArray<ARAnchor *> *)anchors API_AVAILABLE(ios(11.0));
- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<ARAnchor *> *)anchors API_AVAILABLE(ios(11.0));
@end
#endif /* !ARKIT_SESSION_DELEGATE_H */

View file

@ -163,13 +163,10 @@
{
// do Y
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
int _bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
if ((width[0] != new_width) || (height[0] != new_height)) {
// printf("Camera Y plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
width[0] = new_width;
height[0] = new_height;
img_data[0].resize(new_width * new_height);
@ -184,13 +181,10 @@
{
// do CbCr
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
int bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
if ((width[1] != new_width) || (height[1] != new_height)) {
// printf("Camera CbCr plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
width[1] = new_width;
height[1] = new_height;
img_data[1].resize(2 * new_width * new_height);
@ -359,7 +353,23 @@ void CameraIOS::update_feeds() {
// this way of doing things is deprecated but still works,
// rewrite to using AVCaptureDeviceDiscoverySession
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:[NSArray arrayWithObjects:AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera, AVCaptureDeviceTypeBuiltInWideAngleCamera, nil] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
NSMutableArray *deviceTypes = [NSMutableArray array];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInTelephotoCamera];
if (@available(iOS 10.2, *)) {
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
}
if (@available(iOS 11.1, *)) {
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInTrueDepthCamera];
}
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:deviceTypes
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
// remove devices that are gone..
for (int i = feeds.size() - 1; i >= 0; i--) {
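The discovery-session call stays, but camera types that only exist on newer systems (the dual camera from iOS 10.2, the TrueDepth camera from 11.1) are now appended behind availability checks, and the session exposes the matching hardware through its devices property. A rough consumption sketch; the NSLog stands in for the feed registration done above:

NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray array];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
[deviceTypes addObject:AVCaptureDeviceTypeBuiltInTelephotoCamera];
if (@available(iOS 10.2, *)) {
    [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
}
if (@available(iOS 11.1, *)) {
    [deviceTypes addObject:AVCaptureDeviceTypeBuiltInTrueDepthCamera];
}
AVCaptureDeviceDiscoverySession *discovery = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];
for (AVCaptureDevice *device in discovery.devices) {
    NSLog(@"found camera: %@", device.localizedName);
}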

View file

@ -6,7 +6,7 @@ iphone_lib = [
"godot_iphone.cpp",
"os_iphone.mm",
"semaphore_iphone.cpp",
"gl_view.mm",
"godot_view.mm",
"main.m",
"app_delegate.mm",
"view_controller.mm",
@ -14,7 +14,7 @@ iphone_lib = [
"in_app_store.mm",
"icloud.mm",
"ios.mm",
"gl_view_gesture_recognizer.mm",
"godot_view_gesture_recognizer.mm",
]
env_ios = env.Clone()

View file

@ -28,20 +28,19 @@
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#import "gl_view.h"
#import "godot_view.h"
#import "view_controller.h"
#import <UIKit/UIKit.h>
#import <CoreMotion/CoreMotion.h>
@interface AppDelegate : NSObject <UIApplicationDelegate, GLViewDelegate> {
@interface AppDelegate : NSObject <UIApplicationDelegate, GodotViewDelegate> {
//@property (strong, nonatomic) UIWindow *window;
ViewController *view_controller;
bool is_focus_out;
};
@property(strong, class, readonly, nonatomic) ViewController *viewController;
@property(strong, nonatomic) UIWindow *window;
+ (ViewController *)getViewController;
@end

View file

@ -32,7 +32,7 @@
#include "core/project_settings.h"
#include "drivers/coreaudio/audio_driver_coreaudio.h"
#import "gl_view.h"
#import "godot_view.h"
#include "main/main.h"
#include "os_iphone.h"
@ -47,16 +47,18 @@ Error _shell_open(String);
void _set_keep_screen_on(bool p_enabled);
Error _shell_open(String p_uri) {
NSString *url = [[NSString alloc] initWithUTF8String:p_uri.utf8().get_data()];
NSString *urlPath = [[NSString alloc] initWithUTF8String:p_uri.utf8().get_data()];
NSURL *url = [NSURL URLWithString:urlPath];
[urlPath release];
if (![[UIApplication sharedApplication] canOpenURL:[NSURL URLWithString:url]]) {
if (![[UIApplication sharedApplication] canOpenURL:url]) {
[url release];
return ERR_CANT_OPEN;
}
printf("opening url %ls\n", p_uri.c_str());
[[UIApplication sharedApplication] openURL:[NSURL URLWithString:url]];
[url release];
[[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil];
return OK;
};
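openURL: was deprecated in iOS 10 in favour of openURL:options:completionHandler:, which is asynchronous and reports the outcome through its block; the rewrite above also builds the NSURL once instead of passing the raw string around. The call in isolation (the URL here is illustrative):

NSURL *url = [NSURL URLWithString:@"https://godotengine.org"];
if ([[UIApplication sharedApplication] canOpenURL:url]) {
    [[UIApplication sharedApplication] openURL:url
                                       options:@{}
                             completionHandler:^(BOOL success) {
                                 NSLog(@"openURL %@ -> %d", url, success);
                             }];
}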
@ -81,7 +83,8 @@ CMMotionManager *motionManager;
bool motionInitialised;
static ViewController *mainViewController = nil;
+ (ViewController *)getViewController {
+ (ViewController *)viewController {
return mainViewController;
}
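getViewController becomes the backing method for the viewController class property declared on AppDelegate earlier in this diff; class properties are never auto-synthesized, so declaring @property(class, ...) only promises the accessor and the class must still implement it by hand. A minimal sketch of the pairing, with a hypothetical ExampleDelegate standing in for the real AppDelegate:

@interface ExampleDelegate : NSObject
@property(strong, class, readonly, nonatomic) UIViewController *viewController;
@end

@implementation ExampleDelegate

static UIViewController *mainViewController = nil;

// The class property resolves to this class method.
+ (UIViewController *)viewController {
    return mainViewController;
}

@end

// Call sites gain dot syntax on the class itself:
// [ExampleDelegate.viewController presentViewController:alert animated:YES completion:nil];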
@ -154,7 +157,7 @@ static void on_focus_out(ViewController *view_controller, bool *is_focus_out) {
OS::get_singleton()->get_main_loop()->notification(
MainLoop::NOTIFICATION_WM_FOCUS_OUT);
[view_controller.view stopAnimation];
[view_controller.godotView stopAnimation];
if (OS::get_singleton()->native_video_is_playing()) {
OSIPhone::get_singleton()->native_video_focus_out();
}
@ -172,7 +175,7 @@ static void on_focus_in(ViewController *view_controller, bool *is_focus_out) {
OS::get_singleton()->get_main_loop()->notification(
MainLoop::NOTIFICATION_WM_FOCUS_IN);
[view_controller.view startAnimation];
[view_controller.godotView startAnimation];
if (OSIPhone::get_singleton()->native_video_is_playing()) {
OSIPhone::get_singleton()->native_video_unpause();
}
@ -301,45 +304,6 @@ static void on_focus_in(ViewController *view_controller, bool *is_focus_out) {
OSIPhone::get_singleton()->joy_axis(joy_id, JOY_ANALOG_R2, jx);
};
};
} else if (controller.gamepad != nil) {
// gamepad is the standard profile with 4 buttons, shoulder buttons and a
// D-pad
controller.gamepad.valueChangedHandler = ^(GCGamepad *gamepad,
GCControllerElement *element) {
int joy_id = [self getJoyIdForController:controller];
if (element == gamepad.buttonA) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_BUTTON_0,
gamepad.buttonA.isPressed);
} else if (element == gamepad.buttonB) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_BUTTON_1,
gamepad.buttonB.isPressed);
} else if (element == gamepad.buttonX) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_BUTTON_2,
gamepad.buttonX.isPressed);
} else if (element == gamepad.buttonY) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_BUTTON_3,
gamepad.buttonY.isPressed);
} else if (element == gamepad.leftShoulder) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_L,
gamepad.leftShoulder.isPressed);
} else if (element == gamepad.rightShoulder) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_R,
gamepad.rightShoulder.isPressed);
} else if (element == gamepad.dpad) {
OSIPhone::get_singleton()->joy_button(joy_id, JOY_DPAD_UP,
gamepad.dpad.up.isPressed);
OSIPhone::get_singleton()->joy_button(joy_id, JOY_DPAD_DOWN,
gamepad.dpad.down.isPressed);
OSIPhone::get_singleton()->joy_button(joy_id, JOY_DPAD_LEFT,
gamepad.dpad.left.isPressed);
OSIPhone::get_singleton()->joy_button(joy_id, JOY_DPAD_RIGHT,
gamepad.dpad.right.isPressed);
};
};
#ifdef ADD_MICRO_GAMEPAD // disabling this for now, only available on iOS 9+,
// while we are setting that as the minimum, seems our
// build environment doesn't like it
} else if (controller.microGamepad != nil) {
// micro gamepads were added in OS 9 and feature just 2 buttons and a d-pad
controller.microGamepad.valueChangedHandler =
@ -363,8 +327,7 @@ static void on_focus_in(ViewController *view_controller, bool *is_focus_out) {
gamepad.dpad.right.isPressed);
};
};
#endif
};
}
///@TODO need to add support for controller.motion which gives us access to
/// the orientation of the device (if supported)
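The removed block handled the GCGamepad (standard) profile, which has been deprecated in favour of GCExtendedGamepad; the extended profile handled earlier in the same method covers the same buttons, so only that path and the micro gamepad remain. For reference, the extended-profile handler follows the same shape (sketch only; controller is an existing GCController and the button mapping here is illustrative):

controller.extendedGamepad.valueChangedHandler =
        ^(GCExtendedGamepad *gamepad, GCControllerElement *element) {
            if (element == gamepad.buttonA) {
                NSLog(@"A pressed: %d", gamepad.buttonA.isPressed);
            } else if (element == gamepad.leftShoulder) {
                NSLog(@"L1 pressed: %d", gamepad.leftShoulder.isPressed);
            }
        };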
@ -428,8 +391,7 @@ OS::VideoMode _get_video_mode() {
};
static int frame_count = 0;
- (void)drawView:(GLView *)view;
{
- (void)drawView:(GodotView *)view {
switch (frame_count) {
case 0: {
@ -439,7 +401,6 @@ static int frame_count = 0;
exit(0);
};
++frame_count;
}; break;
case 1: {
@ -611,10 +572,10 @@ static int frame_count = 0;
return FALSE;
};
// WARNING: We must *always* create the GLView after we have constructed the
// OS with iphone_main. This allows the GLView to access project settings so
// WARNING: We must *always* create the GodotView after we have constructed the
// OS with iphone_main. This allows the GodotView to access project settings so
// it can properly initialize the OpenGL context
GLView *glView = [[GLView alloc] initWithFrame:rect];
GodotView *glView = [[GodotView alloc] initWithFrame:rect];
glView.delegate = self;
view_controller = [[ViewController alloc] init];

View file

@ -206,7 +206,7 @@ void GameCenter::request_achievement_descriptions() {
Array hidden;
Array replayable;
for (int i = 0; i < [descriptions count]; i++) {
for (NSUInteger i = 0; i < [descriptions count]; i++) {
GKAchievementDescription *description = [descriptions objectAtIndex:i];
@ -256,7 +256,7 @@ void GameCenter::request_achievements() {
PoolStringArray names;
PoolRealArray percentages;
for (int i = 0; i < [achievements count]; i++) {
for (NSUInteger i = 0; i < [achievements count]; i++) {
GKAchievement *achievement = [achievements objectAtIndex:i];
const char *str = [achievement.identifier UTF8String];

View file

@ -1,5 +1,5 @@
/*************************************************************************/
/* gl_view.h */
/* godot_view.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
@ -35,10 +35,10 @@
#import <OpenGLES/ES1/glext.h>
#import <UIKit/UIKit.h>
@protocol GLViewDelegate;
@class GLViewGestureRecognizer;
@protocol GodotViewDelegate;
@class GodotViewGestureRecognizer;
@interface GLView : UIView <UIKeyInput> {
@interface GodotView : UIView <UIKeyInput> {
@private
// The pixel dimensions of the backbuffer
GLint backingWidth;
@ -63,7 +63,7 @@
NSTimeInterval animationInterval;
// Delegate to do our drawing, called by -drawView, which can be called manually or via the animation timer.
id<GLViewDelegate> delegate;
id<GodotViewDelegate> delegate;
// Flag to denote that the -setupView method of a delegate has been called.
// Resets to NO whenever the delegate changes.
@ -72,10 +72,10 @@
float screen_scale;
// Delay gesture recognizer
GLViewGestureRecognizer *delayGestureRecognizer;
GodotViewGestureRecognizer *delayGestureRecognizer;
}
@property(nonatomic, assign) id<GLViewDelegate> delegate;
@property(nonatomic, assign) id<GodotViewDelegate> delegate;
// AVPlayer-related properties
@property(strong, nonatomic) AVAsset *avAsset;
@ -112,16 +112,16 @@
@end
@protocol GLViewDelegate <NSObject>
@protocol GodotViewDelegate <NSObject>
@required
// Draw with OpenGL ES
- (void)drawView:(GLView *)view;
- (void)drawView:(GodotView *)view;
@optional
// Called whenever you need to do some initialization before rendering.
- (void)setupView:(GLView *)view;
- (void)setupView:(GodotView *)view;
@end

View file

@ -1,5 +1,5 @@
/*************************************************************************/
/* gl_view.mm */
/* godot_view.mm */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
@ -28,8 +28,8 @@
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#import "gl_view.h"
#import "gl_view_gesture_recognizer.h"
#import "godot_view.h"
#import "godot_view_gesture_recognizer.h"
#include "core/os/keyboard.h"
#include "core/project_settings.h"
@ -40,7 +40,7 @@
#import <QuartzCore/QuartzCore.h>
/*
@interface GLView (private)
@interface GodotView (private)
- (id)initGLES;
- (BOOL)createFramebuffer;
@ -51,7 +51,7 @@
bool gles3_available = true;
int gl_view_base_fb;
static String keyboard_text;
static GLView *_instance = NULL;
static GodotView *_instance = NULL;
static bool video_found_error = false;
static bool video_playing = false;
@ -203,7 +203,7 @@ CGFloat _points_to_pixels(CGFloat points) {
return (points / pointsPerInch * pixelPerInch);
}
@implementation GLView
@implementation GodotView
@synthesize animationInterval;
@ -325,16 +325,16 @@ static void clear_touches() {
}
- (void)initGestureRecognizer {
delayGestureRecognizer = [[GLViewGestureRecognizer alloc] init];
delayGestureRecognizer = [[GodotViewGestureRecognizer alloc] init];
[self addGestureRecognizer:delayGestureRecognizer];
}
- (id<GLViewDelegate>)delegate {
- (id<GodotViewDelegate>)delegate {
return delegate;
}
// Update the delegate, and if it needs a -setupView: call, set our internal flag so that it will be called.
- (void)setDelegate:(id<GLViewDelegate>)d {
- (void)setDelegate:(id<GodotViewDelegate>)d {
delegate = d;
delegateSetup = ![delegate respondsToSelector:@selector(setupView:)];
}
@ -418,10 +418,10 @@ static void clear_touches() {
// Approximate frame rate
// assumes device refreshes at 60 fps
int frameInterval = (int)floor(animationInterval * 60.0f);
int displayFPS = (NSInteger)(1.0 / animationInterval);
displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(drawView)];
[displayLink setFrameInterval:frameInterval];
displayLink.preferredFramesPerSecond = displayFPS;
// Setup DisplayLink in main thread
[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
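setFrameInterval: (the number of vsync frames to skip) is deprecated since iOS 10; preferredFramesPerSecond expresses the same intent directly as a target frame rate. A standalone sketch, assuming a drawView target selector and an animationInterval in seconds as in the view above:

CADisplayLink *link = [CADisplayLink displayLinkWithTarget:self
                                                  selector:@selector(drawView)];
// e.g. an interval of 1/30 s becomes 30 fps; 0 would mean "native refresh rate".
link.preferredFramesPerSecond = (NSInteger)(1.0 / animationInterval);
[link addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];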

View file

@ -1,5 +1,5 @@
/*************************************************************************/
/* gl_view_gesture_recognizer.h */
/* godot_view_gesture_recognizer.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
@ -28,7 +28,7 @@
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
// GLViewGestureRecognizer allows iOS gestures to work currectly by
// GodotViewGestureRecognizer allows iOS gestures to work currectly by
// emulating UIScrollView's UIScrollViewDelayedTouchesBeganGestureRecognizer.
// It catches all gestures incoming to UIView and delays them for 150ms
// (the same value used by UIScrollViewDelayedTouchesBeganGestureRecognizer)
@ -37,7 +37,7 @@
#import <UIKit/UIKit.h>
@interface GLViewGestureRecognizer : UIGestureRecognizer {
@interface GodotViewGestureRecognizer : UIGestureRecognizer {
@private
// Timer used to delay begin touch message.

View file

@ -1,5 +1,5 @@
/*************************************************************************/
/* gl_view_gesture_recognizer.mm */
/* godot_view_gesture_recognizer.mm */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
@ -28,7 +28,7 @@
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#import "gl_view_gesture_recognizer.h"
#import "godot_view_gesture_recognizer.h"
#include "core/project_settings.h"
@ -38,13 +38,13 @@
// but big enough to allow click to work.
const CGFloat kGLGestureMovementDistance = 0.5;
@interface GLViewGestureRecognizer ()
@interface GodotViewGestureRecognizer ()
@property(nonatomic, readwrite, assign) NSTimeInterval delayTimeInterval;
@end
@implementation GLViewGestureRecognizer
@implementation GodotViewGestureRecognizer
- (instancetype)init {
self = [super init];

View file

@ -161,7 +161,7 @@ NSObject *variant_to_nsobject(Variant v) {
NSMutableDictionary *result = [[[NSMutableDictionary alloc] init] autorelease];
Dictionary dic = v;
Array keys = dic.keys();
for (unsigned int i = 0; i < keys.size(); ++i) {
for (int i = 0; i < keys.size(); ++i) {
NSString *key = [[[NSString alloc] initWithUTF8String:((String)(keys[i])).utf8().get_data()] autorelease];
NSObject *value = variant_to_nsobject(dic[keys[i]]);
@ -175,7 +175,7 @@ NSObject *variant_to_nsobject(Variant v) {
} else if (v.get_type() == Variant::ARRAY) {
NSMutableArray *result = [[[NSMutableArray alloc] init] autorelease];
Array arr = v;
for (unsigned int i = 0; i < arr.size(); ++i) {
for (int i = 0; i < arr.size(); ++i) {
NSObject *value = variant_to_nsobject(arr[i]);
if (value == NULL) {
//trying to add something unsupported to the array. cancel the whole array
@ -215,7 +215,7 @@ Variant ICloud::set_key_values(Variant p_params) {
Array error_keys;
for (unsigned int i = 0; i < keys.size(); ++i) {
for (int i = 0; i < keys.size(); ++i) {
String variant_key = keys[i];
Variant variant_value = params[variant_key];

View file

@ -39,6 +39,7 @@ extern "C" {
bool auto_finish_transactions = true;
NSMutableDictionary *pending_transactions = [NSMutableDictionary dictionary];
static NSArray *latestProducts;
@interface SKProduct (LocalizedPrice)
@property(nonatomic, readonly) NSString *localizedPrice;
@ -82,6 +83,8 @@ void InAppStore::_bind_methods() {
- (void)productsRequest:(SKProductsRequest *)request didReceiveResponse:(SKProductsResponse *)response {
NSArray *products = response.products;
latestProducts = products;
Dictionary ret;
ret["type"] = "product_info";
ret["result"] = "ok";
@ -189,11 +192,9 @@ Error InAppStore::restore_purchases() {
int sdk_version = 6;
if ([[[UIDevice currentDevice] systemVersion] floatValue] >= 7.0) {
NSURL *receiptFileURL = nil;
NSBundle *bundle = [NSBundle mainBundle];
if ([bundle respondsToSelector:@selector(appStoreReceiptURL)]) {
// Get the transaction receipt file path location in the app bundle.
receiptFileURL = [bundle appStoreReceiptURL];
@ -206,11 +207,11 @@ Error InAppStore::restore_purchases() {
// which is still available in iOS 7.
// Use SKPaymentTransaction's transactionReceipt.
receipt = transaction.transactionReceipt;
receipt = [NSData dataWithContentsOfURL:[[NSBundle mainBundle] appStoreReceiptURL]];
}
} else {
receipt = transaction.transactionReceipt;
receipt = [NSData dataWithContentsOfURL:[[NSBundle mainBundle] appStoreReceiptURL]];
}
NSString *receipt_to_send = nil;
@ -273,7 +274,23 @@ Error InAppStore::purchase(Variant p_params) {
ERR_FAIL_COND_V(!params.has("product_id"), ERR_INVALID_PARAMETER);
NSString *pid = [[[NSString alloc] initWithUTF8String:String(params["product_id"]).utf8().get_data()] autorelease];
SKPayment *payment = [SKPayment paymentWithProductIdentifier:pid];
SKProduct *product = nil;
if (latestProducts) {
for (SKProduct *storedProduct in latestProducts) {
if ([storedProduct.productIdentifier isEqualToString:pid]) {
product = storedProduct;
break;
}
}
}
if (!product) {
return ERR_INVALID_PARAMETER;
}
SKPayment *payment = [SKPayment paymentWithProduct:product];
SKPaymentQueue *defq = [SKPaymentQueue defaultQueue];
[defq addPayment:payment];
printf("purchase sent!\n");

View file

@ -29,9 +29,11 @@
/*************************************************************************/
#include "ios.h"
#include <sys/sysctl.h>
#import "app_delegate.h"
#import <UIKit/UIKit.h>
#include <sys/sysctl.h>
void iOS::_bind_methods() {
@ -39,8 +41,18 @@ void iOS::_bind_methods() {
};
void iOS::alert(const char *p_alert, const char *p_title) {
UIAlertView *alert = [[[UIAlertView alloc] initWithTitle:[NSString stringWithUTF8String:p_title] message:[NSString stringWithUTF8String:p_alert] delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil, nil] autorelease];
[alert show];
NSString *title = [NSString stringWithUTF8String:p_title];
NSString *message = [NSString stringWithUTF8String:p_alert];
UIAlertController *alert = [UIAlertController alertControllerWithTitle:title message:message preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *button = [UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleCancel
handler:^(id){
}];
[alert addAction:button];
[AppDelegate.viewController presentViewController:alert animated:YES completion:nil];
}
String iOS::get_model() const {
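UIAlertView (deprecated since iOS 9) could display itself with [alert show]; UIAlertController has to be presented from a view controller, which is what the new AppDelegate.viewController class property is used for above. The minimal presentation pattern, with an illustrative presenter name:

UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"Title"
                                                               message:@"Message"
                                                        preferredStyle:UIAlertControllerStyleAlert];
[alert addAction:[UIAlertAction actionWithTitle:@"OK"
                                          style:UIAlertActionStyleCancel
                                        handler:^(UIAlertAction *action){}]];
[presentingViewController presentViewController:alert animated:YES completion:nil];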

View file

@ -41,11 +41,10 @@ int main(int argc, char *argv[]) {
gargc = argc;
gargv = argv;
NSAutoreleasePool *pool = [NSAutoreleasePool new];
AppDelegate *app = [AppDelegate alloc];
printf("running app main\n");
UIApplicationMain(argc, argv, nil, @"AppDelegate");
printf("main done, pool release\n");
[pool release];
@autoreleasepool {
UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
}
printf("main done\n");
return 0;
}
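@autoreleasepool replaces the manual NSAutoreleasePool alloc/release pair, NSStringFromClass avoids a hard-coded class-name string, and the unused [AppDelegate alloc] is dropped. A minimal UIKit entry point of this shape looks roughly like:

int main(int argc, char *argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}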

View file

@ -30,6 +30,11 @@
#ifdef IPHONE_ENABLED
// System headers are at top
// to workaround `ambiguous expansion` warning/error
#import <UIKit/UIKit.h>
#include <dlfcn.h>
#include "os_iphone.h"
#include "drivers/gles2/rasterizer_gles2.h"
@ -47,9 +52,6 @@
#include "semaphore_iphone.h"
#import <UIKit/UIKit.h>
#include <dlfcn.h>
int OSIPhone::get_video_driver_count() const {
return 2;

View file

@ -31,17 +31,11 @@
#import <GameKit/GameKit.h>
#import <UIKit/UIKit.h>
@class GodotView;
@interface ViewController : UIViewController <GKGameCenterControllerDelegate> {
};
- (void)didReceiveMemoryWarning;
- (void)viewDidLoad;
- (UIRectEdge)preferredScreenEdgesDeferringSystemGestures;
- (BOOL)prefersStatusBarHidden;
- (BOOL)prefersHomeIndicatorAutoHidden;
- (GodotView *)godotView;
@end

View file

@ -30,6 +30,7 @@
#import "view_controller.h"
#import "godot_view.h"
#include "os_iphone.h"
#include "core/project_settings.h"
@ -56,16 +57,17 @@ int add_path(int p_argc, char **p_args) {
int add_cmdline(int p_argc, char **p_args) {
NSArray *arr = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"godot_cmdline"];
if (!arr)
if (!arr) {
return p_argc;
}
for (int i = 0; i < [arr count]; i++) {
for (NSUInteger i = 0; i < [arr count]; i++) {
NSString *str = [arr objectAtIndex:i];
if (!str) {
continue;
}
p_args[p_argc++] = (char *)[str cStringUsingEncoding:NSUTF8StringEncoding];
};
}
p_args[p_argc] = NULL;
@ -79,6 +81,10 @@ int add_cmdline(int p_argc, char **p_args) {
@implementation ViewController
- (GodotView *)godotView {
return (GodotView *)self.view;
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
printf("*********** did receive memory warning!\n");