Merge pull request #89734 from BastiaanOlij/openxr_reorder_wait_frame
OpenXR: Change timing of xrWaitFrame and fix XR multithreading issues
Commit 70247adf25
19 changed files with 617 additions and 246 deletions
@@ -1979,6 +1979,12 @@
[b]Warning:[/b] This function is primarily intended for editor usage. For in-game use cases, prefer physics collision.
</description>
</method>
<method name="is_on_render_thread">
<return type="bool" />
<description>
Returns [code]true[/code] if our code is currently executing on the rendering thread.
</description>
</method>
<method name="light_directional_set_blend_splits">
<return type="void" />
<param index="0" name="light" type="RID" />
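The new is_on_render_thread() entry above only documents a thread check. Below is a minimal standalone sketch of the underlying idea, using std::thread rather than Godot's own Thread class; the engine's actual implementation, shown further down in this diff, compares Thread::get_caller_id() against server_thread. All names here are illustrative.

#include <atomic>
#include <iostream>
#include <thread>

// Illustrative only: record the id of the thread acting as the "render thread"
// and compare the caller's id against it.
static std::atomic<std::thread::id> render_thread_id{ std::thread::id() };

static bool is_on_render_thread() {
	return std::this_thread::get_id() == render_thread_id.load();
}

int main() {
	std::thread render_thread([] {
		render_thread_id.store(std::this_thread::get_id());
		std::cout << "render thread sees: " << is_on_render_thread() << "\n"; // prints 1
	});
	render_thread.join();
	std::cout << "main thread sees: " << is_on_render_thread() << "\n"; // prints 0
	return 0;
}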
@@ -37,8 +37,8 @@
You should call this method after a few seconds have passed. For example, when the user requests a realignment of the display holding a designated button on a controller for a short period of time, or when implementing a teleport mechanism.
</description>
</method>
<method name="clear_reference_frame" qualifiers="const">
<return type="Transform3D" />
<method name="clear_reference_frame">
<return type="void" />
<description>
Clears the reference frame that was set by previous calls to [method center_on_hmd].
</description>
@@ -54,7 +54,7 @@
<method name="get_next_frame_time">
<return type="int" />
<description>
Returns the timing for the next frame.
Returns the predicted display timing for the next frame.
</description>
</method>
<method name="get_play_space">
@@ -63,6 +63,12 @@
Returns the play space, which is an [url=https://registry.khronos.org/OpenXR/specs/1.0/man/html/XrSpace.html]XrSpace[/url] cast to an integer.
</description>
</method>
<method name="get_predicted_display_time">
<return type="int" />
<description>
Returns the predicted display timing for the current frame.
</description>
</method>
<method name="get_session">
<return type="int" />
<description>
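The two getters documented above differ by exactly one predicted display period. The following self-contained sketch mirrors the inline getters added to openxr_api.h later in this diff; plain int64_t stands in for XrTime and the struct is illustrative, not the real XrFrameState.

#include <cstdint>
#include <cstdio>

// Stand-in for the parts of XrFrameState that matter here (values in nanoseconds).
struct FrameTiming {
	int64_t predicted_display_time = 0;   // when the frame being processed is predicted to display
	int64_t predicted_display_period = 0; // expected time between displayed frames
};

// Mirrors get_predicted_display_time(): timing for the current frame.
int64_t predicted_display_time(const FrameTiming &t) {
	return t.predicted_display_time;
}

// Mirrors get_next_frame_time(): one display period further ahead.
int64_t next_frame_time(const FrameTiming &t) {
	return t.predicted_display_time + t.predicted_display_period;
}

int main() {
	FrameTiming t = { 1000000000, 11111111 }; // illustrative ~90 Hz values
	printf("current: %lld, next: %lld\n", (long long)predicted_display_time(t), (long long)next_frame_time(t));
	return 0;
}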
@@ -147,6 +147,11 @@
</member>
</members>
<signals>
<signal name="instance_exiting">
<description>
Informs our OpenXR instance is exiting.
</description>
</signal>
<signal name="pose_recentered">
<description>
Informs the user queued a recenter of the player position.
@@ -169,6 +174,11 @@
Informs our OpenXR session now has focus.
</description>
</signal>
<signal name="session_loss_pending">
<description>
Informs our OpenXR session is in the process of being lost.
</description>
</signal>
<signal name="session_stopping">
<description>
Informs our OpenXR session is stopping.
</description>
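A hedged sketch of how module-side C++ might react to the two signals added above. It assumes the Godot module build environment, a valid Ref<OpenXRInterface> obtained from XRServer, and a hypothetical Object-derived receiver class named MyXRSessionWatcher; it is illustrative wiring, not code from this PR.

// Hypothetical receiver; assumed to derive from Node so callable_mp() can bind to it.
void MyXRSessionWatcher::watch(Ref<OpenXRInterface> xr_interface) {
	// React before the session disappears.
	xr_interface->connect("session_loss_pending", callable_mp(this, &MyXRSessionWatcher::_on_session_loss_pending));
	// React when the whole OpenXR instance is going away.
	xr_interface->connect("instance_exiting", callable_mp(this, &MyXRSessionWatcher::_on_instance_exiting));
}

void MyXRSessionWatcher::_on_session_loss_pending() {
	print_line("OpenXR session is about to be lost, save state now.");
}

void MyXRSessionWatcher::_on_instance_exiting() {
	print_line("OpenXR instance is exiting, shut down XR-specific systems.");
}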
@@ -274,7 +274,7 @@ bool OpenXRViewportCompositionLayerProvider::update_and_acquire_swapchain(bool p
if (swapchain_size == viewport_size && !p_static_image && !static_image) {
// We're all good! Just acquire it.
// We can ignore should_render here, return will be false.
XrBool32 should_render = true;
bool should_render = true;
return swapchain_info.acquire(should_render);
}
@@ -296,7 +296,7 @@ bool OpenXRViewportCompositionLayerProvider::update_and_acquire_swapchain(bool p
// Acquire our image so we can start rendering into it,
// we can ignore should_render here, ret will be false.
XrBool32 should_render = true;
bool should_render = true;
bool ret = swapchain_info.acquire(should_render);
swapchain_size = viewport_size;
@@ -128,7 +128,7 @@ void OpenXRHandTrackingExtension::on_process() {
}
// process our hands
const XrTime time = OpenXRAPI::get_singleton()->get_next_frame_time(); // This data will be used for the next frame we render
const XrTime time = OpenXRAPI::get_singleton()->get_predicted_display_time();
if (time == 0) {
// we don't have timing info yet, or we're skipping a frame...
return;
@@ -160,7 +160,7 @@ void OpenXRAPI::OpenXRSwapChainInfo::free() {
}
}
bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
bool OpenXRAPI::OpenXRSwapChainInfo::acquire(bool &p_should_render) {
ERR_FAIL_COND_V(image_acquired, true); // This was not released when it should be, error out and reuse...
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
@@ -193,10 +193,18 @@ bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
XrSwapchainImageWaitInfo swapchain_image_wait_info = {
XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, // type
nullptr, // next
17000000 // timeout in nanoseconds
1000000000 // 1s timeout in nanoseconds
};
result = openxr_api->xrWaitSwapchainImage(swapchain, &swapchain_image_wait_info);
// Wait for a maximum of 10 seconds before calling it a critical failure...
for (int retry = 0; retry < 10; retry++) {
result = openxr_api->xrWaitSwapchainImage(swapchain, &swapchain_image_wait_info);
if (result != XR_TIMEOUT_EXPIRED) {
break;
}
WARN_PRINT("OpenXR: timed out waiting for swapchain image.");
}
if (!XR_UNQUALIFIED_SUCCESS(result)) {
// Make sure end_frame knows we need to submit an empty frame
p_should_render = false;
@@ -206,6 +214,8 @@ bool OpenXRAPI::OpenXRSwapChainInfo::acquire(XrBool32 &p_should_render) {
print_line("OpenXR: failed to wait for swapchain image [", openxr_api->get_error_string(result), "]");
return false;
} else {
WARN_PRINT("OpenXR: couldn't to wait for swapchain but not a complete error [" + openxr_api->get_error_string(result) + "]");
// Make sure to skip trying to acquire the swapchain image in the next frame
skip_acquire_swapchain = true;
return false;
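The new acquire() path above waits in one second slices and only gives up after ten consecutive timeouts, instead of failing on a single short wait. A standalone sketch of that retry shape follows, with a stand-in function in place of xrWaitSwapchainImage; the names are illustrative, not OpenXR API.

#include <cstdio>

enum WaitResult { WAIT_SUCCESS, WAIT_TIMEOUT, WAIT_ERROR };

// Stand-in for a wait call with a 1 second timeout; a real implementation
// would block on the XR runtime.
static WaitResult wait_for_image_once() {
	return WAIT_SUCCESS;
}

// Retry the wait up to 10 times; only repeated timeouts count as failure,
// any other result (success or hard error) ends the loop immediately.
static bool wait_for_image_bounded() {
	WaitResult result = WAIT_TIMEOUT;
	for (int retry = 0; retry < 10; retry++) {
		result = wait_for_image_once();
		if (result != WAIT_TIMEOUT) {
			break;
		}
		printf("timed out waiting for swapchain image, retrying...\n");
	}
	return result == WAIT_SUCCESS;
}

int main() {
	printf("acquired: %d\n", wait_for_image_bounded());
	return 0;
}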
@@ -760,21 +770,6 @@ bool OpenXRAPI::load_supported_view_configuration_views(XrViewConfigurationType
print_verbose(String(" - recommended render sample count: ") + itos(view_configuration_views[i].recommendedSwapchainSampleCount));
}
// Allocate buffers we'll be populating with view information.
views = (XrView *)memalloc(sizeof(XrView) * view_count);
ERR_FAIL_NULL_V_MSG(views, false, "OpenXR Couldn't allocate memory for views");
memset(views, 0, sizeof(XrView) * view_count);
projection_views = (XrCompositionLayerProjectionView *)memalloc(sizeof(XrCompositionLayerProjectionView) * view_count);
ERR_FAIL_NULL_V_MSG(projection_views, false, "OpenXR Couldn't allocate memory for projection views");
memset(projection_views, 0, sizeof(XrCompositionLayerProjectionView) * view_count);
if (submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
depth_views = (XrCompositionLayerDepthInfoKHR *)memalloc(sizeof(XrCompositionLayerDepthInfoKHR) * view_count);
ERR_FAIL_NULL_V_MSG(depth_views, false, "OpenXR Couldn't allocate memory for depth views");
memset(depth_views, 0, sizeof(XrCompositionLayerDepthInfoKHR) * view_count);
}
return true;
}
@@ -927,6 +922,9 @@ bool OpenXRAPI::setup_play_space() {
// If we've previously created a play space, clean it up first.
if (play_space != XR_NULL_HANDLE) {
// TODO Investigate if destroying our play space here is safe,
// it may still be used in the rendering thread.
xrDestroySpace(play_space);
}
play_space = new_play_space;
@@ -936,7 +934,11 @@ bool OpenXRAPI::setup_play_space() {
if (emulating_local_floor) {
// We'll use the STAGE space to get the floor height, but we can't do that until
// after xrWaitFrame(), so just set this flag for now.
// Render state will be updated then.
should_reset_emulated_floor_height = true;
} else {
// Update render state so this play space is used rendering the upcoming frame.
set_render_play_space(play_space);
}
return true;
@@ -1016,7 +1018,7 @@ bool OpenXRAPI::reset_emulated_floor_height() {
identityPose, // pose
};
result = xrLocateSpace(stage_space, local_space, get_next_frame_time(), &stage_location);
result = xrLocateSpace(stage_space, local_space, get_predicted_display_time(), &stage_location);
xrDestroySpace(local_space);
xrDestroySpace(stage_space);
@@ -1042,6 +1044,9 @@ bool OpenXRAPI::reset_emulated_floor_height() {
// report that as the reference space to the outside world.
reference_space = XR_REFERENCE_SPACE_TYPE_LOCAL_FLOOR_EXT;
// Update render state so this play space is used rendering the upcoming frame.
set_render_play_space(play_space);
return true;
}
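The two hunks above defer the emulated LOCAL_FLOOR reset until after xrWaitFrame, because the floor height is measured by locating the STAGE space at a valid predicted display time. A rough sketch of that measurement follows; it assumes local_space and stage_space were already created for the same session, omits error handling, and mirrors the idea behind reset_emulated_floor_height() rather than the exact Godot code.

#include <openxr/openxr.h>

// The stage origin sits on the physical floor, so its Y offset relative to the
// LOCAL space gives the floor height to emulate LOCAL_FLOOR with.
float estimate_floor_offset(XrSpace stage_space, XrSpace local_space, XrTime predicted_display_time) {
	XrSpaceLocation stage_location = { XR_TYPE_SPACE_LOCATION, nullptr, 0, {} };
	xrLocateSpace(stage_space, local_space, predicted_display_time, &stage_location);
	if ((stage_location.locationFlags & XR_SPACE_LOCATION_POSITION_VALID_BIT) == 0) {
		return 0.0f; // no valid tracking data yet, caller should retry on a later frame
	}
	return stage_location.pose.position.y; // typically negative: the floor is below the LOCAL origin
}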
@@ -1136,6 +1141,7 @@ bool OpenXRAPI::obtain_swapchain_formats() {
}
bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
ERR_NOT_ON_RENDER_THREAD_V(false);
ERR_FAIL_NULL_V(graphics_extension, false);
ERR_FAIL_COND_V(session == XR_NULL_HANDLE, false);
@ -1154,12 +1160,12 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
|
|||
as we render 3D content into internal buffers that are copied into the swapchain, we do now have (basic) VRS support
|
||||
*/
|
||||
|
||||
main_swapchain_size = p_size;
|
||||
render_state.main_swapchain_size = p_size;
|
||||
uint32_t sample_count = 1;
|
||||
|
||||
// We start with our color swapchain...
|
||||
if (color_swapchain_format != 0) {
|
||||
if (!main_swapchains[OPENXR_SWAPCHAIN_COLOR].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_MUTABLE_FORMAT_BIT, color_swapchain_format, main_swapchain_size.width, main_swapchain_size.height, sample_count, view_count)) {
|
||||
if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_MUTABLE_FORMAT_BIT, color_swapchain_format, render_state.main_swapchain_size.width, render_state.main_swapchain_size.height, sample_count, view_count)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -1169,7 +1175,7 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
|
|||
// - we support our depth layer extension
|
||||
// - we have our spacewarp extension (not yet implemented)
|
||||
if (depth_swapchain_format != 0 && submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
|
||||
if (!main_swapchains[OPENXR_SWAPCHAIN_DEPTH].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, depth_swapchain_format, main_swapchain_size.width, main_swapchain_size.height, sample_count, view_count)) {
|
||||
if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].create(0, XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, depth_swapchain_format, render_state.main_swapchain_size.width, render_state.main_swapchain_size.height, sample_count, view_count)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -1180,36 +1186,36 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
|
|||
// TBD
|
||||
}
|
||||
|
||||
for (uint32_t i = 0; i < view_count; i++) {
|
||||
views[i].type = XR_TYPE_VIEW;
|
||||
views[i].next = nullptr;
|
||||
for (uint32_t i = 0; i < render_state.view_count; i++) {
|
||||
render_state.views[i].type = XR_TYPE_VIEW;
|
||||
render_state.views[i].next = nullptr;
|
||||
|
||||
projection_views[i].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
|
||||
projection_views[i].next = nullptr;
|
||||
projection_views[i].subImage.swapchain = main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
|
||||
projection_views[i].subImage.imageArrayIndex = i;
|
||||
projection_views[i].subImage.imageRect.offset.x = 0;
|
||||
projection_views[i].subImage.imageRect.offset.y = 0;
|
||||
projection_views[i].subImage.imageRect.extent.width = main_swapchain_size.width;
|
||||
projection_views[i].subImage.imageRect.extent.height = main_swapchain_size.height;
|
||||
render_state.projection_views[i].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
|
||||
render_state.projection_views[i].next = nullptr;
|
||||
render_state.projection_views[i].subImage.swapchain = render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
|
||||
render_state.projection_views[i].subImage.imageArrayIndex = i;
|
||||
render_state.projection_views[i].subImage.imageRect.offset.x = 0;
|
||||
render_state.projection_views[i].subImage.imageRect.offset.y = 0;
|
||||
render_state.projection_views[i].subImage.imageRect.extent.width = render_state.main_swapchain_size.width;
|
||||
render_state.projection_views[i].subImage.imageRect.extent.height = render_state.main_swapchain_size.height;
|
||||
|
||||
if (submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available() && depth_views) {
|
||||
projection_views[i].next = &depth_views[i];
|
||||
if (render_state.submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available() && render_state.depth_views) {
|
||||
render_state.projection_views[i].next = &render_state.depth_views[i];
|
||||
|
||||
depth_views[i].type = XR_TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR;
|
||||
depth_views[i].next = nullptr;
|
||||
depth_views[i].subImage.swapchain = main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_swapchain();
|
||||
depth_views[i].subImage.imageArrayIndex = i;
|
||||
depth_views[i].subImage.imageRect.offset.x = 0;
|
||||
depth_views[i].subImage.imageRect.offset.y = 0;
|
||||
depth_views[i].subImage.imageRect.extent.width = main_swapchain_size.width;
|
||||
depth_views[i].subImage.imageRect.extent.height = main_swapchain_size.height;
|
||||
render_state.depth_views[i].type = XR_TYPE_COMPOSITION_LAYER_DEPTH_INFO_KHR;
|
||||
render_state.depth_views[i].next = nullptr;
|
||||
render_state.depth_views[i].subImage.swapchain = render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_swapchain();
|
||||
render_state.depth_views[i].subImage.imageArrayIndex = i;
|
||||
render_state.depth_views[i].subImage.imageRect.offset.x = 0;
|
||||
render_state.depth_views[i].subImage.imageRect.offset.y = 0;
|
||||
render_state.depth_views[i].subImage.imageRect.extent.width = render_state.main_swapchain_size.width;
|
||||
render_state.depth_views[i].subImage.imageRect.extent.height = render_state.main_swapchain_size.height;
|
||||
// OpenXR spec says that: minDepth < maxDepth.
|
||||
depth_views[i].minDepth = 0.0;
|
||||
depth_views[i].maxDepth = 1.0;
|
||||
render_state.depth_views[i].minDepth = 0.0;
|
||||
render_state.depth_views[i].maxDepth = 1.0;
|
||||
// But we can reverse near and far for reverse-Z.
|
||||
depth_views[i].nearZ = 100.0; // Near and far Z will be set to the correct values in fill_projection_matrix
|
||||
depth_views[i].farZ = 0.01;
|
||||
render_state.depth_views[i].nearZ = 100.0; // Near and far Z will be set to the correct values in fill_projection_matrix
|
||||
render_state.depth_views[i].farZ = 0.01;
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1217,23 +1223,33 @@ bool OpenXRAPI::create_main_swapchains(Size2i p_size) {
|
|||
};
|
||||
|
||||
void OpenXRAPI::destroy_session() {
|
||||
if (running && session != XR_NULL_HANDLE) {
|
||||
xrEndSession(session);
|
||||
// TODO need to figure out if we're still rendering our current frame
|
||||
// in a separate rendering thread and if so,
|
||||
// if we need to wait for completion.
|
||||
// We could be pulling the rug from underneath rendering...
|
||||
|
||||
if (running) {
|
||||
if (session != XR_NULL_HANDLE) {
|
||||
xrEndSession(session);
|
||||
}
|
||||
|
||||
running = false;
|
||||
render_state.running = false;
|
||||
}
|
||||
|
||||
if (views != nullptr) {
|
||||
memfree(views);
|
||||
views = nullptr;
|
||||
if (render_state.views != nullptr) {
|
||||
memfree(render_state.views);
|
||||
render_state.views = nullptr;
|
||||
}
|
||||
|
||||
if (projection_views != nullptr) {
|
||||
memfree(projection_views);
|
||||
projection_views = nullptr;
|
||||
if (render_state.projection_views != nullptr) {
|
||||
memfree(render_state.projection_views);
|
||||
render_state.projection_views = nullptr;
|
||||
}
|
||||
|
||||
if (depth_views != nullptr) {
|
||||
memfree(depth_views);
|
||||
depth_views = nullptr;
|
||||
if (render_state.depth_views != nullptr) {
|
||||
memfree(render_state.depth_views);
|
||||
render_state.depth_views = nullptr;
|
||||
}
|
||||
|
||||
free_main_swapchains();
|
||||
|
@ -1248,6 +1264,7 @@ void OpenXRAPI::destroy_session() {
|
|||
if (play_space != XR_NULL_HANDLE) {
|
||||
xrDestroySpace(play_space);
|
||||
play_space = XR_NULL_HANDLE;
|
||||
render_state.play_space = XR_NULL_HANDLE;
|
||||
}
|
||||
if (view_space != XR_NULL_HANDLE) {
|
||||
xrDestroySpace(view_space);
|
||||
|
@ -1298,6 +1315,7 @@ bool OpenXRAPI::on_state_ready() {
|
|||
|
||||
// we're running
|
||||
running = true;
|
||||
set_render_session_running(true);
|
||||
|
||||
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
|
||||
wrapper->on_state_ready();
|
||||
|
@ -1374,34 +1392,37 @@ bool OpenXRAPI::on_state_stopping() {
|
|||
}
|
||||
|
||||
running = false;
|
||||
set_render_session_running(false);
|
||||
}
|
||||
|
||||
// TODO further cleanup
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OpenXRAPI::on_state_loss_pending() {
|
||||
print_verbose("On state loss pending");
|
||||
|
||||
if (xr_interface) {
|
||||
xr_interface->on_state_loss_pending();
|
||||
}
|
||||
|
||||
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
|
||||
wrapper->on_state_loss_pending();
|
||||
}
|
||||
|
||||
// TODO need to look into the correct action here, read up on the spec but we may need to signal Godot to exit (if it's not already exiting)
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OpenXRAPI::on_state_exiting() {
|
||||
print_verbose("On state existing");
|
||||
|
||||
if (xr_interface) {
|
||||
xr_interface->on_state_exiting();
|
||||
}
|
||||
|
||||
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
|
||||
wrapper->on_state_exiting();
|
||||
}
|
||||
|
||||
// TODO need to look into the correct action here, read up on the spec but we may need to signal Godot to exit (if it's not already exiting)
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -1419,10 +1440,7 @@ void OpenXRAPI::set_view_configuration(XrViewConfigurationType p_view_configurat
|
|||
|
||||
bool OpenXRAPI::set_requested_reference_space(XrReferenceSpaceType p_requested_reference_space) {
|
||||
requested_reference_space = p_requested_reference_space;
|
||||
|
||||
if (is_initialized()) {
|
||||
return setup_play_space();
|
||||
}
|
||||
play_space_is_dirty = true;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -1625,11 +1643,6 @@ bool OpenXRAPI::initialize_session() {
|
|||
return false;
|
||||
}
|
||||
|
||||
if (!setup_play_space()) {
|
||||
destroy_session();
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!setup_view_space()) {
|
||||
destroy_session();
|
||||
return false;
|
||||
|
@ -1645,6 +1658,8 @@ bool OpenXRAPI::initialize_session() {
|
|||
return false;
|
||||
}
|
||||
|
||||
allocate_view_buffers(view_count, submit_depth_buffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -1696,12 +1711,18 @@ XrHandTrackerEXT OpenXRAPI::get_hand_tracker(int p_hand_index) {
|
|||
}
|
||||
|
||||
Size2 OpenXRAPI::get_recommended_target_size() {
|
||||
RenderingServer *rendering_server = RenderingServer::get_singleton();
|
||||
ERR_FAIL_NULL_V(view_configuration_views, Size2());
|
||||
|
||||
Size2 target_size;
|
||||
|
||||
target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_target_size_multiplier;
|
||||
target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_target_size_multiplier;
|
||||
if (rendering_server && rendering_server->is_on_render_thread()) {
|
||||
target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_state.render_target_size_multiplier;
|
||||
target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_state.render_target_size_multiplier;
|
||||
} else {
|
||||
target_size.width = view_configuration_views[0].recommendedImageRectWidth * render_target_size_multiplier;
|
||||
target_size.height = view_configuration_views[0].recommendedImageRectHeight * render_target_size_multiplier;
|
||||
}
|
||||
|
||||
return target_size;
|
||||
}
|
||||
|
@ -1713,14 +1734,12 @@ XRPose::TrackingConfidence OpenXRAPI::get_head_center(Transform3D &r_transform,
|
|||
return XRPose::XR_TRACKING_CONFIDENCE_NONE;
|
||||
}
|
||||
|
||||
// xrWaitFrame not run yet
|
||||
if (frame_state.predictedDisplayTime == 0) {
|
||||
// Get display time
|
||||
XrTime display_time = get_predicted_display_time();
|
||||
if (display_time == 0) {
|
||||
return XRPose::XR_TRACKING_CONFIDENCE_NONE;
|
||||
}
|
||||
|
||||
// Get timing for the next frame, as that is the current frame we're processing
|
||||
XrTime display_time = get_next_frame_time();
|
||||
|
||||
XrSpaceVelocity velocity = {
|
||||
XR_TYPE_SPACE_VELOCITY, // type
|
||||
nullptr, // next
|
||||
|
@ -1764,54 +1783,47 @@ XRPose::TrackingConfidence OpenXRAPI::get_head_center(Transform3D &r_transform,
|
|||
}
|
||||
|
||||
bool OpenXRAPI::get_view_transform(uint32_t p_view, Transform3D &r_transform) {
|
||||
if (!running) {
|
||||
return false;
|
||||
}
|
||||
ERR_NOT_ON_RENDER_THREAD_V(false);
|
||||
|
||||
// xrWaitFrame not run yet
|
||||
if (frame_state.predictedDisplayTime == 0) {
|
||||
if (!render_state.running) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// we don't have valid view info
|
||||
if (views == nullptr || !view_pose_valid) {
|
||||
if (render_state.views == nullptr || !render_state.view_pose_valid) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Note, the timing of this is set right before rendering, which is what we need here.
|
||||
r_transform = transform_from_pose(views[p_view].pose);
|
||||
r_transform = transform_from_pose(render_state.views[p_view].pose);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OpenXRAPI::get_view_projection(uint32_t p_view, double p_z_near, double p_z_far, Projection &p_camera_matrix) {
|
||||
ERR_NOT_ON_RENDER_THREAD_V(false);
|
||||
ERR_FAIL_NULL_V(graphics_extension, false);
|
||||
|
||||
if (!running) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// xrWaitFrame not run yet
|
||||
if (frame_state.predictedDisplayTime == 0) {
|
||||
if (!render_state.running) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// we don't have valid view info
|
||||
if (views == nullptr || !view_pose_valid) {
|
||||
if (render_state.views == nullptr || !render_state.view_pose_valid) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// if we're using depth views, make sure we update our near and far there...
|
||||
if (depth_views != nullptr) {
|
||||
for (uint32_t i = 0; i < view_count; i++) {
|
||||
if (render_state.depth_views != nullptr) {
|
||||
for (uint32_t i = 0; i < render_state.view_count; i++) {
|
||||
// As we are using reverse-Z these need to be flipped.
|
||||
depth_views[i].nearZ = p_z_far;
|
||||
depth_views[i].farZ = p_z_near;
|
||||
render_state.depth_views[i].nearZ = p_z_far;
|
||||
render_state.depth_views[i].farZ = p_z_near;
|
||||
}
|
||||
}
|
||||
|
||||
// now update our projection
|
||||
return graphics_extension->create_projection_fov(views[p_view].fov, p_z_near, p_z_far, p_camera_matrix);
|
||||
return graphics_extension->create_projection_fov(render_state.views[p_view].fov, p_z_near, p_z_far, p_camera_matrix);
|
||||
}
|
||||
|
||||
bool OpenXRAPI::poll_events() {
|
||||
|
@@ -1934,6 +1946,69 @@ bool OpenXRAPI::poll_events() {
}
}
void OpenXRAPI::_allocate_view_buffers(uint32_t p_view_count, bool p_submit_depth_buffer) {
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
ERR_FAIL_NULL(openxr_api);
openxr_api->render_state.view_count = p_view_count;
openxr_api->render_state.submit_depth_buffer = p_submit_depth_buffer;
// Allocate buffers we'll be populating with view information.
openxr_api->render_state.views = (XrView *)memalloc(sizeof(XrView) * p_view_count);
ERR_FAIL_NULL_MSG(openxr_api->render_state.views, "OpenXR Couldn't allocate memory for views");
memset(openxr_api->render_state.views, 0, sizeof(XrView) * p_view_count);
openxr_api->render_state.projection_views = (XrCompositionLayerProjectionView *)memalloc(sizeof(XrCompositionLayerProjectionView) * p_view_count);
ERR_FAIL_NULL_MSG(openxr_api->render_state.projection_views, "OpenXR Couldn't allocate memory for projection views");
memset(openxr_api->render_state.projection_views, 0, sizeof(XrCompositionLayerProjectionView) * p_view_count);
if (p_submit_depth_buffer && OpenXRCompositionLayerDepthExtension::get_singleton()->is_available()) {
openxr_api->render_state.depth_views = (XrCompositionLayerDepthInfoKHR *)memalloc(sizeof(XrCompositionLayerDepthInfoKHR) * p_view_count);
ERR_FAIL_NULL_MSG(openxr_api->render_state.depth_views, "OpenXR Couldn't allocate memory for depth views");
memset(openxr_api->render_state.depth_views, 0, sizeof(XrCompositionLayerDepthInfoKHR) * p_view_count);
}
}
void OpenXRAPI::_set_render_session_running(bool p_is_running) {
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
ERR_FAIL_NULL(openxr_api);
openxr_api->render_state.running = p_is_running;
}
void OpenXRAPI::_set_render_display_info(XrTime p_predicted_display_time, bool p_should_render) {
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
ERR_FAIL_NULL(openxr_api);
openxr_api->render_state.predicted_display_time = p_predicted_display_time;
openxr_api->render_state.should_render = p_should_render;
}
void OpenXRAPI::_set_render_play_space(uint64_t p_play_space) {
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
ERR_FAIL_NULL(openxr_api);
openxr_api->render_state.play_space = XrSpace(p_play_space);
}
void OpenXRAPI::_set_render_state_multiplier(double p_render_target_size_multiplier) {
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
OpenXRAPI *openxr_api = OpenXRAPI::get_singleton();
ERR_FAIL_NULL(openxr_api);
openxr_api->render_state.render_target_size_multiplier = p_render_target_size_multiplier;
}
bool OpenXRAPI::process() {
ERR_FAIL_COND_V(instance == XR_NULL_HANDLE, false);
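The static _set_render_* helpers above only ever run on the rendering thread; the main thread schedules them through the rendering server's command queue via the _FORCE_INLINE_ wrappers shown later in this diff. Below is a standalone model of that hand-off, using std::function and a mutex-guarded queue in place of Godot's command queue; names and types are illustrative.

#include <cstdio>
#include <functional>
#include <mutex>
#include <queue>
#include <utility>

// Toy stand-in for the rendering server's command queue: the main thread
// pushes closures, the render thread drains them at a safe point between frames.
struct RenderCommandQueue {
	std::mutex mutex;
	std::queue<std::function<void()>> commands;

	void call_on_render_thread(std::function<void()> command) {
		std::lock_guard<std::mutex> lock(mutex);
		commands.push(std::move(command));
	}

	void flush() { // called by the render thread only
		std::queue<std::function<void()>> pending;
		{
			std::lock_guard<std::mutex> lock(mutex);
			std::swap(pending, commands);
		}
		while (!pending.empty()) {
			pending.front()();
			pending.pop();
		}
	}
};

// Render-thread-only copy of state, analogous to OpenXRAPI::render_state.
struct RenderState {
	long long predicted_display_time = 0;
	bool should_render = false;
};

int main() {
	RenderCommandQueue queue;
	RenderState render_state;

	// Main thread: don't touch render_state directly, queue the update instead.
	queue.call_on_render_thread([&render_state] {
		render_state.predicted_display_time = 123456789;
		render_state.should_render = true;
	});

	// Render thread (simulated here): apply queued updates before using them.
	queue.flush();
	printf("display time %lld, should render %d\n", render_state.predicted_display_time, (int)render_state.should_render);
	return 0;
}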
@@ -1945,42 +2020,11 @@ bool OpenXRAPI::process() {
return false;
}
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
wrapper->on_process();
}
return true;
}
void OpenXRAPI::free_main_swapchains() {
for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
main_swapchains[i].queue_free();
}
}
void OpenXRAPI::pre_render() {
ERR_FAIL_COND(instance == XR_NULL_HANDLE);
if (!running) {
return;
}
// Process any swapchains that were queued to be freed
OpenXRSwapChainInfo::free_queued();
Size2i swapchain_size = get_recommended_target_size();
if (swapchain_size != main_swapchain_size) {
// Out with the old.
free_main_swapchains();
// In with the new.
create_main_swapchains(swapchain_size);
}
// Waitframe does 2 important things in our process:
// 1) It provides us with predictive timing, telling us when OpenXR expects to display the frame we're about to commit
// 2) It will use the previous timing to pause our thread so that rendering starts as close to displaying as possible
// This must thus be called as close to when we start rendering as possible
// We call xrWaitFrame as early as possible, this will allow OpenXR to get
// proper timing info between this point, and when we're ready to start rendering.
// As the name suggests, OpenXR can pause the thread to minimize the time between
// retrieving tracking data and using that tracking data to render.
// OpenXR thus works best if rendering is performed on a separate thread.
XrFrameWaitInfo frame_wait_info = { XR_TYPE_FRAME_WAIT_INFO, nullptr };
frame_state.predictedDisplayTime = 0;
frame_state.predictedDisplayPeriod = 0;
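For reference, the comments above describe the standard OpenXR frame loop that this PR moves toward by calling xrWaitFrame earlier. A minimal sketch of that loop follows, assuming an already-created XrSession in the running state; error handling, view location and layer submission are omitted, and this is not Godot code.

#include <openxr/openxr.h>

void render_one_frame(XrSession session, XrEnvironmentBlendMode blend_mode) {
	XrFrameWaitInfo wait_info = { XR_TYPE_FRAME_WAIT_INFO, nullptr };
	XrFrameState frame_state = { XR_TYPE_FRAME_STATE, nullptr };
	xrWaitFrame(session, &wait_info, &frame_state); // throttles the loop and predicts the display time

	XrFrameBeginInfo begin_info = { XR_TYPE_FRAME_BEGIN_INFO, nullptr };
	xrBeginFrame(session, &begin_info);

	// ... locate views with frame_state.predictedDisplayTime, acquire/wait/release
	// swapchain images and render here (skipped when frame_state.shouldRender is false) ...

	XrFrameEndInfo end_info = { XR_TYPE_FRAME_END_INFO, nullptr };
	end_info.displayTime = frame_state.predictedDisplayTime;
	end_info.environmentBlendMode = blend_mode;
	end_info.layerCount = 0; // submit real layers when shouldRender is true
	end_info.layers = nullptr;
	xrEndFrame(session, &end_info);
}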
@@ -1995,7 +2039,9 @@ void OpenXRAPI::pre_render() {
frame_state.predictedDisplayPeriod = 0;
frame_state.shouldRender = false;
return;
set_render_display_info(0, false);
return false;
}
if (frame_state.predictedDisplayPeriod > 500000000) {
@@ -2004,11 +2050,53 @@ void OpenXRAPI::pre_render() {
frame_state.predictedDisplayPeriod = 0;
}
set_render_display_info(frame_state.predictedDisplayTime, frame_state.shouldRender);
if (unlikely(play_space_is_dirty)) {
setup_play_space();
play_space_is_dirty = false;
}
if (unlikely(should_reset_emulated_floor_height)) {
reset_emulated_floor_height();
should_reset_emulated_floor_height = false;
}
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
wrapper->on_process();
}
return true;
}
void OpenXRAPI::free_main_swapchains() {
for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
render_state.main_swapchains[i].queue_free();
}
}
void OpenXRAPI::pre_render() {
ERR_FAIL_COND(session == XR_NULL_HANDLE);
// Must be called from rendering thread!
ERR_NOT_ON_RENDER_THREAD;
if (!render_state.running) {
return;
}
// Process any swapchains that were queued to be freed
OpenXRSwapChainInfo::free_queued();
Size2i swapchain_size = get_recommended_target_size();
if (swapchain_size != render_state.main_swapchain_size) {
// Out with the old.
free_main_swapchains();
// In with the new.
create_main_swapchains(swapchain_size);
}
for (OpenXRExtensionWrapper *wrapper : registered_extension_wrappers) {
wrapper->on_pre_render();
}
@ -2028,8 +2116,8 @@ void OpenXRAPI::pre_render() {
|
|||
XR_TYPE_VIEW_LOCATE_INFO, // type
|
||||
nullptr, // next
|
||||
view_configuration, // viewConfigurationType
|
||||
frame_state.predictedDisplayTime, // displayTime
|
||||
play_space // space
|
||||
render_state.predicted_display_time, // displayTime
|
||||
render_state.play_space // space
|
||||
};
|
||||
XrViewState view_state = {
|
||||
XR_TYPE_VIEW_STATE, // type
|
||||
|
@ -2037,7 +2125,7 @@ void OpenXRAPI::pre_render() {
|
|||
0 // viewStateFlags
|
||||
};
|
||||
uint32_t view_count_output;
|
||||
result = xrLocateViews(session, &view_locate_info, &view_state, view_count, &view_count_output, views);
|
||||
XrResult result = xrLocateViews(session, &view_locate_info, &view_state, render_state.view_count, &view_count_output, render_state.views);
|
||||
if (XR_FAILED(result)) {
|
||||
print_line("OpenXR: Couldn't locate views [", get_error_string(result), "]");
|
||||
return;
|
||||
|
@ -2050,9 +2138,9 @@ void OpenXRAPI::pre_render() {
|
|||
pose_valid = false;
|
||||
}
|
||||
}
|
||||
if (view_pose_valid != pose_valid) {
|
||||
view_pose_valid = pose_valid;
|
||||
if (!view_pose_valid) {
|
||||
if (render_state.view_pose_valid != pose_valid) {
|
||||
render_state.view_pose_valid = pose_valid;
|
||||
if (!render_state.view_pose_valid) {
|
||||
print_verbose("OpenXR View pose became invalid");
|
||||
} else {
|
||||
print_verbose("OpenXR View pose became valid");
|
||||
|
@ -2071,23 +2159,24 @@ void OpenXRAPI::pre_render() {
|
|||
}
|
||||
|
||||
// Reset this, we haven't found a viewport for output yet
|
||||
has_xr_viewport = false;
|
||||
render_state.has_xr_viewport = false;
|
||||
}
|
||||
|
||||
bool OpenXRAPI::pre_draw_viewport(RID p_render_target) {
|
||||
// We found an XR viewport!
|
||||
has_xr_viewport = true;
|
||||
// Must be called from rendering thread!
|
||||
ERR_NOT_ON_RENDER_THREAD_V(false);
|
||||
|
||||
if (!can_render()) {
|
||||
// We found an XR viewport!
|
||||
render_state.has_xr_viewport = true;
|
||||
|
||||
if (instance == XR_NULL_HANDLE || session == XR_NULL_HANDLE || !render_state.running || !render_state.view_pose_valid || !render_state.should_render) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// TODO: at some point in time we may support multiple viewports in which case we need to handle that...
|
||||
|
||||
// Acquire our images
|
||||
for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
|
||||
if (!main_swapchains[i].is_image_acquired() && main_swapchains[i].get_swapchain() != XR_NULL_HANDLE) {
|
||||
if (!main_swapchains[i].acquire(frame_state.shouldRender)) {
|
||||
if (!render_state.main_swapchains[i].is_image_acquired() && render_state.main_swapchains[i].get_swapchain() != XR_NULL_HANDLE) {
|
||||
if (!render_state.main_swapchains[i].acquire(render_state.should_render)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -2101,24 +2190,33 @@ bool OpenXRAPI::pre_draw_viewport(RID p_render_target) {
|
|||
}
|
||||
|
||||
XrSwapchain OpenXRAPI::get_color_swapchain() {
|
||||
return main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
|
||||
ERR_NOT_ON_RENDER_THREAD_V(XR_NULL_HANDLE);
|
||||
|
||||
return render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_swapchain();
|
||||
}
|
||||
|
||||
RID OpenXRAPI::get_color_texture() {
|
||||
return main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_image();
|
||||
ERR_NOT_ON_RENDER_THREAD_V(RID());
|
||||
|
||||
return render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].get_image();
|
||||
}
|
||||
|
||||
RID OpenXRAPI::get_depth_texture() {
|
||||
ERR_NOT_ON_RENDER_THREAD_V(RID());
|
||||
|
||||
// Note, image will not be acquired if we didn't have a suitable swap chain format.
|
||||
if (submit_depth_buffer) {
|
||||
return main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_image();
|
||||
if (render_state.submit_depth_buffer && render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].is_image_acquired()) {
|
||||
return render_state.main_swapchains[OPENXR_SWAPCHAIN_DEPTH].get_image();
|
||||
} else {
|
||||
return RID();
|
||||
}
|
||||
}
|
||||
|
||||
void OpenXRAPI::post_draw_viewport(RID p_render_target) {
|
||||
if (!can_render()) {
|
||||
// Must be called from rendering thread!
|
||||
ERR_NOT_ON_RENDER_THREAD;
|
||||
|
||||
if (instance == XR_NULL_HANDLE || session == XR_NULL_HANDLE || !render_state.running || !render_state.view_pose_valid || !render_state.should_render) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -2130,30 +2228,33 @@ void OpenXRAPI::post_draw_viewport(RID p_render_target) {
|
|||
void OpenXRAPI::end_frame() {
|
||||
XrResult result;
|
||||
|
||||
ERR_FAIL_COND(instance == XR_NULL_HANDLE);
|
||||
ERR_FAIL_COND(session == XR_NULL_HANDLE);
|
||||
|
||||
if (!running) {
|
||||
// Must be called from rendering thread!
|
||||
ERR_NOT_ON_RENDER_THREAD;
|
||||
|
||||
if (!render_state.running) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame_state.shouldRender && view_pose_valid) {
|
||||
if (!has_xr_viewport) {
|
||||
if (render_state.should_render && render_state.view_pose_valid) {
|
||||
if (!render_state.has_xr_viewport) {
|
||||
print_line("OpenXR: No viewport was marked with use_xr, there is no rendered output!");
|
||||
} else if (!main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
|
||||
} else if (!render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
|
||||
print_line("OpenXR: No swapchain could be acquired to render to!");
|
||||
}
|
||||
}
|
||||
|
||||
// must have:
|
||||
// - shouldRender set to true
|
||||
// - should_render set to true
|
||||
// - a valid view pose for projection_views[eye].pose to submit layer
|
||||
// - an image to render
|
||||
if (!frame_state.shouldRender || !view_pose_valid || !main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
|
||||
if (!render_state.should_render || !render_state.view_pose_valid || !render_state.main_swapchains[OPENXR_SWAPCHAIN_COLOR].is_image_acquired()) {
|
||||
// submit 0 layers when we shouldn't render
|
||||
XrFrameEndInfo frame_end_info = {
|
||||
XR_TYPE_FRAME_END_INFO, // type
|
||||
nullptr, // next
|
||||
frame_state.predictedDisplayTime, // displayTime
|
||||
render_state.predicted_display_time, // displayTime
|
||||
environment_blend_mode, // environmentBlendMode
|
||||
0, // layerCount
|
||||
nullptr // layers
|
||||
|
@ -2170,14 +2271,14 @@ void OpenXRAPI::end_frame() {
|
|||
|
||||
// release our swapchain image if we acquired it
|
||||
for (int i = 0; i < OPENXR_SWAPCHAIN_MAX; i++) {
|
||||
if (main_swapchains[i].is_image_acquired()) {
|
||||
main_swapchains[i].release();
|
||||
if (render_state.main_swapchains[i].is_image_acquired()) {
|
||||
render_state.main_swapchains[i].release();
|
||||
}
|
||||
}
|
||||
|
||||
for (uint32_t eye = 0; eye < view_count; eye++) {
|
||||
projection_views[eye].fov = views[eye].fov;
|
||||
projection_views[eye].pose = views[eye].pose;
|
||||
for (uint32_t eye = 0; eye < render_state.view_count; eye++) {
|
||||
render_state.projection_views[eye].fov = render_state.views[eye].fov;
|
||||
render_state.projection_views[eye].pose = render_state.views[eye].pose;
|
||||
}
|
||||
|
||||
Vector<OrderedCompositionLayer> ordered_layers_list;
|
||||
|
@ -2210,9 +2311,9 @@ void OpenXRAPI::end_frame() {
|
|||
XR_TYPE_COMPOSITION_LAYER_PROJECTION, // type
|
||||
nullptr, // next
|
||||
layer_flags, // layerFlags
|
||||
play_space, // space
|
||||
view_count, // viewCount
|
||||
projection_views, // views
|
||||
render_state.play_space, // space
|
||||
render_state.view_count, // viewCount
|
||||
render_state.projection_views, // views
|
||||
};
|
||||
ordered_layers_list.push_back({ (const XrCompositionLayerBaseHeader *)&projection_layer, 0 });
|
||||
|
||||
|
@ -2228,7 +2329,7 @@ void OpenXRAPI::end_frame() {
|
|||
XrFrameEndInfo frame_end_info = {
|
||||
XR_TYPE_FRAME_END_INFO, // type
|
||||
nullptr, // next
|
||||
frame_state.predictedDisplayTime, // displayTime
|
||||
render_state.predicted_display_time, // displayTime
|
||||
environment_blend_mode, // environmentBlendMode
|
||||
static_cast<uint32_t>(layers_list.size()), // layerCount
|
||||
layers_list.ptr() // layers
|
||||
|
@@ -2271,6 +2372,7 @@ double OpenXRAPI::get_render_target_size_multiplier() const {
void OpenXRAPI::set_render_target_size_multiplier(double multiplier) {
render_target_size_multiplier = multiplier;
set_render_state_multiplier(multiplier);
}
bool OpenXRAPI::is_foveation_supported() const {
@@ -2414,10 +2516,6 @@ OpenXRAPI::OpenXRAPI() {
submit_depth_buffer = GLOBAL_GET("xr/openxr/submit_depth_buffer");
}
// Reset a few things that can't be done in our class definition.
frame_state.predictedDisplayTime = 0;
frame_state.predictedDisplayPeriod = 0;
}
OpenXRAPI::~OpenXRAPI() {
@@ -3132,7 +3230,7 @@ XRPose::TrackingConfidence OpenXRAPI::get_action_pose(RID p_action, RID p_tracke
return XRPose::XR_TRACKING_CONFIDENCE_NONE;
}
XrTime display_time = get_next_frame_time();
XrTime display_time = get_predicted_display_time();
if (display_time == 0) {
return XRPose::XR_TRACKING_CONFIDENCE_NONE;
}
@@ -46,13 +46,11 @@
#include "core/templates/rb_map.h"
#include "core/templates/rid_owner.h"
#include "core/templates/vector.h"
#include "servers/rendering_server.h"
#include "servers/xr/xr_pose.h"
#include <openxr/openxr.h>
// Note, OpenXR code that we wrote for our plugin makes use of C++20 notation for initializing structs which ensures zeroing out unspecified members.
// Godot is currently restricted to C++17 which doesn't allow this notation. Make sure critical fields are set.
// forward declarations, we don't want to include these fully
class OpenXRInterface;
@@ -77,7 +75,7 @@ public:
static void free_queued();
void free();
bool acquire(XrBool32 &p_should_render);
bool acquire(bool &p_should_render);
bool release();
RID get_image();
};
@@ -151,9 +149,6 @@ private:
uint32_t view_count = 0;
XrViewConfigurationView *view_configuration_views = nullptr;
XrView *views = nullptr;
XrCompositionLayerProjectionView *projection_views = nullptr;
XrCompositionLayerDepthInfoKHR *depth_views = nullptr; // Only used by Composition Layer Depth Extension if available
enum OpenXRSwapChainTypes {
OPENXR_SWAPCHAIN_COLOR,
@@ -164,14 +159,11 @@ private:
int64_t color_swapchain_format = 0;
int64_t depth_swapchain_format = 0;
Size2i main_swapchain_size = { 0, 0 };
OpenXRSwapChainInfo main_swapchains[OPENXR_SWAPCHAIN_MAX];
bool play_space_is_dirty = true;
XrSpace play_space = XR_NULL_HANDLE;
XrSpace view_space = XR_NULL_HANDLE;
bool view_pose_valid = false;
XRPose::TrackingConfidence head_pose_confidence = XRPose::XR_TRACKING_CONFIDENCE_NONE;
bool has_xr_viewport = false;
bool emulating_local_floor = false;
bool should_reset_emulated_floor_height = false;
@@ -328,6 +320,72 @@ private:
// convenience
void copy_string_to_char_buffer(const String p_string, char *p_buffer, int p_buffer_len);
// Render state, Only accessible in rendering thread
struct RenderState {
bool running = false;
bool should_render = false;
bool has_xr_viewport = false;
XrTime predicted_display_time = 0;
XrSpace play_space = XR_NULL_HANDLE;
double render_target_size_multiplier = 1.0;
uint32_t view_count = 0;
XrView *views = nullptr;
XrCompositionLayerProjectionView *projection_views = nullptr;
XrCompositionLayerDepthInfoKHR *depth_views = nullptr; // Only used by Composition Layer Depth Extension if available
bool submit_depth_buffer = false; // if set to true we submit depth buffers to OpenXR if a suitable extension is enabled.
bool view_pose_valid = false;
Size2i main_swapchain_size;
OpenXRSwapChainInfo main_swapchains[OPENXR_SWAPCHAIN_MAX];
} render_state;
static void _allocate_view_buffers(uint32_t p_view_count, bool p_submit_depth_buffer);
static void _set_render_session_running(bool p_is_running);
static void _set_render_display_info(XrTime p_predicted_display_time, bool p_should_render);
static void _set_render_play_space(uint64_t p_play_space);
static void _set_render_state_multiplier(double p_render_target_size_multiplier);
_FORCE_INLINE_ void allocate_view_buffers(uint32_t p_view_count, bool p_submit_depth_buffer) {
// If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
RenderingServer *rendering_server = RenderingServer::get_singleton();
ERR_FAIL_NULL(rendering_server);
rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_allocate_view_buffers).bind(p_view_count, p_submit_depth_buffer));
}
_FORCE_INLINE_ void set_render_session_running(bool p_is_running) {
// If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
RenderingServer *rendering_server = RenderingServer::get_singleton();
ERR_FAIL_NULL(rendering_server);
rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_set_render_session_running).bind(p_is_running));
}
_FORCE_INLINE_ void set_render_display_info(XrTime p_predicted_display_time, bool p_should_render) {
// If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
RenderingServer *rendering_server = RenderingServer::get_singleton();
ERR_FAIL_NULL(rendering_server);
rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_set_render_display_info).bind(p_predicted_display_time, p_should_render));
}
_FORCE_INLINE_ void set_render_play_space(XrSpace p_play_space) {
// If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
RenderingServer *rendering_server = RenderingServer::get_singleton();
ERR_FAIL_NULL(rendering_server);
rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_set_render_play_space).bind(uint64_t(p_play_space)));
}
_FORCE_INLINE_ void set_render_state_multiplier(double p_render_target_size_multiplier) {
// If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
RenderingServer *rendering_server = RenderingServer::get_singleton();
ERR_FAIL_NULL(rendering_server);
rendering_server->call_on_render_thread(callable_mp_static(&OpenXRAPI::_set_render_state_multiplier).bind(p_render_target_size_multiplier));
}
public:
XrInstance get_instance() const { return instance; };
XrSystemId get_system_id() const { return system_id; };
@@ -384,9 +442,13 @@ public:
bool initialize_session();
void finish();
XrSpace get_play_space() const { return play_space; }
XrTime get_next_frame_time() { return frame_state.predictedDisplayTime + frame_state.predictedDisplayPeriod; }
bool can_render() { return instance != XR_NULL_HANDLE && session != XR_NULL_HANDLE && running && view_pose_valid && frame_state.shouldRender; }
_FORCE_INLINE_ XrSpace get_play_space() const { return play_space; }
_FORCE_INLINE_ XrTime get_predicted_display_time() { return frame_state.predictedDisplayTime; }
_FORCE_INLINE_ XrTime get_next_frame_time() { return frame_state.predictedDisplayTime + frame_state.predictedDisplayPeriod; }
_FORCE_INLINE_ bool can_render() {
ERR_ON_RENDER_THREAD_V(false);
return instance != XR_NULL_HANDLE && session != XR_NULL_HANDLE && running && frame_state.shouldRender;
}
XrHandTrackerEXT get_hand_tracker(int p_hand_index);
@@ -48,6 +48,7 @@ void OpenXRAPIExtension::_bind_methods() {
ClassDB::bind_method(D_METHOD("is_running"), &OpenXRAPIExtension::is_running);
ClassDB::bind_method(D_METHOD("get_play_space"), &OpenXRAPIExtension::get_play_space);
ClassDB::bind_method(D_METHOD("get_predicted_display_time"), &OpenXRAPIExtension::get_predicted_display_time);
ClassDB::bind_method(D_METHOD("get_next_frame_time"), &OpenXRAPIExtension::get_next_frame_time);
ClassDB::bind_method(D_METHOD("can_render"), &OpenXRAPIExtension::can_render);
@@ -130,8 +131,17 @@ uint64_t OpenXRAPIExtension::get_play_space() {
return (uint64_t)OpenXRAPI::get_singleton()->get_play_space();
}
int64_t OpenXRAPIExtension::get_predicted_display_time() {
ERR_FAIL_NULL_V(OpenXRAPI::get_singleton(), 0);
return (XrTime)OpenXRAPI::get_singleton()->get_predicted_display_time();
}
int64_t OpenXRAPIExtension::get_next_frame_time() {
ERR_FAIL_NULL_V(OpenXRAPI::get_singleton(), 0);
// In the past we needed to look a frame ahead, may be calling this unintentionally so lets warn the dev.
WARN_PRINT_ONCE("OpenXR: Next frame timing called, verify this is intended.");
return (XrTime)OpenXRAPI::get_singleton()->get_next_frame_time();
}
@@ -69,6 +69,7 @@ public:
bool is_running();
uint64_t get_play_space();
int64_t get_predicted_display_time();
int64_t get_next_frame_time();
bool can_render();
@@ -43,6 +43,8 @@ void OpenXRInterface::_bind_methods() {
ADD_SIGNAL(MethodInfo("session_stopping"));
ADD_SIGNAL(MethodInfo("session_focussed"));
ADD_SIGNAL(MethodInfo("session_visible"));
ADD_SIGNAL(MethodInfo("session_loss_pending"));
ADD_SIGNAL(MethodInfo("instance_exiting"));
ADD_SIGNAL(MethodInfo("pose_recentered"));
ADD_SIGNAL(MethodInfo("refresh_rate_changed", PropertyInfo(Variant::FLOAT, "refresh_rate")));
@@ -1258,6 +1260,14 @@ void OpenXRInterface::on_state_stopping() {
emit_signal(SNAME("session_stopping"));
}
void OpenXRInterface::on_state_loss_pending() {
emit_signal(SNAME("session_loss_pending"));
}
void OpenXRInterface::on_state_exiting() {
emit_signal(SNAME("instance_exiting"));
}
void OpenXRInterface::on_pose_recentered() {
emit_signal(SNAME("pose_recentered"));
}
@@ -31,6 +31,29 @@
#ifndef OPENXR_INTERFACE_H
#define OPENXR_INTERFACE_H
// A note on multithreading and thread safety in OpenXR.
//
// Most entry points will be called from the main thread in Godot
// however a number of entry points will be called from the
// rendering thread, potentially while we're already processing
// the next frame on the main thread.
//
// OpenXR itself has been designed with threading in mind including
// a high likelihood that the XR runtime runs in separate threads
// as well.
// Hence all the frame timing information, use of swapchains and
// sync functions.
// Do note that repeated calls to tracking APIs will provide
// increasingly more accurate data for the same timestamp as
// tracking data is continuously updated.
//
// For our code we mostly implement this in our OpenXRAPI class.
// We store data accessed from the rendering thread in a separate
// struct, setting values through our renderer command queue.
//
// As some data is setup before we start rendering, and cleaned up
// after we've stopped, that is accessed directly from both threads.
#include "action_map/openxr_action_map.h"
#include "extensions/openxr_hand_tracking_extension.h"
#include "openxr_api.h"
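The note above mentions that repeated tracking queries for the same timestamp return increasingly accurate data. A small sketch of what that means in plain OpenXR terms, assuming valid space handles and omitting error handling; this is an illustration, not code from this PR.

#include <openxr/openxr.h>

// Locating the same space twice for the same XrTime is legal; the later call
// may return a refined pose because the runtime keeps updating its tracking model.
XrPosef locate_refined(XrSpace space, XrSpace base_space, XrTime display_time) {
	XrSpaceLocation location = { XR_TYPE_SPACE_LOCATION, nullptr, 0, {} };
	xrLocateSpace(space, base_space, display_time, &location); // e.g. early, on the main thread
	// ... later, closer to rendering ...
	xrLocateSpace(space, base_space, display_time, &location); // same timestamp, fresher data
	return location.pose;
}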
@@ -173,6 +196,8 @@ public:
void on_state_visible();
void on_state_focused();
void on_state_stopping();
void on_state_loss_pending();
void on_state_exiting();
void on_pose_recentered();
void on_refresh_rate_changes(float p_new_rate);
void tracker_profile_changed(RID p_tracker, RID p_interaction_profile);
@@ -688,6 +688,18 @@ void Viewport::_process_picking() {
physics_picking_events.clear();
return;
}
#ifndef _3D_DISABLED
if (use_xr) {
if (XRServer::get_singleton() != nullptr) {
Ref<XRInterface> xr_interface = XRServer::get_singleton()->get_primary_interface();
if (xr_interface.is_valid() && xr_interface->is_initialized() && xr_interface->get_view_count() > 1) {
WARN_PRINT_ONCE("Object picking can't be used when stereo rendering, this will be turned off!");
physics_object_picking = false; // don't try again.
return;
}
}
}
#endif
_drop_physics_mouseover(true);
@@ -1048,6 +1048,10 @@ public:
virtual void init() override;
virtual void finish() override;
virtual bool is_on_render_thread() override {
return Thread::get_caller_id() == server_thread;
}
virtual void call_on_render_thread(const Callable &p_callable) override {
if (Thread::get_caller_id() == server_thread) {
command_queue.flush_if_pending();
@@ -3426,6 +3426,7 @@ void RenderingServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_rendering_device"), &RenderingServer::get_rendering_device);
ClassDB::bind_method(D_METHOD("create_local_rendering_device"), &RenderingServer::create_local_rendering_device);
ClassDB::bind_method(D_METHOD("is_on_render_thread"), &RenderingServer::is_on_render_thread);
ClassDB::bind_method(D_METHOD("call_on_render_thread", "callable"), &RenderingServer::call_on_render_thread);
#ifndef DISABLE_DEPRECATED
@@ -41,6 +41,32 @@
#include "servers/display_server.h"
#include "servers/rendering/rendering_device.h"
// Helper macros for code outside of the rendering server, but that is
// called by the rendering server.
#ifdef DEBUG_ENABLED
#define ERR_ON_RENDER_THREAD \
	RenderingServer *rendering_server = RenderingServer::get_singleton(); \
	ERR_FAIL_NULL(rendering_server); \
	ERR_FAIL_COND(rendering_server->is_on_render_thread());
#define ERR_ON_RENDER_THREAD_V(m_ret) \
	RenderingServer *rendering_server = RenderingServer::get_singleton(); \
	ERR_FAIL_NULL_V(rendering_server, m_ret); \
	ERR_FAIL_COND_V(rendering_server->is_on_render_thread(), m_ret);
#define ERR_NOT_ON_RENDER_THREAD \
	RenderingServer *rendering_server = RenderingServer::get_singleton(); \
	ERR_FAIL_NULL(rendering_server); \
	ERR_FAIL_COND(!rendering_server->is_on_render_thread());
#define ERR_NOT_ON_RENDER_THREAD_V(m_ret) \
	RenderingServer *rendering_server = RenderingServer::get_singleton(); \
	ERR_FAIL_NULL_V(rendering_server, m_ret); \
	ERR_FAIL_COND_V(!rendering_server->is_on_render_thread(), m_ret);
#else
#define ERR_ON_RENDER_THREAD
#define ERR_ON_RENDER_THREAD_V(m_ret)
#define ERR_NOT_ON_RENDER_THREAD
#define ERR_NOT_ON_RENDER_THREAD_V(m_ret)
#endif
template <typename T>
class TypedArray;
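A hedged illustration of how the guards above are meant to be used, following the pattern the OpenXR changes in this diff adopt. HypotheticalRendererState and its members are made up for the example and assumed to derive from Object so callable_mp() can bind to them; only the macros and RenderingServer calls are real.

// Render-thread-only getter: bail out with a default value if called elsewhere.
RID HypotheticalRendererState::get_output_texture() {
	ERR_NOT_ON_RENDER_THREAD_V(RID());
	return output_texture;
}

// Main-thread entry point: refuse to run on the render thread and queue the
// actual work to be applied there between frames.
void HypotheticalRendererState::request_resize(Size2i p_size) {
	ERR_ON_RENDER_THREAD;
	RenderingServer::get_singleton()->call_on_render_thread(callable_mp(this, &HypotheticalRendererState::_resize_on_render_thread).bind(p_size));
}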
@@ -1684,7 +1710,7 @@ public:
#ifndef DISABLE_DEPRECATED
// Never actually used, should be removed when we can break compatibility.
enum Features {
enum Features{
	FEATURE_SHADERS,
	FEATURE_MULTITHREADED,
};
@@ -1708,6 +1734,7 @@ public:
bool is_render_loop_enabled() const;
void set_render_loop_enabled(bool p_enabled);
virtual bool is_on_render_thread() = 0;
virtual void call_on_render_thread(const Callable &p_callable) = 0;
#ifdef TOOLS_ENABLED
@ -122,17 +122,21 @@ public:

    /** rendering and internal **/

    // These methods are called from the main thread.
    virtual Transform3D get_camera_transform() = 0; /* returns the position of our camera, only used for updating reference frame. For monoscopic this is equal to the views transform, for stereoscopic this should be an average */
    virtual void process() = 0;

    // These methods can be called from both main and render thread.
    virtual Size2 get_render_target_size() = 0; /* returns the recommended render target size per eye for this device */
    virtual uint32_t get_view_count() = 0; /* returns the view count we need (1 is monoscopic, 2 is stereoscopic but can be more) */
    virtual Transform3D get_camera_transform() = 0; /* returns the position of our camera for updating our camera node. For monoscopic this is equal to the views transform, for stereoscopic this should be an average */

    // These methods are called from the rendering thread.
    virtual Transform3D get_transform_for_view(uint32_t p_view, const Transform3D &p_cam_transform) = 0; /* get each views transform */
    virtual Projection get_projection_for_view(uint32_t p_view, double p_aspect, double p_z_near, double p_z_far) = 0; /* get each view projection matrix */
    virtual RID get_vrs_texture(); /* obtain VRS texture */
    virtual RID get_color_texture(); /* obtain color output texture (if applicable) */
    virtual RID get_depth_texture(); /* obtain depth output texture (if applicable, used for reprojection) */
    virtual RID get_velocity_texture(); /* obtain velocity output texture (if applicable, used for spacewarp) */

    virtual void process() = 0;
    virtual void pre_render(){};
    virtual bool pre_draw_viewport(RID p_render_target) { return true; }; /* inform XR interface we are about to start our viewport draw process */
    virtual Vector<BlitToScreen> post_draw_viewport(RID p_render_target, const Rect2 &p_screen_rect) = 0; /* inform XR interface we finished our viewport draw process */

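The comments above now spell out which XRInterface entry points run on the main thread, which may be called from either thread, and which belong to the rendering thread. A partial, hypothetical implementation sketch is shown below; the remaining pure virtuals and registration boilerplate are omitted, so the class stays abstract and is not meant to be used as-is.

#include "servers/xr/xr_interface.h"

// Illustrative only; GDCLASS/registration boilerplate omitted.
class ExampleXRInterface : public XRInterface {
    Transform3D head_transform; // written on the main thread in process()

public:
    // Main thread: poll the runtime and update tracking state.
    virtual void process() override {
        // ... pump the underlying XR runtime here ...
    }

    // Main or render thread: only touch state that is safe from both.
    // A real interface would need its own synchronization or a render-state
    // copy, as XRServer does further down in this diff.
    virtual Transform3D get_camera_transform() override {
        return head_transform;
    }

    // Rendering thread: derive the per-view transform from the camera transform.
    virtual Transform3D get_transform_for_view(uint32_t p_view, const Transform3D &p_cam_transform) override {
        return p_cam_transform; // a real interface would apply per-eye offsets here
    }
};
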
@ -51,7 +51,7 @@ XRServer *XRServer::singleton = nullptr;

XRServer *XRServer::get_singleton() {
    return singleton;
};
}

void XRServer::_bind_methods() {
    ClassDB::bind_method(D_METHOD("get_world_scale"), &XRServer::get_world_scale);

@ -59,7 +59,7 @@ void XRServer::_bind_methods() {
    ClassDB::bind_method(D_METHOD("get_world_origin"), &XRServer::get_world_origin);
    ClassDB::bind_method(D_METHOD("set_world_origin", "world_origin"), &XRServer::set_world_origin);
    ClassDB::bind_method(D_METHOD("get_reference_frame"), &XRServer::get_reference_frame);
    ClassDB::bind_method(D_METHOD("clear_reference_frame"), &XRServer::get_reference_frame);
    ClassDB::bind_method(D_METHOD("clear_reference_frame"), &XRServer::clear_reference_frame);
    ClassDB::bind_method(D_METHOD("center_on_hmd", "rotation_mode", "keep_height"), &XRServer::center_on_hmd);
    ClassDB::bind_method(D_METHOD("get_hmd_transform"), &XRServer::get_hmd_transform);

@ -104,11 +104,20 @@ void XRServer::_bind_methods() {
    ADD_SIGNAL(MethodInfo("tracker_added", PropertyInfo(Variant::STRING_NAME, "tracker_name"), PropertyInfo(Variant::INT, "type")));
    ADD_SIGNAL(MethodInfo("tracker_updated", PropertyInfo(Variant::STRING_NAME, "tracker_name"), PropertyInfo(Variant::INT, "type")));
    ADD_SIGNAL(MethodInfo("tracker_removed", PropertyInfo(Variant::STRING_NAME, "tracker_name"), PropertyInfo(Variant::INT, "type")));
};
}

double XRServer::get_world_scale() const {
    return world_scale;
};
    RenderingServer *rendering_server = RenderingServer::get_singleton();

    if (rendering_server && rendering_server->is_on_render_thread()) {
        // Return the value with which we're currently rendering,
        // if we're on the render thread
        return render_state.world_scale;
    } else {
        // Return our current value
        return world_scale;
    }
}

void XRServer::set_world_scale(double p_world_scale) {
    if (p_world_scale < 0.01) {

@ -118,19 +127,58 @@ void XRServer::set_world_scale(double p_world_scale) {
    }

    world_scale = p_world_scale;
};
    set_render_world_scale(world_scale);
}

void XRServer::_set_render_world_scale(double p_world_scale) {
    // Must be called from rendering thread!
    ERR_NOT_ON_RENDER_THREAD;

    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);
    xr_server->render_state.world_scale = p_world_scale;
}

Transform3D XRServer::get_world_origin() const {
    return world_origin;
};
    RenderingServer *rendering_server = RenderingServer::get_singleton();

    if (rendering_server && rendering_server->is_on_render_thread()) {
        // Return the value with which we're currently rendering,
        // if we're on the render thread
        return render_state.world_origin;
    } else {
        // Return our current value
        return world_origin;
    }
}

void XRServer::set_world_origin(const Transform3D &p_world_origin) {
    world_origin = p_world_origin;
};
    set_render_world_origin(world_origin);
}

void XRServer::_set_render_world_origin(const Transform3D &p_world_origin) {
    // Must be called from rendering thread!
    ERR_NOT_ON_RENDER_THREAD;

    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);
    xr_server->render_state.world_origin = p_world_origin;
}

Transform3D XRServer::get_reference_frame() const {
    return reference_frame;
};
    RenderingServer *rendering_server = RenderingServer::get_singleton();
    ERR_FAIL_NULL_V(rendering_server, reference_frame);

    if (rendering_server->is_on_render_thread()) {
        // Return the value with which we're currently rendering,
        // if we're on the render thread
        return render_state.reference_frame;
    } else {
        // Return our current value
        return reference_frame;
    }
}

void XRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) {
    if (primary_interface == nullptr) {

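The pattern above is the core of the fix: each main-thread setter updates its own copy immediately and queues a matching _set_render_* call, so the rendering thread only ever sees values committed at a safe point between frames. As a hedged sketch, the same pattern applied to a hypothetical property would look like the following; player_height and render_state.player_height do not exist in XRServer.

// Sketch only: mirrors the world_scale code above for a made-up property.
void XRServer::set_player_height(double p_height) {
    player_height = p_height; // main-thread copy, read by game logic

    RenderingServer *rendering_server = RenderingServer::get_singleton();
    ERR_FAIL_NULL(rendering_server);
    rendering_server->call_on_render_thread(callable_mp_static(&XRServer::_set_render_player_height).bind(p_height));
}

void XRServer::_set_render_player_height(double p_height) {
    // Must be called from rendering thread!
    ERR_NOT_ON_RENDER_THREAD;

    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);
    xr_server->render_state.player_height = p_height; // render-thread copy
}
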
@ -156,27 +204,38 @@ void XRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) {
    } else if (p_rotation_mode == 2) {
        // remove our rotation, we're only interested in centering on position
        new_reference_frame.basis = Basis();
    };
    }

    // don't negate our height
    if (p_keep_height) {
        new_reference_frame.origin.y = 0.0;
    };
    }

    reference_frame = new_reference_frame.inverse();
};
    set_render_reference_frame(reference_frame);
}

void XRServer::clear_reference_frame() {
    reference_frame = Transform3D();
    set_render_reference_frame(reference_frame);
}

void XRServer::_set_render_reference_frame(const Transform3D &p_reference_frame) {
    // Must be called from rendering thread!
    ERR_NOT_ON_RENDER_THREAD;

    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);
    xr_server->render_state.reference_frame = p_reference_frame;
}

Transform3D XRServer::get_hmd_transform() {
    Transform3D hmd_transform;
    if (primary_interface != nullptr) {
        hmd_transform = primary_interface->get_camera_transform();
    };
    }
    return hmd_transform;
};
}

void XRServer::add_interface(const Ref<XRInterface> &p_interface) {
    ERR_FAIL_COND(p_interface.is_null());

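clear_reference_frame() is the counterpart to center_on_hmd() and drops the stored offset. A hypothetical game-side usage sketch follows; ExamplePlayerRig and its callbacks are not part of this diff.

// Sketch only: recenters while a designated controller button is held,
// and resets tracking on request.
void ExamplePlayerRig::_on_recenter_button_held() {
    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);

    // Reset yaw and horizontal position, keep tilt and the user's height.
    xr_server->center_on_hmd(XRServer::RESET_BUT_KEEP_TILT, true);
}

void ExamplePlayerRig::_on_reset_tracking() {
    XRServer *xr_server = XRServer::get_singleton();
    ERR_FAIL_NULL(xr_server);

    // Drop any reference frame set by earlier center_on_hmd() calls.
    xr_server->clear_reference_frame();
}
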
@ -185,12 +244,12 @@ void XRServer::add_interface(const Ref<XRInterface> &p_interface) {
        if (interfaces[i] == p_interface) {
            ERR_PRINT("Interface was already added");
            return;
        };
    };
        }
    }

    interfaces.push_back(p_interface);
    emit_signal(SNAME("interface_added"), p_interface->get_name());
};
}

void XRServer::remove_interface(const Ref<XRInterface> &p_interface) {
    ERR_FAIL_COND(p_interface.is_null());

@ -200,33 +259,33 @@ void XRServer::remove_interface(const Ref<XRInterface> &p_interface) {
        if (interfaces[i] == p_interface) {
            idx = i;
            break;
        };
    };
        }
    }

    ERR_FAIL_COND_MSG(idx == -1, "Interface not found.");
    print_verbose("XR: Removed interface \"" + p_interface->get_name() + "\"");
    emit_signal(SNAME("interface_removed"), p_interface->get_name());
    interfaces.remove_at(idx);
};
}

int XRServer::get_interface_count() const {
    return interfaces.size();
};
}

Ref<XRInterface> XRServer::get_interface(int p_index) const {
    ERR_FAIL_INDEX_V(p_index, interfaces.size(), nullptr);

    return interfaces[p_index];
};
}

Ref<XRInterface> XRServer::find_interface(const String &p_name) const {
    for (int i = 0; i < interfaces.size(); i++) {
        if (interfaces[i]->get_name() == p_name) {
            return interfaces[i];
        };
    };
        }
    }
    return Ref<XRInterface>();
};
}

TypedArray<Dictionary> XRServer::get_interfaces() const {
    Array ret;

@ -238,14 +297,14 @@ TypedArray<Dictionary> XRServer::get_interfaces() const {
        iface_info["name"] = interfaces[i]->get_name();

        ret.push_back(iface_info);
    };
    }

    return ret;
};
}

Ref<XRInterface> XRServer::get_primary_interface() const {
    return primary_interface;
};
}

void XRServer::set_primary_interface(const Ref<XRInterface> &p_primary_interface) {
    if (p_primary_interface.is_null()) {

@ -256,7 +315,7 @@ void XRServer::set_primary_interface(const Ref<XRInterface> &p_primary_interface

        print_verbose("XR: Primary interface set to: " + primary_interface->get_name());
    }
};
}

void XRServer::add_tracker(const Ref<XRTracker> &p_tracker) {
    ERR_FAIL_COND(p_tracker.is_null());

@ -272,7 +331,7 @@ void XRServer::add_tracker(const Ref<XRTracker> &p_tracker) {
        trackers[tracker_name] = p_tracker;
        emit_signal(SNAME("tracker_added"), tracker_name, p_tracker->get_tracker_type());
    }
};
}

void XRServer::remove_tracker(const Ref<XRTracker> &p_tracker) {
    ERR_FAIL_COND(p_tracker.is_null());

@ -285,7 +344,7 @@ void XRServer::remove_tracker(const Ref<XRTracker> &p_tracker) {
        // and remove it
        trackers.erase(tracker_name);
    }
};
}

Dictionary XRServer::get_trackers(int p_tracker_types) {
    Dictionary res;

@ -307,7 +366,7 @@ Ref<XRTracker> XRServer::get_tracker(const StringName &p_name) const {
        // tracker hasn't been registered yet, which is fine, no need to spam the error log...
        return Ref<XRTracker>();
    }
};
}

PackedStringArray XRServer::get_suggested_tracker_names() const {
    PackedStringArray arr;

@ -369,9 +428,9 @@ void XRServer::_process() {
            // ignore, not a valid reference
        } else if (interfaces[i]->is_initialized()) {
            interfaces.write[i]->process();
        };
    };
};
        }
    }
}

void XRServer::pre_render() {
    // called from RendererViewport.draw_viewports right before we start drawing our viewports

@ -383,8 +442,8 @@ void XRServer::pre_render() {
            // ignore, not a valid reference
        } else if (interfaces[i]->is_initialized()) {
            interfaces.write[i]->pre_render();
        };
    };
        }
    }
}

void XRServer::end_frame() {

@ -396,14 +455,13 @@ void XRServer::end_frame() {
            // ignore, not a valid reference
        } else if (interfaces[i]->is_initialized()) {
            interfaces.write[i]->end_frame();
        };
    };
        }
    }
}

XRServer::XRServer() {
    singleton = this;
    world_scale = 1.0;
};
}

XRServer::~XRServer() {
    primary_interface.unref();

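Taken together, _process() runs during main-thread scene processing while pre_render() and end_frame() are driven from the rendering side (pre_render() from RendererViewport.draw_viewports, per the comment above). A rough, illustrative per-frame ordering, not actual engine code:

// Illustrative pseudo-flow only; the real calls are made by the main loop and
// the rendering server, not by user code.
void example_xr_frame() {
    XRServer *xr = XRServer::get_singleton();

    // Main thread, during scene processing:
    xr->_process(); // each initialized interface polls its runtime and updates trackers

    // Rendering thread, while viewports are drawn:
    xr->pre_render(); // interfaces prepare per-frame render state
    // ... viewports render using get_transform_for_view() / get_projection_for_view() ...
    xr->end_frame(); // interfaces finish and submit the frame
}
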
@ -412,4 +470,4 @@ XRServer::~XRServer() {
    trackers.clear();

    singleton = nullptr;
};
}

@ -36,6 +36,7 @@
#include "core/os/thread_safe.h"
#include "core/templates/rid.h"
#include "core/variant/variant.h"
#include "rendering_server.h"

class XRInterface;
class XRTracker;

@ -92,10 +93,46 @@ private:

    Ref<XRInterface> primary_interface; /* we'll identify one interface as primary, this will be used by our viewports */

    double world_scale; /* scale by which we multiply our tracker positions */
    double world_scale = 1.0; /* scale by which we multiply our tracker positions */
    Transform3D world_origin; /* our world origin point, maps a location in our virtual world to the origin point in our real world tracking volume */
    Transform3D reference_frame; /* our reference frame */

    // As we may be updating our main state for our next frame while we're still rendering our previous frame,
    // we need to keep copies around.
    struct RenderState {
        double world_scale = 1.0; /* scale by which we multiply our tracker positions */
        Transform3D world_origin; /* our world origin point, maps a location in our virtual world to the origin point in our real world tracking volume */
        Transform3D reference_frame; /* our reference frame */
    } render_state;

    static void _set_render_world_scale(double p_world_scale);
    static void _set_render_world_origin(const Transform3D &p_world_origin);
    static void _set_render_reference_frame(const Transform3D &p_reference_frame);

    _FORCE_INLINE_ void set_render_world_scale(double p_world_scale) {
        // If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
        RenderingServer *rendering_server = RenderingServer::get_singleton();
        ERR_FAIL_NULL(rendering_server);

        rendering_server->call_on_render_thread(callable_mp_static(&XRServer::_set_render_world_scale).bind(p_world_scale));
    }

    _FORCE_INLINE_ void set_render_world_origin(const Transform3D &p_world_origin) {
        // If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
        RenderingServer *rendering_server = RenderingServer::get_singleton();
        ERR_FAIL_NULL(rendering_server);

        rendering_server->call_on_render_thread(callable_mp_static(&XRServer::_set_render_world_origin).bind(p_world_origin));
    }

    _FORCE_INLINE_ void set_render_reference_frame(const Transform3D &p_reference_frame) {
        // If we're rendering on a separate thread, we may still be processing the last frame, don't communicate this till we're ready...
        RenderingServer *rendering_server = RenderingServer::get_singleton();
        ERR_FAIL_NULL(rendering_server);

        rendering_server->call_on_render_thread(callable_mp_static(&XRServer::_set_render_reference_frame).bind(p_reference_frame));
    }

protected:
    static XRServer *singleton;

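The inline set_render_* wrappers are what keep the two copies in sync: callable_mp_static() wraps the static setter and .bind() captures the value, so the update travels to the rendering thread as a self-contained Callable. A small hedged sketch of that mechanism in isolation; example_print_scale and queue_example_print are hypothetical.

#include "core/string/print_string.h"
#include "core/variant/callable_method_pointer.h"
#include "servers/rendering_server.h"

// Hypothetical function used to show the Callable plumbing.
static void example_print_scale(double p_scale) {
    print_line(vformat("world scale is now %f", p_scale));
}

static void queue_example_print(double p_scale) {
    RenderingServer *rs = RenderingServer::get_singleton();
    ERR_FAIL_NULL(rs);

    // The bound Callable carries its argument with it and is executed on the
    // rendering thread at a safe point in the frame.
    rs->call_on_render_thread(callable_mp_static(&example_print_scale).bind(p_scale));
}
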