virtualx-engine/doc/classes/ARVRInterface.xml
2017-10-06 20:35:55 +11:00

169 lines
7.5 KiB
XML

<?xml version="1.0" encoding="UTF-8" ?>
<class name="ARVRInterface" inherits="Reference" category="Core" version="3.0.alpha.custom_build">
<brief_description>
Base class for ARVR interface implementation.
</brief_description>
<description>
This class needs to be implemented to make an AR or VR platform available to Godot and these should be implemented as C++ modules or GDNative modules (note that for GDNative the subclass ARVRScriptInterface should be used). Part of the interface is exposed to GDScript so you can detect, enable and configure an AR or VR platform.
Interfaces should be written in such a way that simply enabling them will give us a working setup. You can query the available interfaces through ARVRServer.
</description>
<tutorials>
</tutorials>
<demos>
</demos>
<methods>
<method name="get_anchor_detection_is_enabled" qualifiers="const">
<return type="bool">
</return>
<description>
Returns true if anchor detection is enabled (AR only).
</description>
</method>
<method name="get_capabilities" qualifiers="const">
<return type="int">
</return>
<description>
Returns a combination of flags providing information about the capabilities of this interface.
</description>
</method>
<method name="get_name" qualifiers="const">
<return type="String">
</return>
<description>
Returns the name of this interface (OpenVR, OpenHMD, ARKit, etc).
</description>
</method>
<method name="get_recommended_render_targetsize">
<return type="Vector2">
</return>
<description>
Returns the resolution at which we should render our intermediate results before things like lens distortion are applied by the VR platform.
</description>
</method>
<method name="get_tracking_status" qualifiers="const">
<return type="int" enum="ARVRInterface.Tracking_status">
</return>
<description>
If supported, returns the status of our tracking. This will allow you to provide feedback to the user whether there are issues with positional tracking.
</description>
</method>
<method name="initialize">
<return type="bool">
</return>
<description>
Call this to initialize this interface. The first interface that is initialized is identified as the primary interface and it will be used for rendering output.
After initializing the interface you want to use you then need to enable the AR/VR mode of a viewport and rendering should commence.
Note that you must enable the AR/VR mode on the main viewport for any device that uses the main output of Godot such as for mobile VR.
If you do this for a platform that handles its own output (such as OpenVR) Godot will show just one eye without distortion on screen. Alternatively you can add a separate viewport node to your scene and enable AR/VR on that viewport and it will be used to output to the HMD leaving you free to do anything you like in the main window such as using a separate camera as a spectator camera or render out something completely different.
While currently not used you can activate additional interfaces, you may wish to do this if you want to track controllers from other platforms. However at this point in time only one interface can render to an HMD.
</description>
</method>
<method name="is_initialized">
<return type="bool">
</return>
<description>
Returns true if this interface is active.
</description>
</method>
<method name="is_primary">
<return type="bool">
</return>
<description>
Returns true if this interface is currently the primary interface (the interface responsible for showing the output).
</description>
</method>
<method name="is_stereo">
<return type="bool">
</return>
<description>
Returns true if the current output of this interface is in stereo.
</description>
</method>
<method name="set_anchor_detection_is_enabled">
<return type="void">
</return>
<argument index="0" name="enable" type="bool">
</argument>
<description>
Enables anchor detection, this is used on AR interfaces and enables the extra logic that will detect planes, features, objects, etc. and adds/modifies anchor points.
</description>
</method>
<method name="set_is_initialized">
<return type="void">
</return>
<argument index="0" name="initialized" type="bool">
</argument>
<description>
Initialize/uninitialize this interface (same effect as calling initialize/uninitialize).
</description>
</method>
<method name="set_is_primary">
<return type="void">
</return>
<argument index="0" name="enable" type="bool">
</argument>
<description>
Set this interface to the primary interface (unset the old one).
</description>
</method>
<method name="uninitialize">
<return type="void">
</return>
<description>
Turns the interface off.
</description>
</method>
</methods>
<members>
<member name="ar_is_anchor_detection_enabled" type="bool" setter="set_anchor_detection_is_enabled" getter="get_anchor_detection_is_enabled">
On an AR interface, is our anchor detection enabled?
</member>
<member name="interface_is_initialized" type="bool" setter="set_is_initialized" getter="is_initialized">
Has this interface been initialized?
</member>
<member name="interface_is_primary" type="bool" setter="set_is_primary" getter="is_primary">
Is this our primary interface?
</member>
</members>
<constants>
<constant name="ARVR_NONE" value="0">
No ARVR capabilities.
</constant>
<constant name="ARVR_MONO" value="1">
This interface can work with normal rendering output (non-HMD based AR).
</constant>
<constant name="ARVR_STEREO" value="2">
This interface supports stereoscopic rendering.
</constant>
<constant name="ARVR_AR" value="4">
This interface support AR (video background and real world tracking).
</constant>
<constant name="ARVR_EXTERNAL" value="8">
This interface outputs to an external device, if the main viewport is used the on screen output is an unmodified buffer of either the left or right eye (stretched if the viewport size is not changed to the same aspect ratio of get_recommended_render_targetsize). Using a separate viewport node frees up the main viewport for other purposes.
</constant>
<constant name="EYE_MONO" value="0">
Mono output, this is mostly used internally when retrieving positioning information for our camera node or when stereoscopic rendering is not supported.
</constant>
<constant name="EYE_LEFT" value="1">
Left eye output, this is mostly used internally when rendering the image for the left eye and obtaining positioning and projection information.
</constant>
<constant name="EYE_RIGHT" value="2">
Right eye output, this is mostly used internally when rendering the image for the right eye and obtaining positioning and projection information.
</constant>
<constant name="ARVR_NORMAL_TRACKING" value="0">
Tracking is behaving as expected.
</constant>
<constant name="ARVR_EXCESSIVE_MOTION" value="1">
Tracking is hindered by excessive motion, player is moving faster than tracking can keep up.
</constant>
<constant name="ARVR_INSUFFICIENT_FEATURES" value="2">
Tracking is hindered by insufficient features, it's too dark (for camera based tracking), player is blocked, etc.
</constant>
<constant name="ARVR_UNKNOWN_TRACKING" value="3">
We don't know the status of the tracking or this interface does not provide feedback.
</constant>
<constant name="ARVR_NOT_TRACKING" value="4">
Tracking is not functional (camera not plugged in or obscured, lighthouses turned off, etc.)
</constant>
</constants>
</class>