/**************************************************************************/
/*  binder_common.h                                                       */
/**************************************************************************/
/*                         This file is part of:                          */
/*                             GODOT ENGINE                               */
/*                        https://godotengine.org                         */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur.                  */
/*                                                                        */
/* Permission is hereby granted, free of charge, to any person obtaining  */
/* a copy of this software and associated documentation files (the        */
/* "Software"), to deal in the Software without restriction, including    */
/* without limitation the rights to use, copy, modify, merge, publish,    */
/* distribute, sublicense, and/or sell copies of the Software, and to     */
/* permit persons to whom the Software is furnished to do so, subject to  */
/* the following conditions:                                              */
/*                                                                        */
/* The above copyright notice and this permission notice shall be         */
/* included in all copies or substantial portions of the Software.        */
/*                                                                        */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,        */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF     */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY   */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,   */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE      */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                 */
/**************************************************************************/

#ifndef BINDER_COMMON_H
#define BINDER_COMMON_H

#include "core/input/input_enums.h"
#include "core/object/object.h"
#include "core/os/keyboard.h"
#include "core/templates/list.h"
#include "core/templates/simple_type.h"
#include "core/typedefs.h"
#include "core/variant/method_ptrcall.h"
#include "core/variant/type_info.h"
#include "core/variant/variant.h"
#include "core/variant/variant_internal.h"

#include <stdio.h>

// Variant cannot define an implicit cast operator for every Object subclass, so the
// casting is done here, to allow binding methods with parameters more specific than Object *
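// Illustrative example (not part of the binding API itself): a method such as
// `void set_target(Node *p_node)` can be bound because VariantCaster<Node *> resolves the
// argument through Object::cast_to<Node>(), while non-Object parameters fall back to the
// Variant's own conversion operators. (`set_target`/`Node` are placeholder names here.)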

template <typename T>
struct VariantCaster {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of_v<Object, TStripped>) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};

template <typename T>
struct VariantCaster<T &> {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of_v<Object, TStripped>) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};

template <typename T>
struct VariantCaster<const T &> {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of_v<Object, TStripped>) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};
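
// Note: the `T &` and `const T &` partial specializations above deliberately return `T` by value,
// so methods taking (const) reference parameters can be bound and fed from Variant arguments too.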

#define VARIANT_ENUM_CAST(m_enum) \
	MAKE_ENUM_TYPE_INFO(m_enum) \
	template <> \
	struct VariantCaster<m_enum> { \
		static _FORCE_INLINE_ m_enum cast(const Variant &p_variant) { \
			return (m_enum)p_variant.operator int64_t(); \
		} \
	}; \
	template <> \
	struct PtrToArg<m_enum> { \
		_FORCE_INLINE_ static m_enum convert(const void *p_ptr) { \
			return m_enum(*reinterpret_cast<const int64_t *>(p_ptr)); \
		} \
		typedef int64_t EncodeT; \
		_FORCE_INLINE_ static void encode(m_enum p_val, const void *p_ptr) { \
			*(int64_t *)p_ptr = (int64_t)p_val; \
		} \
	}; \
	template <> \
	struct ZeroInitializer<m_enum> { \
		static void initialize(m_enum &value) { value = (m_enum)0; } \
	}; \
	template <> \
	struct VariantInternalAccessor<m_enum> { \
		static _FORCE_INLINE_ m_enum get(const Variant *v) { return m_enum(*VariantInternal::get_int(v)); } \
		static _FORCE_INLINE_ void set(Variant *v, m_enum p_value) { *VariantInternal::get_int(v) = (int64_t)p_value; } \
	};
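
// Expanding the macro for a concrete enum, e.g. `VARIANT_ENUM_CAST(MyObject::MyEnum);` (a
// placeholder name), gives the binding layer everything it needs for that enum: Variant casting,
// pointer-call marshalling through a full int64_t, zero-initialization, and direct access to the
// integer stored inside a Variant.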

#define VARIANT_BITFIELD_CAST(m_enum) \
	MAKE_BITFIELD_TYPE_INFO(m_enum) \
	template <> \
	struct VariantCaster<BitField<m_enum>> { \
		static _FORCE_INLINE_ BitField<m_enum> cast(const Variant &p_variant) { \
			return BitField<m_enum>(p_variant.operator int64_t()); \
		} \
	}; \
	template <> \
	struct PtrToArg<BitField<m_enum>> { \
		_FORCE_INLINE_ static BitField<m_enum> convert(const void *p_ptr) { \
			return BitField<m_enum>(*reinterpret_cast<const int64_t *>(p_ptr)); \
		} \
		typedef int64_t EncodeT; \
		_FORCE_INLINE_ static void encode(BitField<m_enum> p_val, const void *p_ptr) { \
			*(int64_t *)p_ptr = p_val; \
		} \
	}; \
	template <> \
	struct ZeroInitializer<BitField<m_enum>> { \
		static void initialize(BitField<m_enum> &value) { value = 0; } \
	}; \
	template <> \
	struct VariantInternalAccessor<BitField<m_enum>> { \
		static _FORCE_INLINE_ BitField<m_enum> get(const Variant *v) { return BitField<m_enum>(*VariantInternal::get_int(v)); } \
		static _FORCE_INLINE_ void set(Variant *v, BitField<m_enum> p_value) { *VariantInternal::get_int(v) = p_value.operator int64_t(); } \
	};

// Object enum casts must go here
VARIANT_ENUM_CAST(Object::ConnectFlags);

VARIANT_ENUM_CAST(Vector2::Axis);
VARIANT_ENUM_CAST(Vector2i::Axis);
VARIANT_ENUM_CAST(Vector3::Axis);
VARIANT_ENUM_CAST(Vector3i::Axis);
VARIANT_ENUM_CAST(Vector4::Axis);
VARIANT_ENUM_CAST(Vector4i::Axis);
VARIANT_ENUM_CAST(EulerOrder);
VARIANT_ENUM_CAST(Projection::Planes);

VARIANT_ENUM_CAST(Error);
VARIANT_ENUM_CAST(Side);
VARIANT_ENUM_CAST(ClockDirection);
VARIANT_ENUM_CAST(Corner);
VARIANT_ENUM_CAST(HatDir);
VARIANT_BITFIELD_CAST(HatMask);
VARIANT_ENUM_CAST(JoyAxis);
VARIANT_ENUM_CAST(JoyButton);

VARIANT_ENUM_CAST(MIDIMessage);
VARIANT_ENUM_CAST(MouseButton);
VARIANT_BITFIELD_CAST(MouseButtonMask);
VARIANT_ENUM_CAST(Orientation);
VARIANT_ENUM_CAST(HorizontalAlignment);
VARIANT_ENUM_CAST(VerticalAlignment);
VARIANT_ENUM_CAST(InlineAlignment);
VARIANT_ENUM_CAST(PropertyHint);
VARIANT_BITFIELD_CAST(PropertyUsageFlags);
VARIANT_ENUM_CAST(Variant::Type);
VARIANT_ENUM_CAST(Variant::Operator);

// Key

VARIANT_ENUM_CAST(Key);
VARIANT_BITFIELD_CAST(KeyModifierMask);
VARIANT_ENUM_CAST(KeyLocation);
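
// Convenience operators so a Key can be combined directly with a BitField<KeyModifierMask>
// (for example when composing a keycode with modifier flags); they operate on the underlying
// integer values.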
static inline Key &operator|=(Key &a, BitField<KeyModifierMask> b) {
	a = static_cast<Key>(static_cast<int>(a) | static_cast<int>(b.operator int64_t()));
	return a;
}

static inline Key &operator&=(Key &a, BitField<KeyModifierMask> b) {
	a = static_cast<Key>(static_cast<int>(a) & static_cast<int>(b.operator int64_t()));
	return a;
}

static inline Key operator|(Key a, BitField<KeyModifierMask> b) {
	return (Key)((int)a | (int)b.operator int64_t());
}

static inline Key operator&(Key a, BitField<KeyModifierMask> b) {
	return (Key)((int)a & (int)b.operator int64_t());
}

static inline Key operator+(BitField<KeyModifierMask> a, Key b) {
	return (Key)((int)a.operator int64_t() + (int)b);
}

static inline Key operator|(BitField<KeyModifierMask> a, Key b) {
	return (Key)((int)a.operator int64_t() | (int)b);
}

template <>
struct VariantCaster<char32_t> {
	static _FORCE_INLINE_ char32_t cast(const Variant &p_variant) {
		return (char32_t)p_variant.operator int();
	}
};

template <>
struct PtrToArg<char32_t> {
	_FORCE_INLINE_ static char32_t convert(const void *p_ptr) {
		return char32_t(*reinterpret_cast<const int64_t *>(p_ptr));
	}
	typedef int64_t EncodeT;
	_FORCE_INLINE_ static void encode(char32_t p_val, const void *p_ptr) {
		*(int64_t *)p_ptr = p_val;
	}
};
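
// Note: char32_t arguments are converted and encoded through a full int64_t (see EncodeT above).
// Callers such as the C# bindings read these argument slots as 64-bit integers, so writing only
// 32 bits here would leave the upper half uninitialized and produce garbage values.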

template <typename T>
struct VariantObjectClassChecker {
	static _FORCE_INLINE_ bool check(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of_v<Object, TStripped>) {
			Object *obj = p_variant;
			return Object::cast_to<TStripped>(p_variant) || !obj;
		} else {
			return true;
		}
	}
};

template <typename T>
class Ref;

template <typename T>
struct VariantObjectClassChecker<const Ref<T> &> {
	static _FORCE_INLINE_ bool check(const Variant &p_variant) {
		Object *obj = p_variant;
		const Ref<T> node = p_variant;
		return node.ptr() || !obj;
	}
};
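
// VariantObjectClassChecker accepts a Variant passed for an Object-derived (or Ref<T>) parameter
// when it holds an instance of a compatible class or is null; for any other parameter type the
// check always passes and conversion is left to VariantCaster.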

#ifdef DEBUG_METHODS_ENABLED
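
// VariantCasterAndValidate is the debug-build counterpart of VariantCaster: before casting, it
// verifies that the argument can be strictly converted to the expected Variant type (and that the
// class matches for Object parameters), filling r_error with CALL_ERROR_INVALID_ARGUMENT otherwise.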
template <typename T>
struct VariantCasterAndValidate {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

template <typename T>
struct VariantCasterAndValidate<T &> {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

template <typename T>
struct VariantCasterAndValidate<const T &> {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

#endif // DEBUG_METHODS_ENABLED

template <typename T, typename... P, size_t... Is>
void call_with_variant_args_helper(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)(p_args); //avoid warning
}

template <typename T, typename... P, size_t... Is>
void call_with_variant_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)(p_args); //avoid warning
}

template <typename T, typename... P, size_t... Is>
void call_with_ptr_args_helper(T *p_instance, void (T::*p_method)(P...), const void **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...);
}

template <typename T, typename... P, size_t... Is>
void call_with_ptr_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const void **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...);
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_ptr_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode((p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_ptr_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode((p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <typename T, typename... P, size_t... Is>
void call_with_ptr_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const void **p_args, IndexSequence<Is...>) {
	p_method(p_instance, PtrToArg<P>::convert(p_args[Is])...);
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_ptr_args_static_retc_helper(T *p_instance, R (*p_method)(T *, P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode(p_method(p_instance, PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <typename R, typename... P, size_t... Is>
void call_with_ptr_args_static_method_ret_helper(R (*p_method)(P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode(p_method(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <typename... P, size_t... Is>
void call_with_ptr_args_static_method_helper(void (*p_method)(P...), const void **p_args, IndexSequence<Is...>) {
	p_method(PtrToArg<P>::convert(p_args[Is])...);
}

template <typename T, typename... P, size_t... Is>
void call_with_validated_variant_args_helper(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...);
}

template <typename T, typename... P, size_t... Is>
void call_with_validated_variant_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...);
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_validated_variant_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<GetSimpleTypeT<R>>::set(r_ret, (p_instance->*p_method)((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...));
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_validated_variant_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<GetSimpleTypeT<R>>::set(r_ret, (p_instance->*p_method)((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...));
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_validated_variant_args_static_retc_helper(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<GetSimpleTypeT<R>>::set(r_ret, p_method(p_instance, (VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...));
}

template <typename T, typename... P, size_t... Is>
void call_with_validated_variant_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, IndexSequence<Is...>) {
	p_method(p_instance, (VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...);
}

template <typename R, typename... P, size_t... Is>
void call_with_validated_variant_args_static_method_ret_helper(R (*p_method)(P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<GetSimpleTypeT<R>>::set(r_ret, p_method((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...));
}

template <typename... P, size_t... Is>
void call_with_validated_variant_args_static_method_helper(void (*p_method)(P...), const Variant **p_args, IndexSequence<Is...>) {
	p_method((VariantInternalAccessor<GetSimpleTypeT<P>>::get(p_args[Is]))...);
}

template <typename T, typename... P>
void call_with_variant_args(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_helper<T, P...>(p_instance, p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}
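
// The *_dv variants below fill in trailing default values when fewer arguments than parameters are
// supplied. Worked example (illustrative): with three parameters, one caller argument and three
// registered defaults, `missing` is 2, so args[0] comes from the caller while args[1] and args[2]
// point at the last two entries of `default_values`.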
template <typename T, typename... P>
void call_with_variant_args_dv(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_variant_argsc(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_helper<T, P...>(p_instance, p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_variant_argsc_dv(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_argsc_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_variant_args_ret_dv(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_ret_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_variant_args_retc_dv(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_retc_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_ptr_args(T *p_instance, void (T::*p_method)(P...), const void **p_args) {
	call_with_ptr_args_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_ptr_argsc(T *p_instance, void (T::*p_method)(P...) const, const void **p_args) {
	call_with_ptr_argsc_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_ptr_args_ret(T *p_instance, R (T::*p_method)(P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_ret_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_ptr_args_retc(T *p_instance, R (T::*p_method)(P...) const, const void **p_args, void *r_ret) {
	call_with_ptr_args_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_ptr_args_static(T *p_instance, void (*p_method)(T *, P...), const void **p_args) {
	call_with_ptr_args_static_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_ptr_args_static_retc(T *p_instance, R (*p_method)(T *, P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_static_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename R, typename... P>
void call_with_ptr_args_static_method_ret(R (*p_method)(P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_static_method_ret_helper<R, P...>(p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename... P>
void call_with_ptr_args_static_method(void (*p_method)(P...), const void **p_args) {
	call_with_ptr_args_static_method_helper<P...>(p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

// Validated
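// These validated entry points assume the arguments were already checked and converted to the
// exact types the method expects, so parameter values are read (and the return value written)
// directly through VariantInternalAccessor, with no further conversion or error reporting.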

template <typename T, typename... P>
void call_with_validated_variant_args(Variant *base, void (T::*p_method)(P...), const Variant **p_args) {
	call_with_validated_variant_args_helper<T, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_variant_args_ret(Variant *base, R (T::*p_method)(P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_ret_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_variant_args_retc(Variant *base, R (T::*p_method)(P...) const, const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_retc_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_validated_variant_args_static(Variant *base, void (*p_method)(T *, P...), const Variant **p_args) {
	call_with_validated_variant_args_static_helper<T, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_variant_args_static_retc(Variant *base, R (*p_method)(T *, P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_static_retc_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename... P>
void call_with_validated_variant_args_static_method(void (*p_method)(P...), const Variant **p_args) {
	call_with_validated_variant_args_static_method_helper<P...>(p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename R, typename... P>
void call_with_validated_variant_args_static_method_ret(R (*p_method)(P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_static_method_ret_helper<R, P...>(p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

// Validated Object

template <typename T, typename... P>
void call_with_validated_object_instance_args(T *base, void (T::*p_method)(P...), const Variant **p_args) {
	call_with_validated_variant_args_helper<T, P...>(base, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_validated_object_instance_argsc(T *base, void (T::*p_method)(P...) const, const Variant **p_args) {
	call_with_validated_variant_argsc_helper<T, P...>(base, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_object_instance_args_ret(T *base, R (T::*p_method)(P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_ret_helper<T, R, P...>(base, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_object_instance_args_retc(T *base, R (T::*p_method)(P...) const, const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_retc_helper<T, R, P...>(base, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P>
void call_with_validated_object_instance_args_static(T *base, void (*p_method)(T *, P...), const Variant **p_args) {
	call_with_validated_variant_args_static_helper<T, P...>(base, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_validated_object_instance_args_static_retc(T *base, R (*p_method)(T *, P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_static_retc_helper<T, R, P...>(base, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

// GCC raises "parameter 'p_args' set but not used" when P = {},
// it's not clever enough to treat other P values as making this branch valid.
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-but-set-parameter"
#endif
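
// The call_get_argument_* helpers below walk the parameter pack with the expand_type /
// braced-initializer-list idiom: the helper is invoked once per parameter type, in order, and only
// the invocation whose running index matches p_arg stores its result.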
template <typename Q>
void call_get_argument_type_helper(int p_arg, int &index, Variant::Type &type) {
	if (p_arg == index) {
		type = GetTypeInfo<Q>::VARIANT_TYPE;
	}
	index++;
}

template <typename... P>
Variant::Type call_get_argument_type(int p_arg) {
	Variant::Type type = Variant::NIL;
	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_type_helper<P>(p_arg, index, type), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index; // Suppress GCC warning.
	return type;
}

template <typename Q>
void call_get_argument_type_info_helper(int p_arg, int &index, PropertyInfo &info) {
	if (p_arg == index) {
		info = GetTypeInfo<Q>::get_class_info();
	}
	index++;
}

template <typename... P>
void call_get_argument_type_info(int p_arg, PropertyInfo &info) {
	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_type_info_helper<P>(p_arg, index, info), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index; // Suppress GCC warning.
}

#ifdef DEBUG_METHODS_ENABLED
template <typename Q>
void call_get_argument_metadata_helper(int p_arg, int &index, GodotTypeInfo::Metadata &md) {
	if (p_arg == index) {
		md = GetTypeInfo<Q>::METADATA;
	}
	index++;
}

template <typename... P>
GodotTypeInfo::Metadata call_get_argument_metadata(int p_arg) {
	GodotTypeInfo::Metadata md = GodotTypeInfo::METADATA_NONE;

	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_metadata_helper<P>(p_arg, index, md), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index;
	return md;
}

#endif // DEBUG_METHODS_ENABLED

//////////////////////

template <typename T, typename R, typename... P, size_t... Is>
void call_with_variant_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <typename R, typename... P, size_t... Is>
void call_with_variant_args_static_ret(R (*p_method)(P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <typename... P, size_t... Is>
void call_with_variant_args_static(void (*p_method)(P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <typename T, typename R, typename... P>
void call_with_variant_args_ret(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_ret_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_variant_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)p_args;
}

template <typename R, typename... P>
void call_with_variant_args_static_ret(R (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_static_ret<R, P...>(p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename... P>
void call_with_variant_args_static_ret(void (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_static<P...>(p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P>
void call_with_variant_args_retc(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename R, typename... P, size_t... Is>
void call_with_variant_args_retc_static_helper(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_method)(p_instance, VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_method)(p_instance, VariantCaster<P>::cast(*p_args[Is])...);
#endif

	(void)p_args;
}

template <typename T, typename R, typename... P>
void call_with_variant_args_retc_static_helper_dv(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, int p_argcount, Variant &r_ret, const Vector<Variant> &default_values, Callable::CallError &r_error) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_retc_static_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename T, typename... P, size_t... Is>
void call_with_variant_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_method)(p_instance, VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_method)(p_instance, VariantCaster<P>::cast(*p_args[Is])...);
#endif

	(void)p_args;
}

template <typename T, typename... P>
void call_with_variant_args_static_helper_dv(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, int p_argcount, const Vector<Variant> &default_values, Callable::CallError &r_error) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename R, typename... P>
void call_with_variant_args_static_ret_dv(R (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static_ret(p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <typename... P>
void call_with_variant_args_static_dv(void (*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.expected = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static(p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic pop
#endif

#endif // BINDER_COMMON_H