/**************************************************************************/
/* json.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/

#include "json.h"

#include "core/config/engine.h"
#include "core/string/print_string.h"

const char *JSON::tk_name[TK_MAX] = {
	"'{'",
	"'}'",
	"'['",
	"']'",
	"identifier",
	"string",
	"number",
	"':'",
	"','",
	"EOF",
};

String JSON::_make_indent(const String &p_indent, int p_size) {
	return p_indent.repeat(p_size);
}
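
// Recursive serializer behind JSON::stringify(). `p_markers` holds the ids of
// the Array/Dictionary containers currently being written so circular
// references can be reported instead of recursing forever, and `p_cur_indent`
// doubles as the recursion depth, capped at Variant::MAX_RECURSION_DEPTH.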
String JSON::_stringify(const Variant &p_var, const String &p_indent, int p_cur_indent, bool p_sort_keys, HashSet<const void *> &p_markers, bool p_full_precision) {
	ERR_FAIL_COND_V_MSG(p_cur_indent > Variant::MAX_RECURSION_DEPTH, "...", "JSON structure is too deep. Bailing.");

	String colon = ":";
	String end_statement = "";

	if (!p_indent.is_empty()) {
		colon += " ";
		end_statement += "\n";
	}

	switch (p_var.get_type()) {
		case Variant::NIL:
			return "null";
		case Variant::BOOL:
			return p_var.operator bool() ? "true" : "false";
		case Variant::INT:
			return itos(p_var);
		case Variant::FLOAT: {
			double num = p_var;
			if (p_full_precision) {
				// Store unreliable digits (17) instead of just reliable
				// digits (14) so that the value can be decoded exactly.
				return String::num(num, 17 - (int)floor(log10(num)));
			} else {
				// Store only reliable digits (14) by default.
				return String::num(num, 14 - (int)floor(log10(num)));
			}
		}
		case Variant::PACKED_INT32_ARRAY:
		case Variant::PACKED_INT64_ARRAY:
		case Variant::PACKED_FLOAT32_ARRAY:
		case Variant::PACKED_FLOAT64_ARRAY:
		case Variant::PACKED_STRING_ARRAY:
		case Variant::ARRAY: {
			Array a = p_var;
			if (a.is_empty()) {
				return "[]";
			}

			String s = "[";
			s += end_statement;

			ERR_FAIL_COND_V_MSG(p_markers.has(a.id()), "\"[...]\"", "Converting circular structure to JSON.");
			p_markers.insert(a.id());

			bool first = true;
			for (const Variant &var : a) {
				if (first) {
					first = false;
				} else {
					s += ",";
					s += end_statement;
				}
				s += _make_indent(p_indent, p_cur_indent + 1) + _stringify(var, p_indent, p_cur_indent + 1, p_sort_keys, p_markers);
			}
			s += end_statement + _make_indent(p_indent, p_cur_indent) + "]";
			p_markers.erase(a.id());
			return s;
		}
		case Variant::DICTIONARY: {
			String s = "{";
			s += end_statement;
			Dictionary d = p_var;

			ERR_FAIL_COND_V_MSG(p_markers.has(d.id()), "\"{...}\"", "Converting circular structure to JSON.");
			p_markers.insert(d.id());

			List<Variant> keys;
			d.get_key_list(&keys);

			if (p_sort_keys) {
				keys.sort();
			}

			bool first_key = true;
			for (const Variant &E : keys) {
				if (first_key) {
					first_key = false;
				} else {
					s += ",";
					s += end_statement;
				}
				s += _make_indent(p_indent, p_cur_indent + 1) + _stringify(String(E), p_indent, p_cur_indent + 1, p_sort_keys, p_markers);
				s += colon;
				s += _stringify(d[E], p_indent, p_cur_indent + 1, p_sort_keys, p_markers);
			}

			s += end_statement + _make_indent(p_indent, p_cur_indent) + "}";
			p_markers.erase(d.id());
			return s;
		}
		default:
			return "\"" + String(p_var).json_escape() + "\"";
	}
}
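
// Single-token scanner: reads one token starting at `index` from the char32_t
// buffer, advances `index` past it, and reports failures through `r_err_str`,
// with `line` tracking newlines for error messages.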
Error JSON::_get_token(const char32_t *p_str, int &index, int p_len, Token &r_token, int &line, String &r_err_str) {
	while (p_len > 0) {
		switch (p_str[index]) {
			case '\n': {
				line++;
				index++;
				break;
			}
			case 0: {
				r_token.type = TK_EOF;
				return OK;
			} break;
			case '{': {
				r_token.type = TK_CURLY_BRACKET_OPEN;
				index++;
				return OK;
			}
			case '}': {
				r_token.type = TK_CURLY_BRACKET_CLOSE;
				index++;
				return OK;
			}
			case '[': {
				r_token.type = TK_BRACKET_OPEN;
				index++;
				return OK;
			}
			case ']': {
				r_token.type = TK_BRACKET_CLOSE;
				index++;
				return OK;
			}
			case ':': {
				r_token.type = TK_COLON;
				index++;
				return OK;
			}
			case ',': {
				r_token.type = TK_COMMA;
				index++;
				return OK;
			}
			case '"': {
				index++;
				String str;
				while (true) {
					if (p_str[index] == 0) {
						r_err_str = "Unterminated String";
						return ERR_PARSE_ERROR;
					} else if (p_str[index] == '"') {
						index++;
						break;
					} else if (p_str[index] == '\\') {
						//escaped characters...
						index++;
						char32_t next = p_str[index];
						if (next == 0) {
							r_err_str = "Unterminated String";
							return ERR_PARSE_ERROR;
						}
						char32_t res = 0;

						switch (next) {
							case 'b':
								res = 8;
								break;
							case 't':
								res = 9;
								break;
							case 'n':
								res = 10;
								break;
							case 'f':
								res = 12;
								break;
							case 'r':
								res = 13;
								break;
							case 'u': {
								// hex number
								for (int j = 0; j < 4; j++) {
									char32_t c = p_str[index + j + 1];
									if (c == 0) {
										r_err_str = "Unterminated String";
										return ERR_PARSE_ERROR;
									}
									if (!is_hex_digit(c)) {
										r_err_str = "Malformed hex constant in string";
										return ERR_PARSE_ERROR;
									}
									char32_t v;
									if (is_digit(c)) {
										v = c - '0';
									} else if (c >= 'a' && c <= 'f') {
										v = c - 'a';
										v += 10;
									} else if (c >= 'A' && c <= 'F') {
										v = c - 'A';
										v += 10;
									} else {
										ERR_PRINT("Bug parsing hex constant.");
										v = 0;
									}

									res <<= 4;
									res |= v;
								}
								index += 4; //will add at the end anyway

								if ((res & 0xfffffc00) == 0xd800) {
									if (p_str[index + 1] != '\\' || p_str[index + 2] != 'u') {
										r_err_str = "Invalid UTF-16 sequence in string, unpaired lead surrogate";
										return ERR_PARSE_ERROR;
									}
									index += 2;
									char32_t trail = 0;
									for (int j = 0; j < 4; j++) {
										char32_t c = p_str[index + j + 1];
										if (c == 0) {
											r_err_str = "Unterminated String";
											return ERR_PARSE_ERROR;
										}
										if (!is_hex_digit(c)) {
											r_err_str = "Malformed hex constant in string";
											return ERR_PARSE_ERROR;
										}
										char32_t v;
										if (is_digit(c)) {
											v = c - '0';
										} else if (c >= 'a' && c <= 'f') {
											v = c - 'a';
											v += 10;
										} else if (c >= 'A' && c <= 'F') {
											v = c - 'A';
											v += 10;
										} else {
											ERR_PRINT("Bug parsing hex constant.");
											v = 0;
										}
										trail <<= 4;
										trail |= v;
									}
									if ((trail & 0xfffffc00) == 0xdc00) {
										res = (res << 10UL) + trail - ((0xd800 << 10UL) + 0xdc00 - 0x10000);
										index += 4; //will add at the end anyway
									} else {
										r_err_str = "Invalid UTF-16 sequence in string, unpaired lead surrogate";
										return ERR_PARSE_ERROR;
									}
								} else if ((res & 0xfffffc00) == 0xdc00) {
									r_err_str = "Invalid UTF-16 sequence in string, unpaired trail surrogate";
									return ERR_PARSE_ERROR;
								}

							} break;
							case '"':
							case '\\':
							case '/': {
								res = next;
							} break;
							default: {
								r_err_str = "Invalid escape sequence.";
								return ERR_PARSE_ERROR;
							}
						}

						str += res;
					} else {
						if (p_str[index] == '\n') {
							line++;
						}
						str += p_str[index];
					}
					index++;
				}

				r_token.type = TK_STRING;
				r_token.value = str;
				return OK;
			} break;
			default: {
				if (p_str[index] <= 32) {
					index++;
					break;
				}

				if (p_str[index] == '-' || is_digit(p_str[index])) {
					//a number
					const char32_t *rptr;
					double number = String::to_float(&p_str[index], &rptr);
					index += (rptr - &p_str[index]);
					r_token.type = TK_NUMBER;
					r_token.value = number;
					return OK;
				} else if (is_ascii_alphabet_char(p_str[index])) {
					String id;
					while (is_ascii_alphabet_char(p_str[index])) {
						id += p_str[index];
						index++;
					}

					r_token.type = TK_IDENTIFIER;
					r_token.value = id;
					return OK;
				} else {
					r_err_str = "Unexpected character.";
					return ERR_PARSE_ERROR;
				}
			}
		}
	}

	return ERR_PARSE_ERROR;
}
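
// Recursive-descent parsing: _parse_value() dispatches on the current token
// and delegates to _parse_array()/_parse_object(), which call back into
// _parse_value() for each element. The depth is incremented when recursing
// into a container so the Variant::MAX_RECURSION_DEPTH check below can stop
// runaway nesting.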
Error JSON::_parse_value(Variant &value, Token &token, const char32_t *p_str, int &index, int p_len, int &line, int p_depth, String &r_err_str) {
	if (p_depth > Variant::MAX_RECURSION_DEPTH) {
		r_err_str = "JSON structure is too deep. Bailing.";
		return ERR_OUT_OF_MEMORY;
	}

	if (token.type == TK_CURLY_BRACKET_OPEN) {
		Dictionary d;
		Error err = _parse_object(d, p_str, index, p_len, line, p_depth + 1, r_err_str);
		if (err) {
			return err;
		}
		value = d;
	} else if (token.type == TK_BRACKET_OPEN) {
		Array a;
		Error err = _parse_array(a, p_str, index, p_len, line, p_depth + 1, r_err_str);
		if (err) {
			return err;
		}
		value = a;
	} else if (token.type == TK_IDENTIFIER) {
		String id = token.value;
		if (id == "true") {
			value = true;
		} else if (id == "false") {
			value = false;
		} else if (id == "null") {
			value = Variant();
		} else {
			r_err_str = "Expected 'true','false' or 'null', got '" + id + "'.";
			return ERR_PARSE_ERROR;
		}
	} else if (token.type == TK_NUMBER) {
		value = token.value;
	} else if (token.type == TK_STRING) {
		value = token.value;
	} else {
		r_err_str = "Expected value, got " + String(tk_name[token.type]) + ".";
		return ERR_PARSE_ERROR;
	}

	return OK;
}

Error JSON::_parse_array(Array &array, const char32_t *p_str, int &index, int p_len, int &line, int p_depth, String &r_err_str) {
	Token token;
	bool need_comma = false;

	while (index < p_len) {
		Error err = _get_token(p_str, index, p_len, token, line, r_err_str);
		if (err != OK) {
			return err;
		}

		if (token.type == TK_BRACKET_CLOSE) {
			return OK;
		}

		if (need_comma) {
			if (token.type != TK_COMMA) {
				r_err_str = "Expected ','";
				return ERR_PARSE_ERROR;
			} else {
				need_comma = false;
				continue;
			}
		}

		Variant v;
		err = _parse_value(v, token, p_str, index, p_len, line, p_depth, r_err_str);
		if (err) {
			return err;
		}

		array.push_back(v);
		need_comma = true;
	}

	r_err_str = "Expected ']'";
	return ERR_PARSE_ERROR;
}

Error JSON::_parse_object(Dictionary &object, const char32_t *p_str, int &index, int p_len, int &line, int p_depth, String &r_err_str) {
	bool at_key = true;
	String key;
	Token token;
	bool need_comma = false;

	while (index < p_len) {
		if (at_key) {
			Error err = _get_token(p_str, index, p_len, token, line, r_err_str);
			if (err != OK) {
				return err;
			}

			if (token.type == TK_CURLY_BRACKET_CLOSE) {
				return OK;
			}

			if (need_comma) {
				if (token.type != TK_COMMA) {
					r_err_str = "Expected '}' or ','";
					return ERR_PARSE_ERROR;
				} else {
					need_comma = false;
					continue;
				}
			}

			if (token.type != TK_STRING) {
				r_err_str = "Expected key";
				return ERR_PARSE_ERROR;
			}

			key = token.value;
			err = _get_token(p_str, index, p_len, token, line, r_err_str);
			if (err != OK) {
				return err;
			}
			if (token.type != TK_COLON) {
				r_err_str = "Expected ':'";
				return ERR_PARSE_ERROR;
			}

			at_key = false;
		} else {
			Error err = _get_token(p_str, index, p_len, token, line, r_err_str);
			if (err != OK) {
				return err;
			}

			Variant v;
			err = _parse_value(v, token, p_str, index, p_len, line, p_depth, r_err_str);
			if (err) {
				return err;
			}
			object[key] = v;
			need_comma = true;
			at_key = true;
		}
	}

	r_err_str = "Expected '}'";
	return ERR_PARSE_ERROR;
}

void JSON::set_data(const Variant &p_data) {
	data = p_data;
	text.clear();
}

Error JSON::_parse_string(const String &p_json, Variant &r_ret, String &r_err_str, int &r_err_line) {
	const char32_t *str = p_json.ptr();
	int idx = 0;
	int len = p_json.length();
	Token token;
	r_err_line = 0;
	String aux_key;

	Error err = _get_token(str, idx, len, token, r_err_line, r_err_str);
	if (err) {
		return err;
	}

	err = _parse_value(r_ret, token, str, idx, len, r_err_line, 0, r_err_str);

	// Check if EOF is reached
	// or it's a type of the next token.
	if (err == OK && idx < len) {
		err = _get_token(str, idx, len, token, r_err_line, r_err_str);

		if (err || token.type != TK_EOF) {
			r_err_str = "Expected 'EOF'";
			// Reset return value to empty `Variant`
			r_ret = Variant();
			return ERR_PARSE_ERROR;
		}
	}

	return err;
}

Error JSON::parse(const String &p_json_string, bool p_keep_text) {
	Error err = _parse_string(p_json_string, data, err_str, err_line);
	if (err == Error::OK) {
		err_line = 0;
	}
	if (p_keep_text) {
		text = p_json_string;
	}
	return err;
}

String JSON::get_parsed_text() const {
	return text;
}

String JSON::stringify(const Variant &p_var, const String &p_indent, bool p_sort_keys, bool p_full_precision) {
	Ref<JSON> jason;
	jason.instantiate();
	HashSet<const void *> markers;
	return jason->_stringify(p_var, p_indent, 0, p_sort_keys, markers, p_full_precision);
}

Variant JSON::parse_string(const String &p_json_string) {
	Ref<JSON> jason;
	jason.instantiate();
	Error error = jason->parse(p_json_string);
	ERR_FAIL_COND_V_MSG(error != Error::OK, Variant(), vformat("Parse JSON failed. Error at line %d: %s", jason->get_error_line(), jason->get_error_message()));
	return jason->get_data();
}
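
// Illustrative use of the two static helpers above (not part of this file);
// assumes a Dictionary named `config` built elsewhere:
//
//     String text = JSON::stringify(config, "\t", true, false);
//     Variant round_tripped = JSON::parse_string(text);
//
// parse_string() returns an empty Variant (and logs the error line and
// message) when the text is not valid JSON, so callers that need error
// details should use a JSON instance with parse() instead.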

void JSON::_bind_methods() {
	ClassDB::bind_static_method("JSON", D_METHOD("stringify", "data", "indent", "sort_keys", "full_precision"), &JSON::stringify, DEFVAL(""), DEFVAL(true), DEFVAL(false));
	ClassDB::bind_static_method("JSON", D_METHOD("parse_string", "json_string"), &JSON::parse_string);
	ClassDB::bind_method(D_METHOD("parse", "json_text", "keep_text"), &JSON::parse, DEFVAL(false));

	ClassDB::bind_method(D_METHOD("get_data"), &JSON::get_data);
	ClassDB::bind_method(D_METHOD("set_data", "data"), &JSON::set_data);
	ClassDB::bind_method(D_METHOD("get_parsed_text"), &JSON::get_parsed_text);
	ClassDB::bind_method(D_METHOD("get_error_line"), &JSON::get_error_line);
	ClassDB::bind_method(D_METHOD("get_error_message"), &JSON::get_error_message);

	ClassDB::bind_static_method("JSON", D_METHOD("to_native", "json", "allow_classes", "allow_scripts"), &JSON::to_native, DEFVAL(false), DEFVAL(false));
	ClassDB::bind_static_method("JSON", D_METHOD("from_native", "variant", "allow_classes", "allow_scripts"), &JSON::from_native, DEFVAL(false), DEFVAL(false));

	ADD_PROPERTY(PropertyInfo(Variant::NIL, "data", PROPERTY_HINT_NONE, "", PROPERTY_USAGE_DEFAULT | PROPERTY_USAGE_NIL_IS_VARIANT), "set_data", "get_data"); // Ensures that it can be serialized as binary.
}
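
// from_native()/to_native() below translate between arbitrary Variants and a
// JSON-friendly representation: plain bools, numbers, strings and arrays pass
// through unchanged, while every other type becomes a Dictionary tagged with
// the "__gdtype" key (plus a "values" array or named fields) so that
// to_native() can reconstruct the original type.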
#define GDTYPE "__gdtype"
#define VALUES "values"
#define PASS_ARG p_allow_classes, p_allow_scripts

Variant JSON::from_native(const Variant &p_variant, bool p_allow_classes, bool p_allow_scripts) {
	switch (p_variant.get_type()) {
		case Variant::NIL: {
			Dictionary nil;
			nil[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return nil;
		} break;
		case Variant::BOOL: {
			return p_variant;
		} break;
		case Variant::INT: {
			return p_variant;
		} break;
		case Variant::FLOAT: {
			return p_variant;
		} break;
		case Variant::STRING: {
			return p_variant;
		} break;
		case Variant::VECTOR2: {
			Dictionary d;
			Vector2 v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::VECTOR2I: {
			Dictionary d;
			Vector2i v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::RECT2: {
			Dictionary d;
			Rect2 r = p_variant;
			d["position"] = from_native(r.position);
			d["size"] = from_native(r.size);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::RECT2I: {
			Dictionary d;
			Rect2i r = p_variant;
			d["position"] = from_native(r.position);
			d["size"] = from_native(r.size);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::VECTOR3: {
			Dictionary d;
			Vector3 v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			values.push_back(v.z);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::VECTOR3I: {
			Dictionary d;
			Vector3i v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			values.push_back(v.z);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::TRANSFORM2D: {
			Dictionary d;
			Transform2D t = p_variant;
			d["x"] = from_native(t[0]);
			d["y"] = from_native(t[1]);
			d["origin"] = from_native(t[2]);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::VECTOR4: {
			Dictionary d;
			Vector4 v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			values.push_back(v.z);
			values.push_back(v.w);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::VECTOR4I: {
			Dictionary d;
			Vector4i v = p_variant;
			Array values;
			values.push_back(v.x);
			values.push_back(v.y);
			values.push_back(v.z);
			values.push_back(v.w);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PLANE: {
			Dictionary d;
			Plane p = p_variant;
			d["normal"] = from_native(p.normal);
			d["d"] = p.d;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::QUATERNION: {
			Dictionary d;
			Quaternion q = p_variant;
			Array values;
			values.push_back(q.x);
			values.push_back(q.y);
			values.push_back(q.z);
			values.push_back(q.w);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::AABB: {
			Dictionary d;
			AABB aabb = p_variant;
			d["position"] = from_native(aabb.position);
			d["size"] = from_native(aabb.size);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::BASIS: {
			Dictionary d;
			Basis t = p_variant;
			d["x"] = from_native(t.get_column(0));
			d["y"] = from_native(t.get_column(1));
			d["z"] = from_native(t.get_column(2));
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::TRANSFORM3D: {
			Dictionary d;
			Transform3D t = p_variant;
			d["basis"] = from_native(t.basis);
			d["origin"] = from_native(t.origin);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PROJECTION: {
			Dictionary d;
			Projection t = p_variant;
			d["x"] = from_native(t[0]);
			d["y"] = from_native(t[1]);
			d["z"] = from_native(t[2]);
			d["w"] = from_native(t[3]);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::COLOR: {
			Dictionary d;
			Color c = p_variant;
			Array values;
			values.push_back(c.r);
			values.push_back(c.g);
			values.push_back(c.b);
			values.push_back(c.a);
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::STRING_NAME: {
			Dictionary d;
			d["name"] = String(p_variant);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::NODE_PATH: {
			Dictionary d;
			d["path"] = String(p_variant);
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::RID: {
			Dictionary d;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::OBJECT: {
			Object *obj = p_variant.get_validated_object();
			if (p_allow_classes && obj) {
				Dictionary d;

				List<PropertyInfo> property_list;
				obj->get_property_list(&property_list);

				d["type"] = obj->get_class();
				Dictionary p;
				for (const PropertyInfo &P : property_list) {
					if (P.usage & PROPERTY_USAGE_STORAGE) {
						if (P.name == "script" && !p_allow_scripts) {
							continue;
						}
						p[P.name] = from_native(obj->get(P.name), PASS_ARG);
					}
				}
				d["properties"] = p;
				d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
				return d;
			} else {
				Dictionary nil;
				nil[GDTYPE] = Variant::get_type_name(p_variant.get_type());
				return nil;
			}
		} break;
		case Variant::CALLABLE:
		case Variant::SIGNAL: {
			Dictionary nil;
			nil[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return nil;
		} break;
		case Variant::DICTIONARY: {
			Dictionary d = p_variant;
			List<Variant> keys;
			d.get_key_list(&keys);

			bool all_strings = true;
			for (const Variant &K : keys) {
				if (K.get_type() != Variant::STRING) {
					all_strings = false;
					break;
				}
			}

			if (all_strings) {
				Dictionary ret_dict;
				for (const Variant &K : keys) {
					ret_dict[K] = from_native(d[K], PASS_ARG);
				}
				return ret_dict;
			} else {
				Dictionary ret;
				Array pairs;
				for (const Variant &K : keys) {
					Dictionary pair;
					pair["key"] = from_native(K, PASS_ARG);
					pair["value"] = from_native(d[K], PASS_ARG);
					pairs.push_back(pair);
				}
				ret["pairs"] = pairs;
				ret[GDTYPE] = Variant::get_type_name(p_variant.get_type());
				return ret;
			}
		} break;
		case Variant::ARRAY: {
			Array arr = p_variant;
			Array ret;
			for (int i = 0; i < arr.size(); i++) {
				ret.push_back(from_native(arr[i], PASS_ARG));
			}
			return ret;
		} break;
		case Variant::PACKED_BYTE_ARRAY: {
			Dictionary d;
			PackedByteArray arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_INT32_ARRAY: {
			Dictionary d;
			PackedInt32Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_INT64_ARRAY: {
			Dictionary d;
			PackedInt64Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_FLOAT32_ARRAY: {
			Dictionary d;
			PackedFloat32Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_FLOAT64_ARRAY: {
			Dictionary d;
			PackedFloat64Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_STRING_ARRAY: {
			Dictionary d;
			PackedStringArray arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				values.push_back(arr[i]);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_VECTOR2_ARRAY: {
			Dictionary d;
			PackedVector2Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				Vector2 v = arr[i];
				values.push_back(v.x);
				values.push_back(v.y);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_VECTOR3_ARRAY: {
			Dictionary d;
			PackedVector3Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				Vector3 v = arr[i];
				values.push_back(v.x);
				values.push_back(v.y);
				values.push_back(v.z);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_COLOR_ARRAY: {
			Dictionary d;
			PackedColorArray arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				Color v = arr[i];
				values.push_back(v.r);
				values.push_back(v.g);
				values.push_back(v.b);
				values.push_back(v.a);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		case Variant::PACKED_VECTOR4_ARRAY: {
			Dictionary d;
			PackedVector4Array arr = p_variant;
			Array values;
			for (int i = 0; i < arr.size(); i++) {
				Vector4 v = arr[i];
				values.push_back(v.x);
				values.push_back(v.y);
				values.push_back(v.z);
				values.push_back(v.w);
			}
			d[VALUES] = values;
			d[GDTYPE] = Variant::get_type_name(p_variant.get_type());
			return d;
		} break;
		default: {
			ERR_PRINT(vformat("Unhandled conversion from native Variant type '%s' to JSON.", Variant::get_type_name(p_variant.get_type())));
		} break;
	}

	Dictionary nil;
	nil[GDTYPE] = Variant::get_type_name(p_variant.get_type());
	return nil;
}
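
// For example (illustrative values), from_native(Vector2(1, 2)) produces a
// Dictionary equivalent to the JSON object
//   { "values": [ 1.0, 2.0 ], "__gdtype": "Vector2" }
// and to_native() below reverses that mapping, validating the tag and the
// number of values before rebuilding the Vector2.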
Variant JSON::to_native(const Variant &p_json, bool p_allow_classes, bool p_allow_scripts) {
	switch (p_json.get_type()) {
		case Variant::BOOL: {
			return p_json;
		} break;
		case Variant::INT: {
			return p_json;
		} break;
		case Variant::FLOAT: {
			return p_json;
		} break;
		case Variant::STRING: {
			return p_json;
		} break;
		case Variant::STRING_NAME: {
			return p_json;
		} break;
		case Variant::CALLABLE: {
			return p_json;
		} break;
		case Variant::DICTIONARY: {
			Dictionary d = p_json;
			if (d.has(GDTYPE)) {
				// Specific Godot Variant types serialized to JSON.
				String type = d[GDTYPE];
				if (type == Variant::get_type_name(Variant::VECTOR2)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 2, Variant());
					Vector2 v;
					v.x = values[0];
					v.y = values[1];
					return v;
				} else if (type == Variant::get_type_name(Variant::VECTOR2I)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 2, Variant());
					Vector2i v;
					v.x = values[0];
					v.y = values[1];
					return v;
				} else if (type == Variant::get_type_name(Variant::RECT2)) {
					ERR_FAIL_COND_V(!d.has("position"), Variant());
					ERR_FAIL_COND_V(!d.has("size"), Variant());
					Rect2 r;
					r.position = to_native(d["position"]);
					r.size = to_native(d["size"]);
					return r;
				} else if (type == Variant::get_type_name(Variant::RECT2I)) {
					ERR_FAIL_COND_V(!d.has("position"), Variant());
					ERR_FAIL_COND_V(!d.has("size"), Variant());
					Rect2i r;
					r.position = to_native(d["position"]);
					r.size = to_native(d["size"]);
					return r;
				} else if (type == Variant::get_type_name(Variant::VECTOR3)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 3, Variant());
					Vector3 v;
					v.x = values[0];
					v.y = values[1];
					v.z = values[2];
					return v;
				} else if (type == Variant::get_type_name(Variant::VECTOR3I)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 3, Variant());
					Vector3i v;
					v.x = values[0];
					v.y = values[1];
					v.z = values[2];
					return v;
				} else if (type == Variant::get_type_name(Variant::TRANSFORM2D)) {
					ERR_FAIL_COND_V(!d.has("x"), Variant());
					ERR_FAIL_COND_V(!d.has("y"), Variant());
					ERR_FAIL_COND_V(!d.has("origin"), Variant());
					Transform2D t;
					t[0] = to_native(d["x"]);
					t[1] = to_native(d["y"]);
					t[2] = to_native(d["origin"]);
					return t;
				} else if (type == Variant::get_type_name(Variant::VECTOR4)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 4, Variant());
					Vector4 v;
					v.x = values[0];
					v.y = values[1];
					v.z = values[2];
					v.w = values[3];
					return v;
				} else if (type == Variant::get_type_name(Variant::VECTOR4I)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 4, Variant());
					Vector4i v;
					v.x = values[0];
					v.y = values[1];
					v.z = values[2];
					v.w = values[3];
					return v;
				} else if (type == Variant::get_type_name(Variant::PLANE)) {
					ERR_FAIL_COND_V(!d.has("normal"), Variant());
					ERR_FAIL_COND_V(!d.has("d"), Variant());
					Plane p;
					p.normal = to_native(d["normal"]);
					p.d = d["d"];
					return p;
				} else if (type == Variant::get_type_name(Variant::QUATERNION)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 4, Variant());
					Quaternion v;
					v.x = values[0];
					v.y = values[1];
					v.z = values[2];
					v.w = values[3];
					return v;
				} else if (type == Variant::get_type_name(Variant::AABB)) {
					ERR_FAIL_COND_V(!d.has("position"), Variant());
					ERR_FAIL_COND_V(!d.has("size"), Variant());
					AABB r;
					r.position = to_native(d["position"]);
					r.size = to_native(d["size"]);
					return r;
				} else if (type == Variant::get_type_name(Variant::BASIS)) {
					ERR_FAIL_COND_V(!d.has("x"), Variant());
					ERR_FAIL_COND_V(!d.has("y"), Variant());
					ERR_FAIL_COND_V(!d.has("z"), Variant());
					Basis b;
					b.set_column(0, to_native(d["x"]));
					b.set_column(1, to_native(d["y"]));
					b.set_column(2, to_native(d["z"]));
					return b;
				} else if (type == Variant::get_type_name(Variant::TRANSFORM3D)) {
					ERR_FAIL_COND_V(!d.has("basis"), Variant());
					ERR_FAIL_COND_V(!d.has("origin"), Variant());
					Transform3D t;
					t.basis = to_native(d["basis"]);
					t.origin = to_native(d["origin"]);
					return t;
				} else if (type == Variant::get_type_name(Variant::PROJECTION)) {
					ERR_FAIL_COND_V(!d.has("x"), Variant());
					ERR_FAIL_COND_V(!d.has("y"), Variant());
					ERR_FAIL_COND_V(!d.has("z"), Variant());
					ERR_FAIL_COND_V(!d.has("w"), Variant());
					Projection p;
					p[0] = to_native(d["x"]);
					p[1] = to_native(d["y"]);
					p[2] = to_native(d["z"]);
					p[3] = to_native(d["w"]);
					return p;
				} else if (type == Variant::get_type_name(Variant::COLOR)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() != 4, Variant());
					Color c;
					c.r = values[0];
					c.g = values[1];
					c.b = values[2];
					c.a = values[3];
					return c;
				} else if (type == Variant::get_type_name(Variant::NODE_PATH)) {
					ERR_FAIL_COND_V(!d.has("path"), Variant());
					NodePath np = d["path"];
					return np;
				} else if (type == Variant::get_type_name(Variant::STRING_NAME)) {
					ERR_FAIL_COND_V(!d.has("name"), Variant());
					StringName s = d["name"];
					return s;
				} else if (type == Variant::get_type_name(Variant::OBJECT)) {
					ERR_FAIL_COND_V(!d.has("type"), Variant());
					ERR_FAIL_COND_V(!d.has("properties"), Variant());

					ERR_FAIL_COND_V(!p_allow_classes, Variant());

					String obj_type = d["type"];
					bool is_script = obj_type == "Script" || ClassDB::is_parent_class(obj_type, "Script");
					ERR_FAIL_COND_V(!p_allow_scripts && is_script, Variant());
					Object *obj = ClassDB::instantiate(obj_type);
					ERR_FAIL_NULL_V(obj, Variant());

					Dictionary p = d["properties"];

					List<Variant> keys;
					p.get_key_list(&keys);

					for (const Variant &K : keys) {
						String property = K;
						Variant value = to_native(p[K], PASS_ARG);
						obj->set(property, value);
					}

					Variant v(obj);

					return v;
				} else if (type == Variant::get_type_name(Variant::DICTIONARY)) {
					ERR_FAIL_COND_V(!d.has("pairs"), Variant());
					Array pairs = d["pairs"];
					Dictionary r;
					for (int i = 0; i < pairs.size(); i++) {
						Dictionary p = pairs[i];
						ERR_CONTINUE(!p.has("key"));
						ERR_CONTINUE(!p.has("value"));
						r[to_native(p["key"], PASS_ARG)] = to_native(p["value"]);
					}
					return r;
				} else if (type == Variant::get_type_name(Variant::ARRAY)) {
					ERR_PRINT(vformat("Unexpected Array with '%s' key. Arrays are supported natively.", GDTYPE));
				} else if (type == Variant::get_type_name(Variant::PACKED_BYTE_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedByteArray pbarr;
					pbarr.resize(values.size());
					for (int i = 0; i < pbarr.size(); i++) {
						pbarr.write[i] = values[i];
					}
					return pbarr;
				} else if (type == Variant::get_type_name(Variant::PACKED_INT32_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedInt32Array arr;
					arr.resize(values.size());
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = values[i];
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_INT64_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedInt64Array arr;
					arr.resize(values.size());
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = values[i];
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_FLOAT32_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedFloat32Array arr;
					arr.resize(values.size());
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = values[i];
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_FLOAT64_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedFloat64Array arr;
					arr.resize(values.size());
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = values[i];
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_STRING_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					PackedStringArray arr;
					arr.resize(values.size());
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = values[i];
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_VECTOR2_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() % 2 != 0, Variant());
					PackedVector2Array arr;
					arr.resize(values.size() / 2);
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = Vector2(values[i * 2 + 0], values[i * 2 + 1]);
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_VECTOR3_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() % 3 != 0, Variant());
					PackedVector3Array arr;
					arr.resize(values.size() / 3);
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = Vector3(values[i * 3 + 0], values[i * 3 + 1], values[i * 3 + 2]);
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_COLOR_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() % 4 != 0, Variant());
					PackedColorArray arr;
					arr.resize(values.size() / 4);
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = Color(values[i * 4 + 0], values[i * 4 + 1], values[i * 4 + 2], values[i * 4 + 3]);
					}
					return arr;
				} else if (type == Variant::get_type_name(Variant::PACKED_VECTOR4_ARRAY)) {
					ERR_FAIL_COND_V(!d.has(VALUES), Variant());
					Array values = d[VALUES];
					ERR_FAIL_COND_V(values.size() % 4 != 0, Variant());
					PackedVector4Array arr;
					arr.resize(values.size() / 4);
					for (int i = 0; i < arr.size(); i++) {
						arr.write[i] = Vector4(values[i * 4 + 0], values[i * 4 + 1], values[i * 4 + 2], values[i * 4 + 3]);
					}
					return arr;
				} else {
					return Variant();
				}
			} else {
				// Regular dictionary with string keys.
				List<Variant> keys;
				d.get_key_list(&keys);
				Dictionary r;
				for (const Variant &K : keys) {
					r[K] = to_native(d[K], PASS_ARG);
				}
				return r;
			}
		} break;
		case Variant::ARRAY: {
			Array arr = p_json;
			Array ret;
			ret.resize(arr.size());
			for (int i = 0; i < arr.size(); i++) {
				ret[i] = to_native(arr[i], PASS_ARG);
			}
			return ret;
		} break;
		default: {
			ERR_PRINT(vformat("Unhandled conversion from JSON type '%s' to native Variant type.", Variant::get_type_name(p_json.get_type())));
			return Variant();
		}
	}

	return Variant();
}

#undef GDTYPE
#undef VALUES
#undef PASS_ARG

////////////
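
// Resource loader/saver so .json files can be used as JSON resources. The
// loader keeps the original text when running in the editor (see the
// `p_keep_text` argument to parse()), and the saver writes that text back
// verbatim when available, falling back to a tab-indented, full-precision
// stringify of the parsed data.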
Ref<Resource> ResourceFormatLoaderJSON::load(const String &p_path, const String &p_original_path, Error *r_error, bool p_use_sub_threads, float *r_progress, CacheMode p_cache_mode) {
	if (r_error) {
		*r_error = ERR_FILE_CANT_OPEN;
	}

	if (!FileAccess::exists(p_path)) {
		if (r_error) {
			*r_error = ERR_FILE_NOT_FOUND;
		}
		return Ref<Resource>();
	}

	Ref<JSON> json;
	json.instantiate();

	Error err = json->parse(FileAccess::get_file_as_string(p_path), Engine::get_singleton()->is_editor_hint());
	if (err != OK) {
		String err_text = "Error parsing JSON file at '" + p_path + "', on line " + itos(json->get_error_line()) + ": " + json->get_error_message();

		if (Engine::get_singleton()->is_editor_hint()) {
			// If running on editor, still allow opening the JSON so the code editor can edit it.
			WARN_PRINT(err_text);
		} else {
			if (r_error) {
				*r_error = err;
			}
			ERR_PRINT(err_text);
			return Ref<Resource>();
		}
	}

	if (r_error) {
		*r_error = OK;
	}

	return json;
}

void ResourceFormatLoaderJSON::get_recognized_extensions(List<String> *p_extensions) const {
	p_extensions->push_back("json");
}

bool ResourceFormatLoaderJSON::handles_type(const String &p_type) const {
	return (p_type == "JSON");
}

String ResourceFormatLoaderJSON::get_resource_type(const String &p_path) const {
	String el = p_path.get_extension().to_lower();
	if (el == "json") {
		return "JSON";
	}
	return "";
}

Error ResourceFormatSaverJSON::save(const Ref<Resource> &p_resource, const String &p_path, uint32_t p_flags) {
	Ref<JSON> json = p_resource;
	ERR_FAIL_COND_V(json.is_null(), ERR_INVALID_PARAMETER);

	String source = json->get_parsed_text().is_empty() ? JSON::stringify(json->get_data(), "\t", false, true) : json->get_parsed_text();

	Error err;
	Ref<FileAccess> file = FileAccess::open(p_path, FileAccess::WRITE, &err);

	ERR_FAIL_COND_V_MSG(err, err, "Cannot save json '" + p_path + "'.");

	file->store_string(source);
	if (file->get_error() != OK && file->get_error() != ERR_FILE_EOF) {
		return ERR_CANT_CREATE;
	}

	return OK;
}

void ResourceFormatSaverJSON::get_recognized_extensions(const Ref<Resource> &p_resource, List<String> *p_extensions) const {
	Ref<JSON> json = p_resource;
	if (json.is_valid()) {
		p_extensions->push_back("json");
	}
}

bool ResourceFormatSaverJSON::recognize(const Ref<Resource> &p_resource) const {
	return p_resource->get_class_name() == "JSON"; //only json, not inherited
}