/*************************************************************************/
/*  gd_parser.cpp                                                        */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                    http://www.godotengine.org                         */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur.                 */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/
#include "gd_parser.h"

#include "print_string.h"
#include "io/resource_loader.h"
#include "os/file_access.h"
#include "script_language.h"
#include "gd_script.h"

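// Allocates a parser node of type T, links it into the parser's bookkeeping
// list (so it can be freed when the parser is destroyed) and stamps it with
// the current token's line and column.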
template<class T>
T* GDParser::alloc_node() {

	T *t = memnew( T);
	t->next=list;
	list=t;

	if (!head)
		head=t;

	t->line=tokenizer->get_token_line();
	t->column=tokenizer->get_token_column();
	return t;
}

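// Consumes an optional statement terminator: a ';' is eaten, a newline or EOF
// is accepted without consuming it, and anything else is reported as failure.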
bool GDParser::_end_statement() {

	if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON) {
		tokenizer->advance();
		return true; //handle next
	} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
		return true; //will be handled properly
	}

	return false;
}

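// Expects the ':' that opens an indented block and pushes the new indentation
// level onto tab_level so the block's statements can be matched against it.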
bool GDParser::_enter_indent_block(BlockNode* p_block) {

	if (tokenizer->get_token()!=GDTokenizer::TK_COLON) {
		// report location at the previous token (on the previous line)
		int error_line = tokenizer->get_token_line(-1);
		int error_column = tokenizer->get_token_column(-1);
		_set_error("':' expected at end of line.",error_line,error_column);
		return false;
	}
	tokenizer->advance();

	if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {

		// be more python-like
		int current = tab_level.back()->get();
		tab_level.push_back(current+1);
		return true;
		//_set_error("newline expected after ':'.");
		//return false;
	}

	while(true) {

		if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {

			return false; //wtf
		} else if (tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {

			int indent = tokenizer->get_token_line_indent();
			int current = tab_level.back()->get();
			if (indent<=current) {
				print_line("current: "+itos(current)+" indent: "+itos(indent));
				print_line("less than current");
				return false;
			}

			tab_level.push_back(indent);
			tokenizer->advance();
			return true;

		} else if (p_block) {

			NewLineNode *nl = alloc_node<NewLineNode>();
			nl->line=tokenizer->get_token_line();
			p_block->statements.push_back(nl);
		}

		tokenizer->advance(); // go to next newline
	}
}

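// Parses a comma separated argument list up to the closing ')', pushing each
// parsed expression into p_args. Also feeds the code-completion state when a
// cursor token appears inside the call.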
bool GDParser::_parse_arguments(Node* p_parent,Vector<Node*>& p_args,bool p_static,bool p_can_codecomplete) {

	if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
		tokenizer->advance();
	} else {

		parenthesis ++;
		int argidx=0;

		while(true) {

			if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {
				_make_completable_call(argidx);
				completion_node=p_parent;
			} else if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING && tokenizer->get_token(1)==GDTokenizer::TK_CURSOR) {
				//completing a string argument..
				completion_cursor=tokenizer->get_token_constant();

				_make_completable_call(argidx);
				completion_node=p_parent;
				tokenizer->advance(1);
				return false;
			}

			Node*arg = _parse_expression(p_parent,p_static);
			if (!arg)
				return false;

			p_args.push_back(arg);

			if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
				tokenizer->advance();
				break;

			} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {

				if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_CLOSE) {

					_set_error("Expression expected");
					return false;
				}

				tokenizer->advance();
				argidx++;
			} else {
				// something is broken
				_set_error("Expected ',' or ')'");
				return false;
			}

		}
		parenthesis --;
	}

	return true;

}

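// Records the context needed to complete the arguments of the call currently
// being parsed (class, function, block, line and argument index).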
void GDParser::_make_completable_call(int p_arg) {

	completion_cursor=StringName();
	completion_type=COMPLETION_CALL_ARGUMENTS;
	completion_class=current_class;
	completion_function=current_function;
	completion_line=tokenizer->get_token_line();
	completion_argument=p_arg;
	completion_block=current_block;
	completion_found=true;
	tokenizer->advance();

}

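// Reads an identifier and, if the completion cursor sits on it, stores the
// completion context of type p_type. Returns true only when a cursor token
// was handled.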
bool GDParser::_get_completable_identifier(CompletionType p_type,StringName& identifier) {

	identifier=StringName();
	if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
		identifier=tokenizer->get_token_identifier();
		tokenizer->advance();
	}
	if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {

		completion_cursor=identifier;
		completion_type=p_type;
		completion_class=current_class;
		completion_function=current_function;
		completion_line=tokenizer->get_token_line();
		completion_block=current_block;
		completion_found=true;
		completion_ident_is_call=false;
		tokenizer->advance();

		if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
			identifier=identifier.operator String() + tokenizer->get_token_identifier().operator String();
			tokenizer->advance();
		}

		if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
			completion_ident_is_call=true;
		}
		return true;
	}

	return false;
}

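// Expression parsing is done in two passes: operands and operators are first
// collected left to right into the 'expression' vector, and the vector is
// then reduced into an operator tree according to operator precedence.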
GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_allow_assign,bool p_parsing_constant) {

	//Vector<Node*> expressions;
	//Vector<OperatorNode::Operator> operators;

	Vector<Expression> expression;

	Node *expr=NULL;

	int op_line = tokenizer->get_token_line(); // when operators are created at the bottom, the line might have been changed (\n found)

	while(true) {

		/*****************/
		/* Parse Operand */
		/*****************/

		if (parenthesis>0) {
			//remove empty space (only allowed if inside parenthesis
			while(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
				tokenizer->advance();
			}
		}

		if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
			//subexpression ()
			tokenizer->advance();
			parenthesis++;
			Node* subexpr = _parse_expression(p_parent,p_static,p_allow_assign,p_parsing_constant);
			parenthesis--;
			if (!subexpr)
				return NULL;

			if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {

				_set_error("Expected ')' in expression");
				return NULL;
			}

			tokenizer->advance();
			expr=subexpr;
		} else if (tokenizer->get_token()==GDTokenizer::TK_DOLLAR) {
			tokenizer->advance();

			String path;

			bool need_identifier=true;
			bool done=false;

			while(!done) {

				switch(tokenizer->get_token()) {
					case GDTokenizer::TK_CURSOR: {
						completion_cursor=StringName();
						completion_type=COMPLETION_GET_NODE;
						completion_class=current_class;
						completion_function=current_function;
						completion_line=tokenizer->get_token_line();
						completion_cursor=path;
						completion_argument=0;
						completion_block=current_block;
						completion_found=true;
						tokenizer->advance();
					} break;
					case GDTokenizer::TK_CONSTANT: {

						if (!need_identifier) {
							done=true;
							break;
						}

						if (tokenizer->get_token_constant().get_type()!=Variant::STRING) {
							_set_error("Expected string constant or identifier after '$' or '/'.");
							return NULL;
						}

						path+=String(tokenizer->get_token_constant());
						tokenizer->advance();
						need_identifier=false;

					} break;
					case GDTokenizer::TK_IDENTIFIER: {
						if (!need_identifier) {
							done=true;
							break;
						}

						path+=String(tokenizer->get_token_identifier());
						tokenizer->advance();
						need_identifier=false;

					} break;
					case GDTokenizer::TK_OP_DIV: {

						if (need_identifier) {
							done=true;
							break;
						}

						path+="/";
						tokenizer->advance();
						need_identifier=true;

					} break;
					default: {
						done=true;
						break;
					}
				}
			}

			if (path=="") {
				_set_error("Path expected after $.");
				return NULL;

			}

			OperatorNode *op = alloc_node<OperatorNode>();
			op->op=OperatorNode::OP_CALL;

			op->arguments.push_back(alloc_node<SelfNode>());

			IdentifierNode *funcname = alloc_node<IdentifierNode>();
			funcname->name="get_node";

			op->arguments.push_back(funcname);

			ConstantNode *nodepath = alloc_node<ConstantNode>();
			nodepath->value = NodePath(StringName(path));
			op->arguments.push_back(nodepath);

			expr=op;

		} else if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {
			tokenizer->advance();
			continue; //no point in cursor in the middle of expression
		} else if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {

			//constant defined by tokenizer
			ConstantNode *constant = alloc_node<ConstantNode>();
			constant->value=tokenizer->get_token_constant();
			tokenizer->advance();
			expr=constant;
		} else if (tokenizer->get_token()==GDTokenizer::TK_CONST_PI) {

			//constant defined by tokenizer
			ConstantNode *constant = alloc_node<ConstantNode>();
			constant->value=Math_PI;
			tokenizer->advance();
			expr=constant;
		} else if (tokenizer->get_token()==GDTokenizer::TK_PR_PRELOAD) {

			//constant defined by tokenizer
			tokenizer->advance();
			if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
				_set_error("Expected '(' after 'preload'");
				return NULL;
			}
			tokenizer->advance();

			String path;
			bool valid = false;
			Node *subexpr = _parse_and_reduce_expression(p_parent, p_static);
			if (subexpr) {
				if (subexpr->type == Node::TYPE_CONSTANT) {
					ConstantNode *cn = static_cast<ConstantNode*>(subexpr);
					if (cn->value.get_type() == Variant::STRING) {
						valid = true;
						path = (String) cn->value;
					}
				}
			}
			if (!valid) {
				_set_error("expected string constant as 'preload' argument.");
				return NULL;
			}
			if (!path.is_abs_path() && base_path!="")
				path=base_path+"/"+path;
			path = path.replace("///","//").simplify_path();
			if (path==self_path) {

				_set_error("Can't preload itself (use 'get_script()').");
				return NULL;

			}

			Ref<Resource> res;
			if (!validating) {

				//this can be too slow for just validating code
				if (for_completion && ScriptCodeCompletionCache::get_sigleton()) {
					res = ScriptCodeCompletionCache::get_sigleton()->get_cached_resource(path);
				} else {
					res = ResourceLoader::load(path);
				}
				if (!res.is_valid()) {
					_set_error("Can't preload resource at path: "+path);
					return NULL;
				}
			} else {

				if (!FileAccess::exists(path)) {
					_set_error("Can't preload resource at path: "+path);
					return NULL;
				}
			}

			if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
				_set_error("Expected ')' after 'preload' path");
				return NULL;
			}

			ConstantNode *constant = alloc_node<ConstantNode>();
			constant->value=res;
			tokenizer->advance();

			expr=constant;
		} else if (tokenizer->get_token()==GDTokenizer::TK_PR_YIELD) {

			//constant defined by tokenizer

			tokenizer->advance();
			if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
				_set_error("Expected '(' after 'yield'");
				return NULL;
			}

			tokenizer->advance();

			OperatorNode *yield = alloc_node<OperatorNode>();
			yield->op=OperatorNode::OP_YIELD;

			while (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
				tokenizer->advance();
			}

			if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
				expr=yield;
				tokenizer->advance();
			} else {

				parenthesis ++;

				Node *object = _parse_and_reduce_expression(p_parent,p_static);
				if (!object)
					return NULL;
				yield->arguments.push_back(object);

				if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
					_set_error("Expected ',' after first argument of 'yield'");
					return NULL;
				}

				tokenizer->advance();

				if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {

					completion_cursor=StringName();
					completion_node=object;
					completion_type=COMPLETION_YIELD;
					completion_class=current_class;
					completion_function=current_function;
					completion_line=tokenizer->get_token_line();
					completion_argument=0;
					completion_block=current_block;
					completion_found=true;
					tokenizer->advance();
				}

				Node *signal = _parse_and_reduce_expression(p_parent,p_static);
				if (!signal)
					return NULL;
				yield->arguments.push_back(signal);

				if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
					_set_error("Expected ')' after second argument of 'yield'");
					return NULL;
				}

				parenthesis --;

				tokenizer->advance();

				expr=yield;
			}
		} else if (tokenizer->get_token()==GDTokenizer::TK_SELF) {

			if (p_static) {
				_set_error("'self'' not allowed in static function or constant expression");
				return NULL;
			}
			//constant defined by tokenizer
			SelfNode *self = alloc_node<SelfNode>();
			tokenizer->advance();
			expr=self;
		} else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE && tokenizer->get_token(1)==GDTokenizer::TK_PERIOD) {

			Variant::Type bi_type = tokenizer->get_token_type();
			tokenizer->advance(2);

			StringName identifier;

			if (_get_completable_identifier(COMPLETION_BUILT_IN_TYPE_CONSTANT,identifier)) {

				completion_built_in_constant=bi_type;
			}

			if (identifier==StringName()) {

				_set_error("Built-in type constant expected after '.'");
				return NULL;
			}
			if (!Variant::has_numeric_constant(bi_type,identifier)) {

				_set_error("Static constant '"+identifier.operator String()+"' not present in built-in type "+Variant::get_type_name(bi_type)+".");
				return NULL;
			}

			ConstantNode *cn = alloc_node<ConstantNode>();
			cn->value=Variant::get_numeric_constant_value(bi_type,identifier);
			expr=cn;
		} else if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_OPEN && (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE || tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER || tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC)) {
			//function or constructor

			OperatorNode *op = alloc_node<OperatorNode>();
			op->op=OperatorNode::OP_CALL;

			if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {

				TypeNode *tn = alloc_node<TypeNode>();
				tn->vtype=tokenizer->get_token_type();
				op->arguments.push_back(tn);
				tokenizer->advance(2);
			} else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC) {

				BuiltInFunctionNode *bn = alloc_node<BuiltInFunctionNode>();
				bn->function=tokenizer->get_token_built_in_func();
				op->arguments.push_back(bn);
				tokenizer->advance(2);
			} else {

				SelfNode *self = alloc_node<SelfNode>();
				op->arguments.push_back(self);

				StringName identifier;
				if (_get_completable_identifier(COMPLETION_FUNCTION,identifier)) {

				}

				IdentifierNode* id = alloc_node<IdentifierNode>();
				id->name=identifier;
				op->arguments.push_back(id);
				tokenizer->advance(1);
			}

			if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {
				_make_completable_call(0);
				completion_node=op;

			}
			if (!_parse_arguments(op,op->arguments,p_static,true))
				return NULL;

			expr=op;
		} else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
			//identifier (reference)

			const ClassNode* cln = current_class;
			bool bfn = false;
			StringName identifier;
			if (_get_completable_identifier(COMPLETION_IDENTIFIER,identifier)) {

			}

			if (p_parsing_constant) {
				for( int i=0; i<cln->constant_expressions.size(); ++i ) {

					if( cln->constant_expressions[i].identifier == identifier ) {

						expr = cln->constant_expressions[i].expression;
						bfn = true;
						break;
					}
				}

				if (GDScriptLanguage::get_singleton()->get_global_map().has(identifier)) {
					//check from constants
					ConstantNode *constant = alloc_node<ConstantNode>();
					constant->value = GDScriptLanguage::get_singleton()->get_global_array()[ GDScriptLanguage::get_singleton()->get_global_map()[identifier] ];
					expr=constant;
					bfn = true;
				}
			}

			if ( !bfn ) {
				IdentifierNode *id = alloc_node<IdentifierNode>();
				id->name = identifier;
				expr = id;
			}
		} else if (tokenizer->get_token()==GDTokenizer::TK_OP_ADD || tokenizer->get_token()==GDTokenizer::TK_OP_SUB || tokenizer->get_token()==GDTokenizer::TK_OP_NOT || tokenizer->get_token()==GDTokenizer::TK_OP_BIT_INVERT) {

			//single prefix operators like !expr +expr -expr ++expr --expr
			alloc_node<OperatorNode>();
			Expression e;
			e.is_op=true;

			switch(tokenizer->get_token()) {
				case GDTokenizer::TK_OP_ADD: e.op=OperatorNode::OP_POS; break;
				case GDTokenizer::TK_OP_SUB: e.op=OperatorNode::OP_NEG; break;
				case GDTokenizer::TK_OP_NOT: e.op=OperatorNode::OP_NOT; break;
				case GDTokenizer::TK_OP_BIT_INVERT: e.op=OperatorNode::OP_BIT_INVERT; break;
				default: {}
			}

			tokenizer->advance();

			if (e.op!=OperatorNode::OP_NOT && tokenizer->get_token()==GDTokenizer::TK_OP_NOT) {
				_set_error("Misplaced 'not'.");
				return NULL;
			}

			expression.push_back(e);
			continue; //only exception, must continue...

			/*
			Node *subexpr=_parse_expression(op,p_static);
			if (!subexpr)
				return NULL;
			op->arguments.push_back(subexpr);
			expr=op;*/
		} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
			// array
			tokenizer->advance();

			ArrayNode *arr = alloc_node<ArrayNode>();
			bool expecting_comma=false;

			while(true) {

				if (tokenizer->get_token()==GDTokenizer::TK_EOF) {

					_set_error("Unterminated array");
					return NULL;

				} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_CLOSE) {
					tokenizer->advance();
					break;
				} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {

					tokenizer->advance(); //ignore newline
				} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
					if (!expecting_comma) {
						_set_error("expression or ']' expected");
						return NULL;
					}

					expecting_comma=false;
					tokenizer->advance(); //ignore newline
				} else {
					//parse expression
					if (expecting_comma) {
						_set_error("',' or ']' expected");
						return NULL;
					}
					Node *n = _parse_expression(arr,p_static,p_allow_assign,p_parsing_constant);
					if (!n)
						return NULL;
					arr->elements.push_back(n);
					expecting_comma=true;
				}
			}

			expr=arr;
		} else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_OPEN) {
			// dictionary
			tokenizer->advance();

			DictionaryNode *dict = alloc_node<DictionaryNode>();

			enum DictExpect {

				DICT_EXPECT_KEY,
				DICT_EXPECT_COLON,
				DICT_EXPECT_VALUE,
				DICT_EXPECT_COMMA

			};

			Node *key=NULL;
			Set<Variant> keys;

			DictExpect expecting=DICT_EXPECT_KEY;

			while(true) {

				if (tokenizer->get_token()==GDTokenizer::TK_EOF) {

					_set_error("Unterminated dictionary");
					return NULL;

				} else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_CLOSE) {

					if (expecting==DICT_EXPECT_COLON) {
						_set_error("':' expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_VALUE) {
						_set_error("value expected");
						return NULL;
					}
					tokenizer->advance();
					break;
				} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {

					tokenizer->advance(); //ignore newline
				} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {

					if (expecting==DICT_EXPECT_KEY) {
						_set_error("key or '}' expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_VALUE) {
						_set_error("value expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_COLON) {
						_set_error("':' expected");
						return NULL;
					}

					expecting=DICT_EXPECT_KEY;
					tokenizer->advance(); //ignore newline

				} else if (tokenizer->get_token()==GDTokenizer::TK_COLON) {

					if (expecting==DICT_EXPECT_KEY) {
						_set_error("key or '}' expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_VALUE) {
						_set_error("value expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_COMMA) {
						_set_error("',' or '}' expected");
						return NULL;
					}

					expecting=DICT_EXPECT_VALUE;
					tokenizer->advance(); //ignore newline
				} else {

					if (expecting==DICT_EXPECT_COMMA) {
						_set_error("',' or '}' expected");
						return NULL;
					}
					if (expecting==DICT_EXPECT_COLON) {
						_set_error("':' expected");
						return NULL;
					}

					if (expecting==DICT_EXPECT_KEY) {

						if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)==GDTokenizer::TK_OP_ASSIGN) {
							//lua style identifier, easier to write
							ConstantNode *cn = alloc_node<ConstantNode>();
							cn->value = tokenizer->get_token_identifier();
							key = cn;
							tokenizer->advance(2);
							expecting=DICT_EXPECT_VALUE;
						} else {
							//python/js style more flexible
							key = _parse_expression(dict,p_static,p_allow_assign,p_parsing_constant);
							if (!key)
								return NULL;
							expecting=DICT_EXPECT_COLON;
						}
					}

					if (expecting==DICT_EXPECT_VALUE) {
						Node *value = _parse_expression(dict,p_static,p_allow_assign,p_parsing_constant);
						if (!value)
							return NULL;
						expecting=DICT_EXPECT_COMMA;

						if (key->type == GDParser::Node::TYPE_CONSTANT) {
							Variant const& keyName = static_cast<const GDParser::ConstantNode*>(key)->value;

							if (keys.has(keyName)) {
								_set_error("Duplicate key found in Dictionary literal");
								return NULL;
							}
							keys.insert(keyName);
						}

						DictionaryNode::Pair pair;
						pair.key=key;
						pair.value=value;
						dict->elements.push_back(pair);
						key=NULL;

					}

				}
			}

			expr=dict;
		} else if (tokenizer->get_token()==GDTokenizer::TK_PERIOD && (tokenizer->get_token(1)==GDTokenizer::TK_IDENTIFIER || tokenizer->get_token(1)==GDTokenizer::TK_CURSOR) && tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
			// parent call

			tokenizer->advance(); //goto identifier
			OperatorNode *op = alloc_node<OperatorNode>();
			op->op=OperatorNode::OP_PARENT_CALL;

			/*SelfNode *self = alloc_node<SelfNode>();
			op->arguments.push_back(self);
			forbidden for now */
			StringName identifier;
			if (_get_completable_identifier(COMPLETION_PARENT_FUNCTION,identifier)) {
				//indexing stuff
			}

			IdentifierNode *id = alloc_node<IdentifierNode>();
			id->name=identifier;
			op->arguments.push_back(id);

			tokenizer->advance(1);
			if (!_parse_arguments(op,op->arguments,p_static))
				return NULL;

			expr=op;

		} else {

			//find list [ or find dictionary {

			//print_line("found bug?");

			_set_error("Error parsing expression, misplaced: "+String(tokenizer->get_token_name(tokenizer->get_token())));
			return NULL; //nothing
		}

		if (!expr) {
			ERR_EXPLAIN("GDParser bug, couldn't figure out what expression is..");
			ERR_FAIL_COND_V(!expr,NULL);
		}

		/******************/
		/* Parse Indexing */
		/******************/

		while (true) {

			//expressions can be indexed any number of times

			if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {

				//indexing using "."

				if (tokenizer->get_token(1)!=GDTokenizer::TK_CURSOR && tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)!=GDTokenizer::TK_BUILT_IN_FUNC ) {
					_set_error("Expected identifier as member");
					return NULL;
				} else if (tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
					//call!!
					OperatorNode * op = alloc_node<OperatorNode>();
					op->op=OperatorNode::OP_CALL;

					tokenizer->advance();

					IdentifierNode * id = alloc_node<IdentifierNode>();
					if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC ) {
						//small hack so built in funcs don't obfuscate methods

						id->name=GDFunctions::get_func_name(tokenizer->get_token_built_in_func());
						tokenizer->advance();

					} else {
						StringName identifier;
						if (_get_completable_identifier(COMPLETION_METHOD,identifier)) {
							completion_node=op;
							//indexing stuff
						}

						id->name=identifier;
					}

					op->arguments.push_back(expr); // call what
					op->arguments.push_back(id); // call func
					//get arguments
					tokenizer->advance(1);
					if (tokenizer->get_token()==GDTokenizer::TK_CURSOR) {
						_make_completable_call(0);
						completion_node=op;

					}
					if (!_parse_arguments(op,op->arguments,p_static,true))
						return NULL;
					expr=op;

				} else {
					//simple indexing!

					OperatorNode * op = alloc_node<OperatorNode>();
					op->op=OperatorNode::OP_INDEX_NAMED;
					tokenizer->advance();

					StringName identifier;
					if (_get_completable_identifier(COMPLETION_INDEX,identifier)) {

						if (identifier==StringName()) {
							identifier="@temp"; //so it parses allright
						}
						completion_node=op;

						//indexing stuff
					}

					IdentifierNode * id = alloc_node<IdentifierNode>();
					id->name=identifier;

					op->arguments.push_back(expr);
					op->arguments.push_back(id);

					expr=op;

				}

			} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
				//indexing using "[]"
				OperatorNode * op = alloc_node<OperatorNode>();
				op->op=OperatorNode::OP_INDEX;

				tokenizer->advance(1);

				Node *subexpr = _parse_expression(op,p_static,p_allow_assign,p_parsing_constant);
				if (!subexpr) {
					return NULL;
				}

				if (tokenizer->get_token()!=GDTokenizer::TK_BRACKET_CLOSE) {
					_set_error("Expected ']'");
					return NULL;
				}

				op->arguments.push_back(expr);
				op->arguments.push_back(subexpr);
				tokenizer->advance(1);
				expr=op;

			} else
				break;
		}

		/******************/
		/* Parse Operator */
		/******************/

		if (parenthesis>0) {
			//remove empty space (only allowed if inside parenthesis
			while(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
				tokenizer->advance();
			}
		}

		Expression e;
		e.is_op=false;
		e.node=expr;
		expression.push_back(e);

		// determine which operator is next

		OperatorNode::Operator op;
		bool valid=true;

		//assign, if allowed, is only allowed on the first operator
#define _VALIDATE_ASSIGN if (!p_allow_assign) { _set_error("Unexpected assign."); return NULL; } p_allow_assign=false;
		switch(tokenizer->get_token()) { //see operator

			case GDTokenizer::TK_OP_IN: op=OperatorNode::OP_IN; break;
			case GDTokenizer::TK_OP_EQUAL: op=OperatorNode::OP_EQUAL ; break;
			case GDTokenizer::TK_OP_NOT_EQUAL: op=OperatorNode::OP_NOT_EQUAL ; break;
			case GDTokenizer::TK_OP_LESS: op=OperatorNode::OP_LESS ; break;
			case GDTokenizer::TK_OP_LESS_EQUAL: op=OperatorNode::OP_LESS_EQUAL ; break;
			case GDTokenizer::TK_OP_GREATER: op=OperatorNode::OP_GREATER ; break;
			case GDTokenizer::TK_OP_GREATER_EQUAL: op=OperatorNode::OP_GREATER_EQUAL ; break;
			case GDTokenizer::TK_OP_AND: op=OperatorNode::OP_AND ; break;
			case GDTokenizer::TK_OP_OR: op=OperatorNode::OP_OR ; break;
			case GDTokenizer::TK_OP_ADD: op=OperatorNode::OP_ADD ; break;
			case GDTokenizer::TK_OP_SUB: op=OperatorNode::OP_SUB ; break;
			case GDTokenizer::TK_OP_MUL: op=OperatorNode::OP_MUL ; break;
			case GDTokenizer::TK_OP_DIV: op=OperatorNode::OP_DIV ; break;
			case GDTokenizer::TK_OP_MOD: op=OperatorNode::OP_MOD ; break;
			//case GDTokenizer::TK_OP_NEG: op=OperatorNode::OP_NEG ; break;
			case GDTokenizer::TK_OP_SHIFT_LEFT: op=OperatorNode::OP_SHIFT_LEFT ; break;
			case GDTokenizer::TK_OP_SHIFT_RIGHT: op=OperatorNode::OP_SHIFT_RIGHT ; break;
			case GDTokenizer::TK_OP_ASSIGN: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN ; break;
			case GDTokenizer::TK_OP_ASSIGN_ADD: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_ADD ; break;
			case GDTokenizer::TK_OP_ASSIGN_SUB: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SUB ; break;
			case GDTokenizer::TK_OP_ASSIGN_MUL: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_MUL ; break;
			case GDTokenizer::TK_OP_ASSIGN_DIV: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_DIV ; break;
			case GDTokenizer::TK_OP_ASSIGN_MOD: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_MOD ; break;
			case GDTokenizer::TK_OP_ASSIGN_SHIFT_LEFT: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SHIFT_LEFT; break;
			case GDTokenizer::TK_OP_ASSIGN_SHIFT_RIGHT: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SHIFT_RIGHT; break;
			case GDTokenizer::TK_OP_ASSIGN_BIT_AND: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_AND ; break;
			case GDTokenizer::TK_OP_ASSIGN_BIT_OR: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_OR ; break;
			case GDTokenizer::TK_OP_ASSIGN_BIT_XOR: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_XOR ; break;
			case GDTokenizer::TK_OP_BIT_AND: op=OperatorNode::OP_BIT_AND ; break;
			case GDTokenizer::TK_OP_BIT_OR: op=OperatorNode::OP_BIT_OR ; break;
			case GDTokenizer::TK_OP_BIT_XOR: op=OperatorNode::OP_BIT_XOR ; break;
			case GDTokenizer::TK_PR_EXTENDS: op=OperatorNode::OP_EXTENDS; break;
			case GDTokenizer::TK_CF_IF: op=OperatorNode::OP_TERNARY_IF; break;
			case GDTokenizer::TK_CF_ELSE: op=OperatorNode::OP_TERNARY_ELSE; break;
			default: valid=false; break;
		}

		if (valid) {
			e.is_op=true;
			e.op=op;
			expression.push_back(e);
			tokenizer->advance();
		} else {
			break;
		}

	}

	/* Reduce the set of expressions and place them in an operator tree, respecting precedence */

	while(expression.size()>1) {

		int next_op=-1;
		int min_priority=0xFFFFF;
		bool is_unary=false;
		bool is_ternary=false;

		for(int i=0;i<expression.size();i++) {

			if (!expression[i].is_op) {

				continue;
			}

			int priority;

			bool unary=false;
			bool ternary=false;
			bool error=false;

			switch(expression[i].op) {

				case OperatorNode::OP_EXTENDS: priority=-1; break; //before anything

				case OperatorNode::OP_BIT_INVERT: priority=0; unary=true; break;
				case OperatorNode::OP_NEG: priority=1; unary=true; break;
				case OperatorNode::OP_POS: priority=1; unary=true; break;

				case OperatorNode::OP_MUL: priority=2; break;
				case OperatorNode::OP_DIV: priority=2; break;
				case OperatorNode::OP_MOD: priority=2; break;

				case OperatorNode::OP_ADD: priority=3; break;
				case OperatorNode::OP_SUB: priority=3; break;

				case OperatorNode::OP_SHIFT_LEFT: priority=4; break;
				case OperatorNode::OP_SHIFT_RIGHT: priority=4; break;

				case OperatorNode::OP_BIT_AND: priority=5; break;
				case OperatorNode::OP_BIT_XOR: priority=6; break;
				case OperatorNode::OP_BIT_OR: priority=7; break;

				case OperatorNode::OP_LESS: priority=8; break;
				case OperatorNode::OP_LESS_EQUAL: priority=8; break;
				case OperatorNode::OP_GREATER: priority=8; break;
				case OperatorNode::OP_GREATER_EQUAL: priority=8; break;

				case OperatorNode::OP_EQUAL: priority=8; break;
				case OperatorNode::OP_NOT_EQUAL: priority=8; break;

				case OperatorNode::OP_IN: priority=10; break;

				case OperatorNode::OP_NOT: priority=11; unary=true; break;
				case OperatorNode::OP_AND: priority=12; break;
				case OperatorNode::OP_OR: priority=13; break;

				case OperatorNode::OP_TERNARY_IF: priority=14; ternary=true; break;
				case OperatorNode::OP_TERNARY_ELSE: priority=14; error=true; break; // Errors out when found without IF (since IF would consume it)

				case OperatorNode::OP_ASSIGN: priority=15; break;
				case OperatorNode::OP_ASSIGN_ADD: priority=15; break;
				case OperatorNode::OP_ASSIGN_SUB: priority=15; break;
				case OperatorNode::OP_ASSIGN_MUL: priority=15; break;
				case OperatorNode::OP_ASSIGN_DIV: priority=15; break;
				case OperatorNode::OP_ASSIGN_MOD: priority=15; break;
				case OperatorNode::OP_ASSIGN_SHIFT_LEFT: priority=15; break;
				case OperatorNode::OP_ASSIGN_SHIFT_RIGHT: priority=15; break;
				case OperatorNode::OP_ASSIGN_BIT_AND: priority=15; break;
				case OperatorNode::OP_ASSIGN_BIT_OR: priority=15; break;
				case OperatorNode::OP_ASSIGN_BIT_XOR: priority=15; break;

				default: {
					_set_error("GDParser bug, invalid operator in expression: "+itos(expression[i].op));
					return NULL;
				}

			}

			if (priority<min_priority) {
				if(error) {
					_set_error("Unexpected operator");
					return NULL;
				}
				// < is used for left to right (default)
				// <= is used for right to left
				next_op=i;
				min_priority=priority;
				is_unary=unary;
				is_ternary=ternary;
			}

		}

		if (next_op==-1) {

			_set_error("Yet another parser bug....");
			ERR_FAIL_COND_V(next_op==-1,NULL);
		}

		// OK! create operator..
		if (is_unary) {

			int expr_pos=next_op;
			while(expression[expr_pos].is_op) {

				expr_pos++;
				if (expr_pos==expression.size()) {
					//can happen..
					_set_error("Unexpected end of expression..");
					return NULL;
				}
			}

			//consecutively do unary operators
			for(int i=expr_pos-1;i>=next_op;i--) {

				OperatorNode *op = alloc_node<OperatorNode>();
				op->op=expression[i].op;
				op->arguments.push_back(expression[i+1].node);
				op->line=op_line; //line might have been changed from a \n
				expression[i].is_op=false;
				expression[i].node=op;
				expression.remove(i+1);
			}

		} else if(is_ternary) {
			if (next_op <1 || next_op>=(expression.size()-1)) {
				_set_error("Parser bug..");
				ERR_FAIL_V(NULL);
			}

			if(next_op>=(expression.size()-2) || expression[next_op+2].op != OperatorNode::OP_TERNARY_ELSE) {
				_set_error("Expected else after ternary if.");
				ERR_FAIL_V(NULL);
			}
			if(next_op>=(expression.size()-3)) {
				_set_error("Expected value after ternary else.");
				ERR_FAIL_V(NULL);
			}

			OperatorNode *op = alloc_node<OperatorNode>();
			op->op=expression[next_op].op;
			op->line=op_line; //line might have been changed from a \n

			if (expression[next_op-1].is_op) {

				_set_error("Parser bug..");
				ERR_FAIL_V(NULL);
			}

			if (expression[next_op+1].is_op) {
				// this is not invalid and can really appear
				// but it becomes invalid anyway because no binary op
				// can be followed by an unary op in a valid combination,
				// due to how precedence works, unaries will always dissapear first

				_set_error("Unexpected two consecutive operators after ternary if.");
				return NULL;
			}

			if (expression[next_op+3].is_op) {
				// this is not invalid and can really appear
				// but it becomes invalid anyway because no binary op
				// can be followed by an unary op in a valid combination,
				// due to how precedence works, unaries will always dissapear first

				_set_error("Unexpected two consecutive operators after ternary else.");
				return NULL;
			}

			op->arguments.push_back(expression[next_op+1].node); //next expression goes as first
			op->arguments.push_back(expression[next_op-1].node); //left expression goes as when-true
			op->arguments.push_back(expression[next_op+3].node); //expression after next goes as when-false

			//replace all 3 nodes by this operator and make it an expression
			expression[next_op-1].node=op;
			expression.remove(next_op);
			expression.remove(next_op);
			expression.remove(next_op);
			expression.remove(next_op);
		} else {

			if (next_op <1 || next_op>=(expression.size()-1)) {
				_set_error("Parser bug..");
				ERR_FAIL_V(NULL);
			}

			OperatorNode *op = alloc_node<OperatorNode>();
			op->op=expression[next_op].op;
			op->line=op_line; //line might have been changed from a \n

			if (expression[next_op-1].is_op) {

				_set_error("Parser bug..");
				ERR_FAIL_V(NULL);
			}

			if (expression[next_op+1].is_op) {
				// this is not invalid and can really appear
				// but it becomes invalid anyway because no binary op
				// can be followed by an unary op in a valid combination,
				// due to how precedence works, unaries will always dissapear first

				_set_error("Unexpected two consecutive operators.");
				return NULL;
			}

			op->arguments.push_back(expression[next_op-1].node); //expression goes as left
			op->arguments.push_back(expression[next_op+1].node); //next expression goes as right

			//replace all 3 nodes by this operator and make it an expression
			expression[next_op-1].node=op;
			expression.remove(next_op);
			expression.remove(next_op);
		}

	}

	return expression[0].node;

}

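// Constant folding pass: collapses array, dictionary and operator nodes whose
// operands are all constants into a single ConstantNode where possible.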
GDParser::Node* GDParser::_reduce_expression(Node *p_node,bool p_to_const) {

	switch(p_node->type) {

		case Node::TYPE_BUILT_IN_FUNCTION: {
			//many may probably be optimizable
			return p_node;
		} break;
		case Node::TYPE_ARRAY: {

			ArrayNode *an = static_cast<ArrayNode*>(p_node);
			bool all_constants=true;

			for(int i=0;i<an->elements.size();i++) {

				an->elements[i]=_reduce_expression(an->elements[i],p_to_const);
				if (an->elements[i]->type!=Node::TYPE_CONSTANT)
					all_constants=false;
			}

			if (all_constants && p_to_const) {
				//reduce constant array expression

				ConstantNode *cn = alloc_node<ConstantNode>();
				Array arr;
				//print_line("mk array "+itos(!p_to_const));
				arr.resize(an->elements.size());
				for(int i=0;i<an->elements.size();i++) {
					ConstantNode *acn = static_cast<ConstantNode*>(an->elements[i]);
					arr[i]=acn->value;

				}
				cn->value=arr;
				return cn;
			}

			return an;

		} break;
		case Node::TYPE_DICTIONARY: {

			DictionaryNode *dn = static_cast<DictionaryNode*>(p_node);
			bool all_constants=true;

			for(int i=0;i<dn->elements.size();i++) {

				dn->elements[i].key=_reduce_expression(dn->elements[i].key,p_to_const);
				if (dn->elements[i].key->type!=Node::TYPE_CONSTANT)
					all_constants=false;
				dn->elements[i].value=_reduce_expression(dn->elements[i].value,p_to_const);
				if (dn->elements[i].value->type!=Node::TYPE_CONSTANT)
					all_constants=false;

			}

			if (all_constants && p_to_const) {
				//reduce constant dictionary expression

				ConstantNode *cn = alloc_node<ConstantNode>();
				Dictionary dict;
				for(int i=0;i<dn->elements.size();i++) {
					ConstantNode *key_c = static_cast<ConstantNode*>(dn->elements[i].key);
					ConstantNode *value_c = static_cast<ConstantNode*>(dn->elements[i].value);

					dict[key_c->value]=value_c->value;

				}
				cn->value=dict;
				return cn;
			}

			return dn;

		} break;
case Node::TYPE_OPERATOR: {
|
|
|
|
|
|
|
|
OperatorNode *op=static_cast<OperatorNode*>(p_node);
|
|
|
|
|
|
|
|
bool all_constants=true;
|
|
|
|
int last_not_constant=-1;
|
|
|
|
|
|
|
|
for(int i=0;i<op->arguments.size();i++) {
|
|
|
|
|
|
|
|
op->arguments[i]=_reduce_expression(op->arguments[i],p_to_const);
|
|
|
|
if (op->arguments[i]->type!=Node::TYPE_CONSTANT) {
|
|
|
|
all_constants=false;
|
|
|
|
last_not_constant=i;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (op->op==OperatorNode::OP_EXTENDS) {
|
|
|
|
//nothing much
|
|
|
|
return op;
|
|
|
|
|
|
|
|
} if (op->op==OperatorNode::OP_PARENT_CALL) {
|
|
|
|
//nothing much
|
|
|
|
return op;
|
|
|
|
|
|
|
|
} else if (op->op==OperatorNode::OP_CALL) {
|
|
|
|
//can reduce base type constructors
|
|
|
|
if ((op->arguments[0]->type==Node::TYPE_TYPE || (op->arguments[0]->type==Node::TYPE_BUILT_IN_FUNCTION && GDFunctions::is_deterministic( static_cast<BuiltInFunctionNode*>(op->arguments[0])->function))) && last_not_constant==0) {
|
|
|
|
|
|
|
|
//native type constructor or intrinsic function
|
|
|
|
const Variant **vptr=NULL;
|
|
|
|
Vector<Variant*> ptrs;
|
|
|
|
if (op->arguments.size()>1) {
|
|
|
|
|
|
|
|
ptrs.resize(op->arguments.size()-1);
|
|
|
|
for(int i=0;i<ptrs.size();i++) {
|
|
|
|
|
|
|
|
|
|
|
|
ConstantNode *cn = static_cast<ConstantNode*>(op->arguments[i+1]);
|
|
|
|
ptrs[i]=&cn->value;
|
|
|
|
}
|
|
|
|
|
|
|
|
vptr=(const Variant**)&ptrs[0];
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
Variant::CallError ce;
|
|
|
|
Variant v;
|
|
|
|
|
|
|
|
if (op->arguments[0]->type==Node::TYPE_TYPE) {
|
|
|
|
TypeNode *tn = static_cast<TypeNode*>(op->arguments[0]);
|
|
|
|
v = Variant::construct(tn->vtype,vptr,ptrs.size(),ce);
|
|
|
|
|
|
|
|
} else {
|
|
|
|
GDFunctions::Function func = static_cast<BuiltInFunctionNode*>(op->arguments[0])->function;
|
|
|
|
GDFunctions::call(func,vptr,ptrs.size(),v,ce);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if (ce.error!=Variant::CallError::CALL_OK) {
|
|
|
|
|
|
|
|
String errwhere;
|
|
|
|
if (op->arguments[0]->type==Node::TYPE_TYPE) {
|
|
|
|
TypeNode *tn = static_cast<TypeNode*>(op->arguments[0]);
|
|
|
|
errwhere="'"+Variant::get_type_name(tn->vtype)+"'' constructor";
|
|
|
|
|
|
|
|
} else {
|
|
|
|
GDFunctions::Function func = static_cast<BuiltInFunctionNode*>(op->arguments[0])->function;
|
|
|
|
errwhere=String("'")+GDFunctions::get_func_name(func)+"'' intrinsic function";
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
switch(ce.error) {
|
|
|
|
|
|
|
|
case Variant::CallError::CALL_ERROR_INVALID_ARGUMENT: {
|
|
|
|
|
|
|
|
_set_error("Invalid argument (#"+itos(ce.argument+1)+") for "+errwhere+".");
|
|
|
|
|
|
|
|
} break;
|
|
|
|
case Variant::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS: {
|
|
|
|
|
|
|
|
_set_error("Too many arguments for "+errwhere+".");
|
|
|
|
} break;
|
|
|
|
case Variant::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS: {
|
|
|
|
|
|
|
|
_set_error("Too few arguments for "+errwhere+".");
|
|
|
|
} break;
|
|
|
|
default: {
|
|
|
|
_set_error("Invalid arguments for "+errwhere+".");
|
|
|
|
|
|
|
|
} break;
|
|
|
|
}
|
|
|
|
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
return p_node;
|
|
|
|
}
|
|
|
|
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value=v;
|
|
|
|
return cn;
|
|
|
|
|
|
|
|
} else if (op->arguments[0]->type==Node::TYPE_BUILT_IN_FUNCTION && last_not_constant==0) {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
return op; //don't reduce yet
|
2014-09-15 16:33:30 +02:00
|
|
|
|
|
|
|
} else if (op->op==OperatorNode::OP_YIELD) {
|
|
|
|
return op;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
} else if (op->op==OperatorNode::OP_INDEX) {
|
|
|
|
//can reduce indices into constant arrays or dictionaries
|
|
|
|
|
|
|
|
if (all_constants) {
|
|
|
|
|
|
|
|
ConstantNode *ca = static_cast<ConstantNode*>(op->arguments[0]);
|
|
|
|
ConstantNode *cb = static_cast<ConstantNode*>(op->arguments[1]);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
bool valid;
|
|
|
|
|
|
|
|
Variant v = ca->value.get(cb->value,&valid);
|
|
|
|
if (!valid) {
|
|
|
|
_set_error("invalid index in constant expression");
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;
|
2014-02-10 02:10:30 +01:00
|
|
|
return op;
|
|
|
|
}
|
|
|
|
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value=v;
|
|
|
|
return cn;
|
|
|
|
|
2014-11-06 01:20:42 +01:00
|
|
|
} /*else if (op->arguments[0]->type==Node::TYPE_CONSTANT && op->arguments[1]->type==Node::TYPE_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
ConstantNode *ca = static_cast<ConstantNode*>(op->arguments[0]);
|
|
|
|
IdentifierNode *ib = static_cast<IdentifierNode*>(op->arguments[1]);
|
|
|
|
|
|
|
|
bool valid;
|
|
|
|
Variant v = ca->value.get_named(ib->name,&valid);
|
|
|
|
if (!valid) {
|
|
|
|
_set_error("invalid index '"+String(ib->name)+"' in constant expression");
|
|
|
|
return op;
|
|
|
|
}
|
|
|
|
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value=v;
|
|
|
|
return cn;
|
2014-11-06 01:20:42 +01:00
|
|
|
}*/
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-11-06 01:20:42 +01:00
|
|
|
return op;
|
|
|
|
|
|
|
|
} else if (op->op==OperatorNode::OP_INDEX_NAMED) {
|
|
|
|
|
|
|
|
if (op->arguments[0]->type==Node::TYPE_CONSTANT && op->arguments[1]->type==Node::TYPE_IDENTIFIER) {
|
|
|
|
|
|
|
|
ConstantNode *ca = static_cast<ConstantNode*>(op->arguments[0]);
|
|
|
|
IdentifierNode *ib = static_cast<IdentifierNode*>(op->arguments[1]);
|
|
|
|
|
|
|
|
bool valid;
|
|
|
|
Variant v = ca->value.get_named(ib->name,&valid);
|
|
|
|
if (!valid) {
|
|
|
|
_set_error("invalid index '"+String(ib->name)+"' in constant expression");
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;
|
2014-11-06 01:20:42 +01:00
|
|
|
return op;
|
|
|
|
}
|
|
|
|
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value=v;
|
|
|
|
return cn;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return op;
|
2014-11-06 01:20:42 +01:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
//validate assignment (don't assign to a constant expression)
|
|
|
|
switch(op->op) {
|
|
|
|
|
|
|
|
case OperatorNode::OP_ASSIGN:
|
|
|
|
case OperatorNode::OP_ASSIGN_ADD:
|
|
|
|
case OperatorNode::OP_ASSIGN_SUB:
|
|
|
|
case OperatorNode::OP_ASSIGN_MUL:
|
|
|
|
case OperatorNode::OP_ASSIGN_DIV:
|
|
|
|
case OperatorNode::OP_ASSIGN_MOD:
|
|
|
|
case OperatorNode::OP_ASSIGN_SHIFT_LEFT:
|
|
|
|
case OperatorNode::OP_ASSIGN_SHIFT_RIGHT:
|
|
|
|
case OperatorNode::OP_ASSIGN_BIT_AND:
|
|
|
|
case OperatorNode::OP_ASSIGN_BIT_OR:
|
|
|
|
case OperatorNode::OP_ASSIGN_BIT_XOR: {
|
|
|
|
|
|
|
|
if (op->arguments[0]->type==Node::TYPE_CONSTANT) {
|
2014-10-27 22:26:11 +01:00
|
|
|
_set_error("Can't assign to constant",tokenizer->get_token_line()-1);
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;
|
2014-02-10 02:10:30 +01:00
|
|
|
return op;
|
|
|
|
}
|
|
|
|
|
2016-12-29 11:31:19 +01:00
|
|
|
if (op->arguments[0]->type==Node::TYPE_OPERATOR) {
|
|
|
|
OperatorNode *on = static_cast<OperatorNode*>(op->arguments[0]);
|
|
|
|
if (on->op != OperatorNode::OP_INDEX && on->op != OperatorNode::OP_INDEX_NAMED) {
|
|
|
|
_set_error("Can't assign to an expression",tokenizer->get_token_line()-1);
|
|
|
|
error_line=op->line;
|
|
|
|
return op;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
default: { break; }
|
|
|
|
}
|
|
|
|
//now see if all are constants
|
|
|
|
if (!all_constants)
|
|
|
|
return op; //nothing to reduce from here on
|
|
|
|
#define _REDUCE_UNARY(m_vop)\
|
|
|
|
bool valid=false;\
|
|
|
|
Variant res;\
|
|
|
|
Variant::evaluate(m_vop,static_cast<ConstantNode*>(op->arguments[0])->value,Variant(),res,valid);\
|
|
|
|
if (!valid) {\
|
|
|
|
_set_error("Invalid operand for unary operator");\
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;\
|
2014-02-10 02:10:30 +01:00
|
|
|
return p_node;\
|
|
|
|
}\
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();\
|
|
|
|
cn->value=res;\
|
|
|
|
return cn;
|
|
|
|
|
|
|
|
#define _REDUCE_BINARY(m_vop)\
|
|
|
|
bool valid=false;\
|
|
|
|
Variant res;\
|
|
|
|
Variant::evaluate(m_vop,static_cast<ConstantNode*>(op->arguments[0])->value,static_cast<ConstantNode*>(op->arguments[1])->value,res,valid);\
|
|
|
|
if (!valid) {\
|
|
|
|
_set_error("Invalid operands for operator");\
|
2016-07-22 14:22:34 +02:00
|
|
|
error_line=op->line;\
|
2014-02-10 02:10:30 +01:00
|
|
|
return p_node;\
|
|
|
|
}\
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();\
|
|
|
|
cn->value=res;\
|
|
|
|
return cn;
|
|
|
|
|
|
|
|
switch(op->op) {
|
|
|
|
|
|
|
|
//unary operators
|
|
|
|
case OperatorNode::OP_NEG: { _REDUCE_UNARY(Variant::OP_NEGATE); } break;
|
2016-10-12 23:43:59 +02:00
|
|
|
case OperatorNode::OP_POS: { _REDUCE_UNARY(Variant::OP_POSITIVE); } break;
|
2014-02-10 02:10:30 +01:00
|
|
|
case OperatorNode::OP_NOT: { _REDUCE_UNARY(Variant::OP_NOT); } break;
|
|
|
|
case OperatorNode::OP_BIT_INVERT: { _REDUCE_UNARY(Variant::OP_BIT_NEGATE); } break;
|
|
|
|
//binary operators (in precedence order)
|
|
|
|
case OperatorNode::OP_IN: { _REDUCE_BINARY(Variant::OP_IN); } break;
|
|
|
|
case OperatorNode::OP_EQUAL: { _REDUCE_BINARY(Variant::OP_EQUAL); } break;
|
|
|
|
case OperatorNode::OP_NOT_EQUAL: { _REDUCE_BINARY(Variant::OP_NOT_EQUAL); } break;
|
|
|
|
case OperatorNode::OP_LESS: { _REDUCE_BINARY(Variant::OP_LESS); } break;
|
|
|
|
case OperatorNode::OP_LESS_EQUAL: { _REDUCE_BINARY(Variant::OP_LESS_EQUAL); } break;
|
|
|
|
case OperatorNode::OP_GREATER: { _REDUCE_BINARY(Variant::OP_GREATER); } break;
|
|
|
|
case OperatorNode::OP_GREATER_EQUAL: { _REDUCE_BINARY(Variant::OP_GREATER_EQUAL); } break;
|
|
|
|
case OperatorNode::OP_AND: { _REDUCE_BINARY(Variant::OP_AND); } break;
|
|
|
|
case OperatorNode::OP_OR: { _REDUCE_BINARY(Variant::OP_OR); } break;
|
|
|
|
case OperatorNode::OP_ADD: { _REDUCE_BINARY(Variant::OP_ADD); } break;
|
|
|
|
case OperatorNode::OP_SUB: { _REDUCE_BINARY(Variant::OP_SUBSTRACT); } break;
|
|
|
|
case OperatorNode::OP_MUL: { _REDUCE_BINARY(Variant::OP_MULTIPLY); } break;
|
|
|
|
case OperatorNode::OP_DIV: { _REDUCE_BINARY(Variant::OP_DIVIDE); } break;
|
|
|
|
case OperatorNode::OP_MOD: { _REDUCE_BINARY(Variant::OP_MODULE); } break;
|
|
|
|
case OperatorNode::OP_SHIFT_LEFT: { _REDUCE_BINARY(Variant::OP_SHIFT_LEFT); } break;
|
|
|
|
case OperatorNode::OP_SHIFT_RIGHT: { _REDUCE_BINARY(Variant::OP_SHIFT_RIGHT); } break;
|
|
|
|
case OperatorNode::OP_BIT_AND: { _REDUCE_BINARY(Variant::OP_BIT_AND); } break;
|
|
|
|
case OperatorNode::OP_BIT_OR: { _REDUCE_BINARY(Variant::OP_BIT_OR); } break;
|
|
|
|
case OperatorNode::OP_BIT_XOR: { _REDUCE_BINARY(Variant::OP_BIT_XOR); } break;
|
|
|
|
default: { ERR_FAIL_V(op); }
|
|
|
|
}
|
|
|
|
|
|
|
|
ERR_FAIL_V(op);
|
|
|
|
} break;
|
|
|
|
default: {
|
|
|
|
return p_node;
|
|
|
|
} break;
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
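// Parses an expression and then constant-folds it through _reduce_expression.
// Returns NULL (with error_set) if parsing or reduction failed.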
GDParser::Node* GDParser::_parse_and_reduce_expression(Node *p_parent,bool p_static,bool p_reduce_const,bool p_allow_assign) {
|
|
|
|
|
2016-06-30 15:40:13 +02:00
|
|
|
Node* expr=_parse_expression(p_parent,p_static,p_allow_assign,p_reduce_const);
|
2014-02-10 02:10:30 +01:00
|
|
|
if (!expr || error_set)
|
|
|
|
return NULL;
|
|
|
|
expr = _reduce_expression(expr,p_reduce_const);
|
|
|
|
if (!expr || error_set)
|
|
|
|
return NULL;
|
|
|
|
return expr;
|
|
|
|
}
|
|
|
|
|
2015-08-30 16:50:10 +02:00
|
|
|
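// Called after a code-completion request interrupted parsing: skips the rest of
// the current line and clears the error state so parsing can continue.
// Returns false if there was no pending completion to recover from.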
bool GDParser::_recover_from_completion() {
|
|
|
|
|
|
|
|
if (!completion_found) {
|
|
|
|
return false; //can't recover if no completion
|
|
|
|
}
|
|
|
|
//skip stuff until newline
|
2015-09-02 05:56:51 +02:00
|
|
|
while(tokenizer->get_token()!=GDTokenizer::TK_NEWLINE && tokenizer->get_token()!=GDTokenizer::TK_EOF && tokenizer->get_token()!=GDTokenizer::TK_ERROR) {
|
2015-08-30 16:50:10 +02:00
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
completion_found=false;
|
|
|
|
error_set=false;
|
2015-09-02 05:56:51 +02:00
|
|
|
if(tokenizer->get_token() == GDTokenizer::TK_ERROR){
|
|
|
|
error_set = true;
|
|
|
|
}
|
|
|
|
|
2015-08-30 16:50:10 +02:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2016-09-30 21:40:31 +02:00
|
|
|
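// Parses a single 'match' pattern: an array pattern '[...]', a dictionary pattern
// '{...}', a binding 'var name', the wildcard '_', or a constant expression.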
GDParser::PatternNode *GDParser::_parse_pattern(bool p_static)
|
|
|
|
{
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
PatternNode *pattern = alloc_node<PatternNode>();
|
2016-09-30 21:40:31 +02:00
|
|
|
|
|
|
|
GDTokenizer::Token token = tokenizer->get_token();
|
|
|
|
if (error_set)
|
|
|
|
return NULL;
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
if (token == GDTokenizer::TK_EOF) {
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2016-09-30 21:40:31 +02:00
|
|
|
switch (token) {
|
2016-10-16 13:20:28 +02:00
|
|
|
// array
|
2016-09-30 21:40:31 +02:00
|
|
|
case GDTokenizer::TK_BRACKET_OPEN: {
|
|
|
|
tokenizer->advance();
|
|
|
|
pattern->pt_type = GDParser::PatternNode::PT_ARRAY;
|
|
|
|
while (true) {
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_PERIOD && tokenizer->get_token(1) == GDTokenizer::TK_PERIOD) {
|
|
|
|
// match everything
|
|
|
|
tokenizer->advance(2);
|
2016-10-05 18:48:38 +02:00
|
|
|
PatternNode *sub_pattern = alloc_node<PatternNode>();
|
|
|
|
sub_pattern->pt_type = GDParser::PatternNode::PT_IGNORE_REST;
|
|
|
|
pattern->array.push_back(sub_pattern);
|
2016-09-30 21:40:31 +02:00
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_COMMA && tokenizer->get_token(1) == GDTokenizer::TK_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance(2);
|
|
|
|
break;
|
|
|
|
} else if (tokenizer->get_token() == GDTokenizer::TK_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance(1);
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
_set_error("'..' pattern only allowed at the end of an array pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
PatternNode *sub_pattern = _parse_pattern(p_static);
|
|
|
|
if (!sub_pattern) {
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
pattern->array.push_back(sub_pattern);
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
continue;
|
|
|
|
} else if (tokenizer->get_token() == GDTokenizer::TK_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
_set_error("Not a valid pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} break;
|
2016-10-16 13:20:28 +02:00
|
|
|
// bind
|
2016-10-05 18:48:38 +02:00
|
|
|
case GDTokenizer::TK_PR_VAR: {
|
|
|
|
tokenizer->advance();
|
2016-09-30 21:40:31 +02:00
|
|
|
pattern->pt_type = GDParser::PatternNode::PT_BIND;
|
|
|
|
pattern->bind = tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
|
|
|
} break;
|
2016-10-16 13:20:28 +02:00
|
|
|
// dictionary
|
2016-09-30 21:40:31 +02:00
|
|
|
case GDTokenizer::TK_CURLY_BRACKET_OPEN: {
|
|
|
|
tokenizer->advance();
|
2016-10-05 18:48:38 +02:00
|
|
|
pattern->pt_type = GDParser::PatternNode::PT_DICTIONARY;
|
2016-09-30 21:40:31 +02:00
|
|
|
while (true) {
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_PERIOD && tokenizer->get_token(1) == GDTokenizer::TK_PERIOD) {
|
|
|
|
// match everything
|
|
|
|
tokenizer->advance(2);
|
2016-10-05 18:48:38 +02:00
|
|
|
PatternNode *sub_pattern = alloc_node<PatternNode>();
|
|
|
|
sub_pattern->pt_type = PatternNode::PT_IGNORE_REST;
|
|
|
|
pattern->array.push_back(sub_pattern);
|
2016-09-30 21:40:31 +02:00
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_COMMA && tokenizer->get_token(1) == GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance(2);
|
|
|
|
break;
|
|
|
|
} else if (tokenizer->get_token() == GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance(1);
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
_set_error("'..' pattern only allowed at the end of an dictionary pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Node *key = _parse_and_reduce_expression(pattern, p_static);
|
|
|
|
if (!key) {
|
|
|
|
_set_error("Not a valid key in pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (key->type != GDParser::Node::TYPE_CONSTANT) {
|
|
|
|
_set_error("Not a constant expression as key");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_COLON) {
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
PatternNode *value = _parse_pattern(p_static);
|
|
|
|
if (!value) {
|
|
|
|
_set_error("Expected pattern in dictionary value");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
pattern->dictionary.insert(static_cast<ConstantNode*>(key), value);
|
|
|
|
} else {
|
|
|
|
pattern->dictionary.insert(static_cast<ConstantNode*>(key), NULL);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if (tokenizer->get_token() == GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
continue;
|
|
|
|
} else if (tokenizer->get_token() == GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
_set_error("Not a valid pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
} break;
|
2016-10-16 13:20:28 +02:00
|
|
|
// all the constants like strings and numbers
|
2016-09-30 21:40:31 +02:00
|
|
|
default: {
|
2016-10-05 18:48:38 +02:00
|
|
|
Node *value = _parse_and_reduce_expression(pattern, p_static);
|
|
|
|
if (error_set) {
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
if (value->type == Node::TYPE_IDENTIFIER && static_cast<IdentifierNode*>(value)->name == "_") {
|
|
|
|
// wildcard pattern
|
|
|
|
pattern->pt_type = PatternNode::PT_WILDCARD;
|
|
|
|
break;
|
|
|
|
}
|
2016-10-16 13:20:28 +02:00
|
|
|
|
|
|
|
if (value->type != Node::TYPE_IDENTIFIER && value->type != Node::TYPE_CONSTANT) {
|
|
|
|
_set_error("Only constant expressions or variables allowed in a pattern");
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
pattern->pt_type = PatternNode::PT_CONSTANT;
|
|
|
|
pattern->constant = value;
|
|
|
|
} break;
|
2016-09-30 21:40:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return pattern;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
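// Parses the indented branches of a 'match' block. Each branch consists of one
// or more comma-separated patterns followed by an indented body.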
void GDParser::_parse_pattern_block(BlockNode *p_block, Vector<PatternBranchNode*> &p_branches, bool p_static)
|
2016-09-30 21:40:31 +02:00
|
|
|
{
|
|
|
|
int indent_level = tab_level.back()->get();
|
|
|
|
|
|
|
|
while (true) {
|
|
|
|
|
|
|
|
while (tokenizer->get_token() == GDTokenizer::TK_NEWLINE && _parse_newline());
|
|
|
|
|
|
|
|
// GDTokenizer::Token token = tokenizer->get_token();
|
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
|
|
|
|
if (indent_level > tab_level.back()->get()) {
|
|
|
|
return; // go back a level
|
|
|
|
}
|
|
|
|
|
|
|
|
if (pending_newline!=-1) {
|
|
|
|
pending_newline=-1;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
PatternBranchNode *branch = alloc_node<PatternBranchNode>();
|
2016-09-30 21:40:31 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
branch->patterns.push_back(_parse_pattern(p_static));
|
|
|
|
if (!branch->patterns[0]) {
|
2016-09-30 21:40:31 +02:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
while (tokenizer->get_token() == GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
branch->patterns.push_back(_parse_pattern(p_static));
|
|
|
|
if (!branch->patterns[branch->patterns.size() - 1]) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-09-30 21:40:31 +02:00
|
|
|
if(!_enter_indent_block()) {
|
|
|
|
_set_error("Expected block in pattern branch");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
branch->body = alloc_node<BlockNode>();
|
|
|
|
branch->body->parent_block = p_block;
|
|
|
|
p_block->sub_blocks.push_back(branch->body);
|
|
|
|
current_block = branch->body;
|
2016-09-30 21:40:31 +02:00
|
|
|
|
|
|
|
_parse_block(branch->body, p_static);
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
current_block = p_block;
|
|
|
|
|
|
|
|
p_branches.push_back(branch);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
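// Lowers a pattern into an equivalent boolean expression tree evaluated against
// p_node_to_match; any 'var' bindings found along the way are stored in p_bindings.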
void GDParser::_generate_pattern(PatternNode *p_pattern, Node *p_node_to_match, Node *&p_resulting_node, Map<StringName, Node*> &p_bindings)
|
2016-10-05 18:48:38 +02:00
|
|
|
{
|
2016-10-16 13:20:28 +02:00
|
|
|
switch (p_pattern->pt_type) {
|
|
|
|
case PatternNode::PT_CONSTANT: {
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
// typecheck
|
|
|
|
BuiltInFunctionNode *typeof_node = alloc_node<BuiltInFunctionNode>();
|
|
|
|
typeof_node->function = GDFunctions::TYPE_OF;
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
OperatorNode *typeof_match_value = alloc_node<OperatorNode>();
|
|
|
|
typeof_match_value->op = OperatorNode::OP_CALL;
|
|
|
|
typeof_match_value->arguments.push_back(typeof_node);
|
|
|
|
typeof_match_value->arguments.push_back(p_node_to_match);
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
OperatorNode *typeof_pattern_value = alloc_node<OperatorNode>();
|
|
|
|
typeof_pattern_value->op = OperatorNode::OP_CALL;
|
|
|
|
typeof_pattern_value->arguments.push_back(typeof_node);
|
|
|
|
typeof_pattern_value->arguments.push_back(p_pattern->constant);
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
OperatorNode *type_comp = alloc_node<OperatorNode>();
|
|
|
|
type_comp->op = OperatorNode::OP_EQUAL;
|
|
|
|
type_comp->arguments.push_back(typeof_match_value);
|
|
|
|
type_comp->arguments.push_back(typeof_pattern_value);
|
2016-10-05 18:48:38 +02:00
|
|
|
|
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
// compare the actual values
|
|
|
|
OperatorNode *value_comp = alloc_node<OperatorNode>();
|
|
|
|
value_comp->op = OperatorNode::OP_EQUAL;
|
|
|
|
value_comp->arguments.push_back(p_pattern->constant);
|
|
|
|
value_comp->arguments.push_back(p_node_to_match);
|
|
|
|
|
|
|
|
|
|
|
|
OperatorNode *comparison = alloc_node<OperatorNode>();
|
|
|
|
comparison->op = OperatorNode::OP_AND;
|
|
|
|
comparison->arguments.push_back(type_comp);
|
|
|
|
comparison->arguments.push_back(value_comp);
|
|
|
|
|
|
|
|
p_resulting_node = comparison;
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
} break;
|
|
|
|
case PatternNode::PT_BIND: {
|
2016-10-16 13:20:28 +02:00
|
|
|
p_bindings[p_pattern->bind] = p_node_to_match;
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
// a bind always matches
|
2016-10-05 18:48:38 +02:00
|
|
|
ConstantNode *true_value = alloc_node<ConstantNode>();
|
|
|
|
true_value->value = Variant(true);
|
|
|
|
p_resulting_node = true_value;
|
|
|
|
} break;
|
|
|
|
case PatternNode::PT_ARRAY: {
|
2016-10-16 13:20:28 +02:00
|
|
|
|
|
|
|
bool open_ended = false;
|
|
|
|
|
|
|
|
if (p_pattern->array.size() > 0) {
|
|
|
|
if (p_pattern->array[p_pattern->array.size() - 1]->pt_type == PatternNode::PT_IGNORE_REST) {
|
|
|
|
open_ended = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// typeof(value_to_match) == TYPE_ARRAY && value_to_match.size() >= length
|
|
|
|
// typeof(value_to_match) == TYPE_ARRAY && value_to_match.size() == length
|
|
|
|
|
|
|
|
{
|
|
|
|
// typecheck
|
|
|
|
BuiltInFunctionNode *typeof_node = alloc_node<BuiltInFunctionNode>();
|
|
|
|
typeof_node->function = GDFunctions::TYPE_OF;
|
|
|
|
|
|
|
|
OperatorNode *typeof_match_value = alloc_node<OperatorNode>();
|
|
|
|
typeof_match_value->op = OperatorNode::OP_CALL;
|
|
|
|
typeof_match_value->arguments.push_back(typeof_node);
|
|
|
|
typeof_match_value->arguments.push_back(p_node_to_match);
|
|
|
|
|
|
|
|
IdentifierNode *typeof_array = alloc_node<IdentifierNode>();
|
|
|
|
typeof_array->name = "TYPE_ARRAY";
|
|
|
|
|
|
|
|
OperatorNode *type_comp = alloc_node<OperatorNode>();
|
|
|
|
type_comp->op = OperatorNode::OP_EQUAL;
|
|
|
|
type_comp->arguments.push_back(typeof_match_value);
|
|
|
|
type_comp->arguments.push_back(typeof_array);
|
|
|
|
|
|
|
|
|
|
|
|
// size
|
|
|
|
ConstantNode *length = alloc_node<ConstantNode>();
|
|
|
|
length->value = Variant(open_ended ? p_pattern->array.size() - 1 : p_pattern->array.size());
|
|
|
|
|
|
|
|
OperatorNode *call = alloc_node<OperatorNode>();
|
|
|
|
call->op = OperatorNode::OP_CALL;
|
|
|
|
call->arguments.push_back(p_node_to_match);
|
|
|
|
|
|
|
|
IdentifierNode *size = alloc_node<IdentifierNode>();
|
|
|
|
size->name = "size";
|
|
|
|
call->arguments.push_back(size);
|
|
|
|
|
|
|
|
OperatorNode *length_comparison = alloc_node<OperatorNode>();
|
|
|
|
length_comparison->op = open_ended ? OperatorNode::OP_GREATER_EQUAL : OperatorNode::OP_EQUAL;
|
|
|
|
length_comparison->arguments.push_back(call);
|
|
|
|
length_comparison->arguments.push_back(length);
|
|
|
|
|
|
|
|
OperatorNode *type_and_length_comparison = alloc_node<OperatorNode>();
|
|
|
|
type_and_length_comparison->op = OperatorNode::OP_AND;
|
|
|
|
type_and_length_comparison->arguments.push_back(type_comp);
|
|
|
|
type_and_length_comparison->arguments.push_back(length_comparison);
|
|
|
|
|
|
|
|
p_resulting_node = type_and_length_comparison;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
for (int i = 0; i < p_pattern->array.size(); i++) {
|
|
|
|
PatternNode *pattern = p_pattern->array[i];
|
|
|
|
|
|
|
|
Node *condition = NULL;
|
|
|
|
|
|
|
|
ConstantNode *index = alloc_node<ConstantNode>();
|
|
|
|
index->value = Variant(i);
|
|
|
|
|
|
|
|
OperatorNode *indexed_value = alloc_node<OperatorNode>();
|
|
|
|
indexed_value->op = OperatorNode::OP_INDEX;
|
|
|
|
indexed_value->arguments.push_back(p_node_to_match);
|
|
|
|
indexed_value->arguments.push_back(index);
|
|
|
|
|
|
|
|
_generate_pattern(pattern, indexed_value, condition, p_bindings);
|
|
|
|
|
|
|
|
// concatenate all the patterns with &&
|
|
|
|
OperatorNode *and_node = alloc_node<OperatorNode>();
|
|
|
|
and_node->op = OperatorNode::OP_AND;
|
|
|
|
and_node->arguments.push_back(p_resulting_node);
|
|
|
|
and_node->arguments.push_back(condition);
|
|
|
|
|
|
|
|
p_resulting_node = and_node;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
} break;
|
|
|
|
case PatternNode::PT_DICTIONARY: {
|
2016-10-16 13:20:28 +02:00
|
|
|
|
|
|
|
bool open_ended = false;
|
|
|
|
|
|
|
|
if (p_pattern->array.size() > 0) {
|
|
|
|
open_ended = true;
|
|
|
|
}
|
|
|
|
|
|
|
|
// typeof(value_to_match) == TYPE_DICTIONARY && value_to_match.size() >= length
|
|
|
|
// typeof(value_to_match) == TYPE_DICTIONARY && value_to_match.size() == length
|
|
|
|
|
|
|
|
|
|
|
|
{
|
|
|
|
// typecheck
|
|
|
|
BuiltInFunctionNode *typeof_node = alloc_node<BuiltInFunctionNode>();
|
|
|
|
typeof_node->function = GDFunctions::TYPE_OF;
|
|
|
|
|
|
|
|
OperatorNode *typeof_match_value = alloc_node<OperatorNode>();
|
|
|
|
typeof_match_value->op = OperatorNode::OP_CALL;
|
|
|
|
typeof_match_value->arguments.push_back(typeof_node);
|
|
|
|
typeof_match_value->arguments.push_back(p_node_to_match);
|
|
|
|
|
|
|
|
IdentifierNode *typeof_dictionary = alloc_node<IdentifierNode>();
|
|
|
|
typeof_dictionary->name = "TYPE_DICTIONARY";
|
|
|
|
|
|
|
|
OperatorNode *type_comp = alloc_node<OperatorNode>();
|
|
|
|
type_comp->op = OperatorNode::OP_EQUAL;
|
|
|
|
type_comp->arguments.push_back(typeof_match_value);
|
|
|
|
type_comp->arguments.push_back(typeof_dictionary);
|
|
|
|
|
|
|
|
// size
|
|
|
|
ConstantNode *length = alloc_node<ConstantNode>();
|
|
|
|
length->value = Variant(open_ended ? p_pattern->dictionary.size() - 1 : p_pattern->dictionary.size());
|
|
|
|
|
|
|
|
OperatorNode *call = alloc_node<OperatorNode>();
|
|
|
|
call->op = OperatorNode::OP_CALL;
|
|
|
|
call->arguments.push_back(p_node_to_match);
|
|
|
|
|
|
|
|
IdentifierNode *size = alloc_node<IdentifierNode>();
|
|
|
|
size->name = "size";
|
|
|
|
call->arguments.push_back(size);
|
|
|
|
|
|
|
|
OperatorNode *length_comparison = alloc_node<OperatorNode>();
|
|
|
|
length_comparison->op = open_ended ? OperatorNode::OP_GREATER_EQUAL : OperatorNode::OP_EQUAL;
|
|
|
|
length_comparison->arguments.push_back(call);
|
|
|
|
length_comparison->arguments.push_back(length);
|
|
|
|
|
|
|
|
OperatorNode *type_and_length_comparison = alloc_node<OperatorNode>();
|
|
|
|
type_and_length_comparison->op = OperatorNode::OP_AND;
|
|
|
|
type_and_length_comparison->arguments.push_back(type_comp);
|
|
|
|
type_and_length_comparison->arguments.push_back(length_comparison);
|
|
|
|
|
|
|
|
p_resulting_node = type_and_length_comparison;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
for (Map<ConstantNode*, PatternNode*>::Element *e = p_pattern->dictionary.front(); e; e = e->next()) {
|
|
|
|
|
|
|
|
Node *condition = NULL;
|
|
|
|
|
|
|
|
// check for has(), then for the pattern
|
|
|
|
|
|
|
|
IdentifierNode *has = alloc_node<IdentifierNode>();
|
|
|
|
has->name = "has";
|
|
|
|
|
|
|
|
OperatorNode *has_call = alloc_node<OperatorNode>();
|
|
|
|
has_call->op = OperatorNode::OP_CALL;
|
|
|
|
has_call->arguments.push_back(p_node_to_match);
|
|
|
|
has_call->arguments.push_back(has);
|
|
|
|
has_call->arguments.push_back(e->key());
|
|
|
|
|
|
|
|
|
|
|
|
if (e->value()) {
|
|
|
|
|
|
|
|
OperatorNode *indexed_value = alloc_node<OperatorNode>();
|
|
|
|
indexed_value->op = OperatorNode::OP_INDEX;
|
|
|
|
indexed_value->arguments.push_back(p_node_to_match);
|
|
|
|
indexed_value->arguments.push_back(e->key());
|
|
|
|
|
|
|
|
_generate_pattern(e->value(), indexed_value, condition, p_bindings);
|
|
|
|
|
|
|
|
OperatorNode *has_and_pattern = alloc_node<OperatorNode>();
|
|
|
|
has_and_pattern->op = OperatorNode::OP_AND;
|
|
|
|
has_and_pattern->arguments.push_back(has_call);
|
|
|
|
has_and_pattern->arguments.push_back(condition);
|
|
|
|
|
|
|
|
condition = has_and_pattern;
|
|
|
|
|
|
|
|
} else {
|
|
|
|
condition = has_call;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// concatenate all the patterns with &&
|
|
|
|
OperatorNode *and_node = alloc_node<OperatorNode>();
|
|
|
|
and_node->op = OperatorNode::OP_AND;
|
|
|
|
and_node->arguments.push_back(p_resulting_node);
|
|
|
|
and_node->arguments.push_back(condition);
|
|
|
|
|
|
|
|
p_resulting_node = and_node;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
} break;
|
2016-10-16 13:20:28 +02:00
|
|
|
case PatternNode::PT_IGNORE_REST:
|
2016-10-05 18:48:38 +02:00
|
|
|
case PatternNode::PT_WILDCARD: {
|
|
|
|
// simply generate a `true`
|
|
|
|
ConstantNode *true_value = alloc_node<ConstantNode>();
|
|
|
|
true_value->value = Variant(true);
|
|
|
|
p_resulting_node = true_value;
|
|
|
|
} break;
|
|
|
|
default: {
|
|
|
|
|
|
|
|
} break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
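// Compiles every branch of a 'match' statement: the patterns of a branch are ORed
// together into a single condition, and binding assignments are prepended to the body.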
void GDParser::_transform_match_statment(BlockNode *p_block, MatchNode *p_match_statement)
|
|
|
|
{
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
2016-10-16 13:20:28 +02:00
|
|
|
id->name = "#match_value";
|
2016-10-05 18:48:38 +02:00
|
|
|
|
|
|
|
for (int i = 0; i < p_match_statement->branches.size(); i++) {
|
2016-10-16 13:20:28 +02:00
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
PatternBranchNode *branch = p_match_statement->branches[i];
|
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
MatchNode::CompiledPatternBranch compiled_branch;
|
|
|
|
compiled_branch.compiled_pattern = NULL;
|
|
|
|
|
|
|
|
Map<StringName, Node*> binding;
|
|
|
|
|
|
|
|
for (int j = 0; j < branch->patterns.size(); j++) {
|
|
|
|
PatternNode *pattern = branch->patterns[j];
|
|
|
|
|
|
|
|
Map<StringName, Node*> bindings;
|
|
|
|
Node *resulting_node;
|
|
|
|
_generate_pattern(pattern, id, resulting_node, bindings);
|
|
|
|
|
|
|
|
if (!binding.empty() && !bindings.empty()) {
|
|
|
|
_set_error("Multipatterns can't contain bindings");
|
|
|
|
return;
|
|
|
|
} else {
|
|
|
|
binding = bindings;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (compiled_branch.compiled_pattern) {
|
|
|
|
OperatorNode *or_node = alloc_node<OperatorNode>();
|
|
|
|
or_node->op = OperatorNode::OP_OR;
|
|
|
|
or_node->arguments.push_back(compiled_branch.compiled_pattern);
|
|
|
|
or_node->arguments.push_back(resulting_node);
|
|
|
|
|
|
|
|
compiled_branch.compiled_pattern = or_node;
|
|
|
|
} else {
|
|
|
|
// single pattern | first one
|
|
|
|
compiled_branch.compiled_pattern = resulting_node;
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
|
2016-10-16 13:20:28 +02:00
|
|
|
// prepare the body
|
|
|
|
for (Map<StringName, Node*>::Element *e = binding.front(); e; e = e->next()) {
|
|
|
|
LocalVarNode *local_var = alloc_node<LocalVarNode>();
|
|
|
|
local_var->name = e->key();
|
|
|
|
local_var->assign = e->value();
|
|
|
|
|
|
|
|
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
|
|
|
id->name = local_var->name;
|
|
|
|
|
|
|
|
OperatorNode *op = alloc_node<OperatorNode>();
|
|
|
|
op->op=OperatorNode::OP_ASSIGN;
|
|
|
|
op->arguments.push_back(id);
|
|
|
|
op->arguments.push_back(local_var->assign);
|
|
|
|
|
|
|
|
branch->body->statements.push_front(op);
|
|
|
|
branch->body->statements.push_front(local_var);
|
|
|
|
}
|
|
|
|
|
|
|
|
compiled_branch.body = branch->body;
|
|
|
|
|
|
|
|
|
|
|
|
p_match_statement->compiled_pattern_branches.push_back(compiled_branch);
|
2016-10-05 18:48:38 +02:00
|
|
|
}
|
|
|
|
|
2016-09-30 21:40:31 +02:00
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
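// Parses statements into p_block until the indentation drops below the block's
// level, an error occurs, or the end of file is reached.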
void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
|
|
|
|
|
|
|
|
int indent_level = tab_level.back()->get();
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef DEBUG_ENABLED
|
|
|
|
|
|
|
|
NewLineNode *nl = alloc_node<NewLineNode>();
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
nl->line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->statements.push_back(nl);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
while(true) {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
GDTokenizer::Token token = tokenizer->get_token();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
|
|
|
|
if (indent_level>tab_level.back()->get()) {
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return; //go back a level
|
|
|
|
}
|
|
|
|
|
2014-05-24 06:35:47 +02:00
|
|
|
if (pending_newline!=-1) {
|
|
|
|
|
|
|
|
NewLineNode *nl = alloc_node<NewLineNode>();
|
|
|
|
nl->line=pending_newline;
|
|
|
|
p_block->statements.push_back(nl);
|
|
|
|
pending_newline=-1;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
switch(token) {
|
|
|
|
|
|
|
|
|
|
|
|
case GDTokenizer::TK_EOF:
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
case GDTokenizer::TK_ERROR: {
|
|
|
|
return; //go back
|
|
|
|
|
|
|
|
//end of file!
|
|
|
|
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_NEWLINE: {
|
|
|
|
|
|
|
|
if (!_parse_newline()) {
|
|
|
|
if (!error_set) {
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-05-24 06:35:47 +02:00
|
|
|
pending_newline=p_block->end_line;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
2014-05-24 06:35:47 +02:00
|
|
|
|
|
|
|
NewLineNode *nl = alloc_node<NewLineNode>();
|
|
|
|
nl->line=tokenizer->get_token_line();
|
|
|
|
p_block->statements.push_back(nl);
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_PASS: {
|
2015-04-27 01:28:41 +02:00
|
|
|
if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE && tokenizer->get_token(1)!=GDTokenizer::TK_EOF) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected ';' or <NewLine>.");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2016-07-10 17:20:53 +02:00
|
|
|
if(tokenizer->get_token()==GDTokenizer::TK_SEMICOLON) {
|
|
|
|
// Ignore semicolon after 'pass'
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_VAR: {
|
|
|
|
//variable declaration and (eventual) initialization
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected identifier for local variable name.");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
StringName n = tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
2016-05-30 17:22:34 +02:00
|
|
|
if (current_function){
|
|
|
|
for (int i=0;i<current_function->arguments.size();i++){
|
|
|
|
if (n == current_function->arguments[i]){
|
|
|
|
_set_error("Variable '"+String(n)+"' already defined in the scope (at line: "+itos(current_function->line)+").");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
BlockNode *check_block = p_block;
|
|
|
|
while (check_block){
|
|
|
|
for (int i=0;i<check_block->variables.size();i++){
|
|
|
|
if (n == check_block->variables[i]){
|
|
|
|
_set_error("Variable '"+String(n)+"' already defined in the scope (at line: "+itos(check_block->variable_lines[i])+").");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
check_block = check_block->parent_block;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
p_block->variables.push_back(n); //line?
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->variable_lines.push_back(tokenizer->get_token_line());
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
//must know when the local variable is declared
|
|
|
|
LocalVarNode *lv = alloc_node<LocalVarNode>();
|
|
|
|
lv->name=n;
|
|
|
|
p_block->statements.push_back(lv);
|
|
|
|
|
|
|
|
Node *assigned=NULL;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
Node *subexpr=NULL;
|
|
|
|
|
|
|
|
subexpr = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!subexpr) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
lv->assign=subexpr;
|
|
|
|
assigned=subexpr;
|
|
|
|
} else {
|
|
|
|
|
|
|
|
ConstantNode *c = alloc_node<ConstantNode>();
|
|
|
|
c->value=Variant();
|
|
|
|
assigned = c;
|
|
|
|
|
|
|
|
}
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
|
|
|
id->name=n;
|
|
|
|
|
|
|
|
|
|
|
|
OperatorNode *op = alloc_node<OperatorNode>();
|
|
|
|
op->op=OperatorNode::OP_ASSIGN;
|
|
|
|
op->arguments.push_back(id);
|
|
|
|
op->arguments.push_back(assigned);
|
|
|
|
p_block->statements.push_back(op);
|
|
|
|
|
2015-06-24 18:29:23 +02:00
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (var)");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_IF: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2016-10-03 20:18:21 +02:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
Node *condition = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!condition) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
ControlFlowNode *cf_if = alloc_node<ControlFlowNode>();
|
|
|
|
|
|
|
|
cf_if->cf_type=ControlFlowNode::CF_IF;
|
|
|
|
cf_if->arguments.push_back(condition);
|
|
|
|
|
|
|
|
cf_if->body = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_if->body->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_if->body);
|
|
|
|
|
|
|
|
if (!_enter_indent_block(cf_if->body)) {
|
2015-12-29 15:41:37 +01:00
|
|
|
_set_error("Expected intended block after 'if'");
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=cf_if->body;
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_block(cf_if->body,p_static);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=p_block;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
p_block->statements.push_back(cf_if);
|
|
|
|
|
|
|
|
while(true) {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
while(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (tab_level.back()->get() < indent_level) { //not at current indent level
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_CF_ELIF) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
if (tab_level.back()->get() > indent_level) {
|
|
|
|
|
|
|
|
_set_error("Invalid indent");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
cf_if->body_else=alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_if->body_else->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_if->body_else);
|
|
|
|
|
|
|
|
ControlFlowNode *cf_else = alloc_node<ControlFlowNode>();
|
|
|
|
cf_else->cf_type=ControlFlowNode::CF_IF;
|
|
|
|
|
|
|
|
//condition
|
|
|
|
Node *condition = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!condition) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
cf_else->arguments.push_back(condition);
|
|
|
|
cf_else->cf_type=ControlFlowNode::CF_IF;
|
|
|
|
|
|
|
|
cf_if->body_else->statements.push_back(cf_else);
|
|
|
|
cf_if=cf_else;
|
|
|
|
cf_if->body=alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_if->body->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_if->body);
|
|
|
|
|
|
|
|
|
|
|
|
if (!_enter_indent_block(cf_if->body)) {
|
2015-12-29 15:41:37 +01:00
|
|
|
_set_error("Expected indented block after 'elif'");
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=cf_else->body;
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_block(cf_else->body,p_static);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
} else if (tokenizer->get_token()==GDTokenizer::TK_CF_ELSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
if (tab_level.back()->get() > indent_level) {
|
|
|
|
_set_error("Invalid indent");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
cf_if->body_else=alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_if->body_else->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_if->body_else);
|
|
|
|
|
|
|
|
if (!_enter_indent_block(cf_if->body_else)) {
|
2015-12-29 15:41:37 +01:00
|
|
|
_set_error("Expected indented block after 'else'");
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=cf_if->body_else;
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_block(cf_if->body_else,p_static);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
|
|
|
|
|
|
|
|
break; //after else, exit
|
|
|
|
|
|
|
|
} else
|
|
|
|
break;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_WHILE: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
Node *condition = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!condition) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
ControlFlowNode *cf_while = alloc_node<ControlFlowNode>();
|
|
|
|
|
|
|
|
cf_while->cf_type=ControlFlowNode::CF_WHILE;
|
|
|
|
cf_while->arguments.push_back(condition);
|
|
|
|
|
|
|
|
cf_while->body = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_while->body->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_while->body);
|
|
|
|
|
|
|
|
if (!_enter_indent_block(cf_while->body)) {
|
2015-12-29 15:41:37 +01:00
|
|
|
_set_error("Expected indented block after 'while'");
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=cf_while->body;
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_block(cf_while->body,p_static);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
p_block->statements.push_back(cf_while);
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_FOR: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("identifier expected after 'for'");
|
|
|
|
}
|
|
|
|
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
2014-02-25 13:31:47 +01:00
|
|
|
id->name=tokenizer->get_token_identifier();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_OP_IN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
_set_error("'in' expected after identifier");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
Node *container = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!container) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2017-01-12 00:09:45 +01:00
|
|
|
if (container->type==Node::TYPE_OPERATOR) {
|
|
|
|
|
|
|
|
OperatorNode* op = static_cast<OperatorNode*>(container);
|
|
|
|
if (op->op==OperatorNode::OP_CALL && op->arguments[0]->type==Node::TYPE_BUILT_IN_FUNCTION && static_cast<BuiltInFunctionNode*>(op->arguments[0])->function==GDFunctions::GEN_RANGE) {
|
|
|
|
//iterating a range, so see if range() can be optimized without allocating memory, by replacing it with vectors (which can work as iterables too!)
|
|
|
|
|
|
|
|
Vector<Node*> args;
|
|
|
|
Vector<double> constants;
|
|
|
|
|
|
|
|
bool constant=true;
|
|
|
|
|
|
|
|
for(int i=1;i<op->arguments.size();i++) {
|
|
|
|
args.push_back(op->arguments[i]);
|
|
|
|
if (constant && op->arguments[i]->type==Node::TYPE_CONSTANT) {
|
|
|
|
ConstantNode *c = static_cast<ConstantNode*>(op->arguments[i]);
|
|
|
|
if (c->value.get_type()==Variant::REAL || c->value.get_type()==Variant::INT) {
|
|
|
|
constants.push_back(c->value);
|
|
|
|
} else {
|
|
|
|
constant=false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (args.size()>0 && args.size()<4) {
|
|
|
|
|
|
|
|
if (constant) {
|
|
|
|
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
switch(args.size()) {
|
|
|
|
case 1: cn->value=constants[0]; break;
|
|
|
|
case 2: cn->value=Vector2(constants[0],constants[1]); break;
|
|
|
|
case 3: cn->value=Vector3(constants[0],constants[1],constants[2]); break;
|
|
|
|
}
|
|
|
|
container=cn;
|
|
|
|
} else {
|
|
|
|
OperatorNode *on = alloc_node<OperatorNode>();
|
|
|
|
on->op=OperatorNode::OP_CALL;
|
|
|
|
|
|
|
|
TypeNode *tn = alloc_node<TypeNode>();
|
|
|
|
on->arguments.push_back(tn);
|
|
|
|
|
|
|
|
switch(args.size()) {
|
|
|
|
case 1: tn->vtype=Variant::REAL; break;
|
|
|
|
case 2: tn->vtype=Variant::VECTOR2; break;
|
|
|
|
case 3: tn->vtype=Variant::VECTOR3; break;
|
|
|
|
}
|
|
|
|
|
|
|
|
for(int i=0;i<args.size();i++) {
|
|
|
|
on->arguments.push_back(args[i]);
|
|
|
|
}
|
|
|
|
|
|
|
|
container=on;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
ControlFlowNode *cf_for = alloc_node<ControlFlowNode>();
|
|
|
|
|
|
|
|
cf_for->cf_type=ControlFlowNode::CF_FOR;
|
|
|
|
cf_for->arguments.push_back(id);
|
|
|
|
cf_for->arguments.push_back(container);
|
|
|
|
|
|
|
|
cf_for->body = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
cf_for->body->parent_block=p_block;
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->sub_blocks.push_back(cf_for->body);
|
|
|
|
|
|
|
|
if (!_enter_indent_block(cf_for->body)) {
|
2016-02-24 00:08:53 +01:00
|
|
|
_set_error("Expected indented block after 'for'");
|
2014-02-25 13:31:47 +01:00
|
|
|
p_block->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=cf_for->body;
|
2016-05-30 17:22:34 +02:00
|
|
|
|
|
|
|
// this registers the loop variable so it is checked for redefinition
|
|
|
|
// inside this _parse_block
|
|
|
|
cf_for->body->variables.push_back(id->name);
|
|
|
|
cf_for->body->variable_lines.push_back(id->line);
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_block(cf_for->body,p_static);
|
2016-05-30 17:22:34 +02:00
|
|
|
cf_for->body->variables.remove(0);
|
|
|
|
cf_for->body->variable_lines.remove(0);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=p_block;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
p_block->statements.push_back(cf_for);
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_CONTINUE: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
ControlFlowNode *cf_continue = alloc_node<ControlFlowNode>();
|
|
|
|
cf_continue->cf_type=ControlFlowNode::CF_CONTINUE;
|
|
|
|
p_block->statements.push_back(cf_continue);
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (continue)");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_BREAK: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
ControlFlowNode *cf_break = alloc_node<ControlFlowNode>();
|
|
|
|
cf_break->cf_type=ControlFlowNode::CF_BREAK;
|
|
|
|
p_block->statements.push_back(cf_break);
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (break)");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_RETURN: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
ControlFlowNode *cf_return = alloc_node<ControlFlowNode>();
|
|
|
|
cf_return->cf_type=ControlFlowNode::CF_RETURN;
|
|
|
|
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON || tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
|
2014-02-10 02:10:30 +01:00
|
|
|
//expect end of statement
|
|
|
|
p_block->statements.push_back(cf_return);
|
|
|
|
if (!_end_statement()) {
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
//expect expression
|
|
|
|
Node *retexpr = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!retexpr) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
cf_return->arguments.push_back(retexpr);
|
|
|
|
p_block->statements.push_back(cf_return);
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement after return expression.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-09-30 21:40:31 +02:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_CF_MATCH: {
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
MatchNode *match_node = alloc_node<MatchNode>();
|
2016-09-30 21:40:31 +02:00
|
|
|
|
|
|
|
Node *val_to_match = _parse_and_reduce_expression(p_block, p_static);
|
|
|
|
|
|
|
|
if (!val_to_match) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
match_node->val_to_match = val_to_match;
|
2016-09-30 21:40:31 +02:00
|
|
|
|
|
|
|
if (!_enter_indent_block()) {
|
|
|
|
_set_error("Expected indented pattern matching block after 'match'");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-10-05 18:48:38 +02:00
|
|
|
BlockNode *compiled_branches = alloc_node<BlockNode>();
|
|
|
|
compiled_branches->parent_block = p_block;
|
|
|
|
compiled_branches->parent_class = p_block->parent_class;
|
|
|
|
|
|
|
|
p_block->sub_blocks.push_back(compiled_branches);
|
|
|
|
|
|
|
|
_parse_pattern_block(compiled_branches, match_node->branches, p_static);
|
|
|
|
|
|
|
|
_transform_match_statment(compiled_branches, match_node);
|
|
|
|
|
|
|
|
ControlFlowNode *match_cf_node = alloc_node<ControlFlowNode>();
|
|
|
|
match_cf_node->cf_type = ControlFlowNode::CF_MATCH;
|
|
|
|
match_cf_node->match = match_node;
|
|
|
|
|
|
|
|
p_block->statements.push_back(match_cf_node);
|
|
|
|
|
|
|
|
_end_statement();
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_ASSERT: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
Node *condition = _parse_and_reduce_expression(p_block,p_static);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!condition) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
AssertNode *an = alloc_node<AssertNode>();
|
|
|
|
an->condition=condition;
|
|
|
|
p_block->statements.push_back(an);
|
|
|
|
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement after assert.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
2015-12-29 16:11:21 +01:00
|
|
|
case GDTokenizer::TK_PR_BREAKPOINT: {
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
BreakpointNode *bn = alloc_node<BreakpointNode>();
|
|
|
|
p_block->statements.push_back(bn);
|
|
|
|
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement after breakpoint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
2014-02-10 02:10:30 +01:00
|
|
|
default: {
|
|
|
|
|
2017-01-02 20:01:27 +01:00
|
|
|
Node *expression = _parse_and_reduce_expression(p_block,p_static,false,true);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!expression) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
p_block->statements.push_back(expression);
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement after expression.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
} break;
|
|
|
|
/*
|
|
|
|
case GDTokenizer::TK_CF_LOCAL: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected ';' or <NewLine>.");
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
*/
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
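// Consumes a newline and updates the indentation stack. Returns false when the
// indentation decreased (the caller should close its block) or on error.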
bool GDParser::_parse_newline() {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token(1)!=GDTokenizer::TK_EOF && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
int indent = tokenizer->get_token_line_indent();
|
2014-02-10 02:10:30 +01:00
|
|
|
int current_indent = tab_level.back()->get();
|
|
|
|
|
|
|
|
if (indent>current_indent) {
|
|
|
|
_set_error("Unexpected indent.");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (indent<current_indent) {
|
|
|
|
|
|
|
|
while(indent<current_indent) {
|
|
|
|
|
|
|
|
//exit block
|
|
|
|
if (tab_level.size()==1) {
|
|
|
|
_set_error("Invalid indent. BUG?");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
tab_level.pop_back();
|
|
|
|
|
|
|
|
if (tab_level.back()->get()<indent) {
|
|
|
|
|
|
|
|
_set_error("Unindent does not match any outer indentation level.");
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
current_indent = tab_level.back()->get();
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
return true;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
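// Parses an 'extends' clause: a built-in type, a base script path (string constant),
// and/or a '.'-separated chain of class identifiers.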
void GDParser::_parse_extends(ClassNode *p_class) {
|
|
|
|
|
|
|
|
|
|
|
|
if (p_class->extends_used) {
|
|
|
|
|
|
|
|
_set_error("'extends' already used for this class.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!p_class->constant_expressions.empty() || !p_class->subclasses.empty() || !p_class->functions.empty() || !p_class->variables.empty()) {
|
|
|
|
|
|
|
|
_set_error("'extends' must be used before anything else.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
p_class->extends_used=true;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-11-12 15:50:20 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE && tokenizer->get_token_type()==Variant::OBJECT) {
|
|
|
|
p_class->extends_class.push_back(Variant::get_type_name(Variant::OBJECT));
|
|
|
|
tokenizer->advance();
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
// see if inheritance happens from a file
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
Variant constant = tokenizer->get_token_constant();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (constant.get_type()!=Variant::STRING) {
|
|
|
|
|
|
|
|
_set_error("'extends' constant must be a string.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
p_class->extends_file=constant;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD) {
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
} else
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
while(true) {
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Invalid 'extends' syntax, expected string constant (path) and/or identifier (parent class).");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
StringName identifier=tokenizer->get_token_identifier();
|
2014-02-10 02:10:30 +01:00
|
|
|
p_class->extends_class.push_back(identifier);
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance(1);
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD)
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
void GDParser::_parse_class(ClassNode *p_class) {
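// Class body loop: dispatches on the current token to parse 'extends', 'tool',
// inner classes, functions, signals, exports, member variables, constants and
// enums, until the indent level drops back or EOF/error is reached.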
|
|
|
|
|
|
|
|
int indent_level = tab_level.back()->get();
|
|
|
|
|
|
|
|
while(true) {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
GDTokenizer::Token token = tokenizer->get_token();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
if (indent_level>tab_level.back()->get()) {
|
2014-02-25 13:31:47 +01:00
|
|
|
p_class->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
return; //go back a level
|
|
|
|
}
|
|
|
|
|
|
|
|
switch(token) {
|
|
|
|
|
|
|
|
case GDTokenizer::TK_EOF:
|
2014-02-25 13:31:47 +01:00
|
|
|
p_class->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
case GDTokenizer::TK_ERROR: {
|
|
|
|
return; //go back
|
|
|
|
//end of file!
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_NEWLINE: {
|
|
|
|
if (!_parse_newline()) {
|
|
|
|
if (!error_set) {
|
2014-02-25 13:31:47 +01:00
|
|
|
p_class->end_line=tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_EXTENDS: {
|
|
|
|
|
|
|
|
_parse_extends(p_class);
|
|
|
|
if (error_set)
|
|
|
|
return;
|
2015-06-24 18:29:23 +02:00
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement after extends");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_TOOL: {
|
|
|
|
|
|
|
|
if (p_class->tool) {
|
|
|
|
|
|
|
|
_set_error("tool used more than once");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
p_class->tool=true;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_CLASS: {
|
|
|
|
//class inside class :D
|
|
|
|
|
|
|
|
StringName name;
|
|
|
|
StringName extends;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("'class' syntax: 'class <Name>:' or 'class <Name> extends <BaseClass>:'");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
name = tokenizer->get_token_identifier(1);
|
|
|
|
tokenizer->advance(2);
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
ClassNode *newclass = alloc_node<ClassNode>();
|
|
|
|
newclass->initializer = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
newclass->initializer->parent_class=newclass;
|
2015-12-30 01:23:26 +01:00
|
|
|
newclass->ready = alloc_node<BlockNode>();
|
|
|
|
newclass->ready->parent_class=newclass;
|
2014-02-10 02:10:30 +01:00
|
|
|
newclass->name=name;
|
2014-12-17 02:31:57 +01:00
|
|
|
newclass->owner=p_class;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
p_class->subclasses.push_back(newclass);
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PR_EXTENDS) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_parse_extends(newclass);
|
|
|
|
if (error_set)
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!_enter_indent_block()) {
|
|
|
|
|
|
|
|
_set_error("Indented block expected.");
|
|
|
|
return;
|
|
|
|
}
|
2014-12-17 02:31:57 +01:00
|
|
|
current_class=newclass;
|
2014-02-10 02:10:30 +01:00
|
|
|
_parse_class(newclass);
|
2014-12-17 02:31:57 +01:00
|
|
|
current_class=p_class;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
/* this is for functions....
|
|
|
|
case GDTokenizer::TK_CF_PASS: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance(1);
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
*/
|
|
|
|
case GDTokenizer::TK_PR_STATIC: {
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected 'func'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
}; //fallthrough to function
|
|
|
|
case GDTokenizer::TK_PR_FUNCTION: {
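// Parses 'func name(args):', optionally preceded by 'static'. When the function
// is '_init' and the class inherits, an implicit parent constructor call
// ('._init(...)') is prepended to the body.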
|
|
|
|
|
|
|
|
bool _static=false;
|
2014-05-24 06:35:47 +02:00
|
|
|
pending_newline=-1;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token(-1)==GDTokenizer::TK_PR_STATIC) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_static=true;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-01-03 17:03:13 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
StringName name;
|
|
|
|
|
|
|
|
if (_get_completable_identifier(COMPLETION_VIRTUAL_FUNC,name)) {
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
if (name==StringName()) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected identifier after 'func' (syntax: 'func <identifier>([arguments]):' ).");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
for(int i=0;i<p_class->functions.size();i++) {
|
|
|
|
if (p_class->functions[i]->name==name) {
|
|
|
|
_set_error("Function '"+String(name)+"' already exists in this class (at line: "+itos(p_class->functions[i]->line)+").");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for(int i=0;i<p_class->static_functions.size();i++) {
|
|
|
|
if (p_class->static_functions[i]->name==name) {
|
|
|
|
_set_error("Function '"+String(name)+"' already exists in this class (at line: "+itos(p_class->static_functions[i]->line)+").");
|
|
|
|
}
|
|
|
|
}
|
2015-01-03 17:03:13 +01:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected '(' after identifier (syntax: 'func <identifier>([arguments]):' ).");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
Vector<StringName> arguments;
|
|
|
|
Vector<Node*> default_values;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
int fnline = tokenizer->get_token_line();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
//has arguments
|
|
|
|
bool defaulting=false;
|
|
|
|
while(true) {
|
|
|
|
|
2016-10-03 20:18:21 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PR_VAR) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance(); //var before the identifier is allowed
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected identifier for argument.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
StringName argname=tokenizer->get_token_identifier();
|
2014-02-10 02:10:30 +01:00
|
|
|
arguments.push_back(argname);
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (defaulting && tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Default parameter expected.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
//tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
defaulting=true;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance(1);
|
2014-02-10 02:10:30 +01:00
|
|
|
Node *defval=NULL;
|
|
|
|
|
|
|
|
defval=_parse_and_reduce_expression(p_class,_static);
|
|
|
|
if (!defval || error_set)
|
|
|
|
return;
|
|
|
|
|
|
|
|
OperatorNode *on = alloc_node<OperatorNode>();
|
|
|
|
on->op=OperatorNode::OP_ASSIGN;
|
|
|
|
|
|
|
|
IdentifierNode *in = alloc_node<IdentifierNode>();
|
|
|
|
in->name=argname;
|
|
|
|
|
|
|
|
on->arguments.push_back(in);
|
|
|
|
on->arguments.push_back(defval);
|
|
|
|
/* no ..
|
|
|
|
if (defval->type!=Node::TYPE_CONSTANT) {
|
|
|
|
|
|
|
|
_set_error("default argument must be constant");
|
|
|
|
}
|
|
|
|
*/
|
|
|
|
default_values.push_back(on);
|
|
|
|
}
|
|
|
|
|
2016-10-03 20:18:21 +02:00
|
|
|
while (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
continue;
|
2014-02-25 13:31:47 +01:00
|
|
|
} else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected ',' or ')'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
BlockNode *block = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
block->parent_class=p_class;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
if (name=="_init") {
|
|
|
|
|
|
|
|
if (p_class->extends_used) {
|
|
|
|
|
|
|
|
OperatorNode *cparent = alloc_node<OperatorNode>();
|
|
|
|
cparent->op=OperatorNode::OP_PARENT_CALL;
|
|
|
|
block->statements.push_back(cparent);
|
|
|
|
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
|
|
|
id->name="_init";
|
|
|
|
cparent->arguments.push_back(id);
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
_set_error("expected '(' for parent constructor arguments.");
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
//has arguments
|
2016-10-03 20:18:21 +02:00
|
|
|
parenthesis ++;
|
2014-02-10 02:10:30 +01:00
|
|
|
while(true) {
|
|
|
|
|
|
|
|
Node *arg = _parse_and_reduce_expression(p_class,_static);
|
|
|
|
cparent->arguments.push_back(arg);
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
continue;
|
2014-02-25 13:31:47 +01:00
|
|
|
} else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected ',' or ')'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
break;
|
|
|
|
|
|
|
|
}
|
2016-10-03 20:18:21 +02:00
|
|
|
parenthesis --;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Parent constructor call found for a class without inheritance.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!_enter_indent_block(block)) {
|
|
|
|
|
|
|
|
_set_error("Indented block expected.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
FunctionNode *function = alloc_node<FunctionNode>();
|
|
|
|
function->name=name;
|
|
|
|
function->arguments=arguments;
|
|
|
|
function->default_values=default_values;
|
|
|
|
function->_static=_static;
|
|
|
|
function->line=fnline;
|
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
function->rpc_mode=rpc_mode;
|
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_DISABLED;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
if (_static)
|
|
|
|
p_class->static_functions.push_back(function);
|
|
|
|
else
|
|
|
|
p_class->functions.push_back(function);
|
|
|
|
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
current_function=function;
|
2014-02-10 02:10:30 +01:00
|
|
|
function->body=block;
|
2014-12-17 02:31:57 +01:00
|
|
|
current_block=block;
|
|
|
|
_parse_block(block,_static);
|
|
|
|
current_block=NULL;
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
//arguments
|
|
|
|
} break;
|
2015-06-24 18:29:23 +02:00
|
|
|
case GDTokenizer::TK_PR_SIGNAL: {
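// Parses 'signal name' or 'signal name(arg1, arg2, ...)' into ClassNode::Signal.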
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
_set_error("Expected identifier after 'signal'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
ClassNode::Signal sig;
|
|
|
|
sig.name = tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
|
|
|
|
tokenizer->advance();
|
|
|
|
while(true) {
|
2016-10-03 20:18:21 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
continue;
|
|
|
|
}
|
2015-06-24 18:29:23 +02:00
|
|
|
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
_set_error("Expected identifier in signal argument.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
sig.arguments.push_back(tokenizer->get_token_identifier());
|
|
|
|
tokenizer->advance();
|
|
|
|
|
2016-10-03 20:18:21 +02:00
|
|
|
while (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
2015-06-24 18:29:23 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
} else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
_set_error("Expected ',' or ')' after signal parameter identifier.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
p_class->_signals.push_back(sig);
|
|
|
|
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (signal)");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
2014-02-10 02:10:30 +01:00
|
|
|
case GDTokenizer::TK_PR_EXPORT: {
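// Parses 'export' and its optional hint, e.g. 'export(int, 0, 100) var x' or
// 'export(String, FILE, "*.png") var path'. The resulting PropertyInfo is kept
// in current_export until the following 'var' declaration consumes it.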
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
Variant::Type type = tokenizer->get_token_type();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (type==Variant::NIL) {
|
|
|
|
_set_error("Can't export null type.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
current_export.type=type;
|
2016-06-11 23:31:22 +02:00
|
|
|
current_export.usage|=PROPERTY_USAGE_SCRIPT_VARIABLE;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2016-10-26 13:38:41 +02:00
|
|
|
|
|
|
|
String hint_prefix ="";
|
|
|
|
|
|
|
|
if(type == Variant::ARRAY && tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
while(tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
|
|
|
|
type = tokenizer->get_token_type();
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if(type == Variant::ARRAY) {
|
|
|
|
hint_prefix += itos(Variant::ARRAY)+":";
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
hint_prefix += itos(type);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
// hint expected next!
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2016-10-26 13:38:41 +02:00
|
|
|
|
|
|
|
switch(type) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
|
|
|
case Variant::INT: {
|
|
|
|
|
2015-07-20 20:02:46 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="FLAGS") {
|
|
|
|
|
2017-01-11 02:20:57 +01:00
|
|
|
//current_export.hint=PROPERTY_HINT_ALL_FLAGS;
|
2015-07-20 20:02:46 +02:00
|
|
|
tokenizer->advance();
|
2015-12-11 15:15:57 +01:00
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
|
|
|
_set_error("Expected ')' or ',' in bit flags hint.");
|
2015-07-20 20:02:46 +02:00
|
|
|
return;
|
|
|
|
}
|
2015-12-11 15:15:57 +01:00
|
|
|
|
|
|
|
current_export.hint=PROPERTY_HINT_FLAGS;
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
bool first = true;
|
|
|
|
while(true) {
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a string constant in named bit flags hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
String c = tokenizer->get_token_constant();
|
|
|
|
if (!first)
|
|
|
|
current_export.hint_string+=",";
|
|
|
|
else
|
|
|
|
first=false;
|
|
|
|
|
|
|
|
current_export.hint_string+=c.xml_escape();
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
|
|
|
break;
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ')' or ',' in named bit flags hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
2015-07-20 20:02:46 +02:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
|
2014-02-10 02:10:30 +01:00
|
|
|
//enumeration
|
|
|
|
current_export.hint=PROPERTY_HINT_ENUM;
|
|
|
|
bool first=true;
|
|
|
|
while(true) {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a string constant in enumeration hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
String c = tokenizer->get_token_constant();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (!first)
|
|
|
|
current_export.hint_string+=",";
|
|
|
|
else
|
|
|
|
first=false;
|
|
|
|
|
|
|
|
current_export.hint_string+=c.xml_escape();
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
2014-02-10 02:10:30 +01:00
|
|
|
break;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ')' or ',' in enumeration hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
}; //fallthrough to use the same
|
2014-02-10 02:10:30 +01:00
|
|
|
case Variant::REAL: {
|
|
|
|
|
2015-10-16 20:37:13 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="EASE") {
|
|
|
|
current_export.hint=PROPERTY_HINT_EXP_EASING;
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
_set_error("Expected ')' in hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2015-12-11 06:45:03 +01:00
|
|
|
// range
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="EXP") {
|
|
|
|
|
|
|
|
current_export.hint=PROPERTY_HINT_EXP_RANGE;
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
|
|
|
break;
|
|
|
|
else if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
|
|
|
_set_error("Expected ')' or ',' in exponential range hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
else
|
|
|
|
current_export.hint=PROPERTY_HINT_RANGE;
|
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
float sign=1.0;
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_SUB) {
|
|
|
|
sign=-1;
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a range in numeric hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
current_export.hint_string=rtos(sign*double(tokenizer->get_token_constant()));
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
current_export.hint_string="0,"+current_export.hint_string;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ',' or ')' in numeric range hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
sign=1.0;
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_SUB) {
|
|
|
|
sign=-1;
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a number as upper bound in numeric range hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
current_export.hint_string+=","+rtos(sign*double(tokenizer->get_token_constant()));
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
2014-02-10 02:10:30 +01:00
|
|
|
break;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ',' or ')' in numeric range hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-09-21 06:43:42 +02:00
|
|
|
sign=1.0;
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_SUB) {
|
|
|
|
sign=-1;
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a number as step in numeric range hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-09-21 06:43:42 +02:00
|
|
|
current_export.hint_string+=","+rtos(sign*double(tokenizer->get_token_constant()));
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
case Variant::STRING: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
|
2014-02-10 02:10:30 +01:00
|
|
|
//enumeration
|
|
|
|
current_export.hint=PROPERTY_HINT_ENUM;
|
|
|
|
bool first=true;
|
|
|
|
while(true) {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected a string constant in enumeration hint.");
|
2014-09-19 23:39:50 +02:00
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
String c = tokenizer->get_token_constant();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (!first)
|
|
|
|
current_export.hint_string+=",";
|
|
|
|
else
|
|
|
|
first=false;
|
|
|
|
|
|
|
|
current_export.hint_string+=c.xml_escape();
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
2014-02-10 02:10:30 +01:00
|
|
|
break;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ')' or ',' in enumeration hint.");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="DIR") {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2015-12-11 06:45:03 +01:00
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
|
|
|
current_export.hint=PROPERTY_HINT_DIR;
|
|
|
|
else if (tokenizer->get_token()==GDTokenizer::TK_COMMA ) {
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER || !(tokenizer->get_token_identifier()=="GLOBAL")) {
|
|
|
|
_set_error("Expected 'GLOBAL' after comma in directory hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (!p_class->tool) {
|
|
|
|
_set_error("Global filesystem hints may only be used in tool scripts.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
current_export.hint=PROPERTY_HINT_GLOBAL_DIR;
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
_set_error("Expected ')' in hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
_set_error("Expected ')' or ',' in hint.");
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="FILE") {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export.hint=PROPERTY_HINT_FILE;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2015-12-11 06:45:03 +01:00
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="GLOBAL") {
|
|
|
|
|
|
|
|
if (!p_class->tool) {
|
|
|
|
_set_error("Global filesystem hints may only be used in tool scripts.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
current_export.hint=PROPERTY_HINT_GLOBAL_FILE;
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
|
|
|
|
break;
|
|
|
|
else if (tokenizer->get_token()==GDTokenizer::TK_COMMA)
|
|
|
|
tokenizer->advance();
|
|
|
|
else {
|
|
|
|
_set_error("Expected ')' or ',' in hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-12-11 06:45:03 +01:00
|
|
|
if (current_export.hint==PROPERTY_HINT_GLOBAL_FILE)
|
|
|
|
_set_error("Expected string constant with filter");
|
|
|
|
else
|
|
|
|
_set_error("Expected 'GLOBAL' or string constant with filter");
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
current_export.hint_string=tokenizer->get_token_constant();
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
_set_error("Expected ')' in hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2015-10-16 16:39:59 +02:00
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="MULTILINE") {
|
|
|
|
|
|
|
|
current_export.hint=PROPERTY_HINT_MULTILINE_TEXT;
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
|
|
|
_set_error("Expected ')' in hint.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
case Variant::COLOR: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER ) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Color type hint expects RGB or RGBA as hints");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
String identifier = tokenizer->get_token_identifier();
|
2014-02-10 02:10:30 +01:00
|
|
|
if (identifier=="RGB") {
|
|
|
|
current_export.hint=PROPERTY_HINT_COLOR_NO_ALPHA;
|
|
|
|
} else if (identifier=="RGBA") {
|
|
|
|
//none
|
|
|
|
} else {
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Color type hint expects RGB or RGBA as hints");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
} break;
|
|
|
|
default: {
|
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Type '"+Variant::get_type_name(type)+"' can't take hints.");
|
|
|
|
return;
|
|
|
|
} break;
|
|
|
|
}
|
2016-10-26 13:38:41 +02:00
|
|
|
|
|
|
|
}
|
|
|
|
if(current_export.type == Variant::ARRAY && !hint_prefix.empty()) {
|
|
|
|
if(current_export.hint) {
|
|
|
|
hint_prefix += "/"+itos(current_export.hint);
|
|
|
|
}
|
|
|
|
current_export.hint_string=hint_prefix+":"+current_export.hint_string;
|
|
|
|
current_export.hint=PROPERTY_HINT_NONE;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
} else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
String identifier = tokenizer->get_token_identifier();
|
2017-01-03 03:03:46 +01:00
|
|
|
if (!ClassDB::is_parent_class(identifier,"Resource")) {
|
2015-10-16 16:18:46 +02:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Export hint not a type or resource.");
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
2015-10-16 16:18:46 +02:00
|
|
|
|
|
|
|
current_export.type=Variant::OBJECT;
|
|
|
|
current_export.hint=PROPERTY_HINT_RESOURCE_TYPE;
|
2016-06-11 23:31:22 +02:00
|
|
|
current_export.usage|=PROPERTY_USAGE_SCRIPT_VARIABLE;
|
|
|
|
|
2015-10-16 16:18:46 +02:00
|
|
|
current_export.hint_string=identifier;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
_set_error("Expected ')' or ',' after export hint.");
|
|
|
|
return;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR && tokenizer->get_token()!=GDTokenizer::TK_PR_ONREADY && tokenizer->get_token()!=GDTokenizer::TK_PR_REMOTE && tokenizer->get_token()!=GDTokenizer::TK_PR_MASTER && tokenizer->get_token()!=GDTokenizer::TK_PR_SLAVE && tokenizer->get_token()!=GDTokenizer::TK_PR_SYNC) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
current_export=PropertyInfo();
|
2016-08-19 21:48:08 +02:00
|
|
|
_set_error("Expected 'var', 'onready', 'remote', 'master', 'slave' or 'sync'.");
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
continue;
|
|
|
|
} break;
|
2015-12-28 23:31:52 +01:00
|
|
|
case GDTokenizer::TK_PR_ONREADY: {
|
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
//may be fallthrough from export, ignore if so
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
|
|
|
|
_set_error("Expected 'var'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
continue;
|
|
|
|
} break;
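// 'remote', 'master', 'slave' and 'sync' set rpc_mode for the following 'var'
// or 'func' declaration (only 'var' is allowed when falling through from export).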
|
|
|
|
case GDTokenizer::TK_PR_REMOTE: {
|
|
|
|
|
|
|
|
//may be fallthrough from export, ignore if so
|
|
|
|
tokenizer->advance();
|
|
|
|
if (current_export.type) {
|
2015-12-28 23:31:52 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
|
|
|
|
_set_error("Expected 'var'.");
|
|
|
|
return;
|
|
|
|
}
|
2016-08-19 21:48:08 +02:00
|
|
|
|
|
|
|
} else {
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR && tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
|
|
|
|
_set_error("Expected 'var' or 'func'.");
|
|
|
|
return;
|
|
|
|
}
|
2015-12-28 23:31:52 +01:00
|
|
|
}
|
2016-08-19 21:48:08 +02:00
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_REMOTE;
|
|
|
|
|
|
|
|
continue;
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_MASTER: {
|
|
|
|
|
|
|
|
//may be fallthrough from export, ignore if so
|
|
|
|
tokenizer->advance();
|
|
|
|
if (current_export.type) {
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
|
|
|
|
_set_error("Expected 'var'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
} else {
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR && tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
|
|
|
|
_set_error("Expected 'var' or 'func'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_MASTER;
|
|
|
|
continue;
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_SLAVE: {
|
|
|
|
|
|
|
|
//may be fallthrough from export, ignore if so
|
|
|
|
tokenizer->advance();
|
|
|
|
if (current_export.type) {
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
|
|
|
|
_set_error("Expected 'var'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
} else {
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR && tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
|
|
|
|
_set_error("Expected 'var' or 'func'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_SLAVE;
|
|
|
|
continue;
|
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_SYNC: {
|
|
|
|
|
|
|
|
//may be fallthrough from export, ignore if so
|
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR && tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
|
|
|
|
if (current_export.type)
|
|
|
|
_set_error("Expected 'var'.");
|
|
|
|
else
|
|
|
|
_set_error("Expected 'var' or 'func'.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_SYNC;
|
|
|
|
continue;
|
|
|
|
} break;
|
2014-02-10 02:10:30 +01:00
|
|
|
case GDTokenizer::TK_PR_VAR: {
|
|
|
|
//variable declaration and (eventual) initialization
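// Export/onready/rpc modifiers parsed just before this keyword are picked up here
// via current_export, the onready flag and rpc_mode; 'setget setter,getter' may
// follow the initializer.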
|
|
|
|
|
|
|
|
ClassNode::Member member;
|
2014-02-25 13:31:47 +01:00
|
|
|
bool autoexport = tokenizer->get_token(-1)==GDTokenizer::TK_PR_EXPORT;
|
2014-02-10 02:10:30 +01:00
|
|
|
if (current_export.type!=Variant::NIL) {
|
|
|
|
member._export=current_export;
|
|
|
|
current_export=PropertyInfo();
|
|
|
|
}
|
|
|
|
|
2015-12-28 23:31:52 +01:00
|
|
|
bool onready = tokenizer->get_token(-1)==GDTokenizer::TK_PR_ONREADY;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected identifier for member variable name.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
member.identifier=tokenizer->get_token_identifier();
|
2014-12-17 02:31:57 +01:00
|
|
|
member.expression=NULL;
|
2014-02-10 02:10:30 +01:00
|
|
|
member._export.name=member.identifier;
|
2014-12-17 02:31:57 +01:00
|
|
|
member.line=tokenizer->get_token_line();
|
2016-08-19 21:48:08 +02:00
|
|
|
member.rpc_mode=rpc_mode;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2016-08-19 21:48:08 +02:00
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_DISABLED;
|
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
#ifdef DEBUG_ENABLED
|
|
|
|
int line = tokenizer->get_token_line();
|
|
|
|
#endif
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
Node *subexpr=NULL;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-08-26 01:45:51 +02:00
|
|
|
subexpr = _parse_and_reduce_expression(p_class,false,autoexport);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!subexpr) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-12-28 23:31:52 +01:00
|
|
|
//discourage common error
|
|
|
|
if (!onready && subexpr->type==Node::TYPE_OPERATOR) {
|
|
|
|
|
|
|
|
OperatorNode *op=static_cast<OperatorNode*>(subexpr);
|
|
|
|
if (op->op==OperatorNode::OP_CALL && op->arguments[0]->type==Node::TYPE_SELF && op->arguments[1]->type==Node::TYPE_IDENTIFIER) {
|
|
|
|
IdentifierNode *id=static_cast<IdentifierNode*>(op->arguments[1]);
|
|
|
|
if (id->name=="get_node") {
|
|
|
|
|
|
|
|
_set_error("Use 'onready var "+String(member.identifier)+" = get_node(..)' instead");
|
|
|
|
return;
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
member.expression=subexpr;
|
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
if (autoexport) {
|
2015-08-26 01:45:51 +02:00
|
|
|
if (1)/*(subexpr->type==Node::TYPE_ARRAY) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
member._export.type=Variant::ARRAY;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
} else if (subexpr->type==Node::TYPE_DICTIONARY) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
member._export.type=Variant::DICTIONARY;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-08-26 01:45:51 +02:00
|
|
|
} else*/ {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
if (subexpr->type!=Node::TYPE_CONSTANT) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
_set_error("Type-less export needs a constant expression assigned to infer type.");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
ConstantNode *cn = static_cast<ConstantNode*>(subexpr);
|
|
|
|
if (cn->value.get_type()==Variant::NIL) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
_set_error("Can't accept a null constant expression for infering export type.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
member._export.type=cn->value.get_type();
|
2016-06-11 23:31:22 +02:00
|
|
|
member._export.usage|=PROPERTY_USAGE_SCRIPT_VARIABLE;
|
2016-10-06 20:24:32 +02:00
|
|
|
if (cn->value.get_type()==Variant::OBJECT) {
|
|
|
|
Object *obj = cn->value;
|
|
|
|
Resource *res = obj->cast_to<Resource>();
|
|
|
|
if(res==NULL) {
|
|
|
|
_set_error("Exported constant not a type or resource.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
member._export.hint=PROPERTY_HINT_RESOURCE_TYPE;
|
2017-01-03 03:03:46 +01:00
|
|
|
member._export.hint_string=res->get_class();
|
2016-10-06 20:24:32 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
2014-10-28 02:54:32 +01:00
|
|
|
}
|
|
|
|
#ifdef TOOLS_ENABLED
|
|
|
|
if (subexpr->type==Node::TYPE_CONSTANT && member._export.type!=Variant::NIL) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
ConstantNode *cn = static_cast<ConstantNode*>(subexpr);
|
2014-10-28 02:54:32 +01:00
|
|
|
if (cn->value.get_type()!=Variant::NIL) {
|
|
|
|
member.default_value=cn->value;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
}
|
2014-10-28 02:54:32 +01:00
|
|
|
#endif
|
|
|
|
|
|
|
|
IdentifierNode *id = alloc_node<IdentifierNode>();
|
|
|
|
id->name=member.identifier;
|
|
|
|
|
|
|
|
OperatorNode *op = alloc_node<OperatorNode>();
|
2014-11-02 15:31:01 +01:00
|
|
|
op->op=OperatorNode::OP_INIT_ASSIGN;
|
2014-10-28 02:54:32 +01:00
|
|
|
op->arguments.push_back(id);
|
|
|
|
op->arguments.push_back(subexpr);
|
|
|
|
|
2015-12-28 23:31:52 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
#ifdef DEBUG_ENABLED
|
|
|
|
NewLineNode *nl = alloc_node<NewLineNode>();
|
|
|
|
nl->line=line;
|
2015-12-28 23:31:52 +01:00
|
|
|
if (onready)
|
|
|
|
p_class->ready->statements.push_back(nl);
|
|
|
|
else
|
|
|
|
p_class->initializer->statements.push_back(nl);
|
2014-10-28 02:54:32 +01:00
|
|
|
#endif
|
2015-12-28 23:31:52 +01:00
|
|
|
if (onready)
|
|
|
|
p_class->ready->statements.push_back(op);
|
|
|
|
else
|
|
|
|
p_class->initializer->statements.push_back(op);
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
|
|
|
|
|
|
|
|
} else {
|
|
|
|
|
|
|
|
if (autoexport) {
|
|
|
|
|
|
|
|
_set_error("Type-less export needs a constant expression assigned to infer type.");
|
|
|
|
return;
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
2014-10-28 02:54:32 +01:00
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_PR_SETGET) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
|
|
|
|
//just comma means using only getter
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
_set_error("Expected identifier for setter function after 'notify'.");
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-10-28 02:54:32 +01:00
|
|
|
member.setter=tokenizer->get_token_identifier();
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
//there is a getter
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
_set_error("Expected identifier for getter function after ','.");
|
|
|
|
}
|
|
|
|
|
|
|
|
member.getter=tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
p_class->variables.push_back(member);
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2015-06-24 18:29:23 +02:00
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (continue)");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_CONST: {
|
|
|
|
//variable declaration and (eventual) initialization
|
|
|
|
|
|
|
|
ClassNode::Constant constant;
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_set_error("Expected name (identifier) for constant.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
constant.identifier=tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
|
2014-02-10 02:10:30 +01:00
|
|
|
_set_error("Constant expects assignment.");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer->advance();
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
Node *subexpr=NULL;
|
|
|
|
|
|
|
|
subexpr = _parse_and_reduce_expression(p_class,true,true);
|
2015-08-30 16:50:10 +02:00
|
|
|
if (!subexpr) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
2015-08-30 16:50:10 +02:00
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
if (subexpr->type!=Node::TYPE_CONSTANT) {
|
|
|
|
_set_error("Expected constant expression");
|
|
|
|
}
|
|
|
|
constant.expression=subexpr;
|
|
|
|
|
|
|
|
p_class->constant_expressions.push_back(constant);
|
|
|
|
|
2015-06-24 18:29:23 +02:00
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (constant)");
|
|
|
|
return;
|
|
|
|
}
|
2014-02-10 02:10:30 +01:00
|
|
|
|
2016-08-26 13:15:45 +02:00
|
|
|
} break;
|
|
|
|
case GDTokenizer::TK_PR_ENUM: {
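// Parses 'enum [Name] { A, B = 3, C }'. Every entry becomes a class constant;
// when a name is given the whole enum is additionally stored as a Dictionary
// constant under that name.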
|
|
|
|
//multiple constant declarations..
|
|
|
|
|
|
|
|
int last_assign = -1; // Incremented by 1 right before the assignment.
|
2016-08-27 14:56:51 +02:00
|
|
|
String enum_name;
|
|
|
|
Dictionary enum_dict;
|
2016-08-26 13:15:45 +02:00
|
|
|
|
|
|
|
tokenizer->advance();
|
2016-08-27 14:56:51 +02:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
enum_name=tokenizer->get_token_identifier();
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
2016-08-26 13:15:45 +02:00
|
|
|
if (tokenizer->get_token()!=GDTokenizer::TK_CURLY_BRACKET_OPEN) {
|
|
|
|
_set_error("Expected '{' in enum declaration");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
while(true) {
|
|
|
|
if(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
|
|
|
|
|
|
|
|
tokenizer->advance(); // Ignore newlines
|
|
|
|
} else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
break; // End of enum
|
|
|
|
} else if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
|
|
|
|
|
|
|
|
if(tokenizer->get_token()==GDTokenizer::TK_EOF) {
|
|
|
|
_set_error("Unexpected end of file.");
|
|
|
|
} else {
|
|
|
|
_set_error(String("Unexpected ") + GDTokenizer::get_token_name(tokenizer->get_token()) + ", expected identifier");
|
|
|
|
}
|
|
|
|
|
|
|
|
return;
|
|
|
|
} else { // tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER
|
|
|
|
ClassNode::Constant constant;
|
|
|
|
|
|
|
|
constant.identifier=tokenizer->get_token_identifier();
|
|
|
|
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
|
|
|
|
tokenizer->advance();
|
|
|
|
|
|
|
|
Node *subexpr=NULL;
|
|
|
|
|
|
|
|
subexpr = _parse_and_reduce_expression(p_class,true,true);
|
|
|
|
if (!subexpr) {
|
|
|
|
if (_recover_from_completion()) {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (subexpr->type!=Node::TYPE_CONSTANT) {
|
|
|
|
_set_error("Expected constant expression");
|
|
|
|
}
|
|
|
|
|
|
|
|
const ConstantNode *subexpr_const = static_cast<const ConstantNode*>(subexpr);
|
|
|
|
|
|
|
|
if(subexpr_const->value.get_type() != Variant::INT) {
|
|
|
|
_set_error("Expected an int value for enum");
|
|
|
|
}
|
|
|
|
|
|
|
|
last_assign = subexpr_const->value;
|
|
|
|
|
|
|
|
constant.expression=subexpr;
|
|
|
|
|
|
|
|
} else {
|
|
|
|
last_assign = last_assign + 1;
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value = last_assign;
|
|
|
|
constant.expression = cn;
|
|
|
|
}
|
|
|
|
|
|
|
|
if(tokenizer->get_token()==GDTokenizer::TK_COMMA) {
|
|
|
|
tokenizer->advance();
|
|
|
|
}
|
|
|
|
|
2016-08-27 14:56:51 +02:00
|
|
|
if(enum_name != "") {
|
|
|
|
const ConstantNode *cn = static_cast<const ConstantNode*>(constant.expression);
|
|
|
|
enum_dict[constant.identifier] = cn->value;
|
|
|
|
}
|
|
|
|
|
2016-08-26 13:15:45 +02:00
|
|
|
p_class->constant_expressions.push_back(constant);
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
2016-08-27 14:56:51 +02:00
|
|
|
|
|
|
|
if(enum_name != "") {
|
|
|
|
ClassNode::Constant enum_constant;
|
|
|
|
enum_constant.identifier=enum_name;
|
|
|
|
ConstantNode *cn = alloc_node<ConstantNode>();
|
|
|
|
cn->value = enum_dict;
|
|
|
|
enum_constant.expression=cn;
|
|
|
|
p_class->constant_expressions.push_back(enum_constant);
|
|
|
|
}
|
2016-08-26 13:15:45 +02:00
|
|
|
|
|
|
|
if (!_end_statement()) {
|
|
|
|
_set_error("Expected end of statement (enum)");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
} break;
|
2016-11-03 11:26:38 +01:00
|
|
|
|
|
|
|
case GDTokenizer::TK_CONSTANT: {
|
|
|
|
if(tokenizer->get_token_constant().get_type() == Variant::STRING) {
|
|
|
|
tokenizer->advance();
|
|
|
|
// Ignore
|
|
|
|
} else {
|
|
|
|
_set_error(String()+"Unexpected constant of type: "+Variant::get_type_name(tokenizer->get_token_constant().get_type()));
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
} break;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
default: {
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
_set_error(String()+"Unexpected token: "+tokenizer->get_token_name(tokenizer->get_token())+":"+tokenizer->get_token_identifier());
|
2014-02-10 02:10:30 +01:00
|
|
|
return;
|
|
|
|
|
|
|
|
} break;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
void GDParser::_set_error(const String& p_error, int p_line, int p_column) {
|
|
|
|
|
|
|
|
|
|
|
|
if (error_set)
|
|
|
|
return; //allow no further errors
|
|
|
|
|
|
|
|
error=p_error;
|
2014-02-25 13:31:47 +01:00
|
|
|
error_line=p_line<0?tokenizer->get_token_line():p_line;
|
|
|
|
error_column=p_column<0?tokenizer->get_token_column():p_column;
|
2014-02-10 02:10:30 +01:00
|
|
|
error_set=true;
|
|
|
|
}
|
|
|
|
|
|
|
|
String GDParser::get_error() const {
|
|
|
|
|
|
|
|
return error;
|
|
|
|
}
|
|
|
|
|
|
|
|
int GDParser::get_error_line() const {
|
|
|
|
|
|
|
|
return error_line;
|
|
|
|
}
|
|
|
|
int GDParser::get_error_column() const {
|
|
|
|
|
|
|
|
return error_column;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
Error GDParser::_parse(const String& p_base_path) {
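// Shared entry point for parse() and parse_bytecode(): wraps the script in an
// implicit main ClassNode and runs _parse_class() over the whole token stream.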
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
base_path=p_base_path;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
clear();
|
|
|
|
|
|
|
|
//assume class
|
|
|
|
ClassNode *main_class = alloc_node<ClassNode>();
|
|
|
|
main_class->initializer = alloc_node<BlockNode>();
|
2014-12-17 02:31:57 +01:00
|
|
|
main_class->initializer->parent_class=main_class;
|
2015-12-28 23:31:52 +01:00
|
|
|
main_class->ready = alloc_node<BlockNode>();
|
|
|
|
main_class->ready->parent_class=main_class;
|
2014-12-17 02:31:57 +01:00
|
|
|
current_class=main_class;
|
2014-02-10 02:10:30 +01:00
|
|
|
|
|
|
|
_parse_class(main_class);
|
|
|
|
|
2014-02-25 13:31:47 +01:00
|
|
|
if (tokenizer->get_token()==GDTokenizer::TK_ERROR) {
|
2014-02-10 02:10:30 +01:00
|
|
|
error_set=false;
|
2014-02-25 13:31:47 +01:00
|
|
|
_set_error("Parse Error: "+tokenizer->get_token_error());
|
2014-02-10 02:10:30 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (error_set) {
|
|
|
|
|
|
|
|
return ERR_PARSE_ERROR;
|
|
|
|
}
|
|
|
|
return OK;
|
|
|
|
}
|
|
|
|
|
2014-12-07 06:04:20 +01:00
|
|
|
Error GDParser::parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p_base_path, const String &p_self_path) {
|
2014-02-25 13:31:47 +01:00
|
|
|
|
2015-06-26 06:14:31 +02:00
|
|
|
for_completion=false;
|
|
|
|
validating=false;
|
2014-12-17 02:31:57 +01:00
|
|
|
completion_type=COMPLETION_NONE;
|
|
|
|
completion_node=NULL;
|
|
|
|
completion_class=NULL;
|
|
|
|
completion_function=NULL;
|
|
|
|
completion_block=NULL;
|
2015-08-30 16:50:10 +02:00
|
|
|
completion_found=false;
|
2014-12-17 03:46:55 +01:00
|
|
|
current_block=NULL;
|
|
|
|
current_class=NULL;
|
|
|
|
current_function=NULL;
|
2014-12-17 02:31:57 +01:00
|
|
|
|
2014-12-07 06:04:20 +01:00
|
|
|
self_path=p_self_path;
|
2014-02-25 13:31:47 +01:00
|
|
|
GDTokenizerBuffer *tb = memnew( GDTokenizerBuffer );
|
|
|
|
tb->set_code_buffer(p_bytecode);
|
|
|
|
tokenizer=tb;
|
|
|
|
Error ret = _parse(p_base_path);
|
|
|
|
memdelete(tb);
|
|
|
|
tokenizer=NULL;
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2015-06-26 06:14:31 +02:00
|
|
|
Error GDParser::parse(const String& p_code, const String& p_base_path, bool p_just_validate, const String &p_self_path,bool p_for_completion) {
|
2014-02-25 13:31:47 +01:00
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
completion_type=COMPLETION_NONE;
|
|
|
|
completion_node=NULL;
|
|
|
|
completion_class=NULL;
|
|
|
|
completion_function=NULL;
|
|
|
|
completion_block=NULL;
|
2015-08-30 16:50:10 +02:00
|
|
|
completion_found=false;
|
2014-12-17 03:46:55 +01:00
|
|
|
current_block=NULL;
|
|
|
|
current_class=NULL;
|
|
|
|
|
|
|
|
current_function=NULL;
|
2014-12-17 02:31:57 +01:00
|
|
|
|
2014-12-07 06:04:20 +01:00
|
|
|
self_path=p_self_path;
|
2014-02-25 13:31:47 +01:00
|
|
|
GDTokenizerText *tt = memnew( GDTokenizerText );
|
|
|
|
tt->set_code(p_code);
|
|
|
|
|
2014-06-28 04:21:45 +02:00
|
|
|
validating=p_just_validate;
|
2015-06-26 06:14:31 +02:00
|
|
|
for_completion=p_for_completion;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer=tt;
|
|
|
|
Error ret = _parse(p_base_path);
|
|
|
|
memdelete(tt);
|
|
|
|
tokenizer=NULL;
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2016-01-23 19:36:03 +01:00
|
|
|
bool GDParser::is_tool_script() const {
|
|
|
|
|
|
|
|
return (head && head->type==Node::TYPE_CLASS && static_cast<const ClassNode*>(head)->tool);
|
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
const GDParser::Node *GDParser::get_parse_tree() const {
|
|
|
|
|
|
|
|
return head;
|
|
|
|
}
|
|
|
|
|
|
|
|
void GDParser::clear() {
|
|
|
|
|
|
|
|
while(list) {
|
|
|
|
|
|
|
|
Node *l=list;
|
|
|
|
list=list->next;
|
|
|
|
memdelete(l);
|
|
|
|
}
|
|
|
|
|
|
|
|
head=NULL;
|
|
|
|
list=NULL;
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
completion_type=COMPLETION_NONE;
|
|
|
|
completion_node=NULL;
|
|
|
|
completion_class=NULL;
|
|
|
|
completion_function=NULL;
|
|
|
|
completion_block=NULL;
|
2014-12-17 03:46:55 +01:00
|
|
|
current_block=NULL;
|
|
|
|
current_class=NULL;
|
|
|
|
|
2015-08-30 16:50:10 +02:00
|
|
|
completion_found=false;
|
2016-08-19 21:48:08 +02:00
|
|
|
rpc_mode=ScriptInstance::RPC_MODE_DISABLED;
|
2015-08-30 16:50:10 +02:00
|
|
|
|
2014-12-17 03:46:55 +01:00
|
|
|
current_function=NULL;
|
2014-12-17 02:31:57 +01:00
|
|
|
|
2014-06-28 04:21:45 +02:00
|
|
|
validating=false;
|
2015-06-26 06:14:31 +02:00
|
|
|
for_completion=false;
|
2014-02-10 02:10:30 +01:00
|
|
|
error_set=false;
|
|
|
|
tab_level.clear();
|
|
|
|
tab_level.push_back(0);
|
|
|
|
error_line=0;
|
|
|
|
error_column=0;
|
2014-05-24 06:35:47 +02:00
|
|
|
pending_newline=-1;
|
2014-04-05 23:50:09 +02:00
|
|
|
parenthesis=0;
|
2014-02-10 02:10:30 +01:00
|
|
|
current_export.type=Variant::NIL;
|
|
|
|
error="";
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2014-12-17 02:31:57 +01:00
|
|
|
|
|
|
|
GDParser::CompletionType GDParser::get_completion_type() {
|
|
|
|
|
|
|
|
return completion_type;
|
|
|
|
}
|
|
|
|
|
|
|
|
StringName GDParser::get_completion_cursor() {
|
|
|
|
|
|
|
|
return completion_cursor;
|
|
|
|
}
|
|
|
|
|
|
|
|
int GDParser::get_completion_line() {
|
|
|
|
|
|
|
|
return completion_line;
|
|
|
|
}
|
|
|
|
|
|
|
|
Variant::Type GDParser::get_completion_built_in_constant(){
|
|
|
|
|
|
|
|
return completion_built_in_constant;
|
|
|
|
}
|
|
|
|
|
|
|
|
GDParser::Node *GDParser::get_completion_node(){
|
|
|
|
|
|
|
|
return completion_node;
|
|
|
|
}
|
|
|
|
|
|
|
|
GDParser::BlockNode *GDParser::get_completion_block() {
|
|
|
|
|
|
|
|
return completion_block;
|
|
|
|
}
|
|
|
|
|
|
|
|
GDParser::ClassNode *GDParser::get_completion_class(){
|
|
|
|
|
|
|
|
return completion_class;
|
|
|
|
}
|
|
|
|
|
|
|
|
GDParser::FunctionNode *GDParser::get_completion_function(){
|
|
|
|
|
|
|
|
return completion_function;
|
|
|
|
}
|
|
|
|
|
|
|
|
int GDParser::get_completion_argument_index() {
|
|
|
|
|
|
|
|
return completion_argument;
|
|
|
|
}
|
|
|
|
|
2016-09-12 15:52:29 +02:00
|
|
|
int GDParser::get_completion_identifier_is_function() {
|
|
|
|
|
|
|
|
return completion_ident_is_call;
|
|
|
|
}
|
|
|
|
|
2014-02-10 02:10:30 +01:00
|
|
|
GDParser::GDParser() {
|
|
|
|
|
|
|
|
head=NULL;
|
|
|
|
list=NULL;
|
2014-02-25 13:31:47 +01:00
|
|
|
tokenizer=NULL;
|
2014-05-24 06:35:47 +02:00
|
|
|
pending_newline=-1;
|
2014-02-10 02:10:30 +01:00
|
|
|
clear();
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
GDParser::~GDParser() {
|
|
|
|
|
|
|
|
clear();
|
|
|
|
}
|