virtualx-engine/modules/gdscript/gd_parser.cpp

/*************************************************************************/
/* gd_parser.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "gd_parser.h"
#include "print_string.h"
#include "io/resource_loader.h"
/* TODO:
*Properly reduce constant expressions
*Implement missing operators in variant?
*constructor
*/
/*
todo:
fix post ++,--
make sure ++,-- don't work on constant expressions
seems passing parent node as param is not needed
*/
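// Nodes are allocated through alloc_node<T>(): every node is pushed onto the
// parser's intrusive list (head/list) so the whole tree can be freed at once,
// and is tagged with the tokenizer's current line/column for error reporting.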
template<class T>
T* GDParser::alloc_node() {
T *t = memnew( T);
t->next=list;
list=t;
if (!head)
head=t;
t->line=tokenizer->get_token_line();
t->column=tokenizer->get_token_column();
return t;
}
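// Returns true if the current token validly ends a statement: a ';' is
// consumed here, while a newline or EOF is left for the caller to handle.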
bool GDParser::_end_statement() {
if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON) {
tokenizer->advance();
return true; //handle next
} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
return true; //will be handled properly
}
return false;
}
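// Expects ':' followed by a newline, skips any blank lines (recording them as
// NewLineNodes when p_block is given), and pushes the indent of the first
// statement line onto tab_level. Fails if that indent is not deeper than the
// enclosing block's indent.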
bool GDParser::_enter_indent_block(BlockNode* p_block) {
if (tokenizer->get_token()!=GDTokenizer::TK_COLON) {
_set_error("':' expected at end of line.");
return false;
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {
_set_error("newline expected after ':'.");
return false;
}
while(true) {
if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {
return false; //should not happen: we expect to be sitting on a newline here
} else if (tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {
int indent = tokenizer->get_token_line_indent();
int current = tab_level.back()->get();
if (indent<=current)
return false;
tab_level.push_back(indent);
tokenizer->advance();
return true;
} else if (p_block) {
NewLineNode *nl = alloc_node<NewLineNode>();
nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
}
tokenizer->advance(); // go to next newline
}
}
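// Parses a comma-separated list of expressions up to the matching ')',
// appending each argument node to p_args. An empty list is accepted; a
// trailing ',' before ')' is reported as an error.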
bool GDParser::_parse_arguments(Node* p_parent,Vector<Node*>& p_args,bool p_static) {
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
tokenizer->advance();
} else {
while(true) {
Node*arg = _parse_expression(p_parent,p_static);
if (!arg)
return false;
p_args.push_back(arg);
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
tokenizer->advance();
break;
} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expression expected");
return false;
}
tokenizer->advance();
} else {
// something is broken
_set_error("Expected ',' or ')'");
return false;
}
}
}
return true;
}
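// Expression parsing works in two stages: operands (with any number of '.'
// member accesses, calls and '[]' indexing folded in immediately) and the
// operators between them are first collected into a flat 'expression' vector,
// which is then reduced into an operator tree by precedence at the end.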
GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_allow_assign) {
// Vector<Node*> expressions;
// Vector<OperatorNode::Operator> operators;
Vector<Expression> expression;
Node *expr=NULL;
while(true) {
/*****************/
/* Parse Operand */
/*****************/
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
//subexpression ()
tokenizer->advance();
Node* subexpr = _parse_expression(p_parent,p_static);
if (!subexpr)
return NULL;
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in expression");
return NULL;
}
tokenizer->advance();
expr=subexpr;
} else if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {
//constant defined by tokenizer
ConstantNode *constant = alloc_node<ConstantNode>();
constant->value=tokenizer->get_token_constant();
tokenizer->advance();
expr=constant;
} else if (tokenizer->get_token()==GDTokenizer::TK_PR_PRELOAD) {
//constant defined by tokenizer
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("Expected '(' after 'preload'");
return NULL;
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
_set_error("Expected string constant as 'preload' argument.");
return NULL;
}
String path = tokenizer->get_token_constant();
if (!path.is_abs_path() && base_path!="")
path=base_path+"/"+path;
Ref<Resource> res = ResourceLoader::load(path);
if (!res.is_valid()) {
_set_error("Can't preload resource at path: "+path);
return NULL;
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' after 'preload' path");
return NULL;
}
ConstantNode *constant = alloc_node<ConstantNode>();
constant->value=res;
tokenizer->advance();
expr=constant;
} else if (tokenizer->get_token()==GDTokenizer::TK_SELF) {
if (p_static) {
_set_error("'self'' not allowed in static function or constant expression");
return NULL;
}
//constant defined by tokenizer
SelfNode *self = alloc_node<SelfNode>();
tokenizer->advance();
expr=self;
} else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE && tokenizer->get_token(1)==GDTokenizer::TK_PERIOD) {
Variant::Type bi_type = tokenizer->get_token_type();
tokenizer->advance(2);
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Built-in type constant expected after '.'");
return NULL;
}
StringName identifier = tokenizer->get_token_identifier();
if (!Variant::has_numeric_constant(bi_type,identifier)) {
_set_error("Static constant '"+identifier.operator String()+"' not present in built-in type "+Variant::get_type_name(bi_type)+".");
return NULL;
}
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value=Variant::get_numeric_constant_value(bi_type,identifier);
expr=cn;
tokenizer->advance();
} else if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_OPEN && (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE || tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER || tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC)) {
//function or constructor
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_CALL;
if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
TypeNode *tn = alloc_node<TypeNode>();
tn->vtype=tokenizer->get_token_type();
op->arguments.push_back(tn);
} else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC) {
BuiltInFunctionNode *bn = alloc_node<BuiltInFunctionNode>();
bn->function=tokenizer->get_token_built_in_func();
op->arguments.push_back(bn);
} else {
SelfNode *self = alloc_node<SelfNode>();
op->arguments.push_back(self);
IdentifierNode* id = alloc_node<IdentifierNode>();
id->name=tokenizer->get_token_identifier();
op->arguments.push_back(id);
}
tokenizer->advance(2);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
expr=op;
} else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
//identifier (reference)
IdentifierNode *id = alloc_node<IdentifierNode>();
id->name=tokenizer->get_token_identifier();
tokenizer->advance();
expr=id;
} else if (/*tokenizer->get_token()==GDTokenizer::TK_OP_ADD ||*/ tokenizer->get_token()==GDTokenizer::TK_OP_SUB || tokenizer->get_token()==GDTokenizer::TK_OP_NOT || tokenizer->get_token()==GDTokenizer::TK_OP_BIT_INVERT) {
//single prefix operators: !expr, -expr, ~expr
OperatorNode *op = alloc_node<OperatorNode>();
Expression e;
e.is_op=true;
switch(tokenizer->get_token()) {
case GDTokenizer::TK_OP_SUB: e.op=OperatorNode::OP_NEG; break;
case GDTokenizer::TK_OP_NOT: e.op=OperatorNode::OP_NOT; break;
case GDTokenizer::TK_OP_BIT_INVERT: e.op=OperatorNode::OP_BIT_INVERT; break;
default: {}
}
tokenizer->advance();
if (e.op!=OperatorNode::OP_NOT && tokenizer->get_token()==GDTokenizer::TK_OP_NOT) {
_set_error("Misplaced 'not'.");
return NULL;
}
expression.push_back(e);
continue; //only exception, must continue...
/*
Node *subexpr=_parse_expression(op,p_static);
if (!subexpr)
return NULL;
op->arguments.push_back(subexpr);
expr=op;*/
} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
// array
tokenizer->advance();
ArrayNode *arr = alloc_node<ArrayNode>();
bool expecting_comma=false;
while(true) {
if (tokenizer->get_token()==GDTokenizer::TK_EOF) {
_set_error("Unterminated array");
return NULL;
} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_CLOSE) {
tokenizer->advance();
break;
} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
tokenizer->advance(); //ignore newline
} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
if (!expecting_comma) {
_set_error("expression or ']' expected");
return NULL;
}
expecting_comma=false;
tokenizer->advance(); //skip the ','
} else {
//parse expression
if (expecting_comma) {
_set_error("',' or ']' expected");
return NULL;
}
Node *n = _parse_expression(arr,p_static);
if (!n)
return NULL;
arr->elements.push_back(n);
expecting_comma=true;
}
}
expr=arr;
} else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_OPEN) {
// dictionary
tokenizer->advance();
DictionaryNode *dict = alloc_node<DictionaryNode>();
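// Both pair syntaxes are accepted: python/js style, e.g. {"hp": 10, 3: 4},
// and lua style shorthand where a bare identifier followed by '=' becomes a
// string key, e.g. {hp = 10}.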
enum DictExpect {
DICT_EXPECT_KEY,
DICT_EXPECT_COLON,
DICT_EXPECT_VALUE,
DICT_EXPECT_COMMA
};
Node *key=NULL;
DictExpect expecting=DICT_EXPECT_KEY;
while(true) {
if (tokenizer->get_token()==GDTokenizer::TK_EOF) {
_set_error("Unterminated dictionary");
return NULL;
} else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
if (expecting==DICT_EXPECT_COLON) {
_set_error("':' expected");
return NULL;
}
if (expecting==DICT_EXPECT_VALUE) {
_set_error("value expected");
return NULL;
}
tokenizer->advance();
break;
} else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
tokenizer->advance(); //ignore newline
} else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
if (expecting==DICT_EXPECT_KEY) {
_set_error("key or '}' expected");
return NULL;
}
if (expecting==DICT_EXPECT_VALUE) {
_set_error("value expected");
return NULL;
}
if (expecting==DICT_EXPECT_COLON) {
_set_error("':' expected");
return NULL;
}
expecting=DICT_EXPECT_KEY;
tokenizer->advance(); //skip the ','
} else if (tokenizer->get_token()==GDTokenizer::TK_COLON) {
if (expecting==DICT_EXPECT_KEY) {
_set_error("key or '}' expected");
return NULL;
}
if (expecting==DICT_EXPECT_VALUE) {
_set_error("value expected");
return NULL;
}
if (expecting==DICT_EXPECT_COMMA) {
_set_error("',' or '}' expected");
return NULL;
}
expecting=DICT_EXPECT_VALUE;
tokenizer->advance(); //skip the ':'
} else {
if (expecting==DICT_EXPECT_COMMA) {
_set_error("',' or '}' expected");
return NULL;
}
if (expecting==DICT_EXPECT_COLON) {
_set_error("':' expected");
return NULL;
}
if (expecting==DICT_EXPECT_KEY) {
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)==GDTokenizer::TK_OP_ASSIGN) {
//lua style identifier, easier to write
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value = tokenizer->get_token_identifier();
key = cn;
tokenizer->advance(2);
expecting=DICT_EXPECT_VALUE;
} else {
//python/js style more flexible
key = _parse_expression(dict,p_static);
if (!key)
return NULL;
expecting=DICT_EXPECT_COLON;
}
}
if (expecting==DICT_EXPECT_VALUE) {
Node *value = _parse_expression(dict,p_static);
if (!value)
return NULL;
expecting=DICT_EXPECT_COMMA;
DictionaryNode::Pair pair;
pair.key=key;
pair.value=value;
dict->elements.push_back(pair);
key=NULL;
}
}
}
expr=dict;
} else if (tokenizer->get_token()==GDTokenizer::TK_PERIOD && tokenizer->get_token(1)==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
// parent call
tokenizer->advance(); //goto identifier
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_PARENT_CALL;
/*SelfNode *self = alloc_node<SelfNode>();
op->arguments.push_back(self);
forbidden for now */
IdentifierNode* id = alloc_node<IdentifierNode>();
id->name=tokenizer->get_token_identifier();
op->arguments.push_back(id);
tokenizer->advance(2);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
expr=op;
} else {
//find list [ or find dictionary {
print_line("found bug?");
_set_error("Error parsing expression, misplaced: "+String(tokenizer->get_token_name(tokenizer->get_token())));
return NULL; //nothing
}
if (!expr) {
ERR_EXPLAIN("GDParser bug, couldn't figure out what the expression is.");
ERR_FAIL_COND_V(!expr,NULL);
}
/******************/
/* Parse Indexing */
/******************/
while (true) {
//expressions can be indexed any number of times
if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
//indexing using "."
if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)!=GDTokenizer::TK_BUILT_IN_FUNC ) {
_set_error("Expected identifier as member");
return NULL;
} else if (tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
//call!!
OperatorNode * op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_CALL;
IdentifierNode * id = alloc_node<IdentifierNode>();
if (tokenizer->get_token(1)==GDTokenizer::TK_BUILT_IN_FUNC ) {
//small hack so built-in funcs don't shadow methods with the same name
id->name=GDFunctions::get_func_name(tokenizer->get_token_built_in_func(1));
} else {
id->name=tokenizer->get_token_identifier(1);
}
op->arguments.push_back(expr); // call what
op->arguments.push_back(id); // call func
//get arguments
tokenizer->advance(3);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
expr=op;
} else {
//simple indexing!
OperatorNode * op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_INDEX_NAMED;
IdentifierNode * id = alloc_node<IdentifierNode>();
id->name=tokenizer->get_token_identifier(1);
op->arguments.push_back(expr);
op->arguments.push_back(id);
expr=op;
tokenizer->advance(2);
}
} else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
//indexing using "[]"
OperatorNode * op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_INDEX;
tokenizer->advance(1);
Node *subexpr = _parse_expression(op,p_static);
if (!subexpr) {
return NULL;
}
if (tokenizer->get_token()!=GDTokenizer::TK_BRACKET_CLOSE) {
_set_error("Expected ']'");
return NULL;
}
op->arguments.push_back(expr);
op->arguments.push_back(subexpr);
tokenizer->advance(1);
expr=op;
} else
break;
}
/******************/
/* Parse Operator */
/******************/
Expression e;
e.is_op=false;
e.node=expr;
expression.push_back(e);
// determine which operator is next
OperatorNode::Operator op;
bool valid=true;
//assignment, if allowed, is only allowed on the first operator
#define _VALIDATE_ASSIGN if (!p_allow_assign) { _set_error("Unexpected assign."); return NULL; } p_allow_assign=false;
switch(tokenizer->get_token()) { //see operator
case GDTokenizer::TK_OP_IN: op=OperatorNode::OP_IN; break;
case GDTokenizer::TK_OP_EQUAL: op=OperatorNode::OP_EQUAL ; break;
case GDTokenizer::TK_OP_NOT_EQUAL: op=OperatorNode::OP_NOT_EQUAL ; break;
case GDTokenizer::TK_OP_LESS: op=OperatorNode::OP_LESS ; break;
case GDTokenizer::TK_OP_LESS_EQUAL: op=OperatorNode::OP_LESS_EQUAL ; break;
case GDTokenizer::TK_OP_GREATER: op=OperatorNode::OP_GREATER ; break;
case GDTokenizer::TK_OP_GREATER_EQUAL: op=OperatorNode::OP_GREATER_EQUAL ; break;
case GDTokenizer::TK_OP_AND: op=OperatorNode::OP_AND ; break;
case GDTokenizer::TK_OP_OR: op=OperatorNode::OP_OR ; break;
case GDTokenizer::TK_OP_ADD: op=OperatorNode::OP_ADD ; break;
case GDTokenizer::TK_OP_SUB: op=OperatorNode::OP_SUB ; break;
case GDTokenizer::TK_OP_MUL: op=OperatorNode::OP_MUL ; break;
case GDTokenizer::TK_OP_DIV: op=OperatorNode::OP_DIV ; break;
case GDTokenizer::TK_OP_MOD: op=OperatorNode::OP_MOD ; break;
//case GDTokenizer::TK_OP_NEG: op=OperatorNode::OP_NEG ; break;
case GDTokenizer::TK_OP_SHIFT_LEFT: op=OperatorNode::OP_SHIFT_LEFT ; break;
case GDTokenizer::TK_OP_SHIFT_RIGHT: op=OperatorNode::OP_SHIFT_RIGHT ; break;
case GDTokenizer::TK_OP_ASSIGN: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN ; break;
case GDTokenizer::TK_OP_ASSIGN_ADD: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_ADD ; break;
case GDTokenizer::TK_OP_ASSIGN_SUB: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SUB ; break;
case GDTokenizer::TK_OP_ASSIGN_MUL: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_MUL ; break;
case GDTokenizer::TK_OP_ASSIGN_DIV: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_DIV ; break;
case GDTokenizer::TK_OP_ASSIGN_MOD: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_MOD ; break;
case GDTokenizer::TK_OP_ASSIGN_SHIFT_LEFT: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SHIFT_LEFT; break;
case GDTokenizer::TK_OP_ASSIGN_SHIFT_RIGHT: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_SHIFT_RIGHT; break;
case GDTokenizer::TK_OP_ASSIGN_BIT_AND: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_AND ; break;
case GDTokenizer::TK_OP_ASSIGN_BIT_OR: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_OR ; break;
case GDTokenizer::TK_OP_ASSIGN_BIT_XOR: _VALIDATE_ASSIGN op=OperatorNode::OP_ASSIGN_BIT_XOR ; break;
case GDTokenizer::TK_OP_BIT_AND: op=OperatorNode::OP_BIT_AND ; break;
case GDTokenizer::TK_OP_BIT_OR: op=OperatorNode::OP_BIT_OR ; break;
case GDTokenizer::TK_OP_BIT_XOR: op=OperatorNode::OP_BIT_XOR ; break;
case GDTokenizer::TK_PR_EXTENDS: op=OperatorNode::OP_EXTENDS; break;
default: valid=false; break;
}
if (valid) {
e.is_op=true;
e.op=op;
expression.push_back(e);
tokenizer->advance();
} else {
break;
}
}
/* Reduce the set of expressions and place them in an operator tree, respecting precedence */
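// Reduction: on each pass, pick the operator with the lowest priority value
// (i.e. highest precedence). Unary operators fold only the operand to their
// right (consecutive unaries fold right to left); binary operators replace
// the (left, op, right) triple with a single OperatorNode. Using '<' in the
// comparison makes equal-priority binary operators left-associative. This
// repeats until a single node remains.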
while(expression.size()>1) {
int next_op=-1;
int min_priority=0xFFFFF;
bool is_unary=false;
for(int i=0;i<expression.size();i++) {
if (!expression[i].is_op) {
continue;
}
int priority;
bool unary=false;
switch(expression[i].op) {
case OperatorNode::OP_EXTENDS: priority=-1; break; //before anything
case OperatorNode::OP_BIT_INVERT: priority=0; unary=true; break;
case OperatorNode::OP_NEG: priority=1; unary=true; break;
case OperatorNode::OP_MUL: priority=2; break;
case OperatorNode::OP_DIV: priority=2; break;
case OperatorNode::OP_MOD: priority=2; break;
case OperatorNode::OP_ADD: priority=3; break;
case OperatorNode::OP_SUB: priority=3; break;
case OperatorNode::OP_SHIFT_LEFT: priority=4; break;
case OperatorNode::OP_SHIFT_RIGHT: priority=4; break;
case OperatorNode::OP_BIT_AND: priority=5; break;
case OperatorNode::OP_BIT_XOR: priority=6; break;
case OperatorNode::OP_BIT_OR: priority=7; break;
case OperatorNode::OP_LESS: priority=8; break;
case OperatorNode::OP_LESS_EQUAL: priority=8; break;
case OperatorNode::OP_GREATER: priority=8; break;
case OperatorNode::OP_GREATER_EQUAL: priority=8; break;
case OperatorNode::OP_EQUAL: priority=8; break;
case OperatorNode::OP_NOT_EQUAL: priority=8; break;
case OperatorNode::OP_IN: priority=10; break;
case OperatorNode::OP_NOT: priority=11; unary=true; break;
case OperatorNode::OP_AND: priority=12; break;
case OperatorNode::OP_OR: priority=13; break;
// ?: = 10
case OperatorNode::OP_ASSIGN: priority=14; break;
case OperatorNode::OP_ASSIGN_ADD: priority=14; break;
case OperatorNode::OP_ASSIGN_SUB: priority=14; break;
case OperatorNode::OP_ASSIGN_MUL: priority=14; break;
case OperatorNode::OP_ASSIGN_DIV: priority=14; break;
case OperatorNode::OP_ASSIGN_MOD: priority=14; break;
case OperatorNode::OP_ASSIGN_SHIFT_LEFT: priority=14; break;
case OperatorNode::OP_ASSIGN_SHIFT_RIGHT: priority=14; break;
case OperatorNode::OP_ASSIGN_BIT_AND: priority=14; break;
case OperatorNode::OP_ASSIGN_BIT_OR: priority=14; break;
case OperatorNode::OP_ASSIGN_BIT_XOR: priority=14; break;
default: {
_set_error("GDParser bug, invalid operator in expression: "+itos(expression[i].op));
return NULL;
}
}
if (priority<min_priority) {
// < is used for left to right (default)
// <= is used for right to left
next_op=i;
min_priority=priority;
is_unary=unary;
}
}
if (next_op==-1) {
_set_error("Yet another parser bug....");
ERR_FAIL_COND_V(next_op==-1,NULL);
}
// OK! create operator..
if (is_unary) {
int expr_pos=next_op;
while(expression[expr_pos].is_op) {
expr_pos++;
if (expr_pos==expression.size()) {
//can happen..
_set_error("Unexpected end of expression..");
return NULL;
}
}
//consecutively do unary operators
for(int i=expr_pos-1;i>=next_op;i--) {
OperatorNode *op = alloc_node<OperatorNode>();
op->op=expression[i].op;
op->arguments.push_back(expression[i+1].node);
expression[i].is_op=false;
expression[i].node=op;
expression.remove(i+1);
}
} else {
if (next_op <1 || next_op>=(expression.size()-1)) {
_set_error("Parser bug..");
ERR_FAIL_V(NULL);
}
OperatorNode *op = alloc_node<OperatorNode>();
op->op=expression[next_op].op;
if (expression[next_op-1].is_op) {
_set_error("Parser bug..");
ERR_FAIL_V(NULL);
}
if (expression[next_op+1].is_op) {
// this is not invalid per se and can really appear,
// but it always ends up invalid because no binary op
// can be followed by a unary op in a valid combination;
// due to how precedence works, unaries always disappear first
_set_error("Parser bug..");
}
op->arguments.push_back(expression[next_op-1].node); //expression goes as left
op->arguments.push_back(expression[next_op+1].node); //next expression goes as right
//replace all 3 nodes by this operator and make it an expression
expression[next_op-1].node=op;
expression.remove(next_op);
expression.remove(next_op);
}
}
return expression[0].node;
}
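// Constant folding pass: operator nodes whose operands are all constants are
// evaluated via Variant::evaluate, calls to built-in type constructors and
// deterministic built-in functions with constant arguments are executed at
// parse time, indexing into constants is resolved, and (when p_to_const is
// set) constant arrays/dictionaries are collapsed into a ConstantNode.
// Anything that cannot be folded is returned unchanged.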
GDParser::Node* GDParser::_reduce_expression(Node *p_node,bool p_to_const) {
switch(p_node->type) {
case Node::TYPE_BUILT_IN_FUNCTION: {
//many of these could probably be optimized
return p_node;
} break;
case Node::TYPE_ARRAY: {
ArrayNode *an = static_cast<ArrayNode*>(p_node);
bool all_constants=true;
for(int i=0;i<an->elements.size();i++) {
an->elements[i]=_reduce_expression(an->elements[i],p_to_const);
if (an->elements[i]->type!=Node::TYPE_CONSTANT)
all_constants=false;
}
if (all_constants && p_to_const) {
//reduce constant array expression
ConstantNode *cn = alloc_node<ConstantNode>();
Array arr(!p_to_const);
arr.resize(an->elements.size());
for(int i=0;i<an->elements.size();i++) {
ConstantNode *acn = static_cast<ConstantNode*>(an->elements[i]);
arr[i]=acn->value;
}
cn->value=arr;
return cn;
}
return an;
} break;
case Node::TYPE_DICTIONARY: {
DictionaryNode *dn = static_cast<DictionaryNode*>(p_node);
bool all_constants=true;
for(int i=0;i<dn->elements.size();i++) {
dn->elements[i].key=_reduce_expression(dn->elements[i].key,p_to_const);
if (dn->elements[i].key->type!=Node::TYPE_CONSTANT)
all_constants=false;
dn->elements[i].value=_reduce_expression(dn->elements[i].value,p_to_const);
if (dn->elements[i].value->type!=Node::TYPE_CONSTANT)
all_constants=false;
}
if (all_constants && p_to_const) {
//reduce constant dictionary expression
ConstantNode *cn = alloc_node<ConstantNode>();
Dictionary dict(!p_to_const);
for(int i=0;i<dn->elements.size();i++) {
ConstantNode *key_c = static_cast<ConstantNode*>(dn->elements[i].key);
ConstantNode *value_c = static_cast<ConstantNode*>(dn->elements[i].value);
dict[key_c->value]=value_c->value;
}
cn->value=dict;
return cn;
}
return dn;
} break;
case Node::TYPE_OPERATOR: {
OperatorNode *op=static_cast<OperatorNode*>(p_node);
bool all_constants=true;
int last_not_constant=-1;
for(int i=0;i<op->arguments.size();i++) {
op->arguments[i]=_reduce_expression(op->arguments[i],p_to_const);
if (op->arguments[i]->type!=Node::TYPE_CONSTANT) {
all_constants=false;
last_not_constant=i;
}
}
if (op->op==OperatorNode::OP_EXTENDS) {
//nothing much
return op;
} else if (op->op==OperatorNode::OP_PARENT_CALL) {
//nothing much
return op;
} else if (op->op==OperatorNode::OP_CALL) {
//can reduce base type constructors
if ((op->arguments[0]->type==Node::TYPE_TYPE || (op->arguments[0]->type==Node::TYPE_BUILT_IN_FUNCTION && GDFunctions::is_deterministic( static_cast<BuiltInFunctionNode*>(op->arguments[0])->function))) && last_not_constant==0) {
//native type constructor or intrinsic function
const Variant **vptr=NULL;
Vector<Variant*> ptrs;
if (op->arguments.size()>1) {
ptrs.resize(op->arguments.size()-1);
for(int i=0;i<ptrs.size();i++) {
ConstantNode *cn = static_cast<ConstantNode*>(op->arguments[i+1]);
ptrs[i]=&cn->value;
}
vptr=(const Variant**)&ptrs[0];
}
Variant::CallError ce;
Variant v;
if (op->arguments[0]->type==Node::TYPE_TYPE) {
TypeNode *tn = static_cast<TypeNode*>(op->arguments[0]);
v = Variant::construct(tn->vtype,vptr,ptrs.size(),ce);
} else {
GDFunctions::Function func = static_cast<BuiltInFunctionNode*>(op->arguments[0])->function;
GDFunctions::call(func,vptr,ptrs.size(),v,ce);
}
if (ce.error!=Variant::CallError::CALL_OK) {
String errwhere;
if (op->arguments[0]->type==Node::TYPE_TYPE) {
TypeNode *tn = static_cast<TypeNode*>(op->arguments[0]);
errwhere="'"+Variant::get_type_name(tn->vtype)+"'' constructor";
} else {
GDFunctions::Function func = static_cast<BuiltInFunctionNode*>(op->arguments[0])->function;
errwhere=String("'")+GDFunctions::get_func_name(func)+"'' intrinsic function";
}
switch(ce.error) {
case Variant::CallError::CALL_ERROR_INVALID_ARGUMENT: {
_set_error("Invalid argument (#"+itos(ce.argument+1)+") for "+errwhere+".");
} break;
case Variant::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS: {
_set_error("Too many arguments for "+errwhere+".");
} break;
case Variant::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS: {
_set_error("Too few arguments for "+errwhere+".");
} break;
default: {
_set_error("Invalid arguments for "+errwhere+".");
} break;
}
return p_node;
}
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value=v;
return cn;
} else if (op->arguments[0]->type==Node::TYPE_BUILT_IN_FUNCTION && last_not_constant==0) {
}
return op; //don't reduce yet
} else if (op->op==OperatorNode::OP_INDEX) {
//can reduce indices into constant arrays or dictionaries
if (all_constants) {
ConstantNode *ca = static_cast<ConstantNode*>(op->arguments[0]);
ConstantNode *cb = static_cast<ConstantNode*>(op->arguments[1]);
bool valid;
Variant v = ca->value.get(cb->value,&valid);
if (!valid) {
_set_error("invalid index in constant expression");
return op;
}
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value=v;
return cn;
} else if (op->arguments[0]->type==Node::TYPE_CONSTANT && op->arguments[1]->type==Node::TYPE_IDENTIFIER) {
ConstantNode *ca = static_cast<ConstantNode*>(op->arguments[0]);
IdentifierNode *ib = static_cast<IdentifierNode*>(op->arguments[1]);
bool valid;
Variant v = ca->value.get_named(ib->name,&valid);
if (!valid) {
_set_error("invalid index '"+String(ib->name)+"' in constant expression");
return op;
}
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value=v;
return cn;
}
return op;
}
//validate assignment (don't assign to a constant expression)
switch(op->op) {
case OperatorNode::OP_ASSIGN:
case OperatorNode::OP_ASSIGN_ADD:
case OperatorNode::OP_ASSIGN_SUB:
case OperatorNode::OP_ASSIGN_MUL:
case OperatorNode::OP_ASSIGN_DIV:
case OperatorNode::OP_ASSIGN_MOD:
case OperatorNode::OP_ASSIGN_SHIFT_LEFT:
case OperatorNode::OP_ASSIGN_SHIFT_RIGHT:
case OperatorNode::OP_ASSIGN_BIT_AND:
case OperatorNode::OP_ASSIGN_BIT_OR:
case OperatorNode::OP_ASSIGN_BIT_XOR: {
if (op->arguments[0]->type==Node::TYPE_CONSTANT) {
_set_error("Can't assign to constant");
return op;
}
} break;
default: { break; }
}
//now see if all are constants
if (!all_constants)
return op; //nothing to reduce from here on
#define _REDUCE_UNARY(m_vop)\
bool valid=false;\
Variant res;\
Variant::evaluate(m_vop,static_cast<ConstantNode*>(op->arguments[0])->value,Variant(),res,valid);\
if (!valid) {\
_set_error("Invalid operand for unary operator");\
return p_node;\
}\
ConstantNode *cn = alloc_node<ConstantNode>();\
cn->value=res;\
return cn;
#define _REDUCE_BINARY(m_vop)\
bool valid=false;\
Variant res;\
Variant::evaluate(m_vop,static_cast<ConstantNode*>(op->arguments[0])->value,static_cast<ConstantNode*>(op->arguments[1])->value,res,valid);\
if (!valid) {\
_set_error("Invalid operands for operator");\
return p_node;\
}\
ConstantNode *cn = alloc_node<ConstantNode>();\
cn->value=res;\
return cn;
switch(op->op) {
//unary operators
case OperatorNode::OP_NEG: { _REDUCE_UNARY(Variant::OP_NEGATE); } break;
case OperatorNode::OP_NOT: { _REDUCE_UNARY(Variant::OP_NOT); } break;
case OperatorNode::OP_BIT_INVERT: { _REDUCE_UNARY(Variant::OP_BIT_NEGATE); } break;
//binary operators (in precedence order)
case OperatorNode::OP_IN: { _REDUCE_BINARY(Variant::OP_IN); } break;
case OperatorNode::OP_EQUAL: { _REDUCE_BINARY(Variant::OP_EQUAL); } break;
case OperatorNode::OP_NOT_EQUAL: { _REDUCE_BINARY(Variant::OP_NOT_EQUAL); } break;
case OperatorNode::OP_LESS: { _REDUCE_BINARY(Variant::OP_LESS); } break;
case OperatorNode::OP_LESS_EQUAL: { _REDUCE_BINARY(Variant::OP_LESS_EQUAL); } break;
case OperatorNode::OP_GREATER: { _REDUCE_BINARY(Variant::OP_GREATER); } break;
case OperatorNode::OP_GREATER_EQUAL: { _REDUCE_BINARY(Variant::OP_GREATER_EQUAL); } break;
case OperatorNode::OP_AND: { _REDUCE_BINARY(Variant::OP_AND); } break;
case OperatorNode::OP_OR: { _REDUCE_BINARY(Variant::OP_OR); } break;
case OperatorNode::OP_ADD: { _REDUCE_BINARY(Variant::OP_ADD); } break;
case OperatorNode::OP_SUB: { _REDUCE_BINARY(Variant::OP_SUBSTRACT); } break;
case OperatorNode::OP_MUL: { _REDUCE_BINARY(Variant::OP_MULTIPLY); } break;
case OperatorNode::OP_DIV: { _REDUCE_BINARY(Variant::OP_DIVIDE); } break;
case OperatorNode::OP_MOD: { _REDUCE_BINARY(Variant::OP_MODULE); } break;
case OperatorNode::OP_SHIFT_LEFT: { _REDUCE_BINARY(Variant::OP_SHIFT_LEFT); } break;
case OperatorNode::OP_SHIFT_RIGHT: { _REDUCE_BINARY(Variant::OP_SHIFT_RIGHT); } break;
case OperatorNode::OP_BIT_AND: { _REDUCE_BINARY(Variant::OP_BIT_AND); } break;
case OperatorNode::OP_BIT_OR: { _REDUCE_BINARY(Variant::OP_BIT_OR); } break;
case OperatorNode::OP_BIT_XOR: { _REDUCE_BINARY(Variant::OP_BIT_XOR); } break;
default: { ERR_FAIL_V(op); }
}
ERR_FAIL_V(op);
} break;
default: {
return p_node;
} break;
}
}
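// Convenience wrapper: parse an expression and immediately run the constant
// folding pass over the result.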
GDParser::Node* GDParser::_parse_and_reduce_expression(Node *p_parent,bool p_static,bool p_reduce_const,bool p_allow_assign) {
Node* expr=_parse_expression(p_parent,p_static,p_allow_assign);
if (!expr || error_set)
return NULL;
expr = _reduce_expression(expr,p_reduce_const);
if (!expr || error_set)
return NULL;
return expr;
}
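// Parses the statements of an indented block until indentation drops back
// below the level this block started at: local 'var' declarations, if/elif/
// else, while, for, continue, break, return, assert and expression statements.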
void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
int indent_level = tab_level.back()->get();
#ifdef DEBUG_ENABLED
NewLineNode *nl = alloc_node<NewLineNode>();
nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
#endif
while(true) {
GDTokenizer::Token token = tokenizer->get_token();
if (error_set)
return;
if (indent_level>tab_level.back()->get()) {
p_block->end_line=tokenizer->get_token_line();
return; //go back a level
}
switch(token) {
case GDTokenizer::TK_EOF:
p_block->end_line=tokenizer->get_token_line();
case GDTokenizer::TK_ERROR: {
return; //go back
//end of file!
} break;
case GDTokenizer::TK_NEWLINE: {
NewLineNode *nl = alloc_node<NewLineNode>();
nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
if (!_parse_newline()) {
if (!error_set) {
p_block->end_line=tokenizer->get_token_line();
}
return;
}
} break;
case GDTokenizer::TK_CF_PASS: {
if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
_set_error("Expected ';' or <NewLine>.");
return;
}
tokenizer->advance();
} break;
case GDTokenizer::TK_PR_VAR: {
//variable declaration and (optional) initialization
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for local variable name.");
return;
}
StringName n = tokenizer->get_token_identifier();
tokenizer->advance();
p_block->variables.push_back(n); //line?
p_block->variable_lines.push_back(tokenizer->get_token_line());
//must know when the local variable is declared
LocalVarNode *lv = alloc_node<LocalVarNode>();
lv->name=n;
p_block->statements.push_back(lv);
Node *assigned=NULL;
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
tokenizer->advance();
Node *subexpr=NULL;
subexpr = _parse_and_reduce_expression(p_block,p_static);
if (!subexpr)
return;
lv->assign=subexpr;
assigned=subexpr;
} else {
ConstantNode *c = alloc_node<ConstantNode>();
c->value=Variant();
assigned = c;
}
IdentifierNode *id = alloc_node<IdentifierNode>();
id->name=n;
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_ASSIGN;
op->arguments.push_back(id);
op->arguments.push_back(assigned);
p_block->statements.push_back(op);
_end_statement();
} break;
case GDTokenizer::TK_CF_IF: {
tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
ControlFlowNode *cf_if = alloc_node<ControlFlowNode>();
cf_if->cf_type=ControlFlowNode::CF_IF;
cf_if->arguments.push_back(condition);
cf_if->body = alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body);
if (!_enter_indent_block(cf_if->body)) {
p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_if->body,p_static);
if (error_set)
return;
p_block->statements.push_back(cf_if);
while(true) {
while(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
tokenizer->advance();
}
if (tab_level.back()->get() < indent_level) { //not at current indent level
p_block->end_line=tokenizer->get_token_line();
return;
}
if (tokenizer->get_token()==GDTokenizer::TK_CF_ELIF) {
if (tab_level.back()->get() > indent_level) {
_set_error("Invalid indent");
return;
}
tokenizer->advance();
cf_if->body_else=alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body_else);
ControlFlowNode *cf_else = alloc_node<ControlFlowNode>();
cf_else->cf_type=ControlFlowNode::CF_IF;
//condition
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
cf_else->arguments.push_back(condition);
cf_else->cf_type=ControlFlowNode::CF_IF;
cf_if->body_else->statements.push_back(cf_else);
cf_if=cf_else;
cf_if->body=alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body);
if (!_enter_indent_block(cf_if->body)) {
p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_else->body,p_static);
if (error_set)
return;
} else if (tokenizer->get_token()==GDTokenizer::TK_CF_ELSE) {
if (tab_level.back()->get() > indent_level) {
_set_error("Invalid indent");
return;
}
tokenizer->advance();
cf_if->body_else=alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body_else);
if (!_enter_indent_block(cf_if->body_else)) {
p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_if->body_else,p_static);
if (error_set)
return;
break; //after else, exit
} else
break;
}
} break;
case GDTokenizer::TK_CF_WHILE: {
tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
ControlFlowNode *cf_while = alloc_node<ControlFlowNode>();
cf_while->cf_type=ControlFlowNode::CF_WHILE;
cf_while->arguments.push_back(condition);
cf_while->body = alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_while->body);
if (!_enter_indent_block(cf_while->body)) {
p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_while->body,p_static);
if (error_set)
return;
p_block->statements.push_back(cf_while);
} break;
case GDTokenizer::TK_CF_FOR: {
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("identifier expected after 'for'");
return;
}
IdentifierNode *id = alloc_node<IdentifierNode>();
id->name=tokenizer->get_token_identifier();
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_OP_IN) {
_set_error("'in' expected after identifier");
return;
}
tokenizer->advance();
Node *container = _parse_and_reduce_expression(p_block,p_static);
if (!container)
return;
ControlFlowNode *cf_for = alloc_node<ControlFlowNode>();
cf_for->cf_type=ControlFlowNode::CF_FOR;
cf_for->arguments.push_back(id);
cf_for->arguments.push_back(container);
cf_for->body = alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_for->body);
if (!_enter_indent_block(cf_for->body)) {
p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_for->body,p_static);
if (error_set)
return;
p_block->statements.push_back(cf_for);
} break;
case GDTokenizer::TK_CF_CONTINUE: {
tokenizer->advance();
ControlFlowNode *cf_continue = alloc_node<ControlFlowNode>();
cf_continue->cf_type=ControlFlowNode::CF_CONTINUE;
p_block->statements.push_back(cf_continue);
if (!_end_statement()) {
_set_error("Expected end of statement (continue)");
return;
}
} break;
case GDTokenizer::TK_CF_BREAK: {
tokenizer->advance();
ControlFlowNode *cf_break = alloc_node<ControlFlowNode>();
cf_break->cf_type=ControlFlowNode::CF_BREAK;
p_block->statements.push_back(cf_break);
if (!_end_statement()) {
_set_error("Expected end of statement (break)");
return;
}
} break;
case GDTokenizer::TK_CF_RETURN: {
tokenizer->advance();
ControlFlowNode *cf_return = alloc_node<ControlFlowNode>();
cf_return->cf_type=ControlFlowNode::CF_RETURN;
if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON || tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
//expect end of statement
p_block->statements.push_back(cf_return);
if (!_end_statement()) {
return;
}
} else {
//expect expression
Node *retexpr = _parse_and_reduce_expression(p_block,p_static);
if (!retexpr)
return;
cf_return->arguments.push_back(retexpr);
p_block->statements.push_back(cf_return);
if (!_end_statement()) {
_set_error("Expected end of statement after return expression.");
return;
}
}
} break;
case GDTokenizer::TK_PR_ASSERT: {
tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
AssertNode *an = alloc_node<AssertNode>();
an->condition=condition;
p_block->statements.push_back(an);
if (!_end_statement()) {
_set_error("Expected end of statement after assert.");
return;
}
} break;
default: {
Node *expression = _parse_and_reduce_expression(p_block,p_static,false,true);
if (!expression)
return;
p_block->statements.push_back(expression);
if (!_end_statement()) {
_set_error("Expected end of statement after expression.");
return;
}
} break;
/*
case GDTokenizer::TK_CF_LOCAL: {
if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
_set_error("Expected ';' or <NewLine>.");
}
tokenizer->advance();
} break;
*/
}
}
}
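// Handles a NEWLINE token: a deeper indent than the current level is an
// error, a shallower one pops entries from tab_level and returns false so the
// caller knows the block has ended; otherwise the newline is consumed.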
bool GDParser::_parse_newline() {
if (tokenizer->get_token(1)!=GDTokenizer::TK_EOF && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {
int indent = tokenizer->get_token_line_indent();
int current_indent = tab_level.back()->get();
if (indent>current_indent) {
_set_error("Unexpected indent.");
return false;
}
if (indent<current_indent) {
while(indent<current_indent) {
//exit block
if (tab_level.size()==1) {
_set_error("Invalid indent. BUG?");
return false;
}
tab_level.pop_back();
if (tab_level.back()->get()<indent) {
_set_error("Unindent does not match any outer indentation level.");
return false;
}
current_indent = tab_level.back()->get();
}
tokenizer->advance();
return false;
}
}
tokenizer->advance();
return true;
}
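// Parses an 'extends' declaration, which must come before any other class
// member: an optional string constant (a script path) optionally followed by
// a '.'-separated chain of identifiers naming the parent class.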
void GDParser::_parse_extends(ClassNode *p_class) {
if (p_class->extends_used) {
_set_error("'extends' already used for this class.");
return;
}
if (!p_class->constant_expressions.empty() || !p_class->subclasses.empty() || !p_class->functions.empty() || !p_class->variables.empty()) {
_set_error("'extends' must be used before anything else.");
return;
}
p_class->extends_used=true;
//see if inheritance happens from a file
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {
Variant constant = tokenizer->get_token_constant();
if (constant.get_type()!=Variant::STRING) {
_set_error("'extends' constant must be a string.");
return;
}
p_class->extends_file=constant;
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD) {
return;
} else
tokenizer->advance();
}
while(true) {
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Invalid 'extends' syntax, expected string constant (path) and/or identifier (parent class).");
return;
}
StringName identifier=tokenizer->get_token_identifier();
p_class->extends_class.push_back(identifier);
tokenizer->advance(1);
if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD)
return;
}
}
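// Parses a class body at the current indent level: 'extends', 'tool', nested
// 'class' definitions, 'static func'/'func' declarations (with optional
// default argument values and implicit parent '_init' call), 'export' hints
// and member variables.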
void GDParser::_parse_class(ClassNode *p_class) {
int indent_level = tab_level.back()->get();
while(true) {
GDTokenizer::Token token = tokenizer->get_token();
if (error_set)
return;
if (indent_level>tab_level.back()->get()) {
p_class->end_line=tokenizer->get_token_line();
return; //go back a level
}
switch(token) {
case GDTokenizer::TK_EOF:
p_class->end_line=tokenizer->get_token_line();
case GDTokenizer::TK_ERROR: {
return; //go back
//end of file!
} break;
case GDTokenizer::TK_NEWLINE: {
if (!_parse_newline()) {
if (!error_set) {
p_class->end_line=tokenizer->get_token_line();
}
return;
}
} break;
case GDTokenizer::TK_PR_EXTENDS: {
_parse_extends(p_class);
if (error_set)
return;
_end_statement();
} break;
case GDTokenizer::TK_PR_TOOL: {
if (p_class->tool) {
_set_error("tool used more than once");
return;
}
p_class->tool=true;
tokenizer->advance();
} break;
case GDTokenizer::TK_PR_CLASS: {
//class inside class :D
StringName name;
StringName extends;
if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
_set_error("'class' syntax: 'class <Name>:' or 'class <Name> extends <BaseClass>:'");
return;
}
name = tokenizer->get_token_identifier(1);
tokenizer->advance(2);
ClassNode *newclass = alloc_node<ClassNode>();
newclass->initializer = alloc_node<BlockNode>();
newclass->name=name;
p_class->subclasses.push_back(newclass);
if (tokenizer->get_token()==GDTokenizer::TK_PR_EXTENDS) {
_parse_extends(newclass);
if (error_set)
return;
}
if (!_enter_indent_block()) {
_set_error("Indented block expected.");
return;
}
_parse_class(newclass);
} break;
/* this is for functions....
case GDTokenizer::TK_CF_PASS: {
tokenizer->advance(1);
} break;
*/
case GDTokenizer::TK_PR_STATIC: {
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
_set_error("Expected 'func'.");
return;
}
}; //fallthrough to function
case GDTokenizer::TK_PR_FUNCTION: {
bool _static=false;
if (tokenizer->get_token(-1)==GDTokenizer::TK_PR_STATIC) {
_static=true;
}
if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier after 'func' (syntax: 'func <identifier>([arguments]):' ).");
return;
}
StringName name = tokenizer->get_token_identifier(1);
for(int i=0;i<p_class->functions.size();i++) {
if (p_class->functions[i]->name==name) {
_set_error("Function '"+String(name)+"' already exists in this class (at line: "+itos(p_class->functions[i]->line)+").");
}
}
for(int i=0;i<p_class->static_functions.size();i++) {
if (p_class->static_functions[i]->name==name) {
_set_error("Function '"+String(name)+"' already exists in this class (at line: "+itos(p_class->static_functions[i]->line)+").");
}
}
tokenizer->advance(2);
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("Expected '(' after identifier (syntax: 'func <identifier>([arguments]):' ).");
return;
}
tokenizer->advance();
Vector<StringName> arguments;
Vector<Node*> default_values;
int fnline = tokenizer->get_token_line();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
//has arguments
bool defaulting=false;
while(true) {
if (tokenizer->get_token()==GDTokenizer::TK_PR_VAR) {
tokenizer->advance(); //var before the identifier is allowed
}
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for argument.");
return;
}
StringName argname=tokenizer->get_token_identifier();
arguments.push_back(argname);
tokenizer->advance();
if (defaulting && tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
_set_error("Default parameter expected.");
return;
}
//tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
defaulting=true;
tokenizer->advance(1);
Node *defval=NULL;
defval=_parse_and_reduce_expression(p_class,_static);
if (!defval || error_set)
return;
OperatorNode *on = alloc_node<OperatorNode>();
on->op=OperatorNode::OP_ASSIGN;
IdentifierNode *in = alloc_node<IdentifierNode>();
in->name=argname;
on->arguments.push_back(in);
on->arguments.push_back(defval);
/* no ..
if (defval->type!=Node::TYPE_CONSTANT) {
_set_error("default argument must be constant");
}
*/
default_values.push_back(on);
}
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
tokenizer->advance();
continue;
} else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ',' or ')'.");
return;
}
break;
}
}
tokenizer->advance();
BlockNode *block = alloc_node<BlockNode>();
if (name=="_init") {
if (p_class->extends_used) {
OperatorNode *cparent = alloc_node<OperatorNode>();
cparent->op=OperatorNode::OP_PARENT_CALL;
block->statements.push_back(cparent);
IdentifierNode *id = alloc_node<IdentifierNode>();
id->name="_init";
cparent->arguments.push_back(id);
if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("expected '(' for parent constructor arguments.");
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
//has arguments
while(true) {
Node *arg = _parse_and_reduce_expression(p_class,_static);
cparent->arguments.push_back(arg);
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
tokenizer->advance();
continue;
} else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ',' or ')'.");
return;
}
break;
}
}
tokenizer->advance();
}
} else {
if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
_set_error("Parent constructor call found for a class without inheritance.");
return;
}
}
}
if (!_enter_indent_block(block)) {
_set_error("Indented block expected.");
return;
}
FunctionNode *function = alloc_node<FunctionNode>();
function->name=name;
function->arguments=arguments;
function->default_values=default_values;
function->_static=_static;
function->line=fnline;
if (_static)
p_class->static_functions.push_back(function);
else
p_class->functions.push_back(function);
_parse_block(block,_static);
function->body=block;
//arguments
} break;
case GDTokenizer::TK_PR_EXPORT: {
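// 'export' parsing. With a parenthesized hint the exported property gets
// extra editor metadata, e.g. (in the script syntax this parser accepts)
//   export(int, "Easy","Normal","Hard") var difficulty   -> enum hint
//   export(int, 0, 100, 1) var health                    -> range hint
//   export(String, FILE, "*.txt") var notes               -> file hint
// A bare 'export var x = <constant>' infers the type from the constant.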
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
Variant::Type type = tokenizer->get_token_type();
if (type==Variant::NIL) {
_set_error("Can't export null type.");
return;
}
current_export.type=type;
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
// hint expected next!
tokenizer->advance();
switch(current_export.type) {
case Variant::INT: {
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
//enumeration
current_export.hint=PROPERTY_HINT_ENUM;
bool first=true;
while(true) {
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
current_export=PropertyInfo();
_set_error("Expected a string constant in enumeration hint.");
}
String c = tokenizer->get_token_constant();
if (!first)
current_export.hint_string+=",";
else
first=false;
current_export.hint_string+=c.xml_escape();
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' in enumeration hint.");
}
tokenizer->advance();
}
break;
}
};
case Variant::REAL: {
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a range in numeric hint.");
}
//enumeration
current_export.hint=PROPERTY_HINT_RANGE;
current_export.hint_string=tokenizer->get_token_constant().operator String();
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
current_export.hint_string="0,"+current_export.hint_string;
break;
}
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ',' or ')' in numeric range hint.");
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a number as upper bound in numeric range hint.");
}
current_export.hint_string+=","+tokenizer->get_token_constant().operator String();
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ',' or ')' in numeric range hint.");
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a number as step in numeric range hint.");
}
current_export.hint_string+=","+tokenizer->get_token_constant().operator String();
tokenizer->advance();
} break;
case Variant::STRING: {
if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
//enumeration
current_export.hint=PROPERTY_HINT_ENUM;
bool first=true;
while(true) {
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
current_export=PropertyInfo();
_set_error("Expected a string constant in enumeration hint.");
}
String c = tokenizer->get_token_constant();
if (!first)
current_export.hint_string+=",";
else
first=false;
current_export.hint_string+=c.xml_escape();
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' in enumeration hint.");
return;
}
tokenizer->advance();
}
break;
}
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="DIR") {
current_export.hint=PROPERTY_HINT_DIR;
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in hint.");
return;
}
break;
}
if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="FILE") {
current_export.hint=PROPERTY_HINT_FILE;
tokenizer->advance();
if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
_set_error("Expected string constant with filter.");
return;
}
current_export.hint_string=tokenizer->get_token_constant();
tokenizer->advance();
}
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in hint.");
return;
}
break;
}
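//The FILE branch above maps the property to a file picker, with an optional wildcard filter.
//Illustrative GDScript (example only): export(String, FILE, "*.txt") var save_path = ""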
} break;
case Variant::COLOR: {
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
current_export=PropertyInfo();
_set_error("Color type hint expects RGB or RGBA as hint.");
return;
}
String identifier = tokenizer->get_token_identifier();
if (identifier=="RGB") {
current_export.hint=PROPERTY_HINT_COLOR_NO_ALPHA;
} else if (identifier=="RGBA") {
//RGBA is the default; no hint needed, alpha stays editable
} else {
current_export=PropertyInfo();
_set_error("Color type hint expects RGB or RGBA as hints");
return;
}
tokenizer->advance();
} break;
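//The COLOR case above only decides whether the editor hides the alpha channel.
//Illustrative GDScript (example only): export(Color, RGB) var tint = Color(1, 1, 1)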
default: {
current_export=PropertyInfo();
_set_error("Type '"+Variant::get_type_name(type)+"' can't take hints.");
return;
} break;
}
}
} else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
String identifier = tokenizer->get_token_identifier();
if (!ObjectTypeDB::is_type(identifier,"Resource")) {
current_export=PropertyInfo();
_set_error("Export hint not a type or resource.");
return;
}
current_export.type=Variant::OBJECT;
current_export.hint=PROPERTY_HINT_RESOURCE_TYPE;
current_export.hint_string=identifier;
tokenizer->advance();
}
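//The identifier branch above exports a Resource-derived type by its class name.
//Illustrative GDScript (example only, assumes Texture is registered as a Resource type):
//  export(Texture) var icon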
if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' after export hint.");
return;
}
tokenizer->advance();
}
if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
current_export=PropertyInfo();
_set_error("Expected 'var'.");
return;
}
}; //fallthrough to var
case GDTokenizer::TK_PR_VAR: {
//variable declaration and (eventual) initialization
ClassNode::Member member;
bool autoexport = tokenizer->get_token(-1)==GDTokenizer::TK_PR_EXPORT;
if (current_export.type!=Variant::NIL) {
member._export=current_export;
current_export=PropertyInfo();
}
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for member variable name.");
return;
}
member.identifier=tokenizer->get_token_identifier();
member._export.name=member.identifier;
tokenizer->advance();
p_class->variables.push_back(member);
if (tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
if (autoexport) {
_set_error("Type-less export needs a constant expression assigned to infer type.");
return;
}
break;
}
#ifdef DEBUG_ENABLED
int line = tokenizer->get_token_line();
#endif
tokenizer->advance();
Node *subexpr=NULL;
subexpr = _parse_and_reduce_expression(p_class,false);
if (!subexpr)
return;
if (autoexport) {
if (subexpr->type==Node::TYPE_ARRAY) {
p_class->variables[p_class->variables.size()-1]._export.type=Variant::ARRAY;
} else if (subexpr->type==Node::TYPE_DICTIONARY) {
p_class->variables[p_class->variables.size()-1]._export.type=Variant::DICTIONARY;
} else {
if (subexpr->type!=Node::TYPE_CONSTANT) {
_set_error("Type-less export needs a constant expression assigned to infer type.");
return;
}
ConstantNode *cn = static_cast<ConstantNode*>(subexpr);
if (cn->value.get_type()==Variant::NIL) {
_set_error("Can't accept a null constant expression for infering export type.");
return;
}
p_class->variables[p_class->variables.size()-1]._export.type=cn->value.get_type();
}
}
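//Type-less export ("autoexport") above infers the export type from the assigned
//constant expression. Illustrative GDScript (example only, not part of this file):
//  export var speed = 10.0    # inferred as Variant::REAL
//  export var tags = []       # inferred as Variant::ARRAY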
#ifdef TOOLS_ENABLED
if (subexpr->type==Node::TYPE_CONSTANT && p_class->variables[p_class->variables.size()-1]._export.type!=Variant::NIL) {
ConstantNode *cn = static_cast<ConstantNode*>(subexpr);
if (cn->value.get_type()!=Variant::NIL) {
p_class->variables[p_class->variables.size()-1].default_value=cn->value;
}
}
#endif
IdentifierNode *id = alloc_node<IdentifierNode>();
id->name=member.identifier;
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_ASSIGN;
op->arguments.push_back(id);
op->arguments.push_back(subexpr);
#ifdef DEBUG_ENABLED
NewLineNode *nl = alloc_node<NewLineNode>();
nl->line=line;
p_class->initializer->statements.push_back(nl);
#endif
p_class->initializer->statements.push_back(op);
_end_statement();
} break;
case GDTokenizer::TK_PR_CONST: {
//constant declaration and initialization
ClassNode::Constant constant;
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected name (identifier) for constant.");
return;
}
constant.identifier=tokenizer->get_token_identifier();
tokenizer->advance();
if (tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
_set_error("Constant expects assignment.");
return;
}
tokenizer->advance();
Node *subexpr=NULL;
subexpr = _parse_and_reduce_expression(p_class,true,true);
if (!subexpr)
return;
if (subexpr->type!=Node::TYPE_CONSTANT) {
_set_error("Expected constant expression.");
return;
}
constant.expression=subexpr;
p_class->constant_expressions.push_back(constant);
_end_statement();
} break;
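//The TK_PR_CONST case above registers a class-level constant.
//Illustrative GDScript (example only): const MAX_SPEED = 100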
default: {
_set_error(String()+"Unexpected token: "+tokenizer->get_token_name(tokenizer->get_token())+":"+tokenizer->get_token_identifier());
return;
} break;
}
}
}
void GDParser::_set_error(const String& p_error, int p_line, int p_column) {
if (error_set)
return; //allow no further errors
error=p_error;
error_line=p_line<0?tokenizer->get_token_line():p_line;
error_column=p_column<0?tokenizer->get_token_column():p_column;
error_set=true;
}
String GDParser::get_error() const {
return error;
}
int GDParser::get_error_line() const {
return error_line;
}
int GDParser::get_error_column() const {
return error_column;
}
Error GDParser::_parse(const String& p_base_path) {
base_path=p_base_path;
clear();
//assume class
ClassNode *main_class = alloc_node<ClassNode>();
main_class->initializer = alloc_node<BlockNode>();
_parse_class(main_class);
if (tokenizer->get_token()==GDTokenizer::TK_ERROR) {
error_set=false;
_set_error("Parse Error: "+tokenizer->get_token_error());
}
if (error_set) {
return ERR_PARSE_ERROR;
}
return OK;
}
Error GDParser::parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p_base_path) {
GDTokenizerBuffer *tb = memnew( GDTokenizerBuffer );
tb->set_code_buffer(p_bytecode);
tokenizer=tb;
Error ret = _parse(p_base_path);
memdelete(tb);
tokenizer=NULL;
return ret;
}
Error GDParser::parse(const String& p_code,const String& p_base_path) {
GDTokenizerText *tt = memnew( GDTokenizerText );
tt->set_code(p_code);
tokenizer=tt;
Error ret = _parse(p_base_path);
memdelete(tt);
tokenizer=NULL;
return ret;
}
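//Typical use of the parser (illustrative sketch only, not code from this file;
//error handling simplified):
//
//  GDParser parser;
//  Error err = parser.parse(source_code, base_path);
//  if (err != OK) {
//      print_line("Parse error at line " + itos(parser.get_error_line()) + ": " + parser.get_error());
//  } else {
//      const GDParser::Node *root = parser.get_parse_tree();
//      // walk the tree, e.g. for compilation or analysis
//  }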
const GDParser::Node *GDParser::get_parse_tree() const {
return head;
}
void GDParser::clear() {
while(list) {
Node *l=list;
list=list->next;
memdelete(l);
}
head=NULL;
list=NULL;
error_set=false;
tab_level.clear();
tab_level.push_back(0);
error_line=0;
error_column=0;
current_export.type=Variant::NIL;
error="";
}
GDParser::GDParser() {
head=NULL;
list=NULL;
tokenizer=NULL;
clear();
}
GDParser::~GDParser() {
clear();
}