summaryrefslogtreecommitdiff
path: root/modules/gdscript
diff options
context:
space:
mode:
Diffstat (limited to 'modules/gdscript')
-rw-r--r--modules/gdscript/gd_editor.cpp2
-rw-r--r--modules/gdscript/gd_parser.cpp585
-rw-r--r--modules/gdscript/gd_parser.h7
-rw-r--r--modules/gdscript/gd_script.cpp79
-rw-r--r--modules/gdscript/gd_script.h1
-rw-r--r--modules/gdscript/gd_tokenizer.cpp409
-rw-r--r--modules/gdscript/gd_tokenizer.h86
-rw-r--r--modules/gdscript/register_types.cpp56
8 files changed, 899 insertions, 326 deletions
diff --git a/modules/gdscript/gd_editor.cpp b/modules/gdscript/gd_editor.cpp
index c10cadf83f..f8717c292f 100644
--- a/modules/gdscript/gd_editor.cpp
+++ b/modules/gdscript/gd_editor.cpp
@@ -106,7 +106,7 @@ bool GDScriptLanguage::has_named_classes() const {
int GDScriptLanguage::find_function(const String& p_function,const String& p_code) const {
- GDTokenizer tokenizer;
+ GDTokenizerText tokenizer;
tokenizer.set_code(p_code);
int indent=0;
while(tokenizer.get_token()!=GDTokenizer::TK_EOF && tokenizer.get_token()!=GDTokenizer::TK_ERROR) {
diff --git a/modules/gdscript/gd_parser.cpp b/modules/gdscript/gd_parser.cpp
index e558ceb416..fb4f56aa8f 100644
--- a/modules/gdscript/gd_parser.cpp
+++ b/modules/gdscript/gd_parser.cpp
@@ -54,18 +54,18 @@ T* GDParser::alloc_node() {
if (!head)
head=t;
- t->line=tokenizer.get_token_line();
- t->column=tokenizer.get_token_column();
+ t->line=tokenizer->get_token_line();
+ t->column=tokenizer->get_token_column();
return t;
}
bool GDParser::_end_statement() {
- if (tokenizer.get_token()==GDTokenizer::TK_SEMICOLON) {
- tokenizer.advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON) {
+ tokenizer->advance();
return true; //handle next
- } else if (tokenizer.get_token()==GDTokenizer::TK_NEWLINE || tokenizer.get_token()==GDTokenizer::TK_EOF) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
return true; //will be handled properly
}
@@ -75,14 +75,14 @@ bool GDParser::_end_statement() {
bool GDParser::_enter_indent_block(BlockNode* p_block) {
- if (tokenizer.get_token()!=GDTokenizer::TK_COLON) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_COLON) {
_set_error("':' expected at end of line.");
return false;
}
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_NEWLINE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {
_set_error("newline expected after ':'.");
return false;
@@ -90,35 +90,35 @@ bool GDParser::_enter_indent_block(BlockNode* p_block) {
while(true) {
- if (tokenizer.get_token()!=GDTokenizer::TK_NEWLINE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_NEWLINE) {
return false; //wtf
- } else if (tokenizer.get_token(1)!=GDTokenizer::TK_NEWLINE) {
+ } else if (tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {
- int indent = tokenizer.get_token_line_indent();
+ int indent = tokenizer->get_token_line_indent();
int current = tab_level.back()->get();
if (indent<=current)
return false;
tab_level.push_back(indent);
- tokenizer.advance();
+ tokenizer->advance();
return true;
} else if (p_block) {
NewLineNode *nl = alloc_node<NewLineNode>();
- nl->line=tokenizer.get_token_line();
+ nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
}
- tokenizer.advance(); // go to next newline
+ tokenizer->advance(); // go to next newline
}
}
bool GDParser::_parse_arguments(Node* p_parent,Vector<Node*>& p_args,bool p_static) {
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
- tokenizer.advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ tokenizer->advance();
} else {
while(true) {
@@ -130,19 +130,19 @@ bool GDParser::_parse_arguments(Node* p_parent,Vector<Node*>& p_args,bool p_stat
p_args.push_back(arg);
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
- tokenizer.advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ tokenizer->advance();
break;
- } else if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
- if (tokenizer.get_token(1)==GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expression expected");
return false;
}
- tokenizer.advance();
+ tokenizer->advance();
} else {
// something is broken
_set_error("Expected ',' or ')'");
@@ -174,45 +174,45 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
/* Parse Operand */
/*****************/
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
//subexpression ()
- tokenizer.advance();
+ tokenizer->advance();
Node* subexpr = _parse_expression(p_parent,p_static);
if (!subexpr)
return NULL;
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in expression");
return NULL;
}
- tokenizer.advance();
+ tokenizer->advance();
expr=subexpr;
- } else if (tokenizer.get_token()==GDTokenizer::TK_CONSTANT) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {
//constant defined by tokenizer
ConstantNode *constant = alloc_node<ConstantNode>();
- constant->value=tokenizer.get_token_constant();
- tokenizer.advance();
+ constant->value=tokenizer->get_token_constant();
+ tokenizer->advance();
expr=constant;
- } else if (tokenizer.get_token()==GDTokenizer::TK_PR_PRELOAD) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_PR_PRELOAD) {
//constant defined by tokenizer
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("Expected '(' after 'preload'");
return NULL;
}
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || tokenizer.get_token_constant().get_type()!=Variant::STRING) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
_set_error("Expected string constant as 'preload' argument.");
return NULL;
}
- String path = tokenizer.get_token_constant();
+ String path = tokenizer->get_token_constant();
if (!path.is_abs_path() && base_path!="")
path=base_path+"/"+path;
@@ -222,20 +222,20 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
return NULL;
}
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' after 'preload' path");
return NULL;
}
ConstantNode *constant = alloc_node<ConstantNode>();
constant->value=res;
- tokenizer.advance();
+ tokenizer->advance();
expr=constant;
- } else if (tokenizer.get_token()==GDTokenizer::TK_SELF) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_SELF) {
if (p_static) {
_set_error("'self'' not allowed in static function or constant expression");
@@ -243,18 +243,18 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
}
//constant defined by tokenizer
SelfNode *self = alloc_node<SelfNode>();
- tokenizer.advance();
+ tokenizer->advance();
expr=self;
- } else if (tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_TYPE && tokenizer.get_token(1)==GDTokenizer::TK_PERIOD) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE && tokenizer->get_token(1)==GDTokenizer::TK_PERIOD) {
- Variant::Type bi_type = tokenizer.get_token_type();
- tokenizer.advance(2);
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ Variant::Type bi_type = tokenizer->get_token_type();
+ tokenizer->advance(2);
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Built-in type constant expected after '.'");
return NULL;
}
- StringName identifier = tokenizer.get_token_identifier();
+ StringName identifier = tokenizer->get_token_identifier();
if (!Variant::has_numeric_constant(bi_type,identifier)) {
_set_error("Static constant '"+identifier.operator String()+"' not present in built-in type "+Variant::get_type_name(bi_type)+".");
@@ -264,23 +264,23 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
ConstantNode *cn = alloc_node<ConstantNode>();
cn->value=Variant::get_numeric_constant_value(bi_type,identifier);
expr=cn;
- tokenizer.advance();
+ tokenizer->advance();
- } else if (tokenizer.get_token(1)==GDTokenizer::TK_PARENTHESIS_OPEN && (tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_TYPE || tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER || tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_FUNC)) {
+ } else if (tokenizer->get_token(1)==GDTokenizer::TK_PARENTHESIS_OPEN && (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE || tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER || tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC)) {
//function or constructor
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_CALL;
- if (tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
+ if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
TypeNode *tn = alloc_node<TypeNode>();
- tn->vtype=tokenizer.get_token_type();
+ tn->vtype=tokenizer->get_token_type();
op->arguments.push_back(tn);
- } else if (tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_FUNC) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_FUNC) {
BuiltInFunctionNode *bn = alloc_node<BuiltInFunctionNode>();
- bn->function=tokenizer.get_token_built_in_func();
+ bn->function=tokenizer->get_token_built_in_func();
op->arguments.push_back(bn);
} else {
@@ -288,25 +288,25 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
op->arguments.push_back(self);
IdentifierNode* id = alloc_node<IdentifierNode>();
- id->name=tokenizer.get_token_identifier();
+ id->name=tokenizer->get_token_identifier();
op->arguments.push_back(id);
}
- tokenizer.advance(2);
+ tokenizer->advance(2);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
expr=op;
- } else if (tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
//identifier (reference)
IdentifierNode *id = alloc_node<IdentifierNode>();
- id->name=tokenizer.get_token_identifier();
- tokenizer.advance();
+ id->name=tokenizer->get_token_identifier();
+ tokenizer->advance();
expr=id;
- } else if (/*tokenizer.get_token()==GDTokenizer::TK_OP_ADD ||*/ tokenizer.get_token()==GDTokenizer::TK_OP_SUB || tokenizer.get_token()==GDTokenizer::TK_OP_NOT || tokenizer.get_token()==GDTokenizer::TK_OP_BIT_INVERT) {
+ } else if (/*tokenizer->get_token()==GDTokenizer::TK_OP_ADD ||*/ tokenizer->get_token()==GDTokenizer::TK_OP_SUB || tokenizer->get_token()==GDTokenizer::TK_OP_NOT || tokenizer->get_token()==GDTokenizer::TK_OP_BIT_INVERT) {
//single prefix operators like !expr -expr ++expr --expr
OperatorNode *op = alloc_node<OperatorNode>();
@@ -314,7 +314,7 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
Expression e;
e.is_op=true;
- switch(tokenizer.get_token()) {
+ switch(tokenizer->get_token()) {
case GDTokenizer::TK_OP_SUB: e.op=OperatorNode::OP_NEG; break;
case GDTokenizer::TK_OP_NOT: e.op=OperatorNode::OP_NOT; break;
case GDTokenizer::TK_OP_BIT_INVERT: e.op=OperatorNode::OP_BIT_INVERT;; break;
@@ -322,9 +322,9 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
}
- tokenizer.advance();
+ tokenizer->advance();
- if (e.op!=OperatorNode::OP_NOT && tokenizer.get_token()==GDTokenizer::TK_OP_NOT) {
+ if (e.op!=OperatorNode::OP_NOT && tokenizer->get_token()==GDTokenizer::TK_OP_NOT) {
_set_error("Misplaced 'not'.");
return NULL;
}
@@ -339,34 +339,34 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
op->arguments.push_back(subexpr);
expr=op;*/
- } else if (tokenizer.get_token()==GDTokenizer::TK_BRACKET_OPEN) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
// array
- tokenizer.advance();
+ tokenizer->advance();
ArrayNode *arr = alloc_node<ArrayNode>();
bool expecting_comma=false;
while(true) {
- if (tokenizer.get_token()==GDTokenizer::TK_EOF) {
+ if (tokenizer->get_token()==GDTokenizer::TK_EOF) {
_set_error("Unterminated array");
return NULL;
- } else if (tokenizer.get_token()==GDTokenizer::TK_BRACKET_CLOSE) {
- tokenizer.advance();
+ } else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_CLOSE) {
+ tokenizer->advance();
break;
- } else if (tokenizer.get_token()==GDTokenizer::TK_NEWLINE) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
- tokenizer.advance(); //ignore newline
- } else if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
+ tokenizer->advance(); //ignore newline
+ } else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
if (!expecting_comma) {
_set_error("expression or ']' expected");
return NULL;
}
expecting_comma=false;
- tokenizer.advance(); //ignore newline
+ tokenizer->advance(); //ignore newline
} else {
//parse expression
if (expecting_comma) {
@@ -382,9 +382,9 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
}
expr=arr;
- } else if (tokenizer.get_token()==GDTokenizer::TK_CURLY_BRACKET_OPEN) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_OPEN) {
// array
- tokenizer.advance();
+ tokenizer->advance();
DictionaryNode *dict = alloc_node<DictionaryNode>();
@@ -403,12 +403,12 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
while(true) {
- if (tokenizer.get_token()==GDTokenizer::TK_EOF) {
+ if (tokenizer->get_token()==GDTokenizer::TK_EOF) {
_set_error("Unterminated dictionary");
return NULL;
- } else if (tokenizer.get_token()==GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_CURLY_BRACKET_CLOSE) {
if (expecting==DICT_EXPECT_COLON) {
_set_error("':' expected");
@@ -418,12 +418,12 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
_set_error("value expected");
return NULL;
}
- tokenizer.advance();
+ tokenizer->advance();
break;
- } else if (tokenizer.get_token()==GDTokenizer::TK_NEWLINE) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
- tokenizer.advance(); //ignore newline
- } else if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
+ tokenizer->advance(); //ignore newline
+ } else if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
if (expecting==DICT_EXPECT_KEY) {
_set_error("key or '}' expected");
@@ -439,9 +439,9 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
}
expecting=DICT_EXPECT_KEY;
- tokenizer.advance(); //ignore newline
+ tokenizer->advance(); //ignore newline
- } else if (tokenizer.get_token()==GDTokenizer::TK_COLON) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_COLON) {
if (expecting==DICT_EXPECT_KEY) {
_set_error("key or '}' expected");
@@ -457,7 +457,7 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
}
expecting=DICT_EXPECT_VALUE;
- tokenizer.advance(); //ignore newline
+ tokenizer->advance(); //ignore newline
} else {
if (expecting==DICT_EXPECT_COMMA) {
@@ -471,12 +471,12 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
if (expecting==DICT_EXPECT_KEY) {
- if (tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer.get_token(1)==GDTokenizer::TK_OP_ASSIGN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)==GDTokenizer::TK_OP_ASSIGN) {
//lua style identifier, easier to write
ConstantNode *cn = alloc_node<ConstantNode>();
- cn->value = tokenizer.get_token_identifier();
+ cn->value = tokenizer->get_token_identifier();
key = cn;
- tokenizer.advance(2);
+ tokenizer->advance(2);
expecting=DICT_EXPECT_VALUE;
} else {
//python/js style more flexible
@@ -506,10 +506,10 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
expr=dict;
- } else if (tokenizer.get_token()==GDTokenizer::TK_PERIOD && tokenizer.get_token(1)==GDTokenizer::TK_IDENTIFIER && tokenizer.get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_PERIOD && tokenizer->get_token(1)==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
// parent call
- tokenizer.advance(); //goto identifier
+ tokenizer->advance(); //goto identifier
OperatorNode *op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_PARENT_CALL;
@@ -519,10 +519,10 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
forbidden for now */
IdentifierNode* id = alloc_node<IdentifierNode>();
- id->name=tokenizer.get_token_identifier();
+ id->name=tokenizer->get_token_identifier();
op->arguments.push_back(id);
- tokenizer.advance(2);
+ tokenizer->advance(2);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
@@ -534,7 +534,7 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
print_line("found bug?");
- _set_error("Error parsing expression, misplaced: "+String(tokenizer.get_token_name(tokenizer.get_token())));
+ _set_error("Error parsing expression, misplaced: "+String(tokenizer->get_token_name(tokenizer->get_token())));
return NULL; //nothing
}
@@ -553,31 +553,31 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
//expressions can be indexed any number of times
- if (tokenizer.get_token()==GDTokenizer::TK_PERIOD) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
//indexing using "."
- if (tokenizer.get_token(1)!=GDTokenizer::TK_IDENTIFIER && tokenizer.get_token(1)!=GDTokenizer::TK_BUILT_IN_FUNC ) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER && tokenizer->get_token(1)!=GDTokenizer::TK_BUILT_IN_FUNC ) {
_set_error("Expected identifier as member");
return NULL;
- } else if (tokenizer.get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
+ } else if (tokenizer->get_token(2)==GDTokenizer::TK_PARENTHESIS_OPEN) {
//call!!
OperatorNode * op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_CALL;
IdentifierNode * id = alloc_node<IdentifierNode>();
- if (tokenizer.get_token(1)==GDTokenizer::TK_BUILT_IN_FUNC ) {
+ if (tokenizer->get_token(1)==GDTokenizer::TK_BUILT_IN_FUNC ) {
//small hack so built in funcs don't obfuscate methods
- id->name=GDFunctions::get_func_name(tokenizer.get_token_built_in_func(1));
+ id->name=GDFunctions::get_func_name(tokenizer->get_token_built_in_func(1));
} else {
- id->name=tokenizer.get_token_identifier(1);
+ id->name=tokenizer->get_token_identifier(1);
}
op->arguments.push_back(expr); // call what
op->arguments.push_back(id); // call func
//get arguments
- tokenizer.advance(3);
+ tokenizer->advance(3);
if (!_parse_arguments(op,op->arguments,p_static))
return NULL;
expr=op;
@@ -588,36 +588,36 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
op->op=OperatorNode::OP_INDEX_NAMED;
IdentifierNode * id = alloc_node<IdentifierNode>();
- id->name=tokenizer.get_token_identifier(1);
+ id->name=tokenizer->get_token_identifier(1);
op->arguments.push_back(expr);
op->arguments.push_back(id);
expr=op;
- tokenizer.advance(2);
+ tokenizer->advance(2);
}
- } else if (tokenizer.get_token()==GDTokenizer::TK_BRACKET_OPEN) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_BRACKET_OPEN) {
//indexing using "[]"
OperatorNode * op = alloc_node<OperatorNode>();
op->op=OperatorNode::OP_INDEX;
- tokenizer.advance(1);
+ tokenizer->advance(1);
Node *subexpr = _parse_expression(op,p_static);
if (!subexpr) {
return NULL;
}
- if (tokenizer.get_token()!=GDTokenizer::TK_BRACKET_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_BRACKET_CLOSE) {
_set_error("Expected ']'");
return NULL;
}
op->arguments.push_back(expr);
op->arguments.push_back(subexpr);
- tokenizer.advance(1);
+ tokenizer->advance(1);
expr=op;
} else
@@ -641,7 +641,7 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
//assign, if allowed is only alowed on the first operator
#define _VALIDATE_ASSIGN if (!p_allow_assign) { _set_error("Unexpected assign."); return NULL; } p_allow_assign=false;
- switch(tokenizer.get_token()) { //see operator
+ switch(tokenizer->get_token()) { //see operator
case GDTokenizer::TK_OP_IN: op=OperatorNode::OP_IN; break;
case GDTokenizer::TK_OP_EQUAL: op=OperatorNode::OP_EQUAL ; break;
@@ -682,7 +682,7 @@ GDParser::Node* GDParser::_parse_expression(Node *p_parent,bool p_static,bool p_
e.is_op=true;
e.op=op;
expression.push_back(e);
- tokenizer.advance();
+ tokenizer->advance();
} else {
break;
}
@@ -1190,18 +1190,18 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
NewLineNode *nl = alloc_node<NewLineNode>();
- nl->line=tokenizer.get_token_line();
+ nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
#endif
while(true) {
- GDTokenizer::Token token = tokenizer.get_token();
+ GDTokenizer::Token token = tokenizer->get_token();
if (error_set)
return;
if (indent_level>tab_level.back()->get()) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return; //go back a level
}
@@ -1209,7 +1209,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
case GDTokenizer::TK_EOF:
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
case GDTokenizer::TK_ERROR: {
return; //go back
@@ -1219,38 +1219,38 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
case GDTokenizer::TK_NEWLINE: {
NewLineNode *nl = alloc_node<NewLineNode>();
- nl->line=tokenizer.get_token_line();
+ nl->line=tokenizer->get_token_line();
p_block->statements.push_back(nl);
if (!_parse_newline()) {
if (!error_set) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
}
return;
}
} break;
case GDTokenizer::TK_CF_PASS: {
- if (tokenizer.get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer.get_token(1)!=GDTokenizer::TK_NEWLINE ) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
_set_error("Expected ';' or <NewLine>.");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
} break;
case GDTokenizer::TK_PR_VAR: {
//variale declaration and (eventual) initialization
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for local variable name.");
return;
}
- StringName n = tokenizer.get_token_identifier();
- tokenizer.advance();
+ StringName n = tokenizer->get_token_identifier();
+ tokenizer->advance();
p_block->variables.push_back(n); //line?
- p_block->variable_lines.push_back(tokenizer.get_token_line());
+ p_block->variable_lines.push_back(tokenizer->get_token_line());
//must know when the local variable is declared
@@ -1260,9 +1260,9 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
Node *assigned=NULL;
- if (tokenizer.get_token()==GDTokenizer::TK_OP_ASSIGN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
- tokenizer.advance();
+ tokenizer->advance();
Node *subexpr=NULL;
subexpr = _parse_and_reduce_expression(p_block,p_static);
@@ -1294,7 +1294,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_IF: {
- tokenizer.advance();
+ tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
@@ -1308,7 +1308,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
p_block->sub_blocks.push_back(cf_if->body);
if (!_enter_indent_block(cf_if->body)) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
@@ -1319,16 +1319,16 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
while(true) {
- while(tokenizer.get_token()==GDTokenizer::TK_NEWLINE) {
- tokenizer.advance();
+ while(tokenizer->get_token()==GDTokenizer::TK_NEWLINE) {
+ tokenizer->advance();
}
if (tab_level.back()->get() < indent_level) { //not at current indent level
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
- if (tokenizer.get_token()==GDTokenizer::TK_CF_ELIF) {
+ if (tokenizer->get_token()==GDTokenizer::TK_CF_ELIF) {
if (tab_level.back()->get() > indent_level) {
@@ -1336,7 +1336,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
return;
}
- tokenizer.advance();
+ tokenizer->advance();
cf_if->body_else=alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body_else);
@@ -1358,7 +1358,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
if (!_enter_indent_block(cf_if->body)) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
@@ -1367,7 +1367,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
return;
- } else if (tokenizer.get_token()==GDTokenizer::TK_CF_ELSE) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_CF_ELSE) {
if (tab_level.back()->get() > indent_level) {
@@ -1376,12 +1376,12 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
}
- tokenizer.advance();
+ tokenizer->advance();
cf_if->body_else=alloc_node<BlockNode>();
p_block->sub_blocks.push_back(cf_if->body_else);
if (!_enter_indent_block(cf_if->body_else)) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
_parse_block(cf_if->body_else,p_static);
@@ -1400,7 +1400,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_WHILE: {
- tokenizer.advance();
+ tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
@@ -1414,7 +1414,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
p_block->sub_blocks.push_back(cf_while->body);
if (!_enter_indent_block(cf_while->body)) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
@@ -1425,24 +1425,24 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_FOR: {
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("identifier expected after 'for'");
}
IdentifierNode *id = alloc_node<IdentifierNode>();
- id->name=tokenizer.get_token_identifier();
+ id->name=tokenizer->get_token_identifier();
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_OP_IN) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_OP_IN) {
_set_error("'in' expected after identifier");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
Node *container = _parse_and_reduce_expression(p_block,p_static);
if (!container)
@@ -1458,7 +1458,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
p_block->sub_blocks.push_back(cf_for->body);
if (!_enter_indent_block(cf_for->body)) {
- p_block->end_line=tokenizer.get_token_line();
+ p_block->end_line=tokenizer->get_token_line();
return;
}
@@ -1469,7 +1469,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_CONTINUE: {
- tokenizer.advance();
+ tokenizer->advance();
ControlFlowNode *cf_continue = alloc_node<ControlFlowNode>();
cf_continue->cf_type=ControlFlowNode::CF_CONTINUE;
p_block->statements.push_back(cf_continue);
@@ -1480,7 +1480,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_BREAK: {
- tokenizer.advance();
+ tokenizer->advance();
ControlFlowNode *cf_break = alloc_node<ControlFlowNode>();
cf_break->cf_type=ControlFlowNode::CF_BREAK;
p_block->statements.push_back(cf_break);
@@ -1491,13 +1491,13 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_CF_RETURN: {
- tokenizer.advance();
+ tokenizer->advance();
ControlFlowNode *cf_return = alloc_node<ControlFlowNode>();
cf_return->cf_type=ControlFlowNode::CF_RETURN;
- if (tokenizer.get_token()==GDTokenizer::TK_SEMICOLON || tokenizer.get_token()==GDTokenizer::TK_NEWLINE || tokenizer.get_token()==GDTokenizer::TK_EOF) {
+ if (tokenizer->get_token()==GDTokenizer::TK_SEMICOLON || tokenizer->get_token()==GDTokenizer::TK_NEWLINE || tokenizer->get_token()==GDTokenizer::TK_EOF) {
//expect end of statement
p_block->statements.push_back(cf_return);
if (!_end_statement()) {
@@ -1520,7 +1520,7 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
} break;
case GDTokenizer::TK_PR_ASSERT: {
- tokenizer.advance();
+ tokenizer->advance();
Node *condition = _parse_and_reduce_expression(p_block,p_static);
if (!condition)
return;
@@ -1548,11 +1548,11 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
/*
case GDTokenizer::TK_CF_LOCAL: {
- if (tokenizer.get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer.get_token(1)!=GDTokenizer::TK_NEWLINE ) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_SEMICOLON && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE ) {
_set_error("Expected ';' or <NewLine>.");
}
- tokenizer.advance();
+ tokenizer->advance();
} break;
*/
@@ -1563,9 +1563,9 @@ void GDParser::_parse_block(BlockNode *p_block,bool p_static) {
bool GDParser::_parse_newline() {
- if (tokenizer.get_token(1)!=GDTokenizer::TK_EOF && tokenizer.get_token(1)!=GDTokenizer::TK_NEWLINE) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_EOF && tokenizer->get_token(1)!=GDTokenizer::TK_NEWLINE) {
- int indent = tokenizer.get_token_line_indent();
+ int indent = tokenizer->get_token_line_indent();
int current_indent = tab_level.back()->get();
if (indent>current_indent) {
@@ -1593,12 +1593,12 @@ bool GDParser::_parse_newline() {
current_indent = tab_level.back()->get();
}
- tokenizer.advance();
+ tokenizer->advance();
return false;
}
}
- tokenizer.advance();
+ tokenizer->advance();
return true;
}
@@ -1622,11 +1622,11 @@ void GDParser::_parse_extends(ClassNode *p_class) {
p_class->extends_used=true;
//see if inheritance happens from a file
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_CONSTANT) {
+ if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT) {
- Variant constant = tokenizer.get_token_constant();
+ Variant constant = tokenizer->get_token_constant();
if (constant.get_type()!=Variant::STRING) {
_set_error("'extends' constant must be a string.");
@@ -1634,27 +1634,27 @@ void GDParser::_parse_extends(ClassNode *p_class) {
}
p_class->extends_file=constant;
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PERIOD) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD) {
return;
} else
- tokenizer.advance();
+ tokenizer->advance();
}
while(true) {
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Invalid 'extends' syntax, expected string constant (path) and/or identifier (parent class).");
return;
}
- StringName identifier=tokenizer.get_token_identifier();
+ StringName identifier=tokenizer->get_token_identifier();
p_class->extends_class.push_back(identifier);
- tokenizer.advance(1);
- if (tokenizer.get_token()!=GDTokenizer::TK_PERIOD)
+ tokenizer->advance(1);
+ if (tokenizer->get_token()!=GDTokenizer::TK_PERIOD)
return;
}
@@ -1666,19 +1666,19 @@ void GDParser::_parse_class(ClassNode *p_class) {
while(true) {
- GDTokenizer::Token token = tokenizer.get_token();
+ GDTokenizer::Token token = tokenizer->get_token();
if (error_set)
return;
if (indent_level>tab_level.back()->get()) {
- p_class->end_line=tokenizer.get_token_line();
+ p_class->end_line=tokenizer->get_token_line();
return; //go back a level
}
switch(token) {
case GDTokenizer::TK_EOF:
- p_class->end_line=tokenizer.get_token_line();
+ p_class->end_line=tokenizer->get_token_line();
case GDTokenizer::TK_ERROR: {
return; //go back
//end of file!
@@ -1686,7 +1686,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
case GDTokenizer::TK_NEWLINE: {
if (!_parse_newline()) {
if (!error_set) {
- p_class->end_line=tokenizer.get_token_line();
+ p_class->end_line=tokenizer->get_token_line();
}
return;
}
@@ -1709,7 +1709,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
}
p_class->tool=true;
- tokenizer.advance();
+ tokenizer->advance();
} break;
case GDTokenizer::TK_PR_CLASS: {
@@ -1718,13 +1718,13 @@ void GDParser::_parse_class(ClassNode *p_class) {
StringName name;
StringName extends;
- if (tokenizer.get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
_set_error("'class' syntax: 'class <Name>:' or 'class <Name> extends <BaseClass>:'");
return;
}
- name = tokenizer.get_token_identifier(1);
- tokenizer.advance(2);
+ name = tokenizer->get_token_identifier(1);
+ tokenizer->advance(2);
ClassNode *newclass = alloc_node<ClassNode>();
newclass->initializer = alloc_node<BlockNode>();
@@ -1733,7 +1733,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
p_class->subclasses.push_back(newclass);
- if (tokenizer.get_token()==GDTokenizer::TK_PR_EXTENDS) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PR_EXTENDS) {
_parse_extends(newclass);
if (error_set)
@@ -1751,12 +1751,12 @@ void GDParser::_parse_class(ClassNode *p_class) {
/* this is for functions....
case GDTokenizer::TK_CF_PASS: {
- tokenizer.advance(1);
+ tokenizer->advance(1);
} break;
*/
case GDTokenizer::TK_PR_STATIC: {
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PR_FUNCTION) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_PR_FUNCTION) {
_set_error("Expected 'func'.");
return;
@@ -1767,19 +1767,19 @@ void GDParser::_parse_class(ClassNode *p_class) {
bool _static=false;
- if (tokenizer.get_token(-1)==GDTokenizer::TK_PR_STATIC) {
+ if (tokenizer->get_token(-1)==GDTokenizer::TK_PR_STATIC) {
_static=true;
}
- if (tokenizer.get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
+ if (tokenizer->get_token(1)!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier after 'func' (syntax: 'func <identifier>([arguments]):' ).");
return;
}
- StringName name = tokenizer.get_token_identifier(1);
+ StringName name = tokenizer->get_token_identifier(1);
for(int i=0;i<p_class->functions.size();i++) {
if (p_class->functions[i]->name==name) {
@@ -1791,56 +1791,56 @@ void GDParser::_parse_class(ClassNode *p_class) {
_set_error("Function '"+String(name)+"' already exists in this class (at line: "+itos(p_class->static_functions[i]->line)+").");
}
}
- tokenizer.advance(2);
+ tokenizer->advance(2);
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("Expected '(' after identifier (syntax: 'func <identifier>([arguments]):' ).");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
Vector<StringName> arguments;
Vector<Node*> default_values;
- int fnline = tokenizer.get_token_line();
+ int fnline = tokenizer->get_token_line();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
//has arguments
bool defaulting=false;
while(true) {
- if (tokenizer.get_token()==GDTokenizer::TK_PR_VAR) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PR_VAR) {
- tokenizer.advance(); //var before the identifier is allowed
+ tokenizer->advance(); //var before the identifier is allowed
}
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for argument.");
return;
}
- StringName argname=tokenizer.get_token_identifier();
+ StringName argname=tokenizer->get_token_identifier();
arguments.push_back(argname);
- tokenizer.advance();
+ tokenizer->advance();
- if (defaulting && tokenizer.get_token()!=GDTokenizer::TK_OP_ASSIGN) {
+ if (defaulting && tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
_set_error("Default parameter expected.");
return;
}
- //tokenizer.advance();
+ //tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_OP_ASSIGN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_OP_ASSIGN) {
defaulting=true;
- tokenizer.advance(1);
+ tokenizer->advance(1);
Node *defval=NULL;
defval=_parse_and_reduce_expression(p_class,_static);
@@ -1864,10 +1864,10 @@ void GDParser::_parse_class(ClassNode *p_class) {
default_values.push_back(on);
}
- if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
- tokenizer.advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
+ tokenizer->advance();
continue;
- } else if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ } else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ',' or ')'.");
return;
@@ -1879,7 +1879,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
}
- tokenizer.advance();
+ tokenizer->advance();
BlockNode *block = alloc_node<BlockNode>();
@@ -1895,24 +1895,24 @@ void GDParser::_parse_class(ClassNode *p_class) {
id->name="_init";
cparent->arguments.push_back(id);
- if (tokenizer.get_token()==GDTokenizer::TK_PERIOD) {
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_OPEN) {
_set_error("expected '(' for parent constructor arguments.");
}
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
//has arguments
while(true) {
Node *arg = _parse_and_reduce_expression(p_class,_static);
cparent->arguments.push_back(arg);
- if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
- tokenizer.advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
+ tokenizer->advance();
continue;
- } else if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ } else if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ',' or ')'.");
return;
@@ -1923,12 +1923,12 @@ void GDParser::_parse_class(ClassNode *p_class) {
}
}
- tokenizer.advance();
+ tokenizer->advance();
}
} else {
- if (tokenizer.get_token()==GDTokenizer::TK_PERIOD) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PERIOD) {
_set_error("Parent constructor call found for a class without inheritance.");
return;
@@ -1963,41 +1963,41 @@ void GDParser::_parse_class(ClassNode *p_class) {
} break;
case GDTokenizer::TK_PR_EXPORT: {
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_OPEN) {
- tokenizer.advance();
- if (tokenizer.get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
+ tokenizer->advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_BUILT_IN_TYPE) {
- Variant::Type type = tokenizer.get_token_type();
+ Variant::Type type = tokenizer->get_token_type();
if (type==Variant::NIL) {
_set_error("Can't export null type.");
return;
}
current_export.type=type;
- tokenizer.advance();
- if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
+ tokenizer->advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
// hint expected next!
- tokenizer.advance();
+ tokenizer->advance();
switch(current_export.type) {
case Variant::INT: {
- if (tokenizer.get_token()==GDTokenizer::TK_CONSTANT && tokenizer.get_token_constant().get_type()==Variant::STRING) {
+ if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
//enumeration
current_export.hint=PROPERTY_HINT_ENUM;
bool first=true;
while(true) {
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || tokenizer.get_token_constant().get_type()!=Variant::STRING) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
current_export=PropertyInfo();
_set_error("Expected a string constant in enumeration hint.");
}
- String c = tokenizer.get_token_constant();
+ String c = tokenizer->get_token_constant();
if (!first)
current_export.hint_string+=",";
else
@@ -2005,16 +2005,16 @@ void GDParser::_parse_class(ClassNode *p_class) {
current_export.hint_string+=c.xml_escape();
- tokenizer.advance();
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
+ tokenizer->advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
- if (tokenizer.get_token()!=GDTokenizer::TK_COMMA) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' in enumeration hint.");
}
- tokenizer.advance();
+ tokenizer->advance();
}
@@ -2024,7 +2024,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
};
case Variant::REAL: {
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer.get_token_constant().is_num()) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a range in numeric hint.");
@@ -2033,119 +2033,119 @@ void GDParser::_parse_class(ClassNode *p_class) {
//enumeration
current_export.hint=PROPERTY_HINT_RANGE;
- current_export.hint_string=tokenizer.get_token_constant().operator String();
- tokenizer.advance();
+ current_export.hint_string=tokenizer->get_token_constant().operator String();
+ tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE) {
current_export.hint_string="0,"+current_export.hint_string;
break;
}
- if (tokenizer.get_token()!=GDTokenizer::TK_COMMA) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ',' or ')' in numeric range hint.");
}
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer.get_token_constant().is_num()) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a number as upper bound in numeric range hint.");
}
- current_export.hint_string+=","+tokenizer.get_token_constant().operator String();
- tokenizer.advance();
+ current_export.hint_string+=","+tokenizer->get_token_constant().operator String();
+ tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
- if (tokenizer.get_token()!=GDTokenizer::TK_COMMA) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ',' or ')' in numeric range hint.");
}
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer.get_token_constant().is_num()) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || !tokenizer->get_token_constant().is_num()) {
current_export=PropertyInfo();
_set_error("Expected a number as step in numeric range hint.");
}
- current_export.hint_string+=","+tokenizer.get_token_constant().operator String();
- tokenizer.advance();
+ current_export.hint_string+=","+tokenizer->get_token_constant().operator String();
+ tokenizer->advance();
} break;
case Variant::STRING: {
- if (tokenizer.get_token()==GDTokenizer::TK_CONSTANT && tokenizer.get_token_constant().get_type()==Variant::STRING) {
+ if (tokenizer->get_token()==GDTokenizer::TK_CONSTANT && tokenizer->get_token_constant().get_type()==Variant::STRING) {
//enumeration
current_export.hint=PROPERTY_HINT_ENUM;
bool first=true;
while(true) {
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || tokenizer.get_token_constant().get_type()!=Variant::STRING) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
current_export=PropertyInfo();
_set_error("Expected a string constant in enumeration hint.");
}
- String c = tokenizer.get_token_constant();
+ String c = tokenizer->get_token_constant();
if (!first)
current_export.hint_string+=",";
else
first=false;
current_export.hint_string+=c.xml_escape();
- tokenizer.advance();
- if (tokenizer.get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
+ tokenizer->advance();
+ if (tokenizer->get_token()==GDTokenizer::TK_PARENTHESIS_CLOSE)
break;
- if (tokenizer.get_token()!=GDTokenizer::TK_COMMA) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_COMMA) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' in enumeration hint.");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
}
break;
}
- if (tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer.get_token_identifier()=="DIR") {
+ if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="DIR") {
current_export.hint=PROPERTY_HINT_DIR;
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in hint.");
return;
}
break;
}
- if (tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer.get_token_identifier()=="FILE") {
+ if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER && tokenizer->get_token_identifier()=="FILE") {
current_export.hint=PROPERTY_HINT_FILE;
- tokenizer.advance();
+ tokenizer->advance();
- if (tokenizer.get_token()==GDTokenizer::TK_COMMA) {
+ if (tokenizer->get_token()==GDTokenizer::TK_COMMA) {
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_CONSTANT || tokenizer.get_token_constant().get_type()!=Variant::STRING) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_CONSTANT || tokenizer->get_token_constant().get_type()!=Variant::STRING) {
_set_error("Expected string constant with filter");
return;
}
- current_export.hint_string=tokenizer.get_token_constant();
- tokenizer.advance();
+ current_export.hint_string=tokenizer->get_token_constant();
+ tokenizer->advance();
}
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
_set_error("Expected ')' in hint.");
return;
}
@@ -2154,14 +2154,14 @@ void GDParser::_parse_class(ClassNode *p_class) {
} break;
case Variant::COLOR: {
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER ) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER ) {
current_export=PropertyInfo();
_set_error("Color type hint expects RGB or RGBA as hints");
return;
}
- String identifier = tokenizer.get_token_identifier();
+ String identifier = tokenizer->get_token_identifier();
if (identifier=="RGB") {
current_export.hint=PROPERTY_HINT_COLOR_NO_ALPHA;
} else if (identifier=="RGBA") {
@@ -2171,7 +2171,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
_set_error("Color type hint expects RGB or RGBA as hints");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
} break;
default: {
@@ -2184,9 +2184,9 @@ void GDParser::_parse_class(ClassNode *p_class) {
}
- } else if (tokenizer.get_token()==GDTokenizer::TK_IDENTIFIER) {
+ } else if (tokenizer->get_token()==GDTokenizer::TK_IDENTIFIER) {
- String identifier = tokenizer.get_token_identifier();
+ String identifier = tokenizer->get_token_identifier();
if (!ObjectTypeDB::is_type(identifier,"Resource")) {
current_export=PropertyInfo();
@@ -2197,10 +2197,10 @@ void GDParser::_parse_class(ClassNode *p_class) {
current_export.hint=PROPERTY_HINT_RESOURCE_TYPE;
current_export.hint_string=identifier;
- tokenizer.advance();
+ tokenizer->advance();
}
- if (tokenizer.get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PARENTHESIS_CLOSE) {
current_export=PropertyInfo();
_set_error("Expected ')' or ',' after export hint.");
@@ -2208,11 +2208,11 @@ void GDParser::_parse_class(ClassNode *p_class) {
}
- tokenizer.advance();
+ tokenizer->advance();
}
- if (tokenizer.get_token()!=GDTokenizer::TK_PR_VAR) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_PR_VAR) {
current_export=PropertyInfo();
_set_error("Expected 'var'.");
@@ -2224,26 +2224,26 @@ void GDParser::_parse_class(ClassNode *p_class) {
//variale declaration and (eventual) initialization
ClassNode::Member member;
- bool autoexport = tokenizer.get_token(-1)==GDTokenizer::TK_PR_EXPORT;
+ bool autoexport = tokenizer->get_token(-1)==GDTokenizer::TK_PR_EXPORT;
if (current_export.type!=Variant::NIL) {
member._export=current_export;
current_export=PropertyInfo();
}
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected identifier for member variable name.");
return;
}
- member.identifier=tokenizer.get_token_identifier();
+ member.identifier=tokenizer->get_token_identifier();
member._export.name=member.identifier;
- tokenizer.advance();
+ tokenizer->advance();
p_class->variables.push_back(member);
- if (tokenizer.get_token()!=GDTokenizer::TK_OP_ASSIGN) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
if (autoexport) {
@@ -2253,9 +2253,9 @@ void GDParser::_parse_class(ClassNode *p_class) {
break;
}
#ifdef DEBUG_ENABLED
- int line = tokenizer.get_token_line();
+ int line = tokenizer->get_token_line();
#endif
- tokenizer.advance();
+ tokenizer->advance();
Node *subexpr=NULL;
@@ -2324,22 +2324,22 @@ void GDParser::_parse_class(ClassNode *p_class) {
ClassNode::Constant constant;
- tokenizer.advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_IDENTIFIER) {
+ tokenizer->advance();
+ if (tokenizer->get_token()!=GDTokenizer::TK_IDENTIFIER) {
_set_error("Expected name (identifier) for constant.");
return;
}
- constant.identifier=tokenizer.get_token_identifier();
- tokenizer.advance();
+ constant.identifier=tokenizer->get_token_identifier();
+ tokenizer->advance();
- if (tokenizer.get_token()!=GDTokenizer::TK_OP_ASSIGN) {
+ if (tokenizer->get_token()!=GDTokenizer::TK_OP_ASSIGN) {
_set_error("Constant expects assignment.");
return;
}
- tokenizer.advance();
+ tokenizer->advance();
Node *subexpr=NULL;
@@ -2362,7 +2362,7 @@ void GDParser::_parse_class(ClassNode *p_class) {
default: {
- _set_error(String()+"Unexpected token: "+tokenizer.get_token_name(tokenizer.get_token())+":"+tokenizer.get_token_identifier());
+ _set_error(String()+"Unexpected token: "+tokenizer->get_token_name(tokenizer->get_token())+":"+tokenizer->get_token_identifier());
return;
} break;
@@ -2382,8 +2382,8 @@ void GDParser::_set_error(const String& p_error, int p_line, int p_column) {
return; //allow no further errors
error=p_error;
- error_line=p_line<0?tokenizer.get_token_line():p_line;
- error_column=p_column<0?tokenizer.get_token_column():p_column;
+ error_line=p_line<0?tokenizer->get_token_line():p_line;
+ error_column=p_column<0?tokenizer->get_token_column():p_column;
error_set=true;
}
@@ -2402,11 +2402,10 @@ int GDParser::get_error_column() const {
}
-Error GDParser::parse(const String& p_code,const String& p_base_path) {
+Error GDParser::_parse(const String& p_base_path) {
- base_path=p_base_path;
- tokenizer.set_code(p_code);
+ base_path=p_base_path;
clear();
@@ -2416,9 +2415,9 @@ Error GDParser::parse(const String& p_code,const String& p_base_path) {
_parse_class(main_class);
- if (tokenizer.get_token()==GDTokenizer::TK_ERROR) {
+ if (tokenizer->get_token()==GDTokenizer::TK_ERROR) {
error_set=false;
- _set_error("Parse Error: "+tokenizer.get_token_error());
+ _set_error("Parse Error: "+tokenizer->get_token_error());
}
if (error_set) {
@@ -2428,6 +2427,31 @@ Error GDParser::parse(const String& p_code,const String& p_base_path) {
return OK;
}
+Error GDParser::parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p_base_path) {
+
+ GDTokenizerBuffer *tb = memnew( GDTokenizerBuffer );
+ tb->set_code_buffer(p_bytecode);
+ tokenizer=tb;
+ Error ret = _parse(p_base_path);
+ memdelete(tb);
+ tokenizer=NULL;
+ return ret;
+}
+
+
+Error GDParser::parse(const String& p_code,const String& p_base_path) {
+
+
+ GDTokenizerText *tt = memnew( GDTokenizerText );
+ tt->set_code(p_code);
+
+ tokenizer=tt;
+ Error ret = _parse(p_base_path);
+ memdelete(tt);
+ tokenizer=NULL;
+ return ret;
+}
+
const GDParser::Node *GDParser::get_parse_tree() const {
return head;
@@ -2459,6 +2483,7 @@ GDParser::GDParser() {
head=NULL;
list=NULL;
+ tokenizer=NULL;
clear();
}
diff --git a/modules/gdscript/gd_parser.h b/modules/gdscript/gd_parser.h
index 8011495340..1925808cac 100644
--- a/modules/gdscript/gd_parser.h
+++ b/modules/gdscript/gd_parser.h
@@ -343,10 +343,12 @@ public:
ProgramNode() { type=TYPE_PROGRAM; }
};
*/
+
+
private:
- GDTokenizer tokenizer;
+ GDTokenizer *tokenizer;
Node *head;
@@ -380,12 +382,15 @@ private:
void _parse_class(ClassNode *p_class);
bool _end_statement();
+ Error _parse(const String& p_base_path);
+
public:
String get_error() const;
int get_error_line() const;
int get_error_column() const;
Error parse(const String& p_code,const String& p_base_path="");
+ Error parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p_base_path="");
const Node *get_parse_tree() const;
diff --git a/modules/gdscript/gd_script.cpp b/modules/gdscript/gd_script.cpp
index 5679e1e066..29857e6be6 100644
--- a/modules/gdscript/gd_script.cpp
+++ b/modules/gdscript/gd_script.cpp
@@ -1416,7 +1416,7 @@ Error GDScript::reload() {
String basedir=path;
if (basedir=="")
- basedir==get_path();
+ basedir=get_path();
if (basedir!="")
basedir=basedir.get_base_dir();
@@ -1560,6 +1560,49 @@ void GDScript::_bind_methods() {
}
+
+
+Error GDScript::load_byte_code(const String& p_path) {
+
+ Vector<uint8_t> bytecode = FileAccess::get_file_as_array(p_path);
+ ERR_FAIL_COND_V(bytecode.size()==0,ERR_PARSE_ERROR);
+ path=p_path;
+
+ String basedir=path;
+
+ if (basedir=="")
+ basedir=get_path();
+
+ if (basedir!="")
+ basedir=basedir.get_base_dir();
+
+ valid=false;
+ GDParser parser;
+ Error err = parser.parse_bytecode(bytecode,basedir);
+ if (err) {
+ _err_print_error("GDScript::load_byte_code",path.empty()?"built-in":(const char*)path.utf8().get_data(),parser.get_error_line(),("Parse Error: "+parser.get_error()).utf8().get_data());
+ ERR_FAIL_V(ERR_PARSE_ERROR);
+ }
+
+ GDCompiler compiler;
+ err = compiler.compile(&parser,this);
+
+ if (err) {
+ _err_print_error("GDScript::load_byte_code",path.empty()?"built-in":(const char*)path.utf8().get_data(),compiler.get_error_line(),("Compile Error: "+compiler.get_error()).utf8().get_data());
+ ERR_FAIL_V(ERR_COMPILATION_FAILED);
+ }
+
+ valid=true;
+
+ for(Map<StringName,Ref<GDScript> >::Element *E=subclasses.front();E;E=E->next()) {
+
+ _set_subclass_path(E->get(),path);
+ }
+
+ return OK;
+}
+
+
Error GDScript::load_source_code(const String& p_path) {
@@ -2153,24 +2196,39 @@ RES ResourceFormatLoaderGDScript::load(const String &p_path,const String& p_orig
Ref<GDScript> scriptres(script);
- Error err = script->load_source_code(p_path);
+ if (p_path.ends_with(".gdc")) {
- if (err!=OK) {
+ script->set_script_path(p_original_path); // script needs this.
+ script->set_path(p_original_path);
+ Error err = script->load_byte_code(p_path);
- ERR_FAIL_COND_V(err!=OK, RES());
- }
- script->set_script_path(p_original_path); // script needs this.
- script->set_path(p_original_path);
- //script->set_name(p_path.get_file());
+ if (err!=OK) {
+
+ ERR_FAIL_COND_V(err!=OK, RES());
+ }
+
+ } else {
+ Error err = script->load_source_code(p_path);
- script->reload();
+ if (err!=OK) {
+
+ ERR_FAIL_COND_V(err!=OK, RES());
+ }
+
+ script->set_script_path(p_original_path); // script needs this.
+ script->set_path(p_original_path);
+ //script->set_name(p_path.get_file());
+
+ script->reload();
+ }
return scriptres;
}
void ResourceFormatLoaderGDScript::get_recognized_extensions(List<String> *p_extensions) const {
p_extensions->push_back("gd");
+ p_extensions->push_back("gdc");
}
bool ResourceFormatLoaderGDScript::handles_type(const String& p_type) const {
@@ -2180,7 +2238,8 @@ bool ResourceFormatLoaderGDScript::handles_type(const String& p_type) const {
String ResourceFormatLoaderGDScript::get_resource_type(const String &p_path) const {
- if (p_path.extension().to_lower()=="gd")
+ String el = p_path.extension().to_lower();
+ if (el=="gd" || el=="gdc")
return "GDScript";
return "";
}
diff --git a/modules/gdscript/gd_script.h b/modules/gdscript/gd_script.h
index 70dec4e8ee..55bc547e8d 100644
--- a/modules/gdscript/gd_script.h
+++ b/modules/gdscript/gd_script.h
@@ -261,6 +261,7 @@ public:
virtual String get_node_type() const;
void set_script_path(const String& p_path) { path=p_path; } //because subclasses need a path too...
Error load_source_code(const String& p_path);
+ Error load_byte_code(const String& p_path);
virtual ScriptLanguage *get_language() const;
diff --git a/modules/gdscript/gd_tokenizer.cpp b/modules/gdscript/gd_tokenizer.cpp
index f7320799a5..ff9be7926b 100644
--- a/modules/gdscript/gd_tokenizer.cpp
+++ b/modules/gdscript/gd_tokenizer.cpp
@@ -29,6 +29,9 @@
#include "gd_tokenizer.h"
#include "print_string.h"
#include "gd_functions.h"
+#include "io/marshalls.h"
+#include "map.h"
+
const char* GDTokenizer::token_names[TK_MAX]={
"Empty",
"Identifier",
@@ -128,7 +131,7 @@ static bool _is_hex(CharType c) {
return (c>='0' && c<='9') || (c>='a' && c<='f') || (c>='A' && c<='F');
}
-void GDTokenizer::_make_token(Token p_type) {
+void GDTokenizerText::_make_token(Token p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -138,7 +141,7 @@ void GDTokenizer::_make_token(Token p_type) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_make_identifier(const StringName& p_identifier) {
+void GDTokenizerText::_make_identifier(const StringName& p_identifier) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -151,7 +154,7 @@ void GDTokenizer::_make_identifier(const StringName& p_identifier) {
}
-void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
+void GDTokenizerText::_make_built_in_func(GDFunctions::Function p_func) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -163,7 +166,7 @@ void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_make_constant(const Variant& p_constant) {
+void GDTokenizerText::_make_constant(const Variant& p_constant) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -176,7 +179,7 @@ void GDTokenizer::_make_constant(const Variant& p_constant) {
}
-void GDTokenizer::_make_type(const Variant::Type& p_type) {
+void GDTokenizerText::_make_type(const Variant::Type& p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@@ -191,7 +194,7 @@ void GDTokenizer::_make_type(const Variant::Type& p_type) {
}
-void GDTokenizer::_make_error(const String& p_error) {
+void GDTokenizerText::_make_error(const String& p_error) {
error_flag=true;
last_error=p_error;
@@ -206,7 +209,7 @@ void GDTokenizer::_make_error(const String& p_error) {
}
-void GDTokenizer::_make_newline(int p_spaces) {
+void GDTokenizerText::_make_newline(int p_spaces) {
TokenData &tk=tk_rb[tk_rb_pos];
tk.type=TK_NEWLINE;
@@ -216,7 +219,7 @@ void GDTokenizer::_make_newline(int p_spaces) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
-void GDTokenizer::_advance() {
+void GDTokenizerText::_advance() {
if (error_flag) {
//parser broke
@@ -859,7 +862,7 @@ void GDTokenizer::_advance() {
}
-void GDTokenizer::set_code(const String& p_code) {
+void GDTokenizerText::set_code(const String& p_code) {
code=p_code;
len = p_code.length();
@@ -878,7 +881,7 @@ void GDTokenizer::set_code(const String& p_code) {
_advance();
}
-GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
+GDTokenizerText::Token GDTokenizerText::get_token(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, TK_ERROR);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, TK_ERROR);
@@ -886,7 +889,7 @@ GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
return tk_rb[ofs].type;
}
-int GDTokenizer::get_token_line(int p_offset) const {
+int GDTokenizerText::get_token_line(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@@ -894,7 +897,7 @@ int GDTokenizer::get_token_line(int p_offset) const {
return tk_rb[ofs].line;
}
-int GDTokenizer::get_token_column(int p_offset) const {
+int GDTokenizerText::get_token_column(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@@ -902,7 +905,7 @@ int GDTokenizer::get_token_column(int p_offset) const {
return tk_rb[ofs].col;
}
-const Variant& GDTokenizer::get_token_constant(int p_offset) const {
+const Variant& GDTokenizerText::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, tk_rb[0].constant);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, tk_rb[0].constant);
@@ -910,7 +913,7 @@ const Variant& GDTokenizer::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V(tk_rb[ofs].type!=TK_CONSTANT,tk_rb[0].constant);
return tk_rb[ofs].constant;
}
-StringName GDTokenizer::get_token_identifier(int p_offset) const {
+StringName GDTokenizerText::get_token_identifier(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, StringName());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, StringName());
@@ -921,7 +924,7 @@ StringName GDTokenizer::get_token_identifier(int p_offset) const {
}
-GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
+GDFunctions::Function GDTokenizerText::get_token_built_in_func(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
@@ -932,7 +935,7 @@ GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
}
-Variant::Type GDTokenizer::get_token_type(int p_offset) const {
+Variant::Type GDTokenizerText::get_token_type(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, Variant::NIL);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, Variant::NIL);
@@ -944,7 +947,7 @@ Variant::Type GDTokenizer::get_token_type(int p_offset) const {
}
-int GDTokenizer::get_token_line_indent(int p_offset) const {
+int GDTokenizerText::get_token_line_indent(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, 0);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, 0);
@@ -955,7 +958,7 @@ int GDTokenizer::get_token_line_indent(int p_offset) const {
}
-String GDTokenizer::get_token_error(int p_offset) const {
+String GDTokenizerText::get_token_error(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, String());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, String());
@@ -965,9 +968,377 @@ String GDTokenizer::get_token_error(int p_offset) const {
return tk_rb[ofs].constant;
}
-void GDTokenizer::advance(int p_amount) {
+void GDTokenizerText::advance(int p_amount) {
ERR_FAIL_COND( p_amount <=0 );
for(int i=0;i<p_amount;i++)
_advance();
}
+
+
+
+
+
+
+
+
+
+
+
+//////////////////////////////////////////////////////////////////////////////////////////////////////
+
+#define BYTECODE_VERSION 1
+
+Error GDTokenizerBuffer::set_code_buffer(const Vector<uint8_t> & p_buffer) {
+
+
+ const uint8_t *buf=p_buffer.ptr();
+ int total_len=p_buffer.size();
+ ERR_FAIL_COND_V( p_buffer.size()<24 || p_buffer[0]!='G' || p_buffer[1]!='D' || p_buffer[2]!='S' || p_buffer[3]!='C',ERR_INVALID_DATA);
+
+ int version = decode_uint32(&buf[4]);
+ if (version>1) {
+ ERR_EXPLAIN("Bytecode is too New!");
+ ERR_FAIL_COND_V(version>BYTECODE_VERSION,ERR_INVALID_DATA);
+ }
+ int identifier_count = decode_uint32(&buf[8]);
+ int constant_count = decode_uint32(&buf[12]);
+ int line_count = decode_uint32(&buf[16]);
+ int token_count = decode_uint32(&buf[20]);
+
+ const uint8_t *b=buf;
+
+ b=&buf[24];
+ total_len-=24;
+
+ identifiers.resize(identifier_count);
+ for(int i=0;i<identifier_count;i++) {
+
+ int len = decode_uint32(b);
+ ERR_FAIL_COND_V(len>total_len,ERR_INVALID_DATA);
+ b+=4;
+ Vector<uint8_t> cs;
+ cs.resize(len);
+ for(int j=0;j<len;j++) {
+ cs[j]=b[j]^0xb6;
+ }
+
+ cs[cs.size()-1]=0;
+ String s;
+ s.parse_utf8((const char*)cs.ptr());
+ b+=len;
+ total_len-=len+4;
+ identifiers[i]=s;
+ }
+
+ constants.resize(constant_count);
+ for(int i=0;i<constant_count;i++) {
+
+ Variant v;
+ int len;
+ Error err = decode_variant(v,b,total_len,&len);
+ if (err)
+ return err;
+ b+=len;
+ total_len-=len;
+ constants[i]=v;
+
+ }
+
+ ERR_FAIL_COND_V(line_count*8>total_len,ERR_INVALID_DATA);
+
+ for(int i=0;i<line_count;i++) {
+
+ uint32_t token=decode_uint32(b);
+ b+=4;
+ uint32_t linecol=decode_uint32(b);
+ b+=4;
+
+ lines.insert(token,linecol);
+ total_len-=8;
+ }
+
+ tokens.resize(token_count);
+
+ for(int i=0;i<token_count;i++) {
+
+ ERR_FAIL_COND_V( total_len < 1, ERR_INVALID_DATA);
+
+ if ((*b)&TOKEN_BYTE_MASK) { //little endian always
+ ERR_FAIL_COND_V( total_len < 4, ERR_INVALID_DATA);
+
+ tokens[i]=decode_uint32(b)&~TOKEN_BYTE_MASK;
+ b+=4;
+ } else {
+ tokens[i]=*b;
+ b+=1;
+ total_len--;
+ }
+ }
+
+ token=0;
+
+ return OK;
+
+}
+
+
+Vector<uint8_t> GDTokenizerBuffer::parse_code_string(const String& p_code) {
+
+ Vector<uint8_t> buf;
+
+
+ Map<StringName,int> identifier_map;
+ HashMap<Variant,int,VariantHasher> constant_map;
+ Map<uint32_t,int> line_map;
+ Vector<uint32_t> token_array;
+
+ GDTokenizerText tt;
+ tt.set_code(p_code);
+ int line=-1;
+ int col=0;
+
+ while(true) {
+
+ if (tt.get_token_line()!=line) {
+
+ line=tt.get_token_line();
+ line_map[line]=token_array.size();
+ }
+
+ uint32_t token=tt.get_token();
+ switch(tt.get_token()) {
+
+ case TK_IDENTIFIER: {
+ StringName id = tt.get_token_identifier();
+ if (!identifier_map.has(id)) {
+ int idx = identifier_map.size();
+ identifier_map[id]=idx;
+ }
+ token|=identifier_map[id]<<TOKEN_BITS;
+ } break;
+ case TK_CONSTANT: {
+
+ Variant c = tt.get_token_constant();
+ if (!constant_map.has(c)) {
+ int idx = constant_map.size();
+ constant_map[c]=idx;
+ }
+ token|=constant_map[c]<<TOKEN_BITS;
+ } break;
+ case TK_BUILT_IN_TYPE: {
+
+ token|=tt.get_token_type()<<TOKEN_BITS;
+ } break;
+ case TK_BUILT_IN_FUNC: {
+
+ token|=tt.get_token_built_in_func()<<TOKEN_BITS;
+
+ } break;
+ case TK_NEWLINE: {
+
+ token|=tt.get_token_line_indent()<<TOKEN_BITS;
+ } break;
+ case TK_ERROR: {
+
+ ERR_FAIL_V(Vector<uint8_t>());
+ } break;
+ default: {}
+
+ };
+
+ token_array.push_back(token);
+
+ if (tt.get_token()==TK_EOF)
+ break;
+ tt.advance();
+
+ }
+
+ //reverse maps
+
+ Map<int,StringName> rev_identifier_map;
+ for(Map<StringName,int>::Element *E=identifier_map.front();E;E=E->next()) {
+ rev_identifier_map[E->get()]=E->key();
+ }
+
+ Map<int,Variant> rev_constant_map;
+ const Variant *K =NULL;
+ while((K=constant_map.next(K))) {
+ rev_constant_map[constant_map[*K]]=*K;
+ }
+
+ Map<int,uint32_t> rev_line_map;
+ for(Map<uint32_t,int>::Element *E=line_map.front();E;E=E->next()) {
+ rev_line_map[E->get()]=E->key();
+ }
+
+ //save header
+ buf.resize(24);
+ buf[0]='G';
+ buf[1]='D';
+ buf[2]='S';
+ buf[3]='C';
+ encode_uint32(BYTECODE_VERSION,&buf[4]);
+ encode_uint32(identifier_map.size(),&buf[8]);
+ encode_uint32(constant_map.size(),&buf[12]);
+ encode_uint32(line_map.size(),&buf[16]);
+ encode_uint32(token_array.size(),&buf[20]);
+
+ //save identifiers
+
+ for(Map<int,StringName>::Element *E=rev_identifier_map.front();E;E=E->next()) {
+
+ CharString cs = String(E->get()).utf8();
+ int len = cs.length()+1;
+ int extra = 4-(len%4);
+ if (extra==4)
+ extra=0;
+
+ uint8_t ibuf[4];
+ encode_uint32(len+extra,ibuf);
+ for(int i=0;i<4;i++) {
+ buf.push_back(ibuf[i]);
+ }
+ for(int i=0;i<len;i++) {
+ buf.push_back(cs[i]^0xb6);
+ }
+ for(int i=0;i<extra;i++) {
+ buf.push_back(0^0xb6);
+ }
+ }
+
+ for(Map<int,Variant>::Element *E=rev_constant_map.front();E;E=E->next()) {
+
+ int len;
+ Error err = encode_variant(E->get(),NULL,len);
+ ERR_FAIL_COND_V(err!=OK,Vector<uint8_t>());
+ int pos=buf.size();
+ buf.resize(pos+len);
+ encode_variant(E->get(),&buf[pos],len);
+ }
+
+ for(Map<int,uint32_t>::Element *E=rev_line_map.front();E;E=E->next()) {
+
+ uint8_t ibuf[8];
+ encode_uint32(E->key(),&ibuf[0]);
+ encode_uint32(E->get(),&ibuf[4]);
+ for(int i=0;i<8;i++)
+ buf.push_back(ibuf[i]);
+ }
+
+ for(int i=0;i<token_array.size();i++) {
+
+ uint32_t token = token_array[i];
+
+ if (token&~TOKEN_MASK) {
+ uint8_t buf4[4];
+ encode_uint32(token_array[i]|TOKEN_BYTE_MASK,&buf4[0]);
+ for(int j=0;j<4;j++) {
+ buf.push_back(buf4[j]);
+ }
+ } else {
+ buf.push_back(token);
+ }
+ }
+
+ return buf;
+
+}
+
+GDTokenizerBuffer::Token GDTokenizerBuffer::get_token(int p_offset) const {
+
+ int offset = token+p_offset;
+
+ if (offset<0 || offset>=tokens.size())
+ return TK_EOF;
+
+ return GDTokenizerBuffer::Token(tokens[offset]&TOKEN_MASK);
+}
+
+
+StringName GDTokenizerBuffer::get_token_identifier(int p_offset) const{
+
+ int offset = token+p_offset;
+
+ ERR_FAIL_INDEX_V(offset,tokens.size(),StringName());
+ uint32_t identifier = tokens[offset]>>TOKEN_BITS;
+ ERR_FAIL_INDEX_V(identifier,identifiers.size(),StringName());
+
+ return identifiers[identifier];
+}
+
+GDFunctions::Function GDTokenizerBuffer::get_token_built_in_func(int p_offset) const{
+
+ int offset = token+p_offset;
+ ERR_FAIL_INDEX_V(offset,tokens.size(),GDFunctions::FUNC_MAX);
+ return GDFunctions::Function(tokens[offset]>>TOKEN_BITS);
+}
+
+Variant::Type GDTokenizerBuffer::get_token_type(int p_offset) const{
+
+ int offset = token+p_offset;
+ ERR_FAIL_INDEX_V(offset,tokens.size(),Variant::NIL);
+
+ return Variant::Type(tokens[offset]>>TOKEN_BITS);
+}
+
+int GDTokenizerBuffer::get_token_line(int p_offset) const{
+
+ int offset = token+p_offset;
+ int pos = lines.find_nearest(offset);
+
+ if (pos<0)
+ return -1;
+ if (pos>=lines.size())
+ pos=lines.size()-1;
+
+ uint32_t l = lines.getv(pos);
+ return l&TOKEN_LINE_MASK;
+
+}
int GDTokenizerBuffer::get_token_column(int p_offset) const{

	// Look up the closest line-table entry for this token index, clamped
	// to the table bounds; -1 when no entry precedes it.
	int offset = token+p_offset;
	int pos = lines.find_nearest(offset);
	if (pos<0)
		return -1;
	if (pos>=lines.size())
		pos=lines.size()-1;

	uint32_t l = lines.getv(pos);
	// Column lives in the bits above TOKEN_LINE_BITS.
	// NOTE(review): parse_code_string() only stores the line number in these
	// entries, so this is expected to return 0 for buffers it produced —
	// confirm before relying on column info from bytecode.
	return l>>TOKEN_LINE_BITS;

}
+int GDTokenizerBuffer::get_token_line_indent(int p_offset) const{
+
+ int offset = token+p_offset;
+ ERR_FAIL_INDEX_V(offset,tokens.size(),0);
+ return tokens[offset]>>TOKEN_BITS;
+}
+const Variant& GDTokenizerBuffer::get_token_constant(int p_offset) const{
+
+
+ int offset = token+p_offset;
+ ERR_FAIL_INDEX_V(offset,tokens.size(),nil);
+ uint32_t constant = tokens[offset]>>TOKEN_BITS;
+ ERR_FAIL_INDEX_V(constant,constants.size(),nil);
+ return constants[constant];
+
+}
String GDTokenizerBuffer::get_token_error(int p_offset) const{

	// A token buffer never contains TK_ERROR entries: parse_code_string()
	// aborts on tokenizer errors before serializing, so requesting an
	// error string from a buffer is itself a programming error.
	ERR_FAIL_V(String());
}
+
void GDTokenizerBuffer::advance(int p_amount){

	// Move the cursor forward; ERR_FAIL_INDEX also rejects a target that
	// would land before index 0 or past the last stored token (the stream
	// ends with an explicit TK_EOF entry, so parsing stops inside bounds).
	ERR_FAIL_INDEX(p_amount+token,tokens.size());
	token+=p_amount;
}
GDTokenizerBuffer::GDTokenizerBuffer(){

	// Start the cursor at the first token of the (initially empty) stream.
	token=0;

}
+
diff --git a/modules/gdscript/gd_tokenizer.h b/modules/gdscript/gd_tokenizer.h
index 24ee2be7ad..c517e07b89 100644
--- a/modules/gdscript/gd_tokenizer.h
+++ b/modules/gdscript/gd_tokenizer.h
@@ -33,6 +33,8 @@
#include "variant.h"
#include "string_db.h"
#include "gd_functions.h"
+#include "vmap.h"
+
class GDTokenizer {
public:
@@ -117,11 +119,28 @@ public:
TK_MAX
};
+protected:
+ static const char* token_names[TK_MAX];
+public:
+ static const char *get_token_name(Token p_token);
+ virtual const Variant& get_token_constant(int p_offset=0) const=0;
+ virtual Token get_token(int p_offset=0) const=0;
+ virtual StringName get_token_identifier(int p_offset=0) const=0;
+ virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const=0;
+ virtual Variant::Type get_token_type(int p_offset=0) const=0;
+ virtual int get_token_line(int p_offset=0) const=0;
+ virtual int get_token_column(int p_offset=0) const=0;
+ virtual int get_token_line_indent(int p_offset=0) const=0;
+ virtual String get_token_error(int p_offset=0) const=0;
+ virtual void advance(int p_amount=1)=0;
-private:
+ virtual ~GDTokenizer(){};
+
+};
+
+class GDTokenizerText : public GDTokenizer {
- static const char* token_names[TK_MAX];
enum {
MAX_LOOKAHEAD=4,
TK_RB_SIZE=MAX_LOOKAHEAD*2+1
@@ -162,20 +181,59 @@ private:
void _advance();
public:
- static const char *get_token_name(Token p_token);
void set_code(const String& p_code);
- Token get_token(int p_offset=0) const;
- const Variant& get_token_constant(int p_offset=0) const;
- StringName get_token_identifier(int p_offset=0) const;
- GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
- Variant::Type get_token_type(int p_offset=0) const;
- int get_token_line(int p_offset=0) const;
- int get_token_column(int p_offset=0) const;
- int get_token_line_indent(int p_offset=0) const;
-
- String get_token_error(int p_offset=0) const;
- void advance(int p_amount=1);
+ virtual Token get_token(int p_offset=0) const;
+ virtual StringName get_token_identifier(int p_offset=0) const;
+ virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
+ virtual Variant::Type get_token_type(int p_offset=0) const;
+ virtual int get_token_line(int p_offset=0) const;
+ virtual int get_token_column(int p_offset=0) const;
+ virtual int get_token_line_indent(int p_offset=0) const;
+ virtual const Variant& get_token_constant(int p_offset=0) const;
+ virtual String get_token_error(int p_offset=0) const;
+ virtual void advance(int p_amount=1);
+};
+
+
+
+
// Tokenizer that replays a precompiled 'GDSC' bytecode buffer instead of
// scanning source text (counterpart to GDTokenizerText).
class GDTokenizerBuffer : public GDTokenizer {


	// Layout constants shared by the encoder (parse_code_string) and the
	// decoder (set_code_buffer).
	enum {

		TOKEN_BYTE_MASK=0x80, // high bit set: token serialized as 4 bytes
		TOKEN_BITS=8, // low bits: token id; upper bits: per-token payload
		TOKEN_MASK=(1<<TOKEN_BITS)-1,
		TOKEN_LINE_BITS=24, // line-table entry: low bits line, upper bits column
		TOKEN_LINE_MASK=(1<<TOKEN_LINE_BITS)-1,
	};


	Vector<StringName> identifiers; // deduplicated identifier pool
	Vector<Variant> constants; // deduplicated constant pool
	VMap<uint32_t,uint32_t> lines; // token index -> packed line/column
	Vector<uint32_t> tokens; // token stream (id + payload per entry)
	Variant nil; // stable object returned by get_token_constant() on failure
	int token; // current cursor position in 'tokens'

public:


	// Loads a 'GDSC' buffer; returns ERR_INVALID_DATA on malformed input.
	Error set_code_buffer(const Vector<uint8_t> & p_buffer);
	// Compiles source text into a 'GDSC' buffer (empty Vector on error).
	static Vector<uint8_t> parse_code_string(const String& p_code);
	virtual Token get_token(int p_offset=0) const;
	virtual StringName get_token_identifier(int p_offset=0) const;
	virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
	virtual Variant::Type get_token_type(int p_offset=0) const;
	virtual int get_token_line(int p_offset=0) const;
	virtual int get_token_column(int p_offset=0) const;
	virtual int get_token_line_indent(int p_offset=0) const;
	virtual const Variant& get_token_constant(int p_offset=0) const;
	virtual String get_token_error(int p_offset=0) const;
	virtual void advance(int p_amount=1);
	GDTokenizerBuffer();
};
#endif // TOKENIZER_H
diff --git a/modules/gdscript/register_types.cpp b/modules/gdscript/register_types.cpp
index d2d7bf426a..6bcd12857b 100644
--- a/modules/gdscript/register_types.cpp
+++ b/modules/gdscript/register_types.cpp
@@ -13,11 +13,60 @@
#include "gd_script.h"
#include "io/resource_loader.h"
+#include "os/file_access.h"
+
GDScriptLanguage *script_language_gd=NULL;
ResourceFormatLoaderGDScript *resource_loader_gd=NULL;
ResourceFormatSaverGDScript *resource_saver_gd=NULL;
+#ifdef TOOLS_ENABLED
+
+#include "tools/editor/editor_import_export.h"
+#include "gd_tokenizer.h"
+#include "tools/editor/editor_node.h"
+
+class EditorExportGDScript : public EditorExportPlugin {
+
+ OBJ_TYPE(EditorExportGDScript,EditorExportPlugin);
+
+public:
+
+ virtual Vector<uint8_t> custom_export(String& p_path,const Ref<EditorExportPlatform> &p_platform) {
+ //compile gdscript to bytecode
+ if (p_path.ends_with(".gd")) {
+ Vector<uint8_t> file = FileAccess::get_file_as_array(p_path);
+ if (file.empty())
+ return file;
+ String txt;
+ txt.parse_utf8((const char*)file.ptr(),file.size());
+ file = GDTokenizerBuffer::parse_code_string(txt);
+ if (!file.empty()) {
+ print_line("PREV: "+p_path);
+ p_path=p_path.basename()+".gdc";
+ print_line("NOW: "+p_path);
+ return file;
+ }
+
+ }
+
+ return Vector<uint8_t>();
+ }
+
+
+ EditorExportGDScript(){}
+
+};
+
// Registers the GDScript bytecode export plugin. Runs as an EditorNode
// init callback (see register_gdscript_types), presumably so the
// EditorImportExport singleton exists by the time it is called — confirm.
static void register_editor_plugin() {

	Ref<EditorExportGDScript> egd = memnew( EditorExportGDScript );
	EditorImportExport::get_singleton()->add_export_plugin(egd);
}
+
+
+#endif
+
void register_gdscript_types() {
@@ -30,6 +79,11 @@ void register_gdscript_types() {
resource_saver_gd=memnew( ResourceFormatSaverGDScript );
ResourceSaver::add_resource_format_saver(resource_saver_gd);
+#ifdef TOOLS_ENABLED
+
+ EditorNode::add_init_callback(register_editor_plugin);
+#endif
+
}
void unregister_gdscript_types() {
@@ -43,4 +97,4 @@ void unregister_gdscript_types() {
if (resource_saver_gd)
memdelete( resource_saver_gd );
-} \ No newline at end of file
+}