Diffstat (limited to 'modules/gdscript/gdscript_tokenizer.cpp')
-rw-r--r--  modules/gdscript/gdscript_tokenizer.cpp | 49
1 file changed, 27 insertions(+), 22 deletions(-)
diff --git a/modules/gdscript/gdscript_tokenizer.cpp b/modules/gdscript/gdscript_tokenizer.cpp
index 3c8e1ddbe4..7ae7c72ed3 100644
--- a/modules/gdscript/gdscript_tokenizer.cpp
+++ b/modules/gdscript/gdscript_tokenizer.cpp
@@ -91,6 +91,7 @@ const char *GDScriptTokenizer::token_names[TK_MAX] = {
"match",
"func",
"class",
+ "class_name",
"extends",
"is",
"onready",
@@ -100,6 +101,8 @@ const char *GDScriptTokenizer::token_names[TK_MAX] = {
"setget",
"const",
"var",
+ "as",
+ "void",
"enum",
"preload",
"assert",
@@ -124,6 +127,7 @@ const char *GDScriptTokenizer::token_names[TK_MAX] = {
"'.'",
"'?'",
"':'",
+ "'->'",
"'$'",
"'\\n'",
"PI",
@@ -187,6 +191,7 @@ static const _kws _keyword_list[] = {
//func
{ GDScriptTokenizer::TK_PR_FUNCTION, "func" },
{ GDScriptTokenizer::TK_PR_CLASS, "class" },
+ { GDScriptTokenizer::TK_PR_CLASS_NAME, "class_name" },
{ GDScriptTokenizer::TK_PR_EXTENDS, "extends" },
{ GDScriptTokenizer::TK_PR_IS, "is" },
{ GDScriptTokenizer::TK_PR_ONREADY, "onready" },
@@ -195,6 +200,8 @@ static const _kws _keyword_list[] = {
{ GDScriptTokenizer::TK_PR_EXPORT, "export" },
{ GDScriptTokenizer::TK_PR_SETGET, "setget" },
{ GDScriptTokenizer::TK_PR_VAR, "var" },
+ { GDScriptTokenizer::TK_PR_AS, "as" },
+ { GDScriptTokenizer::TK_PR_VOID, "void" },
{ GDScriptTokenizer::TK_PR_PRELOAD, "preload" },
{ GDScriptTokenizer::TK_PR_ASSERT, "assert" },
{ GDScriptTokenizer::TK_PR_YIELD, "yield" },
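For context, the _keyword_list table above pairs each reserved word with its token enum, and the tokenizer consults it once it has read an identifier-like lexeme. A minimal standalone sketch of that kind of lookup; the enum values and the sentinel entry here are illustrative placeholders, not Godot's actual definitions:

	#include <cstdio>
	#include <cstring>

	enum Token { TK_PR_CLASS_NAME, TK_PR_AS, TK_PR_VOID, TK_IDENTIFIER };

	struct Keyword { Token token; const char *text; };

	static const Keyword keyword_list[] = {
		{ TK_PR_CLASS_NAME, "class_name" },
		{ TK_PR_AS, "as" },
		{ TK_PR_VOID, "void" },
		{ TK_IDENTIFIER, nullptr }, // sentinel terminating the table
	};

	// Return the keyword token for a lexeme, or TK_IDENTIFIER if it is not reserved.
	static Token lookup_keyword(const char *word) {
		for (int i = 0; keyword_list[i].text; i++) {
			if (strcmp(keyword_list[i].text, word) == 0)
				return keyword_list[i].token;
		}
		return TK_IDENTIFIER;
	}

	int main() {
		printf("%d %d\n", lookup_keyword("as"), lookup_keyword("foo")); // prints "1 3"
		return 0;
	}

The sketch uses a simple linear scan, which matches the shape of the table above: small, null-terminated, and only consulted once per identifier.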
@@ -705,11 +712,9 @@ void GDScriptTokenizerText::_advance() {
if (GETCHAR(1) == '=') {
_make_token(TK_OP_ASSIGN_SUB);
INCPOS(1);
- /*
- } else if (GETCHAR(1)=='-') {
- _make_token(TK_OP_MINUS_MINUS);
+ } else if (GETCHAR(1) == '>') {
+ _make_token(TK_FORWARD_ARROW);
INCPOS(1);
- */
} else {
_make_token(TK_OP_SUB);
}
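The '-' branch above now peeks one character ahead to pick between "-=" (TK_OP_ASSIGN_SUB), the new "->" arrow (TK_FORWARD_ARROW, used by the typed GDScript syntax for return types), and plain subtraction (TK_OP_SUB). A self-contained sketch of the same one-character lookahead, with placeholder names rather than Godot's GETCHAR/INCPOS/_make_token helpers:

	#include <cstdio>

	enum Token { TK_OP_ASSIGN_SUB, TK_FORWARD_ARROW, TK_OP_SUB };

	// Decide which token a leading '-' starts, given the character that follows it.
	static Token scan_minus(char next) {
		if (next == '=') return TK_OP_ASSIGN_SUB; // "-="
		if (next == '>') return TK_FORWARD_ARROW; // "->", return type annotation
		return TK_OP_SUB;                         // plain subtraction / negation
	}

	int main() {
		printf("%d %d %d\n", scan_minus('='), scan_minus('>'), scan_minus('x')); // 0 1 2
		return 0;
	}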
@@ -1135,9 +1140,9 @@ void GDScriptTokenizerText::advance(int p_amount) {
_advance();
}
- //////////////////////////////////////////////////////////////////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////////////////////////////
-#define BYTECODE_VERSION 12
+#define BYTECODE_VERSION 13
Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer) {
@@ -1167,15 +1172,15 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
Vector<uint8_t> cs;
cs.resize(len);
for (int j = 0; j < len; j++) {
- cs[j] = b[j] ^ 0xb6;
+ cs.write[j] = b[j] ^ 0xb6;
}
- cs[cs.size() - 1] = 0;
+ cs.write[cs.size() - 1] = 0;
String s;
s.parse_utf8((const char *)cs.ptr());
b += len;
total_len -= len + 4;
- identifiers[i] = s;
+ identifiers.write[i] = s;
}
constants.resize(constant_count);
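The repeated change from cs[j] = ... to cs.write[j] = ... (and likewise for identifiers, constants and tokens below) reflects that Godot's Vector is copy-on-write: plain indexing is for reads, while element writes go through the write accessor so a shared buffer can be detached first. A toy illustration of that idea, using hypothetical names rather than the engine's actual Vector/VectorWriteProxy internals:

	#include <cassert>
	#include <cstddef>
	#include <memory>
	#include <vector>

	// Two handles can share one buffer; a write must first detach (copy)
	// so the other handle is unaffected. That is the job an explicit
	// write path performs on a copy-on-write container.
	template <typename T>
	struct SharedBuffer {
		std::shared_ptr<std::vector<T>> buf = std::make_shared<std::vector<T>>();

		const T &read(std::size_t i) const { return (*buf)[i]; }

		T &write(std::size_t i) {
			if (buf.use_count() > 1) // shared: detach before mutating
				buf = std::make_shared<std::vector<T>>(*buf);
			return (*buf)[i];
		}
	};

	int main() {
		SharedBuffer<int> a;
		a.buf->resize(1);
		SharedBuffer<int> b = a;  // copy shares a's buffer
		b.write(0) = 7;           // detaches b's buffer; a is untouched
		assert(a.read(0) == 0 && b.read(0) == 7);
		return 0;
	}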
@@ -1188,7 +1193,7 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
return err;
b += len;
total_len -= len;
- constants[i] = v;
+ constants.write[i] = v;
}
ERR_FAIL_COND_V(line_count * 8 > total_len, ERR_INVALID_DATA);
@@ -1213,10 +1218,10 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
if ((*b) & TOKEN_BYTE_MASK) { //little endian always
ERR_FAIL_COND_V(total_len < 4, ERR_INVALID_DATA);
- tokens[i] = decode_uint32(b) & ~TOKEN_BYTE_MASK;
+ tokens.write[i] = decode_uint32(b) & ~TOKEN_BYTE_MASK;
b += 4;
} else {
- tokens[i] = *b;
+ tokens.write[i] = *b;
b += 1;
total_len--;
}
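The token stream decoded above is variable-width: a token occupies a single byte unless its flag bit (TOKEN_BYTE_MASK) is set, in which case it is read as a 4-byte little-endian value with the flag masked out. A standalone sketch of that decoding step; the 0x80 mask value is an assumption for illustration, since the header defining it is not part of this diff:

	#include <cstdint>
	#include <cstdio>

	static const uint32_t TOKEN_BYTE_MASK = 0x80; // assumed flag bit

	// Read one token from the stream, advancing the cursor by 1 or 4 bytes.
	static uint32_t read_token(const uint8_t *&b, int &remaining) {
		if (*b & TOKEN_BYTE_MASK) {
			uint32_t v = (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
					((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
			b += 4;
			remaining -= 4;
			return v & ~TOKEN_BYTE_MASK; // clear the flag bit
		}
		uint32_t v = *b;
		b += 1;
		remaining -= 1;
		return v;
	}

	int main() {
		const uint8_t stream[] = { 0x05, 0x81, 0x02, 0x00, 0x00 };
		const uint8_t *p = stream;
		int remaining = (int)sizeof(stream);
		printf("%u %u\n", read_token(p, remaining), read_token(p, remaining)); // 5 513
		return 0;
	}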
@@ -1315,15 +1320,15 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
//save header
buf.resize(24);
- buf[0] = 'G';
- buf[1] = 'D';
- buf[2] = 'S';
- buf[3] = 'C';
- encode_uint32(BYTECODE_VERSION, &buf[4]);
- encode_uint32(identifier_map.size(), &buf[8]);
- encode_uint32(constant_map.size(), &buf[12]);
- encode_uint32(line_map.size(), &buf[16]);
- encode_uint32(token_array.size(), &buf[20]);
+ buf.write[0] = 'G';
+ buf.write[1] = 'D';
+ buf.write[2] = 'S';
+ buf.write[3] = 'C';
+ encode_uint32(BYTECODE_VERSION, &buf.write[4]);
+ encode_uint32(identifier_map.size(), &buf.write[8]);
+ encode_uint32(constant_map.size(), &buf.write[12]);
+ encode_uint32(line_map.size(), &buf.write[16]);
+ encode_uint32(token_array.size(), &buf.write[20]);
//save identifiers
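The header written above is 24 bytes: a 4-byte "GDSC" magic followed by five little-endian uint32 fields holding the bytecode version (now 13), the identifier, constant and line counts, and the token count. A reader-side sketch under those assumptions; the struct and field names are descriptive labels, not the engine's own types:

	#include <cstdint>
	#include <cstdio>
	#include <cstring>

	struct BytecodeHeader {
		uint32_t version;
		uint32_t identifier_count;
		uint32_t constant_count;
		uint32_t line_count;
		uint32_t token_count;
	};

	static uint32_t read_u32(const uint8_t *p) {
		return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
				((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
	}

	// Validate the magic and pull out the five count fields.
	static bool parse_header(const uint8_t *buf, size_t len, BytecodeHeader &out) {
		if (len < 24 || memcmp(buf, "GDSC", 4) != 0)
			return false;
		out.version = read_u32(buf + 4);
		out.identifier_count = read_u32(buf + 8);
		out.constant_count = read_u32(buf + 12);
		out.line_count = read_u32(buf + 16);
		out.token_count = read_u32(buf + 20);
		return true;
	}

	int main() {
		uint8_t buf[24] = { 'G', 'D', 'S', 'C', 13 }; // version 13, all counts zero
		BytecodeHeader h;
		if (parse_header(buf, sizeof(buf), h))
			printf("version %u\n", h.version);
		return 0;
	}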
@@ -1355,7 +1360,7 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
ERR_FAIL_COND_V(err != OK, Vector<uint8_t>());
int pos = buf.size();
buf.resize(pos + len);
- encode_variant(E->get(), &buf[pos], len);
+ encode_variant(E->get(), &buf.write[pos], len);
}
for (Map<int, uint32_t>::Element *E = rev_line_map.front(); E; E = E->next()) {