From 0e29f7974b59e4440cf02e1388fb9d8ab2b5c5fd Mon Sep 17 00:00:00 2001
From: Hein-Pieter van Braam
Date: Wed, 25 Jul 2018 03:11:03 +0200
Subject: Reduce unnecessary COW on Vector by making writing explicit

This commit makes operator[] on Vector const and adds a write proxy to it.
From now on, writes to Vectors need to happen through the .write proxy.
For instance:

Vector<int> vec;
vec.push_back(10);
std::cout << vec[0] << std::endl;
vec.write[0] = 20;

Failing to use the .write proxy will cause a compilation error.

In addition, COWable datatypes can now embed a CowData pointer to their
data. This means that String, CharString, and VMap no longer use or derive
from Vector.

_ALWAYS_INLINE_ and _FORCE_INLINE_ are now equivalent for debug and
non-debug builds. This makes Vector a lot faster in the editor and while
running tests. This difference used to exist because force-inlined methods
gave a bad debugging experience; after extensive testing with modern
compilers, this is no longer the case.
---
 modules/gdscript/gdscript_tokenizer.cpp | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

(limited to 'modules/gdscript/gdscript_tokenizer.cpp')

diff --git a/modules/gdscript/gdscript_tokenizer.cpp b/modules/gdscript/gdscript_tokenizer.cpp
index 940bdcbc8d..7ae7c72ed3 100644
--- a/modules/gdscript/gdscript_tokenizer.cpp
+++ b/modules/gdscript/gdscript_tokenizer.cpp
@@ -1172,15 +1172,15 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
 		Vector<uint8_t> cs;
 		cs.resize(len);
 		for (int j = 0; j < len; j++) {
-			cs[j] = b[j] ^ 0xb6;
+			cs.write[j] = b[j] ^ 0xb6;
 		}

-		cs[cs.size() - 1] = 0;
+		cs.write[cs.size() - 1] = 0;
 		String s;
 		s.parse_utf8((const char *)cs.ptr());
 		b += len;
 		total_len -= len + 4;
-		identifiers[i] = s;
+		identifiers.write[i] = s;
 	}

 	constants.resize(constant_count);
@@ -1193,7 +1193,7 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
 			return err;
 		b += len;
 		total_len -= len;
-		constants[i] = v;
+		constants.write[i] = v;
 	}

 	ERR_FAIL_COND_V(line_count * 8 > total_len, ERR_INVALID_DATA);
@@ -1218,10 +1218,10 @@ Error GDScriptTokenizerBuffer::set_code_buffer(const Vector<uint8_t> &p_buffer)
 		if ((*b) & TOKEN_BYTE_MASK) { //little endian always
 			ERR_FAIL_COND_V(total_len < 4, ERR_INVALID_DATA);
-			tokens[i] = decode_uint32(b) & ~TOKEN_BYTE_MASK;
+			tokens.write[i] = decode_uint32(b) & ~TOKEN_BYTE_MASK;
 			b += 4;
 		} else {
-			tokens[i] = *b;
+			tokens.write[i] = *b;
 			b += 1;
 			total_len--;
 		}
 	}
@@ -1320,15 +1320,15 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)

 	//save header
 	buf.resize(24);
-	buf[0] = 'G';
-	buf[1] = 'D';
-	buf[2] = 'S';
-	buf[3] = 'C';
-	encode_uint32(BYTECODE_VERSION, &buf[4]);
-	encode_uint32(identifier_map.size(), &buf[8]);
-	encode_uint32(constant_map.size(), &buf[12]);
-	encode_uint32(line_map.size(), &buf[16]);
-	encode_uint32(token_array.size(), &buf[20]);
+	buf.write[0] = 'G';
+	buf.write[1] = 'D';
+	buf.write[2] = 'S';
+	buf.write[3] = 'C';
+	encode_uint32(BYTECODE_VERSION, &buf.write[4]);
+	encode_uint32(identifier_map.size(), &buf.write[8]);
+	encode_uint32(constant_map.size(), &buf.write[12]);
+	encode_uint32(line_map.size(), &buf.write[16]);
+	encode_uint32(token_array.size(), &buf.write[20]);

 	//save identifiers

@@ -1360,7 +1360,7 @@ Vector<uint8_t> GDScriptTokenizerBuffer::parse_code_string(const String &p_code)
 		ERR_FAIL_COND_V(err != OK, Vector<uint8_t>());
 		int pos = buf.size();
 		buf.resize(pos + len);
-		encode_variant(E->get(), &buf[pos], len);
+		encode_variant(E->get(), &buf.write[pos], len);
 	}

 	for (Map::Element *E = rev_line_map.front(); E; E = E->next()) {
--
cgit v1.2.3
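
For context, here is a minimal, self-contained sketch of the copy-on-write pattern the commit message describes: reads go through a const operator[] that never copies, while writes must go through an explicit .write proxy, the only path allowed to detach (clone) shared storage. The names (CowVector, WriteProxy) and the use of std::shared_ptr are invented for this illustration; this is not Godot's CowData/Vector implementation, which uses an atomic reference count and a different layout.

// Illustration only, not part of the patch above.
#include <cstddef>
#include <iostream>
#include <memory>
#include <vector>

template <class T>
class CowVector {
	// Shared storage; copying a CowVector only copies this pointer.
	std::shared_ptr<std::vector<T>> data = std::make_shared<std::vector<T>>();

	// Clone the buffer only when it is actually shared.
	void detach() {
		if (data.use_count() > 1)
			data = std::make_shared<std::vector<T>>(*data);
	}

public:
	struct WriteProxy {
		CowVector &owner;
		// Writing through the proxy is the only path that detaches.
		T &operator[](std::size_t i) {
			owner.detach();
			return (*owner.data)[i];
		}
	};

	WriteProxy write{*this};

	CowVector() = default;
	// Copies share the buffer; 'write' is rebound to the new object.
	CowVector(const CowVector &other) : data(other.data) {}
	CowVector &operator=(const CowVector &other) {
		data = other.data;
		return *this;
	}

	void push_back(const T &v) {
		detach();
		data->push_back(v);
	}

	// Reads never detach and never copy.
	const T &operator[](std::size_t i) const { return (*data)[i]; }
	std::size_t size() const { return data->size(); }
};

int main() {
	CowVector<int> vec;
	vec.push_back(10);
	std::cout << vec[0] << std::endl; // read through const operator[]: no copy
	vec.write[0] = 20;                // write through the proxy
	// vec[0] = 20;                   // would not compile: operator[] returns const T&

	CowVector<int> copy = vec;        // cheap: shares the buffer
	copy.write[0] = 30;               // detaches 'copy'; 'vec' still holds 20
	std::cout << vec[0] << " " << copy[0] << std::endl; // prints "20 30"
	return 0;
}

With this split, shared buffers stay cheap to read, and any accidental write through operator[] becomes a compile-time error instead of a silent copy, which is the rationale behind the .write changes in the diff above.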