author		holgac <huseyinolgac@gmail.com>	2022-06-17 00:56:14 +0200
committer	Huseyin Olgac <huseyinolgac@gmail.com>	2022-07-01 01:03:13 +0200
commit		95244f0e7a81a340af6810dd4dbf033449045b91 (patch)
tree		75bfad09d810da989e21bb0bc3196b0124611791 /scene
parent		afdae67cc381bb340da2e864279da6b836804b7f (diff)
Fixes #62097: infinite loop in animation compress due to too small page size
Fixed an infinite loop that occurred when the given page size is too small to store anything. If a rollback is attempted during the first step of compression, LocalVector::resize ends up being called with AnimationCompressionDataState::validated_packet_count, which is -1 at that point. resize takes an unsigned integer, and its growth logic keeps bit-shifting the capacity upward on the assumption that it will eventually reach a value greater than or equal to the requested amount; for uint(-1) that never happens, so the loop never terminates. The fix adds an early failure when the given page size is not large enough.
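
To illustrate the failure mode, here is a minimal standalone sketch. It is not Godot's LocalVector code; the simple "double until it fits" growth strategy and the variable names are assumptions made for illustration, but they show why a 32-bit capacity that keeps left-shifting wraps to zero and can never reach uint32_t(-1):

// Illustrative sketch only -- NOT Godot's LocalVector implementation. It assumes
// a simple "double the capacity until it fits" growth strategy, purely to show
// why a request of uint32_t(-1) can never be satisfied by bit-shifting.
#include <cstdint>
#include <cstdio>

int main() {
	uint32_t requested = uint32_t(-1); // what resize() receives when validated_packet_count is -1
	uint32_t capacity = 1;
	uint32_t shifts = 0;
	// After 31 shifts capacity is 0x80000000, still below requested; the next
	// shift wraps it to 0 and it stays 0, so "capacity < requested" holds
	// forever. The cap of 40 iterations exists only so this demo terminates.
	while (capacity < requested && shifts < 40) {
		capacity <<= 1;
		shifts++;
	}
	printf("after %u shifts, capacity is %u, still below %u\n", shifts, capacity, requested);
	return 0;
}

The ERR_FAIL_COND_MSG added in this commit rejects such page sizes up front, so compression never reaches that resize call with -1.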
Diffstat (limited to 'scene')
-rw-r--r--	scene/resources/animation.cpp	6
1 file changed, 6 insertions, 0 deletions
diff --git a/scene/resources/animation.cpp b/scene/resources/animation.cpp
index 7183accc66..2d749915f0 100644
--- a/scene/resources/animation.cpp
+++ b/scene/resources/animation.cpp
@@ -4619,6 +4619,7 @@ void Animation::compress(uint32_t p_page_size, uint32_t p_fps, float p_split_tol
 	data_tracks.resize(tracks_to_compress.size());
 	time_tracks.resize(tracks_to_compress.size());
+	uint32_t needed_min_page_size = base_page_size;
 	for (uint32_t i = 0; i < data_tracks.size(); i++) {
 		data_tracks[i].split_tolerance = p_split_tolerance;
 		if (track_get_type(tracks_to_compress[i]) == TYPE_BLEND_SHAPE) {
@@ -4626,7 +4627,12 @@ void Animation::compress(uint32_t p_page_size, uint32_t p_fps, float p_split_tol
 		} else {
 			data_tracks[i].components = 3;
 		}
+		needed_min_page_size += data_tracks[i].data.size() + data_tracks[i].get_temp_packet_size();
 	}
+	for (uint32_t i = 0; i < time_tracks.size(); i++) {
+		needed_min_page_size += time_tracks[i].packets.size() * 4; // time packet is 32 bits
+	}
+	ERR_FAIL_COND_MSG(p_page_size < needed_min_page_size, "Cannot compress with the given page size");
 	while (true) {
 		// Begin by finding the keyframe in all tracks with the time closest to the current time