Merge pull request #68934 from Faless/net/4.x_http_decompress_fix
[Net] Fix HTTPRequest gzip with high compression ratio.
commit dce1602eda
1 changed file with 33 additions and 14 deletions
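Summary of the change: previously, _update_connection() pushed each compressed chunk into the decompressor with a single put_data() call, into a buffer sized at twice the download chunk size, so a response that inflated beyond that (roughly, a compression ratio above 2:1) failed with RESULT_BODY_DECOMPRESS_FAILED. The decompressor is now fed incrementally via put_partial_data(), draining the decompressed bytes on every pass, and the body size limit is checked inside the loop so that a "zip bomb" cannot expand a few kilobytes into gigabytes unchecked.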
@@ -276,10 +276,10 @@ bool HTTPRequest::_handle_response(bool *ret_value) {
 	}
 	if (content_encoding == "gzip") {
 		decompressor.instantiate();
-		decompressor->start_decompression(false, get_download_chunk_size() * 2);
+		decompressor->start_decompression(false, get_download_chunk_size());
 	} else if (content_encoding == "deflate") {
 		decompressor.instantiate();
-		decompressor->start_decompression(true, get_download_chunk_size() * 2);
+		decompressor->start_decompression(true, get_download_chunk_size());
 	}

 	return false;
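Note that the buffer passed to start_decompression() drops its * 2 headroom: with the streaming loop in the second hunk, the decompressor is drained on every pass and no longer has to hold a whole inflated chunk at once. As a minimal illustration of the old failure mode, here is a standalone sketch; it deliberately uses plain zlib rather than Godot's StreamPeerGZIP, and the 64 KiB zero-filled payload is just a stand-in for any highly compressible response:

// Sketch only: plain zlib stands in for StreamPeerGZIP. A 64 KiB run of
// zeros gzips down to around a hundred bytes, so inflating it back in one
// shot into a buffer sized at 2x the compressed chunk cannot succeed.
#include <zlib.h>
#include <cstdio>
#include <vector>

int main() {
	std::vector<unsigned char> raw(65536, 0); // highly compressible payload
	std::vector<unsigned char> gz(compressBound(raw.size()) + 32);

	z_stream c = {};
	deflateInit2(&c, Z_BEST_COMPRESSION, Z_DEFLATED, 15 + 16, 8, Z_DEFAULT_STRATEGY); // 15 + 16 = gzip wrapper
	c.next_in = raw.data();
	c.avail_in = (uInt)raw.size();
	c.next_out = gz.data();
	c.avail_out = (uInt)gz.size();
	deflate(&c, Z_FINISH);
	const size_t gz_len = gz.size() - c.avail_out;
	deflateEnd(&c);
	printf("compressed %zu -> %zu bytes\n", raw.size(), gz_len);

	// Old approach: one decompression call, output capped at 2x the chunk size.
	std::vector<unsigned char> out(2 * gz_len);
	z_stream d = {};
	inflateInit2(&d, 15 + 16);
	d.next_in = gz.data();
	d.avail_in = (uInt)gz_len;
	d.next_out = out.data();
	d.avail_out = (uInt)out.size();
	int ret = inflate(&d, Z_FINISH);
	// ret is not Z_STREAM_END: the stream cannot finish in the space given,
	// which is the condition the old code surfaced as a failed request.
	printf("single-shot inflate finished: %s\n", ret == Z_STREAM_END ? "yes" : "no");
	inflateEnd(&d);
	return 0;
}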
@@ -390,19 +390,38 @@ bool HTTPRequest::_update_connection() {
 				return false;
 			}

-			PackedByteArray chunk = client->read_response_body_chunk();
-			downloaded.add(chunk.size());
+			PackedByteArray chunk;
+			if (decompressor.is_null()) {
+				// Chunk can be read directly.
+				chunk = client->read_response_body_chunk();
+				downloaded.add(chunk.size());
+			} else {
+				// Chunk is the result of decompression.
+				PackedByteArray compressed = client->read_response_body_chunk();
+				downloaded.add(compressed.size());

-			// Decompress chunk if needed.
-			if (decompressor.is_valid()) {
-				Error err = decompressor->put_data(chunk.ptr(), chunk.size());
-				if (err == OK) {
-					chunk.resize(decompressor->get_available_bytes());
-					err = decompressor->get_data(chunk.ptrw(), chunk.size());
-				}
-				if (err != OK) {
-					_defer_done(RESULT_BODY_DECOMPRESS_FAILED, response_code, response_headers, PackedByteArray());
-					return true;
+				int pos = 0;
+				int left = compressed.size();
+				while (left) {
+					int w = 0;
+					Error err = decompressor->put_partial_data(compressed.ptr() + pos, left, w);
+					if (err == OK) {
+						PackedByteArray dc;
+						dc.resize(decompressor->get_available_bytes());
+						err = decompressor->get_data(dc.ptrw(), dc.size());
+						chunk.append_array(dc);
+					}
+					if (err != OK) {
+						_defer_done(RESULT_BODY_DECOMPRESS_FAILED, response_code, response_headers, PackedByteArray());
+						return true;
+					}
+					// We need this check here because a "zip bomb" could result in a chunk of a few kilobytes decompressing into gigabytes of data.
+					if (body_size_limit >= 0 && final_body_size.get() + chunk.size() > body_size_limit) {
+						_defer_done(RESULT_BODY_SIZE_LIMIT_EXCEEDED, response_code, response_headers, PackedByteArray());
+						return true;
+					}
+					pos += w;
+					left -= w;
 				}
 			}
 			final_body_size.add(chunk.size());
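For reference, the same loop shape in self-contained form: the sketch below mirrors the patched logic using plain zlib rather than StreamPeerGZIP, and the decompress_chunk() helper and its body_size_limit parameter are made up for illustration, not part of the Godot API.

// Sketch only: feed one compressed network chunk into an ongoing zlib
// stream, draining the output as we go, with the size limit enforced
// inside the loop (the zip-bomb guard from the patch).
#include <zlib.h>
#include <cstdint>
#include <vector>

// Returns false on corrupt data or when the limit would be exceeded,
// the two cases the patch reports as RESULT_BODY_DECOMPRESS_FAILED and
// RESULT_BODY_SIZE_LIMIT_EXCEEDED respectively.
bool decompress_chunk(z_stream &strm, const std::vector<uint8_t> &compressed,
		std::vector<uint8_t> &body, int64_t body_size_limit) {
	size_t pos = 0;
	size_t left = compressed.size();
	uint8_t buf[4096]; // fixed drain buffer; the loop makes its size irrelevant
	while (left > 0) {
		strm.next_in = const_cast<Bytef *>(compressed.data() + pos);
		strm.avail_in = (uInt)left;
		strm.next_out = buf;
		strm.avail_out = sizeof(buf);
		int ret = inflate(&strm, Z_NO_FLUSH);
		if (ret != Z_OK && ret != Z_STREAM_END) {
			return false; // corrupt or truncated stream
		}
		body.insert(body.end(), buf, buf + (sizeof(buf) - strm.avail_out));
		// Checked every pass: a few compressed kilobytes may inflate to
		// gigabytes, so waiting until the chunk is fully decoded is too late.
		if (body_size_limit >= 0 && (int64_t)body.size() > body_size_limit) {
			return false;
		}
		const size_t consumed = left - strm.avail_in;
		pos += consumed;
		left -= consumed;
		if (ret == Z_STREAM_END) {
			break; // end of stream reached inside this chunk
		}
	}
	return true;
}

A caller would run inflateInit2(&strm, 15 + 16) once per response for gzip (or windowBits 15 for zlib-wrapped deflate), invoke this per chunk as the body streams in, and inflateEnd(&strm) when done, matching how the decompressor member is set up once in _handle_response() and reused across _update_connection() calls.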