Mirror of https://github.com/yhirose/cpp-httplib.git, synced 2025-05-12 06:01:40 +00:00
Fix gzip_decompressor in case of one chunk being exactly equal to buffer size (#636)
* add larger chunks test
* revert test
* Fix gzip decoder in case of chunk being equal to buffer size
* add test
parent 69e75f4a67
commit 3b5bab3308
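Why the loop condition matters: with the old do { ... } while (strm_.avail_out == 0) form, a chunk that inflates to exactly the 16384-byte output buffer leaves avail_out at 0 even though all of its input has been consumed, so the loop calls inflate() once more with nothing to read. zlib then returns Z_BUF_ERROR, and the function's final return ret == Z_OK || ret == Z_STREAM_END (unchanged in the hunk below) reports failure for perfectly valid data. Looping on strm_.avail_in > 0 instead stops as soon as the chunk's input is exhausted. The standalone sketch below is not part of the commit: it reproduces the fixed loop shape with raw zlib under illustrative assumptions (the helper name inflate_chunk, the 130-byte input chunking, and the gzip round-trip in main are choices made for this example, mirroring the new test). Build with -lz.

// build: g++ -std=c++11 gzip_roundtrip_sketch.cc -lz
#include <zlib.h>

#include <algorithm>
#include <array>
#include <cstddef>
#include <iostream>
#include <string>

// Feed one input chunk to an already-initialized inflate stream and append
// whatever output it produces. Looping on avail_in (the fixed condition)
// instead of "do { ... } while (avail_out == 0)" means inflate() is never
// called again after a chunk that fills the output buffer exactly but has
// no input left.
static bool inflate_chunk(z_stream &strm, const char *data, size_t size,
                          std::string &out) {
  strm.next_in = const_cast<Bytef *>(reinterpret_cast<const Bytef *>(data));
  strm.avail_in = static_cast<uInt>(size);

  std::array<char, 16384> buff{};
  int ret = Z_OK;
  while (strm.avail_in > 0) {
    strm.avail_out = static_cast<uInt>(buff.size());
    strm.next_out = reinterpret_cast<Bytef *>(buff.data());
    ret = inflate(&strm, Z_NO_FLUSH);
    if (ret != Z_OK && ret != Z_STREAM_END) { return false; }
    out.append(buff.data(), buff.size() - strm.avail_out);
  }
  return ret == Z_OK || ret == Z_STREAM_END;
}

int main() {
  // 32 KiB of data, as in the new test, so the inflated output is an exact
  // multiple of the 16384-byte buffer.
  std::string data;
  for (size_t i = 0; i < 32 * 1024; ++i) {
    data.push_back(static_cast<char>('a' + i % 26));
  }

  // Compress the whole payload with a gzip wrapper (windowBits 15 + 16).
  z_stream c{};
  deflateInit2(&c, Z_DEFAULT_COMPRESSION, Z_DEFLATED, 15 + 16, 8,
               Z_DEFAULT_STRATEGY);
  std::string compressed(deflateBound(&c, data.size()), '\0');
  c.next_in = const_cast<Bytef *>(reinterpret_cast<const Bytef *>(data.data()));
  c.avail_in = static_cast<uInt>(data.size());
  c.next_out = reinterpret_cast<Bytef *>(&compressed[0]);
  c.avail_out = static_cast<uInt>(compressed.size());
  deflate(&c, Z_FINISH);
  compressed.resize(compressed.size() - c.avail_out);
  deflateEnd(&c);

  // Decompress in 130-byte input chunks, mirroring the new test, so that one
  // of the iterations inflates to exactly the buffer size.
  z_stream d{};
  inflateInit2(&d, 15 + 16);
  std::string decompressed;
  const size_t chunk_size = 130;
  for (size_t pos = 0; pos < compressed.size(); pos += chunk_size) {
    size_t n = std::min(compressed.size() - pos, chunk_size);
    if (!inflate_chunk(d, compressed.data() + pos, n, decompressed)) {
      inflateEnd(&d);
      std::cerr << "decompression failed\n";
      return 1;
    }
  }
  inflateEnd(&d);
  std::cout << (decompressed == data ? "round trip OK" : "mismatch") << "\n";
  return 0;
}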
httplib.h
@@ -2267,7 +2267,7 @@ public:
     strm_.next_in = const_cast<Bytef *>(reinterpret_cast<const Bytef *>(data));
 
     std::array<char, 16384> buff{};
-    do {
+    while (strm_.avail_in > 0) {
       strm_.avail_out = buff.size();
       strm_.next_out = reinterpret_cast<Bytef *>(buff.data());
 
@@ -2282,7 +2282,7 @@ public:
       if (!callback(buff.data(), buff.size() - strm_.avail_out)) {
         return false;
       }
-    } while (strm_.avail_out == 0);
+    }
 
     return ret == Z_OK || ret == Z_STREAM_END;
   }
43  test/test.cc
@@ -2175,6 +2175,49 @@ TEST_F(ServerTest, GetStreamedChunkedWithGzip2) {
   EXPECT_EQ(200, res->status);
   EXPECT_EQ(std::string("123456789"), res->body);
 }
+
+
+TEST(GzipDecompressor, ChunkedDecompression) {
+  std::string data;
+  for (size_t i = 0; i < 32 * 1024; ++i) {
+    data.push_back(static_cast<char>('a' + i % 26));
+  }
+
+  std::string compressed_data;
+  {
+    httplib::detail::gzip_compressor compressor;
+    bool result = compressor.compress(
+        data.data(),
+        data.size(),
+        /*last=*/true,
+        [&] (const char* data, size_t size) {
+          compressed_data.insert(compressed_data.size(), data, size);
+          return true;
+        });
+    ASSERT_TRUE(result);
+  }
+
+  std::string decompressed_data;
+  {
+    httplib::detail::gzip_decompressor decompressor;
+
+    // Chunk size is chosen specifically so that one decompressed chunk is exactly 16384 bytes,
+    // which is the size of the decompressor's output buffer.
+    size_t chunk_size = 130;
+    for (size_t chunk_begin = 0; chunk_begin < compressed_data.size(); chunk_begin += chunk_size) {
+      size_t current_chunk_size = std::min(compressed_data.size() - chunk_begin, chunk_size);
+      bool result = decompressor.decompress(
+          compressed_data.data() + chunk_begin,
+          current_chunk_size,
+          [&] (const char* data, size_t size) {
+            decompressed_data.insert(decompressed_data.size(), data, size);
+            return true;
+          });
+      ASSERT_TRUE(result);
+    }
+  }
+  ASSERT_EQ(data, decompressed_data);
+}
 #endif
 
 #ifdef CPPHTTPLIB_BROTLI_SUPPORT