Skip to content

Commit aa516b4

Browse files
committed
S3: Download files in 1GiB chunks to reduce memory pressure.
Signed-off-by: Pascal Spörri <[email protected]>
1 parent aef8782 commit aa516b4

File tree

1 file changed

+18
-2
lines changed

1 file changed

+18
-2
lines changed

src/s3/S3Endpoint.cpp

+18-2
Original file line numberDiff line numberDiff line change
@@ -296,8 +296,24 @@ absl::Status Endpoint::putObject(const std::string &bucket, const std::string &k
296296
absl::StatusOr<size_t> Endpoint::readBytes(const std::string &bucket, const std::string &key,
                                           uint8_t *bytes, size_t position, size_t length) const {
  // Reads `length` bytes of `bucket`/`key` starting at `position` into `bytes`,
  // issuing the download in bounded chunks so no single GET buffers more than
  // kChunkSize at once (reduces memory pressure on large objects).
  //
  // Returns the number of bytes actually read, or the error status of the
  // first failed chunk. A zero-length chunk (object shorter than requested)
  // logs a warning and returns the bytes read so far.
  //
  // TODO: Make the chunk size configurable.
  constexpr size_t kChunkSize = size_t{1} << 30;  // 1 GiB

  size_t count = 0;  // bytes successfully read so far
  while (count < length) {
    // Never request more than one chunk, nor past the caller's range.
    const size_t request = std::min(kChunkSize, length - count);
    auto stream = utility::ByteIOStream(&bytes[count], request);
    auto status = read(bucket, key, stream, position + count, request);
    if (!status.ok()) {
      return status;
    }
    if (*status == 0) {
      // Short read: the backend returned no data before `length` bytes were
      // delivered — warn and return what we have instead of looping forever.
      LOG_WARNING("Unexpected length for ", bucket, "/", key, ": Requested ", length, " (at pos ",
                  position, ") but got ", count, "!");
      break;
    }
    count += *status;
  }
  return count;
}
302318

303319
absl::StatusOr<size_t> Endpoint::read(const std::string &bucket, const std::string &key,

0 commit comments

Comments
 (0)