Mirror of https://github.com/s3fs-fuse/s3fs-fuse.git
Issue multipart when object size exceeds part size
Previously s3fs issued multipart uploads only when the object size reached twice the part size; for example, with the default 10 MB part size, a 15 MB object was still sent as a single upload. Coupling this threshold to the part size was confusing; if a separate threshold is ever needed, s3fs should add a dedicated tunable for it, similar to singlepart_copy_limit. Fixes #1058.
commit febaf6849f
parent 21321a9d96
@@ -1194,7 +1194,7 @@ int FdEntity::Load(off_t start, off_t size)
         off_t over_size = iter->bytes - need_load_size;
 
         // download
-        if(2 * S3fsCurl::GetMultipartSize() <= need_load_size && !nomultipart){ // default 20MB
+        if(S3fsCurl::GetMultipartSize() <= need_load_size && !nomultipart){
           // parallel request
           // Additional time is needed for large files
           time_t backup = 0;
@@ -1544,7 +1544,7 @@ int FdEntity::RowFlush(const char* tpath, bool force_sync)
       S3FS_PRN_ERR("fstat is failed by errno(%d), but continue...", errno);
     }
 
-    if(pagelist.Size() >= 2 * S3fsCurl::GetMultipartSize() && !nomultipart){ // default 20MB
+    if(pagelist.Size() >= S3fsCurl::GetMultipartSize() && !nomultipart){
      // Additional time is needed for large files
      time_t backup = 0;
      if(120 > S3fsCurl::GetReadwriteTimeout()){
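For illustration, here is a minimal standalone C++ sketch of the upload-path decision before and after this change. It is not s3fs source: the helper names (use_multipart_old, use_multipart_new) and the hard-coded 10 MB default are stand-ins for S3fsCurl::GetMultipartSize(), which the multipart_size mount option controls.

#include <cstdint>
#include <cstdio>
#include <initializer_list>

// Sketch only: models the checks in FdEntity::Load / FdEntity::RowFlush.
// Pre-commit rule: multipart only once the object reached twice the part size.
static bool use_multipart_old(int64_t object_size, int64_t part_size, bool nomultipart)
{
    return !nomultipart && 2 * part_size <= object_size;
}

// Post-commit rule: multipart as soon as the object reaches one full part.
static bool use_multipart_new(int64_t object_size, int64_t part_size, bool nomultipart)
{
    return !nomultipart && part_size <= object_size;
}

int main()
{
    const int64_t mib       = 1024 * 1024;
    const int64_t part_size = 10 * mib;  // s3fs default multipart_size (10 MB)

    for(int64_t size : {5 * mib, 10 * mib, 15 * mib, 20 * mib}){
        std::printf("%2lld MiB: old=%-9s new=%s\n",
                    static_cast<long long>(size / mib),
                    use_multipart_old(size, part_size, false) ? "multipart" : "single",
                    use_multipart_new(size, part_size, false) ? "multipart" : "single");
    }
    return 0;
}

With the default 10 MB part size, the sketch shows 10 MiB and 15 MiB objects switching from single uploads under the old rule to multipart uploads under the new one, while 5 MiB and 20 MiB objects behave as before. The threshold can still be raised via -o multipart_size or multipart disabled entirely with -o nomultipart.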