Fixed a bug.

* Fixed a bug
  Fixes a bug that caused an infinite loop when s3fs listed a directory
  containing directory objects that were not real objects (i.e. had no
  object information).
  This bug was introduced by r493 and was reported in issue 389.




git-svn-id: http://s3fs.googlecode.com/svn/trunk@497 df820570-a93a-0410-bd06-b72b767a4274
This commit is contained in:
ggtakec@gmail.com 2013-11-18 02:29:41 +00:00
parent 40b9f0a408
commit 8acbaf7199
2 changed files with 4 additions and 2 deletions

View File

@ -3091,12 +3091,14 @@ int S3fsMultiCurl::MultiRead(void)
// as possibly in multipart
DPRN("failed a request(%ld: %s)", responseCode, s3fscurl->url.c_str());
isRetry = true;
}else{
}else if(500 == responseCode){
// case of all other result, do retry.(11/13/2013)
// because it was found that s3fs got 500 error from S3, but could success
// to retry it.
DPRN("failed a request(%ld: %s)", responseCode, s3fscurl->url.c_str());
isRetry = true;
}else{
DPRN("failed a request(%ld: %s)", responseCode, s3fscurl->url.c_str());
}
}else{
DPRN("failed a request(Unknown respons code: %s)", s3fscurl->url.c_str());

View File

@ -2090,7 +2090,7 @@ static S3fsCurl* multi_head_retry_callback(S3fsCurl* s3fscurl)
delete newcurl;
return NULL;
}
newcurl->SetMultipartRetryCount(s3fscurl->GetMultipartRetryCount());
newcurl->SetMultipartRetryCount(s3fscurl->GetMultipartRetryCount() + 1);
return newcurl;
}