mirror of https://github.com/s3fs-fuse/s3fs-fuse.git
Fixed cppcheck error on osx
parent a45ff6cdaa
commit c494e54320
@@ -32,11 +32,12 @@ cppcheck:
cppcheck --quiet --error-exitcode=1 \
--inline-suppr \
--std=c++03 \
-D HAVE_ATTR_XATTR_H \
-D HAVE_SYS_EXTATTR_H \
-D HAVE_MALLOC_TRIM \
-U CURLE_PEER_FAILED_VERIFICATION \
-U P_tmpdir \
-U ENOATTR \
--enable=all \
--enable=warning,style,information,missingInclude \
--suppress=missingIncludeSystem \
--suppress=unusedFunction \
--suppress=variableScope \
src/ test/
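
Because the target passes --inline-suppr, a single finding can also be silenced right at the offending line instead of suppressing the check file-wide. A minimal sketch of the comment syntax cppcheck accepts (the function and the suppressed check are illustrative, not taken from s3fs-fuse):

    #include <cstdio>

    // cppcheck-suppress unusedFunction
    static void dump_build_info(void)
    {
        std::printf("built with HAVE_MALLOC_TRIM support\n");
    }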
@@ -130,8 +130,8 @@ bool AdditionalHeader::Load(const char* file)
// compile
regex_t* preg = new regex_t;
int result;
char errbuf[256];
if(0 != (result = regcomp(preg, key.c_str(), REG_EXTENDED | REG_NOSUB))){ // we do not need matching info
char errbuf[256];
regerror(result, preg, errbuf, sizeof(errbuf));
S3FS_PRN_ERR("failed to compile regex from %s key by %s.", key.c_str(), errbuf);
delete preg;
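
The change moves errbuf into the error branch, the only place it is read. A self-contained sketch of the same regcomp/regerror reporting pattern (simplified, not the s3fs-fuse code itself):

    #include <regex.h>
    #include <cstdio>

    static bool compile_key_pattern(const char* pattern)
    {
        regex_t preg;
        int result = regcomp(&preg, pattern, REG_EXTENDED | REG_NOSUB);  // no matching info needed
        if(0 != result){
            char errbuf[256];                                            // declared only where it is used
            regerror(result, &preg, errbuf, sizeof(errbuf));
            std::fprintf(stderr, "failed to compile regex from %s: %s\n", pattern, errbuf);
            return false;
        }
        regfree(&preg);
        return true;
    }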
25 src/curl.cpp
@@ -226,8 +226,8 @@ bool BodyData::Append(void* ptr, size_t bytes)

const char* BodyData::str(void) const
{
static const char* strnull = "";
if(!text){
static const char* strnull = "";
return strnull;
}
return text;
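
Here the empty-string fallback is declared only in the branch that returns it. A tiny illustration of the same pattern outside s3fs-fuse (hypothetical helper):

    #include <string>

    const char* c_str_or_empty(const std::string* s)
    {
        if(!s){
            static const char* strnull = "";  // fallback lives in the branch that needs it
            return strnull;
        }
        return s->c_str();
    }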
@@ -697,10 +697,8 @@ bool S3fsCurl::LocateBundle(void)
// See if environment variable CURL_CA_BUNDLE is set
// if so, check it, if it is a good path, then set the
// curl_ca_bundle variable to it
char *CURL_CA_BUNDLE;

if(0 == S3fsCurl::curl_ca_bundle.size()){
CURL_CA_BUNDLE = getenv("CURL_CA_BUNDLE");
char* CURL_CA_BUNDLE = getenv("CURL_CA_BUNDLE");
if(CURL_CA_BUNDLE != NULL) {
// check for existence and readability of the file
ifstream BF(CURL_CA_BUNDLE);
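
The declaration and the getenv call are now fused into one line inside the block that uses them. A standalone sketch of the same lookup-and-validate pattern (the helper name is made up):

    #include <cstdlib>
    #include <fstream>
    #include <string>

    // accept CURL_CA_BUNDLE from the environment only if the file is readable
    static bool pick_ca_bundle(std::string& bundle)
    {
        if(bundle.empty()){
            char* CURL_CA_BUNDLE = std::getenv("CURL_CA_BUNDLE");
            if(CURL_CA_BUNDLE != NULL){
                std::ifstream BF(CURL_CA_BUNDLE);
                if(BF.good()){
                    bundle = CURL_CA_BUNDLE;
                    return true;
                }
            }
        }
        return false;
    }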
@@ -1620,16 +1618,12 @@ int S3fsCurl::CurlDebugFunc(CURL* hcurl, curl_infotype type, char* data, size_t
break;
case CURLINFO_HEADER_IN:
case CURLINFO_HEADER_OUT:
size_t length, remaining;
int newline;
char* p;

// Print each line individually for tidy output
remaining = size;
p = data;
size_t remaining = size;
char* p = data;
do {
char* eol = (char*)memchr(p, '\n', remaining);
newline = 0;
char* eol = (char*)memchr(p, '\n', remaining);
int newline = 0;
if (eol == NULL) {
eol = (char*)memchr(p, '\r', remaining);
} else if (eol > p && *(eol - 1) == '\r') {
@@ -1639,7 +1633,7 @@ int S3fsCurl::CurlDebugFunc(CURL* hcurl, curl_infotype type, char* data, size_t
newline++;
eol++;
}
length = eol - p;
size_t length = eol - p;
S3FS_PRN_CURL("%c %.*s", CURLINFO_HEADER_IN == type ? '<' : '>', (int)length - newline, p);
remaining -= length;
p = eol;
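
For context, the surrounding loop walks a non-NUL-terminated header buffer and prints it line by line, trimming the trailing CR/LF. A simplified, self-contained approximation of that loop (it prints to stdout instead of S3FS_PRN_CURL, and the handling of a missing terminator is an assumption):

    #include <cstdio>
    #include <cstring>

    static void print_header_lines(const char* data, size_t size)
    {
        size_t remaining = size;
        const char* p = data;
        do {
            const char* eol = (const char*)memchr(p, '\n', remaining);
            int newline = 0;
            if (eol == NULL) {
                eol = (const char*)memchr(p, '\r', remaining);
            } else if (eol > p && *(eol - 1) == '\r') {
                newline++;
            }
            if (eol != NULL) {
                newline++;
                eol++;
            } else {
                eol = p + remaining;  // assumption: treat the rest of the buffer as one line
            }
            size_t length = eol - p;
            std::printf("> %.*s\n", (int)(length - newline), p);
            remaining -= length;
            p = eol;
        } while (p < data + size);
    }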
@@ -2867,7 +2861,6 @@ int S3fsCurl::PutRequest(const char* tpath, headers_t& meta, int fd)
{
struct stat st;
FILE* file = NULL;
int fd2;

S3FS_PRN_INFO3("[tpath=%s]", SAFESTRPTR(tpath));

@@ -2876,6 +2869,7 @@ int S3fsCurl::PutRequest(const char* tpath, headers_t& meta, int fd)
}
if(-1 != fd){
// duplicate fd
int fd2;
if(-1 == (fd2 = dup(fd)) || -1 == fstat(fd2, &st) || 0 != lseek(fd2, 0, SEEK_SET) || NULL == (file = fdopen(fd2, "rb"))){
S3FS_PRN_ERR("Could not duplicate file descriptor(errno=%d)", errno);
if(-1 != fd2){
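
fd2 now lives inside the branch that duplicates the descriptor. A self-contained sketch of that dup/fstat/lseek/fdopen sequence (simplified; errors go to stderr instead of the s3fs log, and the caller owns the returned FILE*):

    #include <cstdio>
    #include <cerrno>
    #include <unistd.h>
    #include <sys/stat.h>

    // work on a private duplicate so the caller's descriptor offset is untouched
    static FILE* duplicate_as_file(int fd, struct stat* st)
    {
        int   fd2  = -1;
        FILE* file = NULL;
        if(-1 == (fd2 = dup(fd)) || -1 == fstat(fd2, st) || 0 != lseek(fd2, 0, SEEK_SET) || NULL == (file = fdopen(fd2, "rb"))){
            std::fprintf(stderr, "could not duplicate file descriptor(errno=%d)\n", errno);
            if(-1 != fd2){
                close(fd2);
            }
            return NULL;
        }
        return file;  // fclose(file) later also closes fd2
    }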
@@ -3994,8 +3988,6 @@ int S3fsMultiCurl::MultiRead(void)

int S3fsMultiCurl::Request(void)
{
int result;

S3FS_PRN_INFO3("[count=%zu]", cMap_all.size());

// Make request list.
@@ -4005,6 +3997,7 @@ int S3fsMultiCurl::Request(void)
//
while(!cMap_all.empty()){
// set curl handle to multi handle
int result;
int cnt;
s3fscurlmap_t::iterator iter;
for(cnt = 0, iter = cMap_all.begin(); cnt < S3fsMultiCurl::max_multireq && iter != cMap_all.end(); cMap_all.erase(iter++), cnt++){
@@ -1571,7 +1571,6 @@ ssize_t FdEntity::Read(char* bytes, off_t start, size_t size, bool force_load)
pagelist.SetPageLoadedStatus(start, size, false);
}

int result;
ssize_t rsize;

// check disk space
@@ -1605,6 +1604,7 @@ ssize_t FdEntity::Read(char* bytes, off_t start, size_t size, bool force_load)
}
}
// Loading
int result;
if(0 < size && 0 != (result = Load(start, load_size))){
S3FS_PRN_ERR("could not download. start(%jd), size(%zu), errno(%d)", (intmax_t)start, size, result);
return -EIO;
10 src/s3fs.cpp
@@ -2463,7 +2463,6 @@ static int s3fs_readdir(const char* path, void* buf, fuse_fill_dir_t filler, off

static int list_bucket(const char* path, S3ObjList& head, const char* delimiter, bool check_content_only)
{
int result;
string s3_realpath;
string query_delimiter;;
string query_prefix;;
@@ -2472,7 +2471,6 @@ static int list_bucket(const char* path, S3ObjList& head, const char* delimiter,
bool truncated = true;
S3fsCurl s3fscurl;
xmlDocPtr doc;
BodyData* body;

S3FS_PRN_INFO1("[path=%s]", path);

@@ -2508,11 +2506,12 @@ static int list_bucket(const char* path, S3ObjList& head, const char* delimiter,
each_query += query_prefix;

// request
int result;
if(0 != (result = s3fscurl.ListBucketRequest(path, each_query.c_str()))){
S3FS_PRN_ERR("ListBucketRequest returns with error.");
return result;
}
body = s3fscurl.GetBodyData();
BodyData* body = s3fscurl.GetBodyData();

// xmlDocPtr
if(NULL == (doc = xmlReadMemory(body->str(), static_cast<int>(body->size()), "", NULL, 0))){
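
After the request, the response body is handed to libxml2. A minimal sketch of parsing an in-memory buffer with xmlReadMemory and releasing the document (standalone, not the list_bucket code):

    #include <libxml/parser.h>
    #include <cstdio>

    static bool parse_response(const char* buf, int len)
    {
        xmlDocPtr doc = xmlReadMemory(buf, len, "", NULL, 0);  // no base URL, default options
        if(NULL == doc){
            std::fprintf(stderr, "could not parse response body\n");
            return false;
        }
        // ... walk the tree from xmlDocGetRootElement(doc) ...
        xmlFreeDoc(doc);
        return true;
    }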
@@ -3847,7 +3846,6 @@ static int parse_passwd_file(bucketkvmap_t& resmap)
{
string line;
size_t first_pos;
size_t last_pos;
readline_t linelist;
readline_t::iterator iter;

@@ -3902,8 +3900,8 @@ static int parse_passwd_file(bucketkvmap_t& resmap)

// read ':' type
for(iter = linelist.begin(); iter != linelist.end(); ++iter){
first_pos = iter->find_first_of(":");
last_pos = iter->find_last_of(":");
first_pos = iter->find_first_of(":");
size_t last_pos = iter->find_last_of(":");
if(first_pos == string::npos){
continue;
}
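
The loop cuts each credential line at its first and last ':'. A rough, illustrative split of such a line (the field names and the single-':' fallback are assumptions, not taken from the commit):

    #include <string>

    static bool split_passwd_line(const std::string& line, std::string& bucket, std::string& accesskey, std::string& secret)
    {
        size_t first_pos = line.find_first_of(":");
        if(first_pos == std::string::npos){
            return false;                           // not a ':'-separated line
        }
        size_t last_pos = line.find_last_of(":");
        if(first_pos == last_pos){
            // only one ':' -> assume "accesskey:secret" with no bucket prefix
            bucket.clear();
            accesskey = line.substr(0, first_pos);
            secret    = line.substr(first_pos + 1);
        }else{
            bucket    = line.substr(0, first_pos);
            accesskey = line.substr(first_pos + 1, last_pos - first_pos - 1);
            secret    = line.substr(last_pos + 1);
        }
        return true;
    }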