diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 146a343..187c71c 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -144,7 +144,7 @@ jobs:
 
     - name: Install brew other packages
       run: |
-        S3FS_BREW_PACKAGES='automake cppcheck python3 coreutils gnu-sed shellcheck';
+        S3FS_BREW_PACKAGES='automake cppcheck python3 coreutils gnu-sed shellcheck nlohmann-json';
         for s3fs_brew_pkg in ${S3FS_BREW_PACKAGES}; do if brew list | grep -q ${s3fs_brew_pkg}; then if brew outdated | grep -q ${s3fs_brew_pkg}; then HOMEBREW_NO_AUTO_UPDATE=1 brew upgrade ${s3fs_brew_pkg}; fi; else HOMEBREW_NO_AUTO_UPDATE=1 brew install ${s3fs_brew_pkg}; fi; done;
 
     - name: Install awscli2
@@ -160,7 +160,7 @@ jobs:
     - name: Build
       run: |
         ./autogen.sh
-        PKG_CONFIG_PATH=/usr/local/opt/curl/lib/pkgconfig:/usr/local/opt/openssl/lib/pkgconfig ./configure CXXFLAGS='-std=c++03 -DS3FS_PTHREAD_ERRORCHECK=1'
+        PKG_CONFIG_PATH=/usr/local/opt/curl/lib/pkgconfig:/usr/local/opt/openssl/lib/pkgconfig ./configure CXXFLAGS='-std=c++11 -DS3FS_PTHREAD_ERRORCHECK=1'
         make --jobs=$(sysctl -n hw.ncpu)
 
     - name: Cppcheck
diff --git a/.github/workflows/linux-ci-helper.sh b/.github/workflows/linux-ci-helper.sh
index 1bbb5fa..0da24ce 100755
--- a/.github/workflows/linux-ci-helper.sh
+++ b/.github/workflows/linux-ci-helper.sh
@@ -85,7 +85,7 @@ if [ "${CONTAINER_FULLNAME}" = "ubuntu:22.04" ]; then
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip"
+    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip nlohmann-json3-dev zip"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -94,7 +94,7 @@ elif [ "${CONTAINER_FULLNAME}" = "ubuntu:20.04" ]; then
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip"
+    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip nlohmann-json3-dev zip"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -103,25 +103,19 @@ elif [ "${CONTAINER_FULLNAME}" = "ubuntu:18.04" ]; then
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip"
+    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip nlohmann-json3-dev zip"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
+    apt-get update; apt-get install software-properties-common -y
+    add-apt-repository ppa:team-xbmc/ppa
-elif [ "${CONTAINER_FULLNAME}" = "ubuntu:16.04" ]; then
-    PACKAGE_MANAGER_BIN="apt-get"
-    PACKAGE_UPDATE_OPTIONS="update -y -qq"
-    PACKAGE_INSTALL_OPTIONS="install -y"
-
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip unzip"
-    INSTALL_CHECKER_PKGS="cppcheck shellcheck"
-    INSTALL_CHECKER_PKG_OPTIONS=""
 
 elif [ "${CONTAINER_FULLNAME}" = "debian:bullseye" ]; then
     PACKAGE_MANAGER_BIN="apt-get"
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip unzip"
+    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip unzip zip nlohmann-json3-dev"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -130,7 +124,7 @@ elif [ "${CONTAINER_FULLNAME}" = "debian:buster" ]; then
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip unzip"
+    INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip nlohmann-json3-dev zip unzip"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -146,10 +140,10 @@ elif [ "${CONTAINER_FULLNAME}" = "rockylinux:9" ]; then
     # PACKAGE_INSTALL_ADDITIONAL_OPTIONS="--allowerasing"
 
-    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr diffutils curl python3 procps unzip xz https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm"
+    INSTALL_PACKAGES="autoconf curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr diffutils curl python3 python3-pip procps json-devel zip unzip xz https://dl.fedoraproject.org/pub/epel/epel-release-latest-9.noarch.rpm"
     INSTALL_CHECKER_PKGS="cppcheck"
     INSTALL_CHECKER_PKG_OPTIONS="--enablerepo=epel"
 
     # [NOTE]
     # For RockyLinux, ShellCheck is downloaded from the github archive and installed.
     #
@@ -160,7 +156,11 @@ elif [ "${CONTAINER_FULLNAME}" = "rockylinux:8" ]; then
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr diffutils curl python3 unzip"
+    # [NOTE]
+    # Installing ShellCheck on Rocky Linux is not easy, so we give up running
+    # ShellCheck here; it does not need to run on every operating system.
+    #
+    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr diffutils curl python3 json-devel zip unzip"
     INSTALL_CHECKER_PKGS="cppcheck"
     INSTALL_CHECKER_PKG_OPTIONS="--enablerepo=powertools"
 
@@ -179,7 +179,7 @@ elif [ "${CONTAINER_FULLNAME}" = "centos:centos7" ]; then
     # And in this version, it cannot be passed due to following error.
     # "shellcheck: ./test/integration-test-main.sh: hGetContents: invalid argument (invalid byte sequence)"
     #
-    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr curl python3 epel-release unzip"
+    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr curl python3 epel-release unzip json-devel"
     INSTALL_CHECKER_PKGS="cppcheck"
     INSTALL_CHECKER_PKG_OPTIONS="--enablerepo=epel"
 
@@ -189,17 +189,7 @@ elif [ "${CONTAINER_FULLNAME}" = "fedora:38" ]; then
     PACKAGE_MANAGER_BIN="dnf"
     PACKAGE_UPDATE_OPTIONS="update -y -qq"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
     # TODO: Cannot use java-latest-openjdk (17) due to modules issue in S3Proxy/jclouds/Guice
-    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel curl attr diffutils procps python3-pip unzip"
-    INSTALL_CHECKER_PKGS="cppcheck ShellCheck"
-    INSTALL_CHECKER_PKG_OPTIONS=""
-
-elif [ "${CONTAINER_FULLNAME}" = "fedora:37" ]; then
-    PACKAGE_MANAGER_BIN="dnf"
-    PACKAGE_UPDATE_OPTIONS="update -y -qq"
-    PACKAGE_INSTALL_OPTIONS="install -y"
-
-    # TODO: Cannot use java-latest-openjdk (17) due to modules issue in S3Proxy/jclouds/Guice
-    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel curl attr diffutils procps python3-pip unzip"
+    INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel curl attr diffutils procps python3-pip unzip json-devel zip"
     INSTALL_CHECKER_PKGS="cppcheck ShellCheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -208,7 +198,7 @@ elif [ "${CONTAINER_FULLNAME}" = "opensuse/leap:15" ]; then
     PACKAGE_UPDATE_OPTIONS="refresh"
     PACKAGE_INSTALL_OPTIONS="install -y"
 
-    INSTALL_PACKAGES="automake curl-devel fuse fuse-devel gcc-c++ java-11-openjdk-headless libxml2-devel make openssl-devel python3-pip curl attr ShellCheck unzip"
+    INSTALL_PACKAGES="automake curl-devel fuse fuse-devel gcc-c++ java-11-openjdk-headless libxml2-devel make openssl-devel python3-pip curl attr ShellCheck unzip nlohmann_json zip"
     INSTALL_CHECKER_PKGS="cppcheck ShellCheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
@@ -217,7 +207,7 @@ elif [ "${CONTAINER_FULLNAME}" = "alpine:3.17" ]; then
     PACKAGE_UPDATE_OPTIONS="update --no-progress"
     PACKAGE_INSTALL_OPTIONS="add --no-progress --no-cache"
 
-    INSTALL_PACKAGES="bash curl g++ make automake autoconf libtool git curl-dev fuse-dev libxml2-dev coreutils procps attr sed mailcap openjdk11 aws-cli"
+    INSTALL_PACKAGES="bash curl g++ make automake autoconf libtool git curl-dev fuse-dev libxml2-dev coreutils procps attr sed mailcap openjdk11 aws-cli zip nlohmann-json"
     INSTALL_CHECKER_PKGS="cppcheck shellcheck"
     INSTALL_CHECKER_PKG_OPTIONS=""
 
diff --git a/COMPILATION.md b/COMPILATION.md
index 9333107..7bdafb2 100644
--- a/COMPILATION.md
+++ b/COMPILATION.md
@@ -15,6 +15,7 @@ Keep in mind using the pre-built packages when available.
 * libcurl
 * libxml2
 * openssl
+* nlohmann-json (e.g. nlohmann-json3-dev on Debian/Ubuntu)
 * mime.types (the package providing depends on the OS)
   * s3fs tries to detect `/etc/mime.types` as default regardless of the OS
   * Else s3fs tries to detect `/etc/apache2/mime.types` if OS is macOS
diff --git a/doc/man/s3fs.1.in b/doc/man/s3fs.1.in
index e846685..d17cc06 100644
--- a/doc/man/s3fs.1.in
+++ b/doc/man/s3fs.1.in
@@ -63,6 +63,10 @@ All s3fs options must given in the form where "opt" is:
 \fB\-o\fR bucket
 if it is not specified bucket name (and path) in command line, must specify this option after \-o option for bucket name.
 .TP
+\fB\-o\fR aws_home_path
+specify the path of the AWS configuration directory to use for SSO authentication (only available for SSO authentication).
+${HOME}/.aws is used by default.
+.TP
 \fB\-o\fR default_acl (default="private")
 the default canned acl to apply to all written s3 objects, e.g., "private", "public-read". see
 https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl for the full list of canned ACLs.
diff --git a/src/s3fs_cred.cpp b/src/s3fs_cred.cpp
index bcccf56..797f643 100644
--- a/src/s3fs_cred.cpp
+++ b/src/s3fs_cred.cpp
@@ -26,6 +26,8 @@
 #include
 #include
 #include
+#include <openssl/sha.h>
+#include <nlohmann/json.hpp>
 
 #include "common.h"
 #include "s3fs_cred.h"
@@ -33,7 +35,9 @@
 #include "curl.h"
 #include "string_util.h"
 #include "metaheader.h"
+#include "s3fs_util.h"
 
+using json = nlohmann::json;
 //-------------------------------------------------------------------
 // Symbols
 //-------------------------------------------------------------------
@@ -53,7 +57,7 @@ const char* VersionS3fsCredential(bool detail)
 {
     static const char version[]        = "built-in";
-    static const char detail_version[] =
+    static const char detail_version[] =
         "s3fs-fuse built-in Credential I/F Function\n"
         "Copyright(C) 2007 s3fs-fuse\n";
 
@@ -131,6 +135,7 @@ const char* S3fsCred::IAMv2_token_ttl_hdr  = "X-aws-ec2-metadata-token-ttl-
 const char* S3fsCred::IAMv2_token_hdr      = "X-aws-ec2-metadata-token";
 
 std::string S3fsCred::bucket_name;
+static const std::string profile_prefix = "profile ";
 
 //-------------------------------------------------------------------
 // Class Methods
 //-------------------------------------------------------------------
@@ -173,14 +178,20 @@ bool S3fsCred::ParseIAMRoleFromMetaDataResponse(const char* response, std::strin
 //-------------------------------------------------------------------
 S3fsCred::S3fsCred() :
     is_lock_init(false),
+    passwd_file(""),
+    aws_home_dir(""),
     aws_profile(DEFAULT_AWS_PROFILE_NAME),
     load_iamrole(false),
+    AWSAccessKeyId(""),
+    AWSSecretAccessKey(""),
+    AWSAccessToken(""),
     AWSAccessTokenExpire(0),
     is_ecs(false),
     is_use_session_token(false),
     is_ibm_iam_auth(false),
     IAM_cred_url("http://169.254.169.254/latest/meta-data/iam/security-credentials/"),
     IAM_api_version(2),
+    IAMv2_api_token(""),
     IAM_field_count(4),
     IAM_token_field("Token"),
     IAM_expiry_field("Expiration"),
@@ -236,6 +247,22 @@ bool S3fsCred::IsSetPasswdFile() const
     return !passwd_file.empty();
 }
 
+
+bool S3fsCred::SetAwsHomePath(const char* dir)
+{
+    if(!dir || strlen(dir) == 0){
+        return false;
+    }
+    aws_home_dir = dir;
+
+    return true;
+}
+
+bool S3fsCred::IsSetAwsHomePath()
+{
+    return !aws_home_dir.empty();
+}
+
 bool S3fsCred::SetAwsProfileName(const char* name)
 {
     if(!name || strlen(name) == 0){
@@ -881,6 +908,135 @@ int S3fsCred::CheckS3fsCredentialAwsFormat(const kvmap_t& kvmap, std::string& ac
     return 1;
 }
 
+
+// Return: 1 - OK(could read and set access keys etc.)
+//         0 - NG(could not read)
+//
+int S3fsCred::CheckSsoCacheKey(std::string& sso_cache_key)
+{
+    std::string config_filename = std::string(getpwuid(getuid())->pw_dir) + "/.aws/config";
+    if(IsSetAwsHomePath()){
+        // Override the config path
+        config_filename = aws_home_dir + "/config";
+    }
+    std::ifstream PF(config_filename.c_str());
+
+    std::string profile;
+    std::string sso_start_url;
+    std::string sso_region;
+    std::string sso_account_id;
+    std::string sso_role_name;
+
+    // read each line
+    std::string line;
+    while(getline(PF, line)){
+        line = trim(line);
+        if(line.empty()){
+            continue;
+        }
+        if('#' == line[0]){
+            continue;
+        }
+
+        if(line.size() > 2 && line[0] == '[' && line[line.size() - 1] == ']'){
+            if(profile == aws_profile){
+                S3FS_PRN_INFO("Using profile %s to connect to S3", profile.c_str());
+                break;
+            }
+            // Strip the 'profile ' prefix if present
+            std::size_t found = line.find(profile_prefix);
+            if(found != std::string::npos){
+                line.erase(found, profile_prefix.size());
+            }
+            profile = line.substr(1, line.size() - 2);
+            sso_start_url.clear();
+            sso_region.clear();
+            sso_account_id.clear();
+            sso_role_name.clear();
+        }
+
+        size_t pos = line.find_first_of('=');
+        if(pos == std::string::npos){
+            continue;
+        }
+        std::string key   = trim(line.substr(0, pos));
+        std::string value = trim(line.substr(pos + 1, std::string::npos));
+        if(key == "sso_start_url"){
+            sso_start_url = value;
+        }else if(key == "sso_region"){
+            sso_region = value;
+        }else if(key == "sso_account_id"){
+            sso_account_id = value;
+        }else if(key == "sso_role_name"){
+            sso_role_name = value;
+        }
+    }
+
+    // SSO is configured for the selected profile
+    if(!sso_start_url.empty()){
+
+        // Generate the same JSON document as the AWS CLI v2
+        // (see SSOCredentialFetcher in credentials.py of the aws-cli sources).
+        json json_sso =
+        {
+            {"startUrl", sso_start_url.c_str()}, {"roleName", sso_role_name.c_str()}, {"accountId", sso_account_id.c_str()}
+        };
+
+        // Digest the json via SHA1
+        unsigned char digest[SHA_DIGEST_LENGTH];
+        std::string json_sso_string = json_sso.dump();
+        S3FS_PRN_INFO("AWS SSO found : sha1 json_sso_string %s", json_sso_string.c_str());
+        SHA1(reinterpret_cast<const unsigned char*>(json_sso_string.c_str()), json_sso_string.length(), digest);
+
+        char mdString[SHA_DIGEST_LENGTH * 2 + 1];
+        for(int i = 0; i < SHA_DIGEST_LENGTH; i++){
+            sprintf(&mdString[i * 2], "%02x", (unsigned int)digest[i]);
+        }
+
+        S3FS_PRN_INFO("AWS SSO found : sso_cache_key (.aws/cli/cache/****.json) is %s", mdString);
+        // Override the sso_cache_key
+        sso_cache_key = mdString;
+
+        return 1;
+    }else{
+        // No SSO configuration in your config file
+        return 0;
+    }
+}
+
+//
+// Read Sso Aws Credential Json file
+// Use the nlohmann-json library to parse the json
+//
+bool S3fsCred::ReadSsoAwsCredentialFile(const std::string &filename, AutoLock::Type type)
+{
+    // open the credentials cache file
+    std::ifstream PF(filename.c_str());
+    if(!PF.good()){
+        return false;
+    }
+    json jf = json::parse(PF);
+
+    std::string accesskey     = jf["Credentials"]["AccessKeyId"];
+    std::string secret        = jf["Credentials"]["SecretAccessKey"];
+    std::string session_token = jf["Credentials"]["SessionToken"];
+
+    if(session_token.empty()){
+        S3FS_PRN_EXIT("An AWS session token was expected but was not found in the aws/cli/cache file.");
+        return false;
+    }else{
+        if(!SetAccessKeyWithSessionToken(accesskey.c_str(), secret.c_str(), session_token.c_str(), type)){
+            S3FS_PRN_EXIT("The session token is invalid. Consider refreshing your SSO credentials by running \"aws sso login --profile <profile>\" and \"aws sts get-caller-identity --profile <profile>\".");
+            return false;
+        }
+    }
+
+    return true;
+}
 //
 // Read Aws Credential File
 //
@@ -965,6 +1121,7 @@ bool S3fsCred::ReadAwsCredentialFile(const std::string &filename, AutoLock::Type
 //
 // 1 - from the command line (security risk)
 // 2 - from a password file specified on the command line
+// 2b - from ${HOME}/.aws/config, checking the SSO configuration
 // 3 - from environment variables
 // 3a - from the AWS_CREDENTIAL_FILE environment variable
 // 3b - from ${HOME}/.aws/credentials
@@ -1000,7 +1157,6 @@ bool S3fsCred::InitialS3fsCredentials()
     char* AWSACCESSKEYID     = getenv("AWS_ACCESS_KEY_ID")     ? getenv("AWS_ACCESS_KEY_ID")     : getenv("AWSACCESSKEYID");
     char* AWSSECRETACCESSKEY = getenv("AWS_SECRET_ACCESS_KEY") ? getenv("AWS_SECRET_ACCESS_KEY") : getenv("AWSSECRETACCESSKEY");
     char* AWSSESSIONTOKEN    = getenv("AWS_SESSION_TOKEN")     ? getenv("AWS_SESSION_TOKEN")     : getenv("AWSSESSIONTOKEN");
-
     if(AWSACCESSKEYID != NULL || AWSSECRETACCESSKEY != NULL){
         if( (AWSACCESSKEYID == NULL && AWSSECRETACCESSKEY != NULL) || (AWSACCESSKEYID != NULL && AWSSECRETACCESSKEY == NULL) ){
@@ -1028,6 +1184,24 @@ bool S3fsCred::InitialS3fsCredentials()
         return true;
     }
 
+    // 2b - check ${HOME}/.aws/config
+    // find sso_start_url, sso_region, sso_account_id, sso_role_name for the selected profile
+    std::string sso_cache_key;
+    if(CheckSsoCacheKey(sso_cache_key)){
+        // Use the AWS CLI cache (see SSOCredentialFetcher in credentials.py of the aws-cli v2 sources).
+        std::string aws_credentials_sso = std::string(getpwuid(getuid())->pw_dir) + "/.aws/cli/cache/" + sso_cache_key + ".json";
+        if(IsSetAwsHomePath()){
+            // Override the aws_home_dir path
+            aws_credentials_sso = aws_home_dir + "/cli/cache/" + sso_cache_key + ".json";
+        }
+
+        if(ReadSsoAwsCredentialFile(aws_credentials_sso, AutoLock::NONE)){
+            return true;
+        }else{
+            S3FS_PRN_EXIT("The SSO configuration is invalid. Consider refreshing your SSO credentials by running \"aws sso login --profile <profile>\" and \"aws sts get-caller-identity --profile <profile>\".");
+        }
+    }
+
     // 3a - from the AWS_CREDENTIAL_FILE environment variable
     char* AWS_CREDENTIAL_FILE = getenv("AWS_CREDENTIAL_FILE");
     if(AWS_CREDENTIAL_FILE != NULL){
@@ -1510,6 +1684,11 @@ int S3fsCred::DetectParam(const char* arg)
         return 0;
     }
 
+    if(is_prefix(arg, "aws_home_path=")){
+        SetAwsHomePath(strchr(arg, '=') + sizeof(char));
+        return 0;
+    }
+
     return 1;
 }
 
diff --git a/src/s3fs_cred.h b/src/s3fs_cred.h
index 43e9b76..d06a068 100644
--- a/src/s3fs_cred.h
+++ b/src/s3fs_cred.h
@@ -57,6 +57,7 @@ class S3fsCred
 
         bool            is_lock_init;
         std::string     passwd_file;
+        std::string     aws_home_dir;
         std::string     aws_profile;
         bool            load_iamrole;
 
@@ -102,6 +103,9 @@ class S3fsCred
         bool SetAwsProfileName(const char* profile_name);
         bool SetIAMRoleMetadataType(bool flag);
 
+        bool SetAwsHomePath(const char* dir);
+        bool IsSetAwsHomePath();
+
         bool SetAccessKey(const char* AccessKeyId, const char* SecretAccessKey, AutoLock::Type type);
         bool SetAccessKeyWithSessionToken(const char* AccessKeyId, const char* SecretAccessKey, const char * SessionToken, AutoLock::Type type);
         bool IsSetAccessKeys(AutoLock::Type type) const;
@@ -131,6 +136,8 @@ class S3fsCred
         bool ReadS3fsPasswdFile(AutoLock::Type type);
         static int CheckS3fsCredentialAwsFormat(const kvmap_t& kvmap, std::string& access_key_id, std::string& secret_access_key);
 
+        int CheckSsoCacheKey(std::string& sso_cache_key);
+        bool ReadSsoAwsCredentialFile(const std::string &filename, AutoLock::Type type);
         bool ReadAwsCredentialFile(const std::string &filename, AutoLock::Type type);
 
         bool InitialS3fsCredentials();
diff --git a/src/s3fs_help.cpp b/src/s3fs_help.cpp
index 9b776ca..f04ffa0 100644
--- a/src/s3fs_help.cpp
+++ b/src/s3fs_help.cpp
@@ -59,6 +59,11 @@ static const char help_string[] =
     "   - if it is not specified bucket name (and path) in command line,\n"
    "     must specify this option after -o option for bucket name.\n"
     "\n"
+    "   aws_home_path (default=\"\")\n"
+    "   - specify the AWS configuration directory used for SSO authentication.\n"
+    "     ${HOME}/.aws is used by default.\n"
+    "   - this option is only available for SSO authentication.\n"
+    "\n"
     "   default_acl (default=\"private\")\n"
     "   - the default canned acl to apply to all written s3 objects,\n"
     "     e.g., private, public-read. see\n"
diff --git a/test/integration-test-common.sh b/test/integration-test-common.sh
index b3c4cf7..6a32a6e 100644
--- a/test/integration-test-common.sh
+++ b/test/integration-test-common.sh
@@ -36,6 +36,7 @@
 #                             from /var/tmp
 # CHAOS_HTTP_PROXY=1          Test proxy(environment) by CHAOS HTTP PROXY
 # CHAOS_HTTP_PROXY_OPT=1      Test proxy(option) by CHAOS HTTP PROXY
+# To run aws-cli commands, add the AWS root certificate to the file /tmp/keystore.pem
 #
 # Example of running against Amazon S3 using a bucket named "bucket":
 #
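For reviewers who want to exercise the new code path, a minimal sketch of the configuration that `CheckSsoCacheKey()` reads from `${HOME}/.aws/config` (or `<aws_home_path>/config`). All values below are placeholders, not anything taken from this patch; only the four `sso_*` keys are actually consumed, and the `profile ` prefix of the section name is stripped before comparing it with the selected profile:

```ini
# ~/.aws/config (or <aws_home_path>/config) -- placeholder values only
[profile my-sso]
sso_start_url  = https://example.awsapps.com/start
sso_region     = us-east-1
sso_account_id = 123456789012
sso_role_name  = MyRole
```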
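`ReadSsoAwsCredentialFile()` then parses the credential cache that the AWS CLI writes under `~/.aws/cli/cache/<key>.json` after an SSO login. Only the three fields the patch actually reads are sketched here; the values are placeholders and any other fields the CLI writes are ignored by this code:

```json
{
  "Credentials": {
    "AccessKeyId": "ASIAXXXXXXXXXXXXXXXX",
    "SecretAccessKey": "placeholder-secret",
    "SessionToken": "placeholder-session-token"
  }
}
```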
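A standalone sketch (not part of the patch) of the cache-key derivation that `CheckSsoCacheKey()` performs: SHA1 over the nlohmann-json dump of `startUrl`/`roleName`/`accountId`, hex-encoded. It assumes nlohmann-json and OpenSSL headers are installed; whether the resulting key matches what a given AWS CLI version writes depends on the CLI's own serialization, which is the patch's underlying assumption rather than something shown here.

```cpp
// Build with: g++ -std=c++11 sso_cache_key.cpp -lcrypto
#include <cstdio>
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>
#include <openssl/sha.h>

// Mirrors the hashing step of CheckSsoCacheKey(): JSON dump -> SHA1 -> lowercase hex.
static std::string sso_cache_key(const std::string& start_url, const std::string& role_name, const std::string& account_id)
{
    nlohmann::json j = {
        {"startUrl",  start_url},
        {"roleName",  role_name},
        {"accountId", account_id}
    };
    std::string dumped = j.dump();

    unsigned char digest[SHA_DIGEST_LENGTH];
    SHA1(reinterpret_cast<const unsigned char*>(dumped.c_str()), dumped.length(), digest);

    char hex[SHA_DIGEST_LENGTH * 2 + 1];
    for(int i = 0; i < SHA_DIGEST_LENGTH; ++i){
        std::snprintf(&hex[i * 2], 3, "%02x", static_cast<unsigned int>(digest[i]));
    }
    return std::string(hex, SHA_DIGEST_LENGTH * 2);
}

int main()
{
    // Placeholder values; the real ones come from the selected profile in ~/.aws/config.
    std::cout << sso_cache_key("https://example.awsapps.com/start", "MyRole", "123456789012") << ".json" << std::endl;
    return 0;
}
```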
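Finally, a hypothetical end-to-end invocation under the assumptions above. Bucket, mount point, and profile names are placeholders; `aws_home_path` only needs to be passed when the AWS directory is somewhere other than the default `${HOME}/.aws`:

```sh
aws sso login --profile my-sso
aws sts get-caller-identity --profile my-sso   # makes the CLI write ~/.aws/cli/cache/<key>.json
s3fs mybucket /mnt/mybucket -o profile=my-sso -o aws_home_path="${HOME}/.aws"
```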