diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5efbc7f..cf16d6e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -101,6 +101,10 @@ jobs: run: | make cppcheck + - name: Shellcheck + run: | + make shellcheck + - name: Test suite run: | make ALL_TESTS=1 check -C test || (test/filter-suite-log.sh test/test-suite.log; exit 1) @@ -131,7 +135,7 @@ jobs: - name: Install brew other packages run: | - S3FS_BREW_PACKAGES='automake cppcheck python3 coreutils gnu-sed'; + S3FS_BREW_PACKAGES='automake cppcheck python3 coreutils gnu-sed shellcheck'; for s3fs_brew_pkg in ${S3FS_BREW_PACKAGES}; do if brew list | grep -q ${s3fs_brew_pkg}; then if brew outdated | grep -q ${s3fs_brew_pkg}; then HOMEBREW_NO_AUTO_UPDATE=1 brew upgrade ${s3fs_brew_pkg}; fi; else HOMEBREW_NO_AUTO_UPDATE=1 brew install ${s3fs_brew_pkg}; fi; done; - name: Install awscli @@ -152,6 +156,10 @@ jobs: run: | make cppcheck + - name: Shellcheck + run: | + make shellcheck + - name: Test suite run: | make check -C src diff --git a/.github/workflows/linux-ci-helper.sh b/.github/workflows/linux-ci-helper.sh index a0f4c44..97ddb73 100755 --- a/.github/workflows/linux-ci-helper.sh +++ b/.github/workflows/linux-ci-helper.sh @@ -24,7 +24,7 @@ echo "${PRGNAME} [INFO] Start Linux helper for installing packages." 
#----------------------------------------------------------- # Common variables #----------------------------------------------------------- -PRGNAME=`basename $0` +PRGNAME=$(basename "$0") #----------------------------------------------------------- # Parameter check @@ -40,8 +40,10 @@ fi # Container OS variables #----------------------------------------------------------- CONTAINER_FULLNAME=$1 -CONTAINER_OSNAME=`echo ${CONTAINER_FULLNAME} | sed 's/:/ /g' | awk '{print $1}'` -CONTAINER_OSVERSION=`echo ${CONTAINER_FULLNAME} | sed 's/:/ /g' | awk '{print $2}'` +# shellcheck disable=SC2034 +CONTAINER_OSNAME=$(echo "${CONTAINER_FULLNAME}" | sed 's/:/ /g' | awk '{print $1}') +# shellcheck disable=SC2034 +CONTAINER_OSVERSION=$(echo "${CONTAINER_FULLNAME}" | sed 's/:/ /g' | awk '{print $2}') #----------------------------------------------------------- # Common variables for pip @@ -53,6 +55,7 @@ INSTALL_AWSCLI_PACKAGES="awscli" #----------------------------------------------------------- # Parameters for configure(set environments) #----------------------------------------------------------- +# shellcheck disable=SC2089 CONFIGURE_OPTIONS="CXXFLAGS='-O -std=c++03 -DS3FS_PTHREAD_ERRORCHECK=1' --prefix=/usr --with-openssl" #----------------------------------------------------------- @@ -63,63 +66,81 @@ if [ "${CONTAINER_FULLNAME}" = "ubuntu:21.10" ]; then PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "ubuntu:20.04" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev 
attr curl python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "ubuntu:18.04" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "ubuntu:16.04" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "debian:bullseye" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "debian:buster" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "debian:stretch" ]; then PACKAGE_MANAGER_BIN="apt-get" PACKAGE_UPDATE_OPTIONS="update -y -qq" INSTALL_PACKAGES="autoconf 
autotools-dev default-jre-headless fuse libfuse-dev libcurl4-openssl-dev libxml2-dev locales-all mime-support libtool pkg-config libssl-dev attr curl procps python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck shellcheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "centos:centos8" ]; then PACKAGE_MANAGER_BIN="dnf" PACKAGE_UPDATE_OPTIONS="update -y -qq" + # [NOTE] + # Installing ShellCheck on CentOS 8 is not easy. + # Give up running ShellCheck on CentOS 8, as we don't have to run ShellCheck on all operating systems. + # INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr diffutils curl python3" - INSTALL_CPPCHECK_OPTIONS="--enablerepo=powertools" + INSTALL_CHECKER_PKGS="cppcheck" + INSTALL_CHECKER_PKG_OPTIONS="--enablerepo=powertools" elif [ "${CONTAINER_FULLNAME}" = "centos:centos7" ]; then PACKAGE_MANAGER_BIN="yum" PACKAGE_UPDATE_OPTIONS="update -y" + # [NOTE] + # The ShellCheck version (0.3.8) is too old to use for checking. + # And in this version, it cannot be passed due to the following error.
+ # "shellcheck: ./test/integration-test-main.sh: hGetContents: invalid argument (invalid byte sequence)" + # INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel attr curl python3 epel-release" - INSTALL_CPPCHECK_OPTIONS="--enablerepo=epel" + INSTALL_CHECKER_PKGS="cppcheck" + INSTALL_CHECKER_PKG_OPTIONS="--enablerepo=epel" elif [ "${CONTAINER_FULLNAME}" = "fedora:35" ]; then PACKAGE_MANAGER_BIN="dnf" @@ -127,14 +148,16 @@ elif [ "${CONTAINER_FULLNAME}" = "fedora:35" ]; then # TODO: Cannot use java-latest-openjdk (17) due to modules issue in S3Proxy/jclouds/Guice INSTALL_PACKAGES="curl-devel fuse fuse-devel gcc libstdc++-devel gcc-c++ glibc-langpack-en java-11-openjdk-headless libxml2-devel mailcap git automake make openssl-devel curl attr diffutils procps python3-pip" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_CHECKER_PKGS="cppcheck ShellCheck" + INSTALL_CHECKER_PKG_OPTIONS="" elif [ "${CONTAINER_FULLNAME}" = "opensuse/leap:15" ]; then PACKAGE_MANAGER_BIN="zypper" PACKAGE_UPDATE_OPTIONS="refresh" - INSTALL_PACKAGES="automake curl-devel fuse fuse-devel gcc-c++ java-11-openjdk-headless libxml2-devel make openssl-devel python3-pip curl attr" - INSTALL_CPPCHECK_OPTIONS="" + INSTALL_PACKAGES="automake curl-devel fuse fuse-devel gcc-c++ java-11-openjdk-headless libxml2-devel make openssl-devel python3-pip curl attr ShellCheck" + INSTALL_CHECKER_PKGS="cppcheck ShellCheck" + INSTALL_CHECKER_PKG_OPTIONS="" else echo "No container configured for: ${CONTAINER_FULLNAME}" @@ -148,16 +171,16 @@ fi # Update packages (ex. apt-get update -y -qq) # echo "${PRGNAME} [INFO] Updates." -${PACKAGE_MANAGER_BIN} ${PACKAGE_UPDATE_OPTIONS} +/bin/sh -c "${PACKAGE_MANAGER_BIN} ${PACKAGE_UPDATE_OPTIONS}" # # Install packages ( with cppcheck ) # echo "${PRGNAME} [INFO] Install packages." 
-${PACKAGE_MANAGER_BIN} install -y ${INSTALL_PACKAGES} +/bin/sh -c "${PACKAGE_MANAGER_BIN} install -y ${INSTALL_PACKAGES}" echo "${PRGNAME} [INFO] Install cppcheck package." -${PACKAGE_MANAGER_BIN} ${INSTALL_CPPCHECK_OPTIONS} install -y cppcheck +/bin/sh -c "${PACKAGE_MANAGER_BIN} ${INSTALL_CHECKER_PKG_OPTIONS} install -y ${INSTALL_CHECKER_PKGS}" # Check Java version java -version @@ -166,16 +189,19 @@ java -version # Install awscli # echo "${PRGNAME} [INFO] Install awscli package." -${PIP_BIN} install ${PIP_OPTIONS} ${INSTALL_AWSCLI_PACKAGES} -${PIP_BIN} install ${PIP_OPTIONS} rsa +/bin/sh -c "${PIP_BIN} install ${PIP_OPTIONS} ${INSTALL_AWSCLI_PACKAGES}" +/bin/sh -c "${PIP_BIN} install ${PIP_OPTIONS} rsa" #----------------------------------------------------------- # Set environment for configure #----------------------------------------------------------- echo "${PRGNAME} [INFO] Set environment for configure options" + +# shellcheck disable=SC2090 export CONFIGURE_OPTIONS echo "${PRGNAME} [INFO] Finish Linux helper for installing packages." + exit 0 # diff --git a/Makefile.am b/Makefile.am index ebea5ea..fbb9b36 100644 --- a/Makefile.am +++ b/Makefile.am @@ -17,6 +17,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. ###################################################################### + SUBDIRS=src test doc EXTRA_DIST=doc default_commit_hash @@ -28,6 +29,8 @@ dist-hook: release : dist ../utils/release.sh ../utils/release.sh $(DIST_ARCHIVES) +.PHONY: cppcheck shellcheck + cppcheck: cppcheck --quiet --error-exitcode=1 \ --inline-suppr \ @@ -43,6 +46,35 @@ cppcheck: --suppress=unmatchedSuppression \ src/ test/ +# +# ShellCheck +# +SHELLCHECK_CMD = shellcheck +SHELLCHECK_SH_OPT = --shell=sh +SHELLCHECK_BASH_OPT = --shell=bash + +# [NOTE] +# To control error warnings as a whole, specify the "SC" with the following variables. 
+# +SHELLCHECK_COMMON_IGN = --exclude=SC1091 +SHELLCHECK_CUSTOM_IGN = --exclude=SC1091 + +shellcheck: + @if type shellcheck > /dev/null 2>&1; then \ + echo "* ShellCheck version"; \ + $(SHELLCHECK_CMD) --version; \ + echo ""; \ + echo "* Check all sh files with ShellCheck"; \ + LC_ALL=C.UTF-8 $(SHELLCHECK_CMD) $(SHELLCHECK_SH_OPT) $(SHELLCHECK_COMMON_IGN) $$(grep '#![[:space:]]*/bin/sh' $$(find . -type f -name \*.sh) | sed -e 's|^\(.*\):#\!.*$$|\1|g') || exit 1; \ + echo "-> No error was detected."; \ + echo ""; \ + echo "* Check all bash files with ShellCheck"; \ + LC_ALL=C.UTF-8 $(SHELLCHECK_CMD) $(SHELLCHECK_BASH_OPT) $(SHELLCHECK_COMMON_IGN) $$(grep '#![[:space:]]*/bin/bash' $$(find . -type f -name \*.sh) | sed -e 's|^\(.*\):#\!.*$$|\1|g') || exit 1; \ + echo "-> No error was detected."; \ + else \ + echo "* ShellCheck is not installed, so skip this."; \ + fi + # # Local variables: # tab-width: 4 diff --git a/autogen.sh b/autogen.sh index 700fc7d..5ff38be 100755 --- a/autogen.sh +++ b/autogen.sh @@ -1,5 +1,5 @@ -#! /bin/sh - +#!/bin/sh +# # This file is part of S3FS. # # Copyright 2009, 2010 Free Software Foundation, Inc. @@ -22,14 +22,12 @@ echo "--- Make commit hash file -------" SHORTHASH="unknown" -type git > /dev/null 2>&1 -if [ $? -eq 0 -a -d .git ]; then - RESULT=`git rev-parse --short HEAD` - if [ $? 
-eq 0 ]; then - SHORTHASH=${RESULT} +if command -v git > /dev/null 2>&1 && test -d .git; then + if RESULT=$(git rev-parse --short HEAD); then + SHORTHASH="${RESULT}" fi fi -echo ${SHORTHASH} > default_commit_hash +echo "${SHORTHASH}" > default_commit_hash echo "--- Finished commit hash file ---" diff --git a/test/filter-suite-log.sh b/test/filter-suite-log.sh index 532c91b..2a4abc8 100755 --- a/test/filter-suite-log.sh +++ b/test/filter-suite-log.sh @@ -28,25 +28,25 @@ func_usage() echo "" } -PRGNAME=`basename $0` -SCRIPTDIR=`dirname $0` -S3FSDIR=`cd ${SCRIPTDIR}/..; pwd` -TOPDIR=`cd ${S3FSDIR}/test; pwd` +PRGNAME=$(basename "$0") +SCRIPTDIR=$(dirname "$0") +S3FSDIR=$(cd "${SCRIPTDIR}"/.. || exit 1; pwd) +TOPDIR=$(cd "${S3FSDIR}"/test || exit 1; pwd) SUITELOG="${TOPDIR}/test-suite.log" TMP_LINENO_FILE="/tmp/.lineno.tmp" while [ $# -ne 0 ]; do if [ "X$1" = "X" ]; then break - elif [ "X$1" = "X-h" -o "X$1" = "X-H" -o "X$1" = "X--help" -o "X$1" = "X--HELP" ]; then - func_usage ${PRGNAME} + elif [ "X$1" = "X-h" ] || [ "X$1" = "X-H" ] || [ "X$1" = "X--help" ] || [ "X$1" = "X--HELP" ]; then + func_usage "${PRGNAME}" exit 0 else SUITELOG=$1 fi shift done -if [ ! -f ${SUITELOG} ]; then +if [ ! -f "${SUITELOG}" ]; then echo "[ERROR] not found ${SUITELOG} log file." 
exit 1 fi @@ -59,75 +59,77 @@ fi # 2 : passed line of end of one small test(specified in test-utils.sh) # 3 : failed line of end of one small test(specified in test-utils.sh) # -grep -n -e 'test_.*: ".*"' -o -e 'test_.* passed' -o -e 'test_.* failed' ${SUITELOG} 2>/dev/null | sed 's/:test_.*: ".*"/ 1/g' | sed 's/:test_.* passed/ 2/g' | sed 's/:test_.* failed/ 3/g' > ${TMP_LINENO_FILE} +grep -n -e 'test_.*: ".*"' -o -e 'test_.* passed' -o -e 'test_.* failed' "${SUITELOG}" 2>/dev/null | sed 's/:test_.*: ".*"/ 1/g' | sed 's/:test_.* passed/ 2/g' | sed 's/:test_.* failed/ 3/g' > "${TMP_LINENO_FILE}" # # Loop for printing result # prev_line_type=0 prev_line_number=1 -while read line; do +while read -r line; do # line is " " - number_type=($line) + # + # shellcheck disable=SC2206 + number_type=(${line}) - head_line_cnt=`expr ${number_type[0]} - 1` - tail_line_cnt=`expr ${number_type[0]} - ${prev_line_number}` + head_line_cnt=$((number_type[0] - 1)) + tail_line_cnt=$((number_type[0] - prev_line_number)) - if [ ${number_type[1]} -eq 2 ]; then + if [ "${number_type[1]}" -eq 2 ]; then echo "" fi - if [ ${prev_line_type} -eq 1 ]; then - if [ ${number_type[1]} -eq 2 ]; then + if [ "${prev_line_type}" -eq 1 ]; then + if [ "${number_type[1]}" -eq 2 ]; then # if passed, cut s3fs information messages - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' - elif [ ${number_type[1]} -eq 3 ]; then + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' + elif [ "${number_type[1]}" -eq 3 ]; then # if failed, print all - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' else # there is start keyword but not end keyword, so print all - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e 
'[0-9]\+\%' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' fi - elif [ ${prev_line_type} -eq 2 -o ${prev_line_type} -eq 3 ]; then - if [ ${number_type[1]} -eq 2 -o ${number_type[1]} -eq 3 ]; then + elif [ "${prev_line_type}" -eq 2 ] || [ "${prev_line_type}" -eq 3 ]; then + if [ "${number_type[1]}" -eq 2 ] || [ "${number_type[1]}" -eq 3 ]; then # previous is end of chmpx, but this type is end of chmpx without start keyword. then print all - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' else # this area is not from start to end, cut s3fs information messages - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' fi else - if [ ${number_type[1]} -eq 2 -o ${number_type[1]} -eq 3 ]; then + if [ "${number_type[1]}" -eq 2 ] || [ "${number_type[1]}" -eq 3 ]; then # previous is normal, but this type is end of chmpx without start keyword. 
then print all - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' else # this area is normal, cut s3fs information messages - head -${head_line_cnt} ${SUITELOG} | tail -${tail_line_cnt} | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' + head "-${head_line_cnt}" "${SUITELOG}" | tail "-${tail_line_cnt}" | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' fi fi - if [ ${number_type[1]} -eq 3 ]; then + if [ "${number_type[1]}" -eq 3 ]; then echo "" fi - prev_line_type=${number_type[1]} - prev_line_number=${number_type[0]} + prev_line_type="${number_type[1]}" + prev_line_number="${number_type[0]}" -done < ${TMP_LINENO_FILE} +done < "${TMP_LINENO_FILE}" # # Print rest lines # -file_line_cnt=`wc -l ${SUITELOG} | awk '{print $1}'` -tail_line_cnt=`expr ${file_line_cnt} - ${prev_line_number}` +file_line_cnt=$(wc -l "${SUITELOG}" | awk '{print $1}') +tail_line_cnt=$((file_line_cnt - prev_line_number)) -if [ ${prev_line_type} -eq 1 ]; then - tail -${tail_line_cnt} ${SUITELOG} | grep -v -e '[0-9]\+\%' +if [ "${prev_line_type}" -eq 1 ]; then + tail "-${tail_line_cnt}" "${SUITELOG}" | grep -v -e '[0-9]\+\%' else - tail -${tail_line_cnt} ${SUITELOG} | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' + tail "-${tail_line_cnt}" "${SUITELOG}" | grep -v -e '[0-9]\+\%' | grep -v -e '^s3fs: ' -a -e '\[INF\]' fi # # Remove temp file # -rm -f ${TMP_LINENO_FILE} +rm -f "${TMP_LINENO_FILE}" exit 0 diff --git a/test/integration-test-common.sh b/test/integration-test-common.sh index 24aa246..01ad20b 100644 --- a/test/integration-test-common.sh +++ b/test/integration-test-common.sh @@ -66,59 +66,65 @@ set -o pipefail S3FS=../src/s3fs # Allow these defaulted values to be overridden -: ${S3_URL:="https://127.0.0.1:8080"} -: ${S3_ENDPOINT:="us-east-1"} -: ${S3FS_CREDENTIALS_FILE:="passwd-s3fs"} -: 
${TEST_BUCKET_1:="s3fs-integration-test"} +: "${S3_URL:="https://127.0.0.1:8080"}" +: "${S3_ENDPOINT:="us-east-1"}" +: "${S3FS_CREDENTIALS_FILE:="passwd-s3fs"}" +: "${TEST_BUCKET_1:="s3fs-integration-test"}" export TEST_BUCKET_1 export S3_URL export S3_ENDPOINT -export TEST_SCRIPT_DIR=`pwd` +TEST_SCRIPT_DIR=$(pwd) +export TEST_SCRIPT_DIR export TEST_BUCKET_MOUNT_POINT_1=${TEST_BUCKET_1} S3PROXY_VERSION="1.9.0" -S3PROXY_BINARY=${S3PROXY_BINARY-"s3proxy-${S3PROXY_VERSION}"} +S3PROXY_BINARY="${S3PROXY_BINARY-"s3proxy-${S3PROXY_VERSION}"}" CHAOS_HTTP_PROXY_VERSION="1.1.0" CHAOS_HTTP_PROXY_BINARY="chaos-http-proxy-${CHAOS_HTTP_PROXY_VERSION}" if [ ! -f "$S3FS_CREDENTIALS_FILE" ] then - echo "Missing credentials file: $S3FS_CREDENTIALS_FILE" + echo "Missing credentials file: ${S3FS_CREDENTIALS_FILE}" exit 1 fi -chmod 600 "$S3FS_CREDENTIALS_FILE" +chmod 600 "${S3FS_CREDENTIALS_FILE}" if [ -z "${S3FS_PROFILE}" ]; then - export AWS_ACCESS_KEY_ID=$(cut -d: -f1 ${S3FS_CREDENTIALS_FILE}) - export AWS_SECRET_ACCESS_KEY=$(cut -d: -f2 ${S3FS_CREDENTIALS_FILE}) + AWS_ACCESS_KEY_ID=$(cut -d: -f1 "${S3FS_CREDENTIALS_FILE}") + export AWS_ACCESS_KEY_ID + + AWS_SECRET_ACCESS_KEY=$(cut -d: -f2 "${S3FS_CREDENTIALS_FILE}") + export AWS_SECRET_ACCESS_KEY fi -if [ ! -d $TEST_BUCKET_MOUNT_POINT_1 ] -then - mkdir -p $TEST_BUCKET_MOUNT_POINT_1 +if [ ! -d "${TEST_BUCKET_MOUNT_POINT_1}" ]; then + mkdir -p "${TEST_BUCKET_MOUNT_POINT_1}" fi # This function execute the function parameters $1 times # before giving up, with 1 second delays. function retry { - local N=$1; shift; + local N="$1" + shift rc=0 - for i in $(seq $N); do + for _ in $(seq "${N}"); do echo "Trying: $*" - eval $@; rc=$? - if [ $rc = 0 ]; then + # shellcheck disable=SC2068,SC2294 + eval $@ + rc=$? 
+ if [ "${rc}" -eq 0 ]; then break fi sleep 1 echo "Retrying: $*" done - if [ $rc != 0 ]; then + if [ "${rc}" -ne 0 ]; then echo "timeout waiting for $*" fi - return $rc + return "${rc}" } # Proxy is not started if S3PROXY_BINARY is an empty string @@ -145,7 +151,7 @@ function start_s3proxy { echo -e 'password\npassword\n\n\n\n\n\n\nyes' | keytool -genkey -keystore /tmp/keystore.jks -keyalg RSA -keysize 2048 -validity 365 -ext SAN=IP:127.0.0.1 echo password | keytool -exportcert -keystore /tmp/keystore.jks -rfc -file /tmp/keystore.pem - ${STDBUF_BIN} -oL -eL java -jar "$S3PROXY_BINARY" --properties $S3PROXY_CONFIG & + "${STDBUF_BIN}" -oL -eL java -jar "${S3PROXY_BINARY}" --properties "${S3PROXY_CONFIG}" & S3PROXY_PID=$! # wait for S3Proxy to start @@ -159,7 +165,7 @@ function start_s3proxy { chmod +x "${CHAOS_HTTP_PROXY_BINARY}" fi - ${STDBUF_BIN} -oL -eL java -jar ${CHAOS_HTTP_PROXY_BINARY} --properties chaos-http-proxy.conf & + "${STDBUF_BIN}" -oL -eL java -jar "${CHAOS_HTTP_PROXY_BINARY}" --properties chaos-http-proxy.conf & CHAOS_HTTP_PROXY_PID=$! # wait for Chaos HTTP Proxy to start @@ -170,12 +176,12 @@ function start_s3proxy { function stop_s3proxy { if [ -n "${S3PROXY_PID}" ] then - kill $S3PROXY_PID + kill "${S3PROXY_PID}" fi if [ -n "${CHAOS_HTTP_PROXY_PID}" ] then - kill $CHAOS_HTTP_PROXY_PID + kill "${CHAOS_HTTP_PROXY_PID}" fi } @@ -200,7 +206,7 @@ function start_s3fs { fi # On OSX only, we need to specify the direct_io and auto_cache flag. - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then local DIRECT_IO_OPT="-o direct_io -o auto_cache" else local DIRECT_IO_OPT="" @@ -215,7 +221,7 @@ function start_s3fs { # Therefore, when it is macos, it is not executed via stdbuf. # This patch may be temporary, but no other method has been found at this time. 
# - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then local VIA_STDBUF_CMDLINE="" else local VIA_STDBUF_CMDLINE="${STDBUF_BIN} -oL -eL" @@ -238,15 +244,17 @@ function start_s3fs { # # subshell with set -x to log exact invocation of s3fs-fuse + # shellcheck disable=SC2086 ( set -x ${VIA_STDBUF_CMDLINE} \ - ${VALGRIND_EXEC} ${S3FS} \ - $TEST_BUCKET_1 \ - $TEST_BUCKET_MOUNT_POINT_1 \ + ${VALGRIND_EXEC} \ + ${S3FS} \ + ${TEST_BUCKET_1} \ + ${TEST_BUCKET_MOUNT_POINT_1} \ -o use_path_request_style \ - -o url=${S3_URL} \ - -o endpoint=${S3_ENDPOINT} \ + -o url="${S3_URL}" \ + -o endpoint="${S3_ENDPOINT}" \ -o no_check_certificate \ -o ssl_verify_hostname=0 \ -o use_xattr=1 \ @@ -255,37 +263,39 @@ function start_s3fs { ${DIRECT_IO_OPT} \ -o stat_cache_expire=1 \ -o stat_cache_interval_expire=1 \ - -o dbglevel=${DBGLEVEL:=info} \ + -o dbglevel="${DBGLEVEL:=info}" \ -o no_time_stamp_msg \ -o retries=3 \ -f \ "${@}" & echo $! >&3 - ) 3>pid | ${STDBUF_BIN} -oL -eL ${SED_BIN} ${SED_BUFFER_FLAG} "s/^/s3fs: /" & + ) 3>pid | "${STDBUF_BIN}" -oL -eL "${SED_BIN}" "${SED_BUFFER_FLAG}" "s/^/s3fs: /" & sleep 1 - export S3FS_PID=$( ${TEST_TEXT_FILE} + for x in $(seq 1 "${TEST_TEXT_FILE_LENGTH}"); do + echo "${TEST_INPUT}" + done > "${TEST_TEXT_FILE}" - check_file_size "${TEST_TEXT_FILE}" $(($TEST_TEXT_FILE_LENGTH * $(echo $TEST_INPUT | wc -c))) + check_file_size "${TEST_TEXT_FILE}" $((TEST_TEXT_FILE_LENGTH * $((${#TEST_INPUT} + 1)) )) rm_test_file } @@ -56,10 +55,10 @@ function test_append_file { function test_truncate_file { describe "Testing truncate file ..." # Write a small test file - echo "${TEST_TEXT}" > ${TEST_TEXT_FILE} + echo "${TEST_TEXT}" > "${TEST_TEXT_FILE}" # Truncate file to 0 length. This should trigger open(path, O_RDWR | O_TRUNC...) - : > ${TEST_TEXT_FILE} + : > "${TEST_TEXT_FILE}" check_file_size "${TEST_TEXT_FILE}" 0 @@ -72,23 +71,23 @@ function test_truncate_upload { # This file size uses multipart, mix upload when uploading. 
# We will test these cases. - rm_test_file ${BIG_FILE} + rm_test_file "${BIG_FILE}" - ${TRUNCATE_BIN} ${BIG_FILE} -s ${BIG_FILE_LENGTH} + "${TRUNCATE_BIN}" "${BIG_FILE}" -s "${BIG_FILE_LENGTH}" - rm_test_file ${BIG_FILE} + rm_test_file "${BIG_FILE}" } function test_truncate_empty_file { describe "Testing truncate empty file ..." # Write an empty test file - touch ${TEST_TEXT_FILE} + touch "${TEST_TEXT_FILE}" # Truncate the file to 1024 length local t_size=1024 - ${TRUNCATE_BIN} ${TEST_TEXT_FILE} -s $t_size + "${TRUNCATE_BIN}" "${TEST_TEXT_FILE}" -s "${t_size}" - check_file_size "${TEST_TEXT_FILE}" $t_size + check_file_size "${TEST_TEXT_FILE}" "${t_size}" rm_test_file } @@ -96,12 +95,12 @@ function test_truncate_empty_file { function test_mv_file { describe "Testing mv file function ..." # if the rename file exists, delete it - if [ -e $ALT_TEST_TEXT_FILE ] + if [ -e "${ALT_TEST_TEXT_FILE}" ] then - rm $ALT_TEST_TEXT_FILE + rm "${ALT_TEST_TEXT_FILE}" fi - if [ -e $ALT_TEST_TEXT_FILE ] + if [ -e "${ALT_TEST_TEXT_FILE}" ] then echo "Could not delete file ${ALT_TEST_TEXT_FILE}, it still exists" return 1 @@ -111,11 +110,11 @@ function test_mv_file { mk_test_file # save file length - local ALT_TEXT_LENGTH=`wc -c $TEST_TEXT_FILE | awk '{print $1}'` + local ALT_TEXT_LENGTH; ALT_TEXT_LENGTH=$(wc -c "${TEST_TEXT_FILE}" | awk '{print $1}') #rename the test file - mv $TEST_TEXT_FILE $ALT_TEST_TEXT_FILE - if [ ! -e $ALT_TEST_TEXT_FILE ] + mv "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" + if [ ! 
-e "${ALT_TEST_TEXT_FILE}" ] then echo "Could not move file" return 1 @@ -124,11 +123,11 @@ function test_mv_file { #check the renamed file content-type if [ -f "/etc/mime.types" ] then - check_content_type "$1/$ALT_TEST_TEXT_FILE" "text/plain" + check_content_type "$1/${ALT_TEST_TEXT_FILE}" "text/plain" fi # Check the contents of the alt file - local ALT_FILE_LENGTH=`wc -c $ALT_TEST_TEXT_FILE | awk '{print $1}'` + local ALT_FILE_LENGTH; ALT_FILE_LENGTH=$(wc -c "${ALT_TEST_TEXT_FILE}" | awk '{print $1}') if [ "$ALT_FILE_LENGTH" -ne "$ALT_TEXT_LENGTH" ] then echo "moved file length is not as expected expected: $ALT_TEXT_LENGTH got: $ALT_FILE_LENGTH" @@ -136,7 +135,7 @@ function test_mv_file { fi # clean up - rm_test_file $ALT_TEST_TEXT_FILE + rm_test_file "${ALT_TEST_TEXT_FILE}" } function test_mv_to_exist_file { @@ -144,30 +143,30 @@ function test_mv_to_exist_file { local BIG_MV_FILE_BLOCK_SIZE=$((BIG_FILE_BLOCK_SIZE + 1)) - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${BIG_FILE}" - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${BIG_FILE}-mv" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}" + ../../junk_data $((BIG_MV_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}-mv" - mv ${BIG_FILE} ${BIG_FILE}-mv + mv "${BIG_FILE}" "${BIG_FILE}-mv" rm_test_file "${BIG_FILE}-mv" } function test_mv_empty_directory { describe "Testing mv directory function ..." - if [ -e $TEST_DIR ]; then + if [ -e "${TEST_DIR}" ]; then echo "Unexpected, this file/directory exists: ${TEST_DIR}" return 1 fi mk_test_dir - mv ${TEST_DIR} ${TEST_DIR}_rename + mv "${TEST_DIR}" "${TEST_DIR}_rename" if [ ! 
-d "${TEST_DIR}_rename" ]; then echo "Directory ${TEST_DIR} was not renamed" return 1 fi - rmdir ${TEST_DIR}_rename + rmdir "${TEST_DIR}_rename" if [ -e "${TEST_DIR}_rename" ]; then echo "Could not remove the test directory, it still exists: ${TEST_DIR}_rename" return 1 @@ -176,22 +175,22 @@ function test_mv_empty_directory { function test_mv_nonempty_directory { describe "Testing mv directory function ..." - if [ -e $TEST_DIR ]; then + if [ -e "${TEST_DIR}" ]; then echo "Unexpected, this file/directory exists: ${TEST_DIR}" return 1 fi mk_test_dir - touch ${TEST_DIR}/file + touch "${TEST_DIR}"/file - mv ${TEST_DIR} ${TEST_DIR}_rename + mv "${TEST_DIR}" "${TEST_DIR}_rename" if [ ! -d "${TEST_DIR}_rename" ]; then echo "Directory ${TEST_DIR} was not renamed" return 1 fi - rm -r ${TEST_DIR}_rename + rm -r "${TEST_DIR}_rename" if [ -e "${TEST_DIR}_rename" ]; then echo "Could not remove the test directory, it still exists: ${TEST_DIR}_rename" return 1 @@ -201,35 +200,35 @@ function test_mv_nonempty_directory { function test_redirects { describe "Testing redirects ..." 
- mk_test_file ABCDEF + mk_test_file "ABCDEF" - local CONTENT=`cat $TEST_TEXT_FILE` + local CONTENT; CONTENT=$(cat "${TEST_TEXT_FILE}") if [ "${CONTENT}" != "ABCDEF" ]; then echo "CONTENT read is unexpected, got ${CONTENT}, expected ABCDEF" return 1 fi - echo XYZ > $TEST_TEXT_FILE + echo "XYZ" > "${TEST_TEXT_FILE}" - local CONTENT=`cat $TEST_TEXT_FILE` + CONTENT=$(cat "${TEST_TEXT_FILE}") - if [ ${CONTENT} != "XYZ" ]; then + if [ "${CONTENT}" != "XYZ" ]; then echo "CONTENT read is unexpected, got ${CONTENT}, expected XYZ" return 1 fi - echo 123456 >> $TEST_TEXT_FILE + echo "123456" >> "${TEST_TEXT_FILE}" - local LINE1=`${SED_BIN} -n '1,1p' $TEST_TEXT_FILE` - local LINE2=`${SED_BIN} -n '2,2p' $TEST_TEXT_FILE` + local LINE1; LINE1=$("${SED_BIN}" -n '1,1p' "${TEST_TEXT_FILE}") + local LINE2; LINE2=$("${SED_BIN}" -n '2,2p' "${TEST_TEXT_FILE}") - if [ ${LINE1} != "XYZ" ]; then + if [ "${LINE1}" != "XYZ" ]; then echo "LINE1 was not as expected, got ${LINE1}, expected XYZ" return 1 fi - if [ ${LINE2} != "123456" ]; then + if [ "${LINE2}" != "123456" ]; then echo "LINE2 was not as expected, got ${LINE2}, expected 123456" return 1 fi @@ -241,7 +240,7 @@ function test_redirects { function test_mkdir_rmdir { describe "Testing creation/removal of a directory ..." - if [ -e $TEST_DIR ]; then + if [ -e "${TEST_DIR}" ]; then echo "Unexpected, this file/directory exists: ${TEST_DIR}" return 1 fi @@ -256,15 +255,15 @@ function test_chmod { # create the test file again mk_test_file - local ORIGINAL_PERMISSIONS=$(get_permissions $TEST_TEXT_FILE) + local ORIGINAL_PERMISSIONS; ORIGINAL_PERMISSIONS=$(get_permissions "${TEST_TEXT_FILE}") - chmod 777 $TEST_TEXT_FILE; + chmod 777 "${TEST_TEXT_FILE}"; # if they're the same, we have a problem. 
- local CHANGED_PERMISSIONS=$(get_permissions $TEST_TEXT_FILE) - if [ $CHANGED_PERMISSIONS = $ORIGINAL_PERMISSIONS ] + local CHANGED_PERMISSIONS; CHANGED_PERMISSIONS=$(get_permissions "${TEST_TEXT_FILE}") + if [ "${CHANGED_PERMISSIONS}" = "${ORIGINAL_PERMISSIONS}" ] then - echo "Could not modify $TEST_TEXT_FILE permissions" + echo "Could not modify ${TEST_TEXT_FILE} permissions" return 1 fi @@ -278,10 +277,11 @@ function test_chown { # create the test file again mk_test_file - if [ `uname` = "Darwin" ]; then - local ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" $TEST_TEXT_FILE) + local ORIGINAL_PERMISSIONS + if [ "$(uname)" = "Darwin" ]; then + ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" "${TEST_TEXT_FILE}") else - local ORIGINAL_PERMISSIONS=$(stat --format=%u:%g $TEST_TEXT_FILE) + ORIGINAL_PERMISSIONS=$(stat --format=%u:%g "${TEST_TEXT_FILE}") fi # [NOTE] @@ -291,21 +291,22 @@ function test_chown { # '|| true' was added due to a problem with Travis CI and MacOS # and ensure_diskfree option. # - chown 1000:1000 $TEST_TEXT_FILE || true + chown 1000:1000 "${TEST_TEXT_FILE}" || true # if they're the same, we have a problem. 
- if [ `uname` = "Darwin" ]; then - local CHANGED_PERMISSIONS=$(stat -f "%u:%g" $TEST_TEXT_FILE) + local CHANGED_PERMISSIONS + if [ "$(uname)" = "Darwin" ]; then + CHANGED_PERMISSIONS=$(stat -f "%u:%g" "${TEST_TEXT_FILE}") else - local CHANGED_PERMISSIONS=$(stat --format=%u:%g $TEST_TEXT_FILE) + CHANGED_PERMISSIONS=$(stat --format=%u:%g "${TEST_TEXT_FILE}") fi - if [ $CHANGED_PERMISSIONS = $ORIGINAL_PERMISSIONS ] + if [ "${CHANGED_PERMISSIONS}" = "${ORIGINAL_PERMISSIONS}" ] then - if [ $ORIGINAL_PERMISSIONS = "1000:1000" ] + if [ "${ORIGINAL_PERMISSIONS}" = "1000:1000" ] then echo "Could not be strict check because original file permission 1000:1000" else - echo "Could not modify $TEST_TEXT_FILE ownership($ORIGINAL_PERMISSIONS to 1000:1000)" + echo "Could not modify ${TEST_TEXT_FILE} ownership($ORIGINAL_PERMISSIONS to 1000:1000)" return 1 fi fi @@ -319,9 +320,10 @@ function test_list { mk_test_file mk_test_dir - local file_cnt=$(ls -1 | wc -l) - if [ $file_cnt != 2 ]; then - echo "Expected 2 file but got $file_cnt" + local file_list=(*) + local file_cnt=${#file_list[@]} + if [ "${file_cnt}" -ne 2 ]; then + echo "Expected 2 file but got ${file_cnt}" return 1 fi @@ -343,30 +345,30 @@ function test_remove_nonempty_directory { function test_external_directory_creation { describe "Test external directory creation ..." - local OBJECT_NAME="$(basename $PWD)/directory/${TEST_TEXT_FILE}" + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/directory/"${TEST_TEXT_FILE}" echo "data" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - ls | grep -q directory + ls directory >/dev/null 2>&1 get_permissions directory | grep -q 750$ ls directory - cmp <(echo "data") directory/${TEST_TEXT_FILE} - rm -f directory/${TEST_TEXT_FILE} + cmp <(echo "data") directory/"${TEST_TEXT_FILE}" + rm -f directory/"${TEST_TEXT_FILE}" } function test_external_modification { describe "Test external modification to an object ..." 
- echo "old" > ${TEST_TEXT_FILE} - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" + echo "old" > "${TEST_TEXT_FILE}" + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" echo "new new" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - cmp ${TEST_TEXT_FILE} <(echo "new new") - rm -f ${TEST_TEXT_FILE} + cmp "${TEST_TEXT_FILE}" <(echo "new new") + rm -f "${TEST_TEXT_FILE}" } function test_read_external_object() { describe "create objects via aws CLI and read via s3fs ..." - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" echo "test" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - cmp ${TEST_TEXT_FILE} <(echo "test") - rm -f ${TEST_TEXT_FILE} + cmp "${TEST_TEXT_FILE}" <(echo "test") + rm -f "${TEST_TEXT_FILE}" } function test_update_metadata_external_small_object() { @@ -375,7 +377,7 @@ function test_update_metadata_external_small_object() { # [NOTE] # Use the only filename in the test to avoid being affected by noobjcache. 
# - local TEST_FILE_EXT=`make_random_string` + local TEST_FILE_EXT; TEST_FILE_EXT=$(make_random_string) local TEST_CHMOD_FILE="${TEST_TEXT_FILE}_chmod.${TEST_FILE_EXT}" local TEST_CHOWN_FILE="${TEST_TEXT_FILE}_chown.${TEST_FILE_EXT}" local TEST_UTIMENS_FILE="${TEST_TEXT_FILE}_utimens.${TEST_FILE_EXT}" @@ -387,50 +389,50 @@ function test_update_metadata_external_small_object() { # # chmod # - local OBJECT_NAME="$(basename $PWD)/${TEST_CHMOD_FILE}" + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHMOD_FILE}" echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - chmod +x ${TEST_CHMOD_FILE} - cmp ${TEST_CHMOD_FILE} <(echo "${TEST_INPUT}") + chmod +x "${TEST_CHMOD_FILE}" + cmp "${TEST_CHMOD_FILE}" <(echo "${TEST_INPUT}") # # chown # - local OBJECT_NAME="$(basename $PWD)/${TEST_CHOWN_FILE}" + OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHOWN_FILE}" echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - chown $UID ${TEST_CHOWN_FILE} - cmp ${TEST_CHOWN_FILE} <(echo "${TEST_INPUT}") + chown "${UID}" "${TEST_CHOWN_FILE}" + cmp "${TEST_CHOWN_FILE}" <(echo "${TEST_INPUT}") # # utimens # - local OBJECT_NAME="$(basename $PWD)/${TEST_UTIMENS_FILE}" + OBJECT_NAME=$(basename "${PWD}")/"${TEST_UTIMENS_FILE}" echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - touch ${TEST_UTIMENS_FILE} - cmp ${TEST_UTIMENS_FILE} <(echo "${TEST_INPUT}") + touch "${TEST_UTIMENS_FILE}" + cmp "${TEST_UTIMENS_FILE}" <(echo "${TEST_INPUT}") # # set xattr # - local OBJECT_NAME="$(basename $PWD)/${TEST_SETXATTR_FILE}" + OBJECT_NAME=$(basename "${PWD}")/"${TEST_SETXATTR_FILE}" echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - set_xattr key value ${TEST_SETXATTR_FILE} - cmp ${TEST_SETXATTR_FILE} <(echo "${TEST_INPUT}") + set_xattr key value "${TEST_SETXATTR_FILE}" + cmp "${TEST_SETXATTR_FILE}" <(echo "${TEST_INPUT}") # # remove xattr # # "%7B%22key%22%3A%22dmFsdWU%3D%22%7D" = {"key":"value"} 
# - local OBJECT_NAME="$(basename $PWD)/${TEST_RMXATTR_FILE}" + OBJECT_NAME=$(basename "${PWD}")/"${TEST_RMXATTR_FILE}" echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --metadata xattr=%7B%22key%22%3A%22dmFsdWU%3D%22%7D - del_xattr key ${TEST_RMXATTR_FILE} - cmp ${TEST_RMXATTR_FILE} <(echo "${TEST_INPUT}") + del_xattr key "${TEST_RMXATTR_FILE}" + cmp "${TEST_RMXATTR_FILE}" <(echo "${TEST_INPUT}") - rm -f ${TEST_CHMOD_FILE} - rm -f ${TEST_CHOWN_FILE} - rm -f ${TEST_UTIMENS_FILE} - rm -f ${TEST_SETXATTR_FILE} - rm -f ${TEST_RMXATTR_FILE} + rm -f "${TEST_CHMOD_FILE}" + rm -f "${TEST_CHOWN_FILE}" + rm -f "${TEST_UTIMENS_FILE}" + rm -f "${TEST_SETXATTR_FILE}" + rm -f "${TEST_RMXATTR_FILE}" } function test_update_metadata_external_large_object() { @@ -439,86 +441,88 @@ function test_update_metadata_external_large_object() { # [NOTE] # Use the only filename in the test to avoid being affected by noobjcache. # - local TEST_FILE_EXT=`make_random_string` + local TEST_FILE_EXT; TEST_FILE_EXT=$(make_random_string) local TEST_CHMOD_FILE="${TEST_TEXT_FILE}_chmod.${TEST_FILE_EXT}" local TEST_CHOWN_FILE="${TEST_TEXT_FILE}_chown.${TEST_FILE_EXT}" local TEST_UTIMENS_FILE="${TEST_TEXT_FILE}_utimens.${TEST_FILE_EXT}" local TEST_SETXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}" local TEST_RMXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}" - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" # # chmod # - local OBJECT_NAME="$(basename $PWD)/${TEST_CHMOD_FILE}" - aws_cli s3 cp ${TEMP_DIR}/${BIG_FILE} "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress - chmod +x ${TEST_CHMOD_FILE} - cmp ${TEST_CHMOD_FILE} ${TEMP_DIR}/${BIG_FILE} + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHMOD_FILE}" + aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress + chmod +x 
"${TEST_CHMOD_FILE}" + cmp "${TEST_CHMOD_FILE}" "${TEMP_DIR}/${BIG_FILE}" # # chown # - local OBJECT_NAME="$(basename $PWD)/${TEST_CHOWN_FILE}" - aws_cli s3 cp ${TEMP_DIR}/${BIG_FILE} "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress - chown $UID ${TEST_CHOWN_FILE} - cmp ${TEST_CHOWN_FILE} ${TEMP_DIR}/${BIG_FILE} + OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHOWN_FILE}" + aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress + chown "${UID}" "${TEST_CHOWN_FILE}" + cmp "${TEST_CHOWN_FILE}" "${TEMP_DIR}/${BIG_FILE}" # # utimens # - local OBJECT_NAME="$(basename $PWD)/${TEST_UTIMENS_FILE}" - aws_cli s3 cp ${TEMP_DIR}/${BIG_FILE} "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress - touch ${TEST_UTIMENS_FILE} - cmp ${TEST_UTIMENS_FILE} ${TEMP_DIR}/${BIG_FILE} + OBJECT_NAME=$(basename "${PWD}")/"${TEST_UTIMENS_FILE}" + aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress + touch "${TEST_UTIMENS_FILE}" + cmp "${TEST_UTIMENS_FILE}" "${TEMP_DIR}/${BIG_FILE}" # # set xattr # - local OBJECT_NAME="$(basename $PWD)/${TEST_SETXATTR_FILE}" - aws_cli s3 cp ${TEMP_DIR}/${BIG_FILE} "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress - set_xattr key value ${TEST_SETXATTR_FILE} - cmp ${TEST_SETXATTR_FILE} ${TEMP_DIR}/${BIG_FILE} + OBJECT_NAME=$(basename "${PWD}")/"${TEST_SETXATTR_FILE}" + aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress + set_xattr key value "${TEST_SETXATTR_FILE}" + cmp "${TEST_SETXATTR_FILE}" "${TEMP_DIR}/${BIG_FILE}" # # remove xattr # # "%7B%22key%22%3A%22dmFsdWU%3D%22%7D" = {"key":"value"} # - local OBJECT_NAME="$(basename $PWD)/${TEST_RMXATTR_FILE}" - aws_cli s3 cp ${TEMP_DIR}/${BIG_FILE} "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress --metadata xattr=%7B%22key%22%3A%22dmFsdWU%3D%22%7D - del_xattr key ${TEST_RMXATTR_FILE} - cmp ${TEST_RMXATTR_FILE} ${TEMP_DIR}/${BIG_FILE} + OBJECT_NAME=$(basename "${PWD}")/"${TEST_RMXATTR_FILE}" + 
aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress --metadata xattr=%7B%22key%22%3A%22dmFsdWU%3D%22%7D + del_xattr key "${TEST_RMXATTR_FILE}" + cmp "${TEST_RMXATTR_FILE}" "${TEMP_DIR}/${BIG_FILE}" - rm -f ${TEMP_DIR}/${BIG_FILE} - rm -f ${TEST_CHMOD_FILE} - rm -f ${TEST_CHOWN_FILE} - rm -f ${TEST_UTIMENS_FILE} - rm -f ${TEST_SETXATTR_FILE} - rm -f ${TEST_RMXATTR_FILE} + rm -f "${TEMP_DIR}/${BIG_FILE}" + rm -f "${TEST_CHMOD_FILE}" + rm -f "${TEST_CHOWN_FILE}" + rm -f "${TEST_UTIMENS_FILE}" + rm -f "${TEST_SETXATTR_FILE}" + rm -f "${TEST_RMXATTR_FILE}" } function test_rename_before_close { describe "Testing rename before close ..." + + # shellcheck disable=SC2094 ( echo foo - mv $TEST_TEXT_FILE ${TEST_TEXT_FILE}.new - ) > $TEST_TEXT_FILE + mv "${TEST_TEXT_FILE}" "${TEST_TEXT_FILE}.new" + ) > "${TEST_TEXT_FILE}" - if ! cmp <(echo foo) ${TEST_TEXT_FILE}.new; then + if ! cmp <(echo "foo") "${TEST_TEXT_FILE}.new"; then echo "rename before close failed" return 1 fi - rm_test_file ${TEST_TEXT_FILE}.new - rm -f ${TEST_TEXT_FILE} + rm_test_file "${TEST_TEXT_FILE}.new" + rm -f "${TEST_TEXT_FILE}" } function test_multipart_upload { describe "Testing multi-part upload ..." - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" - dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs=$BIG_FILE_BLOCK_SIZE count=$BIG_FILE_COUNT + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" + dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}" # Verify contents of file echo "Comparing test file" @@ -534,8 +538,8 @@ function test_multipart_upload { function test_multipart_copy { describe "Testing multi-part copy ..." 
- ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" - dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs=$BIG_FILE_BLOCK_SIZE count=$BIG_FILE_COUNT + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" + dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}" mv "${BIG_FILE}" "${BIG_FILE}-copy" # Verify contents of file @@ -555,23 +559,23 @@ function test_multipart_copy { function test_multipart_mix { describe "Testing multi-part mix ..." - if [ `uname` = "Darwin" ]; then - cat /dev/null > $BIG_FILE + if [ "$(uname)" = "Darwin" ]; then + cat /dev/null > "${BIG_FILE}" fi - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" - dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs=$BIG_FILE_BLOCK_SIZE count=$BIG_FILE_COUNT + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" + dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}" # (1) Edit the middle of an existing file # modify directly(seek 7.5MB offset) # In the case of nomultipart and nocopyapi, # it makes no sense, but copying files is because it leaves no cache. 
# - cp ${TEMP_DIR}/${BIG_FILE} ${TEMP_DIR}/${BIG_FILE}-mix - cp ${BIG_FILE} ${BIG_FILE}-mix + cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix" + cp "${BIG_FILE}" "${BIG_FILE}-mix" local MODIFY_START_BLOCK=$((15*1024*1024/2/4)) - echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek=$MODIFY_START_BLOCK conv=notrunc - echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek=$MODIFY_START_BLOCK conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc # Verify contents of file echo "Comparing test file (1)" @@ -583,12 +587,12 @@ function test_multipart_mix { # (2) Write to an area larger than the size of the existing file # modify directly(over file end offset) # - cp ${TEMP_DIR}/${BIG_FILE} ${TEMP_DIR}/${BIG_FILE}-mix - cp ${BIG_FILE} ${BIG_FILE}-mix + cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix" + cp "${BIG_FILE}" "${BIG_FILE}-mix" local OVER_FILE_BLOCK_POS=$((26*1024*1024/4)) - echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek=$OVER_FILE_BLOCK_POS conv=notrunc - echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek=$OVER_FILE_BLOCK_POS conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${OVER_FILE_BLOCK_POS}" conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${OVER_FILE_BLOCK_POS}" conv=notrunc # Verify contents of file echo "Comparing test file (2)" @@ -599,8 +603,8 @@ function test_multipart_mix { # (3) Writing from the 0th byte # - cp ${TEMP_DIR}/${BIG_FILE} ${TEMP_DIR}/${BIG_FILE}-mix - cp ${BIG_FILE} ${BIG_FILE}-mix + cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix" + cp "${BIG_FILE}" "${BIG_FILE}-mix" echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 
seek=0 conv=notrunc echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek=0 conv=notrunc @@ -615,12 +619,12 @@ function test_multipart_mix { # (4) Write to the area within 5MB from the top # modify directly(seek 1MB offset) # - cp ${TEMP_DIR}/${BIG_FILE} ${TEMP_DIR}/${BIG_FILE}-mix - cp ${BIG_FILE} ${BIG_FILE}-mix + cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix" + cp "${BIG_FILE}" "${BIG_FILE}-mix" local MODIFY_START_BLOCK=$((1*1024*1024)) - echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek=$MODIFY_START_BLOCK conv=notrunc - echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek=$MODIFY_START_BLOCK conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc # Verify contents of file echo "Comparing test file (4)" @@ -638,12 +642,12 @@ function test_multipart_mix { function test_utimens_during_multipart { describe "Testing utimens calling during multipart copy ..." - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}" - cp ${TEMP_DIR}/${BIG_FILE} ${BIG_FILE} + cp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}" # The second copy of the "-p" option calls utimens during multipart upload. - cp -p ${TEMP_DIR}/${BIG_FILE} ${BIG_FILE} + cp -p "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}" rm -f "${TEMP_DIR}/${BIG_FILE}" rm_test_file "${BIG_FILE}" @@ -654,10 +658,15 @@ function test_special_characters { ( set +o pipefail + # shellcheck disable=SC2010 ls 'special' 2>&1 | grep -q 'No such file or directory' + # shellcheck disable=SC2010 ls 'special?' 
2>&1 | grep -q 'No such file or directory' + # shellcheck disable=SC2010 ls 'special*' 2>&1 | grep -q 'No such file or directory' + # shellcheck disable=SC2010 ls 'special~' 2>&1 | grep -q 'No such file or directory' + # shellcheck disable=SC2010 ls 'specialĀµ' 2>&1 | grep -q 'No such file or directory' ) @@ -668,56 +677,56 @@ function test_special_characters { function test_hardlink { describe "Testing hardlinks ..." - rm -f $TEST_TEXT_FILE - rm -f $ALT_TEST_TEXT_FILE - echo foo > $TEST_TEXT_FILE + rm -f "${TEST_TEXT_FILE}" + rm -f "${ALT_TEST_TEXT_FILE}" + echo foo > "${TEST_TEXT_FILE}" ( set +o pipefail - ln $TEST_TEXT_FILE $ALT_TEST_TEXT_FILE 2>&1 | grep -q 'Operation not supported' + ln "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" 2>&1 | grep -q 'Operation not supported' ) rm_test_file - rm_test_file $ALT_TEST_TEXT_FILE + rm_test_file "${ALT_TEST_TEXT_FILE}" } function test_symlink { describe "Testing symlinks ..." - rm -f $TEST_TEXT_FILE - rm -f $ALT_TEST_TEXT_FILE - echo foo > $TEST_TEXT_FILE + rm -f "${TEST_TEXT_FILE}" + rm -f "${ALT_TEST_TEXT_FILE}" + echo foo > "${TEST_TEXT_FILE}" - ln -s $TEST_TEXT_FILE $ALT_TEST_TEXT_FILE - cmp $TEST_TEXT_FILE $ALT_TEST_TEXT_FILE + ln -s "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" + cmp "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" - rm -f $TEST_TEXT_FILE + rm -f "${TEST_TEXT_FILE}" - [ -L $ALT_TEST_TEXT_FILE ] - [ ! -f $ALT_TEST_TEXT_FILE ] + [ -L "${ALT_TEST_TEXT_FILE}" ] + [ ! -f "${ALT_TEST_TEXT_FILE}" ] - rm -f $ALT_TEST_TEXT_FILE + rm -f "${ALT_TEST_TEXT_FILE}" } function test_extended_attributes { describe "Testing extended attributes ..." 
- rm -f $TEST_TEXT_FILE - touch $TEST_TEXT_FILE + rm -f "${TEST_TEXT_FILE}" + touch "${TEST_TEXT_FILE}" # set value - set_xattr key1 value1 $TEST_TEXT_FILE - get_xattr key1 $TEST_TEXT_FILE | grep -q '^value1$' + set_xattr key1 value1 "${TEST_TEXT_FILE}" + get_xattr key1 "${TEST_TEXT_FILE}" | grep -q '^value1$' # append value - set_xattr key2 value2 $TEST_TEXT_FILE - get_xattr key1 $TEST_TEXT_FILE | grep -q '^value1$' - get_xattr key2 $TEST_TEXT_FILE | grep -q '^value2$' + set_xattr key2 value2 "${TEST_TEXT_FILE}" + get_xattr key1 "${TEST_TEXT_FILE}" | grep -q '^value1$' + get_xattr key2 "${TEST_TEXT_FILE}" | grep -q '^value2$' # remove value - del_xattr key1 $TEST_TEXT_FILE - ! get_xattr key1 $TEST_TEXT_FILE - get_xattr key2 $TEST_TEXT_FILE | grep -q '^value2$' + del_xattr key1 "${TEST_TEXT_FILE}" + get_xattr key1 "${TEST_TEXT_FILE}" && exit 1 + get_xattr key2 "${TEST_TEXT_FILE}" | grep -q '^value2$' rm_test_file } @@ -726,12 +735,12 @@ function test_mtime_file { describe "Testing mtime preservation function ..." 
# if the rename file exists, delete it - if [ -e $ALT_TEST_TEXT_FILE -o -L $ALT_TEST_TEXT_FILE ] + if [ -e "${ALT_TEST_TEXT_FILE}" ] || [ -L "${ALT_TEST_TEXT_FILE}" ] then - rm $ALT_TEST_TEXT_FILE + rm "${ALT_TEST_TEXT_FILE}" fi - if [ -e $ALT_TEST_TEXT_FILE ] + if [ -e "${ALT_TEST_TEXT_FILE}" ] then echo "Could not delete file ${ALT_TEST_TEXT_FILE}, it still exists" return 1 @@ -742,9 +751,9 @@ function test_mtime_file { sleep 1 # allow for some time to pass to compare the timestamps between test & alt #copy the test file with preserve mode - cp -p $TEST_TEXT_FILE $ALT_TEST_TEXT_FILE - local testmtime=`get_mtime $TEST_TEXT_FILE` - local altmtime=`get_mtime $ALT_TEST_TEXT_FILE` + cp -p "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" + local testmtime; testmtime=$(get_mtime "${TEST_TEXT_FILE}") + local altmtime; altmtime=$(get_mtime "${ALT_TEST_TEXT_FILE}") if [ "$testmtime" -ne "$altmtime" ] then echo "File times do not match: $testmtime != $altmtime" @@ -752,7 +761,7 @@ function test_mtime_file { fi rm_test_file - rm_test_file $ALT_TEST_TEXT_FILE + rm_test_file "${ALT_TEST_TEXT_FILE}" } # [NOTE] @@ -775,20 +784,20 @@ function test_update_time_chmod() { describe "Testing update time function chmod..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # chmod -> update only ctime # - chmod +x $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + chmod +x "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "chmod expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi @@ -802,17 +811,17 @@ function test_update_time_chown() { # chown -> update only ctime # local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 
cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") - chown $UID $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + chown $UID "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "chown expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi @@ -823,20 +832,20 @@ function test_update_time_xattr() { describe "Testing update time function set_xattr..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # set_xattr -> update only ctime # - set_xattr key value $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + set_xattr key value "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "set_xattr expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi @@ -847,20 +856,20 @@ function test_update_time_touch() { describe "Testing update time function touch..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # touch -> update ctime/atime/mtime # - touch $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -eq $atime -o $base_ctime -eq $ctime -o $base_mtime -eq $mtime ]; then + touch "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then echo "touch expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime, atime: $base_atime != $atime" return 1 fi @@ -871,20 +880,20 @@ function test_update_time_touch_a() { describe "Testing update time function touch -a..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # "touch -a" -> update ctime/atime, not update mtime # - touch -a $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -eq $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + touch -a "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "touch with -a option expected updated ctime: $base_ctime != $ctime, atime: $base_atime != $atime and same mtime: $base_mtime == $mtime" return 1 fi @@ -895,20 +904,20 @@ function test_update_time_append() { describe "Testing update time function append..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # append -> update ctime/mtime, not update atime # - echo foo >> $TEST_TEXT_FILE - local atime=`get_atime $TEST_TEXT_FILE` - local ctime=`get_ctime $TEST_TEXT_FILE` - local mtime=`get_mtime $TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -eq $mtime ]; then + echo foo >> "${TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then echo "append expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime and same atime: $base_atime == $atime" return 1 fi @@ -919,21 +928,21 @@ function test_update_time_cp_p() { describe "Testing update time function cp -p..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # cp -p -> update ctime, not update atime/mtime # local TIME_TEST_TEXT_FILE=test-s3fs-time.txt - cp -p $TEST_TEXT_FILE $TIME_TEST_TEXT_FILE - local atime=`get_atime $TIME_TEST_TEXT_FILE` - local ctime=`get_ctime $TIME_TEST_TEXT_FILE` - local mtime=`get_mtime $TIME_TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + cp -p "${TEST_TEXT_FILE}" "${TIME_TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TIME_TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TIME_TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TIME_TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "cp with -p option expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi @@ -943,27 +952,27 @@ function test_update_time_mv() { describe "Testing update time function mv..." 
local t0=1000000000 # 9 September 2001 - local OBJECT_NAME="$(basename $PWD)/${TEST_TEXT_FILE}" - echo data | aws_cli s3 cp --metadata="atime=$t0,ctime=$t0,mtime=$t0" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" - local base_atime=`get_atime $TEST_TEXT_FILE` - local base_ctime=`get_ctime $TEST_TEXT_FILE` - local base_mtime=`get_mtime $TEST_TEXT_FILE` + local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}" + echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" + local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}") + local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}") + local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}") # # mv -> update ctime, not update atime/mtime # local TIME2_TEST_TEXT_FILE=test-s3fs-time2.txt - mv $TEST_TEXT_FILE $TIME2_TEST_TEXT_FILE - local atime=`get_atime $TIME2_TEST_TEXT_FILE` - local ctime=`get_ctime $TIME2_TEST_TEXT_FILE` - local mtime=`get_mtime $TIME2_TEST_TEXT_FILE` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + mv "${TEST_TEXT_FILE}" "${TIME2_TEST_TEXT_FILE}" + local atime; atime=$(get_atime "${TIME2_TEST_TEXT_FILE}") + local ctime; ctime=$(get_ctime "${TIME2_TEST_TEXT_FILE}") + local mtime; mtime=$(get_mtime "${TIME2_TEST_TEXT_FILE}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "mv expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi - rm_test_file $TIME_TEST_TEXT_FILE - rm_test_file $TIME2_TEST_TEXT_FILE + rm_test_file "${TIME_TEST_TEXT_FILE}" + rm_test_file "${TIME2_TEST_TEXT_FILE}" } # [NOTE] @@ -977,181 +986,181 @@ function test_update_directory_time_chmod() { # create the directory and sub-directory and a file in directory # local t0=1000000000 # 9 September 2001 - local DIRECTORY_NAME="$(basename $PWD)/${TEST_DIR}" - aws_cli 
s3api put-object --content-type="application/x-directory" --metadata="atime=$t0,ctime=$t0,mtime=$t0" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" + local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}" + aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") # # chmod -> update only ctime # - chmod 0777 $TEST_DIR - local atime=`get_atime $TEST_DIR` - local ctime=`get_ctime $TEST_DIR` - local mtime=`get_mtime $TEST_DIR` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + chmod 0777 "${TEST_DIR}" + local atime; atime=$(get_atime "${TEST_DIR}") + local ctime; ctime=$(get_ctime "${TEST_DIR}") + local mtime; mtime=$(get_mtime "${TEST_DIR}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "chmod expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi - rm -rf $TEST_DIR + rm -rf "${TEST_DIR}" } function test_update_directory_time_chown { describe "Testing update time for directory chown..." 
local t0=1000000000 # 9 September 2001 - local DIRECTORY_NAME="$(basename $PWD)/${TEST_DIR}" - aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=$t0,ctime=$t0,mtime=$t0" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" + local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}" + aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") # # chown -> update only ctime # - chown $UID $TEST_DIR - local atime=`get_atime $TEST_DIR` - local ctime=`get_ctime $TEST_DIR` - local mtime=`get_mtime $TEST_DIR` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + chown "${UID}" "${TEST_DIR}" + local atime; atime=$(get_atime "${TEST_DIR}") + local ctime; ctime=$(get_ctime "${TEST_DIR}") + local mtime; mtime=$(get_mtime "${TEST_DIR}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "chown expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi - rm -rf $TEST_DIR + rm -rf "${TEST_DIR}" } function test_update_directory_time_set_xattr { describe "Testing update time for directory set_xattr..." 
local t0=1000000000 # 9 September 2001 - local DIRECTORY_NAME="$(basename $PWD)/${TEST_DIR}" - aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=$t0,ctime=$t0,mtime=$t0" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" + local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}" + aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") # # set_xattr -> update only ctime # - set_xattr key value $TEST_DIR - local atime=`get_atime $TEST_DIR` - local ctime=`get_ctime $TEST_DIR` - local mtime=`get_mtime $TEST_DIR` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + set_xattr key value "${TEST_DIR}" + local atime; atime=$(get_atime "${TEST_DIR}") + local ctime; ctime=$(get_ctime "${TEST_DIR}") + local mtime; mtime=$(get_mtime "${TEST_DIR}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "set_xattr expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi - rm -rf $TEST_DIR + rm -rf "${TEST_DIR}" } function test_update_directory_time_touch { describe "Testing update time for directory touch..." 
- local local t0=1000000000 # 9 September 2001 - local DIRECTORY_NAME="$(basename $PWD)/${TEST_DIR}" - aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=$t0,ctime=$t0,mtime=$t0" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" + local t0=1000000000 # 9 September 2001 + local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}" + aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") # # touch -> update ctime/atime/mtime # - touch $TEST_DIR - local atime=`get_atime $TEST_DIR` - local ctime=`get_ctime $TEST_DIR` - local mtime=`get_mtime $TEST_DIR` - if [ $base_atime -eq $atime -o $base_ctime -eq $ctime -o $base_mtime -eq $mtime ]; then + touch "${TEST_DIR}" + local atime; atime=$(get_atime "${TEST_DIR}") + local ctime; ctime=$(get_ctime "${TEST_DIR}") + local mtime; mtime=$(get_mtime "${TEST_DIR}") + if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then echo "touch expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime, atime: $base_atime != $atime" return 1 fi - rm -rf $TEST_DIR + rm -rf "${TEST_DIR}" } function test_update_directory_time_touch_a { describe "Testing update time for directory touch -a..." 
local t0=1000000000 # 9 September 2001 - local DIRECTORY_NAME="$(basename $PWD)/${TEST_DIR}" - aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=$t0,ctime=$t0,mtime=$t0" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" + local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}" + aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/" - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") # # "touch -a" -> update ctime/atime, not update mtime # - touch -a $TEST_DIR - local atime=`get_atime $TEST_DIR` - local ctime=`get_ctime $TEST_DIR` - local mtime=`get_mtime $TEST_DIR` - if [ $base_atime -eq $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + touch -a "${TEST_DIR}" + local atime; atime=$(get_atime "${TEST_DIR}") + local ctime; ctime=$(get_ctime "${TEST_DIR}") + local mtime; mtime=$(get_mtime "${TEST_DIR}") + if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "touch with -a option expected updated ctime: $base_ctime != $ctime, atime: $base_atime != $atime and same mtime: $base_mtime == $mtime" return 1 fi - rm -rf $TEST_DIR + rm -rf "${TEST_DIR}" } function test_update_directory_time_subdir() { describe "Testing update time for directory subdirectory..." 
- local TIME_TEST_SUBDIR="$TEST_DIR/testsubdir" - local TIME_TEST_FILE_INDIR="$TEST_DIR/testfile" + local TIME_TEST_SUBDIR="${TEST_DIR}/testsubdir" + local TIME_TEST_FILE_INDIR="${TEST_DIR}/testfile" mk_test_dir - mkdir $TIME_TEST_SUBDIR - touch $TIME_TEST_FILE_INDIR + mkdir "${TIME_TEST_SUBDIR}" + touch "${TIME_TEST_FILE_INDIR}" # TODO: remove sleep after improving AWS CLI speed sleep 1 - local base_atime=`get_atime $TEST_DIR` - local base_ctime=`get_ctime $TEST_DIR` - local base_mtime=`get_mtime $TEST_DIR` - local subdir_atime=`get_atime $TIME_TEST_SUBDIR` - local subdir_ctime=`get_ctime $TIME_TEST_SUBDIR` - local subdir_mtime=`get_mtime $TIME_TEST_SUBDIR` - local subfile_atime=`get_atime $TIME_TEST_FILE_INDIR` - local subfile_ctime=`get_ctime $TIME_TEST_FILE_INDIR` - local subfile_mtime=`get_mtime $TIME_TEST_FILE_INDIR` + local base_atime; base_atime=$(get_atime "${TEST_DIR}") + local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}") + local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}") + local subdir_atime; subdir_atime=$(get_atime "${TIME_TEST_SUBDIR}") + local subdir_ctime; subdir_ctime=$(get_ctime "${TIME_TEST_SUBDIR}") + local subdir_mtime; subdir_mtime=$(get_mtime "${TIME_TEST_SUBDIR}") + local subfile_atime; subfile_atime=$(get_atime "${TIME_TEST_FILE_INDIR}") + local subfile_ctime; subfile_ctime=$(get_ctime "${TIME_TEST_FILE_INDIR}") + local subfile_mtime; subfile_mtime=$(get_mtime "${TIME_TEST_FILE_INDIR}") # # mv -> update ctime, not update atime/mtime for target directory # not update any for sub-directory and a file # local TIME_TEST_DIR=timetestdir - local TIME2_TEST_SUBDIR="$TIME_TEST_DIR/testsubdir" - local TIME2_TEST_FILE_INDIR="$TIME_TEST_DIR/testfile" - mv $TEST_DIR $TIME_TEST_DIR - local atime=`get_atime $TIME_TEST_DIR` - local ctime=`get_ctime $TIME_TEST_DIR` - local mtime=`get_mtime $TIME_TEST_DIR` - if [ $base_atime -ne $atime -o $base_ctime -eq $ctime -o $base_mtime -ne $mtime ]; then + local 
TIME2_TEST_SUBDIR="${TIME_TEST_DIR}/testsubdir" + local TIME2_TEST_FILE_INDIR="${TIME_TEST_DIR}/testfile" + mv "${TEST_DIR}" "${TIME_TEST_DIR}" + local atime; atime=$(get_atime "${TIME_TEST_DIR}") + local ctime; ctime=$(get_ctime "${TIME_TEST_DIR}") + local mtime; mtime=$(get_mtime "${TIME_TEST_DIR}") + if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then echo "mv expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime" return 1 fi - local atime=`get_atime $TIME2_TEST_SUBDIR` - local ctime=`get_ctime $TIME2_TEST_SUBDIR` - local mtime=`get_mtime $TIME2_TEST_SUBDIR` - if [ $subdir_atime -ne $atime -o $subdir_ctime -ne $ctime -o $subdir_mtime -ne $mtime ]; then + atime=$(get_atime "${TIME2_TEST_SUBDIR}") + ctime=$(get_ctime "${TIME2_TEST_SUBDIR}") + mtime=$(get_mtime "${TIME2_TEST_SUBDIR}") + if [ "${subdir_atime}" -ne "${atime}" ] || [ "${subdir_ctime}" -ne "${ctime}" ] || [ "${subdir_mtime}" -ne "${mtime}" ]; then echo "mv for sub-directory expected same ctime: $subdir_ctime == $ctime, mtime: $subdir_mtime == $mtime, atime: $subdir_atime == $atime" return 1 fi - local atime=`get_atime $TIME2_TEST_FILE_INDIR` - local ctime=`get_ctime $TIME2_TEST_FILE_INDIR` - local mtime=`get_mtime $TIME2_TEST_FILE_INDIR` - if [ $subfile_atime -ne $atime -o $subfile_ctime -ne $ctime -o $subfile_mtime -ne $mtime ]; then + atime=$(get_atime "${TIME2_TEST_FILE_INDIR}") + ctime=$(get_ctime "${TIME2_TEST_FILE_INDIR}") + mtime=$(get_mtime "${TIME2_TEST_FILE_INDIR}") + if [ "${subfile_atime}" -ne "${atime}" ] || [ "${subfile_ctime}" -ne "${ctime}" ] || [ "${subfile_mtime}" -ne "${mtime}" ]; then echo "mv for a file in directory expected same ctime: $subfile_ctime == $ctime, mtime: $subfile_mtime == $mtime, atime: $subfile_atime == $atime" return 1 fi - rm -rf $TIME_TEST_SUBDIR - rm -rf $TIME_TEST_DIR - rm -rf $TEST_DIR + rm -rf "${TIME_TEST_SUBDIR}" + rm -rf 
"${TIME_TEST_DIR}" + rm -rf "${TEST_DIR}" } function test_rm_rf_dir { @@ -1190,9 +1199,9 @@ function test_write_after_seek_ahead { function test_overwrite_existing_file_range { describe "Test overwrite range succeeds ..." - dd if=<(seq 1000) of=${TEST_TEXT_FILE} - dd if=/dev/zero of=${TEST_TEXT_FILE} seek=1 count=1 bs=1024 conv=notrunc - cmp ${TEST_TEXT_FILE} <( + dd if=<(seq 1000) of="${TEST_TEXT_FILE}" + dd if=/dev/zero of="${TEST_TEXT_FILE}" seek=1 count=1 bs=1024 conv=notrunc + cmp "${TEST_TEXT_FILE}" <( seq 1000 | head -c 1024 dd if=/dev/zero count=1 bs=1024 seq 1000 | tail -c +2049 @@ -1202,24 +1211,29 @@ function test_overwrite_existing_file_range { function test_concurrent_directory_updates { describe "Test concurrent updates to a directory ..." - for i in `seq 5`; do echo foo > $i; done - for process in `seq 10`; do - for i in `seq 5`; do - local file=$(ls `seq 5` | ${SED_BIN} -n "$(($RANDOM % 5 + 1))p") - cat $file >/dev/null || true - rm -f $file - echo foo > $file || true + for i in $(seq 5); do + echo foo > "${i}" + done + for _ in $(seq 10); do + for i in $(seq 5); do + local file + # shellcheck disable=SC2012,SC2046 + file=$(ls $(seq 5) | "${SED_BIN}" -n "$((RANDOM % 5 + 1))p") + cat "${file}" >/dev/null || true + rm -f "${file}" + echo "foo" > "${file}" || true done & done wait - rm -f `seq 5` + # shellcheck disable=SC2046 + rm -f $(seq 5) } function test_concurrent_reads { describe "Test concurrent reads from a file ..." 
- ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}" - for process in `seq 10`; do - dd if=${TEST_TEXT_FILE} of=/dev/null seek=$(($RANDOM % $BIG_FILE_LENGTH)) count=16 bs=1024 & + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}" + for _ in $(seq 10); do + dd if="${TEST_TEXT_FILE}" of=/dev/null seek=$((RANDOM % BIG_FILE_LENGTH)) count=16 bs=1024 & done wait rm_test_file @@ -1227,9 +1241,9 @@ function test_concurrent_reads { function test_concurrent_writes { describe "Test concurrent writes to a file ..." - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}" - for process in `seq 10`; do - dd if=/dev/zero of=${TEST_TEXT_FILE} seek=$(($RANDOM % $BIG_FILE_LENGTH)) count=16 bs=1024 conv=notrunc & + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}" + for _ in $(seq 10); do + dd if=/dev/zero of="${TEST_TEXT_FILE}" seek=$((RANDOM % BIG_FILE_LENGTH)) count=16 bs=1024 conv=notrunc & done wait rm_test_file @@ -1238,8 +1252,11 @@ function test_concurrent_writes { function test_open_second_fd { describe "read from an open fd ..." rm_test_file second_fd_file - local RESULT=$( (echo foo ; wc -c < second_fd_file >&2) 2>& 1>second_fd_file) - if [ "$RESULT" -ne 4 ]; then + + local RESULT + # shellcheck disable=SC2094 + RESULT=$( (echo foo ; wc -c < second_fd_file >&2) 2>& 1>second_fd_file) + if [ "${RESULT}" -ne 4 ]; then echo "size mismatch, expected: 4, was: ${RESULT}" return 1 fi @@ -1248,14 +1265,14 @@ function test_open_second_fd { function test_write_multiple_offsets { describe "test writing to multiple offsets ..." 
- ../../write_multiblock -f ${TEST_TEXT_FILE} -p "1024:1" -p "$((16 * 1024 * 1024)):1" -p "$((18 * 1024 * 1024)):1" - rm_test_file ${TEST_TEXT_FILE} + ../../write_multiblock -f "${TEST_TEXT_FILE}" -p "1024:1" -p "$((16 * 1024 * 1024)):1" -p "$((18 * 1024 * 1024)):1" + rm_test_file "${TEST_TEXT_FILE}" } function test_write_multiple_offsets_backwards { describe "test writing to multiple offsets ..." - ../../write_multiblock -f ${TEST_TEXT_FILE} -p "$((20 * 1024 * 1024 + 1)):1" -p "$((10 * 1024 * 1024)):1" - rm_test_file ${TEST_TEXT_FILE} + ../../write_multiblock -f "${TEST_TEXT_FILE}" -p "$((20 * 1024 * 1024 + 1)):1" -p "$((10 * 1024 * 1024)):1" + rm_test_file "${TEST_TEXT_FILE}" } function test_clean_up_cache() { @@ -1263,57 +1280,58 @@ function test_clean_up_cache() { local dir="many_files" local count=25 - mkdir -p $dir + mkdir -p "${dir}" - for x in $(seq $count); do - ../../junk_data 10485760 > $dir/file-$x + for x in $(seq "${count}"); do + ../../junk_data 10485760 > "${dir}"/file-"${x}" done - local file_cnt=$(ls $dir | wc -l) - if [ $file_cnt != $count ]; then - echo "Expected $count files but got $file_cnt" - rm -rf $dir + local file_list=("${dir}"/*); + local file_cnt="${#file_list[@]}" + if [ "${file_cnt}" != "${count}" ]; then + echo "Expected $count files but got ${file_cnt}" + rm -rf "${dir}" return 1 fi - local CACHE_DISK_AVAIL_SIZE=`get_disk_avail_size $CACHE_DIR` - if [ "$CACHE_DISK_AVAIL_SIZE" -lt "$ENSURE_DISKFREE_SIZE" ];then - echo "Cache disk avail size:$CACHE_DISK_AVAIL_SIZE less than ensure_diskfree size:$ENSURE_DISKFREE_SIZE" - rm -rf $dir + local CACHE_DISK_AVAIL_SIZE; CACHE_DISK_AVAIL_SIZE=$(get_disk_avail_size "${CACHE_DIR}") + if [ "${CACHE_DISK_AVAIL_SIZE}" -lt "${ENSURE_DISKFREE_SIZE}" ];then + echo "Cache disk avail size:${CACHE_DISK_AVAIL_SIZE} less than ensure_diskfree size:${ENSURE_DISKFREE_SIZE}" + rm -rf "${dir}" return 1 fi - rm -rf $dir + rm -rf "${dir}" } function test_content_type() { describe "Test Content-Type detection ..." 
- local DIR_NAME="$(basename $PWD)" + local DIR_NAME; DIR_NAME=$(basename "${PWD}") touch "test.txt" - local CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.txt" | grep "ContentType") - if ! echo $CONTENT_TYPE | grep -q "text/plain"; then - echo "Unexpected Content-Type: $CONTENT_TYPE" + local CONTENT_TYPE; CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.txt" | grep "ContentType") + if ! echo "${CONTENT_TYPE}" | grep -q "text/plain"; then + echo "Unexpected Content-Type: ${CONTENT_TYPE}" return 1; fi touch "test.jpg" - local CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.jpg" | grep "ContentType") - if ! echo $CONTENT_TYPE | grep -q "image/jpeg"; then - echo "Unexpected Content-Type: $CONTENT_TYPE" + CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.jpg" | grep "ContentType") + if ! echo "${CONTENT_TYPE}" | grep -q "image/jpeg"; then + echo "Unexpected Content-Type: ${CONTENT_TYPE}" return 1; fi touch "test.bin" - local CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.bin" | grep "ContentType") - if ! echo $CONTENT_TYPE | grep -q "application/octet-stream"; then - echo "Unexpected Content-Type: $CONTENT_TYPE" + CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.bin" | grep "ContentType") + if ! echo "${CONTENT_TYPE}" | grep -q "application/octet-stream"; then + echo "Unexpected Content-Type: ${CONTENT_TYPE}" return 1; fi mkdir "test.dir" - local CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.dir/" | grep "ContentType") - if ! echo $CONTENT_TYPE | grep -q "application/x-directory"; then - echo "Unexpected Content-Type: $CONTENT_TYPE" + CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.dir/" | grep "ContentType") + if ! 
echo "${CONTENT_TYPE}" | grep -q "application/x-directory"; then + echo "Unexpected Content-Type: ${CONTENT_TYPE}" return 1; fi @@ -1328,20 +1346,21 @@ function test_truncate_cache() { describe "Test make cache files over max cache file size ..." for dir in $(seq 2); do - mkdir $dir + mkdir "${dir}" for file in $(seq 75); do - touch $dir/$file + touch "${dir}/${file}" done - ls $dir + ls "${dir}" done - rm -rf `seq 2` + # shellcheck disable=SC2046 + rm -rf $(seq 2) } function test_cache_file_stat() { describe "Test cache file stat ..." - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${BIG_FILE}" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}" # # The first argument of the script is "testrun-" the directory name. @@ -1351,8 +1370,10 @@ function test_cache_file_stat() { # # get cache file inode number # - local CACHE_FILE_INODE=$(ls -i ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE} 2>/dev/null | awk '{print $1}') - if [ -z ${CACHE_FILE_INODE} ]; then + local CACHE_FILE_INODE + # shellcheck disable=SC2012 + CACHE_FILE_INODE=$(ls -i "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null | awk '{print $1}') + if [ -z "${CACHE_FILE_INODE}" ]; then echo "Not found cache file or failed to get inode: ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" return 1; fi @@ -1360,9 +1381,9 @@ function test_cache_file_stat() { # # get lines from cache stat file # - local CACHE_FILE_STAT_LINE_1=$(${SED_BIN} -n 1p ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}) - local CACHE_FILE_STAT_LINE_2=$(${SED_BIN} -n 2p ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}) - if [ -z ${CACHE_FILE_STAT_LINE_1} ] || [ -z ${CACHE_FILE_STAT_LINE_2} ]; then + local CACHE_FILE_STAT_LINE_1; CACHE_FILE_STAT_LINE_1=$("${SED_BIN}" -n 1p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}") + local CACHE_FILE_STAT_LINE_2; 
CACHE_FILE_STAT_LINE_2=$("${SED_BIN}" -n 2p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}") + if [ -z "${CACHE_FILE_STAT_LINE_1}" ] || [ -z "${CACHE_FILE_STAT_LINE_2}" ]; then echo "could not get first or second line from cache file stat: ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}" return 1; fi @@ -1382,20 +1403,21 @@ function test_cache_file_stat() { # # remove cache files directly # - rm -f ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE} - rm -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE} + rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" + rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}" # # write a byte into the middle(not the boundary) of the file # local CHECK_UPLOAD_OFFSET=$((10 * 1024 * 1024 + 17)) - dd if=/dev/urandom of="${BIG_FILE}" bs=1 count=1 seek=${CHECK_UPLOAD_OFFSET} conv=notrunc + dd if=/dev/urandom of="${BIG_FILE}" bs=1 count=1 seek="${CHECK_UPLOAD_OFFSET}" conv=notrunc # # get cache file inode number # - local CACHE_FILE_INODE=$(ls -i ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE} 2>/dev/null | awk '{print $1}') - if [ -z ${CACHE_FILE_INODE} ]; then + # shellcheck disable=SC2012 + CACHE_FILE_INODE=$(ls -i "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null | awk '{print $1}') + if [ -z "${CACHE_FILE_INODE}" ]; then echo "Not found cache file or failed to get inode: ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" return 1; fi @@ -1403,9 +1425,9 @@ function test_cache_file_stat() { # # get lines from cache stat file # - local CACHE_FILE_STAT_LINE_1=$(${SED_BIN} -n 1p ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}) - local CACHE_FILE_STAT_LINE_E=$(tail -1 ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE} 2>/dev/null) - if [ -z ${CACHE_FILE_STAT_LINE_1} ] || [ -z ${CACHE_FILE_STAT_LINE_E} ]; then + 
CACHE_FILE_STAT_LINE_1=$("${SED_BIN}" -n 1p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}") + local CACHE_FILE_STAT_LINE_E; CACHE_FILE_STAT_LINE_E=$(tail -1 "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null) + if [ -z "${CACHE_FILE_STAT_LINE_1}" ] || [ -z "${CACHE_FILE_STAT_LINE_E}" ]; then echo "could not get first or end line from cache file stat: ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}" return 1; fi @@ -1417,15 +1439,15 @@ function test_cache_file_stat() { # differs depending on the processing system etc., then the cache file # size is calculated and compared. # - local CACHE_LAST_OFFSET=$(echo ${CACHE_FILE_STAT_LINE_E} | cut -d ":" -f1) - local CACHE_LAST_SIZE=$(echo ${CACHE_FILE_STAT_LINE_E} | cut -d ":" -f2) - local CACHE_TOTAL_SIZE=$((${CACHE_LAST_OFFSET} + ${CACHE_LAST_SIZE})) + local CACHE_LAST_OFFSET; CACHE_LAST_OFFSET=$(echo "${CACHE_FILE_STAT_LINE_E}" | cut -d ":" -f1) + local CACHE_LAST_SIZE; CACHE_LAST_SIZE=$(echo "${CACHE_FILE_STAT_LINE_E}" | cut -d ":" -f2) + local CACHE_TOTAL_SIZE=$((CACHE_LAST_OFFSET + CACHE_LAST_SIZE)) if [ "${CACHE_FILE_STAT_LINE_1}" != "${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}" ]; then echo "first line(cache file stat) is different: \"${CACHE_FILE_STAT_LINE_1}\" != \"${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}\"" return 1; fi - if [ ${BIG_FILE_LENGTH} -ne ${CACHE_TOTAL_SIZE} ]; then + if [ "${BIG_FILE_LENGTH}" -ne "${CACHE_TOTAL_SIZE}" ]; then echo "the file size indicated by the cache stat file is different: \"${BIG_FILE_LENGTH}\" != \"${CACHE_TOTAL_SIZE}\"" return 1; fi @@ -1436,12 +1458,12 @@ function test_cache_file_stat() { function test_zero_cache_file_stat() { describe "Test zero byte cache file stat ..." - rm_test_file ${TEST_TEXT_FILE} + rm_test_file "${TEST_TEXT_FILE}" # # create empty file # - touch ${TEST_TEXT_FILE} + touch "${TEST_TEXT_FILE}" # # The first argument of the script is "testrun-" the directory name. 
@@ -1451,44 +1473,43 @@ function test_zero_cache_file_stat() { # [NOTE] # The stat file is a one-line text file, expecting for ":0"(ex. "4543937: 0"). # - head -1 ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${TEST_TEXT_FILE} 2>/dev/null | grep -q ':0$' 2>/dev/null - if [ $? -ne 0 ]; then + if ! head -1 "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${TEST_TEXT_FILE}" 2>/dev/null | grep -q ':0$' 2>/dev/null; then echo "The cache file stat after creating an empty file is incorrect : ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${TEST_TEXT_FILE}" return 1; fi - rm_test_file ${TEST_TEXT_FILE} + rm_test_file "${TEST_TEXT_FILE}" } function test_upload_sparsefile { describe "Testing upload sparse file ..." - rm_test_file ${BIG_FILE} - rm -f ${TEMP_DIR}/${BIG_FILE} + rm_test_file "${BIG_FILE}" + rm -f "${TEMP_DIR}/${BIG_FILE}" # # Make all HOLE file # - ${TRUNCATE_BIN} ${BIG_FILE} -s ${BIG_FILE_LENGTH} + "${TRUNCATE_BIN}" "${BIG_FILE}" -s "${BIG_FILE_LENGTH}" # # Write some bytes to ABOUT middle in the file # (Dare to remove the block breaks) # - local WRITE_POS=$((${BIG_FILE_LENGTH} / 2 - 128)) - echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}" bs=1 count=16 seek=${WRITE_POS} conv=notrunc + local WRITE_POS=$((BIG_FILE_LENGTH / 2 - 128)) + echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}" bs=1 count=16 seek="${WRITE_POS}" conv=notrunc # # copy(upload) the file # - cp ${TEMP_DIR}/${BIG_FILE} ${BIG_FILE} + cp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}" # # check # - cmp ${TEMP_DIR}/${BIG_FILE} ${BIG_FILE} + cmp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}" - rm_test_file ${BIG_FILE} - rm -f ${TEMP_DIR}/${BIG_FILE} + rm_test_file "${BIG_FILE}" + rm -f "${TEMP_DIR}/${BIG_FILE}" } function test_mix_upload_entities() { @@ -1497,26 +1518,26 @@ function test_mix_upload_entities() { # # Make test file # - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${BIG_FILE}" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * 
BIG_FILE_COUNT)) > "${BIG_FILE}" # # If the cache option is enabled, delete the cache of uploaded files. # - if [ -f ${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE} ]; then - rm -f ${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE} + if [ -f "${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE}" ]; then + rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE}" fi - if [ -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE} ]; then - rm -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE} + if [ -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE}" ]; then + rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE}" fi # # Do a partial write to the file. # - echo -n "0123456789ABCDEF" | dd of=${BIG_FILE} bs=1 count=16 seek=0 conv=notrunc - echo -n "0123456789ABCDEF" | dd of=${BIG_FILE} bs=1 count=16 seek=8192 conv=notrunc - echo -n "0123456789ABCDEF" | dd of=${BIG_FILE} bs=1 count=16 seek=1073152 conv=notrunc - echo -n "0123456789ABCDEF" | dd of=${BIG_FILE} bs=1 count=16 seek=26214400 conv=notrunc - echo -n "0123456789ABCDEF" | dd of=${BIG_FILE} bs=1 count=16 seek=26222592 conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=0 conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=8192 conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=1073152 conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=26214400 conv=notrunc + echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=26222592 conv=notrunc rm_test_file "${BIG_FILE}" } @@ -1533,27 +1554,28 @@ function test_ensurespace_move_file() { # # Make test file which is not under mountpoint # - mkdir -p ${CACHE_DIR}/.s3fs_test_tmpdir - ../../junk_data $(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) > "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}" + mkdir -p "${CACHE_DIR}/.s3fs_test_tmpdir" + ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}" # # Backup file stat # - if [ `uname` = 
"Darwin" ]; then - local ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" ${CACHE_DIR}/.s3fs_test_tmpdir/$BIG_FILE) + local ORIGINAL_PERMISSIONS + if [ "$(uname)" = "Darwin" ]; then + ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}") else - local ORIGINAL_PERMISSIONS=$(stat --format=%u:%g ${CACHE_DIR}/.s3fs_test_tmpdir/$BIG_FILE) + ORIGINAL_PERMISSIONS=$(stat --format=%u:%g "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}") fi # # Fill the disk size # - local NOW_CACHE_DISK_AVAIL_SIZE=`get_disk_avail_size ${CACHE_DIR}` + local NOW_CACHE_DISK_AVAIL_SIZE; NOW_CACHE_DISK_AVAIL_SIZE=$(get_disk_avail_size "${CACHE_DIR}") local TMP_FILE_NO=0 while true; do local ALLOWED_USING_SIZE=$((NOW_CACHE_DISK_AVAIL_SIZE - ENSURE_DISKFREE_SIZE)) - if [ ${ALLOWED_USING_SIZE} -gt ${BIG_FILE_LENGTH} ]; then - cp -p ${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE} ${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}_${TMP_FILE_NO} + if [ "${ALLOWED_USING_SIZE}" -gt "${BIG_FILE_LENGTH}" ]; then + cp -p "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}" "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}_${TMP_FILE_NO}" local TMP_FILE_NO=$((TMP_FILE_NO + 1)) else break; @@ -1568,12 +1590,15 @@ function test_ensurespace_move_file() { # # file stat # - if [ `uname` = "Darwin" ]; then - local MOVED_PERMISSIONS=$(stat -f "%u:%g" $BIG_FILE) + local MOVED_PERMISSIONS + if [ "$(uname)" = "Darwin" ]; then + MOVED_PERMISSIONS=$(stat -f "%u:%g" "${BIG_FILE}") else - local MOVED_PERMISSIONS=$(stat --format=%u:%g $BIG_FILE) + MOVED_PERMISSIONS=$(stat --format=%u:%g "${BIG_FILE}") fi - local MOVED_FILE_LENGTH=$(ls -l $BIG_FILE | awk '{print $5}') + local MOVED_FILE_LENGTH + # shellcheck disable=SC2012 + MOVED_FILE_LENGTH=$(ls -l "${BIG_FILE}" | awk '{print $5}') # # check @@ -1582,18 +1607,20 @@ function test_ensurespace_move_file() { echo "Failed to move file with permission" return 1 fi - if [ ${MOVED_FILE_LENGTH} -ne ${BIG_FILE_LENGTH} ]; then + if [ "${MOVED_FILE_LENGTH}" -ne "${BIG_FILE_LENGTH}" ]; then 
echo "Failed to move file with file length: ${MOVED_FILE_LENGTH} ${BIG_FILE_LENGTH}" return 1 fi rm_test_file "${BIG_FILE}" - rm -rf ${CACHE_DIR}/.s3fs_test_tmpdir + rm -rf "${CACHE_DIR}/.s3fs_test_tmpdir" } function test_ut_ossfs { describe "Testing ossfs python ut..." - export TEST_BUCKET_MOUNT_POINT=$TEST_BUCKET_MOUNT_POINT_1 + + # shellcheck disable=SC2153 + export TEST_BUCKET_MOUNT_POINT="${TEST_BUCKET_MOUNT_POINT_1}" ../../ut_test.py } @@ -1631,8 +1658,8 @@ function test_write_data_with_skip() { # # Clean files # - rm_test_file ${_SKIPWRITE_FILE} - rm_test_file ${_TMP_SKIPWRITE_FILE} + rm_test_file "${_SKIPWRITE_FILE}" + rm_test_file "${_TMP_SKIPWRITE_FILE}" # # Create new file in bucket and temporary directory(/tmp) @@ -1664,15 +1691,16 @@ function test_write_data_with_skip() { # # delete cache file if using cache # - if ps u $S3FS_PID | grep -q use_cache; then - rm -f ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} - rm -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} + # shellcheck disable=SC2009 + if ps u "${S3FS_PID}" | grep -q use_cache; then + rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" + rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" fi # # Compare # - cmp ${_SKIPWRITE_FILE} ${_TMP_SKIPWRITE_FILE} + cmp "${_SKIPWRITE_FILE}" "${_TMP_SKIPWRITE_FILE}" #------------------------------------------------------ # (2) test existed file @@ -1680,9 +1708,10 @@ function test_write_data_with_skip() { # [NOTE] # This test uses the file used in the previous test as an existing file. 
# - if ps u $S3FS_PID | grep -q use_cache; then - rm -f ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} - rm -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} + # shellcheck disable=SC2009 + if ps u "${S3FS_PID}" | grep -q use_cache; then + rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" + rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" fi # @@ -1717,29 +1746,32 @@ function test_write_data_with_skip() { # # delete cache file if using cache # - if ps u $S3FS_PID | grep -q use_cache; then - rm -f ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} - rm -f ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE} + # shellcheck disable=SC2009 + if ps u "${S3FS_PID}" | grep -q use_cache; then + rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" + rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}" fi # # Compare # - cmp ${_SKIPWRITE_FILE} ${_TMP_SKIPWRITE_FILE} + cmp "${_SKIPWRITE_FILE}" "${_TMP_SKIPWRITE_FILE}" # # Clean files # - rm_test_file ${_SKIPWRITE_FILE} - rm_test_file ${_TMP_SKIPWRITE_FILE} + rm_test_file "${_SKIPWRITE_FILE}" + rm_test_file "${_TMP_SKIPWRITE_FILE}" } function add_all_tests { - if ps u $S3FS_PID | grep -q use_cache; then + # shellcheck disable=SC2009 + if ps u "${S3FS_PID}" | grep -q use_cache; then add_tests test_cache_file_stat add_tests test_zero_cache_file_stat fi - if ! ps u $S3FS_PID | grep -q ensure_diskfree && ! uname | grep -q Darwin; then + # shellcheck disable=SC2009 + if ! ps u "${S3FS_PID}" | grep -q ensure_diskfree && ! uname | grep -q Darwin; then add_tests test_clean_up_cache fi add_tests test_create_empty_file @@ -1757,7 +1789,8 @@ function add_all_tests { add_tests test_chown add_tests test_list add_tests test_remove_nonempty_directory - if ! 
ps u $S3FS_PID | grep -q notsup_compat_dir; then + # shellcheck disable=SC2009 + if ! ps u "${S3FS_PID}" | grep -q notsup_compat_dir; then # TODO: investigate why notsup_compat_dir fails add_tests test_external_directory_creation fi @@ -1811,7 +1844,8 @@ function add_all_tests { add_tests test_upload_sparsefile add_tests test_mix_upload_entities add_tests test_ut_ossfs - if ! ps u $S3FS_PID | grep -q ensure_diskfree && ! uname | grep -q Darwin; then + # shellcheck disable=SC2009 + if ! ps u "${S3FS_PID}" | grep -q ensure_diskfree && ! uname | grep -q Darwin; then add_tests test_ensurespace_move_file fi add_tests test_write_data_with_skip diff --git a/test/junk_data.c b/test/junk_data.c index 00dd1f2..148dbb6 100644 --- a/test/junk_data.c +++ b/test/junk_data.c @@ -40,3 +40,12 @@ int main(int argc, char *argv[]) } return 0; } + +/* +* Local variables: +* tab-width: 4 +* c-basic-offset: 4 +* End: +* vim600: expandtab sw=4 ts=4 fdm=marker +* vim<600: expandtab sw=4 ts=4 +*/ diff --git a/test/mergedir.sh b/test/mergedir.sh index 05d60c8..7e86aab 100755 --- a/test/mergedir.sh +++ b/test/mergedir.sh @@ -40,25 +40,25 @@ UsageFunction() } ### Check parameters -WHOAMI=`whoami` -OWNNAME=`basename $0` +WHOAMI=$(whoami) +OWNNAME=$(basename "$0") AUTOYES="no" ALLYES="no" DIRPARAM="" while [ "$1" != "" ]; do - if [ "X$1" = "X-help" -o "X$1" = "X-h" -o "X$1" = "X-H" ]; then - UsageFunction $OWNNAME + if [ "X$1" = "X-help" ] || [ "X$1" = "X-h" ] || [ "X$1" = "X-H" ]; then + UsageFunction "${OWNNAME}" exit 0 - elif [ "X$1" = "X-y" -o "X$1" = "X-Y" ]; then + elif [ "X$1" = "X-y" ] || [ "X$1" = "X-Y" ]; then AUTOYES="yes" - elif [ "X$1" = "X-all" -o "X$1" = "X-ALL" ]; then + elif [ "X$1" = "X-all" ] || [ "X$1" = "X-ALL" ]; then ALLYES="yes" else if [ "X$DIRPARAM" != "X" ]; then echo "*** Input error." echo "" - UsageFunction $OWNNAME + UsageFunction "${OWNNAME}" exit 1 fi DIRPARAM=$1 @@ -68,7 +68,7 @@ done if [ "X$DIRPARAM" = "X" ]; then echo "*** Input error." 
echo "" - UsageFunction $OWNNAME + UsageFunction "${OWNNAME}" exit 1 fi @@ -88,18 +88,17 @@ echo "Please execute this program by responsibility of your own." echo "#############################################################################" echo "" -DATE=`date +'%Y%m%d-%H%M%S'` -LOGFILE="$OWNNAME-$DATE.log" +DATE=$(date +'%Y%m%d-%H%M%S') +LOGFILE="${OWNNAME}-${DATE}.log" -echo -n "Start to merge directory object... [$DIRPARAM]" -echo "# Start to merge directory object... [$DIRPARAM]" >> $LOGFILE -echo -n "# DATE : " >> $LOGFILE -echo `date` >> $LOGFILE -echo -n "# BASEDIR : " >> $LOGFILE -echo `pwd` >> $LOGFILE -echo -n "# TARGET PATH : " >> $LOGFILE -echo $DIRPARAM >> $LOGFILE -echo "" >> $LOGFILE +echo "Start to merge directory object... [${DIRPARAM}]" +{ + echo "# Start to merge directory object... [${DIRPARAM}]" + echo "# DATE : $(date)" + echo "# BASEDIR : $(pwd)" + echo "# TARGET PATH : ${DIRPARAM}" + echo "" +} > "${LOGFILE}" if [ "$AUTOYES" = "yes" ]; then echo "(no confirmation)" @@ -109,80 +108,84 @@ fi echo "" ### Get Directory list -DIRLIST=`find $DIRPARAM -type d -print | grep -v ^\.$` +DIRLIST=$(find "${DIRPARAM}" -type d -print | grep -v ^\.$) # # Main loop # for DIR in $DIRLIST; do ### Skip "." and ".." directories - BASENAME=`basename $DIR` - if [ "$BASENAME" = "." -o "$BASENAME" = ".." ]; then + BASENAME=$(basename "${DIR}") + if [ "${BASENAME}" = "." ] || [ "${BASENAME}" = ".." ]; then continue fi - if [ "$ALLYES" = "no" ]; then + if [ "${ALLYES}" = "no" ]; then ### Skip "d---------" directories. ### Other clients make directory object "dir/" which don't have ### "x-amz-meta-mode" attribute. ### Then these directories is "d---------", it is target directory. 
- DIRPERMIT=`ls -ld --time-style=+'%Y%m%d%H%M' $DIR | awk '{print $1}'` - if [ "$DIRPERMIT" != "d---------" ]; then + + # shellcheck disable=SC2012 + DIRPERMIT=$(ls -ld --time-style=+'%Y%m%d%H%M' "${DIR}" | awk '{print $1}') + if [ "${DIRPERMIT}" != "d---------" ]; then continue fi fi ### Confirm ANSWER="" - if [ "$AUTOYES" = "yes" ]; then + if [ "${AUTOYES}" = "yes" ]; then ANSWER="y" fi - while [ "X$ANSWER" != "XY" -a "X$ANSWER" != "Xy" -a "X$ANSWER" != "XN" -a "X$ANSWER" != "Xn" ]; do - echo -n "Do you merge $DIR? (y/n): " - read ANSWER + while [ "X${ANSWER}" != "XY" ] && [ "X${ANSWER}" != "Xy" ] && [ "X${ANSWER}" != "XN" ] && [ "X${ANSWER}" != "Xn" ]; do + printf "%s" "Do you merge ${DIR} ? (y/n): " + read -r ANSWER done - if [ "X$ANSWER" != "XY" -a "X$ANSWER" != "Xy" ]; then + if [ "X${ANSWER}" != "XY" ] && [ "X${ANSWER}" != "Xy" ]; then continue fi ### Do - CHOWN=`ls -ld --time-style=+'%Y%m%d%H%M' $DIR | awk '{print $3":"$4" "$7}'` - CHMOD=`ls -ld --time-style=+'%Y%m%d%H%M' $DIR | awk '{print $7}'` - TOUCH=`ls -ld --time-style=+'%Y%m%d%H%M' $DIR | awk '{print $6" "$7}'` + # shellcheck disable=SC2012 + CHOWN=$(ls -ld --time-style=+'%Y%m%d%H%M' "${DIR}" | awk '{print $3":"$4" "$7}') + # shellcheck disable=SC2012 + CHMOD=$(ls -ld --time-style=+'%Y%m%d%H%M' "${DIR}" | awk '{print $7}') + # shellcheck disable=SC2012 + TOUCH=$(ls -ld --time-style=+'%Y%m%d%H%M' "${DIR}" | awk '{print $6" "$7}') - echo -n "*** Merge $DIR : " - echo -n " $DIR : " >> $LOGFILE + printf "%s" "*** Merge ${DIR} : " + printf "%s" " ${DIR} : " >> "${LOGFILE}" - chmod 755 $CHMOD > /dev/null 2>&1 + chmod 755 "${CHMOD}" > /dev/null 2>&1 RESULT=$? - if [ $RESULT -ne 0 ]; then + if [ "${RESULT}" -ne 0 ]; then echo "Failed(chmod)" - echo "Failed(chmod)" >> $LOGFILE + echo "Failed(chmod)" >> "${LOGFILE}" continue fi - chown $CHOWN > /dev/null 2>&1 + chown "${CHOWN}" > /dev/null 2>&1 RESULT=$? 
- if [ $RESULT -ne 0 ]; then + if [ "${RESULT}" -ne 0 ]; then echo "Failed(chown)" - echo "Failed(chown)" >> $LOGFILE + echo "Failed(chown)" >> "${LOGFILE}" continue fi - touch -t $TOUCH > /dev/null 2>&1 + touch -t "${TOUCH}" > /dev/null 2>&1 RESULT=$? - if [ $RESULT -ne 0 ]; then + if [ "${RESULT}" -ne 0 ]; then echo "Failed(touch)" - echo "Failed(touch)" >> $LOGFILE + echo "Failed(touch)" >> "${LOGFILE}" continue fi echo "Succeed" - echo "Succeed" >> $LOGFILE + echo "Succeed" >> "${LOGFILE}" done echo "" -echo "" >> $LOGFILE +echo "" >> "${LOGFILE}" echo "Finished." -echo -n "# Finished : " >> $LOGFILE -echo `date` >> $LOGFILE +echo "# Finished : $(date)" >> "${LOGFILE}" # # Local variables: diff --git a/test/sample_delcache.sh b/test/sample_delcache.sh index dca66e8..ce7ea66 100755 --- a/test/sample_delcache.sh +++ b/test/sample_delcache.sh @@ -46,33 +46,33 @@ func_usage() echo "" } -PRGNAME=`basename $0` +PRGNAME=$(basename "$0") -if [ "X$1" = "X-h" -o "X$1" = "X-H" ]; then - func_usage $PRGNAME +if [ "X$1" = "X-h" ] || [ "X$1" = "X-H" ]; then + func_usage "${PRGNAME}" exit 0 fi -if [ "X$1" = "X" -o "X$2" = "X" -o "X$3" = "X" ]; then - func_usage $PRGNAME +if [ "X$1" = "X" ] || [ "X$2" = "X" ] || [ "X$3" = "X" ]; then + func_usage "${PRGNAME}" exit 1 fi -BUCKET=$1 +BUCKET="$1" CDIR="$2" -LIMIT=$3 +LIMIT="$3" SILENT=0 if [ "X$4" = "X-silent" ]; then SILENT=1 fi FILES_CDIR="${CDIR}/${BUCKET}" STATS_CDIR="${CDIR}/.${BUCKET}.stat" -CURRENT_CACHE_SIZE=`du -sb "$FILES_CDIR" | awk '{print $1}'` +CURRENT_CACHE_SIZE=$(du -sb "${FILES_CDIR}" | awk '{print $1}') # # Check total size # -if [ $LIMIT -ge $CURRENT_CACHE_SIZE ]; then +if [ "${LIMIT}" -ge "${CURRENT_CACHE_SIZE}" ]; then if [ $SILENT -ne 1 ]; then - echo "$FILES_CDIR ($CURRENT_CACHE_SIZE) is below allowed $LIMIT" + echo "${FILES_CDIR} (${CURRENT_CACHE_SIZE}) is below allowed ${LIMIT}" fi exit 0 fi @@ -86,37 +86,36 @@ TMP_CFILE="" # # Make file list by sorted access time # -find "$STATS_CDIR" -type f -exec stat 
-c "%X:%n" "{}" \; | sort | while read part +find "${STATS_CDIR}" -type f -exec stat -c "%X:%n" "{}" \; | sort | while read -r part do - echo Looking at $part - TMP_ATIME=`echo "$part" | cut -d: -f1` - TMP_STATS="`echo "$part" | cut -d: -f2`" - TMP_CFILE=`echo "$TMP_STATS" | sed s/\.$BUCKET\.stat/$BUCKET/` + echo "Looking at ${part}" + TMP_ATIME=$(echo "${part}" | cut -d: -f1) + TMP_STATS=$(echo "${part}" | cut -d: -f2) + TMP_CFILE=$(echo "${TMP_STATS}" | sed -e "s/\\.${BUCKET}\\.stat/${BUCKET}/") - if [ `stat -c %X "$TMP_STATS"` -eq $TMP_ATIME ]; then - rm -f "$TMP_STATS" "$TMP_CFILE" > /dev/null 2>&1 - if [ $? -ne 0 ]; then - if [ $SILENT -ne 1 ]; then - echo "ERROR: Could not remove files($TMP_STATS,$TMP_CFILE)" + if [ "$(stat -c %X "${TMP_STATS}")" -eq "${TMP_ATIME}" ]; then + if ! rm "${TMP_STATS}" "${TMP_CFILE}" > /dev/null 2>&1; then + if [ "${SILENT}" -ne 1 ]; then + echo "ERROR: Could not remove files(${TMP_STATS},${TMP_CFILE})" fi exit 1 else - if [ $SILENT -ne 1 ]; then - echo "remove file: $TMP_CFILE $TMP_STATS" + if [ "${SILENT}" -ne 1 ]; then + echo "remove file: ${TMP_CFILE} ${TMP_STATS}" fi fi fi - if [ $LIMIT -ge `du -sb "$FILES_CDIR" | awk '{print $1}'` ]; then - if [ $SILENT -ne 1 ]; then + if [ "${LIMIT}" -ge "$(du -sb "${FILES_CDIR}" | awk '{print $1}')" ]; then + if [ "${SILENT}" -ne 1 ]; then echo "finish removing files" fi break fi done -if [ $SILENT -ne 1 ]; then - TOTAL_SIZE=`du -sb "$FILES_CDIR" | awk '{print $1}'` - echo "Finish: $FILES_CDIR total size is $TOTAL_SIZE" +if [ "${SILENT}" -ne 1 ]; then + TOTAL_SIZE=$(du -sb "${FILES_CDIR}" | awk '{print $1}') + echo "Finish: ${FILES_CDIR} total size is ${TOTAL_SIZE}" fi exit 0 diff --git a/test/small-integration-test.sh b/test/small-integration-test.sh index 6aa2154..e527297 100755 --- a/test/small-integration-test.sh +++ b/test/small-integration-test.sh @@ -32,8 +32,9 @@ CACHE_DIR="/tmp/s3fs-cache" rm -rf "${CACHE_DIR}" mkdir "${CACHE_DIR}" -#reserve 200MB for data cache source 
test-utils.sh + +#reserve 200MB for data cache FAKE_FREE_DISK_SIZE=200 ENSURE_DISKFREE_SIZE=10 @@ -44,13 +45,13 @@ if [ -n "${ALL_TESTS}" ]; then "use_cache=${CACHE_DIR} -o ensure_diskfree=${ENSURE_DISKFREE_SIZE} -o fake_diskfree=${FAKE_FREE_DISK_SIZE}" enable_content_md5 enable_noobj_cache - max_stat_cache_size=100 + "max_stat_cache_size=100" nocopyapi nomultipart notsup_compat_dir sigv2 sigv4 - singlepart_copy_limit=10 # limit size to exercise multipart code paths + "singlepart_copy_limit=10" # limit size to exercise multipart code paths #use_sse # TODO: S3Proxy does not support SSE ) else @@ -64,9 +65,10 @@ start_s3proxy aws_cli s3 mb "s3://${TEST_BUCKET_1}" --region "${S3_ENDPOINT}" for flag in "${FLAGS[@]}"; do - echo "testing s3fs flag: $flag" + echo "testing s3fs flag: ${flag}" - start_s3fs -o $flag + # shellcheck disable=SC2086 + start_s3fs -o ${flag} ./integration-test-main.sh diff --git a/test/test-utils.sh b/test/test-utils.sh index 8b11a97..c2a1b67 100644 --- a/test/test-utils.sh +++ b/test/test-utils.sh @@ -24,19 +24,29 @@ set -o errexit set -o pipefail +# # Configuration +# TEST_TEXT="HELLO WORLD" TEST_TEXT_FILE=test-s3fs.txt TEST_DIR=testdir +# shellcheck disable=SC2034 ALT_TEST_TEXT_FILE=test-s3fs-ALT.txt +# shellcheck disable=SC2034 TEST_TEXT_FILE_LENGTH=15 +# shellcheck disable=SC2034 BIG_FILE=big-file-s3fs.txt -TEMP_DIR=${TMPDIR:-"/var/tmp"} +# shellcheck disable=SC2034 +TEMP_DIR="${TMPDIR:-"/var/tmp"}" + # /dev/urandom can only return 32 MB per block maximum BIG_FILE_BLOCK_SIZE=$((25 * 1024 * 1024)) BIG_FILE_COUNT=1 + # This should be greater than the multipart size -BIG_FILE_LENGTH=$(($BIG_FILE_BLOCK_SIZE * $BIG_FILE_COUNT)) +# shellcheck disable=SC2034 +BIG_FILE_LENGTH=$((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) + # Set locale because some tests check for English expressions export LC_ALL=en_US.UTF-8 export RUN_DIR @@ -48,7 +58,7 @@ export RUN_DIR # and uses gnu commands(gstdbuf, gtruncate, gsed). 
# Set your PATH appropriately so that you can find these commands. # -if [ `uname` = "Darwin" ]; then +if [ "$(uname)" = "Darwin" ]; then export STDBUF_BIN="gstdbuf" export TRUNCATE_BIN="gtruncate" export SED_BIN="gsed" @@ -62,7 +72,7 @@ fi export SED_BUFFER_FLAG="--unbuffered" function get_xattr() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then xattr -p "$1" "$2" else getfattr -n "$1" --only-values "$2" @@ -70,7 +80,7 @@ function get_xattr() { } function set_xattr() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then xattr -w "$1" "$2" "$3" else setfattr -n "$1" -v "$2" "$3" @@ -78,7 +88,7 @@ function set_xattr() { } function del_xattr() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then xattr -d "$1" "$2" else setfattr -x "$1" "$2" @@ -86,7 +96,7 @@ function del_xattr() { } function get_size() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then stat -f "%z" "$1" else stat -c %s "$1" @@ -98,45 +108,47 @@ function check_file_size() { local EXPECTED_SIZE="$2" # Verify file is zero length via metadata - local size=$(get_size ${FILE_NAME}) - if [ $size -ne $EXPECTED_SIZE ] + local size + size=$(get_size "${FILE_NAME}") + if [ "${size}" -ne "${EXPECTED_SIZE}" ] then echo "error: expected ${FILE_NAME} to be zero length" return 1 fi # Verify file is zero length via data - local size=$(cat ${FILE_NAME} | wc -c) - if [ $size -ne $EXPECTED_SIZE ] + size=$(wc -c < "${FILE_NAME}") + if [ "${size}" -ne "${EXPECTED_SIZE}" ] then - echo "error: expected ${FILE_NAME} to be $EXPECTED_SIZE length, got $size" + echo "error: expected ${FILE_NAME} to be ${EXPECTED_SIZE} length, got ${size}" return 1 fi } function mk_test_file { if [ $# = 0 ]; then - local TEXT=$TEST_TEXT + local TEXT="${TEST_TEXT}" else - local TEXT=$1 + local TEXT="$1" fi - echo $TEXT > $TEST_TEXT_FILE - if [ ! -e $TEST_TEXT_FILE ] + echo "${TEXT}" > "${TEST_TEXT_FILE}" + if [ ! 
-e "${TEST_TEXT_FILE}" ] then echo "Could not create file ${TEST_TEXT_FILE}, it does not exist" exit 1 fi # wait & check - local BASE_TEXT_LENGTH=`echo $TEXT | wc -c | awk '{print $1}'` + local BASE_TEXT_LENGTH; BASE_TEXT_LENGTH=$(echo "${TEXT}" | wc -c | awk '{print $1}') local TRY_COUNT=10 while true; do - local MK_TEXT_LENGTH=`wc -c $TEST_TEXT_FILE | awk '{print $1}'` - if [ $BASE_TEXT_LENGTH -eq $MK_TEXT_LENGTH ]; then + local MK_TEXT_LENGTH + MK_TEXT_LENGTH=$(wc -c "${TEST_TEXT_FILE}" | awk '{print $1}') + if [ "${BASE_TEXT_LENGTH}" -eq "${MK_TEXT_LENGTH}" ]; then break fi - local TRY_COUNT=`expr $TRY_COUNT - 1` - if [ $TRY_COUNT -le 0 ]; then + local TRY_COUNT=$((TRY_COUNT - 1)) + if [ "${TRY_COUNT}" -le 0 ]; then echo "Could not create file ${TEST_TEXT_FILE}, that file size is something wrong" fi sleep 1 @@ -145,13 +157,13 @@ function mk_test_file { function rm_test_file { if [ $# = 0 ]; then - local FILE=$TEST_TEXT_FILE + local FILE="${TEST_TEXT_FILE}" else - local FILE=$1 + local FILE="$1" fi - rm -f $FILE + rm -f "${FILE}" - if [ -e $FILE ] + if [ -e "${FILE}" ] then echo "Could not cleanup file ${TEST_TEXT_FILE}" exit 1 @@ -159,17 +171,17 @@ function rm_test_file { } function mk_test_dir { - mkdir ${TEST_DIR} + mkdir "${TEST_DIR}" - if [ ! -d ${TEST_DIR} ]; then + if [ ! 
-d "${TEST_DIR}" ]; then echo "Directory ${TEST_DIR} was not created" exit 1 fi } function rm_test_dir { - rmdir ${TEST_DIR} - if [ -e $TEST_DIR ]; then + rmdir "${TEST_DIR}" + if [ -e "${TEST_DIR}" ]; then echo "Could not remove the test directory, it still exists: ${TEST_DIR}" exit 1 fi @@ -178,18 +190,18 @@ function rm_test_dir { # Create and cd to a unique directory for this test run # Sets RUN_DIR to the name of the created directory function cd_run_dir { - if [ "$TEST_BUCKET_MOUNT_POINT_1" = "" ]; then + if [ "${TEST_BUCKET_MOUNT_POINT_1}" = "" ]; then echo "TEST_BUCKET_MOUNT_POINT_1 variable not set" exit 1 fi - local RUN_DIR=${TEST_BUCKET_MOUNT_POINT_1}/${1} - mkdir -p ${RUN_DIR} - cd ${RUN_DIR} + local RUN_DIR="${TEST_BUCKET_MOUNT_POINT_1}/${1}" + mkdir -p "${RUN_DIR}" + cd "${RUN_DIR}" } function clean_run_dir { - if [ -d ${RUN_DIR} ]; then - rm -rf ${RUN_DIR} || echo "Error removing ${RUN_DIR}" + if [ -d "${RUN_DIR}" ]; then + rm -rf "${RUN_DIR}" || echo "Error removing ${RUN_DIR}" fi } @@ -204,14 +216,14 @@ function init_suite { # report_pass TEST_NAME function report_pass { echo "$1 passed" - TEST_PASSED_LIST+=($1) + TEST_PASSED_LIST+=("$1") } # Report a failing test case # report_fail TEST_NAME function report_fail { echo "$1 failed" - TEST_FAILED_LIST+=($1) + TEST_FAILED_LIST+=("$1") } # Add tests to the suite @@ -231,37 +243,37 @@ function describe { # directory in the bucket. An attempt to clean this directory is # made after the test run. function run_suite { - orig_dir=$PWD - key_prefix="testrun-$RANDOM" - cd_run_dir $key_prefix + orig_dir="${PWD}" + key_prefix="testrun-${RANDOM}" + cd_run_dir "${key_prefix}" for t in "${TEST_LIST[@]}"; do # Ensure test input name differs every iteration - TEST_TEXT_FILE=test-s3fs.txt-$RANDOM - TEST_DIR=testdir-$RANDOM - $t $key_prefix && rc=$? || rc=$? + TEST_TEXT_FILE="test-s3fs.txt-${RANDOM}" + TEST_DIR="testdir-${RANDOM}" + "${t}" "${key_prefix}" && rc=$? || rc=$? 
- if [[ $rc = 0 ]] ; then - report_pass $t + if [[ "${rc}" = 0 ]] ; then + report_pass "${t}" else - report_fail $t + report_fail "${t}" fi done - cd ${orig_dir} + cd "${orig_dir}" clean_run_dir for t in "${TEST_PASSED_LIST[@]}"; do - echo "PASS: $t" + echo "PASS: ${t}" done for t in "${TEST_FAILED_LIST[@]}"; do - echo "FAIL: $t" + echo "FAIL: ${t}" done - local passed=${#TEST_PASSED_LIST[@]} - local failed=${#TEST_FAILED_LIST[@]} + local passed=${#TEST_PASSED_LIST[@]} + local failed=${#TEST_FAILED_LIST[@]} - echo "SUMMARY for $0: $passed tests passed. $failed tests failed." + echo "SUMMARY for $0: ${passed} tests passed. ${failed} tests failed." - if [[ $failed != 0 ]]; then + if [[ "${failed}" != 0 ]]; then return 1 else return 0 @@ -269,7 +281,7 @@ function run_suite { } function get_ctime() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then stat -f "%c" "$1" else stat -c "%Z" "$1" @@ -277,7 +289,7 @@ function get_ctime() { } function get_mtime() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then stat -f "%m" "$1" else stat -c "%Y" "$1" @@ -285,7 +297,7 @@ function get_mtime() { } function get_atime() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then stat -f "%a" "$1" else stat -c "%X" "$1" @@ -293,7 +305,7 @@ function get_atime() { } function get_permissions() { - if [ `uname` = "Darwin" ]; then + if [ "$(uname)" = "Darwin" ]; then stat -f "%p" "$1" else stat -c "%a" "$1" @@ -301,7 +313,8 @@ function get_permissions() { } function check_content_type() { - local INFO_STR=`aws_cli s3api head-object --bucket ${TEST_BUCKET_1} --key $1` + local INFO_STR + INFO_STR=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "$1") if [[ "${INFO_STR}" != *"$2"* ]] then echo "moved file content-type is not as expected expected:$2 got:${INFO_STR}" @@ -310,8 +323,9 @@ function check_content_type() { } function get_disk_avail_size() { - local DISK_AVAIL_SIZE=`BLOCKSIZE=$((1024 * 1024)) df $1 | awk '{print 
$4}' | tail -n 1` - echo ${DISK_AVAIL_SIZE} + local DISK_AVAIL_SIZE + DISK_AVAIL_SIZE=$(BLOCKSIZE=$((1024 * 1024)) df "$1" | awk '{print $4}' | tail -n 1) + echo "${DISK_AVAIL_SIZE}" } function aws_cli() { @@ -319,12 +333,13 @@ function aws_cli() { if [ -n "${S3FS_PROFILE}" ]; then FLAGS="--profile ${S3FS_PROFILE}" fi - aws $* --endpoint-url "${S3_URL}" --ca-bundle /tmp/keystore.pem $FLAGS + # shellcheck disable=SC2086,SC2068 + aws $@ --endpoint-url "${S3_URL}" --ca-bundle /tmp/keystore.pem ${FLAGS} } function wait_for_port() { - local PORT=$1 - for i in $(seq 30); do + local PORT="$1" + for _ in $(seq 30); do if exec 3<>"/dev/tcp/127.0.0.1/${PORT}"; then exec 3<&- # Close for read @@ -337,12 +352,12 @@ function wait_for_port() { function make_random_string() { if [ -n "$1" ]; then - local END_POS=$1 + local END_POS="$1" else local END_POS=8 fi - ${BASE64_BIN} --wrap=0 < /dev/urandom | tr -d /+ | head -c ${END_POS} + "${BASE64_BIN}" --wrap=0 < /dev/urandom | tr -d /+ | head -c "${END_POS}" return 0 }