#!/bin/bash
#
# s3fs - FUSE-based file system backed by Amazon S3
#
# Copyright 2007-2008 Randy Rizun <rrizun@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#

set -o errexit
set -o pipefail

source test-utils.sh

function test_create_empty_file {
    describe "Testing creating an empty file ..."

    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"

    touch "${TEST_TEXT_FILE}"

    check_file_size "${TEST_TEXT_FILE}" 0

    aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${OBJECT_NAME}"

    rm_test_file
}

function test_append_file {
    describe "Testing append to file ..."
    local TEST_INPUT="echo ${TEST_TEXT} to ${TEST_TEXT_FILE}"

    # Write a small test file
    for x in $(seq 1 "${TEST_TEXT_FILE_LENGTH}"); do
        echo "${TEST_INPUT}"
    done > "${TEST_TEXT_FILE}"

    check_file_size "${TEST_TEXT_FILE}" $((TEST_TEXT_FILE_LENGTH * $((${#TEST_INPUT} + 1)) ))

    rm_test_file
}

function test_truncate_file {
    describe "Testing truncate file ..."
    # Write a small test file
    echo "${TEST_TEXT}" > "${TEST_TEXT_FILE}"

    # Truncate file to 0 length. This should trigger open(path, O_RDWR | O_TRUNC...)
    : > "${TEST_TEXT_FILE}"

    check_file_size "${TEST_TEXT_FILE}" 0

    rm_test_file
}

function test_truncate_upload {
    describe "Testing truncate file for uploading ..."

    # This file size uses multipart, mix upload when uploading.
    # We will test these cases.
    rm_test_file "${BIG_FILE}"

    "${TRUNCATE_BIN}" "${BIG_FILE}" -s "${BIG_FILE_LENGTH}"

    rm_test_file "${BIG_FILE}"
}

function test_truncate_empty_file {
    describe "Testing truncate empty file ..."
    # Write an empty test file
    touch "${TEST_TEXT_FILE}"

    # Truncate the file to 1024 length
    local t_size=1024
    "${TRUNCATE_BIN}" "${TEST_TEXT_FILE}" -s "${t_size}"

    check_file_size "${TEST_TEXT_FILE}" "${t_size}"

    rm_test_file
}

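# [NOTE]
# The next test shrinks a 64MiB random file to 32MiB + 64 bytes, both in a
# local temporary copy and through the mount, and compares the two results
# byte-for-byte.
#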
function test_truncate_shrink_file {
    describe "Testing truncate shrinking large binary file ..."

    local BIG_TRUNCATE_TEST_FILE="big-truncate-test.bin"
    local t_size=$((1024 * 1024 * 32 + 64))

    dd if=/dev/urandom of="${TEMP_DIR}/${BIG_TRUNCATE_TEST_FILE}" bs=1024 count=$((1024 * 64))
    cp "${TEMP_DIR}/${BIG_TRUNCATE_TEST_FILE}" "${BIG_TRUNCATE_TEST_FILE}"

    "${TRUNCATE_BIN}" "${TEMP_DIR}/${BIG_TRUNCATE_TEST_FILE}" -s "${t_size}"
    "${TRUNCATE_BIN}" "${BIG_TRUNCATE_TEST_FILE}" -s "${t_size}"

    if ! cmp "${TEMP_DIR}/${BIG_TRUNCATE_TEST_FILE}" "${BIG_TRUNCATE_TEST_FILE}"; then
        return 1
    fi

    rm -f "${TEMP_DIR}/${BIG_TRUNCATE_TEST_FILE}"
    rm_test_file "${BIG_TRUNCATE_TEST_FILE}"
}

function test_mv_file {
    describe "Testing mv file function ..."
    # if the rename file exists, delete it
    if [ -e "${ALT_TEST_TEXT_FILE}" ]
    then
        rm "${ALT_TEST_TEXT_FILE}"
    fi

    if [ -e "${ALT_TEST_TEXT_FILE}" ]
    then
        echo "Could not delete file ${ALT_TEST_TEXT_FILE}, it still exists"
        return 1
    fi

    # create the test file again
    mk_test_file

    # save file length
    local ALT_TEXT_LENGTH; ALT_TEXT_LENGTH=$(wc -c "${TEST_TEXT_FILE}" | awk '{print $1}')

    # rename the test file
    mv "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}"
    if [ ! -e "${ALT_TEST_TEXT_FILE}" ]
    then
        echo "Could not move file"
        return 1
    fi

    # check the renamed file content-type
    if [ -f "/etc/mime.types" ]
    then
        check_content_type "$1/${ALT_TEST_TEXT_FILE}" "text/plain"
    fi

    # Check the contents of the alt file
    local ALT_FILE_LENGTH; ALT_FILE_LENGTH=$(wc -c "${ALT_TEST_TEXT_FILE}" | awk '{print $1}')
    if [ "$ALT_FILE_LENGTH" -ne "$ALT_TEXT_LENGTH" ]
    then
        echo "moved file length is not as expected: expected $ALT_TEXT_LENGTH got $ALT_FILE_LENGTH"
        return 1
    fi

    # clean up
    rm_test_file "${ALT_TEST_TEXT_FILE}"
}

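# [NOTE]
# Rename a large generated file over an existing file of a slightly
# different size. With the BIG_FILE sizes used here, the rename presumably
# exercises the multipart (server-side) copy path as well.
#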
function test_mv_to_exist_file {
    describe "Testing mv file to exist file function ..."

    local BIG_MV_FILE_BLOCK_SIZE=$((BIG_FILE_BLOCK_SIZE + 1))

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}"
    ../../junk_data $((BIG_MV_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}-mv"

    mv "${BIG_FILE}" "${BIG_FILE}-mv"

    rm_test_file "${BIG_FILE}-mv"
}

function test_mv_empty_directory {
    describe "Testing mv directory function ..."
    if [ -e "${TEST_DIR}" ]; then
        echo "Unexpected, this file/directory exists: ${TEST_DIR}"
        return 1
    fi

    mk_test_dir

    mv "${TEST_DIR}" "${TEST_DIR}_rename"
    if [ ! -d "${TEST_DIR}_rename" ]; then
        echo "Directory ${TEST_DIR} was not renamed"
        return 1
    fi

    rmdir "${TEST_DIR}_rename"
    if [ -e "${TEST_DIR}_rename" ]; then
        echo "Could not remove the test directory, it still exists: ${TEST_DIR}_rename"
        return 1
    fi
}

function test_mv_nonempty_directory {
    describe "Testing mv directory function ..."
    if [ -e "${TEST_DIR}" ]; then
        echo "Unexpected, this file/directory exists: ${TEST_DIR}"
        return 1
    fi

    mk_test_dir

    touch "${TEST_DIR}"/file

    mv "${TEST_DIR}" "${TEST_DIR}_rename"
    if [ ! -d "${TEST_DIR}_rename" ]; then
        echo "Directory ${TEST_DIR} was not renamed"
        return 1
    fi

    rm -r "${TEST_DIR}_rename"
    if [ -e "${TEST_DIR}_rename" ]; then
        echo "Could not remove the test directory, it still exists: ${TEST_DIR}_rename"
        return 1
    fi
}

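# [NOTE]
# Exercise shell output redirections through the mount: ">" must truncate
# and overwrite the file, ">>" must append, and the resulting lines are
# verified afterwards.
#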
function test_redirects {
    describe "Testing redirects ..."

    mk_test_file "ABCDEF"

    local CONTENT; CONTENT=$(cat "${TEST_TEXT_FILE}")

    if [ "${CONTENT}" != "ABCDEF" ]; then
        echo "CONTENT read is unexpected, got ${CONTENT}, expected ABCDEF"
        return 1
    fi

    echo "XYZ" > "${TEST_TEXT_FILE}"

    CONTENT=$(cat "${TEST_TEXT_FILE}")

    if [ "${CONTENT}" != "XYZ" ]; then
        echo "CONTENT read is unexpected, got ${CONTENT}, expected XYZ"
        return 1
    fi

    echo "123456" >> "${TEST_TEXT_FILE}"

    local LINE1; LINE1=$("${SED_BIN}" -n '1,1p' "${TEST_TEXT_FILE}")
    local LINE2; LINE2=$("${SED_BIN}" -n '2,2p' "${TEST_TEXT_FILE}")

    if [ "${LINE1}" != "XYZ" ]; then
        echo "LINE1 was not as expected, got ${LINE1}, expected XYZ"
        return 1
    fi

    if [ "${LINE2}" != "123456" ]; then
        echo "LINE2 was not as expected, got ${LINE2}, expected 123456"
        return 1
    fi

    # clean up
    rm_test_file
}

function test_mkdir_rmdir {
    describe "Testing creation/removal of a directory ..."

    if [ -e "${TEST_DIR}" ]; then
        echo "Unexpected, this file/directory exists: ${TEST_DIR}"
        return 1
    fi

    mk_test_dir
    rm_test_dir
}

function test_chmod {
    describe "Testing chmod file function ..."

    # create the test file again
    mk_test_file

    local ORIGINAL_PERMISSIONS; ORIGINAL_PERMISSIONS=$(get_permissions "${TEST_TEXT_FILE}")

    chmod 777 "${TEST_TEXT_FILE}"

    # if they're the same, we have a problem.
    local CHANGED_PERMISSIONS; CHANGED_PERMISSIONS=$(get_permissions "${TEST_TEXT_FILE}")
    if [ "${CHANGED_PERMISSIONS}" = "${ORIGINAL_PERMISSIONS}" ]
    then
        echo "Could not modify ${TEST_TEXT_FILE} permissions"
        return 1
    fi

    # clean up
    rm_test_file
}

function test_chown {
    describe "Testing chown file function ..."

    # create the test file again
    mk_test_file

    local ORIGINAL_PERMISSIONS
    if [ "$(uname)" = "Darwin" ]; then
        ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" "${TEST_TEXT_FILE}")
    else
        ORIGINAL_PERMISSIONS=$(stat --format=%u:%g "${TEST_TEXT_FILE}")
    fi

    # [NOTE]
    # Prevents test interruptions due to permission errors, etc.
    # If the chown command fails, an error will occur with the
    # following judgment statement. So skip the chown command error.
    # '|| true' was added due to a problem with Travis CI, macOS and
    # the ensure_diskfree option.
    #
    chown 1000:1000 "${TEST_TEXT_FILE}" || true

    # if they're the same, we have a problem.
    local CHANGED_PERMISSIONS
    if [ "$(uname)" = "Darwin" ]; then
        CHANGED_PERMISSIONS=$(stat -f "%u:%g" "${TEST_TEXT_FILE}")
    else
        CHANGED_PERMISSIONS=$(stat --format=%u:%g "${TEST_TEXT_FILE}")
    fi
    if [ "${CHANGED_PERMISSIONS}" = "${ORIGINAL_PERMISSIONS}" ]
    then
        if [ "${ORIGINAL_PERMISSIONS}" = "1000:1000" ]
        then
            echo "Skipping the strict check because the original file ownership is already 1000:1000"
        else
            echo "Could not modify ${TEST_TEXT_FILE} ownership($ORIGINAL_PERMISSIONS to 1000:1000)"
            return 1
        fi
    fi

    # clean up
    rm_test_file
}

function test_list {
    describe "Testing list ..."
    mk_test_file
    mk_test_dir

    local file_list=(*)
    local file_cnt=${#file_list[@]}
    if [ "${file_cnt}" -ne 2 ]; then
        echo "Expected 2 files but got ${file_cnt}"
        return 1
    fi

    rm_test_file
    rm_test_dir
}

function test_remove_nonempty_directory {
    describe "Testing removing a non-empty directory ..."
    mk_test_dir
    touch "${TEST_DIR}/file"
    (
        set +o pipefail
        rmdir "${TEST_DIR}" 2>&1 | grep -q "Directory not empty"
    )
    rm "${TEST_DIR}/file"
    rm_test_dir
}

function test_external_directory_creation {
    describe "Test external directory creation ..."
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/directory/"${TEST_TEXT_FILE}"
    echo "data" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    ls directory >/dev/null 2>&1
    get_permissions directory | grep -q 750$
    ls directory
    cmp <(echo "data") directory/"${TEST_TEXT_FILE}"
    rm -f directory/"${TEST_TEXT_FILE}"
}

function test_external_modification {
    describe "Test external modification to an object ..."
    echo "old" > "${TEST_TEXT_FILE}"

    # [NOTE]
    # If the stat and file cache directory are enabled, an error will
    # occur if the unixtime(sec) value does not change.
    # If mtime(ctime/atime) when updating from the external program
    # (awscli) is the same unixtime value as immediately before, the
    # cache will be read out.
    # Therefore, we need to wait over 1 second here.
    #
    sleep 1

    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo "new new" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    cmp "${TEST_TEXT_FILE}" <(echo "new new")
    rm -f "${TEST_TEXT_FILE}"
}

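# [NOTE]
# An object created directly with the aws CLI may not be visible right away:
# unless s3fs was started with disable_noobj_cache, a cached negative lookup
# presumably hides it at first, so the test only expects the file to appear
# after a short wait.
#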
function test_external_creation {
    describe "Test external creation of an object ..."
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo "data" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    # shellcheck disable=SC2009
    if ! ps u -p "${S3FS_PID}" | grep -q disable_noobj_cache; then
        [ ! -e "${TEST_TEXT_FILE}" ]
    fi
    sleep 1
    [ -e "${TEST_TEXT_FILE}" ]
    rm -f "${TEST_TEXT_FILE}"
}

function test_read_external_object() {
    describe "create objects via aws CLI and read via s3fs ..."
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo "test" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    cmp "${TEST_TEXT_FILE}" <(echo "test")
    rm -f "${TEST_TEXT_FILE}"
}

function test_read_external_dir_object() {
    describe "create directory objects via aws CLI and read via s3fs ..."
    local SUB_DIR_NAME; SUB_DIR_NAME="subdir"
    local SUB_DIR_TEST_FILE; SUB_DIR_TEST_FILE="${SUB_DIR_NAME}/${TEST_TEXT_FILE}"
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${SUB_DIR_TEST_FILE}"

    echo "test" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"

    if stat "${SUB_DIR_NAME}" | grep -q '1969-12-31[[:space:]]23:59:59[.]000000000'; then
        echo "sub directory a/c/m time underflowed (-1)."
        return 1
    fi
    rm -rf "${SUB_DIR_NAME}"
}

function test_update_metadata_external_small_object() {
    describe "update meta to small file after created file by aws cli"

    # [NOTE]
    # Use filenames that are unique to this test to avoid being affected
    # by the noobj cache.
    #
    local TEST_FILE_EXT; TEST_FILE_EXT=$(make_random_string)
    local TEST_CHMOD_FILE="${TEST_TEXT_FILE}_chmod.${TEST_FILE_EXT}"
    local TEST_CHOWN_FILE="${TEST_TEXT_FILE}_chown.${TEST_FILE_EXT}"
    local TEST_UTIMENS_FILE="${TEST_TEXT_FILE}_utimens.${TEST_FILE_EXT}"
    local TEST_SETXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}"
    local TEST_RMXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}"

    local TEST_INPUT="TEST_STRING_IN_SMALL_FILE"

    #
    # chmod
    #
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHMOD_FILE}"
    echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    chmod +x "${TEST_CHMOD_FILE}"
    cmp "${TEST_CHMOD_FILE}" <(echo "${TEST_INPUT}")

    #
    # chown
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHOWN_FILE}"
    echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    chown "${UID}" "${TEST_CHOWN_FILE}"
    cmp "${TEST_CHOWN_FILE}" <(echo "${TEST_INPUT}")

    #
    # utimens
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_UTIMENS_FILE}"
    echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    touch "${TEST_UTIMENS_FILE}"
    cmp "${TEST_UTIMENS_FILE}" <(echo "${TEST_INPUT}")

    #
    # set xattr
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_SETXATTR_FILE}"
    echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    set_xattr key value "${TEST_SETXATTR_FILE}"
    cmp "${TEST_SETXATTR_FILE}" <(echo "${TEST_INPUT}")

    #
    # remove xattr
    #
    # "%7B%22key%22%3A%22dmFsdWU%3D%22%7D" = {"key":"value"}
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_RMXATTR_FILE}"
    echo "${TEST_INPUT}" | aws_cli s3 cp - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --metadata xattr=%7B%22key%22%3A%22dmFsdWU%3D%22%7D
    del_xattr key "${TEST_RMXATTR_FILE}"
    cmp "${TEST_RMXATTR_FILE}" <(echo "${TEST_INPUT}")

    rm -f "${TEST_CHMOD_FILE}"
    rm -f "${TEST_CHOWN_FILE}"
    rm -f "${TEST_UTIMENS_FILE}"
    rm -f "${TEST_SETXATTR_FILE}"
    rm -f "${TEST_RMXATTR_FILE}"
}

function test_update_metadata_external_large_object() {
    describe "update meta to large file after created file by aws cli"

    # [NOTE]
    # Use filenames that are unique to this test to avoid being affected
    # by the noobj cache.
    #
    local TEST_FILE_EXT; TEST_FILE_EXT=$(make_random_string)
    local TEST_CHMOD_FILE="${TEST_TEXT_FILE}_chmod.${TEST_FILE_EXT}"
    local TEST_CHOWN_FILE="${TEST_TEXT_FILE}_chown.${TEST_FILE_EXT}"
    local TEST_UTIMENS_FILE="${TEST_TEXT_FILE}_utimens.${TEST_FILE_EXT}"
    local TEST_SETXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}"
    local TEST_RMXATTR_FILE="${TEST_TEXT_FILE}_xattr.${TEST_FILE_EXT}"

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}"

    #
    # chmod
    #
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHMOD_FILE}"
    aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress
    chmod +x "${TEST_CHMOD_FILE}"
    cmp "${TEST_CHMOD_FILE}" "${TEMP_DIR}/${BIG_FILE}"

    #
    # chown
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_CHOWN_FILE}"
    aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress
    chown "${UID}" "${TEST_CHOWN_FILE}"
    cmp "${TEST_CHOWN_FILE}" "${TEMP_DIR}/${BIG_FILE}"

    #
    # utimens
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_UTIMENS_FILE}"
    aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress
    touch "${TEST_UTIMENS_FILE}"
    cmp "${TEST_UTIMENS_FILE}" "${TEMP_DIR}/${BIG_FILE}"

    #
    # set xattr
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_SETXATTR_FILE}"
    aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress
    set_xattr key value "${TEST_SETXATTR_FILE}"
    cmp "${TEST_SETXATTR_FILE}" "${TEMP_DIR}/${BIG_FILE}"

    #
    # remove xattr
    #
    # "%7B%22key%22%3A%22dmFsdWU%3D%22%7D" = {"key":"value"}
    #
    OBJECT_NAME=$(basename "${PWD}")/"${TEST_RMXATTR_FILE}"
    aws_cli s3 cp "${TEMP_DIR}/${BIG_FILE}" "s3://${TEST_BUCKET_1}/${OBJECT_NAME}" --no-progress --metadata xattr=%7B%22key%22%3A%22dmFsdWU%3D%22%7D
    del_xattr key "${TEST_RMXATTR_FILE}"
    cmp "${TEST_RMXATTR_FILE}" "${TEMP_DIR}/${BIG_FILE}"

    rm -f "${TEMP_DIR}/${BIG_FILE}"
    rm -f "${TEST_CHMOD_FILE}"
    rm -f "${TEST_CHOWN_FILE}"
    rm -f "${TEST_UTIMENS_FILE}"
    rm -f "${TEST_SETXATTR_FILE}"
    rm -f "${TEST_RMXATTR_FILE}"
}

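# [NOTE]
# The subshell's stdout is redirected to ${TEST_TEXT_FILE}, and the file is
# renamed while that file descriptor is still open; the data written ("foo")
# must therefore end up in the renamed file once the redirection is closed.
#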
function test_rename_before_close {
    describe "Testing rename before close ..."

    # shellcheck disable=SC2094
    (
        echo foo
        mv "${TEST_TEXT_FILE}" "${TEST_TEXT_FILE}.new"
    ) > "${TEST_TEXT_FILE}"

    if ! cmp <(echo "foo") "${TEST_TEXT_FILE}.new"; then
        echo "rename before close failed"
        return 1
    fi

    rm_test_file "${TEST_TEXT_FILE}.new"
    rm -f "${TEST_TEXT_FILE}"
}

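# [NOTE]
# BIG_FILE is sized as BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT, which is
# expected to exceed the multipart threshold, so writing it through the
# mount should take the multipart upload path. The copy kept in TEMP_DIR
# serves as the reference for the byte-for-byte comparison.
#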
function test_multipart_upload {
    describe "Testing multi-part upload ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}"
    dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}"

    # Verify contents of file
    echo "Comparing test file"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}"
    then
        return 1
    fi

    rm -f "${TEMP_DIR}/${BIG_FILE}"
    rm_test_file "${BIG_FILE}"
}

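# [NOTE]
# Renaming the large object forces a copy on the server side, which for
# objects above the multipart threshold should go through the multipart
# copy path; the content and the resulting content-type are then verified.
#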
function test_multipart_copy {
    describe "Testing multi-part copy ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}"
    dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}"
    mv "${BIG_FILE}" "${BIG_FILE}-copy"

    # Verify contents of file
    echo "Comparing test file"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}-copy"
    then
        return 1
    fi

    # check the renamed file content-type
    check_content_type "$1/${BIG_FILE}-copy" "application/octet-stream"

    rm -f "${TEMP_DIR}/${BIG_FILE}"
    rm_test_file "${BIG_FILE}-copy"
}

function test_multipart_mix {
    describe "Testing multi-part mix ..."

    if [ "$(uname)" = "Darwin" ]; then
        cat /dev/null > "${BIG_FILE}"
    fi
    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}"
    dd if="${TEMP_DIR}/${BIG_FILE}" of="${BIG_FILE}" bs="${BIG_FILE_BLOCK_SIZE}" count="${BIG_FILE_COUNT}"

    # (1) Edit the middle of an existing file
    #     modify directly (seek to the 7.5MB offset)
    #     In the case of nomultipart and nocopyapi this makes no difference,
    #     but the file is copied so that no cache is left behind.
    #
    cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix"
    cp "${BIG_FILE}" "${BIG_FILE}-mix"

    local MODIFY_START_BLOCK=$((15*1024*1024/2/4))
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc

    # Verify contents of file
    echo "Comparing test file (1)"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}-mix" "${BIG_FILE}-mix"
    then
        return 1
    fi

    # (2) Write to an area larger than the size of the existing file
    #     modify directly (past the end-of-file offset)
    #
    cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix"
    cp "${BIG_FILE}" "${BIG_FILE}-mix"

    local OVER_FILE_BLOCK_POS=$((26*1024*1024/4))
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${OVER_FILE_BLOCK_POS}" conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${OVER_FILE_BLOCK_POS}" conv=notrunc

    # Verify contents of file
    echo "Comparing test file (2)"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}-mix" "${BIG_FILE}-mix"
    then
        return 1
    fi

    # (3) Writing from the 0th byte
    #
    cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix"
    cp "${BIG_FILE}" "${BIG_FILE}-mix"

    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek=0 conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek=0 conv=notrunc

    # Verify contents of file
    echo "Comparing test file (3)"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}-mix" "${BIG_FILE}-mix"
    then
        return 1
    fi

    # (4) Write to the area within 5MB from the top
    #     modify directly (seek to the 1MB offset)
    #
    cp "${TEMP_DIR}/${BIG_FILE}" "${TEMP_DIR}/${BIG_FILE}-mix"
    cp "${BIG_FILE}" "${BIG_FILE}-mix"

    local MODIFY_START_BLOCK=$((1*1024*1024))
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}-mix" bs=4 count=4 seek="${MODIFY_START_BLOCK}" conv=notrunc

    # Verify contents of file
    echo "Comparing test file (4)"
    if ! cmp "${TEMP_DIR}/${BIG_FILE}-mix" "${BIG_FILE}-mix"
    then
        return 1
    fi

    rm -f "${TEMP_DIR}/${BIG_FILE}"
    rm -f "${TEMP_DIR}/${BIG_FILE}-mix"
    rm_test_file "${BIG_FILE}"
    rm_test_file "${BIG_FILE}-mix"
}

function test_utimens_during_multipart {
    describe "Testing utimens calling during multipart copy ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEMP_DIR}/${BIG_FILE}"

    cp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}"

    # The second copy, with the "-p" option, calls utimens during the multipart upload.
    cp -p "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}"

    rm -f "${TEMP_DIR}/${BIG_FILE}"
    rm_test_file "${BIG_FILE}"
}

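# [NOTE]
# Each ls below probes a non-existent name containing a character that is
# special to the shell or to URL encoding ('?', '*', '~', 'µ'); the expected
# result is a plain "No such file or directory" rather than a hang or an
# encoding error.
#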
function test_special_characters {
    describe "Testing special characters ..."

    (
        set +o pipefail
        # shellcheck disable=SC2010
        ls 'special' 2>&1 | grep -q 'No such file or directory'
        # shellcheck disable=SC2010
        ls 'special?' 2>&1 | grep -q 'No such file or directory'
        # shellcheck disable=SC2010
        ls 'special*' 2>&1 | grep -q 'No such file or directory'
        # shellcheck disable=SC2010
        ls 'special~' 2>&1 | grep -q 'No such file or directory'
        # shellcheck disable=SC2010
        ls 'specialµ' 2>&1 | grep -q 'No such file or directory'
    )

    mkdir "TOYOTA TRUCK 8.2.2"
    rm -rf "TOYOTA TRUCK 8.2.2"
}

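# [NOTE]
# Hard links are not supported by s3fs, so the ln call is expected to fail
# with 'Operation not supported'.
#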
function test_hardlink {
    describe "Testing hardlinks ..."

    rm -f "${TEST_TEXT_FILE}"
    rm -f "${ALT_TEST_TEXT_FILE}"
    echo foo > "${TEST_TEXT_FILE}"

    (
        set +o pipefail
        ln "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}" 2>&1 | grep -q 'Operation not supported'
    )

    rm_test_file
    rm_test_file "${ALT_TEST_TEXT_FILE}"
}

function test_mknod {
    describe "Testing mknod system call function ..."

    local MKNOD_TEST_FILE_BASENAME="mknod_testfile"

    rm -f "${MKNOD_TEST_FILE_BASENAME}"*

    ../../mknod_test "${MKNOD_TEST_FILE_BASENAME}"
}

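# [NOTE]
# Create a symlink, verify the content through it, then delete the target
# and check that the dangling link is still a symlink but no longer
# resolves to a regular file.
#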
function test_symlink {
    describe "Testing symlinks ..."

    rm -f "${TEST_TEXT_FILE}"
    rm -f "${ALT_TEST_TEXT_FILE}"
    echo foo > "${TEST_TEXT_FILE}"

    ln -s "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}"
    cmp "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}"

    rm -f "${TEST_TEXT_FILE}"

    [ -L "${ALT_TEST_TEXT_FILE}" ]
    [ ! -f "${ALT_TEST_TEXT_FILE}" ]

    rm -f "${ALT_TEST_TEXT_FILE}"
}

function test_extended_attributes {
    describe "Testing extended attributes ..."

    rm -f "${TEST_TEXT_FILE}"
    touch "${TEST_TEXT_FILE}"

    # set value
    set_xattr key1 value1 "${TEST_TEXT_FILE}"
    get_xattr key1 "${TEST_TEXT_FILE}" | grep -q '^value1$'

    # append value
    set_xattr key2 value2 "${TEST_TEXT_FILE}"
    get_xattr key1 "${TEST_TEXT_FILE}" | grep -q '^value1$'
    get_xattr key2 "${TEST_TEXT_FILE}" | grep -q '^value2$'

    # remove value
    del_xattr key1 "${TEST_TEXT_FILE}"
    get_xattr key1 "${TEST_TEXT_FILE}" && return 1
    get_xattr key2 "${TEST_TEXT_FILE}" | grep -q '^value2$'

    rm_test_file
}

function test_mtime_file {
    describe "Testing mtime preservation function ..."

    # if the rename file exists, delete it
    if [ -e "${ALT_TEST_TEXT_FILE}" ] || [ -L "${ALT_TEST_TEXT_FILE}" ]
    then
        rm "${ALT_TEST_TEXT_FILE}"
    fi

    if [ -e "${ALT_TEST_TEXT_FILE}" ]
    then
        echo "Could not delete file ${ALT_TEST_TEXT_FILE}, it still exists"
        return 1
    fi

    # create the test file again
    mk_test_file
    sleep 1 # allow for some time to pass to compare the timestamps between test & alt

    # copy the test file with preserve mode
    cp -p "${TEST_TEXT_FILE}" "${ALT_TEST_TEXT_FILE}"
    local testmtime; testmtime=$(get_mtime "${TEST_TEXT_FILE}")
    local altmtime; altmtime=$(get_mtime "${ALT_TEST_TEXT_FILE}")
    if [ "$testmtime" -ne "$altmtime" ]
    then
        echo "File times do not match: $testmtime != $altmtime"
        return 1
    fi

    rm_test_file
    rm_test_file "${ALT_TEST_TEXT_FILE}"
}

# [NOTE]
# If the file system is mounted with the relatime or noatime option, the
# "touch -a" command may not update the atime.
# In ubuntu:xenial, atime was updated even when relatime was in effect;
# however, it was not updated in bionic/focal.
# We could probably update atime by explicitly specifying the strictatime
# option and running the "touch -a" command, but the strictatime option
# cannot be set here.
# Therefore, if the relatime option is set, the test with the "touch -a"
# command is bypassed.
# We cannot say for sure whether the missing atime update is caused by
# these options.
# However, if atime has not been updated, the s3fs_utimens entry point
# will not be called from the FUSE library. We added this bypass because
# the test became unstable.
#
function test_update_time_chmod() {
    describe "Testing update time function chmod..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # chmod -> update only ctime
    #
    chmod +x "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "chmod expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi
    rm_test_file
}

function test_update_time_chown() {
    describe "Testing update time function chown..."

    #
    # chown -> update only ctime
    #
    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    chown $UID "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "chown expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi
    rm_test_file
}

function test_update_time_xattr() {
    describe "Testing update time function set_xattr..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # set_xattr -> update only ctime
    #
    set_xattr key value "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "set_xattr expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi
    rm_test_file
}

function test_update_time_touch() {
    describe "Testing update time function touch..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # touch -> update ctime/atime/mtime
    #
    touch "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then
        echo "touch expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime, atime: $base_atime != $atime"
        return 1
    fi
    rm_test_file
}

function test_update_time_touch_a() {
    describe "Testing update time function touch -a..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # "touch -a" -> update ctime/atime, not update mtime
    #
    touch -a "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "touch with -a option expected updated ctime: $base_ctime != $ctime, atime: $base_atime != $atime and same mtime: $base_mtime == $mtime"
        return 1
    fi
    rm_test_file
}

function test_update_time_append() {
    describe "Testing update time function append..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # append -> update ctime/mtime, not update atime
    #
    echo foo >> "${TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then
        echo "append expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime and same atime: $base_atime == $atime"
        return 1
    fi
    rm_test_file
}

function test_update_time_cp_p() {
    describe "Testing update time function cp -p..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # cp -p -> update ctime, not update atime/mtime
    #
    local TIME_TEST_TEXT_FILE=test-s3fs-time.txt
    cp -p "${TEST_TEXT_FILE}" "${TIME_TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TIME_TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TIME_TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TIME_TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "cp with -p option expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi
}

function test_update_time_mv() {
    describe "Testing update time function mv..."

    local t0=1000000000 # 9 September 2001
    local OBJECT_NAME; OBJECT_NAME=$(basename "${PWD}")/"${TEST_TEXT_FILE}"
    echo data | aws_cli s3 cp --metadata="atime=${t0},ctime=${t0},mtime=${t0}" - "s3://${TEST_BUCKET_1}/${OBJECT_NAME}"
    local base_atime; base_atime=$(get_atime "${TEST_TEXT_FILE}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_TEXT_FILE}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_TEXT_FILE}")

    #
    # mv -> update ctime, not update atime/mtime
    #
    local TIME2_TEST_TEXT_FILE=test-s3fs-time2.txt
    mv "${TEST_TEXT_FILE}" "${TIME2_TEST_TEXT_FILE}"
    local atime; atime=$(get_atime "${TIME2_TEST_TEXT_FILE}")
    local ctime; ctime=$(get_ctime "${TIME2_TEST_TEXT_FILE}")
    local mtime; mtime=$(get_mtime "${TIME2_TEST_TEXT_FILE}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "mv expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi

    rm_test_file "${TIME_TEST_TEXT_FILE}"
    rm_test_file "${TIME2_TEST_TEXT_FILE}"
}

# [NOTE]
# See the description above the test_update_time_* functions for notes
# about the "touch -a" command and atime.
#
function test_update_directory_time_chmod() {
|
|
|
|
describe "Testing update time for directory mv..."
|
2020-10-03 02:14:23 +00:00
|
|
|
|
|
|
|
#
|
|
|
|
# create the directory and sub-directory and a file in directory
|
|
|
|
#
|
2022-01-09 04:03:36 +00:00
|
|
|
local t0=1000000000 # 9 September 2001
|
2022-01-15 17:08:46 +00:00
|
|
|
local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}"
|
|
|
|
aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/"
|
2020-10-03 02:14:23 +00:00
|
|
|
|
2022-01-15 17:08:46 +00:00
|
|
|
local base_atime; base_atime=$(get_atime "${TEST_DIR}")
|
|
|
|
local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
|
|
|
|
local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")
|
2020-10-03 02:14:23 +00:00
|
|
|
|
|
|
|
#
|
|
|
|
# chmod -> update only ctime
|
|
|
|
#
|
2022-01-15 17:08:46 +00:00
|
|
|
chmod 0777 "${TEST_DIR}"
|
|
|
|
local atime; atime=$(get_atime "${TEST_DIR}")
|
|
|
|
local ctime; ctime=$(get_ctime "${TEST_DIR}")
|
|
|
|
local mtime; mtime=$(get_mtime "${TEST_DIR}")
|
|
|
|
if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
|
2020-10-03 02:14:23 +00:00
|
|
|
echo "chmod expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
|
|
|
|
return 1
|
|
|
|
fi
|
2019-01-07 01:51:42 +00:00
|
|
|
|
2022-01-15 17:08:46 +00:00
|
|
|
rm -rf "${TEST_DIR}"
|
2021-11-03 23:16:40 +00:00
|
|
|
}

function test_update_directory_time_chown {
    describe "Testing update time for directory chown..."

    local t0=1000000000 # 9 September 2001
    local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}"
    aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/"

    local base_atime; base_atime=$(get_atime "${TEST_DIR}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")

    #
    # chown -> update only ctime
    #
    chown "${UID}" "${TEST_DIR}"
    local atime; atime=$(get_atime "${TEST_DIR}")
    local ctime; ctime=$(get_ctime "${TEST_DIR}")
    local mtime; mtime=$(get_mtime "${TEST_DIR}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "chown expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi

    rm -rf "${TEST_DIR}"
}

function test_update_directory_time_set_xattr {
    describe "Testing update time for directory set_xattr..."

    local t0=1000000000 # 9 September 2001
    local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}"
    aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/"

    local base_atime; base_atime=$(get_atime "${TEST_DIR}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")

    #
    # set_xattr -> update only ctime
    #
    set_xattr key value "${TEST_DIR}"
    local atime; atime=$(get_atime "${TEST_DIR}")
    local ctime; ctime=$(get_ctime "${TEST_DIR}")
    local mtime; mtime=$(get_mtime "${TEST_DIR}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "set_xattr expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi

    rm -rf "${TEST_DIR}"
}

function test_update_directory_time_touch {
    describe "Testing update time for directory touch..."

    local t0=1000000000 # 9 September 2001
    local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}"
    aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/"

    local base_atime; base_atime=$(get_atime "${TEST_DIR}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")

    #
    # touch -> update ctime/atime/mtime
    #
    touch "${TEST_DIR}"
    local atime; atime=$(get_atime "${TEST_DIR}")
    local ctime; ctime=$(get_ctime "${TEST_DIR}")
    local mtime; mtime=$(get_mtime "${TEST_DIR}")
    if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -eq "${mtime}" ]; then
        echo "touch expected updated ctime: $base_ctime != $ctime, mtime: $base_mtime != $mtime, atime: $base_atime != $atime"
        return 1
    fi

    rm -rf "${TEST_DIR}"
}

function test_update_directory_time_touch_a {
    describe "Testing update time for directory touch -a..."

    local t0=1000000000 # 9 September 2001
    local DIRECTORY_NAME; DIRECTORY_NAME=$(basename "${PWD}")/"${TEST_DIR}"
    aws_cli s3api put-object --content-type="application/x-directory" --metadata="atime=${t0},ctime=${t0},mtime=${t0}" --bucket "${TEST_BUCKET_1}" --key "$DIRECTORY_NAME/"

    local base_atime; base_atime=$(get_atime "${TEST_DIR}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")

    #
    # "touch -a" -> update ctime/atime, not update mtime
    #
    touch -a "${TEST_DIR}"
    local atime; atime=$(get_atime "${TEST_DIR}")
    local ctime; ctime=$(get_ctime "${TEST_DIR}")
    local mtime; mtime=$(get_mtime "${TEST_DIR}")
    if [ "${base_atime}" -eq "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "touch with -a option expected updated ctime: $base_ctime != $ctime, atime: $base_atime != $atime and same mtime: $base_mtime == $mtime"
        return 1
    fi

    rm -rf "${TEST_DIR}"
}

function test_update_directory_time_subdir() {
    describe "Testing update time for directory subdirectory..."

    local TIME_TEST_SUBDIR="${TEST_DIR}/testsubdir"
    local TIME_TEST_FILE_INDIR="${TEST_DIR}/testfile"
    mk_test_dir
    mkdir "${TIME_TEST_SUBDIR}"
    touch "${TIME_TEST_FILE_INDIR}"

    # TODO: remove sleep after improving AWS CLI speed
    sleep 1

    local base_atime; base_atime=$(get_atime "${TEST_DIR}")
    local base_ctime; base_ctime=$(get_ctime "${TEST_DIR}")
    local base_mtime; base_mtime=$(get_mtime "${TEST_DIR}")
    local subdir_atime; subdir_atime=$(get_atime "${TIME_TEST_SUBDIR}")
    local subdir_ctime; subdir_ctime=$(get_ctime "${TIME_TEST_SUBDIR}")
    local subdir_mtime; subdir_mtime=$(get_mtime "${TIME_TEST_SUBDIR}")
    local subfile_atime; subfile_atime=$(get_atime "${TIME_TEST_FILE_INDIR}")
    local subfile_ctime; subfile_ctime=$(get_ctime "${TIME_TEST_FILE_INDIR}")
    local subfile_mtime; subfile_mtime=$(get_mtime "${TIME_TEST_FILE_INDIR}")

    #
    # mv -> update ctime, not update atime/mtime for target directory
    #       no update at all for the sub-directory and the file in it
    #
    local TIME_TEST_DIR=timetestdir
    local TIME2_TEST_SUBDIR="${TIME_TEST_DIR}/testsubdir"
    local TIME2_TEST_FILE_INDIR="${TIME_TEST_DIR}/testfile"
    mv "${TEST_DIR}" "${TIME_TEST_DIR}"
    local atime; atime=$(get_atime "${TIME_TEST_DIR}")
    local ctime; ctime=$(get_ctime "${TIME_TEST_DIR}")
    local mtime; mtime=$(get_mtime "${TIME_TEST_DIR}")
    if [ "${base_atime}" -ne "${atime}" ] || [ "${base_ctime}" -eq "${ctime}" ] || [ "${base_mtime}" -ne "${mtime}" ]; then
        echo "mv expected updated ctime: $base_ctime != $ctime and same mtime: $base_mtime == $mtime, atime: $base_atime == $atime"
        return 1
    fi
    atime=$(get_atime "${TIME2_TEST_SUBDIR}")
    ctime=$(get_ctime "${TIME2_TEST_SUBDIR}")
    mtime=$(get_mtime "${TIME2_TEST_SUBDIR}")
    if [ "${subdir_atime}" -ne "${atime}" ] || [ "${subdir_ctime}" -ne "${ctime}" ] || [ "${subdir_mtime}" -ne "${mtime}" ]; then
        echo "mv for sub-directory expected same ctime: $subdir_ctime == $ctime, mtime: $subdir_mtime == $mtime, atime: $subdir_atime == $atime"
        return 1
    fi
    atime=$(get_atime "${TIME2_TEST_FILE_INDIR}")
    ctime=$(get_ctime "${TIME2_TEST_FILE_INDIR}")
    mtime=$(get_mtime "${TIME2_TEST_FILE_INDIR}")
    if [ "${subfile_atime}" -ne "${atime}" ] || [ "${subfile_ctime}" -ne "${ctime}" ] || [ "${subfile_mtime}" -ne "${mtime}" ]; then
        echo "mv for a file in directory expected same ctime: $subfile_ctime == $ctime, mtime: $subfile_mtime == $mtime, atime: $subfile_atime == $atime"
        return 1
    fi

    rm -rf "${TIME_TEST_SUBDIR}"
    rm -rf "${TIME_TEST_DIR}"
    rm -rf "${TEST_DIR}"
}

# [NOTE]
# This test changes the file mode while creating/editing a new file, and
# finally closes it. It uses the sed command because this is exactly what
# happens with sed's in-place mode. (Reproducing it with a plain C function
# or shell script would not give the same sequence of operations as sed,
# so sed is used.)
#
function test_update_chmod_opened_file() {
    describe "Testing create, modify the file by sed in place mode"

    # test file
    local BEFORE_STRING_DATA; BEFORE_STRING_DATA="sed in place test : BEFORE DATA"
    local AFTER_STRING_DATA; AFTER_STRING_DATA="sed in place test : AFTER DATA"
    echo "${BEFORE_STRING_DATA}" > "${TEST_TEXT_FILE}"

    # sed in place
    sed -i -e 's/BEFORE DATA/AFTER DATA/g' "${TEST_TEXT_FILE}"

    # compare result
    local RESULT_STRING; RESULT_STRING=$(cat "${TEST_TEXT_FILE}")

    if [ -z "${RESULT_STRING}" ] || [ "${RESULT_STRING}" != "${AFTER_STRING_DATA}" ]; then
        echo "the file conversion by sed in place command failed."
        return 1
    fi

    # clean up
    rm_test_file "${TEST_TEXT_FILE}"
}
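
# [NOTE]
# What "sed -i" does internally is roughly the following: write the edited
# data to a temporary file, adjust its mode while the descriptor is still
# open, then rename it over the original. The sketch below is illustrative
# only (GNU userland assumed for "chmod --reference") and shows the shell
# equivalent of the code path the test above is meant to exercise in s3fs.
function example_sed_inplace_sketch {
    local SRC="$1"
    local TMP="${SRC}.tmp.$$"
    exec 3>"${TMP}"                                  # create and keep the new file open
    sed -e 's/BEFORE DATA/AFTER DATA/g' "${SRC}" >&3 # write the edited content
    chmod --reference="${SRC}" "${TMP}"              # change the mode while it is still opened
    exec 3>&-                                        # close
    mv "${TMP}" "${SRC}"                             # replace the original
}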

function test_rm_rf_dir {
    describe "Test that rm -rf will remove directory with contents ..."

    # Create a dir with some files and directories
    mkdir dir1
    mkdir dir1/dir2
    touch dir1/file1
    touch dir1/dir2/file2

    # Remove the dir with recursive rm
    rm -rf dir1

    if [ -e dir1 ]; then
        echo "rm -rf did not remove $PWD/dir1"
        return 1
    fi
}

function test_copy_file {
    describe "Test simple copy ..."

    dd if=/dev/urandom of=/tmp/simple_file bs=1024 count=1
    cp /tmp/simple_file copied_simple_file
    cmp /tmp/simple_file copied_simple_file

    rm_test_file /tmp/simple_file
    rm_test_file copied_simple_file
}

function test_write_after_seek_ahead {
    describe "Test writes succeed after a seek ahead ..."

    dd if=/dev/zero of=testfile seek=1 count=1 bs=1024
    rm_test_file testfile
}

function test_overwrite_existing_file_range {
    describe "Test overwrite range succeeds ..."

    dd if=<(seq 1000) of="${TEST_TEXT_FILE}"
    dd if=/dev/zero of="${TEST_TEXT_FILE}" seek=1 count=1 bs=1024 conv=notrunc
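    # The dd above overwrites bytes 1024-2047 with zeros (seek=1, bs=1024,
    # count=1; conv=notrunc keeps the rest of the file intact), so the
    # expected content rebuilt below is: the first 1024 bytes of seq 1000,
    # then 1024 zero bytes, then the remainder of seq 1000 from byte 2049 on.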
    cmp "${TEST_TEXT_FILE}" <(
        seq 1000 | head -c 1024
        dd if=/dev/zero count=1 bs=1024
        seq 1000 | tail -c +2049
    )

    rm_test_file
}

function test_concurrent_directory_updates {
    describe "Test concurrent updates to a directory ..."

    for i in $(seq 5); do
        echo foo > "${i}"
    done
    for _ in $(seq 10); do
        for i in $(seq 5); do
            local file
            # shellcheck disable=SC2012,SC2046
            file=$(ls $(seq 5) | "${SED_BIN}" -n "$((RANDOM % 5 + 1))p")
            cat "${file}" >/dev/null || true
            rm -f "${file}"
            echo "foo" > "${file}" || true
        done &
    done
    wait

    # shellcheck disable=SC2046
    rm -f $(seq 5)
}

function test_concurrent_reads {
    describe "Test concurrent reads from a file ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}"
    for _ in $(seq 10); do
        dd if="${TEST_TEXT_FILE}" of=/dev/null seek=$((RANDOM % BIG_FILE_LENGTH)) count=16 bs=1024 &
    done
    wait
    rm_test_file
}

function test_concurrent_writes {
    describe "Test concurrent writes to a file ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${TEST_TEXT_FILE}"
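    # Each dd below rewrites a 16KiB region at a random block offset;
    # conv=notrunc keeps dd from truncating the file after the region it
    # writes, so the writes only overwrite data in place.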
    for _ in $(seq 10); do
        dd if=/dev/zero of="${TEST_TEXT_FILE}" seek=$((RANDOM % BIG_FILE_LENGTH)) count=16 bs=1024 conv=notrunc &
    done
    wait
    rm_test_file
}

function test_open_second_fd {
    describe "read from an open fd ..."

    rm_test_file second_fd_file

    local RESULT
    # shellcheck disable=SC2094
    RESULT=$( (echo foo ; wc -c < second_fd_file >&2) 2>& 1>second_fd_file)
    if [ "${RESULT}" -ne 4 ]; then
        echo "size mismatch, expected: 4, was: ${RESULT}"
        return 1
    fi
    rm_test_file second_fd_file
}

function test_write_multiple_offsets {
    describe "test writing to multiple offsets ..."
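    # write_multiblock takes one -p "<offset>:<size>" pair per write, as the
    # layout comments in test_write_data_with_skip further below spell out.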
    ../../write_multiblock -f "${TEST_TEXT_FILE}" -p "1024:1" -p "$((16 * 1024 * 1024)):1" -p "$((18 * 1024 * 1024)):1"
    rm_test_file "${TEST_TEXT_FILE}"
}

function test_write_multiple_offsets_backwards {
    describe "test writing to multiple offsets backwards ..."

    ../../write_multiblock -f "${TEST_TEXT_FILE}" -p "$((20 * 1024 * 1024 + 1)):1" -p "$((10 * 1024 * 1024)):1"
    rm_test_file "${TEST_TEXT_FILE}"
}

function test_clean_up_cache() {
    describe "Test clean up cache ..."

    local dir="many_files"
    local count=25
    mkdir -p "${dir}"

    for x in $(seq "${count}"); do
        ../../junk_data 10485760 > "${dir}"/file-"${x}"
    done

    local file_list=("${dir}"/*);
    local file_cnt="${#file_list[@]}"
    if [ "${file_cnt}" != "${count}" ]; then
        echo "Expected $count files but got ${file_cnt}"
        rm -rf "${dir}"
        return 1
    fi
    local CACHE_DISK_AVAIL_SIZE; CACHE_DISK_AVAIL_SIZE=$(get_disk_avail_size "${CACHE_DIR}")
    if [ "${CACHE_DISK_AVAIL_SIZE}" -lt "${ENSURE_DISKFREE_SIZE}" ]; then
        echo "Cache disk avail size:${CACHE_DISK_AVAIL_SIZE} less than ensure_diskfree size:${ENSURE_DISKFREE_SIZE}"
        rm -rf "${dir}"
        return 1
    fi
    rm -rf "${dir}"
}
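
# [NOTE]
# get_disk_avail_size comes from test-utils.sh and is expected to report the
# free space of the given directory in the same unit as ENSURE_DISKFREE_SIZE.
# A df-based sketch of such a helper (an assumption, not the real one):
function example_disk_avail_kb_sketch {
    # column 4 of "df -k" is the available space in KiB on both Linux and macOS
    df -k "$1" | tail -1 | awk '{print $4}'
}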

function test_content_type() {
    describe "Test Content-Type detection ..."

    local DIR_NAME; DIR_NAME=$(basename "${PWD}")

    touch "test.txt"
    local CONTENT_TYPE; CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.txt" | grep "ContentType")
    if ! echo "${CONTENT_TYPE}" | grep -q "text/plain"; then
        echo "Unexpected Content-Type: ${CONTENT_TYPE}"
        return 1;
    fi

    touch "test.jpg"
    CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.jpg" | grep "ContentType")
    if ! echo "${CONTENT_TYPE}" | grep -q "image/jpeg"; then
        echo "Unexpected Content-Type: ${CONTENT_TYPE}"
        return 1;
    fi

    touch "test.bin"
    CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.bin" | grep "ContentType")
    if ! echo "${CONTENT_TYPE}" | grep -q "application/octet-stream"; then
        echo "Unexpected Content-Type: ${CONTENT_TYPE}"
        return 1;
    fi

    mkdir "test.dir"
    CONTENT_TYPE=$(aws_cli s3api head-object --bucket "${TEST_BUCKET_1}" --key "${DIR_NAME}/test.dir/" | grep "ContentType")
    if ! echo "${CONTENT_TYPE}" | grep -q "application/x-directory"; then
        echo "Unexpected Content-Type: ${CONTENT_TYPE}"
        return 1;
    fi

    rm -f test.txt
    rm -f test.jpg
    rm -f test.bin
    rm -rf test.dir
}
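
# [NOTE]
# "aws s3api head-object" prints JSON, so the greps in test_content_type
# above typically match a line of the form:
#     "ContentType": "text/plain",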

# create more files than -o max_stat_cache_size
function test_truncate_cache() {
    describe "Test make cache files over max cache file size ..."

    for dir in $(seq 2); do
        mkdir "${dir}"
        for file in $(seq 75); do
            touch "${dir}/${file}"
        done
        ls "${dir}"
    done

    # shellcheck disable=SC2046
    rm -rf $(seq 2)
}

function test_cache_file_stat() {
    describe "Test cache file stat ..."

    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}"

    #
    # The first argument of this function is the "testrun-<random>" directory name.
    #
    local CACHE_TESTRUN_DIR=$1

    #
    # get cache file inode number
    #
    local CACHE_FILE_INODE
    # shellcheck disable=SC2012
    CACHE_FILE_INODE=$(ls -i "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null | awk '{print $1}')
    if [ -z "${CACHE_FILE_INODE}" ]; then
        echo "Cache file not found or failed to get its inode: ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}"
        return 1;
    fi

    #
    # get lines from cache stat file
    #
    local CACHE_FILE_STAT_LINE_1; CACHE_FILE_STAT_LINE_1=$("${SED_BIN}" -n 1p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}")
    local CACHE_FILE_STAT_LINE_2; CACHE_FILE_STAT_LINE_2=$("${SED_BIN}" -n 2p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}")
    if [ -z "${CACHE_FILE_STAT_LINE_1}" ] || [ -z "${CACHE_FILE_STAT_LINE_2}" ]; then
        echo "could not get first or second line from cache file stat: ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}"
        return 1;
    fi

    #
    # compare
    #
    if [ "${CACHE_FILE_STAT_LINE_1}" != "${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}" ]; then
        echo "first line(cache file stat) is different: \"${CACHE_FILE_STAT_LINE_1}\" != \"${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}\""
        return 1;
    fi
    if [ "${CACHE_FILE_STAT_LINE_2}" != "0:${BIG_FILE_LENGTH}:1:0" ]; then
        echo "last line(cache file stat) is different: \"${CACHE_FILE_STAT_LINE_2}\" != \"0:${BIG_FILE_LENGTH}:1:0\""
        return 1;
    fi

    #
    # remove cache files directly
    #
    rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}"
    rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}"

    #
    # write a byte into the middle (not on a boundary) of the file
    #
    local CHECK_UPLOAD_OFFSET=$((10 * 1024 * 1024 + 17))
    dd if=/dev/urandom of="${BIG_FILE}" bs=1 count=1 seek="${CHECK_UPLOAD_OFFSET}" conv=notrunc

    #
    # get cache file inode number
    #
    # shellcheck disable=SC2012
    CACHE_FILE_INODE=$(ls -i "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null | awk '{print $1}')
    if [ -z "${CACHE_FILE_INODE}" ]; then
        echo "Cache file not found or failed to get its inode: ${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${BIG_FILE}"
        return 1;
    fi

    #
    # get lines from cache stat file
    #
    CACHE_FILE_STAT_LINE_1=$("${SED_BIN}" -n 1p "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}")
    local CACHE_FILE_STAT_LINE_E; CACHE_FILE_STAT_LINE_E=$(tail -1 "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}" 2>/dev/null)
    if [ -z "${CACHE_FILE_STAT_LINE_1}" ] || [ -z "${CACHE_FILE_STAT_LINE_E}" ]; then
        echo "could not get first or end line from cache file stat: ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${BIG_FILE}"
        return 1;
    fi

    #
    # check the first line and the file length derived from the last line
    #
    # Ideally all stat lines would be checked, but some values differ from
    # one environment to another, so instead the cache file size is
    # calculated from the last line and compared.
    #
    local CACHE_LAST_OFFSET; CACHE_LAST_OFFSET=$(echo "${CACHE_FILE_STAT_LINE_E}" | cut -d ":" -f1)
    local CACHE_LAST_SIZE; CACHE_LAST_SIZE=$(echo "${CACHE_FILE_STAT_LINE_E}" | cut -d ":" -f2)
    local CACHE_TOTAL_SIZE=$((CACHE_LAST_OFFSET + CACHE_LAST_SIZE))

    if [ "${CACHE_FILE_STAT_LINE_1}" != "${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}" ]; then
        echo "first line(cache file stat) is different: \"${CACHE_FILE_STAT_LINE_1}\" != \"${CACHE_FILE_INODE}:${BIG_FILE_LENGTH}\""
        return 1;
    fi
    if [ "${BIG_FILE_LENGTH}" -ne "${CACHE_TOTAL_SIZE}" ]; then
        echo "the file size indicated by the cache stat file is different: \"${BIG_FILE_LENGTH}\" != \"${CACHE_TOTAL_SIZE}\""
        return 1;
    fi

    rm_test_file "${BIG_FILE}"
}
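
# [NOTE]
# Summarizing what test_cache_file_stat above checks about the stat file under
# ${CACHE_DIR}/.<bucket>.stat/: the first line is "<inode>:<file size>" for the
# cached object, and each following line starts with "<offset>:<size>", so the
# last line's offset plus size equals the file size. Right after the file is
# created the test therefore expects something like:
#     <inode>:<BIG_FILE_LENGTH>
#     0:<BIG_FILE_LENGTH>:1:0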

function test_zero_cache_file_stat() {
    describe "Test zero byte cache file stat ..."

    rm_test_file "${TEST_TEXT_FILE}"

    #
    # create empty file
    #
    touch "${TEST_TEXT_FILE}"

    #
    # The first argument of this function is the "testrun-<random>" directory name.
    #
    local CACHE_TESTRUN_DIR=$1

    # [NOTE]
    # The stat file is a one-line text file whose content is expected to be
    # "<inode>:0" (e.g. "4543937:0").
    #
    if ! head -1 "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${TEST_TEXT_FILE}" 2>/dev/null | grep -q ':0$' 2>/dev/null; then
        echo "The cache file stat after creating an empty file is incorrect : ${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${TEST_TEXT_FILE}"
        return 1;
    fi
    rm_test_file "${TEST_TEXT_FILE}"
}

function test_upload_sparsefile {
    describe "Testing upload sparse file ..."

    rm_test_file "${BIG_FILE}"
    rm -f "${TEMP_DIR}/${BIG_FILE}"

    #
    # Make a file that is entirely a hole
    #
    "${TRUNCATE_BIN}" "${TEMP_DIR}/${BIG_FILE}" -s "${BIG_FILE_LENGTH}"

    #
    # Write a few bytes at about the middle of the file
    # (intentionally not on a block boundary)
    #
    local WRITE_POS=$((BIG_FILE_LENGTH / 2 - 128))
    echo -n "0123456789ABCDEF" | dd of="${TEMP_DIR}/${BIG_FILE}" bs=1 count=16 seek="${WRITE_POS}" conv=notrunc

    #
    # copy(upload) the file
    #
    cp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}"

    #
    # check
    #
    cmp "${TEMP_DIR}/${BIG_FILE}" "${BIG_FILE}"

    rm_test_file "${BIG_FILE}"
    rm -f "${TEMP_DIR}/${BIG_FILE}"
}
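
# [NOTE]
# A handy way to confirm that a file really is sparse (mostly holes) before
# uploading it is to compare its allocated blocks with its apparent size; a
# rough sketch, not used by the suite itself:
function example_show_sparseness_sketch {
    # "du -k" reports allocated space; "ls -l" reports the apparent byte size
    echo "allocated KiB: $(du -k "$1" | awk '{print $1}')"
    # shellcheck disable=SC2012
    echo "apparent bytes: $(ls -l "$1" | awk '{print $5}')"
}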

function test_mix_upload_entities() {
    describe "Testing mix upload entities ..."

    #
    # Make test file
    #
    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${BIG_FILE}"

    #
    # If the cache option is enabled, delete the cache of uploaded files.
    #
    if [ -f "${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE}" ]; then
        rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${BIG_FILE}"
    fi
    if [ -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE}" ]; then
        rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${BIG_FILE}"
    fi

    #
    # Do a partial write to the file.
    #
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=0 conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=8192 conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=1073152 conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=26214400 conv=notrunc
    echo -n "0123456789ABCDEF" | dd of="${BIG_FILE}" bs=1 count=16 seek=26222592 conv=notrunc

    rm_test_file "${BIG_FILE}"
}

#
# [NOTE]
# This test runs last because it uses up disk space and may not recover.
# This may be a problem, especially on MacOS. (See the comment near the
# definition line for the ENSURE_DISKFREE_SIZE variable.)
#
function test_ensurespace_move_file() {
    describe "Testing upload(mv) file when disk space is not enough ..."

    #
    # Make test file which is not under mountpoint
    #
    mkdir -p "${CACHE_DIR}/.s3fs_test_tmpdir"
    ../../junk_data $((BIG_FILE_BLOCK_SIZE * BIG_FILE_COUNT)) > "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}"

    #
    # Backup file stat
    #
    local ORIGINAL_PERMISSIONS
    if [ "$(uname)" = "Darwin" ]; then
        ORIGINAL_PERMISSIONS=$(stat -f "%u:%g" "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}")
    else
        ORIGINAL_PERMISSIONS=$(stat --format=%u:%g "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}")
    fi

    #
    # Fill the disk up to the ensure_diskfree limit
    #
    local NOW_CACHE_DISK_AVAIL_SIZE; NOW_CACHE_DISK_AVAIL_SIZE=$(get_disk_avail_size "${CACHE_DIR}")
    local TMP_FILE_NO=0
    while true; do
        # re-read the available size on each round so that the loop terminates
        NOW_CACHE_DISK_AVAIL_SIZE=$(get_disk_avail_size "${CACHE_DIR}")
        local ALLOWED_USING_SIZE=$((NOW_CACHE_DISK_AVAIL_SIZE - ENSURE_DISKFREE_SIZE))
        if [ "${ALLOWED_USING_SIZE}" -gt "${BIG_FILE_LENGTH}" ]; then
            cp -p "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}" "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}_${TMP_FILE_NO}"
            local TMP_FILE_NO=$((TMP_FILE_NO + 1))
        else
            break;
        fi
    done

    #
    # move file
    #
    mv "${CACHE_DIR}/.s3fs_test_tmpdir/${BIG_FILE}" "${BIG_FILE}"

    #
    # file stat
    #
    local MOVED_PERMISSIONS
    if [ "$(uname)" = "Darwin" ]; then
        MOVED_PERMISSIONS=$(stat -f "%u:%g" "${BIG_FILE}")
    else
        MOVED_PERMISSIONS=$(stat --format=%u:%g "${BIG_FILE}")
    fi
    local MOVED_FILE_LENGTH
    # shellcheck disable=SC2012
    MOVED_FILE_LENGTH=$(ls -l "${BIG_FILE}" | awk '{print $5}')

    #
    # check
    #
    if [ "${MOVED_PERMISSIONS}" != "${ORIGINAL_PERMISSIONS}" ]; then
        echo "Failed to move file with permission"
        return 1
    fi
    if [ "${MOVED_FILE_LENGTH}" -ne "${BIG_FILE_LENGTH}" ]; then
        echo "Failed to move file with file length: ${MOVED_FILE_LENGTH} ${BIG_FILE_LENGTH}"
        return 1
    fi

    rm_test_file "${BIG_FILE}"
    rm -rf "${CACHE_DIR}/.s3fs_test_tmpdir"
}

function test_ut_ossfs {
    describe "Testing ossfs python ut..."

    # shellcheck disable=SC2153
    export TEST_BUCKET_MOUNT_POINT="${TEST_BUCKET_MOUNT_POINT_1}"
    ../../ut_test.py
}

#
# This test opens a file and writes multiple sets of data.
# The file is opened only once and multiple blocks of data are written
# to the file descriptor with a gap between them.
# That is, the data sets are written discontinuously.
# The blocks written are both smaller and larger than the part size of the
# multipart upload, and each gap is at least the part size of the multipart
# upload.
# Write as shown below:
#    <SOF>....<write data>....<write data>....<write data><EOF>
#
# There are two types of tests: new files and existing files.
# For existing files, the file size must be larger than the last position
# this test writes to.
#    <SOF>....<write data>....<write data>....<write data>...<EOF>
#
function test_write_data_with_skip() {
    describe "Testing write data block with skipping block..."

    #
    # The first argument of this function is the "testrun-<random>" directory name.
    #
    local CACHE_TESTRUN_DIR=$1

    local _SKIPWRITE_FILE="test_skipwrite"
    local _TMP_SKIPWRITE_FILE="/tmp/${_SKIPWRITE_FILE}"

    #------------------------------------------------------
    # (1) test new file
    #------------------------------------------------------
    #
    # Clean files
    #
    rm_test_file "${_SKIPWRITE_FILE}"
    rm_test_file "${_TMP_SKIPWRITE_FILE}"

    #
    # Create new file in bucket and temporary directory(/tmp)
    #
    # Writing to the file is as follows:
    #    |<-- skip(12MB) --><-- write(1MB) --><-- skip(22MB) --><-- write(20MB) --><-- skip(23MB) --><-- write(1MB) -->| (79MB)
    #
    # As a result, written and unwritten areas are mixed in the file.
    # The unwritten areas are holes created by truncation and read back as 0x00.
    # Assuming that multipart upload is performed on a part-by-part basis,
    # it will be as follows:
    #    part 1) 0x0.. 0x9FFFFF      : <not write area(0x00)>
    #    part 2) 0xA00000..0x13FFFFF : 0xA00000..0xBFFFFF  <not write area(0x00)>
    #                                  0xC00000..0xCFFFFF  <write area>
    #                                  0xD00000..0x13FFFFF <not write area(0x00)>
    #    part 3) 0x1400000..0x1DFFFFF: <not write area(0x00)>
    #    part 4) 0x1E00000..0x27FFFFF: 0x1E00000..0x22FFFFF <not write area(0x00)>
    #                                  0x2300000..0x27FFFFF <write area>
    #    part 5) 0x2800000..0x31FFFFF: <write area>
    #    part 6) 0x3200000..0x3BFFFFF: 0x3200000..0x36FFFFF <write area>
    #                                  0x3700000..0x3BFFFFF <not write area(0x00)>
    #    part 7) 0x3C00000..0x45FFFFF: <not write area(0x00)>
    #    part 8) 0x4600000..0x4BFFFFF: 0x4600000..0x4AFFFFF <not write area(0x00)>
    #                                  0x4B00000..0x4BFFFFF <write area>
    #
    ../../write_multiblock -f "${_SKIPWRITE_FILE}" -f "${_TMP_SKIPWRITE_FILE}" -p 12582912:65536 -p 36700160:20971520 -p 78643200:65536

    #
    # delete cache file if using cache
    #
    # shellcheck disable=SC2009
    if ps u -p "${S3FS_PID}" | grep -q use_cache; then
        rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
        rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
    fi

    #
    # Compare
    #
    cmp "${_SKIPWRITE_FILE}" "${_TMP_SKIPWRITE_FILE}"

    #------------------------------------------------------
    # (2) test existing file
    #------------------------------------------------------
    # [NOTE]
    # This test uses the file used in the previous test as an existing file.
    #
    # shellcheck disable=SC2009
    if ps u -p "${S3FS_PID}" | grep -q use_cache; then
        rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
        rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
    fi

    #
    # Overwrite data in the existing file in the bucket and in the temporary directory(/tmp)
    #
    # Writing to the file is as follows:
    #    |<----------------------------------------------- existing file ----------------------------------------------------------->| (79MB)
    #    |<-- skip(12MB) --><-- write(1MB) --><-- skip(22MB) --><-- write(20MB) --><-- skip(22MB) --><-- write(1MB) --><-- 1MB -->| (79MB)
    #
    # As a result, written and unwritten areas are mixed in the file.
    # The unwritten areas are holes created by truncation and read back as 0x00.
    # Assuming that multipart upload is performed on a part-by-part basis,
    # it will be as follows:
    #    part 1) 0x0.. 0x9FFFFF      : <not write area(0x00)>
    #    part 2) 0xA00000..0x13FFFFF : 0xA00000..0xBFFFFF  <not write area(0x00)>
    #                                  0xC00000..0xCFFFFF  <write area>
    #                                  0xD00000..0x13FFFFF <not write area(0x00)>
    #    part 3) 0x1400000..0x1DFFFFF: <not write area(0x00)>
    #    part 4) 0x1E00000..0x27FFFFF: 0x1E00000..0x22FFFFF <not write area(0x00)>
    #                                  0x2300000..0x27FFFFF <write area>
    #    part 5) 0x2800000..0x31FFFFF: <write area>
    #    part 6) 0x3200000..0x3BFFFFF: 0x3200000..0x36FFFFF <write area>
    #                                  0x3700000..0x3BFFFFF <not write area(0x00)>
    #    part 7) 0x3C00000..0x45FFFFF: <not write area(0x00)>
    #    part 8) 0x4600000..0x4BFFFFF: 0x4600000..0x49FFFFF <not write area(0x00)>
    #                                  0x4A00000..0x4AFFFFF <write area>
    #                                  0x4B00000..0x4BFFFFF <not write area(0x00)>
    #
    ../../write_multiblock -f "${_SKIPWRITE_FILE}" -f "${_TMP_SKIPWRITE_FILE}" -p 12582912:65536 -p 36700160:20971520 -p 77594624:65536

    #
    # delete cache file if using cache
    #
    # shellcheck disable=SC2009
    if ps u -p "${S3FS_PID}" | grep -q use_cache; then
        rm -f "${CACHE_DIR}/${TEST_BUCKET_1}/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
        rm -f "${CACHE_DIR}/.${TEST_BUCKET_1}.stat/${CACHE_TESTRUN_DIR}/${_SKIPWRITE_FILE}"
    fi

    #
    # Compare
    #
    cmp "${_SKIPWRITE_FILE}" "${_TMP_SKIPWRITE_FILE}"

    #
    # Clean files
    #
    rm_test_file "${_SKIPWRITE_FILE}"
    rm_test_file "${_TMP_SKIPWRITE_FILE}"
}

function add_all_tests {
    # shellcheck disable=SC2009
    if ps u -p "${S3FS_PID}" | grep -q use_cache; then
        add_tests test_cache_file_stat
        add_tests test_zero_cache_file_stat
    fi
    # shellcheck disable=SC2009
    if ! ps u -p "${S3FS_PID}" | grep -q ensure_diskfree && ! uname | grep -q Darwin; then
        add_tests test_clean_up_cache
    fi
    add_tests test_create_empty_file
    add_tests test_append_file
    add_tests test_truncate_file
    add_tests test_truncate_upload
    add_tests test_truncate_empty_file
    add_tests test_truncate_shrink_file
    add_tests test_mv_file
    add_tests test_mv_to_exist_file
    add_tests test_mv_empty_directory
    add_tests test_mv_nonempty_directory
    add_tests test_redirects
    add_tests test_mkdir_rmdir
    add_tests test_chmod
    add_tests test_chown
    add_tests test_list
    add_tests test_remove_nonempty_directory
    add_tests test_external_directory_creation
    add_tests test_external_modification
    add_tests test_external_creation
    add_tests test_read_external_object
    add_tests test_read_external_dir_object
    add_tests test_update_metadata_external_small_object
    add_tests test_update_metadata_external_large_object
    add_tests test_rename_before_close
    add_tests test_multipart_upload
    add_tests test_multipart_copy
    add_tests test_multipart_mix
    add_tests test_utimens_during_multipart
    add_tests test_special_characters
    add_tests test_hardlink
    add_tests test_symlink
    if ! uname | grep -q Darwin; then
        add_tests test_mknod
    fi
    add_tests test_extended_attributes
    add_tests test_mtime_file

    add_tests test_update_time_chmod
    add_tests test_update_time_chown
    add_tests test_update_time_xattr
    add_tests test_update_time_touch
    if ! mount -t fuse.s3fs | grep "$TEST_BUCKET_MOUNT_POINT_1 " | grep -q -e noatime -e relatime ; then
        add_tests test_update_time_touch_a
    fi
    add_tests test_update_time_append
    add_tests test_update_time_cp_p
    add_tests test_update_time_mv

    add_tests test_update_directory_time_chmod
    add_tests test_update_directory_time_chown
    add_tests test_update_directory_time_set_xattr
    add_tests test_update_directory_time_touch
    if ! mount -t fuse.s3fs | grep "$TEST_BUCKET_MOUNT_POINT_1 " | grep -q -e noatime -e relatime ; then
        add_tests test_update_directory_time_touch_a
    fi
    add_tests test_update_directory_time_subdir
    add_tests test_update_chmod_opened_file

    add_tests test_rm_rf_dir
    add_tests test_copy_file
    add_tests test_write_after_seek_ahead
    add_tests test_overwrite_existing_file_range
    add_tests test_concurrent_directory_updates
    add_tests test_concurrent_reads
    add_tests test_concurrent_writes
    add_tests test_open_second_fd
    add_tests test_write_multiple_offsets
    add_tests test_write_multiple_offsets_backwards
    add_tests test_content_type
    add_tests test_truncate_cache
    add_tests test_upload_sparsefile
    add_tests test_mix_upload_entities
    add_tests test_ut_ossfs
    # shellcheck disable=SC2009
    if ! ps u -p "${S3FS_PID}" | grep -q ensure_diskfree && ! uname | grep -q Darwin; then
        add_tests test_ensurespace_move_file
    fi
    add_tests test_write_data_with_skip
}

init_suite
add_all_tests
run_suite

#
# Local variables:
# tab-width: 4
# c-basic-offset: 4
# End:
# vim600: expandtab sw=4 ts=4 fdm=marker
# vim<600: expandtab sw=4 ts=4
#