Mirror of https://github.com/frappe/frappe_docker.git, synced 2025-02-02 20:18:25 +00:00
feat: restrict backups to backup limit for each site

parent 5f187c4e3f
commit 754ba8a91a
@@ -244,6 +244,13 @@ docker exec -it \
   -e "BUCKET_DIR=frappe-bench-v12" \
 ```
 
+Note:
+
+- The above example will back up files in the bucket called `backup` at the location `frappe-bench-v12/site.name.com/DATE_TIME/DATE_TIME-site_name_com-{filetype}.{extension}`.
+- example DATE_TIME: 20200325_042020.
+- example filetype: database, files or private-files
+- example extension: sql.gz or tar
+
 #### Updating and Migrating Sites
 
 Switch to the root of the `frappe_docker` directory before running the following commands:
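As a quick illustration of the layout described in the note above, here is a minimal sketch (not part of the commit; the bucket directory, site name and file name are made-up values) of how one such object key is composed:

```
# Illustrative values only; they are not taken from the commit.
bucket_dir = "frappe-bench-v12"                                 # BUCKET_DIR
site = "site.name.com"                                          # site directory
date_time = "20200325_042020"                                   # DATE_TIME
backup_file = "20200325_042020-site_name_com-database.sql.gz"   # DATE_TIME-site_name_com-{filetype}.{extension}

# Key inside the `backup` bucket:
# frappe-bench-v12/site.name.com/20200325_042020/20200325_042020-site_name_com-database.sql.gz
key = "/".join([bucket_dir, site, date_time, backup_file])
print(key)
```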
@@ -6,6 +6,8 @@ import datetime
 from glob import glob
 from frappe.utils import get_sites
 
+DATE_FORMAT = "%Y%m%d_%H%M%S"
+
 def get_file_ext():
     return {
         "database": "-database.sql.gz",
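The new DATE_FORMAT constant matches the DATE_TIME portion of backup names shown in the note above (e.g. 20200325_042020). A small sketch, assuming that naming scheme, of how the constant round-trips through datetime:

```
import datetime

DATE_FORMAT = "%Y%m%d_%H%M%S"

# Parse the DATE_TIME part of a backup directory name and format it back again.
parsed = datetime.datetime.strptime("20200325_042020", DATE_FORMAT)
assert parsed.strftime(DATE_FORMAT) == "20200325_042020"
print(parsed)  # 2020-03-25 04:20:20
```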
@@ -84,8 +86,9 @@ def upload_file_to_s3(filename, folder, conn, bucket):
         print("Error uploading: %s" % (e))
         exit(1)
 
-def delete_old_backups(limit, bucket, folder):
+def delete_old_backups(limit, bucket, site_name):
     all_backups = list()
+    all_backup_dates = list()
     backup_limit = int(limit)
     check_environment_variables()
     bucket_dir = os.environ.get('BUCKET_DIR')
@@ -104,29 +107,46 @@ def delete_old_backups(limit, bucket, folder):
 
     if objects:
         for obj in objects.get('CommonPrefixes'):
-            if obj.get('Prefix') in folder:
+            if obj.get('Prefix') == bucket_dir + '/':
                 for backup_obj in bucket.objects.filter(Prefix=obj.get('Prefix')):
                     try:
-                        backup_dir = backup_obj.key.split('/')[1]
-                        all_backups.append(backup_dir)
-                    except expression as error:
+                        # backup_obj.key is bucket_dir/site/date_time/backupfile.extension
+                        bucket_dir, site_slug, date_time, backupfile = backup_obj.key.split('/')
+                        date_time_object = datetime.datetime.strptime(
+                            date_time, DATE_FORMAT
+                        )
+
+                        if site_name in backup_obj.key:
+                            all_backup_dates.append(date_time_object)
+                            all_backups.append(backup_obj.key)
+                    except IndexError as error:
                         print(error)
                         exit(1)
 
-    all_backups = set(sorted(all_backups))
-    if len(all_backups) > backup_limit:
-        latest_backup = sorted(all_backups)[0] if len(all_backups) > 0 else None
-        print("Deleting Backup: {0}".format(latest_backup))
-        for obj in bucket.objects.filter(Prefix=bucket_dir + '/' + latest_backup):
-            # delete all keys that are inside the latest_backup
-            if bucket_dir in obj.key:
-                try:
-                    delete_directory = obj.key.split('/')[1]
-                    print('Deleteing ' + obj.key)
-                    s3.Object(bucket.name, obj.key).delete()
-                except expression as error:
-                    print(error)
-                    exit(1)
+    oldest_backup_date = min(all_backup_dates)
+
+    if len(all_backups) / 3 > backup_limit:
+        oldest_backup = None
+        for backup in all_backups:
+            try:
+                # backup is bucket_dir/site/date_time/backupfile.extension
+                backup_dir, site_slug, backup_dt_string, filename = backup.split('/')
+                backup_datetime = datetime.datetime.strptime(
+                    backup_dt_string, DATE_FORMAT
+                )
+                if backup_datetime == oldest_backup_date:
+                    oldest_backup = backup
+
+            except IndexError as error:
+                print(error)
+                exit(1)
+
+        if oldest_backup:
+            for obj in bucket.objects.filter(Prefix=oldest_backup):
+                # delete all keys that are inside the oldest_backup
+                if bucket_dir in obj.key:
+                    print('Deleting ' + obj.key)
+                    s3.Object(bucket.name, obj.key).delete()
 
 def main():
     details = dict()
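Each backup run uploads up to three objects per site (database, public files, private files), which is why the rewritten delete_old_backups divides the key count by 3 before comparing it with the backup limit, and then removes every key whose timestamp equals the oldest backup date. Below is a self-contained sketch of that pruning decision on plain key strings; it deliberately avoids boto3, and the keys_to_delete helper and example keys are illustrative, not part of the commit:

```
import datetime

DATE_FORMAT = "%Y%m%d_%H%M%S"
FILES_PER_BACKUP = 3  # database, public files, private files

def keys_to_delete(keys, backup_limit):
    """Return the keys of the oldest backup once the limit is exceeded.

    Each key is expected to look like bucket_dir/site/date_time/backupfile.extension.
    """
    dates = [
        datetime.datetime.strptime(key.split("/")[2], DATE_FORMAT)
        for key in keys
    ]
    if not dates or len(keys) / FILES_PER_BACKUP <= backup_limit:
        return []
    oldest = min(dates)
    return [key for key, date in zip(keys, dates) if date == oldest]

# Hypothetical keys for one site: two backups of three files each.
keys = [
    "frappe-bench-v12/site.name.com/20200324_042020/20200324_042020-site_name_com-database.sql.gz",
    "frappe-bench-v12/site.name.com/20200324_042020/20200324_042020-site_name_com-files.tar",
    "frappe-bench-v12/site.name.com/20200324_042020/20200324_042020-site_name_com-private-files.tar",
    "frappe-bench-v12/site.name.com/20200325_042020/20200325_042020-site_name_com-database.sql.gz",
    "frappe-bench-v12/site.name.com/20200325_042020/20200325_042020-site_name_com-files.tar",
    "frappe-bench-v12/site.name.com/20200325_042020/20200325_042020-site_name_com-private-files.tar",
]
print(keys_to_delete(keys, backup_limit=1))  # the three 20200324_042020 keys
```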
@@ -136,23 +156,22 @@ def main():
     for site in sites:
         details = get_backup_details(site)
         db_file = details.get('database', {}).get('file_path')
-        folder = None
+        folder = os.environ.get('BUCKET_DIR') + '/' + site + '/'
         if db_file:
-            folder = os.environ.get('BUCKET_DIR') + '/' + os.path.basename(db_file)[:15] + '/'
+            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(db_file)[:15] + '/'
             upload_file_to_s3(db_file, folder, conn, bucket)
 
         public_files = details.get('public_files', {}).get('file_path')
         if public_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + os.path.basename(public_files)[:15] + '/'
+            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(public_files)[:15] + '/'
             upload_file_to_s3(public_files, folder, conn, bucket)
 
         private_files = details.get('private_files', {}).get('file_path')
         if private_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + os.path.basename(private_files)[:15] + '/'
+            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(private_files)[:15] + '/'
             upload_file_to_s3(private_files, folder, conn, bucket)
 
-        if folder:
-            delete_old_backups(os.environ.get('BACKUP_LIMIT', '3'), bucket, folder)
+        delete_old_backups(os.environ.get('BACKUP_LIMIT', '3'), bucket, site)
 
     print('push-backup complete')
     exit(0)
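In main(), the upload folder now includes the site name, and os.path.basename(...)[:15] picks out exactly the DATE_TIME prefix of the backup file name (15 characters in %Y%m%d_%H%M%S form). A short sketch of how one folder is derived; the site and file path are made-up values, not taken from the commit:

```
import os

site = "site.name.com"           # illustrative site name
bucket_dir = "frappe-bench-v12"  # would come from the BUCKET_DIR environment variable
db_file = "./sites/site.name.com/private/backups/20200325_042020-site_name_com-database.sql.gz"  # made-up path

# basename(db_file)[:15] is the DATE_TIME portion of the file name.
folder = bucket_dir + "/" + site + "/" + os.path.basename(db_file)[:15] + "/"
print(folder)  # frappe-bench-v12/site.name.com/20200325_042020/
```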