mirror of https://github.com/frappe/frappe_docker.git
synced 2025-01-24 23:58:27 +00:00

Add push-backup

This commit is contained in:
parent e6aaed9e79
commit 37878f4342
@@ -59,6 +59,7 @@ RUN git clone --depth 1 -b ${ERPNEXT_VERSION} https://github.com/frappe/erpnext
 
 FROM base as configured_base
 
 COPY pretend-bench.sh /usr/local/bin/bench
+COPY push_backup.py /usr/local/bin/push-backup
 # healthcheck.sh used in helm chart
 COPY entrypoint.sh patched_bench_helper.py healthcheck.sh /usr/local/bin/
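The script lands next to the existing helper scripts, so inside the image it is callable simply as push-backup. A minimal smoke test of this wiring could look like the following (the image tag is a placeholder, not part of this commit):

    docker run --rm <worker-image> push-backup --help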
build/worker/push_backup.py  (new executable file, 87 lines)
@@ -0,0 +1,87 @@
#!/home/frappe/frappe-bench/env/bin/python

import argparse
import os
import sys

import boto3
import frappe
from frappe.utils.backups import BackupGenerator


class Arguments(argparse.Namespace):
    site: str
    bucket: str
    region_name: str
    endpoint_url: str
    aws_access_key_id: str
    aws_secret_access_key: str


def get_bucket(arguments: Arguments):
    return boto3.resource(
        service_name="s3",
        endpoint_url=arguments.endpoint_url,
        region_name=arguments.region_name,
        aws_access_key_id=arguments.aws_access_key_id,
        aws_secret_access_key=arguments.aws_secret_access_key,
    ).Bucket(arguments.bucket)


def get_files(site_name: str):
    frappe.connect(site_name)
    backup_generator = BackupGenerator(
        db_name=frappe.conf.db_name,
        user=frappe.conf.db_name,
        password=frappe.conf.db_password,
        db_host=frappe.db.host,
        db_port=frappe.db.port,
        db_type=frappe.conf.db_type,
    )
    # Only consider backups taken within the last 24 hours.
    recent_backup_files = backup_generator.get_recent_backup(24)
    return [f for f in recent_backup_files if f]


def upload(arguments: Arguments):
    """Get latest backup files using Frappe utils, push them to S3 and remove local copy"""
    files = get_files(arguments.site)
    if not files:
        print("No backup found that was taken <24 hours ago.")
        return

    bucket = get_bucket(arguments)
    print(f"Uploading files: {files}")

    for file_name in files:
        abs_file_path = os.path.abspath(file_name)
        # The absolute path doubles as the object key, so backups keep
        # their on-disk layout inside the bucket.
        bucket.upload_file(Filename=abs_file_path, Key=abs_file_path)
        os.remove(file_name)
def _parse_args(args: list[str]):
    parser = argparse.ArgumentParser()
    parser.add_argument("--site", required=True)
    parser.add_argument("--bucket", required=True)
    parser.add_argument("--region-name", required=True)
    parser.add_argument("--endpoint-url", required=True)
    # Fall back to the standard AWS credential variables; the flags are only
    # required when the variables are unset (combining required=True with a
    # default would make the default dead code).
    parser.add_argument(
        "--aws-access-key-id",
        required="AWS_ACCESS_KEY_ID" not in os.environ,
        default=os.getenv("AWS_ACCESS_KEY_ID"),
    )
    parser.add_argument(
        "--aws-secret-access-key",
        required="AWS_SECRET_ACCESS_KEY" not in os.environ,
        default=os.getenv("AWS_SECRET_ACCESS_KEY"),
    )
    return parser.parse_args(args, namespace=Arguments())


def main(args: list[str]) -> int:
    arguments = _parse_args(args)
    upload(arguments)
    return 0


if __name__ == "__main__":
    # NOTE: this echoes all arguments, including credentials, to stdout.
    print(sys.argv[1:])
    raise SystemExit(main(sys.argv[1:]))
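For reference, an invocation mirroring the one exercised in tests/main.py below (site name, bucket, and endpoint are illustrative):

    push-backup \
        --site tests \
        --bucket frappe \
        --region-name us-east-1 \
        --endpoint-url http://minio:9000 \
        --aws-access-key-id "$AWS_ACCESS_KEY_ID" \
        --aws-secret-access-key "$AWS_SECRET_ACCESS_KEY"

With the environment fallback in _parse_args, the two credential flags can be omitted whenever AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are exported.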

tests/main.py  (109 lines changed)
@@ -9,6 +9,8 @@ from typing import Any, Callable, Optional
 from urllib.error import HTTPError
 from urllib.request import Request, urlopen
 
+import boto3
+
 CI = os.getenv("CI")
 SITE_NAME = "tests"
 BACKEND_SERVICES = (
@@ -18,6 +20,8 @@ BACKEND_SERVICES = (
     "queue-long",
     "scheduler",
 )
+MINIO_ACCESS_KEY = "AKIAIOSFODNN7EXAMPLE"
+MINIO_SECRET_KEY = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY"
 
 
 def patch_print():
@@ -111,7 +115,7 @@ def print_compose_configuration():
 
 @log("Create containers")
 def create_containers():
-    docker_compose("up", "-d")
+    docker_compose("up", "-d", "--quiet-pull")
 
 
 @log("Check if backend services have connections")
@@ -221,9 +225,101 @@ def check_files():
     )
 
 
+def get_s3_resource():
+    return boto3.resource(
+        service_name="s3",
+        endpoint_url="http://127.0.0.1:9000",
+        region_name="us-east-1",
+        aws_access_key_id=MINIO_ACCESS_KEY,
+        aws_secret_access_key=MINIO_SECRET_KEY,
+        use_ssl=False,
+    )
+
+
+@log("Prepare S3 server")
+def prepare_s3_server():
+    run(
+        "docker",
+        "run",
+        "--name",
+        "minio",
+        "-d",
+        "-e",
+        f"MINIO_ACCESS_KEY={MINIO_ACCESS_KEY}",
+        "-e",
+        f"MINIO_SECRET_KEY={MINIO_SECRET_KEY}",
+        "--network",
+        "test_default",
+        "--publish",
+        "9000:9000",
+        "minio/minio",
+        "server",
+        "/data",
+    )
+    get_s3_resource().create_bucket(Bucket="frappe")
+
+
+@log("Push backup to S3")
+def push_backup_to_s3():
+    docker_compose(
+        "exec", "backend", "bench", "--site", SITE_NAME, "backup", "--with-files"
+    )
+    docker_compose(
+        "exec",
+        "backend",
+        "push-backup",
+        "--site",
+        SITE_NAME,
+        "--bucket",
+        "frappe",
+        "--region-name",
+        "us-east-1",
+        "--endpoint-url",
+        "http://minio:9000",
+        "--aws-access-key-id",
+        MINIO_ACCESS_KEY,
+        "--aws-secret-access-key",
+        MINIO_SECRET_KEY,
+    )
+
+
+@log("Check backup in S3")
+def check_backup_in_s3():
+    bucket = get_s3_resource().Bucket("frappe")
+    db = False
+    config = False
+    private_files = False
+    public_files = False
+    for obj in bucket.objects.all():
+        if obj.key.endswith("database.sql.gz"):
+            db = True
+        elif obj.key.endswith("site_config_backup.json"):
+            config = True
+        elif obj.key.endswith("private-files.tar"):
+            private_files = True
+        elif obj.key.endswith("files.tar"):
+            public_files = True
+
+    exc = lambda type_: Exception(f"Didn't push {type_} backup")
+    if not db:
+        raise exc("database")
+    if not config:
+        raise exc("site config")
+    if not private_files:
+        raise exc("private files")
+    if not public_files:
+        raise exc("public files")
+    print("All files were pushed to S3!")
+
+
+@log("Stop S3 container")
+def stop_s3_container():
+    run("docker", "rm", "minio", "-f")
+
+
 @log("Recreate with https override")
 def recreate_with_https_override():
-    docker_compose("-f", "overrides/compose.https.yml", "up", "-d")
+    docker_compose("-f", "overrides/compose.https.yml", "up", "-d", "--quiet-pull")
 
 
 @log("Check / (https)")
@@ -241,7 +337,7 @@ def create_containers_with_erpnext_override():
     args = ["-f", "overrides/compose.erpnext.yml"]
     if CI:
         args.extend(("-f", "tests/compose.ci-erpnext.yml"))
-    docker_compose(*args, "up", "-d")
+    docker_compose(*args, "up", "-d", "--quiet-pull")
 
 
 @log("Create ERPNext site")
@@ -280,7 +376,7 @@ def check_erpnext_assets():
 
 @log("Create containers with Postgres override")
 def create_containers_with_postgres_override():
-    docker_compose("-f", "overrides/compose.postgres.yml", "up", "-d")
+    docker_compose("-f", "overrides/compose.postgres.yml", "up", "-d", "--quiet-pull")
 
 
 @log("Create Postgres site")
@@ -331,6 +427,11 @@ def main() -> int:
     check_assets()
     check_files()
 
+    prepare_s3_server()
+    push_backup_to_s3()
+    check_backup_in_s3()
+    stop_s3_container()
+
     recreate_with_https_override()
     check_index_https()
     stop_containers()
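Because the MinIO container joins the compose network (test_default) and also publishes port 9000, the backend container reaches it as http://minio:9000 while the test harness talks to http://127.0.0.1:9000. To inspect what the test pushed by hand, one option is the AWS CLI (assuming the MinIO container from prepare_s3_server is still running; the credentials are the MinIO example keys above):

    AWS_ACCESS_KEY_ID=AKIAIOSFODNN7EXAMPLE \
    AWS_SECRET_ACCESS_KEY=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY \
    aws --endpoint-url http://127.0.0.1:9000 s3 ls s3://frappe --recursive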