mirror of https://github.com/frappe/frappe_docker.git synced 2024-09-18 18:19:02 +00:00

feat: add restic (#1044)

* feat: add restic

allows incremental snapshot backups
remove custom push-backup script

* ci: remove .git dir to skip fetch_details_from_tag

fixes https://github.com/frappe/frappe_docker/actions/runs/3938883301/jobs/6738091655
Revant Nandgaonkar 2023-01-18 11:31:18 +05:30 committed by GitHub
parent 44df16fd04
commit bb1e4bb341
8 changed files with 41 additions and 247 deletions

View File

@@ -5,40 +5,34 @@ Create backup service or stack.
version: "3.7"
services:
  backup:
    image: frappe/erpnext:v14
    image: frappe/erpnext:${VERSION}
    entrypoint: ["bash", "-c"]
    command: |
      for SITE in $(/home/frappe/frappe-bench/env/bin/python -c "import frappe;print(' '.join(frappe.utils.get_sites()))")
      do
        bench --site $SITE backup --with-files
        push-backup \
          --site $SITE \
          --bucket $BUCKET_NAME \
          --region-name $REGION \
          --endpoint-url $ENDPOINT_URL \
          --aws-access-key-id $ACCESS_KEY_ID \
          --aws-secret-access-key $SECRET_ACCESS_KEY
      done
    command:
      - |
        bench backup-all-sites
        ## Uncomment following to snapshot sites
        # restic snapshots || restic init
        # restic backup sites
    environment:
      - BUCKET_NAME=erpnext
      - REGION=us-east-1
      - ACCESS_KEY_ID=RANDOMACCESSKEY
      - SECRET_ACCESS_KEY=RANDOMSECRETKEY
      - ENDPOINT_URL=https://endpoint.url
      # Set correct environment variables for restic
      - RESTIC_REPOSITORY=s3:https://s3.endpoint.com/restic
      - AWS_ACCESS_KEY_ID=access_key
      - AWS_SECRET_ACCESS_KEY=secret_access_key
      - RESTIC_PASSWORD=restic_password
    volumes:
      - "sites-vol:/home/frappe/frappe-bench/sites"
      - "sites:/home/frappe/frappe-bench/sites"
    networks:
      - erpnext-network
networks:
  erpnext-network:
    external: true
    name: <your_frappe_docker_project_name>_default
    name: ${PROJECT_NAME:-erpnext}_default
volumes:
  sites-vol:
  sites:
    external: true
    name: <your_frappe_docker_project_name>_sites-vol
    name: ${PROJECT_NAME:-erpnext}_sites
```
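
The compose file now takes its image tag and project name from the environment: `VERSION` selects the `frappe/erpnext` tag, and `PROJECT_NAME` (default `erpnext`) must match the project that owns the external `sites` volume and network. A minimal sketch of a one-off run, assuming the file above is saved as `backup-job.yml` (values are illustrative):

```
VERSION=v14 PROJECT_NAME=erpnext docker compose -f backup-job.yml up -d
```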
In case of a single Docker host setup, add a crontab entry to run the backup every 6 hours.
@@ -47,6 +41,13 @@ In case of a single Docker host setup, add a crontab entry to run the backup every 6 hours.
0 */6 * * * /usr/local/bin/docker-compose -f /path/to/backup-job.yml up -d > /dev/null
```
Or
```
0 */6 * * * docker compose -p erpnext exec backend bench backup-all-sites > /dev/null
```
Notes:
- Change the cron schedule as needed.
- When using `docker compose exec`, set the correct project name with `-p`.
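
To inspect or restore the snapshots this job creates, restic can be run through the running stack with the same repository settings. A minimal sketch, assuming the `backend` service and `-p erpnext` project from the cron example above and the placeholder credentials from the compose file; adjust every value to your deployment:

```
# List snapshots in the repository configured above.
docker compose -p erpnext exec \
  -e RESTIC_REPOSITORY=s3:https://s3.endpoint.com/restic \
  -e AWS_ACCESS_KEY_ID=access_key \
  -e AWS_SECRET_ACCESS_KEY=secret_access_key \
  -e RESTIC_PASSWORD=restic_password \
  backend restic snapshots

# Restore the most recent snapshot to a scratch directory inside the container.
docker compose -p erpnext exec \
  -e RESTIC_REPOSITORY=s3:https://s3.endpoint.com/restic \
  -e AWS_ACCESS_KEY_ID=access_key \
  -e AWS_SECRET_ACCESS_KEY=secret_access_key \
  -e RESTIC_PASSWORD=restic_password \
  backend restic restore latest --target /tmp/restore
```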

View File

@@ -59,7 +59,7 @@ Note:
- Make sure `APPS_JSON_BASE64` variable has correct base64 encoded JSON string. It is consumed as build arg, base64 encoding ensures it to be friendly with environment variables. Use `jq empty apps.json` to validate `apps.json` file.
- Make sure the `--tag` is a valid image name that will be pushed to the registry.
- Change the `--build-arg` values as per the required Python and NodeJS versions and the Frappe Framework repo and branch.
- Set `--build-arg=REMOVE_GIT_REMOTE=true` to remove git upstream remotes from all apps. Use this in case they have secrets or private tokens and you don't wish to ship them in final image.
- `.git` directories for all apps are removed from the image.
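
For illustration, a build command wiring these arguments together might look like the sketch below. The Containerfile path and image tag are placeholders, and only the build args mentioned above are shown; adjust versions to your needs:

```
# Validate apps.json and encode it (-w 0 disables wrapping; GNU coreutils base64).
jq empty apps.json
export APPS_JSON_BASE64=$(base64 -w 0 apps.json)

docker build \
  --build-arg=FRAPPE_PATH=https://github.com/frappe/frappe \
  --build-arg=FRAPPE_BRANCH=version-14 \
  --build-arg=APPS_JSON_BASE64=$APPS_JSON_BASE64 \
  --tag=registry.example.com/custom-erpnext:latest \
  --file=path/to/Containerfile .
```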
### Push image to use in yaml files

View File

@@ -1,6 +1,6 @@
# Images
There's 4 images that you can find in `/images` directory:
There are 3 images that you can find in the `/images` directory:
- `bench`. It is used for development. [Learn more about how to start development](../development/README.md).
- `production`.

View File

@@ -19,6 +19,8 @@ RUN useradd -ms /bin/bash frappe \
libharfbuzz0b \
libpangoft2-1.0-0 \
libpangocairo-1.0-0 \
# For backups
restic \
# MariaDB
mariadb-client \
# Postgres
@@ -62,7 +64,6 @@ RUN useradd -ms /bin/bash frappe \
COPY resources/nginx-template.conf /templates/nginx/frappe.conf.template
COPY resources/nginx-entrypoint.sh /usr/local/bin/nginx-entrypoint.sh
COPY resources/push_backup.py /usr/local/bin/push-backup
FROM base AS builder
@@ -98,7 +99,6 @@ RUN if [ -n "${APPS_JSON_BASE64}" ]; then \
USER frappe
ARG REMOVE_GIT_REMOTE
ARG FRAPPE_BRANCH=version-14
ARG FRAPPE_PATH=https://github.com/frappe/frappe
RUN export APP_INSTALL_ARGS="" && \
@@ -116,9 +116,7 @@ RUN export APP_INSTALL_ARGS="" && \
cd /home/frappe/frappe-bench && \
echo "$(jq 'del(.db_host, .redis_cache, .redis_queue, .redis_socketio)' sites/common_site_config.json)" \
> sites/common_site_config.json && \
if [ -n "${REMOVE_GIT_REMOTE}" ]; then \
find apps -name .git -type d -prune | xargs -i git --git-dir {} remote rm upstream; \
fi
find apps -mindepth 1 -path "*/.git" | xargs rm -fr
WORKDIR /home/frappe/frappe-bench
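
The `find apps -mindepth 1 -path "*/.git" | xargs rm -fr` step above replaces the earlier `REMOVE_GIT_REMOTE` option: instead of only dropping the `upstream` remote from each app, it deletes the apps' `.git` directories outright, so remote URLs or embedded tokens never reach the final image. A rough illustration of what the command matches (app names are examples only):

```
# For a bench containing apps/frappe and apps/erpnext, find prints paths such as
#   apps/frappe/.git
#   apps/erpnext/.git
# (plus any nested */.git entries), and xargs removes each of them recursively.
find apps -mindepth 1 -path "*/.git" | xargs rm -fr
```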

View File

@@ -19,6 +19,8 @@ RUN useradd -ms /bin/bash frappe \
libharfbuzz0b \
libpangoft2-1.0-0 \
libpangocairo-1.0-0 \
# For backups
restic \
# MariaDB
mariadb-client \
# Postgres
@@ -62,7 +64,6 @@ RUN useradd -ms /bin/bash frappe \
COPY resources/nginx-template.conf /templates/nginx/frappe.conf.template
COPY resources/nginx-entrypoint.sh /usr/local/bin/nginx-entrypoint.sh
COPY resources/push_backup.py /usr/local/bin/push-backup
FROM base AS builder
@@ -108,7 +109,7 @@ RUN bench init \
bench get-app --branch=${ERPNEXT_BRANCH} --resolve-deps erpnext ${ERPNEXT_REPO} && \
echo "$(jq 'del(.db_host, .redis_cache, .redis_queue, .redis_socketio)' sites/common_site_config.json)" \
> sites/common_site_config.json && \
find apps -name .git -type d -prune | xargs -i git --git-dir {} remote rm upstream
find apps -mindepth 1 -path "*/.git" | xargs rm -fr
FROM base as erpnext

View File

@@ -1,118 +0,0 @@
#!/home/frappe/frappe-bench/env/bin/python

from __future__ import annotations

import argparse
import os
import sys
from pathlib import Path
from typing import TYPE_CHECKING, Any, List, cast

import boto3
import frappe
from frappe.utils.backups import BackupGenerator

if TYPE_CHECKING:
    from mypy_boto3_s3.service_resource import _Bucket


class Arguments(argparse.Namespace):
    site: str
    bucket: str
    region_name: str
    endpoint_url: str
    aws_access_key_id: str
    aws_secret_access_key: str
    bucket_directory: str


def _get_files_from_previous_backup(site_name: str) -> list[Path]:
    frappe.connect(site_name)
    conf = cast(Any, frappe.conf)
    backup_generator = BackupGenerator(
        db_name=conf.db_name,
        user=conf.db_name,
        password=conf.db_password,
        db_host=frappe.db.host,
        db_port=frappe.db.port,
        db_type=conf.db_type,
    )
    recent_backup_files = backup_generator.get_recent_backup(24)
    frappe.destroy()
    return [Path(f) for f in recent_backup_files if f]


def get_files_from_previous_backup(site_name: str) -> list[Path]:
    files = _get_files_from_previous_backup(site_name)
    if not files:
        print("No backup found that was taken <24 hours ago.")
    return files


def get_bucket(args: Arguments) -> _Bucket:
    return boto3.resource(
        service_name="s3",
        endpoint_url=args.endpoint_url,
        region_name=args.region_name,
        aws_access_key_id=args.aws_access_key_id,
        aws_secret_access_key=args.aws_secret_access_key,
    ).Bucket(args.bucket)


def upload_file(
    path: Path, site_name: str, bucket: _Bucket, bucket_directory: str = None
) -> None:
    filename = str(path.absolute())
    key = str(Path(site_name) / path.name)
    if bucket_directory:
        key = bucket_directory + "/" + key
    print(f"Uploading {key}")
    bucket.upload_file(Filename=filename, Key=key)
    os.remove(path)


def push_backup(args: Arguments) -> None:
    """Get latest backup files using Frappe utils, push them to S3 and remove local copy"""
    files = get_files_from_previous_backup(args.site)
    bucket = get_bucket(args)
    for path in files:
        upload_file(
            path=path,
            site_name=args.site,
            bucket=bucket,
            bucket_directory=args.bucket_directory,
        )
    print("Done!")


def parse_args(args: list[str]) -> Arguments:
    parser = argparse.ArgumentParser()
    parser.add_argument("--site", required=True)
    parser.add_argument("--bucket", required=True)
    parser.add_argument("--region-name", required=True)
    parser.add_argument("--endpoint-url", required=True)
    # Looking for default AWS credentials variables
    parser.add_argument(
        "--aws-access-key-id", required=True, default=os.getenv("AWS_ACCESS_KEY_ID")
    )
    parser.add_argument(
        "--aws-secret-access-key",
        required=True,
        default=os.getenv("AWS_SECRET_ACCESS_KEY"),
    )
    parser.add_argument("--bucket-directory")
    return parser.parse_args(args, namespace=Arguments())


def main(args: list[str]) -> int:
    os.chdir("sites")
    push_backup(parse_args(args))
    return 0


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))

View File

@@ -1,69 +0,0 @@
import os
import re
from typing import TYPE_CHECKING

import boto3

if TYPE_CHECKING:
    from mypy_boto3_s3.service_resource import BucketObjectsCollection, _Bucket


def get_bucket() -> "_Bucket":
    return boto3.resource(
        service_name="s3",
        endpoint_url="http://minio:9000",
        region_name="us-east-1",
        aws_access_key_id=os.getenv("S3_ACCESS_KEY"),
        aws_secret_access_key=os.getenv("S3_SECRET_KEY"),
    ).Bucket("frappe")


def get_key_builder():
    site_name = os.getenv("SITE_NAME")
    assert site_name

    def builder(key: str, suffix: str) -> bool:
        return bool(re.match(rf"{site_name}.*{suffix}$", key))

    return builder


def check_keys(objects: "BucketObjectsCollection"):
    check_key = get_key_builder()

    db = False
    config = False
    private_files = False
    public_files = False

    for obj in objects:
        if check_key(obj.key, "database.sql.gz"):
            db = True
        elif check_key(obj.key, "site_config_backup.json"):
            config = True
        elif check_key(obj.key, "private-files.tar"):
            private_files = True
        elif check_key(obj.key, "files.tar"):
            public_files = True

    exc = lambda type_: Exception(f"Didn't push {type_} backup")
    if not db:
        raise exc("database")
    if not config:
        raise exc("site config")
    if not private_files:
        raise exc("private files")
    if not public_files:
        raise exc("public files")

    print("All files were pushed to S3!")


def main() -> int:
    bucket = get_bucket()
    check_keys(bucket.objects.all())
    return 0


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -99,40 +99,21 @@ def test_frappe_connections_in_backends(
def test_push_backup(
    python_path: str,
    frappe_site: str,
    s3_service: S3ServiceResult,
    compose: Compose,
):
    restic_password = "secret"
    compose.bench("--site", frappe_site, "backup", "--with-files")
    compose.exec(
        "backend",
        "push-backup",
        "--site",
        frappe_site,
        "--bucket",
        "frappe",
        "--region-name",
        "us-east-1",
        "--endpoint-url",
        "http://minio:9000",
        "--aws-access-key-id",
        s3_service.access_key,
        "--aws-secret-access-key",
        s3_service.secret_key,
    )
    compose("cp", "tests/_check_backup_files.py", "backend:/tmp")
    compose.exec(
        "-e",
        f"S3_ACCESS_KEY={s3_service.access_key}",
        "-e",
        f"S3_SECRET_KEY={s3_service.secret_key}",
        "-e",
        f"SITE_NAME={frappe_site}",
        "backend",
        python_path,
        "/tmp/_check_backup_files.py",
    )
    restic_args = [
        "--env=RESTIC_REPOSITORY=s3:http://minio:9000/frappe",
        f"--env=AWS_ACCESS_KEY_ID={s3_service.access_key}",
        f"--env=AWS_SECRET_ACCESS_KEY={s3_service.secret_key}",
        f"--env=RESTIC_PASSWORD={restic_password}",
    ]
    compose.exec(*restic_args, "backend", "restic", "init")
    compose.exec(*restic_args, "backend", "restic", "backup", "sites")
    compose.exec(*restic_args, "backend", "restic", "snapshots")


def test_https(frappe_site: str, compose: Compose):
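
The reworked test drives restic directly in the `backend` container against the MinIO bucket: initialize a repository, back up the `sites` directory, then list snapshots. Outside the test harness the same three steps can be reproduced with `docker compose exec`, roughly as in this sketch (the access key, secret key and password are placeholders mirroring the test fixtures, and the container's working directory is assumed to be the bench directory):

```
restic_env=(
  --env RESTIC_REPOSITORY=s3:http://minio:9000/frappe
  --env AWS_ACCESS_KEY_ID="$ACCESS_KEY"
  --env AWS_SECRET_ACCESS_KEY="$SECRET_KEY"
  --env RESTIC_PASSWORD=secret
)
docker compose exec "${restic_env[@]}" backend restic init
docker compose exec "${restic_env[@]}" backend restic backup sites
docker compose exec "${restic_env[@]}" backend restic snapshots
```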