
chore(lint): Run black

Lev Vereshchagin 2021-12-10 11:52:40 +03:00
parent c4772bc5b5
commit f8b7b7af05
12 changed files with 316 additions and 239 deletions
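
Context for the diffs below: black rewrites Python source in place — normalizing strings to double quotes, adding trailing commas to exploded calls, and reflowing anything past its default 88-character line length — so a commit like this is typically produced by a single run of `black .` from the repository root (the exact invocation is not recorded in the commit). A minimal sketch of the same normalization through black's Python API; the sample line is illustrative, not taken verbatim from this repo:

    import black  # pip install black

    # format_str applies the same rules as the CLI: quote normalization,
    # magic trailing commas, and an 88-character default line length.
    src = "path = os.path.join(os.sep, 'home', 'frappe', 'frappe-bench')\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # -> path = os.path.join(os.sep, "home", "frappe", "frappe-bench")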

View File

@@ -6,14 +6,18 @@ import os

 if __name__ == "__main__":
-    bench_dir = os.path.join(os.sep, 'home', 'frappe', 'frappe-bench')
-    sites_dir = os.path.join(bench_dir, 'sites')
+    bench_dir = os.path.join(os.sep, "home", "frappe", "frappe-bench")
+    sites_dir = os.path.join(bench_dir, "sites")
     bench_helper = os.path.join(
-        bench_dir, 'apps', 'frappe',
-        'frappe', 'utils', 'bench_helper.py',
+        bench_dir,
+        "apps",
+        "frappe",
+        "frappe",
+        "utils",
+        "bench_helper.py",
     )
     cwd = os.getcwd()
     os.chdir(sites_dir)
     subprocess.check_call(
-        [sys.executable, bench_helper, 'frappe'] + sys.argv[1:],
+        [sys.executable, bench_helper, "frappe"] + sys.argv[1:],
     )

View File

@@ -8,7 +8,7 @@ from utils import (
     get_apps,
     get_container_versions,
     get_version_file,
-    get_config
+    get_config,
 )
@@ -30,12 +30,12 @@ def main():
         version_file_hash = None
         container_hash = None
-        repo = git.Repo(os.path.join('..', 'apps', app))
+        repo = git.Repo(os.path.join("..", "apps", app))
         branch = repo.active_branch.name
-        if branch == 'develop':
-            version_file_hash = version_file.get(app+'_git_hash')
-            container_hash = container_versions.get(app+'_git_hash')
+        if branch == "develop":
+            version_file_hash = version_file.get(app + "_git_hash")
+            container_hash = container_versions.get(app + "_git_hash")
         if container_hash and version_file_hash:
             if container_hash != version_file_hash:
                 is_ready = True
@@ -54,7 +54,7 @@ def main():
     config = get_config()
-    if is_ready and config.get('maintenance_mode') != 1:
+    if is_ready and config.get("maintenance_mode") != 1:
         migrate_sites(maintenance_mode=True)
         version_file = container_versions
         save_version_file(version_file)

View File

@@ -13,12 +13,17 @@ def backup(sites, with_files=False):
             backup_path_db=None,
             backup_path_files=None,
             backup_path_private_files=None,
-            force=True
+            force=True,
         )
         print("database backup taken -", odb.backup_path_db, "- on", now())
         if with_files:
             print("files backup taken -", odb.backup_path_files, "- on", now())
-            print("private files backup taken -", odb.backup_path_private_files, "- on", now())
+            print(
+                "private files backup taken -",
+                odb.backup_path_private_files,
+                "- on",
+                now(),
+            )
         frappe.destroy()

View File

@@ -8,7 +8,7 @@ from constants import (
     REDIS_SOCKETIO_KEY,
     DB_HOST_KEY,
     DB_PORT_KEY,
-    DB_PORT
+    DB_PORT,
 )
@@ -40,30 +40,26 @@ def check_host(ip, port, retry=10, delay=3, print_attempt=True):
 # Check service
 def check_service(
-        retry=10,
-        delay=3,
-        print_attempt=True,
-        service_name=None,
-        service_port=None):
+    retry=10, delay=3, print_attempt=True, service_name=None, service_port=None
+):
     config = get_config()
     if not service_name:
-        service_name = config.get(DB_HOST_KEY, 'mariadb')
+        service_name = config.get(DB_HOST_KEY, "mariadb")
     if not service_port:
         service_port = config.get(DB_PORT_KEY, DB_PORT)

     is_db_connected = False
     is_db_connected = check_host(
-        service_name,
-        service_port,
-        retry,
-        delay,
-        print_attempt)
+        service_name, service_port, retry, delay, print_attempt
+    )
     if not is_db_connected:
-        print("Connection to {service_name}:{service_port} timed out".format(
-            service_name=service_name,
-            service_port=service_port,
-        ))
+        print(
+            "Connection to {service_name}:{service_port} timed out".format(
+                service_name=service_name,
+                service_port=service_port,
+            )
+        )
         exit(1)
@@ -71,14 +67,13 @@ def check_service(
 def check_redis_queue(retry=10, delay=3, print_attempt=True):
     check_redis_queue = False
     config = get_config()
-    redis_queue_url = urlparse(config.get(REDIS_QUEUE_KEY, "redis://redis-queue:6379")).netloc
+    redis_queue_url = urlparse(
+        config.get(REDIS_QUEUE_KEY, "redis://redis-queue:6379")
+    ).netloc
     redis_queue, redis_queue_port = redis_queue_url.split(":")
     check_redis_queue = check_host(
-        redis_queue,
-        redis_queue_port,
-        retry,
-        delay,
-        print_attempt)
+        redis_queue, redis_queue_port, retry, delay, print_attempt
+    )
     if not check_redis_queue:
         print("Connection to redis queue timed out")
         exit(1)
@@ -88,14 +83,13 @@ def check_redis_queue(retry=10, delay=3, print_attempt=True):
 def check_redis_cache(retry=10, delay=3, print_attempt=True):
     check_redis_cache = False
     config = get_config()
-    redis_cache_url = urlparse(config.get(REDIS_CACHE_KEY, "redis://redis-cache:6379")).netloc
+    redis_cache_url = urlparse(
+        config.get(REDIS_CACHE_KEY, "redis://redis-cache:6379")
+    ).netloc
     redis_cache, redis_cache_port = redis_cache_url.split(":")
     check_redis_cache = check_host(
-        redis_cache,
-        redis_cache_port,
-        retry,
-        delay,
-        print_attempt)
+        redis_cache, redis_cache_port, retry, delay, print_attempt
+    )
     if not check_redis_cache:
         print("Connection to redis cache timed out")
         exit(1)
@@ -105,14 +99,13 @@ def check_redis_cache(retry=10, delay=3, print_attempt=True):
 def check_redis_socketio(retry=10, delay=3, print_attempt=True):
     check_redis_socketio = False
     config = get_config()
-    redis_socketio_url = urlparse(config.get(REDIS_SOCKETIO_KEY, "redis://redis-socketio:6379")).netloc
+    redis_socketio_url = urlparse(
+        config.get(REDIS_SOCKETIO_KEY, "redis://redis-socketio:6379")
+    ).netloc
     redis_socketio, redis_socketio_port = redis_socketio_url.split(":")
     check_redis_socketio = check_host(
-        redis_socketio,
-        redis_socketio_port,
-        retry,
-        delay,
-        print_attempt)
+        redis_socketio, redis_socketio_port, retry, delay, print_attempt
+    )
     if not check_redis_socketio:
         print("Connection to redis socketio timed out")
         exit(1)
@@ -123,7 +116,7 @@ def main():
     check_redis_queue()
     check_redis_cache()
     check_redis_socketio()
-    print('Connections OK')
+    print("Connections OK")


 if __name__ == "__main__":

View File

@@ -1,13 +1,13 @@
-REDIS_QUEUE_KEY = 'redis_queue'
-REDIS_CACHE_KEY = 'redis_cache'
-REDIS_SOCKETIO_KEY = 'redis_socketio'
-DB_HOST_KEY = 'db_host'
-DB_PORT_KEY = 'db_port'
+REDIS_QUEUE_KEY = "redis_queue"
+REDIS_CACHE_KEY = "redis_cache"
+REDIS_SOCKETIO_KEY = "redis_socketio"
+DB_HOST_KEY = "db_host"
+DB_PORT_KEY = "db_port"
 DB_PORT = 3306
-APP_VERSIONS_JSON_FILE = 'app_versions.json'
-APPS_TXT_FILE = 'apps.txt'
-COMMON_SITE_CONFIG_FILE = 'common_site_config.json'
+APP_VERSIONS_JSON_FILE = "app_versions.json"
+APPS_TXT_FILE = "apps.txt"
+COMMON_SITE_CONFIG_FILE = "common_site_config.json"
 DATE_FORMAT = "%Y%m%d_%H%M%S"
-RDS_DB = 'rds_db'
+RDS_DB = "rds_db"
 RDS_PRIVILEGES = "SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, INDEX, ALTER, CREATE TEMPORARY TABLES, CREATE VIEW, EVENT, TRIGGER, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, EXECUTE, LOCK TABLES"
-ARCHIVE_SITES_PATH = '/home/frappe/frappe-bench/sites/archive_sites'
+ARCHIVE_SITES_PATH = "/home/frappe/frappe-bench/sites/archive_sites"

View File

@@ -11,10 +11,10 @@ from check_connection import (
 def parse_args():
     parser = argparse.ArgumentParser()
     parser.add_argument(
-        '-p',
-        '--ping-service',
-        dest='ping_services',
-        action='append',
+        "-p",
+        "--ping-service",
+        dest="ping_services",
+        action="append",
         type=str,
         help='list of services to ping, e.g. doctor -p "postgres:5432" --ping-service "mariadb:3306"',
     )
@@ -33,15 +33,15 @@ def main():
     check_redis_socketio(retry=1, delay=0, print_attempt=False)
     print("Redis SocketIO Connected")

-    if(args.ping_services):
+    if args.ping_services:
         for service in args.ping_services:
             service_name = None
             service_port = None

             try:
-                service_name, service_port = service.split(':')
+                service_name, service_port = service.split(":")
             except ValueError:
-                print('Service should be in format host:port, e.g postgres:5432')
+                print("Service should be in format host:port, e.g postgres:5432")
                 exit(1)
             check_service(

View File

@@ -1,2 +1,3 @@
 import gevent.monkey
+
 gevent.monkey.patch_all()

View File

@@ -27,11 +27,12 @@ def migrate_sites(maintenance_mode=False):
         set_maintenance_mode(True)

     for site in sites:
-        print('Migrating', site)
+        print("Migrating", site)
         frappe.init(site=site)
         frappe.connect()
         try:
             from frappe.migrate import migrate
+
             migrate()
         finally:
             frappe.destroy()

View File

@@ -24,33 +24,43 @@ except ImportError:
 def main():
     config = get_config()
-    db_type = 'mariadb'
-    db_port = config.get('db_port', 3306)
-    db_host = config.get('db_host')
-    site_name = os.environ.get("SITE_NAME", 'site1.localhost')
-    db_root_username = os.environ.get("DB_ROOT_USER", 'root')
-    mariadb_root_password = get_password("MYSQL_ROOT_PASSWORD", 'admin')
+    db_type = "mariadb"
+    db_port = config.get("db_port", 3306)
+    db_host = config.get("db_host")
+    site_name = os.environ.get("SITE_NAME", "site1.localhost")
+    db_root_username = os.environ.get("DB_ROOT_USER", "root")
+    mariadb_root_password = get_password("MYSQL_ROOT_PASSWORD", "admin")
     postgres_root_password = get_password("POSTGRES_PASSWORD")
     db_root_password = mariadb_root_password
     if postgres_root_password:
-        db_type = 'postgres'
+        db_type = "postgres"
         db_host = os.environ.get("POSTGRES_HOST")
         db_port = 5432
         db_root_password = postgres_root_password
         if not db_host:
-            db_host = config.get('db_host')
-            print('Environment variable POSTGRES_HOST not found.')
-            print('Using db_host from common_site_config.json')
+            db_host = config.get("db_host")
+            print("Environment variable POSTGRES_HOST not found.")
+            print("Using db_host from common_site_config.json")

     sites_path = os.getcwd()
     common_site_config_path = os.path.join(sites_path, COMMON_SITE_CONFIG_FILE)
-    update_site_config("root_login", db_root_username, validate = False, site_config_path = common_site_config_path)
-    update_site_config("root_password", db_root_password, validate = False, site_config_path = common_site_config_path)
+    update_site_config(
+        "root_login",
+        db_root_username,
+        validate=False,
+        site_config_path=common_site_config_path,
+    )
+    update_site_config(
+        "root_password",
+        db_root_password,
+        validate=False,
+        site_config_path=common_site_config_path,
+    )

     force = True if os.environ.get("FORCE", None) else False
     install_apps = os.environ.get("INSTALL_APPS", None)
-    install_apps = install_apps.split(',') if install_apps else []
+    install_apps = install_apps.split(",") if install_apps else []
     frappe.init(site_name, new_site=True)

     if semantic_version.Version(frappe.__version__).major > 11:
@@ -59,7 +69,7 @@ def main():
             site_name,
             mariadb_root_username=db_root_username,
             mariadb_root_password=db_root_password,
-            admin_password=get_password("ADMIN_PASSWORD", 'admin'),
+            admin_password=get_password("ADMIN_PASSWORD", "admin"),
             verbose=True,
             install_apps=install_apps,
             source_sql=None,
@@ -75,7 +85,7 @@ def main():
             site_name,
             mariadb_root_username=db_root_username,
             mariadb_root_password=db_root_password,
-            admin_password=get_password("ADMIN_PASSWORD", 'admin'),
+            admin_password=get_password("ADMIN_PASSWORD", "admin"),
             verbose=True,
             install_apps=install_apps,
             source_sql=None,
@@ -83,16 +93,23 @@ def main():
             reinstall=False,
         )

     if db_type == "mariadb":
         site_config = get_site_config(site_name)
-        db_name = site_config.get('db_name')
-        db_password = site_config.get('db_password')
+        db_name = site_config.get("db_name")
+        db_password = site_config.get("db_password")

-        mysql_command = ["mysql", f"-h{db_host}", f"-u{db_root_username}", f"-p{mariadb_root_password}", "-e"]
+        mysql_command = [
+            "mysql",
+            f"-h{db_host}",
+            f"-u{db_root_username}",
+            f"-p{mariadb_root_password}",
+            "-e",
+        ]

         # Drop User if exists
-        command = mysql_command + [f"DROP USER IF EXISTS '{db_name}'; FLUSH PRIVILEGES;"]
+        command = mysql_command + [
+            f"DROP USER IF EXISTS '{db_name}'; FLUSH PRIVILEGES;"
+        ]
         run_command(command)

         # Grant permission to database and set password
@@ -102,10 +119,12 @@ def main():
         if config.get(RDS_DB) or site_config.get(RDS_DB):
             grant_privileges = RDS_PRIVILEGES

-        command = mysql_command + [f"\
+        command = mysql_command + [
+            f"\
 CREATE USER IF NOT EXISTS '{db_name}'@'%' IDENTIFIED BY '{db_password}'; \
 GRANT {grant_privileges} ON `{db_name}`.* TO '{db_name}'@'%'; \
-FLUSH PRIVILEGES;"]
+FLUSH PRIVILEGES;"
+        ]
         run_command(command)

     if frappe.redis_server:

View File

@@ -18,7 +18,7 @@ def get_file_ext():
         "database": "-database.sql.gz",
         "private_files": "-private-files.tar",
         "public_files": "-files.tar",
-        "site_config": "-site_config_backup.json"
+        "site_config": "-site_config_backup.json",
     }
@@ -31,19 +31,26 @@ def get_backup_details(sitename):
     if os.path.exists(site_backup_path):
         for filetype, ext in file_ext.items():
-            site_slug = sitename.replace('.', '_')
-            pattern = site_backup_path + '*-' + site_slug + ext
+            site_slug = sitename.replace(".", "_")
+            pattern = site_backup_path + "*-" + site_slug + ext
             backup_files = list(filter(os.path.isfile, glob(pattern)))

             if len(backup_files) > 0:
-                backup_files.sort(key=lambda file: os.stat(os.path.join(site_backup_path, file)).st_ctime)
-                backup_date = datetime.datetime.strptime(time.ctime(os.path.getmtime(backup_files[0])), "%a %b %d %H:%M:%S %Y")
+                backup_files.sort(
+                    key=lambda file: os.stat(
+                        os.path.join(site_backup_path, file)
+                    ).st_ctime
+                )
+                backup_date = datetime.datetime.strptime(
+                    time.ctime(os.path.getmtime(backup_files[0])),
+                    "%a %b %d %H:%M:%S %Y",
+                )
                 backup_details[filetype] = {
                     "sitename": sitename,
                     "file_size_in_bytes": os.stat(backup_files[-1]).st_size,
                     "file_path": os.path.abspath(backup_files[-1]),
                     "filename": os.path.basename(backup_files[-1]),
-                    "backup_date": backup_date.date().strftime("%Y-%m-%d %H:%M:%S")
+                    "backup_date": backup_date.date().strftime("%Y-%m-%d %H:%M:%S"),
                 }

     return backup_details
@@ -54,31 +61,34 @@ def delete_old_backups(limit, bucket, site_name):
     all_backup_dates = list()
     backup_limit = int(limit)
     check_s3_environment_variables()
-    bucket_dir = os.environ.get('BUCKET_DIR')
+    bucket_dir = os.environ.get("BUCKET_DIR")
     oldest_backup_date = None

     s3 = boto3.resource(
-        's3',
-        region_name=os.environ.get('REGION'),
-        aws_access_key_id=os.environ.get('ACCESS_KEY_ID'),
-        aws_secret_access_key=os.environ.get('SECRET_ACCESS_KEY'),
-        endpoint_url=os.environ.get('ENDPOINT_URL')
+        "s3",
+        region_name=os.environ.get("REGION"),
+        aws_access_key_id=os.environ.get("ACCESS_KEY_ID"),
+        aws_secret_access_key=os.environ.get("SECRET_ACCESS_KEY"),
+        endpoint_url=os.environ.get("ENDPOINT_URL"),
     )
     bucket = s3.Bucket(bucket)

-    objects = bucket.meta.client.list_objects_v2(
-        Bucket=bucket.name,
-        Delimiter='/')
+    objects = bucket.meta.client.list_objects_v2(Bucket=bucket.name, Delimiter="/")

     if objects:
-        for obj in objects.get('CommonPrefixes'):
-            if obj.get('Prefix') == bucket_dir + '/':
-                for backup_obj in bucket.objects.filter(Prefix=obj.get('Prefix')):
+        for obj in objects.get("CommonPrefixes"):
+            if obj.get("Prefix") == bucket_dir + "/":
+                for backup_obj in bucket.objects.filter(Prefix=obj.get("Prefix")):
                     if backup_obj.get()["ContentType"] == "application/x-directory":
                         continue
                     try:
                         # backup_obj.key is bucket_dir/site/date_time/backupfile.extension
-                        bucket_dir, site_slug, date_time, backupfile = backup_obj.key.split('/')
+                        (
+                            bucket_dir,
+                            site_slug,
+                            date_time,
+                            backupfile,
+                        ) = backup_obj.key.split("/")
                         date_time_object = datetime.datetime.strptime(
                             date_time, DATE_FORMAT
                         )
@@ -98,7 +108,7 @@ def delete_old_backups(limit, bucket, site_name):
     for backup in all_backups:
         try:
             # backup is bucket_dir/site/date_time/backupfile.extension
-            backup_dir, site_slug, backup_dt_string, filename = backup.split('/')
+            backup_dir, site_slug, backup_dt_string, filename = backup.split("/")
             backup_datetime = datetime.datetime.strptime(
                 backup_dt_string, DATE_FORMAT
             )
@@ -113,7 +123,7 @@ def delete_old_backups(limit, bucket, site_name):
         for obj in bucket.objects.filter(Prefix=oldest_backup):
             # delete all keys that are inside the oldest_backup
             if bucket_dir in obj.key:
-                print('Deleteing ' + obj.key)
+                print("Deleteing " + obj.key)
                 s3.Object(bucket.name, obj.key).delete()
@@ -124,31 +134,52 @@ def main():
     for site in sites:
         details = get_backup_details(site)
-        db_file = details.get('database', {}).get('file_path')
-        folder = os.environ.get('BUCKET_DIR') + '/' + site + '/'
+        db_file = details.get("database", {}).get("file_path")
+        folder = os.environ.get("BUCKET_DIR") + "/" + site + "/"
         if db_file:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(db_file)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(db_file)[:15]
+                + "/"
+            )
             upload_file_to_s3(db_file, folder, conn, bucket)

             # Archive site_config.json
-            site_config_file = details.get('site_config', {}).get('file_path')
+            site_config_file = details.get("site_config", {}).get("file_path")
             if not site_config_file:
-                site_config_file = os.path.join(os.getcwd(), site, 'site_config.json')
+                site_config_file = os.path.join(os.getcwd(), site, "site_config.json")
             upload_file_to_s3(site_config_file, folder, conn, bucket)

-        public_files = details.get('public_files', {}).get('file_path')
+        public_files = details.get("public_files", {}).get("file_path")
         if public_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(public_files)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(public_files)[:15]
+                + "/"
+            )
             upload_file_to_s3(public_files, folder, conn, bucket)

-        private_files = details.get('private_files', {}).get('file_path')
+        private_files = details.get("private_files", {}).get("file_path")
         if private_files:
-            folder = os.environ.get('BUCKET_DIR') + '/' + site + '/' + os.path.basename(private_files)[:15] + '/'
+            folder = (
+                os.environ.get("BUCKET_DIR")
+                + "/"
+                + site
+                + "/"
+                + os.path.basename(private_files)[:15]
+                + "/"
+            )
             upload_file_to_s3(private_files, folder, conn, bucket)

-        delete_old_backups(os.environ.get('BACKUP_LIMIT', '3'), bucket, site)
+        delete_old_backups(os.environ.get("BACKUP_LIMIT", "3"), bucket, site)

-    print('push-backup complete')
+    print("push-backup complete")
     exit(0)

View File

@@ -10,7 +10,7 @@ from frappe.installer import (
     make_conf,
     get_conf_params,
     make_site_dirs,
-    update_site_config
+    update_site_config,
 )
 from constants import COMMON_SITE_CONFIG_FILE, DATE_FORMAT, RDS_DB, RDS_PRIVILEGES
 from utils import (
@@ -25,69 +25,64 @@ from utils import (
 def get_backup_dir():
-    return os.path.join(
-        os.path.expanduser('~'),
-        'backups'
-    )
+    return os.path.join(os.path.expanduser("~"), "backups")


 def decompress_db(database_file, site):
     command = ["gunzip", "-c", database_file]
     with open(database_file.replace(".gz", ""), "w") as db_file:
-        print(f'Extract Database GZip for site {site}')
+        print(f"Extract Database GZip for site {site}")
         run_command(command, stdout=db_file)


 def restore_database(files_base, site_config_path, site):
     # restore database
-    database_file = files_base + '-database.sql.gz'
+    database_file = files_base + "-database.sql.gz"
     decompress_db(database_file, site)
     config = get_config()

     # Set db_type if it exists in backup site_config.json
-    set_key_in_site_config('db_type', site, site_config_path)
+    set_key_in_site_config("db_type", site, site_config_path)
     # Set db_host if it exists in backup site_config.json
-    set_key_in_site_config('db_host', site, site_config_path)
+    set_key_in_site_config("db_host", site, site_config_path)
     # Set db_port if it exists in backup site_config.json
-    set_key_in_site_config('db_port', site, site_config_path)
+    set_key_in_site_config("db_port", site, site_config_path)

     # get updated site_config
     site_config = get_site_config(site)

     # if no db_type exists, default to mariadb
-    db_type = site_config.get('db_type', 'mariadb')
+    db_type = site_config.get("db_type", "mariadb")
     is_database_restored = False

-    if db_type == 'mariadb':
+    if db_type == "mariadb":
         restore_mariadb(
-            config=config,
-            site_config=site_config,
-            database_file=database_file)
+            config=config, site_config=site_config, database_file=database_file
+        )
         is_database_restored = True
-    elif db_type == 'postgres':
+    elif db_type == "postgres":
         restore_postgres(
-            config=config,
-            site_config=site_config,
-            database_file=database_file)
+            config=config, site_config=site_config, database_file=database_file
+        )
         is_database_restored = True

     if is_database_restored:
         # Set encryption_key if it exists in backup site_config.json
-        set_key_in_site_config('encryption_key', site, site_config_path)
+        set_key_in_site_config("encryption_key", site, site_config_path)


 def restore_files(files_base):
-    public_files = files_base + '-files.tar'
+    public_files = files_base + "-files.tar"
     # extract tar
     public_tar = tarfile.open(public_files)
-    print(f'Extracting {public_files}')
+    print(f"Extracting {public_files}")
     public_tar.extractall()


 def restore_private_files(files_base):
-    private_files = files_base + '-private-files.tar'
+    private_files = files_base + "-private-files.tar"
     private_tar = tarfile.open(private_files)
-    print(f'Extracting {private_files}')
+    print(f"Extracting {private_files}")
     private_tar.extractall()
@@ -96,15 +91,15 @@ def pull_backup_from_s3():
     # https://stackoverflow.com/a/54672690
     s3 = boto3.resource(
-        's3',
-        region_name=os.environ.get('REGION'),
-        aws_access_key_id=os.environ.get('ACCESS_KEY_ID'),
-        aws_secret_access_key=os.environ.get('SECRET_ACCESS_KEY'),
-        endpoint_url=os.environ.get('ENDPOINT_URL')
+        "s3",
+        region_name=os.environ.get("REGION"),
+        aws_access_key_id=os.environ.get("ACCESS_KEY_ID"),
+        aws_secret_access_key=os.environ.get("SECRET_ACCESS_KEY"),
+        endpoint_url=os.environ.get("ENDPOINT_URL"),
     )

-    bucket_dir = os.environ.get('BUCKET_DIR')
-    bucket_name = os.environ.get('BUCKET_NAME')
+    bucket_dir = os.environ.get("BUCKET_DIR")
+    bucket_name = os.environ.get("BUCKET_NAME")
     bucket = s3.Bucket(bucket_name)

     # Change directory to /home/frappe/backups
@@ -118,10 +113,10 @@ def pull_backup_from_s3():
     for obj in bucket.objects.filter(Prefix=bucket_dir):
         if obj.get()["ContentType"] == "application/x-directory":
             continue
-        backup_file = obj.key.replace(os.path.join(bucket_dir, ''), '')
+        backup_file = obj.key.replace(os.path.join(bucket_dir, ""), "")
         backup_files.append(backup_file)
-        site_name, timestamp, backup_type = backup_file.split('/')
-        site_timestamp = site_name + '/' + timestamp
+        site_name, timestamp, backup_type = backup_file.split("/")
+        site_timestamp = site_name + "/" + timestamp
         sites.add(site_name)
         site_timestamps.add(site_timestamp)
@@ -129,13 +124,11 @@ def pull_backup_from_s3():
     for site in sites:
         backup_timestamps = []
         for site_timestamp in site_timestamps:
-            site_name, timestamp = site_timestamp.split('/')
+            site_name, timestamp = site_timestamp.split("/")
             if site == site_name:
-                timestamp_datetime = datetime.datetime.strptime(
-                    timestamp, DATE_FORMAT
-                )
+                timestamp_datetime = datetime.datetime.strptime(timestamp, DATE_FORMAT)
                 backup_timestamps.append(timestamp)
-        download_backups.append(site + '/' + max(backup_timestamps))
+        download_backups.append(site + "/" + max(backup_timestamps))

     # Only download latest backups
     for backup_file in backup_files:
@@ -143,21 +136,21 @@ def pull_backup_from_s3():
             if backup in backup_file:
                 if not os.path.exists(os.path.dirname(backup_file)):
                     os.makedirs(os.path.dirname(backup_file))
-                print(f'Downloading {backup_file}')
-                bucket.download_file(bucket_dir + '/' + backup_file, backup_file)
+                print(f"Downloading {backup_file}")
+                bucket.download_file(bucket_dir + "/" + backup_file, backup_file)

-    os.chdir(os.path.join(os.path.expanduser('~'), 'frappe-bench', 'sites'))
+    os.chdir(os.path.join(os.path.expanduser("~"), "frappe-bench", "sites"))


 def restore_postgres(config, site_config, database_file):
     # common config
     common_site_config_path = os.path.join(os.getcwd(), COMMON_SITE_CONFIG_FILE)

-    db_root_user = config.get('root_login')
+    db_root_user = config.get("root_login")
     if not db_root_user:
-        postgres_user = os.environ.get('DB_ROOT_USER')
+        postgres_user = os.environ.get("DB_ROOT_USER")
         if not postgres_user:
-            print('Variable DB_ROOT_USER not set')
+            print("Variable DB_ROOT_USER not set")
             exit(1)
         db_root_user = postgres_user
@@ -165,13 +158,14 @@ def restore_postgres(config, site_config, database_file):
         "root_login",
         db_root_user,
         validate=False,
-        site_config_path=common_site_config_path)
+        site_config_path=common_site_config_path,
+    )

-    db_root_password = config.get('root_password')
+    db_root_password = config.get("root_password")
     if not db_root_password:
-        root_password = get_password('POSTGRES_PASSWORD')
+        root_password = get_password("POSTGRES_PASSWORD")
         if not root_password:
-            print('Variable POSTGRES_PASSWORD not set')
+            print("Variable POSTGRES_PASSWORD not set")
             exit(1)
         db_root_password = root_password
@@ -179,53 +173,72 @@ def restore_postgres(config, site_config, database_file):
         "root_password",
         db_root_password,
         validate=False,
-        site_config_path=common_site_config_path)
+        site_config_path=common_site_config_path,
+    )

     # site config
-    db_host = site_config.get('db_host')
-    db_port = site_config.get('db_port', 5432)
-    db_name = site_config.get('db_name')
-    db_password = site_config.get('db_password')
+    db_host = site_config.get("db_host")
+    db_port = site_config.get("db_port", 5432)
+    db_name = site_config.get("db_name")
+    db_password = site_config.get("db_password")

     psql_command = ["psql"]
     psql_uri = f"postgres://{db_root_user}:{db_root_password}@{db_host}:{db_port}"

-    print('Restoring PostgreSQL')
-    run_command(psql_command + [psql_uri, "-c", f"DROP DATABASE IF EXISTS \"{db_name}\""])
+    print("Restoring PostgreSQL")
+    run_command(psql_command + [psql_uri, "-c", f'DROP DATABASE IF EXISTS "{db_name}"'])
     run_command(psql_command + [psql_uri, "-c", f"DROP USER IF EXISTS {db_name}"])
-    run_command(psql_command + [psql_uri, "-c", f"CREATE DATABASE \"{db_name}\""])
-    run_command(psql_command + [psql_uri, "-c", f"CREATE user {db_name} password '{db_password}'"])
-    run_command(psql_command + [psql_uri, "-c", f"GRANT ALL PRIVILEGES ON DATABASE \"{db_name}\" TO {db_name}"])
-    with open(database_file.replace('.gz', '')) as db_file:
+    run_command(psql_command + [psql_uri, "-c", f'CREATE DATABASE "{db_name}"'])
+    run_command(
+        psql_command
+        + [psql_uri, "-c", f"CREATE user {db_name} password '{db_password}'"]
+    )
+    run_command(
+        psql_command
+        + [psql_uri, "-c", f'GRANT ALL PRIVILEGES ON DATABASE "{db_name}" TO {db_name}']
+    )
+    with open(database_file.replace(".gz", "")) as db_file:
         run_command(psql_command + [f"{psql_uri}/{db_name}", "<"], stdin=db_file)


 def restore_mariadb(config, site_config, database_file):
-    db_root_password = get_password('MYSQL_ROOT_PASSWORD')
+    db_root_password = get_password("MYSQL_ROOT_PASSWORD")
     if not db_root_password:
-        print('Variable MYSQL_ROOT_PASSWORD not set')
+        print("Variable MYSQL_ROOT_PASSWORD not set")
         exit(1)

-    db_root_user = os.environ.get("DB_ROOT_USER", 'root')
+    db_root_user = os.environ.get("DB_ROOT_USER", "root")

-    db_host = site_config.get('db_host', config.get('db_host'))
-    db_port = site_config.get('db_port', config.get('db_port', 3306))
-    db_name = site_config.get('db_name')
-    db_password = site_config.get('db_password')
+    db_host = site_config.get("db_host", config.get("db_host"))
+    db_port = site_config.get("db_port", config.get("db_port", 3306))
+    db_name = site_config.get("db_name")
+    db_password = site_config.get("db_password")

     # mysql command prefix
-    mysql_command = ["mysql", f"-u{db_root_user}", f"-h{db_host}", f"-p{db_root_password}", f"-P{db_port}"]
+    mysql_command = [
+        "mysql",
+        f"-u{db_root_user}",
+        f"-h{db_host}",
+        f"-p{db_root_password}",
+        f"-P{db_port}",
+    ]

     # drop db if exists for clean restore
     drop_database = mysql_command + ["-e", f"DROP DATABASE IF EXISTS `{db_name}`;"]
     run_command(drop_database)

     # create db
-    create_database = mysql_command + ["-e", f"CREATE DATABASE IF NOT EXISTS `{db_name}`;"]
+    create_database = mysql_command + [
+        "-e",
+        f"CREATE DATABASE IF NOT EXISTS `{db_name}`;",
+    ]
     run_command(create_database)

     # create user
-    create_user = mysql_command + ["-e", f"CREATE USER IF NOT EXISTS '{db_name}'@'%' IDENTIFIED BY '{db_password}'; FLUSH PRIVILEGES;"]
+    create_user = mysql_command + [
+        "-e",
+        f"CREATE USER IF NOT EXISTS '{db_name}'@'%' IDENTIFIED BY '{db_password}'; FLUSH PRIVILEGES;",
+    ]
     run_command(create_user)

     # grant db privileges to user
@@ -236,11 +249,14 @@ def restore_mariadb(config, site_config, database_file):
     if config.get(RDS_DB) or site_config.get(RDS_DB):
         grant_privileges = RDS_PRIVILEGES

-    grant_privileges_command = mysql_command + ["-e", f"GRANT {grant_privileges} ON `{db_name}`.* TO '{db_name}'@'%' IDENTIFIED BY '{db_password}'; FLUSH PRIVILEGES;"]
+    grant_privileges_command = mysql_command + [
+        "-e",
+        f"GRANT {grant_privileges} ON `{db_name}`.* TO '{db_name}'@'%' IDENTIFIED BY '{db_password}'; FLUSH PRIVILEGES;",
+    ]
     run_command(grant_privileges_command)

-    print('Restoring MariaDB')
-    with open(database_file.replace('.gz', '')) as db_file:
+    print("Restoring MariaDB")
+    with open(database_file.replace(".gz", "")) as db_file:
         run_command(mysql_command + [f"{db_name}"], stdin=db_file)
@@ -251,35 +267,38 @@ def main():
         pull_backup_from_s3()

     for site in list_directories(backup_dir):
-        site_slug = site.replace('.', '_')
-        backups = [datetime.datetime.strptime(backup, DATE_FORMAT) for backup in list_directories(os.path.join(backup_dir, site))]
+        site_slug = site.replace(".", "_")
+        backups = [
+            datetime.datetime.strptime(backup, DATE_FORMAT)
+            for backup in list_directories(os.path.join(backup_dir, site))
+        ]
         latest_backup = max(backups).strftime(DATE_FORMAT)
-        files_base = os.path.join(backup_dir, site, latest_backup, '')
-        files_base += latest_backup + '-' + site_slug
-        site_config_path = files_base + '-site_config_backup.json'
+        files_base = os.path.join(backup_dir, site, latest_backup, "")
+        files_base += latest_backup + "-" + site_slug
+        site_config_path = files_base + "-site_config_backup.json"
         if not os.path.exists(site_config_path):
-            site_config_path = os.path.join(backup_dir, site, 'site_config.json')
+            site_config_path = os.path.join(backup_dir, site, "site_config.json")
         if site in get_sites():
-            print(f'Overwrite site {site}')
+            print(f"Overwrite site {site}")
             restore_database(files_base, site_config_path, site)
             restore_private_files(files_base)
             restore_files(files_base)
         else:
             site_config = get_conf_params(
-                db_name='_' + hashlib.sha1(site.encode()).hexdigest()[:16],
-                db_password=random_string(16)
+                db_name="_" + hashlib.sha1(site.encode()).hexdigest()[:16],
+                db_password=random_string(16),
            )

             frappe.local.site = site
             frappe.local.sites_path = os.getcwd()
-            frappe.local.site_path = os.getcwd() + '/' + site
+            frappe.local.site_path = os.getcwd() + "/" + site
             make_conf(
-                db_name=site_config.get('db_name'),
-                db_password=site_config.get('db_password'),
+                db_name=site_config.get("db_name"),
+                db_password=site_config.get("db_password"),
             )
             make_site_dirs()

-            print(f'Create site {site}')
+            print(f"Create site {site}")
             restore_database(files_base, site_config_path, site)
             restore_private_files(files_base)
             restore_files(files_base)

View File

@@ -5,11 +5,8 @@ import boto3
 import git
 from frappe.installer import update_site_config
-from constants import (
-    APP_VERSIONS_JSON_FILE,
-    APPS_TXT_FILE,
-    COMMON_SITE_CONFIG_FILE
-)
+from constants import APP_VERSIONS_JSON_FILE, APPS_TXT_FILE, COMMON_SITE_CONFIG_FILE


 def run_command(command, stdout=None, stdin=None, stderr=None):
     stdout = stdout or subprocess.PIPE
@@ -26,7 +23,7 @@ def run_command(command, stdout=None, stdin=None, stderr=None):
 def save_version_file(versions):
-    with open(APP_VERSIONS_JSON_FILE, 'w') as f:
+    with open(APP_VERSIONS_JSON_FILE, "w") as f:
         return json.dump(versions, f, indent=1, sort_keys=True)
@@ -58,10 +55,10 @@ def get_container_versions(apps):
         pass

     try:
-        path = os.path.join('..', 'apps', app)
+        path = os.path.join("..", "apps", app)
         repo = git.Repo(path)
         commit_hash = repo.head.object.hexsha
-        versions.update({app+'_git_hash': commit_hash})
+        versions.update({app + "_git_hash": commit_hash})
     except Exception:
         pass
@@ -94,18 +91,22 @@ def get_config():
 def get_site_config(site_name):
     site_config = None
-    with open(f'{site_name}/site_config.json') as site_config_file:
+    with open(f"{site_name}/site_config.json") as site_config_file:
         site_config = json.load(site_config_file)
     return site_config


 def save_config(config):
-    with open(COMMON_SITE_CONFIG_FILE, 'w') as f:
+    with open(COMMON_SITE_CONFIG_FILE, "w") as f:
         return json.dump(config, f, indent=1, sort_keys=True)


 def get_password(env_var, default=None):
-    return os.environ.get(env_var) or get_password_from_secret(f"{env_var}_FILE") or default
+    return (
+        os.environ.get(env_var)
+        or get_password_from_secret(f"{env_var}_FILE")
+        or default
+    )


 def get_password_from_secret(env_var):
@@ -128,14 +129,14 @@ def get_password_from_secret(env_var):
 def get_s3_config():
     check_s3_environment_variables()
-    bucket = os.environ.get('BUCKET_NAME')
+    bucket = os.environ.get("BUCKET_NAME")

     conn = boto3.client(
-        's3',
-        region_name=os.environ.get('REGION'),
-        aws_access_key_id=os.environ.get('ACCESS_KEY_ID'),
-        aws_secret_access_key=os.environ.get('SECRET_ACCESS_KEY'),
-        endpoint_url=os.environ.get('ENDPOINT_URL')
+        "s3",
+        region_name=os.environ.get("REGION"),
+        aws_access_key_id=os.environ.get("ACCESS_KEY_ID"),
+        aws_secret_access_key=os.environ.get("SECRET_ACCESS_KEY"),
+        endpoint_url=os.environ.get("ENDPOINT_URL"),
     )

     return conn, bucket
@@ -173,32 +174,35 @@ def set_key_in_site_config(key, site, site_config_path):
     site_config = get_site_config_from_path(site_config_path)
     value = site_config.get(key)
     if value:
-        print(f'Set {key} in site config for site: {site}')
-        update_site_config(key, value,
-                           site_config_path=os.path.join(os.getcwd(), site, "site_config.json"))
+        print(f"Set {key} in site config for site: {site}")
+        update_site_config(
+            key,
+            value,
+            site_config_path=os.path.join(os.getcwd(), site, "site_config.json"),
+        )


 def check_s3_environment_variables():
-    if 'BUCKET_NAME' not in os.environ:
-        print('Variable BUCKET_NAME not set')
+    if "BUCKET_NAME" not in os.environ:
+        print("Variable BUCKET_NAME not set")
         exit(1)

-    if 'ACCESS_KEY_ID' not in os.environ:
-        print('Variable ACCESS_KEY_ID not set')
+    if "ACCESS_KEY_ID" not in os.environ:
+        print("Variable ACCESS_KEY_ID not set")
         exit(1)

-    if 'SECRET_ACCESS_KEY' not in os.environ:
-        print('Variable SECRET_ACCESS_KEY not set')
+    if "SECRET_ACCESS_KEY" not in os.environ:
+        print("Variable SECRET_ACCESS_KEY not set")
         exit(1)

-    if 'ENDPOINT_URL' not in os.environ:
-        print('Variable ENDPOINT_URL not set')
+    if "ENDPOINT_URL" not in os.environ:
+        print("Variable ENDPOINT_URL not set")
         exit(1)

-    if 'BUCKET_DIR' not in os.environ:
-        print('Variable BUCKET_DIR not set')
+    if "BUCKET_DIR" not in os.environ:
+        print("Variable BUCKET_DIR not set")
         exit(1)

-    if 'REGION' not in os.environ:
-        print('Variable REGION not set')
+    if "REGION" not in os.environ:
+        print("Variable REGION not set")
         exit(1)