Mirror of https://github.com/frappe/bench.git (synced 2024-11-13 16:56:33 +00:00)

Merge pull request #1335 from gavindsouza/pre-commit-files

feat!: Add auto-formatting & linting via pre-commit

Commit 1e0054bc14
.flake8 (new file, 37 lines)
@@ -0,0 +1,37 @@
[flake8]
ignore =
    E121,
    E126,
    E127,
    E128,
    E203,
    E225,
    E226,
    E231,
    E241,
    E251,
    E261,
    E265,
    E302,
    E303,
    E305,
    E402,
    E501,
    E741,
    W291,
    W292,
    W293,
    W391,
    W503,
    W504,
    F403,
    B007,
    B950,
    W191,
    E124, # closing bracket, irritating while writing QB code
    E131, # continuation line unaligned for hanging indent
    E123, # closing bracket does not match indentation of opening bracket's line
    E101, # ensured by use of black
    B009, # allow usage of getattr

max-line-length = 200
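With this configuration in place, the linter can also be run by hand, independently of the commit hooks. A typical invocation (not part of this diff, shown only as standard flake8 usage) would be:

    flake8 --config .flake8 bench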
.pre-commit-config.yaml (new file, 37 lines)
@@ -0,0 +1,37 @@
exclude: '.git'
default_stages: [commit]
fail_fast: false

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.3.0
    hooks:
      - id: trailing-whitespace
        files: "frappe.*"
        exclude: ".*json$|.*txt$|.*csv|.*md|.*svg"
      - id: check-yaml
      - id: check-merge-conflict
      - id: check-ast
      - id: check-json
      - id: check-toml
      - id: check-yaml
      - id: debug-statements

  - repo: https://github.com/asottile/pyupgrade
    rev: v2.34.0
    hooks:
      - id: pyupgrade
        args: ['--py37-plus']

  - repo: https://github.com/adityahase/black
    rev: 9cb0a69f4d0030cdf687eddf314468b39ed54119
    hooks:
      - id: black
        additional_dependencies: ['click==8.0.4']

  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.2
    hooks:
      - id: flake8
        additional_dependencies: ['flake8-bugbear',]
        args: ['--config', '.flake8']
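To activate these hooks locally, a contributor would typically run the standard pre-commit workflow (not part of this diff):

    pip install pre-commit
    pre-commit install
    pre-commit run --all-files

After `pre-commit install`, the configured hooks (trailing-whitespace, check-* hooks, pyupgrade, black, flake8 with flake8-bugbear) run on every `git commit`; `default_stages: [commit]` restricts them to commit time and `fail_fast: false` lets all hooks report even when one of them fails.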
bench/app.py (77 changes)
@@ -11,7 +11,6 @@ import typing
from collections import OrderedDict
from datetime import date
from urllib.parse import urlparse
import os

# imports - third party imports
import click

@@ -21,6 +20,7 @@ from git import Repo
import bench
from bench.exceptions import NotInBenchDirectoryError
from bench.utils import (
UNSET_ARG,
fetch_details_from_tag,
get_available_folder_name,
is_bench_directory,

@@ -29,10 +29,7 @@ from bench.utils import (
log,
run_frappe_cmd,
)
from bench.utils.bench import (
build_assets,
install_python_dev_dependencies,
)
from bench.utils.bench import build_assets, install_python_dev_dependencies
from bench.utils.render import step

if typing.TYPE_CHECKING:

@@ -46,18 +43,18 @@ class AppMeta:
def __init__(self, name: str, branch: str = None, to_clone: bool = True):
"""
name (str): This could look something like
1. https://github.com/frappe/healthcare.git
2. git@github.com:frappe/healthcare.git
3. frappe/healthcare@develop
4. healthcare
5. healthcare@develop, healthcare@v13.12.1
1. https://github.com/frappe/healthcare.git
2. git@github.com:frappe/healthcare.git
3. frappe/healthcare@develop
4. healthcare
5. healthcare@develop, healthcare@v13.12.1

References for Version Identifiers:
* https://www.python.org/dev/peps/pep-0440/#version-specifiers
* https://docs.npmjs.com/about-semantic-versioning

class Healthcare(AppConfig):
dependencies = [{"frappe/erpnext": "~13.17.0"}]
dependencies = [{"frappe/erpnext": "~13.17.0"}]
"""
self.name = name.rstrip("/")
self.remote_server = "github.com"

@@ -76,9 +73,7 @@ class AppMeta:

def setup_details(self):
# fetch meta from installed apps
if self.bench and os.path.exists(
os.path.join(self.bench.name, "apps", self.name)
):
if self.bench and os.path.exists(os.path.join(self.bench.name, "apps", self.name)):
self.mount_path = os.path.join(self.bench.name, "apps", self.name)
self.from_apps = True
self._setup_details_from_mounted_disk()

@@ -98,9 +93,7 @@ class AppMeta:
self._setup_details_from_name_tag()

if self.git_repo:
self.app_name = os.path.basename(
os.path.normpath(self.git_repo.working_tree_dir)
)
self.app_name = os.path.basename(os.path.normpath(self.git_repo.working_tree_dir))
else:
self.app_name = self.repo

@@ -203,7 +196,9 @@ class App(AppMeta):
log(f"App deleted from {active_app_path}")
else:
archived_path = os.path.join("archived", "apps")
archived_name = get_available_folder_name(f"{self.repo}-{date.today()}", archived_path)
archived_name = get_available_folder_name(
f"{self.repo}-{date.today()}", archived_path
)
archived_app_path = os.path.join(archived_path, archived_name)

shutil.move(active_app_path, archived_app_path)

@@ -239,7 +234,7 @@ class App(AppMeta):
verbose=verbose,
skip_assets=skip_assets,
restart_bench=restart_bench,
resolution=self.local_resolution
resolution=self.local_resolution,
)

@step(title="Cloning and installing {repo}", success="App {repo} Installed")

@@ -255,7 +250,7 @@ class App(AppMeta):
from bench.utils.app import get_required_deps, required_apps_from_hooks

if self.on_disk:
required_deps = os.path.join(self.mount_path, self.repo,'hooks.py')
required_deps = os.path.join(self.mount_path, self.repo, "hooks.py")
try:
return required_apps_from_hooks(required_deps, local=True)
except IndexError:

@@ -278,7 +273,6 @@ class App(AppMeta):
)


def make_resolution_plan(app: App, bench: "Bench"):
"""
decide what apps and versions to install and in what order

@@ -303,7 +297,7 @@ def get_excluded_apps(bench_path="."):
try:
with open(os.path.join(bench_path, "sites", "excluded_apps.txt")) as f:
return f.read().strip().split("\n")
except IOError:
except OSError:
return []


@@ -366,7 +360,9 @@ def get_app(
resolution = make_resolution_plan(app, bench)
click.secho("Following apps will be installed", fg="bright_blue")
for idx, app in enumerate(reversed(resolution.values()), start=1):
print(f"{idx}. {app.name} {f'(required by {app.required_by})' if app.required_by else ''}")
print(
f"{idx}. {app.name} {f'(required by {app.required_by})' if app.required_by else ''}"
)

if "frappe" in resolution:
# Todo: Make frappe a terminal dependency for all frappe apps.

@@ -385,7 +381,7 @@ def get_app(
init(
path=bench_path,
frappe_path=frappe_path,
frappe_branch=frappe_branch if frappe_branch else branch,
frappe_branch=frappe_branch or branch,
)
os.chdir(bench_path)
bench_setup = True

@@ -458,22 +454,27 @@ def install_resolved_deps(
installed_branch = bench.apps.states[repo_name]["resolution"]["branch"].strip()
except Exception:
installed_branch = (
subprocess.
check_output("git rev-parse --abbrev-ref HEAD", shell=True, cwd=path_to_app)
subprocess.check_output(
"git rev-parse --abbrev-ref HEAD", shell=True, cwd=path_to_app
)
.decode("utf-8")
.rstrip()
)
)
try:
if app.tag is None:
current_remote = (
subprocess.check_output(f"git config branch.{installed_branch}.remote", shell=True, cwd=path_to_app)
subprocess.check_output(
f"git config branch.{installed_branch}.remote", shell=True, cwd=path_to_app
)
.decode("utf-8")
.rstrip()
)

default_branch = (
subprocess.check_output(
f"git symbolic-ref refs/remotes/{current_remote}/HEAD", shell=True, cwd=path_to_app
f"git symbolic-ref refs/remotes/{current_remote}/HEAD",
shell=True,
cwd=path_to_app,
)
.decode("utf-8")
.rsplit("/")[-1]

@@ -485,7 +486,7 @@ def install_resolved_deps(
except Exception:
is_compatible = False

prefix = 'C' if is_compatible else 'Inc'
prefix = "C" if is_compatible else "Inc"
click.secho(
f"{prefix}ompatible version of {repo_name} is already installed",
fg="green" if is_compatible else "red",

@@ -503,14 +504,15 @@ def install_resolved_deps(

def new_app(app, no_git=None, bench_path="."):
if bench.FRAPPE_VERSION in (0, None):
raise NotInBenchDirectoryError(f"{os.path.realpath(bench_path)} is not a valid bench directory.")
raise NotInBenchDirectoryError(
f"{os.path.realpath(bench_path)} is not a valid bench directory."
)

# For backwards compatibility
app = app.lower().replace(" ", "_").replace("-", "_")
if app[0].isdigit() or "." in app:
click.secho(
"App names cannot start with numbers(digits) or have dot(.) in them",
fg="red"
"App names cannot start with numbers(digits) or have dot(.) in them", fg="red"
)
return

@@ -535,7 +537,7 @@ def install_app(
no_cache=False,
restart_bench=True,
skip_assets=False,
resolution=[]
resolution=UNSET_ARG,
):
import bench.cli as bench_cli
from bench.bench import Bench

@@ -544,6 +546,9 @@ def install_app(
click.secho(install_text, fg="yellow")
logger.log(install_text)

if resolution == UNSET_ARG:
resolution = []

bench = Bench(bench_path)
conf = bench.conf

@@ -553,7 +558,9 @@ def install_app(

app_path = os.path.realpath(os.path.join(bench_path, "apps", app))

bench.run(f"{bench.python} -m pip install {quiet_flag} --upgrade -e {app_path} {cache_flag}")
bench.run(
f"{bench.python} -m pip install {quiet_flag} --upgrade -e {app_path} {cache_flag}"
)

if conf.get("developer_mode"):
install_python_dev_dependencies(apps=app, bench_path=bench_path, verbose=verbose)
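Several of the changes above replace mutable default arguments such as resolution=[] and required: List = [] with the UNSET_ARG sentinel imported from bench.utils, then reset the value inside the function body. A minimal sketch of the pattern (illustrative names only, not the actual bench implementation):

# A module-level sentinel lets the function detect "argument not passed"
# without using a literal [] default, which would be shared across calls.
UNSET_ARG = object()


def install_app(app, resolution=UNSET_ARG):
	if resolution == UNSET_ARG:
		resolution = []  # fresh list per call, never reused between invocations
	resolution.append(app)
	return resolution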
@@ -13,6 +13,7 @@ import bench
from bench.exceptions import AppNotInstalledError, InvalidRemoteException
from bench.config.common_site_config import setup_config
from bench.utils import (
UNSET_ARG,
paths_in_bench,
exec_cmd,
is_bench_directory,

@@ -141,8 +142,7 @@ class Bench(Base, Validator):

@step(title="Reloading Bench Processes", success="Bench Processes Reloaded")
def reload(self, web=False, supervisor=True, systemd=True):
"""If web is True, only web workers are restarted
"""
"""If web is True, only web workers are restarted"""
conf = self.conf

if conf.get("developer_mode"):

@@ -153,15 +153,17 @@ class Bench(Base, Validator):
restart_systemd_processes(bench_path=self.name, web_workers=web)

def get_installed_apps(self) -> List:
"""Returns list of installed apps on bench, not in excluded_apps.txt
"""
"""Returns list of installed apps on bench, not in excluded_apps.txt"""
try:
installed_packages = get_cmd_output(f"{self.python} -m pip freeze", cwd=self.name)
except Exception:
installed_packages = []
is_installed = lambda app: app in installed_packages

return [app for app in self.apps if app not in self.excluded_apps and is_installed(app)]
return [
app
for app in self.apps
if app not in self.excluded_apps and app in installed_packages
]


class BenchApps(MutableSequence):

@@ -174,18 +176,20 @@ class BenchApps(MutableSequence):

def set_states(self):
try:
with open(self.states_path, "r") as f:
with open(self.states_path) as f:
self.states = json.loads(f.read() or "{}")
except FileNotFoundError:
self.states = {}

def update_apps_states(
self,
app_dir: str = None,
app_name: Union[str, None] = None,
branch: Union[str, None] = None,
required: List = [],
self,
app_dir: str = None,
app_name: Union[str, None] = None,
branch: Union[str, None] = None,
required: List = UNSET_ARG,
):
if required == UNSET_ARG:
required = []
if self.apps and not os.path.exists(self.states_path):
# idx according to apps listed in apps.txt (backwards compatibility)
# Keeping frappe as the first app.

@@ -198,13 +202,10 @@ class BenchApps(MutableSequence):
print("Found existing apps updating states...")
for idx, app in enumerate(self.apps, start=1):
self.states[app] = {
"resolution": {
"commit_hash": None,
"branch": None
},
"required": required,
"idx": idx,
"version": get_current_version(app, self.bench.name),
"resolution": {"commit_hash": None, "branch": None},
"required": required,
"idx": idx,
"version": get_current_version(app, self.bench.name),
}

apps_to_remove = []

@@ -224,21 +225,21 @@ class BenchApps(MutableSequence):
app_dir = os.path.join(self.apps_path, app_dir)
if not branch:
branch = (
subprocess
.check_output("git rev-parse --abbrev-ref HEAD", shell=True, cwd=app_dir)
.decode("utf-8")
.rstrip()
)
subprocess.check_output("git rev-parse --abbrev-ref HEAD", shell=True, cwd=app_dir)
.decode("utf-8")
.rstrip()
)

commit_hash = subprocess.check_output(f"git rev-parse {branch}", shell=True, cwd=app_dir).decode("utf-8").rstrip()
commit_hash = (
subprocess.check_output(f"git rev-parse {branch}", shell=True, cwd=app_dir)
.decode("utf-8")
.rstrip()
)

self.states[app_name] = {
"resolution": {
"commit_hash":commit_hash,
"branch": branch
},
"required":required,
"idx":len(self.states) + 1,
"resolution": {"commit_hash": commit_hash, "branch": branch},
"required": required,
"idx": len(self.states) + 1,
"version": version,
}

@@ -250,18 +251,17 @@ class BenchApps(MutableSequence):
app_name: Union[str, None] = None,
app_dir: Union[str, None] = None,
branch: Union[str, None] = None,
required: List = []
required: List = UNSET_ARG,
):
if required == UNSET_ARG:
required = []
self.initialize_apps()

with open(self.bench.apps_txt, "w") as f:
f.write("\n".join(self.apps))

self.update_apps_states(
app_name=app_name,
app_dir=app_dir,
branch=branch,
required=required
app_name=app_name, app_dir=app_dir, branch=branch, required=required
)

def initialize_apps(self):

@@ -277,17 +277,17 @@ class BenchApps(MutableSequence):
self.apps = []

def __getitem__(self, key):
""" retrieves an item by its index, key"""
"""retrieves an item by its index, key"""
return self.apps[key]

def __setitem__(self, key, value):
""" set the item at index, key, to value """
"""set the item at index, key, to value"""
# should probably not be allowed
# self.apps[key] = value
raise NotImplementedError

def __delitem__(self, key):
""" removes the item at index, key """
"""removes the item at index, key"""
# TODO: uninstall and delete app from bench
del self.apps[key]

@@ -295,7 +295,7 @@ class BenchApps(MutableSequence):
return len(self.apps)

def insert(self, key, value):
""" add an item, value, at index, key. """
"""add an item, value, at index, key."""
# TODO: fetch and install app to bench
self.apps.insert(key, value)

@@ -382,8 +382,7 @@ class BenchSetup(Base):

@step(title="Updating pip", success="Updated pip")
def pip(self, verbose=False):
"""Updates env pip; assumes that env is setup
"""
"""Updates env pip; assumes that env is setup"""
import bench.cli

verbose = bench.cli.verbose or verbose

@@ -428,8 +427,7 @@ class BenchSetup(Base):

@job(title="Setting Up Bench Dependencies", success="Bench Dependencies Set Up")
def requirements(self, apps=None):
"""Install and upgrade specified / all installed apps on given Bench
"""
"""Install and upgrade specified / all installed apps on given Bench"""
from bench.app import App

apps = apps or self.bench.apps

@@ -445,8 +443,7 @@ class BenchSetup(Base):
)

def python(self, apps=None):
"""Install and upgrade Python dependencies for specified / all installed apps on given Bench
"""
"""Install and upgrade Python dependencies for specified / all installed apps on given Bench"""
import bench.cli

apps = apps or self.bench.apps

@@ -461,8 +458,7 @@ class BenchSetup(Base):
self.run(f"{self.bench.python} -m pip install {quiet_flag} --upgrade -e {app_path}")

def node(self, apps=None):
"""Install and upgrade Node dependencies for specified / all apps on given Bench
"""
"""Install and upgrade Node dependencies for specified / all apps on given Bench"""
from bench.utils.bench import update_node_packages

return update_node_packages(bench_path=self.bench.name, apps=apps)
@@ -35,7 +35,7 @@ from bench.utils.bench import get_env_cmd
dynamic_feed = False
verbose = False
is_envvar_warn_set = None
from_command_line = False # set when commands are executed via the CLI
from_command_line = False  # set when commands are executed via the CLI
bench.LOG_BUFFER = []

change_uid_msg = "You should not run this command as root"

@@ -104,7 +104,9 @@ def cli():
if (
not in_bench
and len(sys.argv) > 1
and not argv.intersection({"init", "find", "src", "drop", "get", "get-app", "--version"})
and not argv.intersection(
{"init", "find", "src", "drop", "get", "get-app", "--version"}
)
and not cmd_requires_root()
):
log("Command not being executed in bench directory", level=3)

@@ -201,7 +203,7 @@ def frappe_cmd(bench_path="."):

def get_cached_frappe_commands():
if os.path.exists(bench_cache_file):
command_dump = open(bench_cache_file, "r").read() or "[]"
command_dump = open(bench_cache_file).read() or "[]"
return set(json.loads(command_dump))
return set()

@@ -238,6 +240,7 @@ def change_working_directory():

def setup_clear_cache():
from copy import copy

f = copy(os.chdir)

def _chdir(*args, **kwargs):
@@ -19,10 +19,17 @@ from bench.utils.cli import (
expose_value=False,
)
@click.option(
"--use-feature", is_eager=True, callback=use_experimental_feature, expose_value=False,
"--use-feature",
is_eager=True,
callback=use_experimental_feature,
expose_value=False,
)
@click.option(
"-v", "--verbose", is_flag=True, callback=setup_verbosity, expose_value=False,
"-v",
"--verbose",
is_flag=True,
callback=setup_verbosity,
expose_value=False,
)
def bench_command(bench_path="."):
import bench
@@ -5,55 +5,64 @@ from bench.config.common_site_config import update_config, put_config
import click


@click.group(help='Change bench configuration')
@click.group(help="Change bench configuration")
def config():
pass


@click.command('restart_supervisor_on_update', help='Enable/Disable auto restart of supervisor processes')
@click.argument('state', type=click.Choice(['on', 'off']))
@click.command(
"restart_supervisor_on_update",
help="Enable/Disable auto restart of supervisor processes",
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_restart_supervisor_on_update(state):
update_config({'restart_supervisor_on_update': state == 'on'})
update_config({"restart_supervisor_on_update": state == "on"})


@click.command('restart_systemd_on_update', help='Enable/Disable auto restart of systemd units')
@click.argument('state', type=click.Choice(['on', 'off']))
@click.command(
"restart_systemd_on_update", help="Enable/Disable auto restart of systemd units"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_restart_systemd_on_update(state):
update_config({'restart_systemd_on_update': state == 'on'})
update_config({"restart_systemd_on_update": state == "on"})


@click.command('dns_multitenant', help='Enable/Disable bench multitenancy on running bench update')
@click.argument('state', type=click.Choice(['on', 'off']))
@click.command(
"dns_multitenant", help="Enable/Disable bench multitenancy on running bench update"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_dns_multitenant(state):
update_config({'dns_multitenant': state == 'on'})
update_config({"dns_multitenant": state == "on"})


@click.command('serve_default_site', help='Configure nginx to serve the default site on port 80')
@click.argument('state', type=click.Choice(['on', 'off']))
@click.command(
"serve_default_site", help="Configure nginx to serve the default site on port 80"
)
@click.argument("state", type=click.Choice(["on", "off"]))
def config_serve_default_site(state):
update_config({'serve_default_site': state == 'on'})
update_config({"serve_default_site": state == "on"})


@click.command('rebase_on_pull', help='Rebase repositories on pulling')
@click.argument('state', type=click.Choice(['on', 'off']))
@click.command("rebase_on_pull", help="Rebase repositories on pulling")
@click.argument("state", type=click.Choice(["on", "off"]))
def config_rebase_on_pull(state):
update_config({'rebase_on_pull': state == 'on'})
update_config({"rebase_on_pull": state == "on"})


@click.command('http_timeout', help='Set HTTP timeout')
@click.argument('seconds', type=int)
@click.command("http_timeout", help="Set HTTP timeout")
@click.argument("seconds", type=int)
def config_http_timeout(seconds):
update_config({'http_timeout': seconds})
update_config({"http_timeout": seconds})


@click.command('set-common-config', help='Set value in common config')
@click.option('configs', '-c', '--config', multiple=True, type=(str, str))
@click.command("set-common-config", help="Set value in common config")
@click.option("configs", "-c", "--config", multiple=True, type=(str, str))
def set_common_config(configs):
import ast

common_site_config = {}
for key, value in configs:
if value in ('true', 'false'):
if value in ("true", "false"):
value = value.title()
try:
value = ast.literal_eval(value)

@@ -62,14 +71,17 @@ def set_common_config(configs):

common_site_config[key] = value

update_config(common_site_config, bench_path='.')
update_config(common_site_config, bench_path=".")


@click.command('remove-common-config', help='Remove specific keys from current bench\'s common config')
@click.argument('keys', nargs=-1)
@click.command(
"remove-common-config", help="Remove specific keys from current bench's common config"
)
@click.argument("keys", nargs=-1)
def remove_common_config(keys):
from bench.bench import Bench
common_site_config = Bench('.').conf

common_site_config = Bench(".").conf
for key in keys:
if key in common_site_config:
del common_site_config[key]
@@ -6,9 +6,7 @@ from bench.utils.system import setup_sudoers
import click


extra_vars = {
"production": True
}
extra_vars = {"production": True}


@click.group(help="Install system dependencies for setting up Frappe environment")

@@ -16,75 +14,100 @@ def install():
pass


@click.command('prerequisites', help="Installs pre-requisite libraries, essential tools like b2zip, htop, screen, vim, x11-fonts, python libs, cups and Redis")
@click.command(
"prerequisites",
help="Installs pre-requisite libraries, essential tools like b2zip, htop, screen, vim, x11-fonts, python libs, cups and Redis",
)
def install_prerequisites():
run_playbook('site.yml', tag='common, redis')
run_playbook("site.yml", tag="common, redis")


@click.command('mariadb', help="Install and setup MariaDB of specified version and root password")
@click.option('--mysql_root_password', '--mysql-root-password', default="")
@click.option('--version', default="10.3")
@click.command(
"mariadb", help="Install and setup MariaDB of specified version and root password"
)
@click.option("--mysql_root_password", "--mysql-root-password", default="")
@click.option("--version", default="10.3")
def install_maridb(mysql_root_password, version):
if mysql_root_password:
extra_vars.update({
"mysql_root_password": mysql_root_password,
})
extra_vars.update(
{
"mysql_root_password": mysql_root_password,
}
)

extra_vars.update({
"mariadb_version": version
})
extra_vars.update({"mariadb_version": version})

run_playbook('site.yml', extra_vars=extra_vars, tag='mariadb')
run_playbook("site.yml", extra_vars=extra_vars, tag="mariadb")


@click.command('wkhtmltopdf', help="Installs wkhtmltopdf v0.12.3 for linux")
@click.command("wkhtmltopdf", help="Installs wkhtmltopdf v0.12.3 for linux")
def install_wkhtmltopdf():
run_playbook('site.yml', extra_vars=extra_vars, tag='wkhtmltopdf')
run_playbook("site.yml", extra_vars=extra_vars, tag="wkhtmltopdf")


@click.command('nodejs', help="Installs Node.js v8")
@click.command("nodejs", help="Installs Node.js v8")
def install_nodejs():
run_playbook('site.yml', extra_vars=extra_vars, tag='nodejs')
run_playbook("site.yml", extra_vars=extra_vars, tag="nodejs")


@click.command('psutil', help="Installs psutil via pip")
@click.command("psutil", help="Installs psutil via pip")
def install_psutil():
run_playbook('site.yml', extra_vars=extra_vars, tag='psutil')
run_playbook("site.yml", extra_vars=extra_vars, tag="psutil")


@click.command('supervisor', help="Installs supervisor. If user is specified, sudoers is setup for that user")
@click.option('--user')
@click.command(
"supervisor",
help="Installs supervisor. If user is specified, sudoers is setup for that user",
)
@click.option("--user")
def install_supervisor(user=None):
run_playbook('site.yml', extra_vars=extra_vars, tag='supervisor')
run_playbook("site.yml", extra_vars=extra_vars, tag="supervisor")
if user:
setup_sudoers(user)


@click.command('nginx', help="Installs NGINX. If user is specified, sudoers is setup for that user")
@click.option('--user')
@click.command(
"nginx", help="Installs NGINX. If user is specified, sudoers is setup for that user"
)
@click.option("--user")
def install_nginx(user=None):
run_playbook('site.yml', extra_vars=extra_vars, tag='nginx')
run_playbook("site.yml", extra_vars=extra_vars, tag="nginx")
if user:
setup_sudoers(user)


@click.command('virtualbox', help="Installs supervisor")
@click.command("virtualbox", help="Installs supervisor")
def install_virtualbox():
run_playbook('vm_build.yml', tag='virtualbox')
run_playbook("vm_build.yml", tag="virtualbox")


@click.command('packer', help="Installs Oracle virtualbox and packer 1.2.1")
@click.command("packer", help="Installs Oracle virtualbox and packer 1.2.1")
def install_packer():
run_playbook('vm_build.yml', tag='packer')
run_playbook("vm_build.yml", tag="packer")


@click.command("fail2ban", help="Install fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks")
@click.option('--maxretry', default=6, help="Number of matches (i.e. value of the counter) which triggers ban action on the IP.")
@click.option('--bantime', default=600, help="The counter is set to zero if no match is found within 'findtime' seconds.")
@click.option('--findtime', default=600, help='Duration (in seconds) for IP to be banned for. Negative number for "permanent" ban.')
@click.command(
"fail2ban",
help="Install fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks",
)
@click.option(
"--maxretry",
default=6,
help="Number of matches (i.e. value of the counter) which triggers ban action on the IP.",
)
@click.option(
"--bantime",
default=600,
help="The counter is set to zero if no match is found within 'findtime' seconds.",
)
@click.option(
"--findtime",
default=600,
help='Duration (in seconds) for IP to be banned for. Negative number for "permanent" ban.',
)
def install_failtoban(**kwargs):
extra_vars.update(kwargs)
run_playbook('site.yml', extra_vars=extra_vars, tag='fail2ban')
run_playbook("site.yml", extra_vars=extra_vars, tag="fail2ban")


install.add_command(install_prerequisites)
@@ -37,9 +37,7 @@ import click
help="Skip redis config generation if already specifying the common-site-config file",
)
@click.option("--skip-assets", is_flag=True, default=False, help="Do not build assets")
@click.option(
"--install-app", help="Install particular app after initialization"
)
@click.option("--install-app", help="Install particular app after initialization")
@click.option("--verbose", is_flag=True, help="Verbose output during install")
def init(
path,

@@ -69,7 +67,7 @@ def init(
try:
init(
path,
apps_path=apps_path, # can be used from --config flag? Maybe config file could have more info?
apps_path=apps_path,  # can be used from --config flag? Maybe config file could have more info?
no_procfile=no_procfile,
no_backups=no_backups,
frappe_path=frappe_path,

@@ -130,7 +128,12 @@ def drop(path):
@click.option("--branch", default=None, help="branch to checkout")
@click.option("--overwrite", is_flag=True, default=False)
@click.option("--skip-assets", is_flag=True, default=False, help="Do not build assets")
@click.option("--soft-link", is_flag=True, default=False, help="Create a soft link to git repo instead of clone.")
@click.option(
"--soft-link",
is_flag=True,
default=False,
help="Create a soft link to git repo instead of clone.",
)
@click.option(
"--init-bench", is_flag=True, default=False, help="Initialize Bench if not in one"
)

@@ -163,12 +166,13 @@ def get_app(
resolve_deps=resolve_deps,
)


@click.command("new-app", help="Create a new Frappe application under apps folder")
@click.option(
"--no-git",
is_flag=True,
flag_value="--no-git",
help="Do not initialize git repository for the app (available in Frappe v14+)"
help="Do not initialize git repository for the app (available in Frappe v14+)",
)
@click.argument("app-name")
def new_app(app_name, no_git=None):
@@ -14,15 +14,20 @@ def setup():
pass


@click.command("sudoers", help="Add commands to sudoers list for execution without password")
@click.command(
"sudoers", help="Add commands to sudoers list for execution without password"
)
@click.argument("user")
def setup_sudoers(user):
from bench.utils.system import setup_sudoers

setup_sudoers(user)


@click.command("nginx", help="Generate configuration files for NGINX")
@click.option("--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True)
@click.option(
"--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True
)
def setup_nginx(yes=False):
from bench.config.nginx import make_nginx_conf

@@ -38,11 +43,18 @@ def reload_nginx():

@click.command("supervisor", help="Generate configuration for supervisor")
@click.option("--user", help="optional user argument")
@click.option("--yes", help="Yes to regeneration of supervisor config", is_flag=True, default=False)
@click.option("--skip-redis", help="Skip redis configuration", is_flag=True, default=False)
@click.option(
"--yes", help="Yes to regeneration of supervisor config", is_flag=True, default=False
)
@click.option(
"--skip-redis", help="Skip redis configuration", is_flag=True, default=False
)
def setup_supervisor(user=None, yes=False, skip_redis=False):
from bench.utils import get_cmd_output
from bench.config.supervisor import update_supervisord_config, generate_supervisor_config
from bench.config.supervisor import (
update_supervisord_config,
generate_supervisor_config,
)

which("supervisorctl", raise_err=True)

@@ -55,33 +67,42 @@ def setup_supervisor(user=None, yes=False, skip_redis=False):
@click.command("redis", help="Generates configuration for Redis")
def setup_redis():
from bench.config.redis import generate_config

generate_config(".")


@click.command("fonts", help="Add Frappe fonts to system")
def setup_fonts():
from bench.utils.system import setup_fonts

setup_fonts()


@click.command("production", help="Setup Frappe production environment for specific user")
@click.command(
"production", help="Setup Frappe production environment for specific user"
)
@click.argument("user")
@click.option("--yes", help="Yes to regeneration config", is_flag=True, default=False)
def setup_production(user, yes=False):
from bench.config.production_setup import setup_production

setup_production(user=user, yes=yes)


@click.command("backups", help="Add cronjob for bench backups")
def setup_backups():
from bench.bench import Bench

Bench(".").setup.backups()


@click.command("env", help="Setup virtualenv for bench")
@click.option("--python", type = str, default = "python3", help = "Path to Python Executable.")
@click.option(
"--python", type=str, default="python3", help="Path to Python Executable."
)
def setup_env(python="python3"):
from bench.bench import Bench

return Bench(".").setup.env(python=python)


@@ -90,7 +111,10 @@ def setup_env(python="python3"):
@click.option("--force")
def setup_firewall(ssh_port=None, force=False):
if not force:
click.confirm(f"Setting up the firewall will block all ports except 80, 443 and {ssh_port}\nDo you want to continue?", abort=True)
click.confirm(
f"Setting up the firewall will block all ports except 80, 443 and {ssh_port}\nDo you want to continue?",
abort=True,
)

if not ssh_port:
ssh_port = 22

@@ -103,7 +127,9 @@ def setup_firewall(ssh_port=None, force=False):
@click.option("--force")
def set_ssh_port(port, force=False):
if not force:
click.confirm(f"This will change your SSH Port to {port}\nDo you want to continue?", abort=True)
click.confirm(
f"This will change your SSH Port to {port}\nDo you want to continue?", abort=True
)

run_playbook("roles/bench/tasks/change_ssh_port.yml", {"ssh_port": port})

@@ -111,35 +137,63 @@ def set_ssh_port(port, force=False):
@click.command("lets-encrypt", help="Setup lets-encrypt SSL for site")
@click.argument("site")
@click.option("--custom-domain")
@click.option('-n', '--non-interactive', default=False, is_flag=True, help="Run command non-interactively. This flag restarts nginx and runs certbot non interactively. Shouldn't be used on 1'st attempt")
@click.option(
"-n",
"--non-interactive",
default=False,
is_flag=True,
help="Run command non-interactively. This flag restarts nginx and runs certbot non interactively. Shouldn't be used on 1'st attempt",
)
def setup_letsencrypt(site, custom_domain, non_interactive):
from bench.config.lets_encrypt import setup_letsencrypt

setup_letsencrypt(site, custom_domain, bench_path=".", interactive=not non_interactive)


@click.command("wildcard-ssl", help="Setup wildcard SSL certificate for multi-tenant bench")
@click.command(
"wildcard-ssl", help="Setup wildcard SSL certificate for multi-tenant bench"
)
@click.argument("domain")
@click.option("--email")
@click.option("--exclude-base-domain", default=False, is_flag=True, help="SSL Certificate not applicable for base domain")
@click.option(
"--exclude-base-domain",
default=False,
is_flag=True,
help="SSL Certificate not applicable for base domain",
)
def setup_wildcard_ssl(domain, email, exclude_base_domain):
from bench.config.lets_encrypt import setup_wildcard_ssl
setup_wildcard_ssl(domain, email, bench_path=".", exclude_base_domain=exclude_base_domain)

setup_wildcard_ssl(
domain, email, bench_path=".", exclude_base_domain=exclude_base_domain
)


@click.command("procfile", help="Generate Procfile for bench start")
def setup_procfile():
from bench.config.procfile import setup_procfile

setup_procfile(".")


@click.command("socketio", help="[DEPRECATED] Setup node dependencies for socketio server")
@click.command(
"socketio", help="[DEPRECATED] Setup node dependencies for socketio server"
)
def setup_socketio():
return


@click.command("requirements")
@click.option("--node", help="Update only Node packages", default=False, is_flag=True)
@click.option("--python", help="Update only Python packages", default=False, is_flag=True)
@click.option("--dev", help="Install optional python development dependencies", default=False, is_flag=True)
@click.option(
"--python", help="Update only Python packages", default=False, is_flag=True
)
@click.option(
"--dev",
help="Install optional python development dependencies",
default=False,
is_flag=True,
)
@click.argument("apps", nargs=-1)
def setup_requirements(node=False, python=False, dev=False, apps=None):
"""

@@ -162,15 +216,26 @@ def setup_requirements(node=False, python=False, dev=False, apps=None):

else:
from bench.utils.bench import install_python_dev_dependencies

install_python_dev_dependencies(apps=apps)

if node:
click.secho("--dev flag only supports python dependencies. All node development dependencies are installed by default.", fg="yellow")
click.secho(
"--dev flag only supports python dependencies. All node development dependencies are installed by default.",
fg="yellow",
)


@click.command("manager", help="Setup bench-manager.local site with the bench_manager app installed on it")
@click.option("--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True)
@click.option("--port", help="Port on which you want to run bench manager", default=23624)
@click.command(
"manager",
help="Setup bench-manager.local site with the bench_manager app installed on it",
)
@click.option(
"--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True
)
@click.option(
"--port", help="Port on which you want to run bench manager", default=23624
)
@click.option("--domain", help="Domain on which you want to run bench manager")
def setup_manager(yes=False, port=23624, domain=None):
from bench.bench import Bench

@@ -194,10 +259,14 @@ def setup_manager(yes=False, port=23624, domain=None):
bench_path = "."
bench = Bench(bench_path)

if bench.conf.get("restart_supervisor_on_update") or bench.conf.get("restart_systemd_on_update"):
if bench.conf.get("restart_supervisor_on_update") or bench.conf.get(
"restart_systemd_on_update"
):
# implicates a production setup or so I presume
if not domain:
print("Please specify the site name on which you want to host bench-manager using the 'domain' flag")
print(
"Please specify the site name on which you want to host bench-manager using the 'domain' flag"
)
sys.exit(1)

if domain not in bench.sites:

@@ -209,6 +278,7 @@ def setup_manager(yes=False, port=23624, domain=None):
@click.command("config", help="Generate or over-write sites/common_site_config.json")
def setup_config():
from bench.config.common_site_config import setup_config

setup_config(".")


@@ -224,6 +294,7 @@ def add_domain(domain, site=None, ssl_certificate=None, ssl_certificate_key=None
sys.exit(1)

from bench.config.site_config import add_domain

add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path=".")


@@ -236,10 +307,14 @@ def remove_domain(domain, site=None):
sys.exit(1)

from bench.config.site_config import remove_domain

remove_domain(site, domain, bench_path=".")


@click.command("sync-domains", help="Check if there is a change in domains. If yes, updates the domains list.")
@click.command(
"sync-domains",
help="Check if there is a change in domains. If yes, updates the domains list.",
)
@click.option("--domain", multiple=True)
@click.option("--site", prompt=True)
def sync_domains(domain=None, site=None):

@@ -254,6 +329,7 @@ def sync_domains(domain=None, site=None):
sys.exit(1)

from bench.config.site_config import sync_domains

changed = sync_domains(site, domains, bench_path=".")

# if changed, success, else failure

@@ -275,24 +351,53 @@ def setup_roles(role, **kwargs):
run_playbook("site.yml", extra_vars=extra_vars)


@click.command("fail2ban", help="Setup fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks")
@click.option("--maxretry", default=6, help="Number of matches (i.e. value of the counter) which triggers ban action on the IP. Default is 6 seconds" )
@click.option("--bantime", default=600, help="Duration (in seconds) for IP to be banned for. Negative number for 'permanent' ban. Default is 600 seconds")
@click.option("--findtime", default=600, help="The counter is set to zero if match found within 'findtime' seconds doesn't exceed 'maxretry'. Default is 600 seconds")
@click.command(
"fail2ban",
help="Setup fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks",
)
@click.option(
"--maxretry",
default=6,
help="Number of matches (i.e. value of the counter) which triggers ban action on the IP. Default is 6 seconds",
)
@click.option(
"--bantime",
default=600,
help="Duration (in seconds) for IP to be banned for. Negative number for 'permanent' ban. Default is 600 seconds",
)
@click.option(
"--findtime",
default=600,
help="The counter is set to zero if match found within 'findtime' seconds doesn't exceed 'maxretry'. Default is 600 seconds",
)
def setup_nginx_proxy_jail(**kwargs):
run_playbook("roles/fail2ban/tasks/configure_nginx_jail.yml", extra_vars=kwargs)


@click.command("systemd", help="Generate configuration for systemd")
@click.option("--user", help="Optional user argument")
@click.option("--yes", help="Yes to regeneration of systemd config files", is_flag=True, default=False)
@click.option(
"--yes",
help="Yes to regeneration of systemd config files",
is_flag=True,
default=False,
)
@click.option("--stop", help="Stop bench services", is_flag=True, default=False)
@click.option("--create-symlinks", help="Create Symlinks", is_flag=True, default=False)
@click.option("--delete-symlinks", help="Delete Symlinks", is_flag=True, default=False)
def setup_systemd(user=None, yes=False, stop=False, create_symlinks=False, delete_symlinks=False):
def setup_systemd(
user=None, yes=False, stop=False, create_symlinks=False, delete_symlinks=False
):
from bench.config.systemd import generate_systemd_config
generate_systemd_config(bench_path=".", user=user, yes=yes,
stop=stop, create_symlinks=create_symlinks, delete_symlinks=delete_symlinks)

generate_systemd_config(
bench_path=".",
user=user,
yes=yes,
stop=stop,
create_symlinks=create_symlinks,
delete_symlinks=delete_symlinks,
)


setup.add_command(setup_sudoers)
@@ -6,43 +6,96 @@ from bench.app import pull_apps
from bench.utils.bench import post_upgrade, patch_sites, build_assets


@click.command('update', help="Performs an update operation on current bench. Without any flags will backup, pull, setup requirements, build, run patches and restart bench. Using specific flags will only do certain tasks instead of all")
@click.option('--pull', is_flag=True, help="Pull updates for all the apps in bench")
@click.option('--apps', type=str)
@click.option('--patch', is_flag=True, help="Run migrations for all sites in the bench")
@click.option('--build', is_flag=True, help="Build JS and CSS assets for the bench")
@click.option('--requirements', is_flag=True, help="Update requirements. If run alone, equivalent to `bench setup requirements`")
@click.option('--restart-supervisor', is_flag=True, help="Restart supervisor processes after update")
@click.option('--restart-systemd', is_flag=True, help="Restart systemd units after update")
@click.option('--no-backup', is_flag=True, help="If this flag is set, sites won't be backed up prior to updates. Note: This is not recommended in production.")
@click.option('--no-compile', is_flag=True, help="If set, Python bytecode won't be compiled before restarting the processes")
@click.option('--force', is_flag=True, help="Forces major version upgrades")
@click.option('--reset', is_flag=True, help="Hard resets git branch's to their new states overriding any changes and overriding rebase on pull")
def update(pull, apps, patch, build, requirements, restart_supervisor, restart_systemd, no_backup, no_compile, force, reset):
@click.command(
"update",
help="Performs an update operation on current bench. Without any flags will backup, pull, setup requirements, build, run patches and restart bench. Using specific flags will only do certain tasks instead of all",
)
@click.option("--pull", is_flag=True, help="Pull updates for all the apps in bench")
@click.option("--apps", type=str)
@click.option("--patch", is_flag=True, help="Run migrations for all sites in the bench")
@click.option("--build", is_flag=True, help="Build JS and CSS assets for the bench")
@click.option(
"--requirements",
is_flag=True,
help="Update requirements. If run alone, equivalent to `bench setup requirements`",
)
@click.option(
"--restart-supervisor", is_flag=True, help="Restart supervisor processes after update"
)
@click.option(
"--restart-systemd", is_flag=True, help="Restart systemd units after update"
)
@click.option(
"--no-backup",
is_flag=True,
help="If this flag is set, sites won't be backed up prior to updates. Note: This is not recommended in production.",
)
@click.option(
"--no-compile",
is_flag=True,
help="If set, Python bytecode won't be compiled before restarting the processes",
)
@click.option("--force", is_flag=True, help="Forces major version upgrades")
@click.option(
"--reset",
is_flag=True,
help="Hard resets git branch's to their new states overriding any changes and overriding rebase on pull",
)
def update(
pull,
apps,
patch,
build,
requirements,
restart_supervisor,
restart_systemd,
no_backup,
no_compile,
force,
reset,
):
from bench.utils.bench import update
update(pull=pull, apps=apps, patch=patch, build=build, requirements=requirements, restart_supervisor=restart_supervisor, restart_systemd=restart_systemd, backup=not no_backup, compile=not no_compile, force=force, reset=reset)

update(
pull=pull,
apps=apps,
patch=patch,
build=build,
requirements=requirements,
restart_supervisor=restart_supervisor,
restart_systemd=restart_systemd,
backup=not no_backup,
compile=not no_compile,
force=force,
reset=reset,
)


@click.command('retry-upgrade', help="Retry a failed upgrade")
@click.option('--version', default=5)
@click.command("retry-upgrade", help="Retry a failed upgrade")
@click.option("--version", default=5)
def retry_upgrade(version):
pull_apps()
patch_sites()
build_assets()
post_upgrade(version-1, version)
post_upgrade(version - 1, version)


@click.command('switch-to-branch', help="Switch all apps to specified branch, or specify apps separated by space")
@click.argument('branch')
@click.argument('apps', nargs=-1)
@click.option('--upgrade',is_flag=True)
@click.command(
"switch-to-branch",
help="Switch all apps to specified branch, or specify apps separated by space",
)
@click.argument("branch")
@click.argument("apps", nargs=-1)
@click.option("--upgrade", is_flag=True)
def switch_to_branch(branch, apps, upgrade=False):
from bench.utils.app import switch_to_branch

switch_to_branch(branch=branch, apps=list(apps), upgrade=upgrade)


@click.command('switch-to-develop')
@click.command("switch-to-develop")
def switch_to_develop(upgrade=False):
"Switch frappe and erpnext to develop branch"
from bench.utils.app import switch_to_develop
switch_to_develop(apps=['frappe', 'erpnext'])

switch_to_develop(apps=["frappe", "erpnext"])
@@ -6,162 +6,200 @@ import sys
import click


@click.command('start', help="Start Frappe development processes")
@click.option('--no-dev', is_flag=True, default=False)
@click.option('--no-prefix', is_flag=True, default=False, help="Hide process name from bench start log")
@click.option('--concurrency', '-c', type=str)
@click.option('--procfile', '-p', type=str)
@click.option('--man', '-m', help="Process Manager of your choice ;)")
@click.command("start", help="Start Frappe development processes")
@click.option("--no-dev", is_flag=True, default=False)
@click.option(
"--no-prefix",
is_flag=True,
default=False,
help="Hide process name from bench start log",
)
@click.option("--concurrency", "-c", type=str)
@click.option("--procfile", "-p", type=str)
@click.option("--man", "-m", help="Process Manager of your choice ;)")
def start(no_dev, concurrency, procfile, no_prefix, man):
from bench.utils.system import start
start(no_dev=no_dev, concurrency=concurrency, procfile=procfile, no_prefix=no_prefix, procman=man)

start(
no_dev=no_dev,
concurrency=concurrency,
procfile=procfile,
no_prefix=no_prefix,
procman=man,
)


@click.command('restart', help="Restart supervisor processes or systemd units")
@click.option('--web', is_flag=True, default=False)
@click.option('--supervisor', is_flag=True, default=False)
@click.option('--systemd', is_flag=True, default=False)
@click.command("restart", help="Restart supervisor processes or systemd units")
@click.option("--web", is_flag=True, default=False)
@click.option("--supervisor", is_flag=True, default=False)
@click.option("--systemd", is_flag=True, default=False)
def restart(web, supervisor, systemd):
from bench.bench import Bench

if not systemd and not web:
supervisor = True

Bench(".").reload(web, supervisor, systemd)


@click.command('set-nginx-port', help="Set NGINX port for site")
@click.argument('site')
@click.argument('port', type=int)
@click.command("set-nginx-port", help="Set NGINX port for site")
@click.argument("site")
@click.argument("port", type=int)
def set_nginx_port(site, port):
from bench.config.site_config import set_nginx_port

set_nginx_port(site, port)


@click.command('set-ssl-certificate', help="Set SSL certificate path for site")
@click.argument('site')
@click.argument('ssl-certificate-path')
@click.command("set-ssl-certificate", help="Set SSL certificate path for site")
@click.argument("site")
@click.argument("ssl-certificate-path")
def set_ssl_certificate(site, ssl_certificate_path):
from bench.config.site_config import set_ssl_certificate

set_ssl_certificate(site, ssl_certificate_path)


@click.command('set-ssl-key', help="Set SSL certificate private key path for site")
@click.argument('site')
@click.argument('ssl-certificate-key-path')
@click.command("set-ssl-key", help="Set SSL certificate private key path for site")
@click.argument("site")
@click.argument("ssl-certificate-key-path")
def set_ssl_certificate_key(site, ssl_certificate_key_path):
from bench.config.site_config import set_ssl_certificate_key

set_ssl_certificate_key(site, ssl_certificate_key_path)


@click.command('set-url-root', help="Set URL root for site")
@click.argument('site')
@click.argument('url-root')
@click.command("set-url-root", help="Set URL root for site")
@click.argument("site")
@click.argument("url-root")
def set_url_root(site, url_root):
from bench.config.site_config import set_url_root

set_url_root(site, url_root)


@click.command('set-mariadb-host', help="Set MariaDB host for bench")
@click.argument('host')
@click.command("set-mariadb-host", help="Set MariaDB host for bench")
@click.argument("host")
def set_mariadb_host(host):
from bench.utils.bench import set_mariadb_host

set_mariadb_host(host)


@click.command('set-redis-cache-host', help="Set Redis cache host for bench")
@click.argument('host')
@click.command("set-redis-cache-host", help="Set Redis cache host for bench")
@click.argument("host")
def set_redis_cache_host(host):
"""
Usage: bench set-redis-cache-host localhost:6379/1
"""
from bench.utils.bench import set_redis_cache_host

set_redis_cache_host(host)


@click.command('set-redis-queue-host', help="Set Redis queue host for bench")
@click.argument('host')
@click.command("set-redis-queue-host", help="Set Redis queue host for bench")
@click.argument("host")
def set_redis_queue_host(host):
"""
Usage: bench set-redis-queue-host localhost:6379/2
"""
from bench.utils.bench import set_redis_queue_host

set_redis_queue_host(host)


@click.command('set-redis-socketio-host', help="Set Redis socketio host for bench")
@click.argument('host')
@click.command("set-redis-socketio-host", help="Set Redis socketio host for bench")
@click.argument("host")
def set_redis_socketio_host(host):
"""
Usage: bench set-redis-socketio-host localhost:6379/3
"""
from bench.utils.bench import set_redis_socketio_host

set_redis_socketio_host(host)



@click.command('download-translations', help="Download latest translations")
@click.command("download-translations", help="Download latest translations")
def download_translations():
from bench.utils.translation import download_translations_p

download_translations_p()


@click.command('renew-lets-encrypt', help="Sets Up latest cron and Renew Let's Encrypt certificate")
@click.command(
"renew-lets-encrypt", help="Sets Up latest cron and Renew Let's Encrypt certificate"
)
def renew_lets_encrypt():
from bench.config.lets_encrypt import renew_certs

renew_certs()


@click.command('backup', help="Backup single site")
@click.argument('site')
@click.command("backup", help="Backup single site")
@click.argument("site")
def backup_site(site):
from bench.bench import Bench
from bench.utils.system import backup_site

if site not in Bench(".").sites:
print(f'Site `{site}` not found')
print(f"Site `{site}` not found")
sys.exit(1)
backup_site(site, bench_path='.')
backup_site(site, bench_path=".")
|
||||
|
||||
|
||||
@click.command('backup-all-sites', help="Backup all sites in current bench")
|
||||
@click.command("backup-all-sites", help="Backup all sites in current bench")
|
||||
def backup_all_sites():
|
||||
from bench.utils.system import backup_all_sites
|
||||
backup_all_sites(bench_path='.')
|
||||
|
||||
backup_all_sites(bench_path=".")
|
||||
|
||||
|
||||
@click.command('disable-production', help="Disables production environment for the bench.")
|
||||
@click.command(
|
||||
"disable-production", help="Disables production environment for the bench."
|
||||
)
|
||||
def disable_production():
|
||||
from bench.config.production_setup import disable_production
|
||||
disable_production(bench_path='.')
|
||||
|
||||
disable_production(bench_path=".")
|
||||
|
||||
|
||||
@click.command('src', help="Prints bench source folder path, which can be used as: cd `bench src`")
|
||||
@click.command(
|
||||
"src", help="Prints bench source folder path, which can be used as: cd `bench src`"
|
||||
)
|
||||
def bench_src():
|
||||
from bench.cli import src
|
||||
|
||||
print(os.path.dirname(src))
|
||||
|
||||
|
||||
@click.command('find', help="Finds benches recursively from location")
|
||||
@click.argument('location', default='')
|
||||
@click.command("find", help="Finds benches recursively from location")
|
||||
@click.argument("location", default="")
|
||||
def find_benches(location):
|
||||
from bench.utils import find_benches
|
||||
|
||||
find_benches(directory=location)
|
||||
|
||||
|
||||
@click.command('migrate-env', help="Migrate Virtual Environment to desired Python Version")
|
||||
@click.argument('python', type=str)
|
||||
@click.option('--no-backup', 'backup', is_flag=True, default=True)
|
||||
@click.command(
|
||||
"migrate-env", help="Migrate Virtual Environment to desired Python Version"
|
||||
)
|
||||
@click.argument("python", type=str)
|
||||
@click.option("--no-backup", "backup", is_flag=True, default=True)
|
||||
def migrate_env(python, backup=True):
|
||||
from bench.utils.bench import migrate_env
|
||||
|
||||
migrate_env(python=python, backup=backup)
|
||||
|
||||
|
||||
@click.command('generate-command-cache', help="Caches Frappe Framework commands")
|
||||
def generate_command_cache(bench_path='.'):
|
||||
@click.command("generate-command-cache", help="Caches Frappe Framework commands")
|
||||
def generate_command_cache(bench_path="."):
|
||||
from bench.utils import generate_command_cache
|
||||
|
||||
return generate_command_cache(bench_path=bench_path)
|
||||
|
||||
|
||||
@click.command('clear-command-cache', help="Clears Frappe Framework cached commands")
|
||||
def clear_command_cache(bench_path='.'):
|
||||
@click.command("clear-command-cache", help="Clears Frappe Framework cached commands")
|
||||
def clear_command_cache(bench_path="."):
|
||||
from bench.utils import clear_command_cache
|
||||
|
||||
return clear_command_cache(bench_path=bench_path)
|
||||
|
@ -3,4 +3,5 @@

def env():
from jinja2 import Environment, PackageLoader
return Environment(loader=PackageLoader('bench.config'))
return Environment(loader=PackageLoader("bench.config"))
@ -3,20 +3,19 @@ import getpass
import json
import os

default_config = {
'restart_supervisor_on_update': False,
'restart_systemd_on_update': False,
'serve_default_site': True,
'rebase_on_pull': False,
'frappe_user': getpass.getuser(),
'shallow_clone': True,
'background_workers': 1,
'use_redis_auth': False,
'live_reload': True
"restart_supervisor_on_update": False,
"restart_systemd_on_update": False,
"serve_default_site": True,
"rebase_on_pull": False,
"frappe_user": getpass.getuser(),
"shallow_clone": True,
"background_workers": 1,
"use_redis_auth": False,
"live_reload": True,
}

def setup_config(bench_path):
make_pid_folder(bench_path)
bench_config = get_config(bench_path)
@ -26,52 +25,55 @@ def setup_config(bench_path):

put_config(bench_config, bench_path)

def get_config(bench_path):
return get_common_site_config(bench_path)

def get_common_site_config(bench_path):
config_path = get_config_path(bench_path)
if not os.path.exists(config_path):
return {}
with open(config_path, 'r') as f:
with open(config_path) as f:
return json.load(f)

def put_config(config, bench_path='.'):
def put_config(config, bench_path="."):
config_path = get_config_path(bench_path)
with open(config_path, 'w') as f:
with open(config_path, "w") as f:
return json.dump(config, f, indent=1, sort_keys=True)

def update_config(new_config, bench_path='.'):
def update_config(new_config, bench_path="."):
config = get_config(bench_path=bench_path)
config.update(new_config)
put_config(config, bench_path=bench_path)

def get_config_path(bench_path):
return os.path.join(bench_path, 'sites', 'common_site_config.json')
return os.path.join(bench_path, "sites", "common_site_config.json")

def get_gunicorn_workers():
'''This function will return the maximum workers that can be started depending upon
number of cpu's present on the machine'''
"""This function will return the maximum workers that can be started depending upon
number of cpu's present on the machine"""
import multiprocessing

return {
"gunicorn_workers": multiprocessing.cpu_count() * 2 + 1
}
return {"gunicorn_workers": multiprocessing.cpu_count() * 2 + 1}

def update_config_for_frappe(config, bench_path):
ports = make_ports(bench_path)

for key in ('redis_cache', 'redis_queue', 'redis_socketio'):
for key in ("redis_cache", "redis_queue", "redis_socketio"):
if key not in config:
config[key] = f"redis://localhost:{ports[key]}"

for key in ('webserver_port', 'socketio_port', 'file_watcher_port'):
for key in ("webserver_port", "socketio_port", "file_watcher_port"):
if key not in config:
config[key] = ports[key]

# TODO Optionally we need to add the host or domain name in case dns_multitenant is false

def make_ports(bench_path):
from urllib.parse import urlparse

@ -83,7 +85,7 @@ def make_ports(bench_path):
"file_watcher_port": 6787,
"redis_queue": 11000,
"redis_socketio": 12000,
"redis_cache": 13000
"redis_cache": 13000,
}

# collect all existing ports

@ -96,7 +98,7 @@ def make_ports(bench_path):
value = bench_config.get(key)

# extract port from redis url
if value and (key in ('redis_cache', 'redis_queue', 'redis_socketio')):
if value and (key in ("redis_cache", "redis_queue", "redis_socketio")):
value = urlparse(value).port

if value:

@ -113,7 +115,8 @@ def make_ports(bench_path):

return ports

def make_pid_folder(bench_path):
pids_path = os.path.join(bench_path, 'config', 'pids')
pids_path = os.path.join(bench_path, "config", "pids")
if not os.path.exists(pids_path):
os.makedirs(pids_path)
@ -14,28 +14,31 @@ from bench.utils import exec_cmd, which
|
||||
from bench.utils.bench import update_common_site_config
|
||||
from bench.exceptions import CommandFailedError
|
||||
|
||||
|
||||
def setup_letsencrypt(site, custom_domain, bench_path, interactive):
|
||||
|
||||
site_path = os.path.join(bench_path, "sites", site, "site_config.json")
|
||||
if not os.path.exists(os.path.dirname(site_path)):
|
||||
print("No site named "+site)
|
||||
print("No site named " + site)
|
||||
return
|
||||
|
||||
if custom_domain:
|
||||
domains = get_domains(site, bench_path)
|
||||
for d in domains:
|
||||
if (isinstance(d, dict) and d['domain']==custom_domain):
|
||||
if isinstance(d, dict) and d["domain"] == custom_domain:
|
||||
print(f"SSL for Domain {custom_domain} already exists")
|
||||
return
|
||||
|
||||
if not custom_domain in domains:
|
||||
if custom_domain not in domains:
|
||||
print(f"No custom domain named {custom_domain} set for site")
|
||||
return
|
||||
|
||||
if interactive:
|
||||
click.confirm('Running this will stop the nginx service temporarily causing your sites to go offline\n'
|
||||
'Do you want to continue?',
|
||||
abort=True)
|
||||
click.confirm(
|
||||
"Running this will stop the nginx service temporarily causing your sites to go offline\n"
|
||||
"Do you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
if not Bench(bench_path).conf.get("dns_multitenant"):
|
||||
print("You cannot setup SSL without DNS Multitenancy")
|
||||
@ -47,56 +50,66 @@ def setup_letsencrypt(site, custom_domain, bench_path, interactive):
|
||||
|
||||
|
||||
def create_config(site, custom_domain):
|
||||
config = bench.config.env().get_template('letsencrypt.cfg').render(domain=custom_domain or site)
|
||||
config_path = f'/etc/letsencrypt/configs/{custom_domain or site}.cfg'
|
||||
config = (
|
||||
bench.config.env()
|
||||
.get_template("letsencrypt.cfg")
|
||||
.render(domain=custom_domain or site)
|
||||
)
|
||||
config_path = f"/etc/letsencrypt/configs/{custom_domain or site}.cfg"
|
||||
create_dir_if_missing(config_path)
|
||||
|
||||
with open(config_path, 'w') as f:
|
||||
with open(config_path, "w") as f:
|
||||
f.write(config)
|
||||
|
||||
|
||||
def run_certbot_and_setup_ssl(site, custom_domain, bench_path, interactive=True):
|
||||
service('nginx', 'stop')
|
||||
service("nginx", "stop")
|
||||
|
||||
try:
|
||||
interactive = '' if interactive else '-n'
|
||||
exec_cmd(f"{get_certbot_path()} {interactive} --config /etc/letsencrypt/configs/{custom_domain or site}.cfg certonly")
|
||||
interactive = "" if interactive else "-n"
|
||||
exec_cmd(
|
||||
f"{get_certbot_path()} {interactive} --config /etc/letsencrypt/configs/{custom_domain or site}.cfg certonly"
|
||||
)
|
||||
except CommandFailedError:
|
||||
service('nginx', 'start')
|
||||
service("nginx", "start")
|
||||
print("There was a problem trying to setup SSL for your site")
|
||||
return
|
||||
|
||||
ssl_path = f"/etc/letsencrypt/live/{custom_domain or site}/"
|
||||
ssl_config = { "ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
|
||||
"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem") }
|
||||
ssl_config = {
|
||||
"ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
|
||||
"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem"),
|
||||
}
|
||||
|
||||
if custom_domain:
|
||||
remove_domain(site, custom_domain, bench_path)
|
||||
domains = get_domains(site, bench_path)
|
||||
ssl_config['domain'] = custom_domain
|
||||
ssl_config["domain"] = custom_domain
|
||||
domains.append(ssl_config)
|
||||
update_site_config(site, { "domains": domains }, bench_path=bench_path)
|
||||
update_site_config(site, {"domains": domains}, bench_path=bench_path)
|
||||
else:
|
||||
update_site_config(site, ssl_config, bench_path=bench_path)
|
||||
|
||||
make_nginx_conf(bench_path)
|
||||
service('nginx', 'start')
|
||||
service("nginx", "start")
|
||||
|
||||
|
||||
def setup_crontab():
|
||||
from crontab import CronTab
|
||||
|
||||
job_command = f'{get_certbot_path()} renew -a nginx --post-hook "systemctl reload nginx"'
|
||||
job_comment = 'Renew lets-encrypt every month'
|
||||
job_command = (
|
||||
f'{get_certbot_path()} renew -a nginx --post-hook "systemctl reload nginx"'
|
||||
)
|
||||
job_comment = "Renew lets-encrypt every month"
|
||||
print(f"Setting Up cron job to {job_comment}")
|
||||
|
||||
system_crontab = CronTab(user='root')
|
||||
system_crontab = CronTab(user="root")
|
||||
|
||||
for job in system_crontab.find_comment(comment=job_comment): # Removes older entries
|
||||
for job in system_crontab.find_comment(comment=job_comment): # Removes older entries
|
||||
system_crontab.remove(job)
|
||||
|
||||
job = system_crontab.new(command=job_command, comment=job_comment)
|
||||
job.setall('0 0 */1 * *') # Run at 00:00 every day-of-month
|
||||
job.setall("0 0 */1 * *") # Run at 00:00 every day-of-month
|
||||
system_crontab.write()
|
||||
|
||||
|
||||
@ -109,35 +122,39 @@ def get_certbot_path():
|
||||
try:
|
||||
return which("certbot", raise_err=True)
|
||||
except FileNotFoundError:
|
||||
raise CommandFailedError("Certbot is not installed on your system. Please visit https://certbot.eff.org/instructions for installation instructions, then try again.")
|
||||
raise CommandFailedError(
|
||||
"Certbot is not installed on your system. Please visit https://certbot.eff.org/instructions for installation instructions, then try again."
|
||||
)
|
||||
|
||||
|
||||
def renew_certs():
|
||||
# Needs to be run with sudo
|
||||
click.confirm('Running this will stop the nginx service temporarily causing your sites to go offline\n'
|
||||
'Do you want to continue?',
|
||||
abort=True)
|
||||
click.confirm(
|
||||
"Running this will stop the nginx service temporarily causing your sites to go offline\n"
|
||||
"Do you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
setup_crontab()
|
||||
|
||||
service('nginx', 'stop')
|
||||
service("nginx", "stop")
|
||||
exec_cmd(f"{get_certbot_path()} renew")
|
||||
service('nginx', 'start')
|
||||
service("nginx", "start")
|
||||
|
||||
|
||||
def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
|
||||
|
||||
def _get_domains(domain):
|
||||
domain_list = [domain]
|
||||
|
||||
if not domain.startswith('*.'):
|
||||
if not domain.startswith("*."):
|
||||
# add wildcard caracter to domain if missing
|
||||
domain_list.append(f'*.{domain}')
|
||||
domain_list.append(f"*.{domain}")
|
||||
else:
|
||||
# include base domain based on flag
|
||||
domain_list.append(domain.replace('*.', ''))
|
||||
domain_list.append(domain.replace("*.", ""))
|
||||
|
||||
if exclude_base_domain:
|
||||
domain_list.remove(domain.replace('*.', ''))
|
||||
domain_list.remove(domain.replace("*.", ""))
|
||||
|
||||
return domain_list
|
||||
|
||||
@ -147,14 +164,16 @@ def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
|
||||
|
||||
domain_list = _get_domains(domain.strip())
|
||||
|
||||
email_param = ''
|
||||
email_param = ""
|
||||
if email:
|
||||
email_param = f'--email {email}'
|
||||
email_param = f"--email {email}"
|
||||
|
||||
try:
|
||||
exec_cmd(f"{get_certbot_path()} certonly --manual --preferred-challenges=dns {email_param} \
|
||||
exec_cmd(
|
||||
f"{get_certbot_path()} certonly --manual --preferred-challenges=dns {email_param} \
|
||||
--server https://acme-v02.api.letsencrypt.org/directory \
|
||||
--agree-tos -d {' -d '.join(domain_list)}")
|
||||
--agree-tos -d {' -d '.join(domain_list)}"
|
||||
)
|
||||
|
||||
except CommandFailedError:
|
||||
print("There was a problem trying to setup SSL")
|
||||
@ -165,7 +184,7 @@ def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
|
||||
"wildcard": {
|
||||
"domain": domain,
|
||||
"ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
|
||||
"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem")
|
||||
"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -174,4 +193,4 @@ def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):

make_nginx_conf(bench_path)
print("Restrting Nginx service")
service('nginx', 'restart')
service("nginx", "restart")
@ -17,10 +17,12 @@ def make_nginx_conf(bench_path, yes=False):
|
||||
conf_path = os.path.join(bench_path, "config", "nginx.conf")
|
||||
|
||||
if not yes and os.path.exists(conf_path):
|
||||
if not click.confirm('nginx.conf already exists and this will overwrite it. Do you want to continue?'):
|
||||
if not click.confirm(
|
||||
"nginx.conf already exists and this will overwrite it. Do you want to continue?"
|
||||
):
|
||||
return
|
||||
|
||||
template = bench.config.env().get_template('nginx.conf')
|
||||
template = bench.config.env().get_template("nginx.conf")
|
||||
bench_path = os.path.abspath(bench_path)
|
||||
sites_path = os.path.join(bench_path, "sites")
|
||||
|
||||
@ -28,37 +30,39 @@ def make_nginx_conf(bench_path, yes=False):
|
||||
sites = prepare_sites(config, bench_path)
|
||||
bench_name = get_bench_name(bench_path)
|
||||
|
||||
allow_rate_limiting = config.get('allow_rate_limiting', False)
|
||||
allow_rate_limiting = config.get("allow_rate_limiting", False)
|
||||
|
||||
template_vars = {
|
||||
"sites_path": sites_path,
|
||||
"http_timeout": config.get("http_timeout"),
|
||||
"sites": sites,
|
||||
"webserver_port": config.get('webserver_port'),
|
||||
"socketio_port": config.get('socketio_port'),
|
||||
"webserver_port": config.get("webserver_port"),
|
||||
"socketio_port": config.get("socketio_port"),
|
||||
"bench_name": bench_name,
|
||||
"error_pages": get_error_pages(),
|
||||
"allow_rate_limiting": allow_rate_limiting,
|
||||
# for nginx map variable
|
||||
"random_string": "".join(random.choice(string.ascii_lowercase) for i in range(7))
|
||||
"random_string": "".join(random.choice(string.ascii_lowercase) for i in range(7)),
|
||||
}
|
||||
|
||||
if allow_rate_limiting:
|
||||
template_vars.update({
|
||||
'bench_name_hash': hashlib.sha256(bench_name).hexdigest()[:16],
|
||||
'limit_conn_shared_memory': get_limit_conn_shared_memory()
|
||||
})
|
||||
template_vars.update(
|
||||
{
|
||||
"bench_name_hash": hashlib.sha256(bench_name).hexdigest()[:16],
|
||||
"limit_conn_shared_memory": get_limit_conn_shared_memory(),
|
||||
}
|
||||
)
|
||||
|
||||
nginx_conf = template.render(**template_vars)
|
||||
|
||||
|
||||
with open(conf_path, "w") as f:
|
||||
f.write(nginx_conf)
|
||||
|
||||
|
||||
def make_bench_manager_nginx_conf(bench_path, yes=False, port=23624, domain=None):
|
||||
from bench.config.site_config import get_site_config
|
||||
|
||||
template = bench.config.env().get_template('bench_manager_nginx.conf')
|
||||
template = bench.config.env().get_template("bench_manager_nginx.conf")
|
||||
bench_path = os.path.abspath(bench_path)
|
||||
sites_path = os.path.join(bench_path, "sites")
|
||||
|
||||
@ -72,12 +76,12 @@ def make_bench_manager_nginx_conf(bench_path, yes=False, port=23624, domain=None
|
||||
"bench_manager_site_name": "bench-manager.local",
|
||||
"sites_path": sites_path,
|
||||
"http_timeout": config.get("http_timeout"),
|
||||
"webserver_port": config.get('webserver_port'),
|
||||
"socketio_port": config.get('socketio_port'),
|
||||
"webserver_port": config.get("webserver_port"),
|
||||
"socketio_port": config.get("socketio_port"),
|
||||
"bench_name": bench_name,
|
||||
"error_pages": get_error_pages(),
|
||||
"ssl_certificate": site_config.get('ssl_certificate'),
|
||||
"ssl_certificate_key": site_config.get('ssl_certificate_key')
|
||||
"ssl_certificate": site_config.get("ssl_certificate"),
|
||||
"ssl_certificate_key": site_config.get("ssl_certificate_key"),
|
||||
}
|
||||
|
||||
bench_manager_nginx_conf = template.render(**template_vars)
|
||||
@ -85,29 +89,31 @@ def make_bench_manager_nginx_conf(bench_path, yes=False, port=23624, domain=None
|
||||
conf_path = os.path.join(bench_path, "config", "nginx.conf")
|
||||
|
||||
if not yes and os.path.exists(conf_path):
|
||||
click.confirm('nginx.conf already exists and bench-manager configuration will be appended to it. Do you want to continue?',
|
||||
abort=True)
|
||||
click.confirm(
|
||||
"nginx.conf already exists and bench-manager configuration will be appended to it. Do you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
with open(conf_path, "a") as myfile:
|
||||
myfile.write(bench_manager_nginx_conf)
|
||||
|
||||
|
||||
def prepare_sites(config, bench_path):
|
||||
sites = {
|
||||
"that_use_port": [],
|
||||
"that_use_dns": [],
|
||||
"that_use_ssl": [],
|
||||
"that_use_wildcard_ssl": []
|
||||
"that_use_wildcard_ssl": [],
|
||||
}
|
||||
|
||||
domain_map = {}
|
||||
ports_in_use = {}
|
||||
|
||||
dns_multitenant = config.get('dns_multitenant')
|
||||
dns_multitenant = config.get("dns_multitenant")
|
||||
|
||||
shared_port_exception_found = False
|
||||
sites_configs = get_sites_with_config(bench_path=bench_path)
|
||||
|
||||
|
||||
# preload all preset site ports to avoid conflicts
|
||||
|
||||
if not dns_multitenant:
|
||||
@ -119,20 +125,20 @@ def prepare_sites(config, bench_path):
|
||||
|
||||
for site in sites_configs:
|
||||
if dns_multitenant:
|
||||
domain = site.get('domain')
|
||||
domain = site.get("domain")
|
||||
|
||||
if domain:
|
||||
# when site's folder name is different than domain name
|
||||
domain_map[domain] = site['name']
|
||||
domain_map[domain] = site["name"]
|
||||
|
||||
site_name = domain or site['name']
|
||||
site_name = domain or site["name"]
|
||||
|
||||
if site.get('wildcard'):
|
||||
if site.get("wildcard"):
|
||||
sites["that_use_wildcard_ssl"].append(site_name)
|
||||
|
||||
if not sites.get('wildcard_ssl_certificate'):
|
||||
sites["wildcard_ssl_certificate"] = site['ssl_certificate']
|
||||
sites["wildcard_ssl_certificate_key"] = site['ssl_certificate_key']
|
||||
if not sites.get("wildcard_ssl_certificate"):
|
||||
sites["wildcard_ssl_certificate"] = site["ssl_certificate"]
|
||||
sites["wildcard_ssl_certificate_key"] = site["ssl_certificate_key"]
|
||||
|
||||
elif site.get("ssl_certificate") and site.get("ssl_certificate_key"):
|
||||
sites["that_use_ssl"].append(site)
|
||||
@ -157,7 +163,6 @@ def prepare_sites(config, bench_path):
|
||||
|
||||
sites["that_use_port"].append(site)
|
||||
|
||||
|
||||
if not dns_multitenant and shared_port_exception_found:
|
||||
message = "Port conflicts found:"
|
||||
port_conflict_index = 0
|
||||
@ -176,11 +181,11 @@ def prepare_sites(config, bench_path):
|
||||
|
||||
print(message)
|
||||
|
||||
|
||||
sites['domain_map'] = domain_map
|
||||
sites["domain_map"] = domain_map
|
||||
|
||||
return sites
|
||||
|
||||
|
||||
def get_sites_with_config(bench_path):
|
||||
from bench.bench import Bench
|
||||
from bench.config.site_config import get_site_config
|
||||
@ -188,94 +193,105 @@ def get_sites_with_config(bench_path):
|
||||
bench = Bench(bench_path)
|
||||
sites = bench.sites
|
||||
conf = bench.conf
|
||||
dns_multitenant = conf.get('dns_multitenant')
|
||||
dns_multitenant = conf.get("dns_multitenant")
|
||||
|
||||
ret = []
|
||||
for site in sites:
|
||||
try:
|
||||
site_config = get_site_config(site, bench_path=bench_path)
|
||||
except Exception as e:
|
||||
strict_nginx = conf.get('strict_nginx')
|
||||
strict_nginx = conf.get("strict_nginx")
|
||||
if strict_nginx:
|
||||
print(f"\n\nERROR: The site config for the site {site} is broken.",
|
||||
print(
|
||||
f"\n\nERROR: The site config for the site {site} is broken.",
|
||||
"If you want this command to pass, instead of just throwing an error,",
|
||||
"You may remove the 'strict_nginx' flag from common_site_config.json or set it to 0",
|
||||
"\n\n")
|
||||
"\n\n",
|
||||
)
|
||||
raise e
|
||||
else:
|
||||
print(f"\n\nWARNING: The site config for the site {site} is broken.",
|
||||
print(
|
||||
f"\n\nWARNING: The site config for the site {site} is broken.",
|
||||
"If you want this command to fail, instead of just showing a warning,",
|
||||
"You may add the 'strict_nginx' flag to common_site_config.json and set it to 1",
|
||||
"\n\n")
|
||||
"\n\n",
|
||||
)
|
||||
continue
|
||||
|
||||
ret.append({
|
||||
"name": site,
|
||||
"port": site_config.get('nginx_port'),
|
||||
"ssl_certificate": site_config.get('ssl_certificate'),
|
||||
"ssl_certificate_key": site_config.get('ssl_certificate_key')
|
||||
})
|
||||
ret.append(
|
||||
{
|
||||
"name": site,
|
||||
"port": site_config.get("nginx_port"),
|
||||
"ssl_certificate": site_config.get("ssl_certificate"),
|
||||
"ssl_certificate_key": site_config.get("ssl_certificate_key"),
|
||||
}
|
||||
)
|
||||
|
||||
if dns_multitenant and site_config.get('domains'):
|
||||
for domain in site_config.get('domains'):
|
||||
if dns_multitenant and site_config.get("domains"):
|
||||
for domain in site_config.get("domains"):
|
||||
# domain can be a string or a dict with 'domain', 'ssl_certificate', 'ssl_certificate_key'
|
||||
if isinstance(domain, str):
|
||||
domain = { 'domain': domain }
|
||||
domain = {"domain": domain}
|
||||
|
||||
domain['name'] = site
|
||||
domain["name"] = site
|
||||
ret.append(domain)
|
||||
|
||||
use_wildcard_certificate(bench_path, ret)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def use_wildcard_certificate(bench_path, ret):
|
||||
'''
|
||||
stored in common_site_config.json as:
|
||||
"wildcard": {
|
||||
"domain": "*.erpnext.com",
|
||||
"ssl_certificate": "/path/to/erpnext.com.cert",
|
||||
"ssl_certificate_key": "/path/to/erpnext.com.key"
|
||||
}
|
||||
'''
|
||||
"""
|
||||
stored in common_site_config.json as:
|
||||
"wildcard": {
|
||||
"domain": "*.erpnext.com",
|
||||
"ssl_certificate": "/path/to/erpnext.com.cert",
|
||||
"ssl_certificate_key": "/path/to/erpnext.com.key"
|
||||
}
|
||||
"""
|
||||
from bench.bench import Bench
|
||||
|
||||
config = Bench(bench_path).conf
|
||||
wildcard = config.get('wildcard')
|
||||
wildcard = config.get("wildcard")
|
||||
|
||||
if not wildcard:
|
||||
return
|
||||
|
||||
domain = wildcard['domain']
|
||||
ssl_certificate = wildcard['ssl_certificate']
|
||||
ssl_certificate_key = wildcard['ssl_certificate_key']
|
||||
domain = wildcard["domain"]
|
||||
ssl_certificate = wildcard["ssl_certificate"]
|
||||
ssl_certificate_key = wildcard["ssl_certificate_key"]
|
||||
|
||||
# If domain is set as "*" all domains will be included
|
||||
if domain.startswith('*'):
|
||||
if domain.startswith("*"):
|
||||
domain = domain[1:]
|
||||
else:
|
||||
domain = '.' + domain
|
||||
domain = "." + domain
|
||||
|
||||
for site in ret:
|
||||
if site.get('ssl_certificate'):
|
||||
if site.get("ssl_certificate"):
|
||||
continue
|
||||
|
||||
if (site.get('domain') or site['name']).endswith(domain):
|
||||
if (site.get("domain") or site["name"]).endswith(domain):
|
||||
# example: ends with .erpnext.com
|
||||
site['ssl_certificate'] = ssl_certificate
|
||||
site['ssl_certificate_key'] = ssl_certificate_key
|
||||
site['wildcard'] = 1
|
||||
site["ssl_certificate"] = ssl_certificate
|
||||
site["ssl_certificate_key"] = ssl_certificate_key
|
||||
site["wildcard"] = 1
|
||||
|
||||
|
||||
def get_error_pages():
|
||||
import bench
|
||||
bench_app_path = os.path.abspath(bench.__path__[0])
|
||||
templates = os.path.join(bench_app_path, 'config', 'templates')
|
||||
|
||||
return {
|
||||
502: os.path.join(templates, '502.html')
|
||||
}
|
||||
bench_app_path = os.path.abspath(bench.__path__[0])
|
||||
templates = os.path.join(bench_app_path, "config", "templates")
|
||||
|
||||
return {502: os.path.join(templates, "502.html")}
|
||||
|
||||
|
||||
def get_limit_conn_shared_memory():
|
||||
"""Allocate 2 percent of total virtual memory as shared memory for nginx limit_conn_zone"""
|
||||
total_vm = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')) / (1024 * 1024) # in MB
|
||||
total_vm = (os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")) / (
|
||||
1024 * 1024
|
||||
) # in MB
|
||||
|
||||
return int(0.02 * total_vm)
|
||||
|
@ -13,18 +13,25 @@ from bench.bench import Bench

def setup_procfile(bench_path, yes=False, skip_redis=False):
config = Bench(bench_path).conf
procfile_path = os.path.join(bench_path, 'Procfile')
procfile_path = os.path.join(bench_path, "Procfile")
if not yes and os.path.exists(procfile_path):
click.confirm('A Procfile already exists and this will overwrite it. Do you want to continue?',
abort=True)
click.confirm(
"A Procfile already exists and this will overwrite it. Do you want to continue?",
abort=True,
)

procfile = bench.config.env().get_template('Procfile').render(
node=which("node") or which("nodejs"),
use_rq=use_rq(bench_path),
webserver_port=config.get('webserver_port'),
CI=os.environ.get('CI'),
skip_redis=skip_redis,
workers=config.get("workers", {}))
procfile = (
bench.config.env()
.get_template("Procfile")
.render(
node=which("node") or which("nodejs"),
use_rq=use_rq(bench_path),
webserver_port=config.get("webserver_port"),
CI=os.environ.get("CI"),
skip_redis=skip_redis,
workers=config.get("workers", {}),
)
)

with open(procfile_path, 'w') as f:
with open(procfile_path, "w") as f:
f.write(procfile)
@ -6,7 +6,10 @@ import sys

# imports - module imports
import bench
from bench.config.nginx import make_nginx_conf
from bench.config.supervisor import generate_supervisor_config, update_supervisord_config
from bench.config.supervisor import (
generate_supervisor_config,
update_supervisord_config,
)
from bench.config.systemd import generate_systemd_config
from bench.bench import Bench
from bench.utils import exec_cmd, which, get_bench_name, get_cmd_output, log
@ -28,16 +31,18 @@ def setup_production_prerequisites():
|
||||
exec_cmd("bench setup role supervisor")
|
||||
|
||||
|
||||
def setup_production(user, bench_path='.', yes=False):
|
||||
def setup_production(user, bench_path=".", yes=False):
|
||||
print("Setting Up prerequisites...")
|
||||
setup_production_prerequisites()
|
||||
|
||||
conf = Bench(bench_path).conf
|
||||
|
||||
if conf.get('restart_supervisor_on_update') and conf.get('restart_systemd_on_update'):
|
||||
raise Exception("You cannot use supervisor and systemd at the same time. Modify your common_site_config accordingly." )
|
||||
if conf.get("restart_supervisor_on_update") and conf.get("restart_systemd_on_update"):
|
||||
raise Exception(
|
||||
"You cannot use supervisor and systemd at the same time. Modify your common_site_config accordingly."
|
||||
)
|
||||
|
||||
if conf.get('restart_systemd_on_update'):
|
||||
if conf.get("restart_systemd_on_update"):
|
||||
print("Setting Up systemd...")
|
||||
generate_systemd_config(bench_path=bench_path, user=user, yes=yes)
|
||||
else:
|
||||
@ -51,45 +56,54 @@ def setup_production(user, bench_path='.', yes=False):
|
||||
remove_default_nginx_configs()
|
||||
|
||||
bench_name = get_bench_name(bench_path)
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"
|
||||
|
||||
print("Setting Up symlinks and reloading services...")
|
||||
if conf.get('restart_supervisor_on_update'):
|
||||
if conf.get("restart_supervisor_on_update"):
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), f'{bench_name}.{supervisor_conf_extn}')
|
||||
supervisor_conf = os.path.join(
|
||||
get_supervisor_confdir(), f"{bench_name}.{supervisor_conf_extn}"
|
||||
)
|
||||
|
||||
# Check if symlink exists, If not then create it.
|
||||
if not os.path.islink(supervisor_conf):
|
||||
os.symlink(os.path.abspath(os.path.join(bench_path, 'config', 'supervisor.conf')), supervisor_conf)
|
||||
os.symlink(
|
||||
os.path.abspath(os.path.join(bench_path, "config", "supervisor.conf")),
|
||||
supervisor_conf,
|
||||
)
|
||||
|
||||
if not os.path.islink(nginx_conf):
|
||||
os.symlink(os.path.abspath(os.path.join(bench_path, 'config', 'nginx.conf')), nginx_conf)
|
||||
os.symlink(
|
||||
os.path.abspath(os.path.join(bench_path, "config", "nginx.conf")), nginx_conf
|
||||
)
|
||||
|
||||
if conf.get('restart_supervisor_on_update'):
|
||||
if conf.get("restart_supervisor_on_update"):
|
||||
reload_supervisor()
|
||||
|
||||
if os.environ.get('NO_SERVICE_RESTART'):
|
||||
if os.environ.get("NO_SERVICE_RESTART"):
|
||||
return
|
||||
|
||||
reload_nginx()
|
||||
|
||||
|
||||
def disable_production(bench_path='.'):
|
||||
def disable_production(bench_path="."):
|
||||
bench_name = get_bench_name(bench_path)
|
||||
conf = Bench(bench_path).conf
|
||||
|
||||
# supervisorctl
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), f'{bench_name}.{supervisor_conf_extn}')
|
||||
supervisor_conf = os.path.join(
|
||||
get_supervisor_confdir(), f"{bench_name}.{supervisor_conf_extn}"
|
||||
)
|
||||
|
||||
if os.path.islink(supervisor_conf):
|
||||
os.unlink(supervisor_conf)
|
||||
|
||||
if conf.get('restart_supervisor_on_update'):
|
||||
if conf.get("restart_supervisor_on_update"):
|
||||
reload_supervisor()
|
||||
|
||||
# nginx
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"
|
||||
|
||||
if os.path.islink(nginx_conf):
|
||||
os.unlink(nginx_conf)
|
||||
@ -98,10 +112,10 @@ def disable_production(bench_path='.'):
|
||||
|
||||
|
||||
def service(service_name, service_option):
|
||||
if os.path.basename(which('systemctl') or '') == 'systemctl' and is_running_systemd():
|
||||
if os.path.basename(which("systemctl") or "") == "systemctl" and is_running_systemd():
|
||||
exec_cmd(f"sudo systemctl {service_option} {service_name}")
|
||||
|
||||
elif os.path.basename(which('service') or '') == 'service':
|
||||
elif os.path.basename(which("service") or "") == "service":
|
||||
exec_cmd(f"sudo service {service_name} {service_option}")
|
||||
|
||||
else:
|
||||
@ -115,18 +129,29 @@ def service(service_name, service_option):
|
||||
exec_cmd(service_manager_command)
|
||||
|
||||
else:
|
||||
log(f"No service manager found: '{service_name} {service_option}' failed to execute", level=2)
|
||||
log(
|
||||
f"No service manager found: '{service_name} {service_option}' failed to execute",
|
||||
level=2,
|
||||
)
|
||||
|
||||
|
||||
def get_supervisor_confdir():
|
||||
possiblities = ('/etc/supervisor/conf.d', '/etc/supervisor.d/', '/etc/supervisord/conf.d', '/etc/supervisord.d')
|
||||
possiblities = (
|
||||
"/etc/supervisor/conf.d",
|
||||
"/etc/supervisor.d/",
|
||||
"/etc/supervisord/conf.d",
|
||||
"/etc/supervisord.d",
|
||||
)
|
||||
for possiblity in possiblities:
|
||||
if os.path.exists(possiblity):
|
||||
return possiblity
|
||||
|
||||
|
||||
def remove_default_nginx_configs():
|
||||
default_nginx_configs = ['/etc/nginx/conf.d/default.conf', '/etc/nginx/sites-enabled/default']
|
||||
default_nginx_configs = [
|
||||
"/etc/nginx/conf.d/default.conf",
|
||||
"/etc/nginx/sites-enabled/default",
|
||||
]
|
||||
|
||||
for conf_file in default_nginx_configs:
|
||||
if os.path.exists(conf_file):
|
||||
@ -134,11 +159,17 @@ def remove_default_nginx_configs():
|
||||
|
||||
|
||||
def is_centos7():
|
||||
return os.path.exists('/etc/redhat-release') and get_cmd_output("cat /etc/redhat-release | sed 's/Linux\ //g' | cut -d' ' -f3 | cut -d. -f1").strip() == '7'
|
||||
return (
|
||||
os.path.exists("/etc/redhat-release")
|
||||
and get_cmd_output(
|
||||
r"cat /etc/redhat-release | sed 's/Linux\ //g' | cut -d' ' -f3 | cut -d. -f1"
|
||||
).strip()
|
||||
== "7"
|
||||
)
|
||||
|
||||
|
||||
def is_running_systemd():
|
||||
with open('/proc/1/comm') as f:
|
||||
with open("/proc/1/comm") as f:
|
||||
comm = f.read().strip()
|
||||
if comm == "init":
|
||||
return False
|
||||
@ -148,41 +179,42 @@ def is_running_systemd():
|
||||
|
||||
|
||||
def reload_supervisor():
|
||||
supervisorctl = which('supervisorctl')
|
||||
supervisorctl = which("supervisorctl")
|
||||
|
||||
try:
|
||||
# first try reread/update
|
||||
exec_cmd(f'{supervisorctl} reread')
|
||||
exec_cmd(f'{supervisorctl} update')
|
||||
exec_cmd(f"{supervisorctl} reread")
|
||||
exec_cmd(f"{supervisorctl} update")
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# something is wrong, so try reloading
|
||||
exec_cmd(f'{supervisorctl} reload')
|
||||
exec_cmd(f"{supervisorctl} reload")
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# then try restart for centos
|
||||
service('supervisord', 'restart')
|
||||
service("supervisord", "restart")
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# else try restart for ubuntu / debian
|
||||
service('supervisor', 'restart')
|
||||
service("supervisor", "restart")
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
|
||||
|
||||
def reload_nginx():
|
||||
try:
|
||||
exec_cmd(f"sudo {which('nginx')} -t")
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
service('nginx', 'reload')
|
||||
service("nginx", "reload")
|
||||
|
@ -15,36 +15,33 @@ def generate_config(bench_path):
|
||||
redis_version = get_redis_version()
|
||||
|
||||
ports = {}
|
||||
for key in ('redis_cache', 'redis_queue', 'redis_socketio'):
|
||||
for key in ("redis_cache", "redis_queue", "redis_socketio"):
|
||||
ports[key] = urlparse(config[key]).port
|
||||
|
||||
write_redis_config(
|
||||
template_name='redis_queue.conf',
|
||||
template_name="redis_queue.conf",
|
||||
context={
|
||||
"port": ports['redis_queue'],
|
||||
"port": ports["redis_queue"],
|
||||
"bench_path": os.path.abspath(bench_path),
|
||||
"redis_version": redis_version
|
||||
"redis_version": redis_version,
|
||||
},
|
||||
bench_path=bench_path
|
||||
bench_path=bench_path,
|
||||
)
|
||||
|
||||
write_redis_config(
|
||||
template_name='redis_socketio.conf',
|
||||
context={
|
||||
"port": ports['redis_socketio'],
|
||||
"redis_version": redis_version
|
||||
},
|
||||
bench_path=bench_path
|
||||
template_name="redis_socketio.conf",
|
||||
context={"port": ports["redis_socketio"], "redis_version": redis_version},
|
||||
bench_path=bench_path,
|
||||
)
|
||||
|
||||
write_redis_config(
|
||||
template_name='redis_cache.conf',
|
||||
template_name="redis_cache.conf",
|
||||
context={
|
||||
"maxmemory": config.get('cache_maxmemory', get_max_redis_memory()),
|
||||
"port": ports['redis_cache'],
|
||||
"redis_version": redis_version
|
||||
"maxmemory": config.get("cache_maxmemory", get_max_redis_memory()),
|
||||
"port": ports["redis_cache"],
|
||||
"redis_version": redis_version,
|
||||
},
|
||||
bench_path=bench_path
|
||||
bench_path=bench_path,
|
||||
)
|
||||
|
||||
# make pids folder
|
||||
@ -60,9 +57,10 @@ def generate_config(bench_path):
|
||||
acl_rq_path = os.path.join(bench_path, "config", "redis_queue.acl")
|
||||
acl_redis_cache_path = os.path.join(bench_path, "config", "redis_cache.acl")
|
||||
acl_redis_socketio_path = os.path.join(bench_path, "config", "redis_socketio.acl")
|
||||
open(acl_rq_path, 'a').close()
|
||||
open(acl_redis_cache_path, 'a').close()
|
||||
open(acl_redis_socketio_path, 'a').close()
|
||||
open(acl_rq_path, "a").close()
|
||||
open(acl_redis_cache_path, "a").close()
|
||||
open(acl_redis_socketio_path, "a").close()
|
||||
|
||||
|
||||
def write_redis_config(template_name, context, bench_path):
|
||||
template = bench.config.env().get_template(template_name)
|
||||
@ -73,25 +71,27 @@ def write_redis_config(template_name, context, bench_path):
|
||||
if "pid_path" not in context:
|
||||
context["pid_path"] = os.path.join(context["config_path"], "pids")
|
||||
|
||||
with open(os.path.join(bench_path, 'config', template_name), 'w') as f:
|
||||
with open(os.path.join(bench_path, "config", template_name), "w") as f:
|
||||
f.write(template.render(**context))
|
||||
|
||||
|
||||
def get_redis_version():
|
||||
import semantic_version
|
||||
|
||||
version_string = subprocess.check_output('redis-server --version', shell=True)
|
||||
version_string = version_string.decode('utf-8').strip()
|
||||
version_string = subprocess.check_output("redis-server --version", shell=True)
|
||||
version_string = version_string.decode("utf-8").strip()
|
||||
# extract version number from string
|
||||
version = re.findall("\d+\.\d+", version_string)
|
||||
version = re.findall(r"\d+\.\d+", version_string)
|
||||
if not version:
|
||||
return None
|
||||
|
||||
version = semantic_version.Version(version[0], partial=True)
|
||||
return float(f'{version.major}.{version.minor}')
|
||||
return float(f"{version.major}.{version.minor}")
|
||||
|
||||
|
||||
def get_max_redis_memory():
|
||||
try:
|
||||
max_mem = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
|
||||
max_mem = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
|
||||
except ValueError:
|
||||
max_mem = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']).strip())
|
||||
return max(50, int((max_mem / (1024. ** 2)) * 0.05))
|
||||
max_mem = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]).strip())
|
||||
return max(50, int((max_mem / (1024.0**2)) * 0.05))
|
||||
|
@ -4,33 +4,51 @@ import os
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
def get_site_config(site, bench_path='.'):
|
||||
config_path = os.path.join(bench_path, 'sites', site, 'site_config.json')
|
||||
def get_site_config(site, bench_path="."):
|
||||
config_path = os.path.join(bench_path, "sites", site, "site_config.json")
|
||||
if not os.path.exists(config_path):
|
||||
return {}
|
||||
with open(config_path) as f:
|
||||
return json.load(f)
|
||||
|
||||
def put_site_config(site, config, bench_path='.'):
|
||||
config_path = os.path.join(bench_path, 'sites', site, 'site_config.json')
|
||||
with open(config_path, 'w') as f:
|
||||
|
||||
def put_site_config(site, config, bench_path="."):
|
||||
config_path = os.path.join(bench_path, "sites", site, "site_config.json")
|
||||
with open(config_path, "w") as f:
|
||||
return json.dump(config, f, indent=1)
|
||||
|
||||
def update_site_config(site, new_config, bench_path='.'):
|
||||
|
||||
def update_site_config(site, new_config, bench_path="."):
|
||||
config = get_site_config(site, bench_path=bench_path)
|
||||
config.update(new_config)
|
||||
put_site_config(site, config, bench_path=bench_path)
|
||||
|
||||
def set_nginx_port(site, port, bench_path='.', gen_config=True):
|
||||
set_site_config_nginx_property(site, {"nginx_port": port}, bench_path=bench_path, gen_config=gen_config)
|
||||
|
||||
def set_ssl_certificate(site, ssl_certificate, bench_path='.', gen_config=True):
|
||||
set_site_config_nginx_property(site, {"ssl_certificate": ssl_certificate}, bench_path=bench_path, gen_config=gen_config)
|
||||
def set_nginx_port(site, port, bench_path=".", gen_config=True):
|
||||
set_site_config_nginx_property(
|
||||
site, {"nginx_port": port}, bench_path=bench_path, gen_config=gen_config
|
||||
)
|
||||
|
||||
def set_ssl_certificate_key(site, ssl_certificate_key, bench_path='.', gen_config=True):
|
||||
set_site_config_nginx_property(site, {"ssl_certificate_key": ssl_certificate_key}, bench_path=bench_path, gen_config=gen_config)
|
||||
|
||||
def set_site_config_nginx_property(site, config, bench_path='.', gen_config=True):
|
||||
def set_ssl_certificate(site, ssl_certificate, bench_path=".", gen_config=True):
|
||||
set_site_config_nginx_property(
|
||||
site,
|
||||
{"ssl_certificate": ssl_certificate},
|
||||
bench_path=bench_path,
|
||||
gen_config=gen_config,
|
||||
)
|
||||
|
||||
|
||||
def set_ssl_certificate_key(site, ssl_certificate_key, bench_path=".", gen_config=True):
|
||||
set_site_config_nginx_property(
|
||||
site,
|
||||
{"ssl_certificate_key": ssl_certificate_key},
|
||||
bench_path=bench_path,
|
||||
gen_config=gen_config,
|
||||
)
|
||||
|
||||
|
||||
def set_site_config_nginx_property(site, config, bench_path=".", gen_config=True):
|
||||
from bench.config.nginx import make_nginx_conf
|
||||
from bench.bench import Bench
|
||||
|
||||
@ -40,36 +58,40 @@ def set_site_config_nginx_property(site, config, bench_path='.', gen_config=True
|
||||
if gen_config:
|
||||
make_nginx_conf(bench_path=bench_path)
|
||||
|
||||
def set_url_root(site, url_root, bench_path='.'):
|
||||
|
||||
def set_url_root(site, url_root, bench_path="."):
|
||||
update_site_config(site, {"host_name": url_root}, bench_path=bench_path)
|
||||
|
||||
def add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path='.'):
|
||||
|
||||
def add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path="."):
|
||||
domains = get_domains(site, bench_path)
|
||||
for d in domains:
|
||||
if (isinstance(d, dict) and d['domain']==domain) or d==domain:
|
||||
if (isinstance(d, dict) and d["domain"] == domain) or d == domain:
|
||||
print(f"Domain {domain} already exists")
|
||||
return
|
||||
|
||||
if ssl_certificate_key and ssl_certificate:
|
||||
domain = {
|
||||
'domain' : domain,
|
||||
'ssl_certificate': ssl_certificate,
|
||||
'ssl_certificate_key': ssl_certificate_key
|
||||
"domain": domain,
|
||||
"ssl_certificate": ssl_certificate,
|
||||
"ssl_certificate_key": ssl_certificate_key,
|
||||
}
|
||||
|
||||
domains.append(domain)
|
||||
update_site_config(site, { "domains": domains }, bench_path=bench_path)
|
||||
update_site_config(site, {"domains": domains}, bench_path=bench_path)
|
||||
|
||||
def remove_domain(site, domain, bench_path='.'):
|
||||
|
||||
def remove_domain(site, domain, bench_path="."):
|
||||
domains = get_domains(site, bench_path)
|
||||
for i, d in enumerate(domains):
|
||||
if (isinstance(d, dict) and d['domain']==domain) or d==domain:
|
||||
if (isinstance(d, dict) and d["domain"] == domain) or d == domain:
|
||||
domains.remove(d)
|
||||
break
|
||||
|
||||
update_site_config(site, { 'domains': domains }, bench_path=bench_path)
|
||||
update_site_config(site, {"domains": domains}, bench_path=bench_path)
|
||||
|
||||
def sync_domains(site, domains, bench_path='.'):
|
||||
|
||||
def sync_domains(site, domains, bench_path="."):
|
||||
"""Checks if there is a change in domains. If yes, updates the domains list."""
|
||||
changed = False
|
||||
existing_domains = get_domains_dict(get_domains(site, bench_path))
|
||||
@ -80,26 +102,28 @@ def sync_domains(site, domains, bench_path='.'):
|
||||
|
||||
else:
|
||||
for d in list(existing_domains.values()):
|
||||
if d != new_domains.get(d['domain']):
|
||||
if d != new_domains.get(d["domain"]):
|
||||
changed = True
|
||||
break
|
||||
|
||||
if changed:
|
||||
# replace existing domains with this one
|
||||
update_site_config(site, { 'domains': domains }, bench_path='.')
|
||||
update_site_config(site, {"domains": domains}, bench_path=".")
|
||||
|
||||
return changed
|
||||
|
||||
def get_domains(site, bench_path='.'):
|
||||
return get_site_config(site, bench_path=bench_path).get('domains') or []
|
||||
|
||||
def get_domains(site, bench_path="."):
|
||||
return get_site_config(site, bench_path=bench_path).get("domains") or []
|
||||
|
||||
|
||||
def get_domains_dict(domains):
|
||||
domains_dict = defaultdict(dict)
|
||||
for d in domains:
|
||||
if isinstance(d, str):
|
||||
domains_dict[d] = { 'domain': d }
|
||||
domains_dict[d] = {"domain": d}
|
||||
|
||||
elif isinstance(d, dict):
|
||||
domains_dict[d['domain']] = d
|
||||
domains_dict[d["domain"]] = d
|
||||
|
||||
return domains_dict
|
||||
|
@ -23,44 +23,56 @@ def generate_supervisor_config(bench_path, user=None, yes=False, skip_redis=Fals
|
||||
user = getpass.getuser()
|
||||
|
||||
config = Bench(bench_path).conf
|
||||
template = bench.config.env().get_template('supervisor.conf')
|
||||
template = bench.config.env().get_template("supervisor.conf")
|
||||
bench_dir = os.path.abspath(bench_path)
|
||||
|
||||
config = template.render(**{
|
||||
"bench_dir": bench_dir,
|
||||
"sites_dir": os.path.join(bench_dir, 'sites'),
|
||||
"user": user,
|
||||
"use_rq": use_rq(bench_path),
|
||||
"http_timeout": config.get("http_timeout", 120),
|
||||
"redis_server": which('redis-server'),
|
||||
"node": which('node') or which('nodejs'),
|
||||
"redis_cache_config": os.path.join(bench_dir, 'config', 'redis_cache.conf'),
|
||||
"redis_socketio_config": os.path.join(bench_dir, 'config', 'redis_socketio.conf'),
|
||||
"redis_queue_config": os.path.join(bench_dir, 'config', 'redis_queue.conf'),
|
||||
"webserver_port": config.get('webserver_port', 8000),
|
||||
"gunicorn_workers": config.get('gunicorn_workers', get_gunicorn_workers()["gunicorn_workers"]),
|
||||
"bench_name": get_bench_name(bench_path),
|
||||
"background_workers": config.get('background_workers') or 1,
|
||||
"bench_cmd": which('bench'),
|
||||
"skip_redis": skip_redis,
|
||||
"workers": config.get("workers", {}),
|
||||
})
|
||||
config = template.render(
|
||||
**{
|
||||
"bench_dir": bench_dir,
|
||||
"sites_dir": os.path.join(bench_dir, "sites"),
|
||||
"user": user,
|
||||
"use_rq": use_rq(bench_path),
|
||||
"http_timeout": config.get("http_timeout", 120),
|
||||
"redis_server": which("redis-server"),
|
||||
"node": which("node") or which("nodejs"),
|
||||
"redis_cache_config": os.path.join(bench_dir, "config", "redis_cache.conf"),
|
||||
"redis_socketio_config": os.path.join(bench_dir, "config", "redis_socketio.conf"),
|
||||
"redis_queue_config": os.path.join(bench_dir, "config", "redis_queue.conf"),
|
||||
"webserver_port": config.get("webserver_port", 8000),
|
||||
"gunicorn_workers": config.get(
|
||||
"gunicorn_workers", get_gunicorn_workers()["gunicorn_workers"]
|
||||
),
|
||||
"bench_name": get_bench_name(bench_path),
|
||||
"background_workers": config.get("background_workers") or 1,
|
||||
"bench_cmd": which("bench"),
|
||||
"skip_redis": skip_redis,
|
||||
"workers": config.get("workers", {}),
|
||||
}
|
||||
)
|
||||
|
||||
conf_path = os.path.join(bench_path, 'config', 'supervisor.conf')
|
||||
conf_path = os.path.join(bench_path, "config", "supervisor.conf")
|
||||
if not yes and os.path.exists(conf_path):
|
||||
click.confirm('supervisor.conf already exists and this will overwrite it. Do you want to continue?',
|
||||
abort=True)
|
||||
click.confirm(
|
||||
"supervisor.conf already exists and this will overwrite it. Do you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
with open(conf_path, 'w') as f:
|
||||
with open(conf_path, "w") as f:
|
||||
f.write(config)
|
||||
|
||||
update_config({'restart_supervisor_on_update': True}, bench_path=bench_path)
|
||||
update_config({'restart_systemd_on_update': False}, bench_path=bench_path)
|
||||
update_config({"restart_supervisor_on_update": True}, bench_path=bench_path)
|
||||
update_config({"restart_systemd_on_update": False}, bench_path=bench_path)
|
||||
|
||||
|
||||
def get_supervisord_conf():
|
||||
"""Returns path of supervisord config from possible paths"""
|
||||
possibilities = ("supervisord.conf", "etc/supervisord.conf", "/etc/supervisord.conf", "/etc/supervisor/supervisord.conf", "/etc/supervisord.conf")
|
||||
possibilities = (
|
||||
"supervisord.conf",
|
||||
"etc/supervisord.conf",
|
||||
"/etc/supervisord.conf",
|
||||
"/etc/supervisor/supervisord.conf",
|
||||
"/etc/supervisord.conf",
|
||||
)
|
||||
|
||||
for possibility in possibilities:
|
||||
if os.path.exists(possibility):
|
||||
@ -77,10 +89,7 @@ def update_supervisord_config(user=None, yes=False):
|
||||
|
||||
supervisord_conf = get_supervisord_conf()
|
||||
section = "unix_http_server"
|
||||
updated_values = {
|
||||
"chmod": "0760",
|
||||
"chown": f"{user}:{user}"
|
||||
}
|
||||
updated_values = {"chmod": "0760", "chown": f"{user}:{user}"}
|
||||
supervisord_conf_changes = ""
|
||||
|
||||
if not supervisord_conf:
|
||||
@ -94,7 +103,7 @@ def update_supervisord_config(user=None, yes=False):
|
||||
config.add_section(section)
|
||||
action = f"Section {section} Added"
|
||||
logger.log(action)
|
||||
supervisord_conf_changes += '\n' + action
|
||||
supervisord_conf_changes += "\n" + action
|
||||
|
||||
for key, value in updated_values.items():
|
||||
try:
|
||||
@ -104,18 +113,25 @@ def update_supervisord_config(user=None, yes=False):
|
||||
|
||||
if current_value.strip() != value:
|
||||
config.set(section, key, value)
|
||||
action = f"Updated supervisord.conf: '{key}' changed from '{current_value}' to '{value}'"
|
||||
action = (
|
||||
f"Updated supervisord.conf: '{key}' changed from '{current_value}' to '{value}'"
|
||||
)
|
||||
logger.log(action)
|
||||
supervisord_conf_changes += '\n' + action
|
||||
supervisord_conf_changes += "\n" + action
|
||||
|
||||
if not supervisord_conf_changes:
|
||||
logger.error("supervisord.conf not updated")
|
||||
contents = "\n".join(f"{x}={y}" for x, y in updated_values.items())
|
||||
print(f"Update your {supervisord_conf} with the following values:\n[{section}]\n{contents}")
|
||||
print(
|
||||
f"Update your {supervisord_conf} with the following values:\n[{section}]\n{contents}"
|
||||
)
|
||||
return
|
||||
|
||||
if not yes:
|
||||
click.confirm(f"{supervisord_conf} will be updated with the following values:\n{supervisord_conf_changes}\nDo you want to continue?", abort=True)
|
||||
click.confirm(
|
||||
f"{supervisord_conf} will be updated with the following values:\n{supervisord_conf_changes}\nDo you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
try:
|
||||
with open(supervisord_conf, "w") as f:
|
||||
@ -125,4 +141,4 @@ def update_supervisord_config(user=None, yes=False):
logger.log(f"Updating supervisord.conf failed due to '{e}'")

# Reread supervisor configuration, reload supervisord and supervisorctl, restart services that were started
service('supervisor', 'reload')
service("supervisor", "reload")
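The update_supervisord_config hunks above use configparser to patch the [unix_http_server] section of supervisord.conf in place. A self-contained sketch of that read-modify-write cycle, assuming a writable config path (illustrative only, not the bench implementation):

import configparser

def set_unix_http_server(conf_path, user):
    # Load the existing supervisord config, make sure the section exists,
    # then update the socket permissions the same way the hunk above does.
    config = configparser.ConfigParser()
    config.read(conf_path)

    section = "unix_http_server"
    if not config.has_section(section):
        config.add_section(section)

    config.set(section, "chmod", "0760")
    config.set(section, "chown", f"{user}:{user}")

    with open(conf_path, "w") as f:
        config.write(f)

# set_unix_http_server("/etc/supervisor/supervisord.conf", "frappe")  # hypothetical path and user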
@ -13,9 +13,14 @@ from bench.config.common_site_config import get_gunicorn_workers, update_config
from bench.utils import exec_cmd, which, get_bench_name


def generate_systemd_config(bench_path, user=None, yes=False,
stop=False, create_symlinks=False,
delete_symlinks=False):
def generate_systemd_config(
bench_path,
user=None,
yes=False,
stop=False,
create_symlinks=False,
delete_symlinks=False,
):

if not user:
user = getpass.getuser()
@ -26,7 +31,9 @@ def generate_systemd_config(bench_path, user=None, yes=False,
bench_name = get_bench_name(bench_path)

if stop:
exec_cmd(f'sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)')
exec_cmd(
f"sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)"
)
return

if create_symlinks:
@ -37,38 +44,48 @@ def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
_delete_symlinks(bench_path)
|
||||
return
|
||||
|
||||
number_of_workers = config.get('background_workers') or 1
|
||||
number_of_workers = config.get("background_workers") or 1
|
||||
background_workers = []
|
||||
for i in range(number_of_workers):
|
||||
background_workers.append(get_bench_name(bench_path) + "-frappe-default-worker@" + str(i+1) + ".service")
|
||||
background_workers.append(
|
||||
get_bench_name(bench_path) + "-frappe-default-worker@" + str(i + 1) + ".service"
|
||||
)
|
||||
|
||||
for i in range(number_of_workers):
|
||||
background_workers.append(get_bench_name(bench_path) + "-frappe-short-worker@" + str(i+1) + ".service")
|
||||
background_workers.append(
|
||||
get_bench_name(bench_path) + "-frappe-short-worker@" + str(i + 1) + ".service"
|
||||
)
|
||||
|
||||
for i in range(number_of_workers):
|
||||
background_workers.append(get_bench_name(bench_path) + "-frappe-long-worker@" + str(i+1) + ".service")
|
||||
background_workers.append(
|
||||
get_bench_name(bench_path) + "-frappe-long-worker@" + str(i + 1) + ".service"
|
||||
)
|
||||
|
||||
bench_info = {
|
||||
"bench_dir": bench_dir,
|
||||
"sites_dir": os.path.join(bench_dir, 'sites'),
|
||||
"sites_dir": os.path.join(bench_dir, "sites"),
|
||||
"user": user,
|
||||
"use_rq": use_rq(bench_path),
|
||||
"http_timeout": config.get("http_timeout", 120),
|
||||
"redis_server": which('redis-server'),
|
||||
"node": which('node') or which('nodejs'),
|
||||
"redis_cache_config": os.path.join(bench_dir, 'config', 'redis_cache.conf'),
|
||||
"redis_socketio_config": os.path.join(bench_dir, 'config', 'redis_socketio.conf'),
|
||||
"redis_queue_config": os.path.join(bench_dir, 'config', 'redis_queue.conf'),
|
||||
"webserver_port": config.get('webserver_port', 8000),
|
||||
"gunicorn_workers": config.get('gunicorn_workers', get_gunicorn_workers()["gunicorn_workers"]),
|
||||
"redis_server": which("redis-server"),
|
||||
"node": which("node") or which("nodejs"),
|
||||
"redis_cache_config": os.path.join(bench_dir, "config", "redis_cache.conf"),
|
||||
"redis_socketio_config": os.path.join(bench_dir, "config", "redis_socketio.conf"),
|
||||
"redis_queue_config": os.path.join(bench_dir, "config", "redis_queue.conf"),
|
||||
"webserver_port": config.get("webserver_port", 8000),
|
||||
"gunicorn_workers": config.get(
|
||||
"gunicorn_workers", get_gunicorn_workers()["gunicorn_workers"]
|
||||
),
|
||||
"bench_name": get_bench_name(bench_path),
|
||||
"worker_target_wants": " ".join(background_workers),
|
||||
"bench_cmd": which('bench')
|
||||
"bench_cmd": which("bench"),
|
||||
}
|
||||
|
||||
if not yes:
|
||||
click.confirm('current systemd configuration will be overwritten. Do you want to continue?',
|
||||
abort=True)
|
||||
click.confirm(
|
||||
"current systemd configuration will be overwritten. Do you want to continue?",
|
||||
abort=True,
|
||||
)
|
||||
|
||||
setup_systemd_directory(bench_path)
|
||||
setup_main_config(bench_info, bench_path)
|
||||
@ -76,29 +93,44 @@ def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
setup_web_config(bench_info, bench_path)
|
||||
setup_redis_config(bench_info, bench_path)
|
||||
|
||||
update_config({'restart_systemd_on_update': True}, bench_path=bench_path)
|
||||
update_config({'restart_supervisor_on_update': False}, bench_path=bench_path)
|
||||
update_config({"restart_systemd_on_update": True}, bench_path=bench_path)
|
||||
update_config({"restart_supervisor_on_update": False}, bench_path=bench_path)
|
||||
|
||||
|
||||
def setup_systemd_directory(bench_path):
|
||||
if not os.path.exists(os.path.join(bench_path, 'config', 'systemd')):
|
||||
os.makedirs(os.path.join(bench_path, 'config', 'systemd'))
|
||||
if not os.path.exists(os.path.join(bench_path, "config", "systemd")):
|
||||
os.makedirs(os.path.join(bench_path, "config", "systemd"))
|
||||
|
||||
|
||||
def setup_main_config(bench_info, bench_path):
|
||||
# Main config
|
||||
bench_template = bench.config.env().get_template('systemd/frappe-bench.target')
|
||||
bench_template = bench.config.env().get_template("systemd/frappe-bench.target")
|
||||
bench_config = bench_template.render(**bench_info)
|
||||
bench_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '.target')
|
||||
bench_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + ".target"
|
||||
)
|
||||
|
||||
with open(bench_config_path, 'w') as f:
|
||||
with open(bench_config_path, "w") as f:
|
||||
f.write(bench_config)
|
||||
|
||||
|
||||
def setup_workers_config(bench_info, bench_path):
|
||||
# Worker Group
|
||||
bench_workers_target_template = bench.config.env().get_template('systemd/frappe-bench-workers.target')
|
||||
bench_default_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-default-worker.service')
|
||||
bench_short_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-short-worker.service')
|
||||
bench_long_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-long-worker.service')
|
||||
bench_schedule_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-schedule.service')
|
||||
bench_workers_target_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-workers.target"
|
||||
)
|
||||
bench_default_worker_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-frappe-default-worker.service"
|
||||
)
|
||||
bench_short_worker_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-frappe-short-worker.service"
|
||||
)
|
||||
bench_long_worker_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-frappe-long-worker.service"
|
||||
)
|
||||
bench_schedule_worker_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-frappe-schedule.service"
|
||||
)
|
||||
|
||||
bench_workers_target_config = bench_workers_target_template.render(**bench_info)
|
||||
bench_default_worker_config = bench_default_worker_template.render(**bench_info)
|
||||
@ -106,112 +138,175 @@ def setup_workers_config(bench_info, bench_path):
|
||||
bench_long_worker_config = bench_long_worker_template.render(**bench_info)
|
||||
bench_schedule_worker_config = bench_schedule_worker_template.render(**bench_info)
|
||||
|
||||
bench_workers_target_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-workers.target')
|
||||
bench_default_worker_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-frappe-default-worker@.service')
|
||||
bench_short_worker_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-frappe-short-worker@.service')
|
||||
bench_long_worker_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-frappe-long-worker@.service')
|
||||
bench_schedule_worker_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-frappe-schedule.service')
|
||||
bench_workers_target_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-workers.target"
|
||||
)
|
||||
bench_default_worker_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-frappe-default-worker@.service",
|
||||
)
|
||||
bench_short_worker_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-frappe-short-worker@.service",
|
||||
)
|
||||
bench_long_worker_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-frappe-long-worker@.service",
|
||||
)
|
||||
bench_schedule_worker_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-frappe-schedule.service",
|
||||
)
|
||||
|
||||
with open(bench_workers_target_config_path, 'w') as f:
|
||||
with open(bench_workers_target_config_path, "w") as f:
|
||||
f.write(bench_workers_target_config)
|
||||
|
||||
with open(bench_default_worker_config_path, 'w') as f:
|
||||
with open(bench_default_worker_config_path, "w") as f:
|
||||
f.write(bench_default_worker_config)
|
||||
|
||||
with open(bench_short_worker_config_path, 'w') as f:
|
||||
with open(bench_short_worker_config_path, "w") as f:
|
||||
f.write(bench_short_worker_config)
|
||||
|
||||
with open(bench_long_worker_config_path, 'w') as f:
|
||||
with open(bench_long_worker_config_path, "w") as f:
|
||||
f.write(bench_long_worker_config)
|
||||
|
||||
with open(bench_schedule_worker_config_path, 'w') as f:
|
||||
with open(bench_schedule_worker_config_path, "w") as f:
|
||||
f.write(bench_schedule_worker_config)
|
||||
|
||||
|
||||
def setup_web_config(bench_info, bench_path):
|
||||
# Web Group
|
||||
bench_web_target_template = bench.config.env().get_template('systemd/frappe-bench-web.target')
|
||||
bench_web_service_template = bench.config.env().get_template('systemd/frappe-bench-frappe-web.service')
|
||||
bench_node_socketio_template = bench.config.env().get_template('systemd/frappe-bench-node-socketio.service')
|
||||
bench_web_target_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-web.target"
|
||||
)
|
||||
bench_web_service_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-frappe-web.service"
|
||||
)
|
||||
bench_node_socketio_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-node-socketio.service"
|
||||
)
|
||||
|
||||
bench_web_target_config = bench_web_target_template.render(**bench_info)
|
||||
bench_web_service_config = bench_web_service_template.render(**bench_info)
|
||||
bench_node_socketio_config = bench_node_socketio_template.render(**bench_info)
|
||||
|
||||
bench_web_target_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-web.target')
|
||||
bench_web_service_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-frappe-web.service')
|
||||
bench_node_socketio_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-node-socketio.service')
|
||||
bench_web_target_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-web.target"
|
||||
)
|
||||
bench_web_service_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-frappe-web.service"
|
||||
)
|
||||
bench_node_socketio_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-node-socketio.service",
|
||||
)
|
||||
|
||||
with open(bench_web_target_config_path, 'w') as f:
|
||||
with open(bench_web_target_config_path, "w") as f:
|
||||
f.write(bench_web_target_config)
|
||||
|
||||
with open(bench_web_service_config_path, 'w') as f:
|
||||
with open(bench_web_service_config_path, "w") as f:
|
||||
f.write(bench_web_service_config)
|
||||
|
||||
with open(bench_node_socketio_config_path, 'w') as f:
|
||||
with open(bench_node_socketio_config_path, "w") as f:
|
||||
f.write(bench_node_socketio_config)
|
||||
|
||||
|
||||
def setup_redis_config(bench_info, bench_path):
|
||||
# Redis Group
|
||||
bench_redis_target_template = bench.config.env().get_template('systemd/frappe-bench-redis.target')
|
||||
bench_redis_cache_template = bench.config.env().get_template('systemd/frappe-bench-redis-cache.service')
|
||||
bench_redis_queue_template = bench.config.env().get_template('systemd/frappe-bench-redis-queue.service')
|
||||
bench_redis_socketio_template = bench.config.env().get_template('systemd/frappe-bench-redis-socketio.service')
|
||||
bench_redis_target_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-redis.target"
|
||||
)
|
||||
bench_redis_cache_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-redis-cache.service"
|
||||
)
|
||||
bench_redis_queue_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-redis-queue.service"
|
||||
)
|
||||
bench_redis_socketio_template = bench.config.env().get_template(
|
||||
"systemd/frappe-bench-redis-socketio.service"
|
||||
)
|
||||
|
||||
bench_redis_target_config = bench_redis_target_template.render(**bench_info)
|
||||
bench_redis_cache_config = bench_redis_cache_template.render(**bench_info)
|
||||
bench_redis_queue_config = bench_redis_queue_template.render(**bench_info)
|
||||
bench_redis_socketio_config = bench_redis_socketio_template.render(**bench_info)
|
||||
|
||||
bench_redis_target_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-redis.target')
|
||||
bench_redis_cache_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-redis-cache.service')
|
||||
bench_redis_queue_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-redis-queue.service')
|
||||
bench_redis_socketio_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '-redis-socketio.service')
|
||||
bench_redis_target_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-redis.target"
|
||||
)
|
||||
bench_redis_cache_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-redis-cache.service"
|
||||
)
|
||||
bench_redis_queue_config_path = os.path.join(
|
||||
bench_path, "config", "systemd", bench_info.get("bench_name") + "-redis-queue.service"
|
||||
)
|
||||
bench_redis_socketio_config_path = os.path.join(
|
||||
bench_path,
|
||||
"config",
|
||||
"systemd",
|
||||
bench_info.get("bench_name") + "-redis-socketio.service",
|
||||
)
|
||||
|
||||
with open(bench_redis_target_config_path, 'w') as f:
|
||||
with open(bench_redis_target_config_path, "w") as f:
|
||||
f.write(bench_redis_target_config)
|
||||
|
||||
with open(bench_redis_cache_config_path, 'w') as f:
|
||||
with open(bench_redis_cache_config_path, "w") as f:
|
||||
f.write(bench_redis_cache_config)
|
||||
|
||||
with open(bench_redis_queue_config_path, 'w') as f:
|
||||
with open(bench_redis_queue_config_path, "w") as f:
|
||||
f.write(bench_redis_queue_config)
|
||||
|
||||
with open(bench_redis_socketio_config_path, 'w') as f:
|
||||
with open(bench_redis_socketio_config_path, "w") as f:
|
||||
f.write(bench_redis_socketio_config)
|
||||
|
||||
|
||||
def _create_symlinks(bench_path):
|
||||
bench_dir = os.path.abspath(bench_path)
|
||||
etc_systemd_system = os.path.join('/', 'etc', 'systemd', 'system')
|
||||
config_path = os.path.join(bench_dir, 'config', 'systemd')
|
||||
etc_systemd_system = os.path.join("/", "etc", "systemd", "system")
|
||||
config_path = os.path.join(bench_dir, "config", "systemd")
|
||||
unit_files = get_unit_files(bench_dir)
|
||||
for unit_file in unit_files:
|
||||
filename = "".join(unit_file)
|
||||
exec_cmd(f'sudo ln -s {config_path}/{filename} {etc_systemd_system}/{"".join(unit_file)}')
|
||||
exec_cmd('sudo systemctl daemon-reload')
|
||||
exec_cmd(
|
||||
f'sudo ln -s {config_path}/{filename} {etc_systemd_system}/{"".join(unit_file)}'
|
||||
)
|
||||
exec_cmd("sudo systemctl daemon-reload")
|
||||
|
||||
|
||||
def _delete_symlinks(bench_path):
|
||||
bench_dir = os.path.abspath(bench_path)
|
||||
etc_systemd_system = os.path.join('/', 'etc', 'systemd', 'system')
|
||||
etc_systemd_system = os.path.join("/", "etc", "systemd", "system")
|
||||
unit_files = get_unit_files(bench_dir)
|
||||
for unit_file in unit_files:
|
||||
exec_cmd(f'sudo rm {etc_systemd_system}/{"".join(unit_file)}')
|
||||
exec_cmd('sudo systemctl daemon-reload')
|
||||
exec_cmd("sudo systemctl daemon-reload")
|
||||
|
||||
|
||||
def get_unit_files(bench_path):
|
||||
bench_name = get_bench_name(bench_path)
|
||||
unit_files = [
|
||||
[bench_name, ".target"],
|
||||
[bench_name+"-workers", ".target"],
|
||||
[bench_name+"-web", ".target"],
|
||||
[bench_name+"-redis", ".target"],
|
||||
[bench_name+"-frappe-default-worker@", ".service"],
|
||||
[bench_name+"-frappe-short-worker@", ".service"],
|
||||
[bench_name+"-frappe-long-worker@", ".service"],
|
||||
[bench_name+"-frappe-schedule", ".service"],
|
||||
[bench_name+"-frappe-web", ".service"],
|
||||
[bench_name+"-node-socketio", ".service"],
|
||||
[bench_name+"-redis-cache", ".service"],
|
||||
[bench_name+"-redis-queue", ".service"],
|
||||
[bench_name+"-redis-socketio", ".service"],
|
||||
[bench_name + "-workers", ".target"],
|
||||
[bench_name + "-web", ".target"],
|
||||
[bench_name + "-redis", ".target"],
|
||||
[bench_name + "-frappe-default-worker@", ".service"],
|
||||
[bench_name + "-frappe-short-worker@", ".service"],
|
||||
[bench_name + "-frappe-long-worker@", ".service"],
|
||||
[bench_name + "-frappe-schedule", ".service"],
|
||||
[bench_name + "-frappe-web", ".service"],
|
||||
[bench_name + "-node-socketio", ".service"],
|
||||
[bench_name + "-redis-cache", ".service"],
|
||||
[bench_name + "-redis-queue", ".service"],
|
||||
[bench_name + "-redis-socketio", ".service"],
|
||||
]
return unit_files
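get_unit_files above returns (name, extension) pairs for every systemd target and service a bench needs. A short sketch of how such a list can be joined into unit names and enabled, assuming a bench called "frappe-bench" (illustrative only, not part of this commit):

import subprocess

unit_files = [["frappe-bench", ".target"], ["frappe-bench-web", ".target"]]

for name, extension in unit_files:
    unit = name + extension
    # Equivalent to running: sudo systemctl enable <unit>
    subprocess.run(["sudo", "systemctl", "enable", unit], check=True)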
@ -39,4 +39,4 @@ class NotInBenchDirectoryError(Exception):


class VersionNotFound(Exception):
pass
pass
@ -1,31 +1,38 @@
import os, importlib
import os
import importlib


def run(bench_path):
source_patch_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'patches.txt')
target_patch_file = os.path.join(os.path.abspath(bench_path), 'patches.txt')
source_patch_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "patches.txt"
)
target_patch_file = os.path.join(os.path.abspath(bench_path), "patches.txt")

with open(source_patch_file, 'r') as f:
patches = [p.strip() for p in f.read().splitlines()
if p.strip() and not p.strip().startswith("#")]
with open(source_patch_file) as f:
patches = [
p.strip()
for p in f.read().splitlines()
if p.strip() and not p.strip().startswith("#")
]

executed_patches = []
if os.path.exists(target_patch_file):
with open(target_patch_file, 'r') as f:
with open(target_patch_file) as f:
executed_patches = f.read().splitlines()

try:
for patch in patches:
if patch not in executed_patches:
module = importlib.import_module(patch.split()[0])
execute = getattr(module, 'execute')
execute = getattr(module, "execute")
result = execute(bench_path)

if result != False:
if not result:
executed_patches.append(patch)

finally:
with open(target_patch_file, 'w') as f:
f.write('\n'.join(executed_patches))
with open(target_patch_file, "w") as f:
f.write("\n".join(executed_patches))

# end with an empty line
f.write('\n')
f.write("\n")
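The runner above treats patches.txt in the bench as a ledger: each entry names an importable module whose execute(bench_path) is called once, and the ledger is rewritten in the finally block even if a patch fails midway. A condensed sketch of that bookkeeping, based on the pre-reformat check shown in the hunk (illustrative only; the module path in the comment is hypothetical):

import importlib

def run_pending(patches, executed, bench_path="."):
    # Run every patch module that is not yet recorded in the ledger.
    for patch in patches:
        if patch in executed:
            continue
        module = importlib.import_module(patch.split()[0])
        result = module.execute(bench_path)
        if result is not False:
            # A patch can signal "skip me" by returning False.
            executed.append(patch)
    return executed

# run_pending(["some_app.patches.rename_field"], executed=[])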
@ -1,25 +0,0 @@
|
||||
import click, os
|
||||
from bench.config.procfile import setup_procfile
|
||||
from bench.config.supervisor import generate_supervisor_config
|
||||
from bench.utils.app import get_current_frappe_version, get_current_branch
|
||||
|
||||
def execute(bench_path):
|
||||
frappe_branch = get_current_branch('frappe', bench_path)
|
||||
frappe_version = get_current_frappe_version(bench_path)
|
||||
|
||||
if not (frappe_branch=='develop' or frappe_version >= 7):
|
||||
# not version 7+
|
||||
# prevent running this patch
|
||||
return False
|
||||
|
||||
click.confirm('\nThis update will remove Celery config and prepare the bench to use Python RQ.\n'
|
||||
'And it will overwrite Procfile and supervisor.conf.\n'
|
||||
'If you don\'t know what this means, type Y ;)\n\n'
|
||||
'Do you want to continue?',
|
||||
abort=True)
|
||||
|
||||
setup_procfile(bench_path, yes=True)
|
||||
|
||||
# if production setup
|
||||
if os.path.exists(os.path.join(bench_path, 'config', 'supervisor.conf')):
|
||||
generate_supervisor_config(bench_path, yes=True)
|
@ -1,38 +0,0 @@
|
||||
import os, json
|
||||
from bench.config.common_site_config import get_config, put_config, get_common_site_config
|
||||
|
||||
def execute(bench_path):
|
||||
# deprecate bench config
|
||||
bench_config_path = os.path.join(bench_path, 'config.json')
|
||||
if not os.path.exists(bench_config_path):
|
||||
return
|
||||
|
||||
with open(bench_config_path, "r") as f:
|
||||
bench_config = json.loads(f.read())
|
||||
|
||||
common_site_config = get_common_site_config(bench_path)
|
||||
common_site_config.update(bench_config)
|
||||
put_config(common_site_config, bench_path)
|
||||
|
||||
# remove bench/config.json
|
||||
os.remove(bench_config_path)
|
||||
|
||||
# change keys
|
||||
config = get_config(bench_path)
|
||||
changed = False
|
||||
for from_key, to_key, default in (
|
||||
("celery_broker", "redis_queue", "redis://localhost:6379"),
|
||||
("async_redis_server", "redis_socketio", "redis://localhost:12311"),
|
||||
("cache_redis_server", "redis_cache", "redis://localhost:11311")
|
||||
):
|
||||
if from_key in config:
|
||||
config[to_key] = config[from_key]
|
||||
del config[from_key]
|
||||
changed = True
|
||||
|
||||
elif to_key not in config:
|
||||
config[to_key] = default
|
||||
changed = True
|
||||
|
||||
if changed:
|
||||
put_config(config, bench_path)
|
@ -1,10 +0,0 @@
|
||||
import click
|
||||
from bench.config.redis import generate_config
|
||||
|
||||
def execute(bench_path):
|
||||
click.confirm('\nThis update will replace ERPNext\'s Redis configuration files to fix a major security issue.\n'
|
||||
'If you don\'t know what this means, type Y ;)\n\n'
|
||||
'Do you want to continue?',
|
||||
abort=True)
|
||||
|
||||
generate_config(bench_path)
|
@ -1,5 +0,0 @@
|
||||
import os
|
||||
from bench.utils import exec_cmd
|
||||
|
||||
def execute(bench_path):
|
||||
exec_cmd('npm install yarn', os.path.join(bench_path, 'apps/frappe'))
|
@ -1,32 +0,0 @@
|
||||
import click, subprocess, sys
|
||||
from semantic_version import Version
|
||||
from distutils.spawn import find_executable
|
||||
|
||||
def execute(bench_path):
|
||||
expected_node_ver = Version('5.0.0')
|
||||
node_exec = find_executable('node') or find_executable('nodejs')
|
||||
|
||||
|
||||
if node_exec:
|
||||
result = subprocess.check_output([node_exec, '-v']).decode()
|
||||
else:
|
||||
click.echo('''
|
||||
No node executable was found on your machine.
|
||||
Please install latest node version before running "bench update". For installation instructions
|
||||
please refer "Debian and Ubuntu based Linux distributions" section or "Enterprise Linux and
|
||||
Fedora" section depending upon your OS on the following link,
|
||||
"https://nodejs.org/en/download/package-manager/"
|
||||
''')
|
||||
sys.exit(1)
|
||||
|
||||
node_ver = Version(result.rstrip('\n').lstrip('v'))
|
||||
|
||||
if node_ver < expected_node_ver:
|
||||
click.echo('''
|
||||
Please update node to latest version before running "bench update".
|
||||
Please install latest node version before running "bench update". For installation instructions
|
||||
please refer "Debian and Ubuntu based Linux distributions" section or "Enterprise Linux and
|
||||
Fedora" section depending upon your OS on the following link,
|
||||
"https://nodejs.org/en/download/package-manager/"
|
||||
''')
|
||||
sys.exit(1)
|
@ -1,4 +0,0 @@
|
||||
import subprocess
|
||||
|
||||
def execute(bench_path):
|
||||
subprocess.check_output(['npm', 'install', 'socket.io'])
|
@ -4,10 +4,10 @@ from crontab import CronTab
|
||||
|
||||
def execute(bench_path):
|
||||
"""
|
||||
This patch fixes a cron job that would backup sites every minute per 6 hours
|
||||
This patch fixes a cron job that would backup sites every minute per 6 hours
|
||||
"""
|
||||
|
||||
user = get_config(bench_path=bench_path).get('frappe_user')
|
||||
user = get_config(bench_path=bench_path).get("frappe_user")
|
||||
user_crontab = CronTab(user=user)
|
||||
|
||||
for job in user_crontab.find_comment("bench auto backups set for every 6 hours"):
|
||||
|
@ -34,13 +34,13 @@ def is_production_set(bench_path):
|
||||
bench_name = get_bench_name(bench_path)
|
||||
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf_file_name = f'{bench_name}.{supervisor_conf_extn}'
|
||||
supervisor_conf_file_name = f"{bench_name}.{supervisor_conf_extn}"
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), supervisor_conf_file_name)
|
||||
|
||||
if os.path.exists(supervisor_conf):
|
||||
production_setup = production_setup or True
|
||||
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
nginx_conf = f"/etc/nginx/conf.d/{bench_name}.conf"
|
||||
|
||||
if os.path.exists(nginx_conf):
|
||||
production_setup = production_setup or True
|
||||
@ -50,7 +50,7 @@ def is_production_set(bench_path):
|
||||
|
||||
def execute(bench_path):
|
||||
"""This patch checks if bench sudoers is set and regenerate supervisor and sudoers files"""
|
||||
user = get_config('.').get("frappe_user") or getpass.getuser()
|
||||
user = get_config(".").get("frappe_user") or getpass.getuser()
|
||||
|
||||
if is_sudoers_set():
|
||||
if is_production_set(bench_path):
|
||||
|
@ -2,4 +2,4 @@ from bench.config.common_site_config import update_config
|
||||
|
||||
|
||||
def execute(bench_path):
|
||||
update_config({'live_reload': True}, bench_path)
|
||||
update_config({"live_reload": True}, bench_path)
|
||||
|
@ -17,7 +17,7 @@ from semantic_version import Version
|
||||
|
||||
|
||||
def execute(bench_path):
|
||||
frappe_version = Version(get_current_version('frappe'))
|
||||
frappe_version = Version(get_current_version("frappe"))
|
||||
|
||||
if frappe_version.major < 14 or os.name != "posix":
|
||||
# Returning False means patch has been skipped
|
||||
|
@ -9,7 +9,6 @@ import traceback
|
||||
import unittest
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
from bench.utils import paths_in_bench, exec_cmd
|
||||
from bench.utils.system import init
|
||||
from bench.bench import Bench
|
||||
@ -23,6 +22,7 @@ if PYTHON_VER.major == 3:
|
||||
else:
|
||||
FRAPPE_BRANCH = "develop"
|
||||
|
||||
|
||||
class TestBenchBase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.benches_path = "."
|
||||
@ -32,11 +32,26 @@ class TestBenchBase(unittest.TestCase):
|
||||
for bench_name in self.benches:
|
||||
bench_path = os.path.join(self.benches_path, bench_name)
|
||||
bench = Bench(bench_path)
|
||||
mariadb_password = "travis" if os.environ.get("CI") else getpass.getpass(prompt="Enter MariaDB root Password: ")
|
||||
mariadb_password = (
|
||||
"travis"
|
||||
if os.environ.get("CI")
|
||||
else getpass.getpass(prompt="Enter MariaDB root Password: ")
|
||||
)
|
||||
|
||||
if bench.exists:
|
||||
for site in bench.sites:
|
||||
subprocess.call(["bench", "drop-site", site, "--force", "--no-backup", "--root-password", mariadb_password], cwd=bench_path)
|
||||
subprocess.call(
|
||||
[
|
||||
"bench",
|
||||
"drop-site",
|
||||
site,
|
||||
"--force",
|
||||
"--no-backup",
|
||||
"--root-password",
|
||||
mariadb_password,
|
||||
],
|
||||
cwd=bench_path,
|
||||
)
|
||||
shutil.rmtree(bench_path, ignore_errors=True)
|
||||
|
||||
def assert_folders(self, bench_name):
|
||||
@ -55,18 +70,21 @@ class TestBenchBase(unittest.TestCase):
|
||||
for config, search_key in (
|
||||
("redis_queue.conf", "redis_queue.rdb"),
|
||||
("redis_socketio.conf", "redis_socketio.rdb"),
|
||||
("redis_cache.conf", "redis_cache.rdb")):
|
||||
("redis_cache.conf", "redis_cache.rdb"),
|
||||
):
|
||||
|
||||
self.assert_exists(bench_name, "config", config)
|
||||
|
||||
with open(os.path.join(bench_name, "config", config), "r") as f:
|
||||
with open(os.path.join(bench_name, "config", config)) as f:
|
||||
self.assertTrue(search_key in f.read())
|
||||
|
||||
def assert_common_site_config(self, bench_name, expected_config):
|
||||
common_site_config_path = os.path.join(self.benches_path, bench_name, 'sites', 'common_site_config.json')
|
||||
common_site_config_path = os.path.join(
|
||||
self.benches_path, bench_name, "sites", "common_site_config.json"
|
||||
)
|
||||
self.assertTrue(os.path.exists(common_site_config_path))
|
||||
|
||||
with open(common_site_config_path, "r") as f:
|
||||
with open(common_site_config_path) as f:
|
||||
config = json.load(f)
|
||||
|
||||
for key, value in list(expected_config.items()):
|
||||
@ -78,7 +96,7 @@ class TestBenchBase(unittest.TestCase):
|
||||
def new_site(self, site_name, bench_name):
|
||||
new_site_cmd = ["bench", "new-site", site_name, "--admin-password", "admin"]
|
||||
|
||||
if os.environ.get('CI'):
|
||||
if os.environ.get("CI"):
|
||||
new_site_cmd.extend(["--mariadb-root-password", "travis"])
|
||||
|
||||
subprocess.call(new_site_cmd, cwd=os.path.join(self.benches_path, bench_name))
|
||||
@ -88,18 +106,25 @@ class TestBenchBase(unittest.TestCase):
|
||||
frappe_tmp_path = "/tmp/frappe"
|
||||
|
||||
if not os.path.exists(frappe_tmp_path):
|
||||
exec_cmd(f"git clone https://github.com/frappe/frappe -b {FRAPPE_BRANCH} --depth 1 --origin upstream {frappe_tmp_path}")
|
||||
exec_cmd(
|
||||
f"git clone https://github.com/frappe/frappe -b {FRAPPE_BRANCH} --depth 1 --origin upstream {frappe_tmp_path}"
|
||||
)
|
||||
|
||||
kwargs.update(dict(
|
||||
python=sys.executable,
|
||||
no_procfile=True,
|
||||
no_backups=True,
|
||||
frappe_path=frappe_tmp_path
|
||||
))
|
||||
kwargs.update(
|
||||
dict(
|
||||
python=sys.executable,
|
||||
no_procfile=True,
|
||||
no_backups=True,
|
||||
frappe_path=frappe_tmp_path,
|
||||
)
|
||||
)
|
||||
|
||||
if not os.path.exists(os.path.join(self.benches_path, bench_name)):
|
||||
init(bench_name, **kwargs)
|
||||
exec_cmd("git remote set-url upstream https://github.com/frappe/frappe", cwd=os.path.join(self.benches_path, bench_name, "apps", "frappe"))
|
||||
exec_cmd(
|
||||
"git remote set-url upstream https://github.com/frappe/frappe",
|
||||
cwd=os.path.join(self.benches_path, bench_name, "apps", "frappe"),
|
||||
)
|
||||
|
||||
def file_exists(self, path):
|
||||
if os.environ.get("CI"):
|
||||
|
@ -19,6 +19,7 @@ from bench.bench import Bench
|
||||
# for longer since docs.erpnext.com is powered by it ;)
|
||||
TEST_FRAPPE_APP = "frappe_docs"
|
||||
|
||||
|
||||
class TestBenchInit(TestBenchBase):
|
||||
def test_utils(self):
|
||||
self.assertEqual(subprocess.call("bench"), 0)
|
||||
@ -27,9 +28,9 @@ class TestBenchInit(TestBenchBase):
|
||||
self.init_bench(bench_name, **kwargs)
|
||||
app = App("file:///tmp/frappe")
|
||||
self.assertTupleEqual(
|
||||
(app.mount_path, app.url, app.repo, app.org),
|
||||
("/tmp/frappe", "file:///tmp/frappe", "frappe", "frappe"),
|
||||
)
|
||||
(app.mount_path, app.url, app.repo, app.org),
|
||||
("/tmp/frappe", "file:///tmp/frappe", "frappe", "frappe"),
|
||||
)
|
||||
self.assert_folders(bench_name)
|
||||
self.assert_virtual_env(bench_name)
|
||||
self.assert_config(bench_name)
|
||||
@ -43,30 +44,33 @@ class TestBenchInit(TestBenchBase):
|
||||
except Exception:
|
||||
print(self.get_traceback())
|
||||
|
||||
|
||||
def test_multiple_benches(self):
|
||||
for bench_name in ("test-bench-1", "test-bench-2"):
|
||||
self.init_bench(bench_name)
|
||||
|
||||
self.assert_common_site_config("test-bench-1", {
|
||||
"webserver_port": 8000,
|
||||
"socketio_port": 9000,
|
||||
"file_watcher_port": 6787,
|
||||
"redis_queue": "redis://localhost:11000",
|
||||
"redis_socketio": "redis://localhost:12000",
|
||||
"redis_cache": "redis://localhost:13000"
|
||||
})
|
||||
|
||||
self.assert_common_site_config("test-bench-2", {
|
||||
"webserver_port": 8001,
|
||||
"socketio_port": 9001,
|
||||
"file_watcher_port": 6788,
|
||||
"redis_queue": "redis://localhost:11001",
|
||||
"redis_socketio": "redis://localhost:12001",
|
||||
"redis_cache": "redis://localhost:13001"
|
||||
})
|
||||
|
||||
self.assert_common_site_config(
|
||||
"test-bench-1",
|
||||
{
|
||||
"webserver_port": 8000,
|
||||
"socketio_port": 9000,
|
||||
"file_watcher_port": 6787,
|
||||
"redis_queue": "redis://localhost:11000",
|
||||
"redis_socketio": "redis://localhost:12000",
|
||||
"redis_cache": "redis://localhost:13000",
|
||||
},
|
||||
)
|
||||
|
||||
self.assert_common_site_config(
|
||||
"test-bench-2",
|
||||
{
|
||||
"webserver_port": 8001,
|
||||
"socketio_port": 9001,
|
||||
"file_watcher_port": 6788,
|
||||
"redis_queue": "redis://localhost:11001",
|
||||
"redis_socketio": "redis://localhost:12001",
|
||||
"redis_cache": "redis://localhost:13001",
|
||||
},
|
||||
)
|
||||
|
||||
def test_new_site(self):
|
||||
bench_name = "test-bench"
|
||||
@ -85,7 +89,7 @@ class TestBenchInit(TestBenchBase):
|
||||
self.assertTrue(os.path.exists(os.path.join(site_path, "public", "files")))
|
||||
self.assertTrue(os.path.exists(site_config_path))
|
||||
|
||||
with open(site_config_path, "r") as f:
|
||||
with open(site_config_path) as f:
|
||||
site_config = json.loads(f.read())
|
||||
|
||||
for key in ("db_name", "db_password"):
|
||||
@ -97,7 +101,9 @@ class TestBenchInit(TestBenchBase):
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
exec_cmd(f"bench get-app {TEST_FRAPPE_APP}", cwd=bench_path)
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", TEST_FRAPPE_APP)))
|
||||
app_installed_in_env = TEST_FRAPPE_APP in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8')
|
||||
app_installed_in_env = TEST_FRAPPE_APP in subprocess.check_output(
|
||||
["bench", "pip", "freeze"], cwd=bench_path
|
||||
).decode("utf8")
|
||||
self.assertTrue(app_installed_in_env)
|
||||
|
||||
def test_get_app_resolve_deps(self):
|
||||
@ -108,12 +114,12 @@ class TestBenchInit(TestBenchBase):
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", FRAPPE_APP)))
|
||||
|
||||
states_path = os.path.join(bench_path, "sites", "apps.json")
|
||||
self.assert_(os.path.exists(states_path))
|
||||
self.assertTrue(os.path.exists(states_path))
|
||||
|
||||
with open(states_path, "r") as f:
|
||||
with open(states_path) as f:
|
||||
states = json.load(f)
|
||||
|
||||
self.assert_(FRAPPE_APP in states)
|
||||
self.assertTrue(FRAPPE_APP in states)
|
||||
|
||||
def test_install_app(self):
|
||||
bench_name = "test-bench"
|
||||
@ -128,33 +134,42 @@ class TestBenchInit(TestBenchBase):
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", TEST_FRAPPE_APP)))
|
||||
|
||||
# check if app is installed
|
||||
app_installed_in_env = TEST_FRAPPE_APP in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8')
|
||||
app_installed_in_env = TEST_FRAPPE_APP in subprocess.check_output(
|
||||
["bench", "pip", "freeze"], cwd=bench_path
|
||||
).decode("utf8")
|
||||
self.assertTrue(app_installed_in_env)
|
||||
|
||||
# create and install app on site
|
||||
self.new_site(site_name, bench_name)
|
||||
installed_app = not exec_cmd(f"bench --site {site_name} install-app {TEST_FRAPPE_APP}", cwd=bench_path)
|
||||
installed_app = not exec_cmd(
|
||||
f"bench --site {site_name} install-app {TEST_FRAPPE_APP}", cwd=bench_path
|
||||
)
|
||||
|
||||
app_installed_on_site = subprocess.check_output(["bench", "--site", site_name, "list-apps"], cwd=bench_path).decode('utf8')
|
||||
app_installed_on_site = subprocess.check_output(
|
||||
["bench", "--site", site_name, "list-apps"], cwd=bench_path
|
||||
).decode("utf8")
|
||||
|
||||
if installed_app:
|
||||
self.assertTrue(TEST_FRAPPE_APP in app_installed_on_site)
|
||||
|
||||
|
||||
def test_remove_app(self):
|
||||
self.init_bench("test-bench")
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
|
||||
exec_cmd("bench setup requirements --node", cwd=bench_path)
|
||||
exec_cmd(f"bench get-app {TEST_FRAPPE_APP} --branch master --overwrite", cwd=bench_path)
|
||||
exec_cmd(
|
||||
f"bench get-app {TEST_FRAPPE_APP} --branch master --overwrite", cwd=bench_path
|
||||
)
|
||||
exec_cmd(f"bench remove-app {TEST_FRAPPE_APP}", cwd=bench_path)
|
||||
|
||||
with open(os.path.join(bench_path, "sites", "apps.txt")) as f:
|
||||
self.assertFalse(TEST_FRAPPE_APP in f.read())
|
||||
self.assertFalse(TEST_FRAPPE_APP in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8'))
|
||||
self.assertFalse(
|
||||
TEST_FRAPPE_APP
|
||||
in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode("utf8")
|
||||
)
|
||||
self.assertFalse(os.path.exists(os.path.join(bench_path, "apps", TEST_FRAPPE_APP)))
|
||||
|
||||
|
||||
def test_switch_to_branch(self):
|
||||
self.init_bench("test-bench")
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
@ -166,16 +181,20 @@ class TestBenchInit(TestBenchBase):
|
||||
# assuming we follow `version-#`
|
||||
prevoius_branch = f"version-{int(FRAPPE_BRANCH.split('-')[1]) - 1}"
|
||||
|
||||
successful_switch = not exec_cmd(f"bench switch-to-branch {prevoius_branch} frappe --upgrade", cwd=bench_path)
|
||||
successful_switch = not exec_cmd(
|
||||
f"bench switch-to-branch {prevoius_branch} frappe --upgrade", cwd=bench_path
|
||||
)
|
||||
app_branch_after_switch = str(git.Repo(path=app_path).active_branch)
|
||||
if successful_switch:
|
||||
self.assertEqual(prevoius_branch, app_branch_after_switch)
|
||||
|
||||
successful_switch = not exec_cmd(f"bench switch-to-branch {FRAPPE_BRANCH} frappe --upgrade", cwd=bench_path)
|
||||
successful_switch = not exec_cmd(
|
||||
f"bench switch-to-branch {FRAPPE_BRANCH} frappe --upgrade", cwd=bench_path
|
||||
)
|
||||
app_branch_after_second_switch = str(git.Repo(path=app_path).active_branch)
|
||||
if successful_switch:
|
||||
self.assertEqual(FRAPPE_BRANCH, app_branch_after_second_switch)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
@ -32,16 +32,16 @@ class TestSetupProduction(TestBenchBase):
|
||||
bench_path = os.path.join(os.path.abspath(self.benches_path), bench_name)
|
||||
exec_cmd("sudo bench disable-production", cwd=bench_path)
|
||||
|
||||
|
||||
def production(self):
|
||||
try:
|
||||
self.test_setup_production()
|
||||
except Exception:
|
||||
print(self.get_traceback())
|
||||
|
||||
|
||||
def assert_nginx_config(self, bench_name):
|
||||
conf_src = os.path.join(os.path.abspath(self.benches_path), bench_name, 'config', 'nginx.conf')
|
||||
conf_src = os.path.join(
|
||||
os.path.abspath(self.benches_path), bench_name, "config", "nginx.conf"
|
||||
)
|
||||
conf_dest = f"/etc/nginx/conf.d/{bench_name}.conf"
|
||||
|
||||
self.assertTrue(self.file_exists(conf_src))
|
||||
@ -51,23 +51,23 @@ class TestSetupProduction(TestBenchBase):
|
||||
self.assertEqual(os.path.realpath(conf_dest), conf_src)
|
||||
|
||||
# file content
|
||||
with open(conf_src, "r") as f:
|
||||
with open(conf_src) as f:
|
||||
f = f.read()
|
||||
|
||||
for key in (
|
||||
f"upstream {bench_name}-frappe",
|
||||
f"upstream {bench_name}-socketio-server"
|
||||
):
|
||||
f"upstream {bench_name}-frappe",
|
||||
f"upstream {bench_name}-socketio-server",
|
||||
):
|
||||
self.assertTrue(key in f)
|
||||
|
||||
|
||||
def assert_nginx_process(self):
|
||||
out = get_cmd_output("sudo nginx -t 2>&1")
|
||||
self.assertTrue("nginx: configuration file /etc/nginx/nginx.conf test is successful" in out)
|
||||
|
||||
self.assertTrue(
|
||||
"nginx: configuration file /etc/nginx/nginx.conf test is successful" in out
|
||||
)
|
||||
|
||||
def assert_sudoers(self, user):
|
||||
sudoers_file = '/etc/sudoers.d/frappe'
|
||||
sudoers_file = "/etc/sudoers.d/frappe"
|
||||
service = which("service")
|
||||
nginx = which("nginx")
|
||||
|
||||
@ -76,15 +76,16 @@ class TestSetupProduction(TestBenchBase):
|
||||
if os.environ.get("CI"):
|
||||
sudoers = subprocess.check_output(["sudo", "cat", sudoers_file]).decode("utf-8")
|
||||
else:
|
||||
with open(sudoers_file, 'r') as f:
|
||||
with open(sudoers_file) as f:
|
||||
sudoers = f.read()
|
||||
|
||||
self.assertTrue(f'{user} ALL = (root) NOPASSWD: {service} nginx *' in sudoers)
|
||||
self.assertTrue(f'{user} ALL = (root) NOPASSWD: {nginx}' in sudoers)
|
||||
|
||||
self.assertTrue(f"{user} ALL = (root) NOPASSWD: {service} nginx *" in sudoers)
|
||||
self.assertTrue(f"{user} ALL = (root) NOPASSWD: {nginx}" in sudoers)
|
||||
|
||||
def assert_supervisor_config(self, bench_name, use_rq=True):
|
||||
conf_src = os.path.join(os.path.abspath(self.benches_path), bench_name, 'config', 'supervisor.conf')
|
||||
conf_src = os.path.join(
|
||||
os.path.abspath(self.benches_path), bench_name, "config", "supervisor.conf"
|
||||
)
|
||||
|
||||
supervisor_conf_dir = get_supervisor_confdir()
|
||||
conf_dest = f"{supervisor_conf_dir}/{bench_name}.conf"
|
||||
@ -96,7 +97,7 @@ class TestSetupProduction(TestBenchBase):
|
||||
self.assertEqual(os.path.realpath(conf_dest), conf_src)
|
||||
|
||||
# file content
|
||||
with open(conf_src, "r") as f:
|
||||
with open(conf_src) as f:
|
||||
f = f.read()
|
||||
|
||||
tests = [
|
||||
@ -106,65 +107,72 @@ class TestSetupProduction(TestBenchBase):
|
||||
f"program:{bench_name}-redis-socketio",
|
||||
f"group:{bench_name}-web",
|
||||
f"group:{bench_name}-workers",
|
||||
f"group:{bench_name}-redis"
|
||||
f"group:{bench_name}-redis",
|
||||
]
|
||||
|
||||
if not os.environ.get("CI"):
|
||||
tests.append(f"program:{bench_name}-node-socketio")
|
||||
|
||||
if use_rq:
|
||||
tests.extend([
|
||||
f"program:{bench_name}-frappe-schedule",
|
||||
f"program:{bench_name}-frappe-default-worker",
|
||||
f"program:{bench_name}-frappe-short-worker",
|
||||
f"program:{bench_name}-frappe-long-worker"
|
||||
])
|
||||
tests.extend(
|
||||
[
|
||||
f"program:{bench_name}-frappe-schedule",
|
||||
f"program:{bench_name}-frappe-default-worker",
|
||||
f"program:{bench_name}-frappe-short-worker",
|
||||
f"program:{bench_name}-frappe-long-worker",
|
||||
]
|
||||
)
|
||||
|
||||
else:
|
||||
tests.extend([
|
||||
f"program:{bench_name}-frappe-workerbeat",
|
||||
f"program:{bench_name}-frappe-worker",
|
||||
f"program:{bench_name}-frappe-longjob-worker",
|
||||
f"program:{bench_name}-frappe-async-worker"
|
||||
])
|
||||
tests.extend(
|
||||
[
|
||||
f"program:{bench_name}-frappe-workerbeat",
|
||||
f"program:{bench_name}-frappe-worker",
|
||||
f"program:{bench_name}-frappe-longjob-worker",
|
||||
f"program:{bench_name}-frappe-async-worker",
|
||||
]
|
||||
)
|
||||
|
||||
for key in tests:
|
||||
self.assertTrue(key in f)
|
||||
|
||||
|
||||
def assert_supervisor_process(self, bench_name, use_rq=True, disable_production=False):
|
||||
out = get_cmd_output("supervisorctl status")
|
||||
|
||||
while "STARTING" in out:
|
||||
print ("Waiting for all processes to start...")
|
||||
print("Waiting for all processes to start...")
|
||||
time.sleep(10)
|
||||
out = get_cmd_output("supervisorctl status")
|
||||
|
||||
tests = [
|
||||
"{bench_name}-web:{bench_name}-frappe-web[\s]+RUNNING",
|
||||
r"{bench_name}-web:{bench_name}-frappe-web[\s]+RUNNING",
|
||||
# Have commented for the time being. Needs to be uncommented later on. Bench is failing on travis because of this.
|
||||
# It works on one bench and fails on another.giving FATAL or BACKOFF (Exited too quickly (process log may have details))
|
||||
# "{bench_name}-web:{bench_name}-node-socketio[\s]+RUNNING",
|
||||
"{bench_name}-redis:{bench_name}-redis-cache[\s]+RUNNING",
|
||||
"{bench_name}-redis:{bench_name}-redis-queue[\s]+RUNNING",
|
||||
"{bench_name}-redis:{bench_name}-redis-socketio[\s]+RUNNING"
|
||||
r"{bench_name}-redis:{bench_name}-redis-cache[\s]+RUNNING",
|
||||
r"{bench_name}-redis:{bench_name}-redis-queue[\s]+RUNNING",
|
||||
r"{bench_name}-redis:{bench_name}-redis-socketio[\s]+RUNNING",
|
||||
]
|
||||
|
||||
if use_rq:
|
||||
tests.extend([
|
||||
"{bench_name}-workers:{bench_name}-frappe-schedule[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-default-worker-0[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-short-worker-0[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-long-worker-0[\s]+RUNNING"
|
||||
])
|
||||
tests.extend(
|
||||
[
|
||||
r"{bench_name}-workers:{bench_name}-frappe-schedule[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-default-worker-0[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-short-worker-0[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-long-worker-0[\s]+RUNNING",
|
||||
]
|
||||
)
|
||||
|
||||
else:
|
||||
tests.extend([
|
||||
"{bench_name}-workers:{bench_name}-frappe-workerbeat[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-worker[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-longjob-worker[\s]+RUNNING",
|
||||
"{bench_name}-workers:{bench_name}-frappe-async-worker[\s]+RUNNING"
|
||||
])
|
||||
tests.extend(
|
||||
[
|
||||
r"{bench_name}-workers:{bench_name}-frappe-workerbeat[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-worker[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-longjob-worker[\s]+RUNNING",
|
||||
r"{bench_name}-workers:{bench_name}-frappe-async-worker[\s]+RUNNING",
|
||||
]
|
||||
)
|
||||
|
||||
for key in tests:
|
||||
if disable_production:
|
||||
@ -173,5 +181,5 @@ class TestSetupProduction(TestBenchBase):
|
||||
self.assertTrue(re.search(key, out))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
@ -20,8 +20,8 @@ class TestUtils(unittest.TestCase):
|
||||
app.name == git_url,
|
||||
app.branch == branch,
|
||||
app.tag == branch,
|
||||
app.is_url == True,
|
||||
app.on_disk == False,
|
||||
app.is_url is True,
|
||||
app.on_disk is False,
|
||||
app.org == "frappe",
|
||||
app.url == git_url,
|
||||
]
|
||||
@ -30,11 +30,19 @@ class TestUtils(unittest.TestCase):
|
||||
|
||||
def test_is_valid_frappe_branch(self):
|
||||
with self.assertRaises(InvalidRemoteException):
|
||||
is_valid_frappe_branch("https://github.com/frappe/frappe.git", frappe_branch="random-branch")
|
||||
is_valid_frappe_branch("https://github.com/random/random.git", frappe_branch="random-branch")
|
||||
is_valid_frappe_branch(
|
||||
"https://github.com/frappe/frappe.git", frappe_branch="random-branch"
|
||||
)
|
||||
is_valid_frappe_branch(
|
||||
"https://github.com/random/random.git", frappe_branch="random-branch"
|
||||
)
|
||||
|
||||
is_valid_frappe_branch("https://github.com/frappe/frappe.git", frappe_branch="develop")
|
||||
is_valid_frappe_branch("https://github.com/frappe/frappe.git", frappe_branch="v13.29.0")
|
||||
is_valid_frappe_branch(
|
||||
"https://github.com/frappe/frappe.git", frappe_branch="develop"
|
||||
)
|
||||
is_valid_frappe_branch(
|
||||
"https://github.com/frappe/frappe.git", frappe_branch="v13.29.0"
|
||||
)
|
||||
|
||||
def test_app_states(self):
|
||||
bench_dir = "./sandbox"
|
||||
@ -48,7 +56,10 @@ class TestUtils(unittest.TestCase):
|
||||
self.assertTrue(hasattr(fake_bench.apps, "states"))
|
||||
|
||||
fake_bench.apps.states = {
|
||||
"frappe": {"resolution": {"branch": "develop", "commit_hash": "234rwefd"}, "version": "14.0.0-dev"}
|
||||
"frappe": {
|
||||
"resolution": {"branch": "develop", "commit_hash": "234rwefd"},
|
||||
"version": "14.0.0-dev",
|
||||
}
|
||||
}
|
||||
fake_bench.apps.update_apps_states()
|
||||
|
||||
@ -64,7 +75,9 @@ class TestUtils(unittest.TestCase):
|
||||
f.write("__version__ = '11.0'")
|
||||
|
||||
subprocess.run(["git", "add", "."], cwd=frappe_path, capture_output=True, check=True)
|
||||
subprocess.run(["git", "commit", "-m", "temp"], cwd=frappe_path, capture_output=True, check=True)
|
||||
subprocess.run(
|
||||
["git", "commit", "-m", "temp"], cwd=frappe_path, capture_output=True, check=True
|
||||
)
|
||||
|
||||
fake_bench.apps.update_apps_states(app_name="frappe")
|
||||
|
||||
@ -76,4 +89,4 @@ class TestUtils(unittest.TestCase):
|
||||
|
||||
def test_ssh_ports(self):
|
||||
app = App("git@github.com:22:frappe/frappe")
|
||||
self.assertEqual((app.use_ssh, app.org, app.repo), (True, "frappe", "frappe"))
|
||||
self.assertEqual((app.use_ssh, app.org, app.repo), (True, "frappe", "frappe"))
|
||||
|
@ -2,29 +2,31 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from glob import glob
|
||||
from shlex import split
|
||||
from typing import List, Tuple
|
||||
from functools import lru_cache
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
import requests
|
||||
|
||||
# imports - module imports
|
||||
from bench import PROJECT_NAME, VERSION
|
||||
|
||||
from bench.exceptions import CommandFailedError, InvalidRemoteException, AppNotInstalledError
|
||||
|
||||
from bench.exceptions import (
|
||||
AppNotInstalledError,
|
||||
CommandFailedError,
|
||||
InvalidRemoteException,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(PROJECT_NAME)
|
||||
bench_cache_file = ".bench.cmd"
|
||||
paths_in_app = ("hooks.py", "modules.txt", "patches.txt")
|
||||
paths_in_bench = ("apps", "sites", "config", "logs", "config/pids")
|
||||
sudoers_file = "/etc/sudoers.d/frappe"
|
||||
UNSET_ARG = object()
|
||||
|
||||
|
||||
def is_bench_directory(directory=os.path.curdir):
|
||||
@ -51,7 +53,7 @@ def is_frappe_app(directory: str) -> bool:
|
||||
|
||||
|
||||
@lru_cache(maxsize=None)
|
||||
def is_valid_frappe_branch(frappe_path:str, frappe_branch:str):
|
||||
def is_valid_frappe_branch(frappe_path: str, frappe_branch: str):
|
||||
"""Check if a branch exists in a repo. Throws InvalidRemoteException if branch is not found
|
||||
|
||||
Uses native git command to check for branches on a remote.
|
||||
@ -209,7 +211,9 @@ def get_git_version() -> float:
|
||||
def get_cmd_output(cmd, cwd=".", _raise=True):
|
||||
output = ""
|
||||
try:
|
||||
output = subprocess.check_output(cmd, cwd=cwd, shell=True, stderr=subprocess.PIPE, encoding="utf-8").strip()
|
||||
output = subprocess.check_output(
|
||||
cmd, cwd=cwd, shell=True, stderr=subprocess.PIPE, encoding="utf-8"
|
||||
).strip()
|
||||
except subprocess.CalledProcessError as e:
|
||||
if e.output:
|
||||
output = e.output
|
||||
@ -508,6 +512,7 @@ def get_traceback() -> str:
|
||||
|
||||
class _dict(dict):
|
||||
"""dict like object that exposes keys as attributes"""
|
||||
|
||||
# bench port of frappe._dict
|
||||
def __getattr__(self, key):
|
||||
ret = self.get(key)
|
||||
@ -515,16 +520,21 @@ class _dict(dict):
|
||||
if not ret and key.startswith("__") and key != "__deepcopy__":
|
||||
raise AttributeError()
|
||||
return ret
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
self[key] = value
|
||||
|
||||
def __getstate__(self):
|
||||
return self
|
||||
|
||||
def __setstate__(self, d):
|
||||
self.update(d)
|
||||
|
||||
def update(self, d):
|
||||
"""update and return self -- the missing dict feature in python"""
|
||||
super(_dict, self).update(d)
|
||||
super().update(d)
|
||||
return self
|
||||
|
||||
def copy(self):
return _dict(dict(self).copy())
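_dict above is a small attribute-access wrapper over dict (bench's port of frappe._dict): __getattr__/__setattr__ map attributes to keys and update() returns self. A quick usage sketch (illustrative only):

d = _dict({"site": "example.localhost"})
d.port = 8000                 # attribute assignment writes an ordinary dict key
print(d.site, d["port"])      # attribute and item access are interchangeable
d = d.update({"port": 8001})  # update() returns self, so calls can be chained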
@ -539,10 +549,8 @@ def get_cmd_from_sysargv():
|
||||
"""
|
||||
# context is passed as options to frappe's bench_helper
|
||||
from bench.bench import Bench
|
||||
frappe_context = _dict(
|
||||
params={"--site"},
|
||||
flags={"--verbose", "--profile", "--force"}
|
||||
)
|
||||
|
||||
frappe_context = _dict(params={"--site"}, flags={"--verbose", "--profile", "--force"})
|
||||
cmd_from_ctx = None
|
||||
sys_argv = sys.argv[1:]
|
||||
skip_next = False
|
||||
|
@ -1,7 +1,13 @@
|
||||
# imports - standard imports
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
import subprocess
|
||||
from typing import List
|
||||
from functools import lru_cache
|
||||
|
||||
# imports - module imports
|
||||
from bench.exceptions import (
|
||||
InvalidRemoteException,
|
||||
InvalidBranchException,
|
||||
@ -9,7 +15,6 @@ from bench.exceptions import (
|
||||
VersionNotFound,
|
||||
)
|
||||
from bench.app import get_repo_dir
|
||||
from functools import lru_cache
|
||||
|
||||
|
||||
def is_version_upgrade(app="frappe", bench_path=".", branch=None):
|
||||
@ -108,7 +113,9 @@ def switch_to_develop(apps=None, bench_path=".", upgrade=True):
|
||||
|
||||
|
||||
def get_version_from_string(contents, field="__version__"):
|
||||
match = re.search(r"^(\s*%s\s*=\s*['\\\"])(.+?)(['\"])" % field, contents, flags=(re.S | re.M))
|
||||
match = re.search(
|
||||
r"^(\s*%s\s*=\s*['\\\"])(.+?)(['\"])" % field, contents, flags=(re.S | re.M)
|
||||
)
|
||||
if not match:
|
||||
raise VersionNotFound(f"{contents} is not a valid version")
return match.group(2)
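get_version_from_string above pulls a module-level assignment such as __version__ = "14.0.0" out of a file's raw text with a regex. A small standalone sketch of the same pattern (illustrative only):

import re

contents = '__title__ = "frappe"\n__version__ = "14.0.0-dev"\n'
match = re.search(r"^(\s*__version__\s*=\s*['\"])(.+?)(['\"])", contents, flags=re.S | re.M)
print(match.group(2))  # -> 14.0.0-dev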
@ -157,7 +164,7 @@ def get_upstream_version(app, branch=None, bench_path="."):
def get_current_frappe_version(bench_path="."):
try:
return get_major_version(get_current_version("frappe", bench_path=bench_path))
except IOError:
except OSError:
return 0

@ -184,13 +191,17 @@ def get_required_deps(org, name, branch, deps="hooks.py"):
|
||||
return base64.decodebytes(res["content"].encode()).decode()
|
||||
|
||||
|
||||
def required_apps_from_hooks(required_deps, local=False):
|
||||
def required_apps_from_hooks(required_deps: str, local: bool = False) -> List:
|
||||
import ast
|
||||
|
||||
required_apps_re = re.compile(r"required_apps\s+=\s+(.*)")
|
||||
|
||||
if local:
|
||||
with open(required_deps) as f:
|
||||
required_deps = f.read()
|
||||
lines = [x for x in required_deps.split("\n") if x.strip().startswith("required_apps")]
|
||||
required_apps = eval(lines[0].strip("required_apps").strip().lstrip("=").strip())
|
||||
return required_apps
|
||||
required_deps = pathlib.Path(required_deps).read_text()
|
||||
|
||||
_req_apps_tag = required_apps_re.search(required_deps)
|
||||
req_apps_tag = _req_apps_tag[1]
|
||||
return ast.literal_eval(req_apps_tag)
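required_apps_from_hooks above finds the required_apps = [...] assignment in an app's hooks.py and evaluates only that literal. A minimal sketch using the same regex plus ast.literal_eval (illustrative only; the hooks snippet is made up):

import ast
import re

hooks_source = 'app_name = "hrms"\nrequired_apps = ["erpnext"]\n'
match = re.compile(r"required_apps\s+=\s+(.*)").search(hooks_source)
print(ast.literal_eval(match[1]))  # -> ['erpnext']
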
def get_remote(app, bench_path="."):
|
||||
@ -247,6 +258,7 @@ def get_app_name(bench_path: str, folder_name: str) -> str:
|
||||
|
||||
return folder_name
|
||||
|
||||
|
||||
def check_existing_dir(bench_path, repo_name):
|
||||
cloned_path = os.path.join(bench_path, "apps", repo_name)
|
||||
dir_already_exists = os.path.isdir(cloned_path)
|
||||
|
@ -1,4 +1,5 @@
|
||||
# imports - standard imports
|
||||
import contextlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
@ -6,25 +7,14 @@ import re
|
||||
import subprocess
|
||||
import sys
|
||||
from json.decoder import JSONDecodeError
|
||||
import typing
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
import bench
|
||||
|
||||
# imports - module imports
|
||||
from bench.utils import (
|
||||
which,
|
||||
log,
|
||||
exec_cmd,
|
||||
get_bench_name,
|
||||
get_cmd_output,
|
||||
)
|
||||
import bench
|
||||
from bench.exceptions import PatchError, ValidationError
|
||||
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from bench.bench import Bench
|
||||
from bench.utils import exec_cmd, get_bench_name, get_cmd_output, log, which
|
||||
|
||||
logger = logging.getLogger(bench.PROJECT_NAME)
|
||||
|
||||
@ -56,9 +46,10 @@ def get_venv_path(verbose=False):
|
||||
|
||||
def update_node_packages(bench_path=".", apps=None):
|
||||
print("Updating node packages...")
|
||||
from bench.utils.app import get_develop_version
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
from bench.utils.app import get_develop_version
|
||||
|
||||
v = LooseVersion(get_develop_version("frappe", bench_path=bench_path))
|
||||
|
||||
# After rollup was merged, frappe_version = 10.1
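
Side note on the import reordering above: the helper still relies on distutils' LooseVersion to compare frappe's develop version against feature cut-offs such as the 10.1 rollup switch. A tiny sketch; the version string below is an assumed example, not taken from any bench:

from distutils.version import LooseVersion

v = LooseVersion("13.0.0-develop")  # assumed example value
print(v > LooseVersion("10.1"))  # prints: True; the real helper branches on comparisons like this
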
@ -95,7 +86,9 @@ def install_python_dev_dependencies(bench_path=".", apps=None, verbose=False):
bench.run(f"{bench.python} -m pip install {quiet_flag} --upgrade {pyproject_deps}")

if not pyproject_deps and os.path.exists(dev_requirements_path):
bench.run(f"{bench.python} -m pip install {quiet_flag} --upgrade -r {dev_requirements_path}")
bench.run(
f"{bench.python} -m pip install {quiet_flag} --upgrade -r {dev_requirements_path}"
)


def _generate_dev_deps_pattern(pyproject_path):
@ -107,12 +100,10 @@ def _generate_dev_deps_pattern(pyproject_path):
requirements_pattern = ""
pyroject_config = loads(open(pyproject_path).read())

try:
for pkg, version in pyroject_config['tool']['bench']['dev-dependencies'].items():
with contextlib.suppress(KeyError):
for pkg, version in pyroject_config["tool"]["bench"]["dev-dependencies"].items():
op = "==" if "=" not in version else ""
requirements_pattern += f"{pkg}{op}{version} "
except KeyError:
pass
return requirements_pattern
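
To make the contextlib.suppress change above concrete: the function turns the `[tool.bench.dev-dependencies]` table of a pyproject.toml into a pip-installable requirement string. A standalone sketch with an invented dependency table:

import contextlib

pyproject_config = {"tool": {"bench": {"dev-dependencies": {"coverage": "6.5.0", "watchdog": "~=2.1"}}}}  # made-up table
requirements_pattern = ""
with contextlib.suppress(KeyError):
    for pkg, version in pyproject_config["tool"]["bench"]["dev-dependencies"].items():
        op = "==" if "=" not in version else ""
        requirements_pattern += f"{pkg}{op}{version} "
print(requirements_pattern)  # prints: "coverage==6.5.0 watchdog~=2.1 " (note the trailing space)
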


@ -120,9 +111,7 @@ def update_yarn_packages(bench_path=".", apps=None):
from bench.bench import Bench

bench = Bench(bench_path)

apps = apps or bench.apps

apps_dir = os.path.join(bench.name, "apps")

# TODO: Check for stuff like this early on only??
@ -149,7 +138,7 @@ def update_npm_packages(bench_path=".", apps=None):
package_json_path = os.path.join(apps_dir, app, "package.json")

if os.path.exists(package_json_path):
with open(package_json_path, "r") as f:
with open(package_json_path) as f:
app_package_json = json.loads(f.read())
# package.json is usually a dict in a dict
for key, value in app_package_json.items():
@ -164,7 +153,7 @@ def update_npm_packages(bench_path=".", apps=None):
package_json[key] = value

if package_json is {}:
with open(os.path.join(os.path.dirname(__file__), "package.json"), "r") as f:
with open(os.path.join(os.path.dirname(__file__), "package.json")) as f:
package_json = json.loads(f.read())

with open(os.path.join(bench_path, "package.json"), "w") as f:
@ -176,6 +165,7 @@ def update_npm_packages(bench_path=".", apps=None):
def migrate_env(python, backup=False):
import shutil
from urllib.parse import urlparse

from bench.bench import Bench

bench = Bench(".")
@ -231,16 +221,15 @@ def migrate_env(python, backup=False):


def validate_upgrade(from_ver, to_ver, bench_path="."):
if to_ver >= 6:
if not which("npm") and not (which("node") or which("nodejs")):
raise Exception("Please install nodejs and npm")
if to_ver >= 6 and not which("npm") and not which("node") and not which("nodejs"):
raise Exception("Please install nodejs and npm")


def post_upgrade(from_ver, to_ver, bench_path="."):
from bench.config import redis
from bench.config.supervisor import generate_supervisor_config
from bench.config.nginx import make_nginx_conf
from bench.bench import Bench
from bench.config import redis
from bench.config.nginx import make_nginx_conf
from bench.config.supervisor import generate_supervisor_config

conf = Bench(bench_path).conf
print("-" * 80 + f"Your bench was upgraded to version {to_ver}")
@ -323,9 +312,7 @@ def restart_systemd_processes(bench_path=".", web_workers=False):

def restart_process_manager(bench_path=".", web_workers=False):
# only overmind has the restart feature, not sure other supported procmans do
if which("overmind") and os.path.exists(
os.path.join(bench_path, ".overmind.sock")
):
if which("overmind") and os.path.exists(os.path.join(bench_path, ".overmind.sock")):
worker = "web" if web_workers else ""
exec_cmd(f"overmind restart {worker}", cwd=bench_path)

@ -338,7 +325,7 @@ def build_assets(bench_path=".", app=None):


def handle_version_upgrade(version_upgrade, bench_path, force, reset, conf):
from bench.utils import pause_exec, log
from bench.utils import log, pause_exec

if version_upgrade[0]:
if force:
@ -386,13 +373,12 @@ def update(
):
"""command: bench update"""
import re
from bench import patches

from bench import patches
from bench.app import pull_apps
from bench.bench import Bench
from bench.config.common_site_config import update_config
from bench.exceptions import CannotUpdateReleaseBench

from bench.utils import clear_command_cache
from bench.utils.app import is_version_upgrade
from bench.utils.system import backup_all_sites
@ -459,8 +445,7 @@ def update(
update_config(conf, bench_path=bench_path)

print(
"_" * 80
+ "\nBench: Deployment tool for Frappe and Frappe Applications"
"_" * 80 + "\nBench: Deployment tool for Frappe and Frappe Applications"
" (https://frappe.io/bench).\nOpen source depends on your contributions, so do"
" give back by submitting bug reports, patches and fixes and be a part of the"
" community :)"
@ -500,7 +485,7 @@ def clone_apps_from(bench_path, clone_from, update_app=True):

install_app(app, bench_path, restart_bench=False)

with open(os.path.join(clone_from, "sites", "apps.txt"), "r") as f:
with open(os.path.join(clone_from, "sites", "apps.txt")) as f:
apps = f.read().splitlines()

for app in apps:
@ -509,6 +494,7 @@ def clone_apps_from(bench_path, clone_from, update_app=True):

def remove_backups_crontab(bench_path="."):
from crontab import CronTab

from bench.bench import Bench

logger.log("removing backup cronjob")
@ -543,7 +529,7 @@ def update_common_site_config(ddict, bench_path="."):
filename = os.path.join(bench_path, "sites", "common_site_config.json")

if os.path.exists(filename):
with open(filename, "r") as f:
with open(filename) as f:
content = json.load(f)

else:
@ -605,7 +591,7 @@ def validate_branch():
apps = Bench(".").apps

installed_apps = set(apps)
check_apps = set(["frappe", "erpnext"])
check_apps = {"frappe", "erpnext"}
intersection_apps = installed_apps.intersection(check_apps)

for app in intersection_apps:

@ -14,7 +14,7 @@ class Capturing(list):
Util to consume the stdout encompassed in it and push it to a list

with Capturing() as output:
subprocess.check_output("ls", shell=True)
subprocess.check_output("ls", shell=True)

print(output)
# ["b'Applications\\nDesktop\\nDocuments\\nDownloads\\n'"]
@ -53,20 +53,25 @@ class Rendering:
if not self.dynamic_feed:
return

_prefix = click.style('⏼', fg='bright_yellow')
_hierarchy = " " if not self.is_parent else ""
_prefix = click.style("⏼", fg="bright_yellow")
_hierarchy = "" if self.is_parent else " "
self._title = self.title.format(**self.kw)
click.secho(f"{_hierarchy}{_prefix} {self._title}")

bench.LOG_BUFFER.append(
{"message": self._title, "prefix": _prefix, "color": None, "is_parent": self.is_parent}
{
"message": self._title,
"prefix": _prefix,
"color": None,
"is_parent": self.is_parent,
}
)

def __exit__(self, *args, **kwargs):
if not self.dynamic_feed:
return

self._prefix = click.style('✔', fg='green')
self._prefix = click.style("✔", fg="green")
self._success = self.success.format(**self.kw)

self.render_screen()
@ -78,7 +83,7 @@ class Rendering:
if l["message"] == self._title:
l["prefix"] = self._prefix
l["message"] = self._success
_hierarchy = " " if not l["is_parent"] else ""
_hierarchy = "" if l.get("is_parent") else " "
click.secho(f'{_hierarchy}{l["prefix"]} {l["message"]}', fg=l["color"])


@ -87,14 +92,20 @@ def job(title: str = None, success: str = None):
For instance, the `get-app` command consists of two jobs: `initializing bench`
and `fetching and installing app`.
"""

def innfn(fn):
def wrapper_fn(*args, **kwargs):
with Rendering(
success=success, title=title, is_parent=True, args=args, kwargs=kwargs,
success=success,
title=title,
is_parent=True,
args=args,
kwargs=kwargs,
):
return fn(*args, **kwargs)

return wrapper_fn

return innfn


@ -102,12 +113,18 @@ def step(title: str = None, success: str = None):
"""Supposed to be wrapped around the smallest possible atomic step in a given operation.
For instance, `building assets` is a step in the update operation.
"""

def innfn(fn):
def wrapper_fn(*args, **kwargs):
with Rendering(
success=success, title=title, is_parent=False, args=args, kwargs=kwargs,
success=success,
title=title,
is_parent=False,
args=args,
kwargs=kwargs,
):
return fn(*args, **kwargs)

return wrapper_fn

return innfn
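
For orientation (this behaviour predates the commit, only the formatting changes here): `job` and `step` simply wrap a callable in the Rendering context manager so its title and success messages show up in the dynamic feed. A hedged usage sketch with placeholder messages and a placeholder function name:

from bench.utils.render import step

@step(title="building assets for {app}", success="built assets for {app}")
def build(app):
    ...  # decorated body runs inside a Rendering context; placeholders are assumed to be filled from keyword arguments

build(app="frappe")  # with the dynamic feed enabled, prints the title line, then the success line
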

@ -44,8 +44,8 @@ def init(
* setup config (dir/pids/redis/procfile) for the bench
* setup patches.txt for bench
* clone & install frappe
* install python & node dependencies
* build assets
* install python & node dependencies
* build assets
* setup backups crontab
"""

@ -113,10 +113,7 @@ def setup_sudoers(user):
if not os.path.exists("/etc/sudoers.d"):
os.makedirs("/etc/sudoers.d")

set_permissions = False
if not os.path.exists("/etc/sudoers"):
set_permissions = True

set_permissions = not os.path.exists("/etc/sudoers")
with open("/etc/sudoers", "a") as f:
f.write("\n#includedir /etc/sudoers.d\n")

@ -142,11 +139,7 @@ def setup_sudoers(user):


def start(no_dev=False, concurrency=None, procfile=None, no_prefix=False, procman=None):
if procman:
program = which(procman)
else:
program = get_process_manager()

program = which(procman) if procman else get_process_manager()
if not program:
raise Exception("No process manager found")


@ -43,7 +43,7 @@ def update_translations(app, lang):
import requests

translations_dir = os.path.join("apps", app, app, "translations")
csv_file = os.path.join(translations_dir, lang + ".csv")
csv_file = os.path.join(translations_dir, f"{lang}.csv")
url = f"https://translate.erpnext.com/files/{app}-{lang}.csv"
r = requests.get(url, stream=True)
r.raise_for_status()

12
setup.py
@ -1,11 +1,11 @@
import pathlib

from setuptools import find_packages, setup

from bench import PROJECT_NAME, VERSION

with open("requirements.txt") as f:
install_requires = f.read().strip().split("\n")

with open("README.md") as f:
long_description = f.read()
install_requires = pathlib.Path("requirements.txt").read_text().strip().split("\n")
long_description = pathlib.Path("README.md").read_text()
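
Aside: the open()-based reads are swapped for pathlib one-liners that do the same thing. A throwaway illustration; demo.txt is an invented file name, not anything the commit touches:

import pathlib

pathlib.Path("demo.txt").write_text("click==8.0.4\nGitPython\n")
print(pathlib.Path("demo.txt").read_text().strip().split("\n"))  # prints: ['click==8.0.4', 'GitPython']
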

setup(
name=PROJECT_NAME,
@ -34,7 +34,7 @@ setup(
"Topic :: System :: Installation/Setup",
],
packages=find_packages(),
python_requires="~=3.6",
python_requires=">=3.7",
zip_safe=False,
include_package_data=True,
install_requires=install_requires,