Mirror of https://github.com/frappe/bench.git (synced 2025-01-24 23:48:24 +00:00)
Merge branch 'develop' into referrer-policy
This commit is contained in:
commit beb14c8681
@@ -1,32 +0,0 @@
version: 2
jobs:
  build:
    machine: true
    working_directory: ~/repo
    steps:
      - checkout
      - run:
          name: Setup
          command: |
            sudo pip install --ignore-installed setuptools
            sudo pip install urllib3 pyOpenSSL ndg-httpsclient pyasn1
            sudo cp -r ~/.ssh/* /root/.ssh
            mkdir -p ~/.bench
            mkdir -p /tmp/.bench
            cp -r ~/repo/* ~/.bench
            cp -r ~/repo/* /tmp/.bench

      - run:
          name: Install Bench (Production)
          command: sudo python ~/repo/playbooks/install.py --user travis --run-travis --production

      - run:
          name: Setup Tests
          command: |
            cd ~
            sudo pip install --upgrade pip
            sudo pip install -e ~/.bench

      - run:
          name: Run Tests
          command: sudo -E python -m unittest -v bench.tests.test_setup_production
13  .github/semantic.yml  (vendored, Normal file)
@@ -0,0 +1,13 @@
# Always validate the PR title AND all the commits
titleAndCommits: true

# Allow use of Merge commits (eg on github: "Merge branch 'master' into feature/ride-unicorns")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowMergeCommits: true

# Allow use of Revert commits (eg on github: "Revert "feat: ride unicorns"")
# this is only relevant when using commitsOnly: true (or titleAndCommits: true)
allowRevertCommits: true

# For allowed PR types: https://github.com/commitizen/conventional-commit-types/blob/v3.0.0/index.json
# Tool Reference: https://github.com/zeke/semantic-pull-requests
28  .github/workflows/release.yml  (vendored, Normal file)
@@ -0,0 +1,28 @@
name: Generate Semantic Release and publish on PyPI
on:
  push:
    branches:
      - v5.x
jobs:
  release:
    name: Release
    runs-on: ubuntu-18.04
    steps:
      - name: Checkout Entire Repository
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Setup Node.js v12
        uses: actions/setup-node@v1
        with:
          node-version: 12
      - name: Setup dependencies
        run: |
          npm install @semantic-release/git @semantic-release/exec --no-save
          pip install wheel twine
      - name: Create Release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
          PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }}
        run: npx semantic-release
35  .releaserc  (Normal file)
@@ -0,0 +1,35 @@
{
  "branches": ["v5.x"],
  "plugins": [
    "@semantic-release/commit-analyzer",
    "@semantic-release/release-notes-generator",
    [
      "@semantic-release/exec", {
        "prepareCmd": 'sed -ir "s/[0-9]*\.[0-9]*\.[0-9]*/${nextRelease.version}/" bench/__init__.py'
      }
    ],
    [
      "@semantic-release/exec", {
        "prepareCmd": "python setup.py bdist_wheel --universal"
      }
    ],
    [
      "@semantic-release/git", {
        "assets": ["bench/__init__.py"],
        "message": "chore(release): Bumped to Version ${nextRelease.version}\n\n${nextRelease.notes}"
      }
    ],
    [
      "@semantic-release/github", {
        "assets": [
          {"path": "dist/*"},
        ]
      }
    ],
    [
      "@semantic-release/exec", {
        "publishCmd": "python -m twine upload dist/* -u $PYPI_USERNAME -p $PYPI_PASSWORD"
      }
    ]
  ]
}
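For context on the `.releaserc` above: the first `@semantic-release/exec` plugin's `prepareCmd` rewrites the version string in `bench/__init__.py` before the release commit is created. A sketch of the same substitution applied by hand, using a hypothetical next version (5.3.0 is illustrative only, not a real release):

```sh
# Sketch: the substitution semantic-release would run for nextRelease.version = 5.3.0
sed -ir "s/[0-9]*\.[0-9]*\.[0-9]*/5.3.0/" bench/__init__.py
grep VERSION bench/__init__.py   # confirm the version string was rewritten
```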
49  .travis.yml
@@ -15,35 +15,20 @@ addons:

matrix:
  include:
    - name: "Python 2.7 Basic Setup"
      python: 2.7
      env: TEST=bench
      script: python bench/tests/test_init.py TestBenchInit.basic

    - name: "Python 3.6 Basic Setup"
      python: 3.6
      env: TEST=bench
      script: python bench/tests/test_init.py TestBenchInit.basic

    - name: "Python 3.7 Basic Setup"
      python: 3.7
      env: TEST=bench
      script: python bench/tests/test_init.py TestBenchInit.basic

    - name: "Python 3.8 Production Setup"
    - name: "Python 3.8 Basic Setup"
      python: 3.8
      env: TEST=bench
      script: python bench/tests/test_setup_production.py TestSetupProduction.production
      script: python bench/tests/test_init.py TestBenchInit.basic

    - name: "Python 2.7 Production Setup"
      python: 2.7
    - name: "Python 3.9 Basic Setup"
      python: 3.9
      env: TEST=bench
      script: python bench/tests/test_setup_production.py TestSetupProduction.production

    - name: "Python 3.6 Production Setup"
      python: 3.6
      env: TEST=bench
      script: python bench/tests/test_setup_production.py TestSetupProduction.production
      script: python bench/tests/test_init.py TestBenchInit.basic

    - name: "Python 3.7 Production Setup"
      python: 3.7
@@ -55,20 +40,25 @@ matrix:
      env: TEST=bench
      script: python bench/tests/test_setup_production.py TestSetupProduction.production

    - name: "Python 2.7 Tests"
      python: 2.7
    - name: "Python 3.9 Production Setup"
      python: 3.9
      env: TEST=bench
      script: python -m unittest -v bench.tests.test_init
      script: python bench/tests/test_setup_production.py TestSetupProduction.production

    - name: "Python 3.7 Tests"
      python: 3.7
      env: TEST=bench
      script: python -m unittest -v bench.tests.test_init

    - name: "Python 3.5 Easy Install"
      python: 3.5
      env: TEST=easy_install
      script: sudo python $TRAVIS_BUILD_DIR/install.py --user travis --run-travis --production --verbose
    - name: "Python 3.8 Tests"
      python: 3.8
      env: TEST=bench
      script: python -m unittest -v bench.tests.test_init

    - name: "Python 3.9 Tests"
      python: 3.9
      env: TEST=bench
      script: python -m unittest -v bench.tests.test_init

    - name: "Python 3.7 Easy Install"
      python: 3.7
@@ -80,6 +70,11 @@ matrix:
      env: TEST=easy_install
      script: sudo python $TRAVIS_BUILD_DIR/install.py --user travis --run-travis --production --verbose

    - name: "Python 3.9 Easy Install"
      python: 3.9
      env: TEST=easy_install
      script: sudo python $TRAVIS_BUILD_DIR/install.py --user travis --run-travis --production --verbose

install:
  - pip install urllib3 pyOpenSSL ndg-httpsclient pyasn1
63  README.md
@@ -8,12 +8,10 @@ Bench is a command-line utility that helps you to install, update, and manage mu
## Table of Contents

- [Installation](#installation)
	- [Docker Installation](#docker-installation)
		- [Development Setup](#docker-installation-for-development)
		- [Production Setup](#docker-installation-for-production)
	- [Containerized Installation](#containerized-installation)
	- [Easy Install Script](#easy-install-script)
	- [Manual Installation](#manual-installation)
- [Usage](#usage)
- [Usage](#basic-usage)
- [Custom Bench commands](#custom-bench-commands)
- [Bench Manager](#bench-manager)
- [Guides](#guides)
@@ -28,7 +26,7 @@ A typical bench setup provides two types of environments — Development and

The setup for each of these installations can be achieved in multiple ways:

- [Docker Installation](#docker-installation)
- [Containerized Installation](#containerized-installation)
- [Easy Install Script](#easy-install-script)
- [Manual Installation](#manual-installation)

@@ -37,7 +35,7 @@ We recommend using either the Docker Installation or the Easy Install Script to
Otherwise, if you are looking to evaluate ERPNext, you can also download the [Virtual Machine Image](https://erpnext.com/download) or register for [a free trial on erpnext.com](https://erpnext.com/pricing).


### Docker Installation
### Containerized Installation

A Frappe/ERPNext instance can be setup and replicated easily using [Docker](https://docker.com). The officially supported Docker installation can be used to setup either of both Development and Production environments.

@@ -48,56 +46,7 @@ $ git clone https://github.com/frappe/frappe_docker.git
$ cd frappe_docker
```

A quick setup guide for both the envionments can be found below. For more details, check out the [Frappe/ERPNext Docker Repository](https://github.com/frappe/frappe_docker).

#### Docker Installation for Development

To setup a development environment for Docker, follow the [Frappe/ERPNext Docker for Development Guide](https://github.com/frappe/frappe_docker/blob/develop/development/README.md).

#### Docker Installation for Production

Copy the `env-example` file to `.env`

```sh
$ cp env-example .env
```

Optionally, you may also setup an [NGINX Proxy for SSL Certificates](https://github.com/evertramos/docker-compose-letsencrypt-nginx-proxy-companion) with auto-renewal for your Production instance. We recommend this for instances being accessed over the internet. For this to work, the DNS needs to be configured correctly so that [LetsEncrypt](https://letsencrypt.org) can verify the domain. To setup the proxy, run the following commands:

```sh
$ git clone https://github.com/evertramos/docker-compose-letsencrypt-nginx-proxy-companion.git
$ cd docker-compose-letsencrypt-nginx-proxy-companion
$ cp .env.sample .env
$ ./start.sh
```

To get the Production instance running, run the following command:

```sh
$ docker-compose \
	--project-name <project-name> \
	-f installation/docker-compose-common.yml \
	-f installation/docker-compose-erpnext.yml \
	-f installation/docker-compose-networks.yml \
	--project-directory installation up -d
```

Make sure to replace `<project-name>` with whatever you wish to call it. This should get the instance running through docker. Now, to create a new site on the instance you may run:

```sh
docker exec -it \
	-e "SITE_NAME=$SITE_NAME" \
	-e "DB_ROOT_USER=$DB_ROOT_USER" \
	-e "MYSQL_ROOT_PASSWORD=$MYSQL_ROOT_PASSWORD" \
	-e "ADMIN_PASSWORD=$ADMIN_PASSWORD" \
	-e "INSTALL_APPS=erpnext" \ # optional, if you want to install any other apps
	<project-name>_erpnext-python_1 docker-entrypoint.sh new
```

Once this is done, you may access the instance at `$SITE_NAME`.

**Note:** The Production setup does not contain, require, or use bench. For a list of substitute commands, check out the [Frappe/ERPNext Docker Site Operations](https://github.com/frappe/frappe_docker/#site-operations).

A quick setup guide for both the environments can be found below. For more details, check out the [Frappe/ERPNext Docker Repository](https://github.com/frappe/frappe_docker).

### Easy Install Script

@@ -105,6 +54,8 @@ The Easy Install script should get you going with a Frappe/ERPNext setup with mi

**Note:** This script works only on GNU/Linux based server distributions, and has been designed and tested to work on Ubuntu 16.04+, CentOS 7+, and Debian-based systems.

> This script installs Version 12 by default. It is untested with Version 13 and above. Containerized or manual installs are recommended for newer setups.

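For reference, a typical invocation looks like the sketch below. This is an illustrative sketch, not part of the original README: the download URL and flags are assumptions based on the `install.py` usage in this repository's Travis configuration, and `frappe` is just an example username.

```sh
# Sketch only: fetch the Easy Install script from the repo root and run a production setup
wget https://raw.githubusercontent.com/frappe/bench/develop/install.py
sudo python3 install.py --production --user frappe
```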
#### Prerequisites

You need to install the following packages for the script to run:
@@ -1,4 +1,4 @@
VERSION = "5.2.1"
VERSION = "5.0.0-dev"
PROJECT_NAME = "frappe-bench"
FRAPPE_VERSION = None
271  bench/app.py
@@ -1,25 +1,18 @@
# imports - compatibility imports
from __future__ import print_function

# imports - standard imports
import json
from json.decoder import JSONDecodeError
import logging
import os
import re
import shutil
import subprocess
import sys

# imports - third party imports
import click
import git
import requests
import semantic_version
from six.moves import reload_module
from setuptools.config import read_configuration

# imports - module imports
import bench
from bench.config.common_site_config import get_config
from bench.utils import color, CommandFailedError, build_assets, check_git_for_shallow_clone, exec_cmd, get_cmd_output, get_frappe, restart_supervisor_processes, restart_systemd_processes, run_frappe_cmd


@@ -74,7 +67,7 @@ def add_to_excluded_apps_txt(app, bench_path='.'):
	if app == 'frappe':
		raise ValueError('Frappe app cannot be excludeed from update')
	if app not in os.listdir('apps'):
		raise ValueError('The app {} does not exist'.format(app))
		raise ValueError(f'The app {app} does not exist')
	apps = get_excluded_apps(bench_path=bench_path)
	if app not in apps:
		apps.append(app)
@@ -91,48 +84,49 @@ def remove_from_excluded_apps_txt(app, bench_path='.'):
	return write_excluded_apps_txt(apps, bench_path=bench_path)

def get_app(git_url, branch=None, bench_path='.', skip_assets=False, verbose=False, restart_bench=True, overwrite=False):
	import requests
	import shutil

	if not os.path.exists(git_url):
		if not is_git_url(git_url):
			orgs = ['frappe', 'erpnext']
			for org in orgs:
				url = 'https://api.github.com/repos/{org}/{app}'.format(org=org, app=git_url)
				url = f'https://api.github.com/repos/{org}/{git_url}'
				res = requests.get(url)
				if res.ok:
					data = res.json()
					if 'name' in data:
						if git_url == data['name']:
							git_url = 'https://github.com/{org}/{app}'.format(org=org, app=git_url)
							git_url = f'https://github.com/{org}/{git_url}'
							break
			else:
				bench.utils.log("App {app} not found".format(app=git_url), level=2)
				bench.utils.log(f"App {git_url} not found", level=2)
				sys.exit(1)

		# Gets repo name from URL
		repo_name = git_url.rstrip('/').rsplit('/', 1)[1].rsplit('.', 1)[0]
		shallow_clone = '--depth 1' if check_git_for_shallow_clone() else ''
		branch = '--branch {branch}'.format(branch=branch) if branch else ''
		branch = f'--branch {branch}' if branch else ''
	else:
		repo_name = git_url.split(os.sep)[-1]
		git_url = os.path.abspath(git_url)
		_, repo_name = os.path.split(git_url)
		shallow_clone = ''
		branch = '--branch {branch}'.format(branch=branch) if branch else ''
		branch = f'--branch {branch}' if branch else ''

	if os.path.isdir(os.path.join(bench_path, 'apps', repo_name)):
		# application directory already exists
		# prompt user to overwrite it
		if overwrite or click.confirm('''A directory for the application "{0}" already exists.
Do you want to continue and overwrite it?'''.format(repo_name)):
		if overwrite or click.confirm(f'''A directory for the application "{repo_name}" already exists.
Do you want to continue and overwrite it?'''):
			shutil.rmtree(os.path.join(bench_path, 'apps', repo_name))
		elif click.confirm('''Do you want to reinstall the existing application?''', abort=True):
			app_name = get_app_name(bench_path, repo_name)
			install_app(app=app_name, bench_path=bench_path, verbose=verbose, skip_assets=skip_assets)
			sys.exit()

	print('\n{0}Getting {1}{2}'.format(color.yellow, repo_name, color.nc))
	logger.log('Getting app {0}'.format(repo_name))
	exec_cmd("git clone {git_url} {branch} {shallow_clone} --origin upstream".format(
		git_url=git_url,
		shallow_clone=shallow_clone,
		branch=branch),
	print(f'\n{color.yellow}Getting {repo_name}{color.nc}')
	logger.log(f'Getting app {repo_name}')
	exec_cmd(f"git clone {git_url} {branch} {shallow_clone} --origin upstream",
		cwd=os.path.join(bench_path, 'apps'))

	app_name = get_app_name(bench_path, repo_name)
@@ -140,41 +134,47 @@ Do you want to continue and overwrite it?'''.format(repo_name)):


def get_app_name(bench_path, repo_name):
	# retrieves app name from setup.py
	app_path = os.path.join(bench_path, 'apps', repo_name, 'setup.py')
	with open(app_path, 'rb') as f:
		app_name = re.search(r'name\s*=\s*[\'"](.*)[\'"]', f.read().decode('utf-8')).group(1)
		if repo_name != app_name:
			apps_path = os.path.join(os.path.abspath(bench_path), 'apps')
			os.rename(os.path.join(apps_path, repo_name), os.path.join(apps_path, app_name))
	app_name = None
	apps_path = os.path.join(os.path.abspath(bench_path), 'apps')
	config_path = os.path.join(apps_path, repo_name, 'setup.cfg')
	if os.path.exists(config_path):
		config = read_configuration(config_path)
		app_name = config.get('metadata', {}).get('name')

	if not app_name:
		# retrieve app name from setup.py as fallback
		app_path = os.path.join(apps_path, repo_name, 'setup.py')
		with open(app_path, 'rb') as f:
			app_name = re.search(r'name\s*=\s*[\'"](.*)[\'"]', f.read().decode('utf-8')).group(1)

	if app_name and repo_name != app_name:
		os.rename(os.path.join(apps_path, repo_name), os.path.join(apps_path, app_name))
		return app_name

	return repo_name


def new_app(app, bench_path='.'):
	# For backwards compatibility
	app = app.lower().replace(" ", "_").replace("-", "_")
	logger.log('creating new app {}'.format(app))
	logger.log(f'creating new app {app}')
	apps = os.path.abspath(os.path.join(bench_path, 'apps'))
	bench.set_frappe_version(bench_path=bench_path)

	if bench.FRAPPE_VERSION == 4:
		exec_cmd("{frappe} --make_app {apps} {app}".format(frappe=get_frappe(bench_path=bench_path),
			apps=apps, app=app))
	else:
		run_frappe_cmd('make-app', apps, app, bench_path=bench_path)
	run_frappe_cmd('make-app', apps, app, bench_path=bench_path)
	install_app(app, bench_path=bench_path)


def install_app(app, bench_path=".", verbose=False, no_cache=False, restart_bench=True, skip_assets=False):
	print('\n{0}Installing {1}{2}'.format(color.yellow, app, color.nc))
	logger.log("installing {}".format(app))
	from bench.config.common_site_config import get_config

	pip_path = os.path.join(bench_path, "env", "bin", "pip")
	print(f'\n{color.yellow}Installing {app}{color.nc}')
	logger.log(f"installing {app}")

	python_path = os.path.join(bench_path, "env", "bin", "python")
	quiet_flag = "-q" if not verbose else ""
	app_path = os.path.join(bench_path, "apps", app)
	cache_flag = "--no-cache-dir" if no_cache else ""

	exec_cmd("{pip} install {quiet} -U -e {app} {no_cache}".format(pip=pip_path, quiet=quiet_flag, app=app_path, no_cache=cache_flag))
	exec_cmd(f"{python_path} -m pip install {quiet_flag} -U -e {app_path} {cache_flag}")

	if os.path.exists(os.path.join(app_path, 'package.json')):
		exec_cmd("yarn install", cwd=app_path)
@@ -194,65 +194,111 @@ def install_app(app, bench_path=".", verbose=False, no_cache=False, restart_benc


def remove_app(app, bench_path='.'):
	if app not in get_apps(bench_path):
		print("No app named {0}".format(app))
		sys.exit(1)
	import shutil
	from bench.config.common_site_config import get_config

	app_path = os.path.join(bench_path, 'apps', app)
	site_path = os.path.join(bench_path, 'sites')
	pip = os.path.join(bench_path, 'env', 'bin', 'pip')
	py = os.path.join(bench_path, 'env', 'bin', 'python')

	for site in os.listdir(site_path):
		req_file = os.path.join(site_path, site, 'site_config.json')
		if os.path.exists(req_file):
			out = subprocess.check_output(["bench", "--site", site, "list-apps"], cwd=bench_path).decode('utf-8')
			if re.search(r'\b' + app + r'\b', out):
				print("Cannot remove, app is installed on site: {0}".format(site))
				sys.exit(1)
	# validate app removal
	if app not in get_apps(bench_path):
		print(f"No app named {app}")
		sys.exit(1)

	exec_cmd("{0} uninstall -y {1}".format(pip, app), cwd=bench_path)
	validate_app_installed_on_sites(app, bench_path=bench_path)

	# remove app from bench
	exec_cmd("{0} -m pip uninstall -y {1}".format(py, app), cwd=bench_path)
	remove_from_appstxt(app, bench_path)
	shutil.rmtree(app_path)

	# re-build assets and restart processes
	run_frappe_cmd("build", bench_path=bench_path)
	if get_config(bench_path).get('restart_supervisor_on_update'):
		restart_supervisor_processes(bench_path=bench_path)
	if get_config(bench_path).get('restart_systemd_on_update'):
		restart_systemd_processes(bench_path=bench_path)


def validate_app_installed_on_sites(app, bench_path="."):
	print("Checking if app installed on active sites...")
	ret = check_app_installed(app, bench_path=bench_path)

	if ret is None:
		check_app_installed_legacy(app, bench_path=bench_path)
	else:
		return ret


def check_app_installed(app, bench_path="."):
	try:
		out = subprocess.check_output(
			["bench", "--site", "all", "list-apps", "--format", "json"],
			stderr=open(os.devnull, "wb"),
			cwd=bench_path,
		).decode('utf-8')
	except subprocess.CalledProcessError:
		return None

	try:
		apps_sites_dict = json.loads(out)
	except JSONDecodeError:
		return None

	for site, apps in apps_sites_dict.items():
		if app in apps:
			print("Cannot remove, app is installed on site: {0}".format(site))
			sys.exit(1)


def check_app_installed_legacy(app, bench_path="."):
	site_path = os.path.join(bench_path, 'sites')

	for site in os.listdir(site_path):
		req_file = os.path.join(site_path, site, 'site_config.json')
		if os.path.exists(req_file):
			out = subprocess.check_output(["bench", "--site", site, "list-apps"], cwd=bench_path).decode('utf-8')
			if re.search(r'\b' + app + r'\b', out):
				print(f"Cannot remove, app is installed on site: {site}")
				sys.exit(1)


def pull_apps(apps=None, bench_path='.', reset=False):
	'''Check all apps if there no local changes, pull'''
	from bench.config.common_site_config import get_config

	rebase = '--rebase' if get_config(bench_path).get('rebase_on_pull') else ''

	apps = apps or get_apps(bench_path=bench_path)
	# chech for local changes
	# check for local changes
	if not reset:
		for app in apps:
			excluded_apps = get_excluded_apps()
			if app in excluded_apps:
				print("Skipping reset for app {}".format(app))
				print(f"Skipping reset for app {app}")
				continue
			app_dir = get_repo_dir(app, bench_path=bench_path)
			if os.path.exists(os.path.join(app_dir, '.git')):
				out = subprocess.check_output(["git", "status"], cwd=app_dir)
				out = subprocess.check_output('git status', shell=True, cwd=app_dir)
				out = out.decode('utf-8')
				if not re.search(r'nothing to commit, working (directory|tree) clean', out):
					print('''
					print(f'''

Cannot proceed with update: You have local changes in app "{0}" that are not committed.
Cannot proceed with update: You have local changes in app "{app}" that are not committed.

Here are your choices:

1. Merge the {0} app manually with "git pull" / "git pull --rebase" and fix conflicts.
1. Merge the {app} app manually with "git pull" / "git pull --rebase" and fix conflicts.
1. Temporarily remove your changes with "git stash" or discard them completely
	with "bench update --reset" or for individual repositries "git reset --hard"
2. If your changes are helpful for others, send in a pull request via GitHub and
	wait for them to be merged in the core.'''.format(app))
	wait for them to be merged in the core.''')
					sys.exit(1)

	excluded_apps = get_excluded_apps()
	for app in apps:
		if app in excluded_apps:
			print("Skipping pull for app {}".format(app))
			print(f"Skipping pull for app {app}")
			continue
		app_dir = get_repo_dir(app, bench_path=bench_path)
		if os.path.exists(os.path.join(app_dir, '.git')):
@@ -260,34 +306,44 @@ Here are your choices:
			if not remote:
				# remote is False, i.e. remote doesn't exist, add the app to excluded_apps.txt
				add_to_excluded_apps_txt(app, bench_path=bench_path)
				print("Skipping pull for app {}, since remote doesn't exist, and adding it to excluded apps".format(app))
				print(f"Skipping pull for app {app}, since remote doesn't exist, and adding it to excluded apps")
				continue
			logger.log('pulling {0}'.format(app))

			if not get_config(bench_path).get('shallow_clone') or not reset:
				is_shallow = os.path.exists(os.path.join(app_dir, ".git", "shallow"))
				if is_shallow:
					s = " to safely pull remote changes." if not reset else ""
					print(f"Unshallowing {app}{s}")
					exec_cmd(f"git fetch {remote} --unshallow", cwd=app_dir)

			branch = get_current_branch(app, bench_path=bench_path)
			logger.log(f'pulling {app}')
			if reset:
				exec_cmd("git fetch --all", cwd=app_dir)
				exec_cmd("git reset --hard {remote}/{branch}".format(
					remote=remote, branch=get_current_branch(app,bench_path=bench_path)), cwd=app_dir)
				reset_cmd = f"git reset --hard {remote}/{branch}"
				if get_config(bench_path).get('shallow_clone'):
					exec_cmd(f"git fetch --depth=1 --no-tags {remote} {branch}",
						cwd=app_dir)
					exec_cmd(reset_cmd, cwd=app_dir)
					exec_cmd("git reflog expire --all", cwd=app_dir)
					exec_cmd("git gc --prune=all", cwd=app_dir)
				else:
					exec_cmd("git fetch --all", cwd=app_dir)
					exec_cmd(reset_cmd, cwd=app_dir)
			else:
				exec_cmd("git pull {rebase} {remote} {branch}".format(rebase=rebase,
					remote=remote, branch=get_current_branch(app, bench_path=bench_path)), cwd=app_dir)
				exec_cmd(f"git pull {rebase} {remote} {branch}", cwd=app_dir)
				exec_cmd('find . -name "*.pyc" -delete', cwd=app_dir)


def is_version_upgrade(app='frappe', bench_path='.', branch=None):
	try:
		fetch_upstream(app, bench_path=bench_path)
	except CommandFailedError:
		raise InvalidRemoteException("No remote named upstream for {0}".format(app))

	upstream_version = get_upstream_version(app=app, branch=branch, bench_path=bench_path)

	if not upstream_version:
		raise InvalidBranchException("Specified branch of app {0} is not in upstream".format(app))
		raise InvalidBranchException(f'Specified branch of app {app} is not in upstream remote')

	local_version = get_major_version(get_current_version(app, bench_path=bench_path))
	upstream_version = get_major_version(upstream_version)

	if upstream_version - local_version > 0:
	if upstream_version > local_version:
		return (True, local_version, upstream_version)

	return (False, local_version, upstream_version)
@@ -320,20 +376,27 @@ def use_rq(bench_path):
	celery_app = os.path.join(bench_path, 'apps', 'frappe', 'frappe', 'celery_app.py')
	return not os.path.exists(celery_app)

def fetch_upstream(app, bench_path='.'):
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	return subprocess.call(["git", "fetch", "upstream"], cwd=repo_dir)

def get_current_version(app, bench_path='.'):
	current_version = None
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	config_path = os.path.join(repo_dir, "setup.cfg")
	init_path = os.path.join(repo_dir, os.path.basename(repo_dir), '__init__.py')
	setup_path = os.path.join(repo_dir, 'setup.py')

	try:
		with open(os.path.join(repo_dir, os.path.basename(repo_dir), '__init__.py')) as f:
			return get_version_from_string(f.read())
		if os.path.exists(config_path):
			config = read_configuration(config_path)
			current_version = config.get("metadata", {}).get("version")
		if not current_version:
			with open(init_path) as f:
				current_version = get_version_from_string(f.read())

	except AttributeError:
		# backward compatibility
		with open(os.path.join(repo_dir, 'setup.py')) as f:
			return get_version_from_string(f.read(), field='version')
		with open(setup_path) as f:
			current_version = get_version_from_string(f.read(), field='version')

	return current_version

def get_develop_version(app, bench_path='.'):
	repo_dir = get_repo_dir(app, bench_path=bench_path)
@@ -344,8 +407,15 @@ def get_upstream_version(app, branch=None, bench_path='.'):
	repo_dir = get_repo_dir(app, bench_path=bench_path)
	if not branch:
		branch = get_current_branch(app, bench_path=bench_path)

	try:
		contents = subprocess.check_output(['git', 'show', 'upstream/{branch}:{app}/__init__.py'.format(branch=branch, app=app)], cwd=repo_dir, stderr=subprocess.STDOUT)
		subprocess.call(f'git fetch --depth=1 --no-tags upstream {branch}', shell=True, cwd=repo_dir)
	except CommandFailedError:
		raise InvalidRemoteException(f'Failed to fetch from remote named upstream for {app}')

	try:
		contents = subprocess.check_output(f'git show upstream/{branch}:{app}/__init__.py',
			shell=True, cwd=repo_dir, stderr=subprocess.STDOUT)
		contents = contents.decode('utf-8')
	except subprocess.CalledProcessError as e:
		if b"Invalid object" in e.output:
@@ -358,7 +428,10 @@ def get_repo_dir(app, bench_path='.'):
	return os.path.join(bench_path, 'apps', app)

def switch_branch(branch, apps=None, bench_path='.', upgrade=False, check_upgrade=True):
	import git
	import importlib
	from bench.utils import update_requirements, update_node_packages, backup_all_sites, patch_sites, build_assets, post_upgrade

	apps_dir = os.path.join(bench_path, 'apps')
	version_upgrade = (False,)
	switched_apps = []
@@ -373,29 +446,29 @@ def switch_branch(branch, apps=None, bench_path='.', upgrade=False, check_upgrad
		app_dir = os.path.join(apps_dir, app)

		if not os.path.exists(app_dir):
			bench.utils.log("{} does not exist!".format(app), level=2)
			bench.utils.log(f"{app} does not exist!", level=2)
			continue

		repo = git.Repo(app_dir)
		unshallow_flag = os.path.exists(os.path.join(app_dir, ".git", "shallow"))
		bench.utils.log("Fetching upstream {0}for {1}".format("unshallow " if unshallow_flag else "", app))
		bench.utils.log(f"Fetching upstream {'unshallow ' if unshallow_flag else ''}for {app}")

		bench.utils.exec_cmd("git remote set-branches upstream '*'", cwd=app_dir)
		bench.utils.exec_cmd("git fetch --all{0} --quiet".format(" --unshallow" if unshallow_flag else ""), cwd=app_dir)
		bench.utils.exec_cmd(f"git fetch --all{' --unshallow' if unshallow_flag else ''} --quiet", cwd=app_dir)

		if check_upgrade:
			version_upgrade = is_version_upgrade(app=app, bench_path=bench_path, branch=branch)
			if version_upgrade[0] and not upgrade:
				bench.utils.log("Switching to {0} will cause upgrade from {1} to {2}. Pass --upgrade to confirm".format(branch, version_upgrade[1], version_upgrade[2]), level=2)
				bench.utils.log(f"Switching to {branch} will cause upgrade from {version_upgrade[1]} to {version_upgrade[2]}. Pass --upgrade to confirm", level=2)
				sys.exit(1)

		print("Switching for "+app)
		bench.utils.exec_cmd("git checkout -f {0}".format(branch), cwd=app_dir)
		bench.utils.exec_cmd(f"git checkout -f {branch}", cwd=app_dir)

		if str(repo.active_branch) == branch:
			switched_apps.append(app)
		else:
			bench.utils.log("Switching branches failed for: {}".format(app), level=2)
			bench.utils.log(f"Switching branches failed for: {app}", level=2)

	if switched_apps:
		bench.utils.log("Successfully switched branches for: " + ", ".join(switched_apps), level=1)
@@ -404,7 +477,7 @@ def switch_branch(branch, apps=None, bench_path='.', upgrade=False, check_upgrad
	if version_upgrade[0] and upgrade:
		update_requirements()
		update_node_packages()
		reload_module(bench.utils)
		importlib.reload(bench.utils)
		backup_all_sites()
		patch_sites()
		build_assets()
@@ -414,9 +487,6 @@ def switch_to_branch(branch=None, apps=None, bench_path='.', upgrade=False):
def switch_to_branch(branch=None, apps=None, bench_path='.', upgrade=False):
	switch_branch(branch, apps=apps, bench_path=bench_path, upgrade=upgrade)

def switch_to_master(apps=None, bench_path='.', upgrade=True):
	switch_branch('master', apps=apps, bench_path=bench_path, upgrade=upgrade)

def switch_to_develop(apps=None, bench_path='.', upgrade=True):
	switch_branch('develop', apps=apps, bench_path=bench_path, upgrade=upgrade)

@@ -425,6 +495,8 @@ def get_version_from_string(contents, field='__version__'):
	return match.group(2)

def get_major_version(version):
	import semantic_version

	return semantic_version.Version(version).major

def install_apps_from_path(path, bench_path='.'):
@@ -433,6 +505,8 @@ def install_apps_from_path(path, bench_path='.'):
		get_app(app['url'], branch=app.get('branch'), bench_path=bench_path, skip_assets=True)

def get_apps_json(path):
	import requests

	if path.startswith('http'):
		r = requests.get(path)
		return r.json()
@@ -454,7 +528,8 @@ As of January 2020, the following branches are
version		Frappe		ERPNext
11		version-11	version-11
12		version-12	version-12
13		develop		develop
13		version-13	version-13
14		develop		develop

Please switch to new branches to get future updates.
To switch to your required branch, run the following commands: bench switch-to-branch [branch-name]""")
14  bench/cli.py
@@ -27,11 +27,13 @@ def cli():
	command = " ".join(sys.argv)

	change_working_directory()
	logger = setup_logging() or logging.getLogger(bench.PROJECT_NAME)
	logger = setup_logging()
	logger.info(command)
	check_uid()
	change_dir()
	change_uid()

	if len(sys.argv) > 1 and sys.argv[1] not in ("src", ):
		check_uid()
		change_uid()
		change_dir()

	if is_dist_editable(bench.PROJECT_NAME) and len(sys.argv) > 1 and sys.argv[1] != "src" and not get_config(".").get("developer_mode"):
		log("bench is installed in editable mode!\n\nThis is not the recommended mode of installation for production. Instead, install the package from PyPI with: `pip install frappe-bench`\n", level=3)
@@ -62,7 +64,7 @@ def cli():
	except BaseException as e:
		return_code = getattr(e, "code", 0)
		if return_code:
			logger.warning("{0} executed with exit code {1}".format(command, return_code))
			logger.warning(f"{command} executed with exit code {return_code}")
		sys.exit(return_code)


@@ -138,7 +140,7 @@ def get_frappe_help(bench_path='.'):
	python = get_env_cmd('python', bench_path=bench_path)
	sites_path = os.path.join(bench_path, 'sites')
	try:
		out = get_cmd_output("{python} -m frappe.utils.bench_helper get-frappe-help".format(python=python), cwd=sites_path)
		out = get_cmd_output(f"{python} -m frappe.utils.bench_helper get-frappe-help", cwd=sites_path)
		return "\n\nFramework commands:\n" + out.split('Commands:')[1]
	except:
		return ""
@@ -27,16 +27,15 @@ bench_command.add_command(include_app_for_update)
bench_command.add_command(pip)


from bench.commands.update import update, retry_upgrade, switch_to_branch, switch_to_master, switch_to_develop
from bench.commands.update import update, retry_upgrade, switch_to_branch, switch_to_develop
bench_command.add_command(update)
bench_command.add_command(retry_upgrade)
bench_command.add_command(switch_to_branch)
bench_command.add_command(switch_to_master)
bench_command.add_command(switch_to_develop)


from bench.commands.utils import (start, restart, set_nginx_port, set_ssl_certificate, set_ssl_certificate_key, set_url_root,
	set_mariadb_host, set_default_site, download_translations, backup_site, backup_all_sites, release, renew_lets_encrypt,
	set_mariadb_host, download_translations, backup_site, backup_all_sites, release, renew_lets_encrypt,
	disable_production, bench_src, prepare_beta_release, set_redis_cache_host, set_redis_queue_host, set_redis_socketio_host, find_benches, migrate_env,
	generate_command_cache, clear_command_cache)
bench_command.add_command(start)
@@ -49,7 +48,6 @@ bench_command.add_command(set_mariadb_host)
bench_command.add_command(set_redis_cache_host)
bench_command.add_command(set_redis_queue_host)
bench_command.add_command(set_redis_socketio_host)
bench_command.add_command(set_default_site)
bench_command.add_command(download_translations)
bench_command.add_command(backup_site)
bench_command.add_command(backup_all_sites)
@@ -1,6 +1,3 @@
# imports - standard imports
import ast

# imports - module imports
from bench.config.common_site_config import update_config, get_config, put_config

@@ -52,6 +49,8 @@ def config_http_timeout(seconds):
@click.command('set-common-config', help='Set value in common config')
@click.option('configs', '-c', '--config', multiple=True, type=(str, str))
def set_common_config(configs):
	import ast

	common_site_config = {}
	for key, value in configs:
		if value in ('true', 'false'):
@@ -19,7 +19,7 @@ def remote_set_url(git_url):
@click.command('remote-reset-url', help="Reset app remote url to frappe official")
@click.argument('app')
def remote_reset_url(app):
	git_url = "https://github.com/frappe/{}.git".format(app)
	git_url = f"https://github.com/frappe/{app}.git"
	set_git_remote_url(git_url)


@@ -30,6 +30,6 @@ def remote_urls():

		if os.path.exists(os.path.join(repo_dir, '.git')):
			remote = get_remote(app)
			remote_url = subprocess.check_output(['git', 'config', '--get', 'remote.{}.url'.format(remote)], cwd=repo_dir).strip()
			print("{app} {remote_url}".format(app=app, remote_url=remote_url))
			remote_url = subprocess.check_output(['git', 'config', '--get', f'remote.{remote}.url'], cwd=repo_dir).strip()
			print(f"{app}\t{remote_url}")
@@ -8,11 +8,11 @@ import click
@click.option('--ignore-exist', is_flag = True, default = False, help = "Ignore if Bench instance exists.")
@click.option('--apps_path', default=None, help="path to json files with apps to install after init")
@click.option('--frappe-path', default=None, help="path to frappe repo")
@click.option('--frappe-branch', default=None, help="path to frappe repo")
@click.option('--frappe-branch', default=None, help="Clone a particular branch of frappe")
@click.option('--clone-from', default=None, help="copy repos from path")
@click.option('--clone-without-update', is_flag=True, help="copy repos from path without update")
@click.option('--no-procfile', is_flag=True, help="Pull changes in all the apps in bench")
@click.option('--no-backups',is_flag=True, help="Run migrations for all sites in the bench")
@click.option('--no-procfile', is_flag=True, help="Do not create a Procfile")
@click.option('--no-backups',is_flag=True, help="Do not set up automatic periodic backups for all sites on this bench")
@click.option('--skip-redis-config-generation', is_flag=True, help="Skip redis config generation if already specifying the common-site-config file")
@click.option('--skip-assets',is_flag=True, default=False, help="Do not build assets")
@click.option('--verbose',is_flag=True, help="Verbose output during install")
@@ -35,17 +35,17 @@ def init(path, apps_path, frappe_path, frappe_branch, no_procfile, no_backups, c
			skip_assets=skip_assets,
			python=python,
		)
		log('Bench {} initialized'.format(path), level=1)
		log(f'Bench {path} initialized', level=1)
	except SystemExit:
		pass
	except Exception as e:
		import os, shutil, time, six
		import os, shutil, time
		# add a sleep here so that the traceback of other processes doesnt overlap with the prompts
		time.sleep(1)
		print(e)
		log("There was a problem while creating {}".format(path), level=2)
		if six.moves.input("Do you want to rollback these changes? [Y/n]: ").lower() == "y":
			print('Rolling back Bench "{}"'.format(path))
		log(f"There was a problem while creating {path}", level=2)
		if click.confirm("Do you want to rollback these changes?"):
			print(f'Rolling back Bench "{path}"')
			if os.path.exists(path):
				shutil.rmtree(path)

@@ -98,5 +98,5 @@ def pip(ctx, args):
	"Run pip commands in bench env"
	import os
	from bench.utils import get_env_cmd
	env_pip = get_env_cmd('pip')
	os.execv(env_pip, (env_pip,) + args)
	env_py = get_env_cmd('python')
	os.execv(env_py, (env_py, '-m', 'pip') + args)
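The reworked `bench pip` command above execs the env's Python with `-m pip` instead of calling the `pip` binary directly; usage stays the same. A quick sketch (assumes you are inside an initialized bench; the package name is just an example):

```sh
# Sketch: arguments after "bench pip" are passed straight through to env/bin/python -m pip
bench pip install ipython
bench pip list
```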
@@ -6,10 +6,7 @@ import sys
import click

# imports - module imports
import bench.config.lets_encrypt
import bench.config.nginx
import bench.config.procfile
import bench.config.production_setup
import bench.config.redis
import bench.config.site_config
import bench.config.supervisor
@@ -31,20 +28,25 @@ def setup_sudoers(user):
@click.command("nginx", help="Generate configuration files for NGINX")
@click.option("--yes", help="Yes to regeneration of nginx config file", default=False, is_flag=True)
def setup_nginx(yes=False):
	import bench.config.nginx

	bench.config.nginx.make_nginx_conf(bench_path=".", yes=yes)


@click.command("reload-nginx", help="Checks NGINX config file and reloads service")
def reload_nginx():
	import bench.config.production_setup

	bench.config.production_setup.reload_nginx()


@click.command("supervisor", help="Generate configuration for supervisor")
@click.option("--user", help="optional user argument")
@click.option("--yes", help="Yes to regeneration of supervisor config", is_flag=True, default=False)
def setup_supervisor(user=None, yes=False):
@click.option("--skip-redis", help="Skip redis configuration", is_flag=True, default=False)
def setup_supervisor(user=None, yes=False, skip_redis=False):
	bench.config.supervisor.update_supervisord_config(user=user, yes=yes)
	bench.config.supervisor.generate_supervisor_config(bench_path=".", user=user, yes=yes)
	bench.config.supervisor.generate_supervisor_config(bench_path=".", user=user, yes=yes, skip_redis=skip_redis)


@click.command("redis", help="Generates configuration for Redis")
@@ -61,6 +63,8 @@ def setup_fonts():
@click.argument("user")
@click.option("--yes", help="Yes to regeneration config", is_flag=True, default=False)
def setup_production(user, yes=False):
	import bench.config.production_setup

	bench.config.production_setup.setup_production(user=user, yes=yes)


@@ -80,7 +84,7 @@ def setup_env(python="python3"):
@click.option("--force")
def setup_firewall(ssh_port=None, force=False):
	if not force:
		click.confirm("Setting up the firewall will block all ports except 80, 443 and {0}\nDo you want to continue?".format(ssh_port), abort=True)
		click.confirm(f"Setting up the firewall will block all ports except 80, 443 and {ssh_port}\nDo you want to continue?", abort=True)

	if not ssh_port:
		ssh_port = 22
@@ -93,7 +97,7 @@ def setup_firewall(ssh_port=None, force=False):
@click.option("--force")
def set_ssh_port(port, force=False):
	if not force:
		click.confirm("This will change your SSH Port to {}\nDo you want to continue?".format(port), abort=True)
		click.confirm(f"This will change your SSH Port to {port}\nDo you want to continue?", abort=True)

	run_playbook("roles/bench/tasks/change_ssh_port.yml", {"ssh_port": port})

@@ -103,6 +107,8 @@ def set_ssh_port(port, force=False):
@click.option("--custom-domain")
@click.option('-n', '--non-interactive', default=False, is_flag=True, help="Run command non-interactively. This flag restarts nginx and runs certbot non interactively. Shouldn't be used on 1'st attempt")
def setup_letsencrypt(site, custom_domain, non_interactive):
	import bench.config.lets_encrypt

	bench.config.lets_encrypt.setup_letsencrypt(site, custom_domain, bench_path=".", interactive=not non_interactive)


@@ -111,6 +117,8 @@ def setup_letsencrypt(site, custom_domain, non_interactive):
@click.option("--email")
@click.option("--exclude-base-domain", default=False, is_flag=True, help="SSL Certificate not applicable for base domain")
def setup_wildcard_ssl(domain, email, exclude_base_domain):
	import bench.config.lets_encrypt

	bench.config.lets_encrypt.setup_wildcard_ssl(domain, email, bench_path=".", exclude_base_domain=exclude_base_domain)


@@ -146,7 +154,6 @@ def setup_requirements(node=False, python=False):
@click.option("--port", help="Port on which you want to run bench manager", default=23624)
@click.option("--domain", help="Domain on which you want to run bench manager")
def setup_manager(yes=False, port=23624, domain=None):
	from six.moves import input
	from bench.utils import get_sites
	from bench.config.common_site_config import get_config
	from bench.config.nginx import make_bench_manager_nginx_conf
@@ -154,11 +161,7 @@ def setup_manager(yes=False, port=23624, domain=None):
	create_new_site = True

	if "bench-manager.local" in os.listdir("sites"):
		ans = input("Site already exists. Overwrite existing site? [Y/n]: ").lower()
		while ans not in ("y", "n", ""):
			ans = input("Please enter 'y' or 'n'. Site already exists. Overwrite existing site? [Y/n]: ").lower()
		if ans == "n":
			create_new_site = False
		create_new_site = click.confirm("Site already exists. Overwrite existing site?")

	if create_new_site:
		exec_cmd("bench new-site --force bench-manager.local")
@@ -253,8 +256,8 @@ def setup_roles(role, **kwargs):

@click.command("fail2ban", help="Setup fail2ban, an intrusion prevention software framework that protects computer servers from brute-force attacks")
@click.option("--maxretry", default=6, help="Number of matches (i.e. value of the counter) which triggers ban action on the IP. Default is 6 seconds" )
@click.option("--bantime", default=600, help="The counter is set to zero if no match is found within 'findtime' seconds. Default is 600 seconds")
@click.option("--findtime", default=600, help="Duration (in seconds) for IP to be banned for. Negative number for 'permanent' ban. Default is 600 seconds")
@click.option("--bantime", default=600, help="Duration (in seconds) for IP to be banned for. Negative number for 'permanent' ban. Default is 600 seconds")
@click.option("--findtime", default=600, help="The counter is set to zero if match found within 'findtime' seconds doesn't exceed 'maxretry'. Default is 600 seconds")
def setup_nginx_proxy_jail(**kwargs):
	run_playbook("roles/fail2ban/tasks/configure_nginx_jail.yml", extra_vars=kwargs)
@@ -15,11 +15,12 @@ from bench.utils import post_upgrade, patch_sites, build_assets
@click.option('--restart-supervisor', is_flag=True, help="Restart supervisor processes after update")
@click.option('--restart-systemd', is_flag=True, help="Restart systemd units after update")
@click.option('--no-backup', is_flag=True, help="If this flag is set, sites won't be backed up prior to updates. Note: This is not recommended in production.")
@click.option('--no-compile', is_flag=True, help="If set, Python bytecode won't be compiled before restarting the processes")
@click.option('--force', is_flag=True, help="Forces major version upgrades")
@click.option('--reset', is_flag=True, help="Hard resets git branch's to their new states overriding any changes and overriding rebase on pull")
def update(pull, apps, patch, build, requirements, restart_supervisor, restart_systemd, no_backup, force, reset):
def update(pull, apps, patch, build, requirements, restart_supervisor, restart_systemd, no_backup, no_compile, force, reset):
	from bench.utils import update
	update(pull=pull, apps=apps, patch=patch, build=build, requirements=requirements, restart_supervisor=restart_supervisor, restart_systemd=restart_systemd, backup=not no_backup, force=force, reset=reset)
	update(pull=pull, apps=apps, patch=patch, build=build, requirements=requirements, restart_supervisor=restart_supervisor, restart_systemd=restart_systemd, backup=not no_backup, compile=not no_compile, force=force, reset=reset)


@click.command('retry-upgrade', help="Retry a failed upgrade")
@@ -40,12 +41,6 @@ def switch_to_branch(branch, apps, upgrade=False):
	switch_to_branch(branch=branch, apps=list(apps), upgrade=upgrade)


@click.command('switch-to-master', help="[DEPRECATED]: Switch frappe and erpnext to master branch")
def switch_to_master():
	from bench.utils import log
	log("`switch-to-master` has been deprecated as master branches were renamed to version-11")


@click.command('switch-to-develop')
def switch_to_develop(upgrade=False):
	"Switch frappe and erpnext to develop branch"
@@ -98,12 +98,6 @@ def set_redis_socketio_host(host):
	set_redis_socketio_host(host)


@click.command('set-default-site', help="Set default site for bench")
@click.argument('site')
def set_default_site(site):
	from bench.utils import set_default_site
	set_default_site(site)


@click.command('download-translations', help="Download latest translations")
def download_translations():
@@ -111,7 +105,7 @@ def download_translations():
	download_translations_p()


@click.command('renew-lets-encrypt', help="Renew Let's Encrypt certificate")
@click.command('renew-lets-encrypt', help="Sets Up latest cron and Renew Let's Encrypt certificate")
def renew_lets_encrypt():
	from bench.config.lets_encrypt import renew_certs
	renew_certs()
@@ -122,7 +116,7 @@ def renew_lets_encrypt():
def backup_site(site):
	from bench.utils import get_sites, backup_site
	if site not in get_sites(bench_path='.'):
		print('Site `{0}` not found'.format(site))
		print(f'Site `{site}` not found')
		sys.exit(1)
	backup_site(site, bench_path='.')
@@ -1,6 +1,6 @@
"""Module for setting up system and respective bench configurations"""

# imports - third party imports
from jinja2 import Environment, PackageLoader

env = Environment(loader=PackageLoader('bench.config'))
def env():
	from jinja2 import Environment, PackageLoader
	return Environment(loader=PackageLoader('bench.config'))
@@ -1,22 +1,19 @@
# imports - standard imports
import getpass
import json
import multiprocessing
import os

# imports - third party imports
from six.moves.urllib.parse import urlparse


default_config = {
	'restart_supervisor_on_update': False,
	'restart_systemd_on_update': False,
	'auto_update': False,
	'serve_default_site': True,
	'rebase_on_pull': False,
	'frappe_user': getpass.getuser(),
	'shallow_clone': True,
	'background_workers': 1
	'background_workers': 1,
	'use_redis_auth': False
}

def make_config(bench_path):
@@ -54,8 +51,10 @@ def get_config_path(bench_path):
def get_gunicorn_workers():
	'''This function will return the maximum workers that can be started depending upon
	number of cpu's present on the machine'''
	import multiprocessing

	return {
		"gunicorn_workers": multiprocessing.cpu_count()
		"gunicorn_workers": multiprocessing.cpu_count() * 2 + 1
	}

def update_config_for_frappe(config, bench_path):
@@ -63,7 +62,7 @@ def update_config_for_frappe(config, bench_path):

	for key in ('redis_cache', 'redis_queue', 'redis_socketio'):
		if key not in config:
			config[key] = "redis://localhost:{0}".format(ports[key])
			config[key] = f"redis://localhost:{ports[key]}"

	for key in ('webserver_port', 'socketio_port', 'file_watcher_port'):
		if key not in config:
@@ -73,6 +72,8 @@ def update_config_for_frappe(config, bench_path):
	# TODO Optionally we need to add the host or domain name in case dns_multitenant is false

def make_ports(bench_path):
	from urllib.parse import urlparse

	benches_path = os.path.dirname(os.path.abspath(bench_path))

	default_ports = {
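The `gunicorn_workers` default above moves from `cpu_count()` to the common `2 * cores + 1` gunicorn heuristic, so a 4-core machine now gets 9 workers instead of 4. A quick sketch to preview the value the new default would compute on a given machine:

```sh
# Sketch: same arithmetic as the new get_gunicorn_workers() default
python3 -c "import multiprocessing; print(multiprocessing.cpu_count() * 2 + 1)"
```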
@ -3,8 +3,6 @@ import os
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
from crontab import CronTab
|
||||
from six.moves.urllib.request import urlretrieve
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
@ -26,11 +24,11 @@ def setup_letsencrypt(site, custom_domain, bench_path, interactive):
|
||||
domains = get_domains(site, bench_path)
|
||||
for d in domains:
|
||||
if (isinstance(d, dict) and d['domain']==custom_domain):
|
||||
print("SSL for Domain {0} already exists".format(custom_domain))
|
||||
print(f"SSL for Domain {custom_domain} already exists")
|
||||
return
|
||||
|
||||
if not custom_domain in domains:
|
||||
print("No custom domain named {0} set for site".format(custom_domain))
|
||||
print(f"No custom domain named {custom_domain} set for site")
|
||||
return
|
||||
|
||||
if interactive:
|
||||
@ -48,8 +46,8 @@ def setup_letsencrypt(site, custom_domain, bench_path, interactive):
|
||||
|
||||
|
||||
def create_config(site, custom_domain):
|
||||
config = bench.config.env.get_template('letsencrypt.cfg').render(domain=custom_domain or site)
|
||||
config_path = '/etc/letsencrypt/configs/{site}.cfg'.format(site=custom_domain or site)
|
||||
config = bench.config.env().get_template('letsencrypt.cfg').render(domain=custom_domain or site)
|
||||
config_path = f'/etc/letsencrypt/configs/{custom_domain or site}.cfg'
|
||||
create_dir_if_missing(config_path)
|
||||
|
||||
with open(config_path, 'w') as f:
|
||||
@ -62,13 +60,13 @@ def run_certbot_and_setup_ssl(site, custom_domain, bench_path, interactive=True)
|
||||
|
||||
try:
|
||||
interactive = '' if interactive else '-n'
|
||||
exec_cmd("{path} {interactive} --config /etc/letsencrypt/configs/{site}.cfg certonly".format(path=get_certbot_path(), interactive=interactive, site=custom_domain or site))
|
||||
exec_cmd(f"{get_certbot_path()} {interactive} --config /etc/letsencrypt/configs/{custom_domain or site}.cfg certonly")
|
||||
except CommandFailedError:
|
||||
service('nginx', 'start')
|
||||
print("There was a problem trying to setup SSL for your site")
|
||||
return
|
||||
|
||||
ssl_path = "/etc/letsencrypt/live/{site}/".format(site=custom_domain or site)
|
||||
ssl_path = f"/etc/letsencrypt/live/{custom_domain or site}/"
|
||||
ssl_config = { "ssl_certificate": os.path.join(ssl_path, "fullchain.pem"),
|
||||
"ssl_certificate_key": os.path.join(ssl_path, "privkey.pem") }
|
||||
|
||||
@ -86,12 +84,20 @@ def run_certbot_and_setup_ssl(site, custom_domain, bench_path, interactive=True)
|
||||
|
||||
|
||||
def setup_crontab():
|
||||
from crontab import CronTab
|
||||
|
||||
job_command = '/opt/certbot-auto renew -a nginx --post-hook "systemctl reload nginx"'
|
||||
job_comment = 'Renew lets-encrypt every month'
|
||||
print(f"Setting Up cron job to {job_comment}")
|
||||
|
||||
system_crontab = CronTab(user='root')
|
||||
if job_command not in str(system_crontab):
|
||||
job = system_crontab.new(command=job_command, comment="Renew lets-encrypt every month")
|
||||
job.day.on(1)
|
||||
system_crontab.write()
|
||||
|
||||
for job in system_crontab.find_comment(comment=job_comment): # Removes older entries
|
||||
system_crontab.remove(job)
|
||||
|
||||
job = system_crontab.new(command=job_command, comment=job_comment)
|
||||
job.setall('0 0 */1 * *') # Run at 00:00 every day-of-month
|
||||
system_crontab.write()
|
||||
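The rewritten setup_crontab() now removes any existing renewal job by its comment before re-adding it with an explicit schedule, so repeated runs do not stack duplicate entries. A hedged, in-memory sketch of that pattern with python-crontab (the shipped code works on CronTab(user='root') and calls write()):

from crontab import CronTab

job_command = '/opt/certbot-auto renew -a nginx --post-hook "systemctl reload nginx"'
job_comment = 'Renew lets-encrypt every month'

cron = CronTab(tab='')                      # in-memory crontab for the sketch
for job in cron.find_comment(job_comment):  # drop any stale entry first
	cron.remove(job)

job = cron.new(command=job_command, comment=job_comment)
job.setall('0 0 */1 * *')                   # 00:00 on every day-of-month
print(cron.render())                        # the real code persists with write()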
|
||||
|
||||
def create_dir_if_missing(path):
|
||||
@ -100,11 +106,13 @@ def create_dir_if_missing(path):
|
||||
|
||||
|
||||
def get_certbot():
|
||||
from urllib.request import urlretrieve
|
||||
|
||||
certbot_path = get_certbot_path()
|
||||
create_dir_if_missing(certbot_path)
|
||||
|
||||
if not os.path.isfile(certbot_path):
|
||||
urlretrieve ("https://dl.eff.org/certbot-auto", certbot_path)
|
||||
urlretrieve("https://dl.eff.org/certbot-auto", certbot_path)
|
||||
os.chmod(certbot_path, 0o744)
|
||||
|
||||
|
||||
@ -113,12 +121,15 @@ def get_certbot_path():
|
||||
|
||||
|
||||
def renew_certs():
|
||||
# Needs to be run with sudo
|
||||
click.confirm('Running this will stop the nginx service temporarily causing your sites to go offline\n'
|
||||
'Do you want to continue?',
|
||||
abort=True)
|
||||
|
||||
setup_crontab()
|
||||
|
||||
service('nginx', 'stop')
|
||||
exec_cmd("{path} renew".format(path=get_certbot_path()))
|
||||
exec_cmd(f"{get_certbot_path()} renew")
|
||||
service('nginx', 'start')
|
||||
|
||||
|
||||
@ -129,7 +140,7 @@ def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
|
||||
|
||||
if not domain.startswith('*.'):
|
||||
# add wildcard character to domain if missing
|
||||
domain_list.append('*.{0}'.format(domain))
|
||||
domain_list.append(f'*.{domain}')
|
||||
else:
|
||||
# include base domain based on flag
|
||||
domain_list.append(domain.replace('*.', ''))
|
||||
@ -148,19 +159,18 @@ def setup_wildcard_ssl(domain, email, bench_path, exclude_base_domain):
|
||||
|
||||
email_param = ''
|
||||
if email:
|
||||
email_param = '--email {0}'.format(email)
|
||||
email_param = f'--email {email}'
|
||||
|
||||
try:
|
||||
exec_cmd("{path} certonly --manual --preferred-challenges=dns {email_param} \
|
||||
exec_cmd(f"{get_certbot_path()} certonly --manual --preferred-challenges=dns {email_param} \
|
||||
--server https://acme-v02.api.letsencrypt.org/directory \
|
||||
--agree-tos -d {domain}".format(path=get_certbot_path(), domain=' -d '.join(domain_list),
|
||||
email_param=email_param))
|
||||
--agree-tos -d {' -d '.join(domain_list)}")
|
||||
|
||||
except CommandFailedError:
|
||||
print("There was a problem trying to setup SSL")
|
||||
return
|
||||
|
||||
ssl_path = "/etc/letsencrypt/live/{domain}/".format(domain=domain)
|
||||
ssl_path = f"/etc/letsencrypt/live/{domain}/"
|
||||
ssl_config = {
|
||||
"wildcard": {
|
||||
"domain": domain,
|
||||
|
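For the wildcard flow above, certbot receives one -d flag per domain, so the code expands a bare domain into its wildcard form (or keeps the base domain, depending on the flag) before joining the list. The helper below is an illustrative approximation with assumed names, not the shipped function:

def build_certbot_domain_args(domain, exclude_base_domain=False):
	domain_list = [domain]
	if not domain.startswith('*.'):
		domain_list.append(f'*.{domain}')                 # add the wildcard form
	elif not exclude_base_domain:
		domain_list.append(domain.replace('*.', ''))      # keep the bare domain too
	return ' -d '.join(domain_list)

print(build_certbot_domain_args('example.com'))   # example.com -d *.example.com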
@ -6,7 +6,6 @@ import string
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
from six import string_types
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
@ -20,7 +19,7 @@ def make_nginx_conf(bench_path, yes=False):
|
||||
if not click.confirm('nginx.conf already exists and this will overwrite it. Do you want to continue?'):
|
||||
return
|
||||
|
||||
template = bench.config.env.get_template('nginx.conf')
|
||||
template = bench.config.env().get_template('nginx.conf')
|
||||
bench_path = os.path.abspath(bench_path)
|
||||
sites_path = os.path.join(bench_path, "sites")
|
||||
|
||||
@ -59,7 +58,7 @@ def make_bench_manager_nginx_conf(bench_path, yes=False, port=23624, domain=None
|
||||
from bench.config.site_config import get_site_config
|
||||
from bench.config.common_site_config import get_config
|
||||
|
||||
template = bench.config.env.get_template('bench_manager_nginx.conf')
|
||||
template = bench.config.env().get_template('bench_manager_nginx.conf')
|
||||
bench_path = os.path.abspath(bench_path)
|
||||
sites_path = os.path.join(bench_path, "sites")
|
||||
|
||||
@ -165,15 +164,15 @@ def prepare_sites(config, bench_path):
|
||||
for port_number in ports_in_use:
|
||||
if len(ports_in_use[port_number]) > 1:
|
||||
port_conflict_index += 1
|
||||
message += "\n{0} - Port {1} is shared among sites:".format(port_conflict_index,port_number)
|
||||
message += f"\n{port_conflict_index} - Port {port_number} is shared among sites:"
|
||||
for site_name in ports_in_use[port_number]:
|
||||
message += " {0}".format(site_name)
|
||||
message += f" {site_name}"
|
||||
raise Exception(message)
|
||||
|
||||
if not dns_multitenant:
|
||||
message = "Port configuration list:"
|
||||
for site in sites_configs:
|
||||
message += "\n\nSite {0} assigned port: {1}".format(site["name"], site["port"])
|
||||
message += f"\n\nSite {site['name']} assigned port: {site['port']}"
|
||||
|
||||
print(message)
|
||||
|
||||
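The port-conflict report above comes from grouping sites by port and flagging any port claimed more than once. A minimal sketch of that grouping with made-up site data:

from collections import defaultdict

sites_configs = [
	{"name": "site1.local", "port": 8000},
	{"name": "site2.local", "port": 8000},
]

ports_in_use = defaultdict(list)
for site in sites_configs:
	ports_in_use[site["port"]].append(site["name"])

for port, sites in ports_in_use.items():
	if len(sites) > 1:
		print(f"Port {port} is shared among sites: {' '.join(sites)}")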
@ -196,13 +195,13 @@ def get_sites_with_config(bench_path):
|
||||
except Exception as e:
|
||||
strict_nginx = get_config(bench_path).get('strict_nginx')
|
||||
if strict_nginx:
|
||||
print("\n\nERROR: The site config for the site {} is broken.".format(site),
|
||||
print(f"\n\nERROR: The site config for the site {site} is broken.",
|
||||
"If you want this command to pass, instead of just throwing an error,",
|
||||
"You may remove the 'strict_nginx' flag from common_site_config.json or set it to 0",
|
||||
"\n\n")
|
||||
raise (e)
|
||||
else:
|
||||
print("\n\nWARNING: The site config for the site {} is broken.".format(site),
|
||||
print(f"\n\nWARNING: The site config for the site {site} is broken.",
|
||||
"If you want this command to fail, instead of just showing a warning,",
|
||||
"You may add the 'strict_nginx' flag to common_site_config.json and set it to 1",
|
||||
"\n\n")
|
||||
@ -218,7 +217,7 @@ def get_sites_with_config(bench_path):
|
||||
if dns_multitenant and site_config.get('domains'):
|
||||
for domain in site_config.get('domains'):
|
||||
# domain can be a string or a dict with 'domain', 'ssl_certificate', 'ssl_certificate_key'
|
||||
if isinstance(domain, string_types):
|
||||
if isinstance(domain, str):
|
||||
domain = { 'domain': domain }
|
||||
|
||||
domain['name'] = site
|
||||
|
@ -8,7 +8,7 @@ import click
|
||||
import bench
|
||||
from bench.app import use_rq
|
||||
from bench.config.common_site_config import get_config
|
||||
from bench.utils import find_executable
|
||||
from bench.utils import which
|
||||
|
||||
|
||||
def setup_procfile(bench_path, yes=False, skip_redis=False):
|
||||
@ -18,8 +18,8 @@ def setup_procfile(bench_path, yes=False, skip_redis=False):
|
||||
click.confirm('A Procfile already exists and this will overwrite it. Do you want to continue?',
|
||||
abort=True)
|
||||
|
||||
procfile = bench.config.env.get_template('Procfile').render(
|
||||
node=find_executable("node") or find_executable("nodejs"),
|
||||
procfile = bench.config.env().get_template('Procfile').render(
|
||||
node=which("node") or which("nodejs"),
|
||||
use_rq=use_rq(bench_path),
|
||||
webserver_port=config.get('webserver_port'),
|
||||
CI=os.environ.get('CI'),
|
||||
|
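The Procfile template is rendered with whichever node binary is found first; find_executable from bench.utils is replaced by which, assumed here to behave like shutil.which. A short sketch of the fallback:

from shutil import which

node = which("node") or which("nodejs")   # fall back to the Debian-style name
if node is None:
	print("node executable not found on PATH")
else:
	print(f"rendering Procfile with node at {node}")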
@ -9,7 +9,7 @@ from bench.config.common_site_config import get_config
|
||||
from bench.config.nginx import make_nginx_conf
|
||||
from bench.config.supervisor import generate_supervisor_config, update_supervisord_config
|
||||
from bench.config.systemd import generate_systemd_config
|
||||
from bench.utils import CommandFailedError, exec_cmd, find_executable, fix_prod_setup_perms, get_bench_name, get_cmd_output, log
|
||||
from bench.utils import CommandFailedError, exec_cmd, which, fix_prod_setup_perms, get_bench_name, get_cmd_output, log
|
||||
|
||||
|
||||
logger = logging.getLogger(bench.PROJECT_NAME)
|
||||
@ -17,13 +17,13 @@ logger = logging.getLogger(bench.PROJECT_NAME)
|
||||
|
||||
def setup_production_prerequisites():
|
||||
"""Installs ansible, fail2banc, NGINX and supervisor"""
|
||||
if not find_executable("ansible"):
|
||||
exec_cmd("sudo {0} -m pip install ansible".format(sys.executable))
|
||||
if not find_executable("fail2ban-client"):
|
||||
if not which("ansible"):
|
||||
exec_cmd(f"sudo {sys.executable} -m pip install ansible")
|
||||
if not which("fail2ban-client"):
|
||||
exec_cmd("bench setup role fail2ban")
|
||||
if not find_executable("nginx"):
|
||||
if not which("nginx"):
|
||||
exec_cmd("bench setup role nginx")
|
||||
if not find_executable("supervisord"):
|
||||
if not which("supervisord"):
|
||||
exec_cmd("bench setup role supervisor")
|
||||
|
||||
|
||||
@ -47,13 +47,12 @@ def setup_production(user, bench_path='.', yes=False):
|
||||
remove_default_nginx_configs()
|
||||
|
||||
bench_name = get_bench_name(bench_path)
|
||||
nginx_conf = '/etc/nginx/conf.d/{bench_name}.conf'.format(bench_name=bench_name)
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
|
||||
print("Setting Up symlinks and reloading services...")
|
||||
if get_config(bench_path).get('restart_supervisor_on_update'):
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), '{bench_name}.{extn}'.format(
|
||||
bench_name=bench_name, extn=supervisor_conf_extn))
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), f'{bench_name}.{supervisor_conf_extn}')
|
||||
|
||||
# Check if symlink exists, If not then create it.
|
||||
if not os.path.islink(supervisor_conf):
|
||||
@ -76,8 +75,7 @@ def disable_production(bench_path='.'):
|
||||
|
||||
# supervisorctl
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), '{bench_name}.{extn}'.format(
|
||||
bench_name=bench_name, extn=supervisor_conf_extn))
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), f'{bench_name}.{supervisor_conf_extn}')
|
||||
|
||||
if os.path.islink(supervisor_conf):
|
||||
os.unlink(supervisor_conf)
|
||||
@ -86,7 +84,7 @@ def disable_production(bench_path='.'):
|
||||
reload_supervisor()
|
||||
|
||||
# nginx
|
||||
nginx_conf = '/etc/nginx/conf.d/{bench_name}.conf'.format(bench_name=bench_name)
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
|
||||
if os.path.islink(nginx_conf):
|
||||
os.unlink(nginx_conf)
|
||||
@ -95,24 +93,24 @@ def disable_production(bench_path='.'):
|
||||
|
||||
|
||||
def service(service_name, service_option):
|
||||
if os.path.basename(find_executable('systemctl') or '') == 'systemctl' and is_running_systemd():
|
||||
systemctl_cmd = "sudo {service_manager} {service_option} {service_name}"
|
||||
exec_cmd(systemctl_cmd.format(service_manager='systemctl', service_option=service_option, service_name=service_name))
|
||||
if os.path.basename(which('systemctl') or '') == 'systemctl' and is_running_systemd():
|
||||
exec_cmd(f"sudo systemctl {service_option} {service_name}")
|
||||
|
||||
elif os.path.basename(find_executable('service') or '') == 'service':
|
||||
service_cmd = "sudo {service_manager} {service_name} {service_option}"
|
||||
exec_cmd(service_cmd.format(service_manager='service', service_name=service_name, service_option=service_option))
|
||||
elif os.path.basename(which('service') or '') == 'service':
|
||||
exec_cmd(f"sudo service {service_name} {service_option}")
|
||||
|
||||
else:
|
||||
# look for 'service_manager' and 'service_manager_command' in environment
|
||||
service_manager = os.environ.get("BENCH_SERVICE_MANAGER")
|
||||
if service_manager:
|
||||
service_manager_command = (os.environ.get("BENCH_SERVICE_MANAGER_COMMAND")
|
||||
or "{service_manager} {service_option} {service}").format(service_manager=service_manager, service=service, service_option=service_option)
|
||||
service_manager_command = (
|
||||
os.environ.get("BENCH_SERVICE_MANAGER_COMMAND")
|
||||
or f"{service_manager} {service_option} {service}"
|
||||
)
|
||||
exec_cmd(service_manager_command)
|
||||
|
||||
else:
|
||||
log("No service manager found: '{0} {1}' failed to execute".format(service_name, service_option), level=2)
|
||||
log(f"No service manager found: '{service_name} {service_option}' failed to execute", level=2)
|
||||
|
||||
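service() now dispatches in three steps: systemctl when available, the service wrapper next, and finally an operator-supplied manager from the environment. A condensed sketch that omits the is_running_systemd() check and only prints the command it would hand to exec_cmd:

import os
from shutil import which

def service(service_name, service_option):
	if which('systemctl'):
		cmd = f"sudo systemctl {service_option} {service_name}"
	elif which('service'):
		cmd = f"sudo service {service_name} {service_option}"
	else:
		manager = os.environ.get("BENCH_SERVICE_MANAGER")
		if not manager:
			print(f"No service manager found: '{service_name} {service_option}' failed to execute")
			return
		cmd = os.environ.get("BENCH_SERVICE_MANAGER_COMMAND") or f"{manager} {service_option} {service_name}"
	print(f"would run: {cmd}")  # the real code passes cmd to exec_cmd()

service('nginx', 'reload')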
|
||||
def get_supervisor_confdir():
|
||||
@ -145,19 +143,19 @@ def is_running_systemd():
|
||||
|
||||
|
||||
def reload_supervisor():
|
||||
supervisorctl = find_executable('supervisorctl')
|
||||
supervisorctl = which('supervisorctl')
|
||||
|
||||
try:
|
||||
# first try reread/update
|
||||
exec_cmd('{0} reread'.format(supervisorctl))
|
||||
exec_cmd('{0} update'.format(supervisorctl))
|
||||
exec_cmd(f'{supervisorctl} reread')
|
||||
exec_cmd(f'{supervisorctl} update')
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
|
||||
try:
|
||||
# something is wrong, so try reloading
|
||||
exec_cmd('{0} reload'.format(supervisorctl))
|
||||
exec_cmd(f'{supervisorctl} reload')
|
||||
return
|
||||
except CommandFailedError:
|
||||
pass
|
||||
@ -178,7 +176,7 @@ def reload_supervisor():
|
||||
|
||||
def reload_nginx():
|
||||
try:
|
||||
exec_cmd('sudo {0} -t'.format(find_executable('nginx')))
|
||||
exec_cmd(f"sudo {which('nginx')} -t")
|
||||
except:
|
||||
raise
|
||||
|
||||
|
@ -3,17 +3,16 @@ import os
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
# imports - third party imports
|
||||
import semantic_version
|
||||
from six.moves.urllib.parse import urlparse
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
from bench.config.common_site_config import get_config
|
||||
|
||||
|
||||
def generate_config(bench_path):
|
||||
from urllib.parse import urlparse
|
||||
|
||||
config = get_config(bench_path)
|
||||
redis_version = get_redis_version()
|
||||
|
||||
ports = {}
|
||||
for key in ('redis_cache', 'redis_queue', 'redis_socketio'):
|
||||
@ -24,6 +23,7 @@ def generate_config(bench_path):
|
||||
context={
|
||||
"port": ports['redis_queue'],
|
||||
"bench_path": os.path.abspath(bench_path),
|
||||
"redis_version": redis_version
|
||||
},
|
||||
bench_path=bench_path
|
||||
)
|
||||
@ -32,6 +32,7 @@ def generate_config(bench_path):
|
||||
template_name='redis_socketio.conf',
|
||||
context={
|
||||
"port": ports['redis_socketio'],
|
||||
"redis_version": redis_version
|
||||
},
|
||||
bench_path=bench_path
|
||||
)
|
||||
@ -41,7 +42,7 @@ def generate_config(bench_path):
|
||||
context={
|
||||
"maxmemory": config.get('cache_maxmemory', get_max_redis_memory()),
|
||||
"port": ports['redis_cache'],
|
||||
"redis_version": get_redis_version(),
|
||||
"redis_version": redis_version
|
||||
},
|
||||
bench_path=bench_path
|
||||
)
|
||||
@ -51,16 +52,33 @@ def generate_config(bench_path):
|
||||
if not os.path.exists(pid_path):
|
||||
os.makedirs(pid_path)
|
||||
|
||||
# ACL feature is introduced in Redis 6.0
|
||||
if redis_version < 6.0:
|
||||
return
|
||||
|
||||
# make ACL files
|
||||
acl_rq_path = os.path.join(bench_path, "config", "redis_queue.acl")
|
||||
acl_redis_cache_path = os.path.join(bench_path, "config", "redis_cache.acl")
|
||||
acl_redis_socketio_path = os.path.join(bench_path, "config", "redis_socketio.acl")
|
||||
open(acl_rq_path, 'a').close()
|
||||
open(acl_redis_cache_path, 'a').close()
|
||||
open(acl_redis_socketio_path, 'a').close()
|
||||
|
||||
def write_redis_config(template_name, context, bench_path):
|
||||
template = bench.config.env.get_template(template_name)
|
||||
template = bench.config.env().get_template(template_name)
|
||||
|
||||
if "config_path" not in context:
|
||||
context["config_path"] = os.path.abspath(os.path.join(bench_path, "config"))
|
||||
|
||||
if "pid_path" not in context:
|
||||
context["pid_path"] = os.path.abspath(os.path.join(bench_path, "config", "pids"))
|
||||
context["pid_path"] = os.path.join(context["config_path"], "pids")
|
||||
|
||||
with open(os.path.join(bench_path, 'config', template_name), 'w') as f:
|
||||
f.write(template.render(**context))
|
||||
|
||||
def get_redis_version():
|
||||
import semantic_version
|
||||
|
||||
version_string = subprocess.check_output('redis-server --version', shell=True)
|
||||
version_string = version_string.decode('utf-8').strip()
|
||||
# extract version number from string
|
||||
@ -69,7 +87,7 @@ def get_redis_version():
|
||||
return None
|
||||
|
||||
version = semantic_version.Version(version[0], partial=True)
|
||||
return float('{major}.{minor}'.format(major=version.major, minor=version.minor))
|
||||
return float(f'{version.major}.{version.minor}')
|
||||
|
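The ACL files above are only written when the parsed Redis version is at least 6.0, which is the float that get_redis_version() derives from redis-server --version. A hedged sketch of that parse-and-compare step; the regex and sample string are illustrative, the shipped code goes through semantic_version:

import re

def parse_redis_version(version_string):
	# e.g. "Redis server v=6.2.6 sha=00000000:0 malloc=jemalloc-5.1.0 ..."
	match = re.search(r"v=(\d+)\.(\d+)", version_string)
	if not match:
		return None
	return float(f"{match.group(1)}.{match.group(2)}")

version = parse_redis_version("Redis server v=6.2.6 sha=00000000:0")
if version and version >= 6.0:
	print("Redis supports ACL files (aclfile directive)")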
||||
def get_max_redis_memory():
|
||||
try:
|
||||
|
@ -4,7 +4,6 @@ import os
|
||||
from collections import defaultdict
|
||||
|
||||
# imports - module imports
|
||||
from bench.config.nginx import make_nginx_conf
|
||||
from bench.utils import get_sites
|
||||
|
||||
|
||||
@ -35,6 +34,8 @@ def set_ssl_certificate_key(site, ssl_certificate_key, bench_path='.', gen_confi
|
||||
set_site_config_nginx_property(site, {"ssl_certificate_key": ssl_certificate_key}, bench_path=bench_path, gen_config=gen_config)
|
||||
|
||||
def set_site_config_nginx_property(site, config, bench_path='.', gen_config=True):
|
||||
from bench.config.nginx import make_nginx_conf
|
||||
|
||||
if site not in get_sites(bench_path=bench_path):
|
||||
raise Exception("No such site")
|
||||
update_site_config(site, config, bench_path=bench_path)
|
||||
@ -48,7 +49,7 @@ def add_domain(site, domain, ssl_certificate, ssl_certificate_key, bench_path='.
|
||||
domains = get_domains(site, bench_path)
|
||||
for d in domains:
|
||||
if (isinstance(d, dict) and d['domain']==domain) or d==domain:
|
||||
print("Domain {0} already exists".format(domain))
|
||||
print(f"Domain {domain} already exists")
|
||||
return
|
||||
|
||||
if ssl_certificate_key and ssl_certificate:
|
||||
|
@ -5,24 +5,23 @@ import os
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
from bench.app import get_current_frappe_version, use_rq
|
||||
from bench.utils import get_bench_name, find_executable
|
||||
from bench.app import use_rq
|
||||
from bench.utils import get_bench_name, which
|
||||
from bench.config.common_site_config import get_config, update_config, get_gunicorn_workers
|
||||
|
||||
# imports - third party imports
|
||||
import click
|
||||
from six.moves import configparser
|
||||
|
||||
|
||||
logger = logging.getLogger(bench.PROJECT_NAME)
|
||||
|
||||
|
||||
def generate_supervisor_config(bench_path, user=None, yes=False):
|
||||
def generate_supervisor_config(bench_path, user=None, yes=False, skip_redis=False):
|
||||
"""Generate supervisor config for respective bench path"""
|
||||
if not user:
|
||||
user = getpass.getuser()
|
||||
|
||||
template = bench.config.env.get_template('supervisor.conf')
|
||||
template = bench.config.env().get_template('supervisor.conf')
|
||||
config = get_config(bench_path=bench_path)
|
||||
bench_dir = os.path.abspath(bench_path)
|
||||
|
||||
@ -30,11 +29,10 @@ def generate_supervisor_config(bench_path, user=None, yes=False):
|
||||
"bench_dir": bench_dir,
|
||||
"sites_dir": os.path.join(bench_dir, 'sites'),
|
||||
"user": user,
|
||||
"frappe_version": get_current_frappe_version(bench_path),
|
||||
"use_rq": use_rq(bench_path),
|
||||
"http_timeout": config.get("http_timeout", 120),
|
||||
"redis_server": find_executable('redis-server'),
|
||||
"node": find_executable('node') or find_executable('nodejs'),
|
||||
"redis_server": which('redis-server'),
|
||||
"node": which('node') or which('nodejs'),
|
||||
"redis_cache_config": os.path.join(bench_dir, 'config', 'redis_cache.conf'),
|
||||
"redis_socketio_config": os.path.join(bench_dir, 'config', 'redis_socketio.conf'),
|
||||
"redis_queue_config": os.path.join(bench_dir, 'config', 'redis_queue.conf'),
|
||||
@ -42,7 +40,8 @@ def generate_supervisor_config(bench_path, user=None, yes=False):
|
||||
"gunicorn_workers": config.get('gunicorn_workers', get_gunicorn_workers()["gunicorn_workers"]),
|
||||
"bench_name": get_bench_name(bench_path),
|
||||
"background_workers": config.get('background_workers') or 1,
|
||||
"bench_cmd": find_executable('bench')
|
||||
"bench_cmd": which('bench'),
|
||||
"skip_redis": skip_redis,
|
||||
})
|
||||
|
||||
conf_path = os.path.join(bench_path, 'config', 'supervisor.conf')
|
||||
@ -68,6 +67,7 @@ def get_supervisord_conf():
|
||||
|
||||
def update_supervisord_config(user=None, yes=False):
|
||||
"""From bench v5.x, we're moving to supervisor running as user"""
|
||||
import configparser
|
||||
from bench.config.production_setup import service
|
||||
|
||||
if not user:
|
||||
@ -77,7 +77,7 @@ def update_supervisord_config(user=None, yes=False):
|
||||
section = "unix_http_server"
|
||||
updated_values = {
|
||||
"chmod": "0760",
|
||||
"chown": "{user}:{user}".format(user=user)
|
||||
"chown": f"{user}:{user}"
|
||||
}
|
||||
supervisord_conf_changes = ""
|
||||
|
||||
@ -90,7 +90,7 @@ def update_supervisord_config(user=None, yes=False):
|
||||
|
||||
if section not in config.sections():
|
||||
config.add_section(section)
|
||||
action = "Section {0} Added".format(section)
|
||||
action = f"Section {section} Added"
|
||||
logger.log(action)
|
||||
supervisord_conf_changes += '\n' + action
|
||||
|
||||
@ -102,7 +102,7 @@ def update_supervisord_config(user=None, yes=False):
|
||||
|
||||
if current_value.strip() != value:
|
||||
config.set(section, key, value)
|
||||
action = "Updated supervisord.conf: '{0}' changed from '{1}' to '{2}'".format(key, current_value, value)
|
||||
action = f"Updated supervisord.conf: '{key}' changed from '{current_value}' to '{value}'"
|
||||
logger.log(action)
|
||||
supervisord_conf_changes += '\n' + action
|
||||
|
||||
@ -111,14 +111,14 @@ def update_supervisord_config(user=None, yes=False):
|
||||
return
|
||||
|
||||
if not yes:
|
||||
click.confirm("{0} will be updated with the following values:\n{1}\nDo you want to continue?".format(supervisord_conf, supervisord_conf_changes), abort=True)
|
||||
click.confirm(f"{supervisord_conf} will be updated with the following values:\n{supervisord_conf_changes}\nDo you want to continue?", abort=True)
|
||||
|
||||
try:
|
||||
with open(supervisord_conf, "w") as f:
|
||||
config.write(f)
|
||||
logger.log("Updated supervisord.conf at '{0}'".format(supervisord_conf))
|
||||
logger.log(f"Updated supervisord.conf at '{supervisord_conf}'")
|
||||
except Exception as e:
|
||||
logger.log("Updating supervisord.conf failed due to '{0}'".format(e))
|
||||
logger.log(f"Updating supervisord.conf failed due to '{e}'")
|
||||
|
||||
# Reread supervisor configuration, reload supervisord and supervisorctl, restart services that were started
|
||||
service('supervisor', 'reload')
|
||||
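update_supervisord_config() edits supervisord.conf through configparser: ensure the section exists, compare each current value, and rewrite the file only when something changed. A self-contained sketch with an illustrative path and user:

import configparser

config = configparser.ConfigParser()
config.read("supervisord.conf")          # path and user below are illustrative

section, updated_values = "unix_http_server", {"chmod": "0760", "chown": "frappe:frappe"}
if section not in config.sections():
	config.add_section(section)

changed = False
for key, value in updated_values.items():
	current = config.get(section, key, fallback="")
	if current.strip() != value:
		config.set(section, key, value)
		changed = True

if changed:
	with open("supervisord.conf", "w") as f:
		config.write(f)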
|
@ -7,9 +7,9 @@ import click
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
from bench.app import get_current_frappe_version, use_rq
|
||||
from bench.app import use_rq
|
||||
from bench.config.common_site_config import get_config, get_gunicorn_workers, update_config
|
||||
from bench.utils import exec_cmd, find_executable, get_bench_name
|
||||
from bench.utils import exec_cmd, which, get_bench_name
|
||||
|
||||
|
||||
def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
@ -25,7 +25,7 @@ def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
bench_name = get_bench_name(bench_path)
|
||||
|
||||
if stop:
|
||||
exec_cmd('sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)'.format(bench_name=bench_name))
|
||||
exec_cmd(f'sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)')
|
||||
return
|
||||
|
||||
if create_symlinks:
|
||||
@ -51,11 +51,10 @@ def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
"bench_dir": bench_dir,
|
||||
"sites_dir": os.path.join(bench_dir, 'sites'),
|
||||
"user": user,
|
||||
"frappe_version": get_current_frappe_version(bench_path),
|
||||
"use_rq": use_rq(bench_path),
|
||||
"http_timeout": config.get("http_timeout", 120),
|
||||
"redis_server": find_executable('redis-server'),
|
||||
"node": find_executable('node') or find_executable('nodejs'),
|
||||
"redis_server": which('redis-server'),
|
||||
"node": which('node') or which('nodejs'),
|
||||
"redis_cache_config": os.path.join(bench_dir, 'config', 'redis_cache.conf'),
|
||||
"redis_socketio_config": os.path.join(bench_dir, 'config', 'redis_socketio.conf'),
|
||||
"redis_queue_config": os.path.join(bench_dir, 'config', 'redis_queue.conf'),
|
||||
@ -63,7 +62,7 @@ def generate_systemd_config(bench_path, user=None, yes=False,
|
||||
"gunicorn_workers": config.get('gunicorn_workers', get_gunicorn_workers()["gunicorn_workers"]),
|
||||
"bench_name": get_bench_name(bench_path),
|
||||
"worker_target_wants": " ".join(background_workers),
|
||||
"bench_cmd": find_executable('bench')
|
||||
"bench_cmd": which('bench')
|
||||
}
|
||||
|
||||
if not yes:
|
||||
@ -85,7 +84,7 @@ def setup_systemd_directory(bench_path):
|
||||
|
||||
def setup_main_config(bench_info, bench_path):
|
||||
# Main config
|
||||
bench_template = bench.config.env.get_template('systemd/frappe-bench.target')
|
||||
bench_template = bench.config.env().get_template('systemd/frappe-bench.target')
|
||||
bench_config = bench_template.render(**bench_info)
|
||||
bench_config_path = os.path.join(bench_path, 'config', 'systemd' , bench_info.get("bench_name") + '.target')
|
||||
|
||||
@ -94,11 +93,11 @@ def setup_main_config(bench_info, bench_path):
|
||||
|
||||
def setup_workers_config(bench_info, bench_path):
|
||||
# Worker Group
|
||||
bench_workers_target_template = bench.config.env.get_template('systemd/frappe-bench-workers.target')
|
||||
bench_default_worker_template = bench.config.env.get_template('systemd/frappe-bench-frappe-default-worker.service')
|
||||
bench_short_worker_template = bench.config.env.get_template('systemd/frappe-bench-frappe-short-worker.service')
|
||||
bench_long_worker_template = bench.config.env.get_template('systemd/frappe-bench-frappe-long-worker.service')
|
||||
bench_schedule_worker_template = bench.config.env.get_template('systemd/frappe-bench-frappe-schedule.service')
|
||||
bench_workers_target_template = bench.config.env().get_template('systemd/frappe-bench-workers.target')
|
||||
bench_default_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-default-worker.service')
|
||||
bench_short_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-short-worker.service')
|
||||
bench_long_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-long-worker.service')
|
||||
bench_schedule_worker_template = bench.config.env().get_template('systemd/frappe-bench-frappe-schedule.service')
|
||||
|
||||
bench_workers_target_config = bench_workers_target_template.render(**bench_info)
|
||||
bench_default_worker_config = bench_default_worker_template.render(**bench_info)
|
||||
@ -129,9 +128,9 @@ def setup_workers_config(bench_info, bench_path):
|
||||
|
||||
def setup_web_config(bench_info, bench_path):
|
||||
# Web Group
|
||||
bench_web_target_template = bench.config.env.get_template('systemd/frappe-bench-web.target')
|
||||
bench_web_service_template = bench.config.env.get_template('systemd/frappe-bench-frappe-web.service')
|
||||
bench_node_socketio_template = bench.config.env.get_template('systemd/frappe-bench-node-socketio.service')
|
||||
bench_web_target_template = bench.config.env().get_template('systemd/frappe-bench-web.target')
|
||||
bench_web_service_template = bench.config.env().get_template('systemd/frappe-bench-frappe-web.service')
|
||||
bench_node_socketio_template = bench.config.env().get_template('systemd/frappe-bench-node-socketio.service')
|
||||
|
||||
bench_web_target_config = bench_web_target_template.render(**bench_info)
|
||||
bench_web_service_config = bench_web_service_template.render(**bench_info)
|
||||
@ -152,10 +151,10 @@ def setup_web_config(bench_info, bench_path):
|
||||
|
||||
def setup_redis_config(bench_info, bench_path):
|
||||
# Redis Group
|
||||
bench_redis_target_template = bench.config.env.get_template('systemd/frappe-bench-redis.target')
|
||||
bench_redis_cache_template = bench.config.env.get_template('systemd/frappe-bench-redis-cache.service')
|
||||
bench_redis_queue_template = bench.config.env.get_template('systemd/frappe-bench-redis-queue.service')
|
||||
bench_redis_socketio_template = bench.config.env.get_template('systemd/frappe-bench-redis-socketio.service')
|
||||
bench_redis_target_template = bench.config.env().get_template('systemd/frappe-bench-redis.target')
|
||||
bench_redis_cache_template = bench.config.env().get_template('systemd/frappe-bench-redis-cache.service')
|
||||
bench_redis_queue_template = bench.config.env().get_template('systemd/frappe-bench-redis-queue.service')
|
||||
bench_redis_socketio_template = bench.config.env().get_template('systemd/frappe-bench-redis-socketio.service')
|
||||
|
||||
bench_redis_target_config = bench_redis_target_template.render(**bench_info)
|
||||
bench_redis_cache_config = bench_redis_cache_template.render(**bench_info)
|
||||
@ -186,25 +185,15 @@ def _create_symlinks(bench_path):
|
||||
unit_files = get_unit_files(bench_dir)
|
||||
for unit_file in unit_files:
|
||||
filename = "".join(unit_file)
|
||||
exec_cmd('sudo ln -s {config_path}/{unit_file} {etc_systemd_system}/{unit_file_init}'.format(
|
||||
config_path=config_path,
|
||||
etc_systemd_system=etc_systemd_system,
|
||||
unit_file=filename,
|
||||
unit_file_init="".join(unit_file)
|
||||
))
|
||||
exec_cmd(f'sudo ln -s {config_path}/{filename} {etc_systemd_system}/{"".join(unit_file)}')
|
||||
exec_cmd('sudo systemctl daemon-reload')
|
||||
|
||||
def _delete_symlinks(bench_path):
|
||||
bench_dir = os.path.abspath(bench_path)
|
||||
etc_systemd_system = os.path.join('/', 'etc', 'systemd', 'system')
|
||||
config_path = os.path.join(bench_dir, 'config', 'systemd')
|
||||
unit_files = get_unit_files(bench_dir)
|
||||
for unit_file in unit_files:
|
||||
exec_cmd('sudo rm {etc_systemd_system}/{unit_file_init}'.format(
|
||||
config_path=config_path,
|
||||
etc_systemd_system=etc_systemd_system,
|
||||
unit_file_init="".join(unit_file)
|
||||
))
|
||||
exec_cmd(f'sudo rm {etc_systemd_system}/{"".join(unit_file)}')
|
||||
exec_cmd('sudo systemctl daemon-reload')
|
||||
|
||||
def get_unit_files(bench_path):
|
||||
|
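The compacted f-string above still does the same work: link every generated unit file into /etc/systemd/system and reload the daemon. A dry-run sketch with illustrative paths that only prints the commands the real code shells out through exec_cmd:

import os

config_path = "/home/frappe/frappe-bench/config/systemd"   # illustrative paths
etc_systemd_system = "/etc/systemd/system"

if os.path.isdir(config_path):
	for unit_file in os.listdir(config_path):
		target = os.path.join(etc_systemd_system, unit_file)
		if not os.path.islink(target):
			print(f"sudo ln -s {os.path.join(config_path, unit_file)} {target}")
	print("sudo systemctl daemon-reload")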
@ -29,9 +29,12 @@ server {
|
||||
{% if allow_rate_limiting %}
|
||||
limit_conn per_host_{{ bench_name_hash }} 8;
|
||||
{% endif %}
|
||||
|
||||
proxy_buffer_size 128k;
|
||||
proxy_buffers 4 256k;
|
||||
proxy_busy_buffers_size 256k;
|
||||
|
||||
{% if ssl_certificate and ssl_certificate_key %}
|
||||
ssl on;
|
||||
ssl_certificate {{ ssl_certificate }};
|
||||
ssl_certificate_key {{ ssl_certificate_key }};
|
||||
ssl_session_timeout 5m;
|
||||
|
@ -9,3 +9,6 @@ appendonly no
|
||||
{% if redis_version and redis_version >= 2.2 %}
|
||||
save ""
|
||||
{% endif %}
|
||||
{% if redis_version and redis_version >= 6.0 %}
|
||||
aclfile {{ config_path }}/redis_cache.acl
|
||||
{% endif %}
|
||||
|
@ -3,3 +3,6 @@ dir {{ pid_path }}
|
||||
pidfile {{ pid_path }}/redis_queue.pid
|
||||
bind 127.0.0.1
|
||||
port {{ port }}
|
||||
{% if redis_version and redis_version >= 6.0 %}
|
||||
aclfile {{ config_path }}/redis_queue.acl
|
||||
{% endif %}
|
||||
|
@ -3,3 +3,6 @@ dir {{ pid_path }}
|
||||
pidfile {{ pid_path }}/redis_socketio.pid
|
||||
bind 127.0.0.1
|
||||
port {{ port }}
|
||||
{% if redis_version and redis_version >= 6.0 %}
|
||||
aclfile {{ config_path }}/redis_socketio.acl
|
||||
{% endif %}
|
||||
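Each of the three redis_*.conf templates gains the same guarded aclfile line, emitted only when redis_version is 6.0 or newer. A small rendering sketch with an inline template and sample values:

from jinja2 import Template

template = Template(
	"port {{ port }}\n"
	"{% if redis_version and redis_version >= 6.0 %}"
	"aclfile {{ config_path }}/redis_queue.acl\n"
	"{% endif %}"
)
print(template.render(port=11000, redis_version=6.2, config_path="/home/frappe/frappe-bench/config"))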
|
@ -114,6 +114,7 @@ killasgroup=true
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% if not skip_redis %}
|
||||
[program:{{ bench_name }}-redis-cache]
|
||||
command={{ redis_server }} {{ redis_cache_config }}
|
||||
priority=1
|
||||
@ -133,8 +134,9 @@ stdout_logfile={{ bench_dir }}/logs/redis-queue.log
|
||||
stderr_logfile={{ bench_dir }}/logs/redis-queue.error.log
|
||||
user={{ user }}
|
||||
directory={{ sites_dir }}
|
||||
{% endif %}
|
||||
|
||||
{% if frappe_version > 5 %}
|
||||
{% if not skip_redis %}
|
||||
[program:{{ bench_name }}-redis-socketio]
|
||||
command={{ redis_server }} {{ redis_socketio_config }}
|
||||
priority=1
|
||||
@ -144,6 +146,7 @@ stdout_logfile={{ bench_dir }}/logs/redis-socketio.log
|
||||
stderr_logfile={{ bench_dir }}/logs/redis-socketio.error.log
|
||||
user={{ user }}
|
||||
directory={{ sites_dir }}
|
||||
{% endif %}
|
||||
|
||||
{% if node %}
|
||||
[program:{{ bench_name }}-node-socketio]
|
||||
@ -157,8 +160,6 @@ user={{ user }}
|
||||
directory={{ bench_dir }}
|
||||
{% endif %}
|
||||
|
||||
{% endif %}
|
||||
|
||||
[group:{{ bench_name }}-web]
|
||||
programs={{ bench_name }}-frappe-web {%- if node -%} ,{{ bench_name }}-node-socketio {%- endif%}
|
||||
|
||||
@ -174,5 +175,7 @@ programs={{ bench_name }}-frappe-workerbeat,{{ bench_name }}-frappe-worker,{{ be
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% if not skip_redis %}
|
||||
[group:{{ bench_name }}-redis]
|
||||
programs={{ bench_name }}-redis-cache,{{ bench_name }}-redis-queue {%- if frappe_version > 5 -%} ,{{ bench_name }}-redis-socketio {%- endif %}
|
||||
programs={{ bench_name }}-redis-cache,{{ bench_name }}-redis-queue,{{ bench_name }}-redis-socketio
|
||||
{% endif %}
|
||||
|
@ -34,13 +34,13 @@ def is_production_set(bench_path):
|
||||
bench_name = get_bench_name(bench_path)
|
||||
|
||||
supervisor_conf_extn = "ini" if is_centos7() else "conf"
|
||||
supervisor_conf_file_name = '{bench_name}.{extn}'.format(bench_name=bench_name, extn=supervisor_conf_extn)
|
||||
supervisor_conf_file_name = f'{bench_name}.{supervisor_conf_extn}'
|
||||
supervisor_conf = os.path.join(get_supervisor_confdir(), supervisor_conf_file_name)
|
||||
|
||||
if os.path.exists(supervisor_conf):
|
||||
production_setup = production_setup or True
|
||||
|
||||
nginx_conf = '/etc/nginx/conf.d/{bench_name}.conf'.format(bench_name=bench_name)
|
||||
nginx_conf = f'/etc/nginx/conf.d/{bench_name}.conf'
|
||||
|
||||
if os.path.exists(nginx_conf):
|
||||
production_setup = production_setup or True
|
||||
@ -54,7 +54,7 @@ def execute(bench_path):
|
||||
|
||||
if is_sudoers_set():
|
||||
if is_production_set(bench_path):
|
||||
exec_cmd("sudo bench setup supervisor --yes --user {user}".format(user=user))
|
||||
exec_cmd(f"sudo bench setup supervisor --yes --user {user}")
|
||||
service("supervisord", "restart")
|
||||
|
||||
exec_cmd("sudo bench setup sudoers {user}".format(user=user))
|
||||
exec_cmd(f"sudo bench setup sudoers {user}")
|
||||
|
@ -11,7 +11,7 @@
|
||||
|
||||
- name: Set home folder perms
|
||||
file:
|
||||
path: '/home/{{ frappe_user }}'
|
||||
path: '{{ user_directory }}'
|
||||
mode: 'o+rx'
|
||||
owner: '{{ frappe_user }}'
|
||||
group: '{{ frappe_user }}'
|
||||
|
@ -33,7 +33,7 @@
|
||||
|
||||
- name: Fix permissions
|
||||
become_user: root
|
||||
command: chown {{ frappe_user }} -R /home/{{ frappe_user }}
|
||||
command: chown {{ frappe_user }} -R {{ user_directory }}
|
||||
|
||||
- name: python3 bench init for develop
|
||||
command: bench init {{ bench_path }} --frappe-path {{ frappe_repo_url }} --frappe-branch {{ frappe_branch }} --python {{ python }}
|
||||
@ -77,6 +77,6 @@
|
||||
# Setup Bench for production environment
|
||||
- include_tasks: setup_bench_production.yml
|
||||
vars:
|
||||
bench_path: "/home/{{ frappe_user }}/{{ bench_name }}"
|
||||
bench_path: "{{ user_directory }}/{{ bench_name }}"
|
||||
when: not run_travis and production
|
||||
...
|
||||
|
@ -13,16 +13,17 @@
|
||||
- name: Check whether the site already exists
|
||||
stat: path="{{ bench_path }}/sites/{{ site }}"
|
||||
register: site_folder
|
||||
when: not without_site
|
||||
|
||||
- name: Create a new site
|
||||
command: "bench new-site {{ site }} --admin-password '{{ admin_password }}' --mariadb-root-password '{{ mysql_root_password }}'"
|
||||
args:
|
||||
chdir: "{{ bench_path }}"
|
||||
when: not site_folder.stat.exists
|
||||
when: not without_site and not site_folder.stat.exists
|
||||
|
||||
- name: Install ERPNext to default site
|
||||
command: "bench --site {{ site }} install-app erpnext"
|
||||
args:
|
||||
chdir: "{{ bench_path }}"
|
||||
when: not without_erpnext
|
||||
when: not without_site and not without_erpnext
|
||||
...
|
@ -1,11 +1,11 @@
|
||||
---
|
||||
- name: insert/update inputrc for history
|
||||
blockinfile:
|
||||
dest: "/home/{{ frappe_user }}/.inputrc"
|
||||
dest: "{{ user_directory }}/.inputrc"
|
||||
create: yes
|
||||
block: |
|
||||
## arrow up
|
||||
"\e[A":history-search-backward
|
||||
## arrow down
|
||||
"\e[B":history-search-forward
|
||||
...
|
||||
...
|
||||
|
@ -48,6 +48,10 @@
|
||||
[mysqld]
|
||||
pid-file = /var/run/mysqld/mysqld.pid
|
||||
socket = /var/run/mysqld/mysqld.sock
|
||||
|
||||
# setting appeared inside mysql but overwritten by mariadb inside mariadb.conf.d/xx-server.cnf valued as utf8mb4_general_ci
|
||||
|
||||
collation-server = utf8mb4_unicode_ci
|
||||
create: yes
|
||||
become: yes
|
||||
become_user: root
|
||||
|
@ -24,7 +24,14 @@
|
||||
get_url:
|
||||
url: https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.focal_amd64.deb
|
||||
dest: /tmp/wkhtmltox.deb
|
||||
when: ansible_distribution == 'Ubuntu' and ansible_distribution_major_version == '20'
|
||||
when: ansible_distribution == 'Ubuntu' and ansible_distribution_major_version == '20' and ansible_architecture != 'aarch64'
|
||||
|
||||
- name: download wkhtmltox Ubuntu 20 arm64
|
||||
get_url:
|
||||
# wkhtmltox supports arm64 starting from 0.12.6
|
||||
url: https://github.com/wkhtmltopdf/packaging/releases/download/0.12.6-1/wkhtmltox_0.12.6-1.focal_arm64.deb
|
||||
dest: /tmp/wkhtmltox.deb
|
||||
when: ansible_distribution == 'Ubuntu' and ansible_distribution_major_version == '20' and ansible_architecture == 'aarch64'
|
||||
|
||||
- name: download wkhtmltox Ubuntu 18
|
||||
get_url:
|
||||
|
@ -40,8 +40,8 @@
|
||||
- name: setup bench and dev environment
|
||||
hosts: localhost
|
||||
vars:
|
||||
bench_repo_path: "/home/{{ frappe_user }}/.bench"
|
||||
bench_path: "/home/{{ frappe_user }}/{{ bench_name }}"
|
||||
bench_repo_path: "{{ user_directory }}/.bench"
|
||||
bench_path: "{{ user_directory }}/{{ bench_name }}"
|
||||
roles:
|
||||
# setup frappe-bench
|
||||
- { role: bench, tags: "bench", when: not run_travis and not without_bench_setup }
|
||||
|
@ -15,7 +15,7 @@ def prepare_beta_release(bench_path, app, owner='frappe', remote='upstream'):
|
||||
beta_master = click.prompt('Branch name for beta release', type=str)
|
||||
|
||||
if click.confirm("Do you want to setup hotfix for beta ?"):
|
||||
beta_hotfix = click.prompt('Branch name for beta hotfix ({}_hotfix)'.format(beta_master), type=str)
|
||||
beta_hotfix = click.prompt(f'Branch name for beta hotfix ({beta_master}_hotfix)', type=str)
|
||||
|
||||
validate(bench_path)
|
||||
repo_path = os.path.join(bench_path, 'apps', app)
|
||||
@ -26,7 +26,7 @@ def prepare_beta_release(bench_path, app, owner='frappe', remote='upstream'):
|
||||
|
||||
if beta_hotfix:
|
||||
prepare_beta_hotfix(repo_path, beta_hotfix, remote)
|
||||
|
||||
|
||||
tag_name = merge_beta_release_to_develop(repo_path, beta_master, remote, version)
|
||||
push_branches(repo_path, beta_master, beta_hotfix, remote)
|
||||
create_github_release(repo_path, tag_name, '', owner, remote)
|
||||
@ -68,8 +68,8 @@ def set_beta_version(repo_path, version):
|
||||
repo = git.Repo(repo_path)
|
||||
app_name = os.path.basename(repo_path)
|
||||
repo.index.add([os.path.join(app_name, 'hooks.py')])
|
||||
repo.index.commit('bumped to version {}'.format(version))
|
||||
|
||||
repo.index.commit(f'bumped to version {version}')
|
||||
|
||||
|
||||
def prepare_beta_hotfix(repo_path, beta_hotfix, remote):
|
||||
g = git.Repo(repo_path).git
|
||||
@ -83,7 +83,7 @@ def merge_beta_release_to_develop(repo_path, beta_master, remote, version):
|
||||
g = repo.git
|
||||
|
||||
tag_name = 'v' + version
|
||||
repo.create_tag(tag_name, message='Release {}'.format(version))
|
||||
repo.create_tag(tag_name, message=f'Release {version}')
|
||||
|
||||
g.checkout('develop')
|
||||
|
||||
@ -100,12 +100,12 @@ def push_branches(repo_path, beta_master, beta_hotfix, remote):
|
||||
|
||||
args = [
|
||||
'develop:develop',
|
||||
'{beta_master}:{beta_master}'.format(beta_master=beta_master),
|
||||
f'{beta_master}:{beta_master}',
|
||||
]
|
||||
|
||||
if beta_hotfix:
|
||||
args.append('{beta_hotfix}:{beta_hotfix}'.format(beta_hotfix=beta_hotfix))
|
||||
|
||||
args.append(f'{beta_hotfix}:{beta_hotfix}')
|
||||
|
||||
args.append('--tags')
|
||||
|
||||
print("Pushing branches")
|
||||
@ -114,5 +114,5 @@ def push_branches(repo_path, beta_master, beta_hotfix, remote):
|
||||
def create_github_release(repo_path, tag_name, message, owner, remote):
|
||||
from .release import create_github_release
|
||||
|
||||
create_github_release(repo_path, tag_name, message, remote=remote, owner=owner,
|
||||
create_github_release(repo_path, tag_name, message, remote=remote, owner=owner,
|
||||
repo_name=None, gh_username=github_username, gh_password=github_password)
|
@ -4,11 +4,8 @@ import os
|
||||
import sys
|
||||
import semantic_version
|
||||
import git
|
||||
import requests
|
||||
import getpass
|
||||
import re
|
||||
from requests.auth import HTTPBasicAuth
|
||||
import requests.exceptions
|
||||
from time import sleep
|
||||
from .config.common_site_config import get_config
|
||||
import click
|
||||
@ -47,6 +44,9 @@ def release(bench_path, app, bump_type, from_branch, to_branch,
|
||||
repo_name=repo_name, remote=remote, frontport=frontport)
|
||||
|
||||
def validate(bench_path, config):
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
global github_username, github_password
|
||||
|
||||
github_username = config.get('github_username')
|
||||
@ -93,7 +93,7 @@ def bump(bench_path, app, bump_type, from_branch, to_branch, remote, owner, repo
|
||||
push_release(repo_path, from_branch=from_branch, to_branch=to_branch, remote=remote)
|
||||
prerelease = True if 'beta' in new_version else False
|
||||
create_github_release(repo_path, tag_name, message, remote=remote, owner=owner, repo_name=repo_name, prerelease=prerelease)
|
||||
print('Released {tag} for {repo_path}'.format(tag=tag_name, repo_path=repo_path))
|
||||
print(f'Released {tag_name} for {repo_path}')
|
||||
|
||||
def update_branches_and_check_for_changelog(repo_path, from_branch, to_branch, remote='upstream'):
|
||||
|
||||
@ -125,8 +125,7 @@ def get_release_message(repo_path, from_branch, to_branch, remote='upstream'):
|
||||
|
||||
repo = git.Repo(repo_path)
|
||||
g = repo.git
|
||||
log = g.log('{remote}/{to_branch}..{remote}/{from_branch}'.format(
|
||||
remote=remote, to_branch=to_branch, from_branch=from_branch), '--format=format:%s', '--no-merges')
|
||||
log = g.log(f'{remote}/{to_branch}..{remote}/{from_branch}', '--format=format:%s', '--no-merges')
|
||||
|
||||
if log:
|
||||
return "* " + log.replace('\n', '\n* ')
|
||||
@ -246,7 +245,7 @@ def commit_changes(repo_path, new_version, to_branch):
|
||||
else:
|
||||
repo.index.add([os.path.join(app_name, 'hooks.py')])
|
||||
|
||||
repo.index.commit('bumped to version {}'.format(new_version))
|
||||
repo.index.commit(f'bumped to version {new_version}')
|
||||
|
||||
def create_release(repo_path, new_version, from_branch, to_branch, frontport=True):
|
||||
print('creating release for version', new_version)
|
||||
@ -259,7 +258,7 @@ def create_release(repo_path, new_version, from_branch, to_branch, frontport=Tru
|
||||
handle_merge_error(e, source=from_branch, target=to_branch)
|
||||
|
||||
tag_name = 'v' + new_version
|
||||
repo.create_tag(tag_name, message='Release {}'.format(new_version))
|
||||
repo.create_tag(tag_name, message=f'Release {new_version}')
|
||||
g.checkout(from_branch)
|
||||
|
||||
try:
|
||||
@ -269,8 +268,8 @@ def create_release(repo_path, new_version, from_branch, to_branch, frontport=Tru
|
||||
|
||||
if frontport:
|
||||
for branch in branches_to_update[from_branch]:
|
||||
print ("Front porting changes to {}".format(branch))
|
||||
print('merging {0} into'.format(to_branch), branch)
|
||||
print (f"Front porting changes to {branch}")
|
||||
print(f'merging {to_branch} into', branch)
|
||||
g.checkout(branch)
|
||||
try:
|
||||
g.merge(to_branch)
|
||||
@ -281,7 +280,7 @@ def create_release(repo_path, new_version, from_branch, to_branch, frontport=Tru
|
||||
|
||||
def handle_merge_error(e, source, target):
|
||||
print('-'*80)
|
||||
print('Error when merging {source} into {target}'.format(source=source, target=target))
|
||||
print(f'Error when merging {source} into {target}')
|
||||
print(e)
|
||||
print('You can open a new terminal, try to manually resolve the conflict/error and continue')
|
||||
print('-'*80)
|
||||
@ -292,13 +291,13 @@ def push_release(repo_path, from_branch, to_branch, remote='upstream'):
|
||||
repo = git.Repo(repo_path)
|
||||
g = repo.git
|
||||
args = [
|
||||
'{to_branch}:{to_branch}'.format(to_branch=to_branch),
|
||||
'{from_branch}:{from_branch}'.format(from_branch=from_branch)
|
||||
f'{to_branch}:{to_branch}',
|
||||
f'{from_branch}:{from_branch}'
|
||||
]
|
||||
|
||||
for branch in branches_to_update[from_branch]:
|
||||
print('pushing {0} branch of'.format(branch), repo_path)
|
||||
args.append('{branch}:{branch}'.format(branch=branch))
|
||||
print(f'pushing {branch} branch of', repo_path)
|
||||
args.append(f'{branch}:{branch}')
|
||||
|
||||
args.append('--tags')
|
||||
|
||||
@ -306,6 +305,9 @@ def push_release(repo_path, from_branch, to_branch, remote='upstream'):
|
||||
|
||||
def create_github_release(repo_path, tag_name, message, remote='upstream', owner='frappe', repo_name=None,
|
||||
gh_username=None, gh_password=None, prerelease=False):
|
||||
import requests
|
||||
import requests.exceptions
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
print('creating release on github')
|
||||
|
||||
@ -327,8 +329,7 @@ def create_github_release(repo_path, tag_name, message, remote='upstream', owner
|
||||
}
|
||||
for i in range(3):
|
||||
try:
|
||||
r = requests.post('https://api.github.com/repos/{owner}/{repo_name}/releases'.format(
|
||||
owner=owner, repo_name=repo_name),
|
||||
r = requests.post(f'https://api.github.com/repos/{owner}/{repo_name}/releases',
|
||||
auth=HTTPBasicAuth(gh_username, gh_password), data=json.dumps(data))
|
||||
r.raise_for_status()
|
||||
break
|
||||
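The reformatted call above posts the release to the GitHub API with basic auth inside a small retry loop. A hedged sketch with placeholder repository and credentials; running it as-is would hit the real API and fail authentication:

import json
import requests
from requests.auth import HTTPBasicAuth

data = {"tag_name": "v5.0.0", "name": "v5.0.0", "prerelease": False}
for attempt in range(3):
	try:
		r = requests.post(
			"https://api.github.com/repos/frappe/bench/releases",
			auth=HTTPBasicAuth("gh_username", "gh_password"),  # placeholders
			data=json.dumps(data),
		)
		r.raise_for_status()
		break
	except requests.exceptions.HTTPError:
		continue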
@ -347,9 +348,9 @@ def push_branch_for_old_major_version(bench_path, bump_type, app, repo_path, fro
|
||||
return
|
||||
|
||||
current_version = get_current_version(repo_path)
|
||||
old_major_version_branch = "v{major}.x.x".format(major=current_version.split('.')[0])
|
||||
old_major_version_branch = f"v{current_version.split('.')[0]}.x.x"
|
||||
|
||||
click.confirm('Do you want to push {branch}?'.format(branch=old_major_version_branch), abort=True)
|
||||
click.confirm(f'Do you want to push {old_major_version_branch}?', abort=True)
|
||||
|
||||
update_branch(repo_path, to_branch, remote=remote)
|
||||
|
||||
@ -357,8 +358,8 @@ def push_branch_for_old_major_version(bench_path, bump_type, app, repo_path, fro
|
||||
g.checkout(b=old_major_version_branch)
|
||||
|
||||
args = [
|
||||
'{old_major_version_branch}:{old_major_version_branch}'.format(old_major_version_branch=old_major_version_branch),
|
||||
f'{old_major_version_branch}:{old_major_version_branch}',
|
||||
]
|
||||
|
||||
print("Pushing {old_major_version_branch} ".format(old_major_version_branch=old_major_version_branch))
|
||||
print(f"Pushing {old_major_version_branch} ")
|
||||
print(g.push(remote, *args))
|
||||
|
@ -8,14 +8,11 @@ import sys
|
||||
import traceback
|
||||
import unittest
|
||||
|
||||
# imports - third party imports
|
||||
from six import PY2
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
import bench.utils
|
||||
|
||||
if PY2:
|
||||
if sys.version_info.major == 2:
|
||||
FRAPPE_BRANCH = "version-12"
|
||||
else:
|
||||
FRAPPE_BRANCH = "develop"
|
||||
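With six gone from the tests, the Frappe branch is chosen straight off the interpreter version. The equivalent one-liner:

import sys

FRAPPE_BRANCH = "version-12" if sys.version_info.major == 2 else "develop"
print(FRAPPE_BRANCH)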
@ -44,7 +41,7 @@ class TestBenchBase(unittest.TestCase):
|
||||
bench_path = os.path.abspath(bench_name)
|
||||
python_path = os.path.abspath(os.path.join(bench_path, "env", "bin", "python"))
|
||||
self.assertTrue(python_path.startswith(bench_path))
|
||||
for subdir in ("bin", "include", "lib", "share"):
|
||||
for subdir in ("bin", "lib", "share"):
|
||||
self.assert_exists(bench_name, "env", subdir)
|
||||
|
||||
def assert_config(self, bench_name):
|
||||
@ -84,13 +81,12 @@ class TestBenchBase(unittest.TestCase):
|
||||
frappe_tmp_path = "/tmp/frappe"
|
||||
|
||||
if not os.path.exists(frappe_tmp_path):
|
||||
bench.utils.exec_cmd("git clone https://github.com/frappe/frappe -b {branch} --depth 1 --origin upstream {location}".format(branch=FRAPPE_BRANCH, location=frappe_tmp_path))
|
||||
bench.utils.exec_cmd(f"git clone https://github.com/frappe/frappe -b {FRAPPE_BRANCH} --depth 1 --origin upstream {frappe_tmp_path}")
|
||||
|
||||
kwargs.update(dict(
|
||||
python=sys.executable,
|
||||
no_procfile=True,
|
||||
no_backups=True,
|
||||
skip_assets=True,
|
||||
frappe_path=frappe_tmp_path
|
||||
))
|
||||
|
||||
|
@ -9,6 +9,7 @@ import git
|
||||
|
||||
# imports - module imports
|
||||
import bench
|
||||
import bench.cli
|
||||
import bench.utils
|
||||
from bench.release import get_bumped_version
|
||||
from bench.tests.test_base import FRAPPE_BRANCH, TestBenchBase
|
||||
@ -27,6 +28,10 @@ class TestBenchInit(TestBenchBase):
|
||||
self.assertEqual( get_bumped_version('11.0.5-beta.22', 'prerelease'), '11.0.5-beta.23' )
|
||||
|
||||
|
||||
def test_utils(self):
|
||||
self.assertEqual(subprocess.call("bench"), 0)
|
||||
|
||||
|
||||
def test_init(self, bench_name="test-bench", **kwargs):
|
||||
self.init_bench(bench_name, **kwargs)
|
||||
self.assert_folders(bench_name)
|
||||
@ -92,7 +97,7 @@ class TestBenchInit(TestBenchBase):
|
||||
def test_get_app(self):
|
||||
self.init_bench("test-bench")
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
bench.utils.exec_cmd("bench get-app frappe_theme --skip-assets", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench get-app frappe_theme", cwd=bench_path)
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "frappe_theme")))
|
||||
app_installed_in_env = "frappe_theme" in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8')
|
||||
self.assertTrue(app_installed_in_env)
|
||||
@ -106,22 +111,22 @@ class TestBenchInit(TestBenchBase):
|
||||
self.init_bench(bench_name)
|
||||
bench.utils.exec_cmd("bench setup requirements --node", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench build", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench get-app erpnext --branch {0}".format(FRAPPE_BRANCH), cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench get-app frappe_theme --branch master", cwd=bench_path)
|
||||
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "erpnext")))
|
||||
self.assertTrue(os.path.exists(os.path.join(bench_path, "apps", "frappe_theme")))
|
||||
|
||||
# check if app is installed
|
||||
app_installed_in_env = "erpnext" in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8')
|
||||
app_installed_in_env = "frappe_theme" in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8')
|
||||
self.assertTrue(app_installed_in_env)
|
||||
|
||||
# create and install app on site
|
||||
self.new_site(site_name, bench_name)
|
||||
installed_erpnext = not bench.utils.exec_cmd("bench --site {0} install-app erpnext".format(site_name), cwd=bench_path)
|
||||
installed_app = not bench.utils.exec_cmd(f"bench --site {site_name} install-app frappe_theme", cwd=bench_path)
|
||||
|
||||
app_installed_on_site = subprocess.check_output(["bench", "--site", site_name, "list-apps"], cwd=bench_path).decode('utf8')
|
||||
|
||||
if installed_erpnext:
|
||||
self.assertTrue("erpnext" in app_installed_on_site)
|
||||
if installed_app:
|
||||
self.assertTrue("frappe_theme" in app_installed_on_site)
|
||||
|
||||
|
||||
def test_remove_app(self):
|
||||
@ -129,13 +134,13 @@ class TestBenchInit(TestBenchBase):
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
|
||||
bench.utils.exec_cmd("bench setup requirements --node", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench get-app erpnext --branch version-12 --skip-assets --overwrite", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench remove-app erpnext", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench get-app frappe_theme --branch master --overwrite", cwd=bench_path)
|
||||
bench.utils.exec_cmd("bench remove-app frappe_theme", cwd=bench_path)
|
||||
|
||||
with open(os.path.join(bench_path, "sites", "apps.txt")) as f:
|
||||
self.assertFalse("erpnext" in f.read())
|
||||
self.assertFalse("erpnext" in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8'))
|
||||
self.assertFalse(os.path.exists(os.path.join(bench_path, "apps", "erpnext")))
|
||||
self.assertFalse("frappe_theme" in f.read())
|
||||
self.assertFalse("frappe_theme" in subprocess.check_output(["bench", "pip", "freeze"], cwd=bench_path).decode('utf8'))
|
||||
self.assertFalse(os.path.exists(os.path.join(bench_path, "apps", "frappe_theme")))
|
||||
|
||||
|
||||
def test_switch_to_branch(self):
|
||||
@ -143,10 +148,10 @@ class TestBenchInit(TestBenchBase):
|
||||
bench_path = os.path.join(self.benches_path, "test-bench")
|
||||
app_path = os.path.join(bench_path, "apps", "frappe")
|
||||
|
||||
successful_switch = not bench.utils.exec_cmd("bench switch-to-branch version-12 frappe --upgrade", cwd=bench_path)
|
||||
successful_switch = not bench.utils.exec_cmd("bench switch-to-branch version-13 frappe --upgrade", cwd=bench_path)
|
||||
app_branch_after_switch = str(git.Repo(path=app_path).active_branch)
|
||||
if successful_switch:
|
||||
self.assertEqual("version-12", app_branch_after_switch)
|
||||
self.assertEqual("version-13", app_branch_after_switch)
|
||||
|
||||
successful_switch = not bench.utils.exec_cmd("bench switch-to-branch develop frappe --upgrade", cwd=bench_path)
|
||||
app_branch_after_second_switch = str(git.Repo(path=app_path).active_branch)
|
||||
|
@ -19,13 +19,13 @@ class TestSetupProduction(TestBenchBase):
for bench_name in ("test-bench-1", "test-bench-2"):
bench_path = os.path.join(os.path.abspath(self.benches_path), bench_name)
self.init_bench(bench_name)
bench.utils.exec_cmd("sudo bench setup production {0} --yes".format(user), cwd=bench_path)
bench.utils.exec_cmd(f"sudo bench setup production {user} --yes", cwd=bench_path)
self.assert_nginx_config(bench_name)
self.assert_supervisor_config(bench_name)
self.assert_supervisor_process(bench_name)

self.assert_nginx_process()
bench.utils.exec_cmd("sudo bench setup sudoers {0}".format(user))
bench.utils.exec_cmd(f"sudo bench setup sudoers {user}")
self.assert_sudoers(user)

for bench_name in self.benches:
@ -42,7 +42,7 @@ class TestSetupProduction(TestBenchBase):

def assert_nginx_config(self, bench_name):
conf_src = os.path.join(os.path.abspath(self.benches_path), bench_name, 'config', 'nginx.conf')
conf_dest = "/etc/nginx/conf.d/{bench_name}.conf".format(bench_name=bench_name)
conf_dest = f"/etc/nginx/conf.d/{bench_name}.conf"

self.assertTrue(self.file_exists(conf_src))
self.assertTrue(self.file_exists(conf_dest))
@ -55,10 +55,10 @@ class TestSetupProduction(TestBenchBase):
f = f.read()

for key in (
"upstream {bench_name}-frappe",
"upstream {bench_name}-socketio-server"
f"upstream {bench_name}-frappe",
f"upstream {bench_name}-socketio-server"
):
self.assertTrue(key.format(bench_name=bench_name) in f)
self.assertTrue(key in f)

def assert_nginx_process(self):
@ -79,15 +79,15 @@ class TestSetupProduction(TestBenchBase):
with open(sudoers_file, 'r') as f:
sudoers = f.read()

self.assertTrue('{user} ALL = (root) NOPASSWD: {service} nginx *'.format(service=service, user=user) in sudoers)
self.assertTrue('{user} ALL = (root) NOPASSWD: {nginx}'.format(nginx=nginx, user=user) in sudoers)
self.assertTrue(f'{user} ALL = (root) NOPASSWD: {service} nginx *' in sudoers)
self.assertTrue(f'{user} ALL = (root) NOPASSWD: {nginx}' in sudoers)

def assert_supervisor_config(self, bench_name, use_rq=True):
conf_src = os.path.join(os.path.abspath(self.benches_path), bench_name, 'config', 'supervisor.conf')

supervisor_conf_dir = get_supervisor_confdir()
conf_dest = "{supervisor_conf_dir}/{bench_name}.conf".format(supervisor_conf_dir=supervisor_conf_dir, bench_name=bench_name)
conf_dest = f"{supervisor_conf_dir}/{bench_name}.conf"

self.assertTrue(self.file_exists(conf_src))
self.assertTrue(self.file_exists(conf_dest))
@ -100,38 +100,36 @@ class TestSetupProduction(TestBenchBase):
f = f.read()

tests = [
"program:{bench_name}-frappe-web",
"program:{bench_name}-redis-cache",
"program:{bench_name}-redis-queue",
"program:{bench_name}-redis-socketio",
"group:{bench_name}-web",
"group:{bench_name}-workers",
"group:{bench_name}-redis"
f"program:{bench_name}-frappe-web",
f"program:{bench_name}-redis-cache",
f"program:{bench_name}-redis-queue",
f"program:{bench_name}-redis-socketio",
f"group:{bench_name}-web",
f"group:{bench_name}-workers",
f"group:{bench_name}-redis"
]

if not os.environ.get("CI"):
tests.append("program:{bench_name}-node-socketio")
tests.append(f"program:{bench_name}-node-socketio")

if use_rq:
tests.extend([
"program:{bench_name}-frappe-schedule",
"program:{bench_name}-frappe-default-worker",
"program:{bench_name}-frappe-short-worker",
"program:{bench_name}-frappe-long-worker"
f"program:{bench_name}-frappe-schedule",
f"program:{bench_name}-frappe-default-worker",
f"program:{bench_name}-frappe-short-worker",
f"program:{bench_name}-frappe-long-worker"
])

else:
tests.extend([
"program:{bench_name}-frappe-workerbeat",
"program:{bench_name}-frappe-worker",
"program:{bench_name}-frappe-longjob-worker",
"program:{bench_name}-frappe-async-worker"
f"program:{bench_name}-frappe-workerbeat",
f"program:{bench_name}-frappe-worker",
f"program:{bench_name}-frappe-longjob-worker",
f"program:{bench_name}-frappe-async-worker"
])

for key in tests:
if key.format(bench_name=bench_name) not in f:
print(key.format(bench_name=bench_name))
self.assertTrue(key.format(bench_name=bench_name) in f)
self.assertTrue(key in f)

def assert_supervisor_process(self, bench_name, use_rq=True, disable_production=False):
@ -170,9 +168,9 @@ class TestSetupProduction(TestBenchBase):

for key in tests:
if disable_production:
self.assertFalse(re.search(key.format(bench_name=bench_name), out))
self.assertFalse(re.search(key, out))
else:
self.assertTrue(re.search(key.format(bench_name=bench_name), out))
self.assertTrue(re.search(key, out))

if __name__ == '__main__':
311
bench/utils.py
@ -2,31 +2,17 @@
# -*- coding: utf-8 -*-

# imports - standard imports
import errno
import glob
import grp
import itertools
import json
import logging
import multiprocessing
import os
import pwd
import re
import select
import shutil
import site
import subprocess
import sys
from datetime import datetime
from distutils.spawn import find_executable

# imports - third party imports
import click
from crontab import CronTab
import requests
from semantic_version import Version
from six import iteritems
from six.moves.urllib.parse import urlparse

# imports - module imports
import bench
@ -92,6 +78,12 @@ def safe_decode(string, encoding = 'utf-8'):

def check_latest_version():
if bench.VERSION.endswith("dev"):
return

import requests
from semantic_version import Version

try:
pypi_request = requests.get("https://pypi.org/pypi/frappe-bench/json")
except Exception:
@ -105,7 +97,7 @@ def check_latest_version():
local_version = Version(bench.VERSION)

if pypi_version > local_version:
log("A newer version of bench is available: {0} → {1}".format(local_version, pypi_version))
log(f"A newer version of bench is available: {local_version} → {pypi_version}")

def get_frappe(bench_path='.'):
@ -120,6 +112,16 @@ def get_env_cmd(cmd, bench_path='.'):
return os.path.abspath(os.path.join(bench_path, 'env', 'bin', cmd))

def pause_exec(seconds=10):
from time import sleep

for i in range(seconds, 0, -1):
print(f"Will continue execution in {i} seconds...", end="\r")
sleep(1)

print(" " * 40, end="\r")

def init(path, apps_path=None, no_procfile=False, no_backups=False,
frappe_path=None, frappe_branch=None, verbose=False, clone_from=None,
skip_redis_config_generation=False, clone_without_update=False, ignore_exist=False, skip_assets=False,
@ -132,7 +134,7 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
from bench.patches import set_all_patches_executed

if os.path.exists(path) and not ignore_exist:
log('Path {path} already exists!'.format(path=path))
log(f'Path {path} already exists!')
sys.exit(0)
elif not os.path.exists(path):
# only create dir if it does not exist
@ -142,6 +144,8 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
try:
os.makedirs(os.path.join(path, dirname))
except OSError as e:
import errno

if e.errno == errno.EEXIST:
pass

@ -162,11 +166,8 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
if apps_path:
install_apps_from_path(apps_path, bench_path=path)

bench.set_frappe_version(bench_path=path)
if bench.FRAPPE_VERSION > 5:
if not skip_assets:
update_node_packages(bench_path=path)
if not skip_assets:
update_node_packages(bench_path=path)

set_all_patches_executed(bench_path=path)
if not skip_assets:
@ -183,9 +184,10 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
copy_patches_txt(path)

def update(pull=False, apps=None, patch=False, build=False, requirements=False, backup=True, force=False, reset=False,
restart_supervisor=False, restart_systemd=False):
def update(pull=False, apps=None, patch=False, build=False, requirements=False, backup=True, compile=True,
force=False, reset=False, restart_supervisor=False, restart_systemd=False):
"""command: bench update"""
import re
from bench import patches
from bench.app import is_version_upgrade, pull_apps, validate_branch
from bench.config.common_site_config import get_config, update_config
@ -211,14 +213,25 @@ def update(pull=False, apps=None, patch=False, build=False, requirements=False,

if version_upgrade[0]:
if force:
print("Force flag has been used for a major version change in Frappe and it's apps. \nThis will take significant time to migrate and might break custom apps.")
log("""Force flag has been used for a major version change in Frappe and it's apps.
This will take significant time to migrate and might break custom apps.""", level=3)
else:
print("This update will cause a major version change in Frappe/ERPNext from {0} to {1}. \nThis would take significant time to migrate and might break custom apps.".format(*version_upgrade[1:]))
print(f"""This update will cause a major version change in Frappe/ERPNext from {version_upgrade[1]} to {version_upgrade[2]}.
This would take significant time to migrate and might break custom apps.""")
click.confirm('Do you want to continue?', abort=True)

if not reset and conf.get('shallow_clone'):
log("""shallow_clone is set in your bench config.
However without passing the --reset flag, your repositories will be unshallowed.
To avoid this, cancel this operation and run `bench update --reset`.

Consider the consequences of `git reset --hard` on your apps before you run that.
To avoid seeing this warning, set shallow_clone to false in your common_site_config.json
""", level=3)
pause_exec(seconds=10)

if version_upgrade[0] or (not version_upgrade[0] and force):
validate_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)

conf.update({ "maintenance_mode": 1, "pause_scheduler": 1 })
update_config(conf, bench_path=bench_path)

@ -230,9 +243,11 @@ def update(pull=False, apps=None, patch=False, build=False, requirements=False,
apps = [app.strip() for app in re.split(",| ", apps) if app]

if pull:
print('Updating apps source...')
pull_apps(apps=apps, bench_path=bench_path, reset=reset)

if requirements:
print('Setting up requirements...')
update_requirements(bench_path=bench_path)
update_node_packages(bench_path=bench_path)

@ -241,11 +256,19 @@ def update(pull=False, apps=None, patch=False, build=False, requirements=False,
patch_sites(bench_path=bench_path)

if build:
print('Building assets...')
build_assets(bench_path=bench_path)

if version_upgrade[0] or (not version_upgrade[0] and force):
post_upgrade(version_upgrade[1], version_upgrade[2], bench_path=bench_path)

if pull and compile:
from compileall import compile_dir

print('Compiling Python files...')
apps_dir = os.path.join(bench_path, 'apps')
compile_dir(apps_dir, quiet=1, rx=re.compile('.*node_modules.*'))

if restart_supervisor or conf.get('restart_supervisor_on_update'):
restart_supervisor_processes(bench_path=bench_path)

@ -259,18 +282,20 @@ def update(pull=False, apps=None, patch=False, build=False, requirements=False,

def copy_patches_txt(bench_path):
import shutil

shutil.copy(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'patches', 'patches.txt'),
os.path.join(bench_path, 'patches.txt'))

def clone_apps_from(bench_path, clone_from, update_app=True):
from .app import install_app
print('Copying apps from {0}...'.format(clone_from))
from bench.app import install_app
print(f'Copying apps from {clone_from}...')
subprocess.check_output(['cp', '-R', os.path.join(clone_from, 'apps'), bench_path])

node_modules_path = os.path.join(clone_from, 'node_modules')
if os.path.exists(node_modules_path):
print('Copying node_modules from {0}...'.format(clone_from))
print(f'Copying node_modules from {clone_from}...')
subprocess.check_output(['cp', '-R', node_modules_path, bench_path])

def setup_app(app):
@ -286,7 +311,7 @@ def clone_apps_from(bench_path, clone_from, update_app=True):
remote = 'upstream'
else:
remote = remotes[0]
print('Cleaning up {0}'.format(app))
print(f'Cleaning up {app}')
branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], cwd=app_path).strip()
subprocess.check_output(['git', 'reset', '--hard'], cwd=app_path)
subprocess.check_output(['git', 'pull', '--rebase', remote, branch], cwd=app_path)
@ -302,23 +327,23 @@ def clone_apps_from(bench_path, clone_from, update_app=True):

def exec_cmd(cmd, cwd='.'):
import shlex
print("{0}$ {1}{2}".format(color.silver, cmd, color.nc))
cwd_info = "cd {0} && ".format(cwd) if cwd != "." else ""
cmd_log = "{0}{1}".format(cwd_info, cmd)
print(f"{color.silver}$ {cmd}{color.nc}")
cwd_info = f"cd {cwd} && " if cwd != "." else ""
cmd_log = f"{cwd_info}{cmd}"
logger.debug(cmd_log)
cmd = shlex.split(cmd)
return_code = subprocess.call(cmd, cwd=cwd, universal_newlines=True)
if return_code:
logger.warning("{0} executed with exit code {1}".format(cmd_log, return_code))
logger.warning(f"{cmd_log} executed with exit code {return_code}")

def which(executable, raise_err = False):
def which(executable, raise_err=False):
from distutils.spawn import find_executable

exec_ = find_executable(executable)

if not exec_ and raise_err:
raise ValueError('{executable} not found.'.format(
executable = executable
))
raise ValueError(f'{executable} not found.')

return exec_

@ -331,19 +356,19 @@ def get_venv_path():
with open(os.devnull, "wb") as devnull:
is_venv_installed = not subprocess.call([current_python, "-m", "venv", "--help"], stdout=devnull)
if is_venv_installed:
venv = "{} -m venv".format(current_python)
venv = f"{current_python} -m venv"

return venv or log("virtualenv cannot be found", level=2)

def setup_env(bench_path='.', python='python3'):
frappe = os.path.join(bench_path, "apps", "frappe")
pip = os.path.join(bench_path, "env", "bin", "pip")
py = os.path.join(bench_path, "env", "bin", "python")
virtualenv = get_venv_path()

exec_cmd('{} -q env -p {}'.format(virtualenv, python), cwd=bench_path)
exec_cmd(f'{virtualenv} -q env -p {python}', cwd=bench_path)

if os.path.exists(frappe):
exec_cmd('{} install -q -U -e {}'.format(pip, frappe), cwd=bench_path)
exec_cmd(f'{py} -m pip install -q -U -e {frappe}', cwd=bench_path)

def setup_socketio(bench_path='.'):
@ -352,27 +377,18 @@ def setup_socketio(bench_path='.'):

def patch_sites(bench_path='.'):
bench.set_frappe_version(bench_path=bench_path)

try:
if bench.FRAPPE_VERSION == 4:
exec_cmd("{frappe} --latest all".format(frappe=get_frappe(bench_path=bench_path)), cwd=os.path.join(bench_path, 'sites'))
else:
run_frappe_cmd('--site', 'all', 'migrate', bench_path=bench_path)
except subprocess.CalledProcessError:
raise PatchError
for site in get_sites(bench_path=bench_path):
try:
migrate_site(site, bench_path=bench_path)
except subprocess.CalledProcessError:
raise PatchError

def build_assets(bench_path='.', app=None):
bench.set_frappe_version(bench_path=bench_path)

if bench.FRAPPE_VERSION == 4:
exec_cmd("{frappe} --build".format(frappe=get_frappe(bench_path=bench_path)), cwd=os.path.join(bench_path, 'sites'))
else:
command = 'bench build'
if app:
command += ' --app {}'.format(app)
exec_cmd(command, cwd=bench_path)
command = 'bench build'
if app:
command += f' --app {app}'
exec_cmd(command, cwd=bench_path)

def get_sites(bench_path='.'):
@ -382,21 +398,16 @@ def get_sites(bench_path='.'):

def setup_backups(bench_path='.'):
from crontab import CronTab
from bench.config.common_site_config import get_config
logger.log('setting up backups')

bench_dir = os.path.abspath(bench_path)
user = get_config(bench_path=bench_dir).get('frappe_user')
logfile = os.path.join(bench_dir, 'logs', 'backup.log')
bench.set_frappe_version(bench_path=bench_path)
system_crontab = CronTab(user=user)

if bench.FRAPPE_VERSION == 4:
backup_command = "cd {sites_dir} && {frappe} --backup all".format(frappe=get_frappe(bench_path=bench_path),)
else:
backup_command = "cd {bench_dir} && {bench} --verbose --site all backup".format(bench_dir=bench_dir, bench=sys.argv[0])

job_command = "{backup_command} >> {logfile} 2>&1".format(backup_command=backup_command, logfile=logfile)
backup_command = f"cd {bench_dir} && {sys.argv[0]} --verbose --site all backup"
job_command = f"{backup_command} >> {logfile} 2>&1"

if job_command not in str(system_crontab):
job = system_crontab.new(command=job_command, comment="bench auto backups set for every 6 hours")
@ -418,12 +429,12 @@ def setup_sudoers(user):
if set_permissions:
os.chmod('/etc/sudoers', 0o440)

template = bench.config.env.get_template('frappe_sudoers')
template = bench.config.env().get_template('frappe_sudoers')
frappe_sudoers = template.render(**{
'user': user,
'service': find_executable('service'),
'systemctl': find_executable('systemctl'),
'nginx': find_executable('nginx'),
'service': which('service'),
'systemctl': which('systemctl'),
'nginx': which('nginx'),
})
frappe_sudoers = safe_decode(frappe_sudoers)

@ -431,7 +442,7 @@ def setup_sudoers(user):
f.write(frappe_sudoers)

os.chmod(sudoers_file, 0o440)
log("Sudoers was set up for user {}".format(user), level=1)
log(f"Sudoers was set up for user {user}", level=1)

def setup_logging(bench_path='.'):
@ -443,20 +454,23 @@ def setup_logging(bench_path='.'):
logging.Logger.log = logv

if os.path.exists(os.path.join(bench_path, 'logs')):
logger = logging.getLogger(bench.PROJECT_NAME)
log_file = os.path.join(bench_path, 'logs', 'bench.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr = logging.FileHandler(log_file)
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)
else:
hdlr = logging.NullHandler()

return logger
logger = logging.getLogger(bench.PROJECT_NAME)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.DEBUG)

return logger

def get_process_manager():
for proc_man in ['honcho', 'foreman', 'forego']:
proc_man_path = find_executable(proc_man)
proc_man_path = which(proc_man)
if proc_man_path:
return proc_man_path

@ -478,7 +492,7 @@ def start(no_dev=False, concurrency=None, procfile=None, no_prefix=False):

if no_prefix:
command.extend(['--no-prefix'])

os.execv(program, command)

@ -493,7 +507,7 @@ def get_git_version():

def check_git_for_shallow_clone():
from .config.common_site_config import get_config
from bench.config.common_site_config import get_config
config = get_config('.')

if config:
@ -521,7 +535,7 @@ def get_cmd_output(cmd, cwd='.', _raise=True):

def restart_supervisor_processes(bench_path='.', web_workers=False):
from .config.common_site_config import get_config
from bench.config.common_site_config import get_config
conf = get_config(bench_path=bench_path)
bench_name = get_bench_name(bench_path)

@ -533,39 +547,38 @@ def restart_supervisor_processes(bench_path='.', web_workers=False):
supervisor_status = get_cmd_output('supervisorctl status', cwd=bench_path)
supervisor_status = safe_decode(supervisor_status)

if web_workers and '{bench_name}-web:'.format(bench_name=bench_name) in supervisor_status:
group = '{bench_name}-web: '.format(bench_name=bench_name)
if web_workers and f'{bench_name}-web:' in supervisor_status:
group = f'{bench_name}-web:\t'

elif '{bench_name}-workers:'.format(bench_name=bench_name) in supervisor_status:
group = '{bench_name}-workers: {bench_name}-web:'.format(bench_name=bench_name)
elif f'{bench_name}-workers:' in supervisor_status:
group = f'{bench_name}-workers: {bench_name}-web:'

# backward compatibility
elif '{bench_name}-processes:'.format(bench_name=bench_name) in supervisor_status:
group = '{bench_name}-processes:'.format(bench_name=bench_name)
elif f'{bench_name}-processes:' in supervisor_status:
group = f'{bench_name}-processes:'

# backward compatibility
else:
group = 'frappe:'

exec_cmd('supervisorctl restart {group}'.format(group=group), cwd=bench_path)
exec_cmd(f'supervisorctl restart {group}', cwd=bench_path)

def restart_systemd_processes(bench_path='.', web_workers=False):
bench_name = get_bench_name(bench_path)
exec_cmd('sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)'.format(bench_name=bench_name))
exec_cmd('sudo systemctl start -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)'.format(bench_name=bench_name))
exec_cmd(f'sudo systemctl stop -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)')
exec_cmd(f'sudo systemctl start -- $(systemctl show -p Requires {bench_name}.target | cut -d= -f2)')

def set_default_site(site, bench_path='.'):
if site not in get_sites(bench_path=bench_path):
raise Exception("Site not in bench")
exec_cmd("{frappe} --use {site}".format(frappe=get_frappe(bench_path=bench_path), site=site),
cwd=os.path.join(bench_path, 'sites'))
exec_cmd(f"{get_frappe(bench_path)} --use {site}", cwd=os.path.join(bench_path, 'sites'))

def update_env_pip(bench_path):
env_pip = os.path.join(bench_path, 'env', 'bin', 'pip')
exec_cmd("{pip} install -q -U pip".format(pip=env_pip))
env_py = os.path.join(bench_path, 'env', 'bin', 'python')
exec_cmd(f"{env_py} -m pip install -q -U pip")

def update_requirements(bench_path='.'):
@ -580,14 +593,14 @@ def update_requirements(bench_path='.'):

def update_python_packages(bench_path='.'):
from bench.app import get_apps
pip_path = os.path.join(bench_path, "env", "bin", "pip")
env_py = os.path.join(bench_path, "env", "bin", "python")
print('Updating Python libraries...')

update_env_pip(bench_path)
for app in get_apps():
print('\n{0}Installing python dependencies for {1}{2}'.format(color.yellow, app, color.nc))
print(f'\n{color.yellow}Installing python dependencies for {app}{color.nc}')
app_path = os.path.join(bench_path, "apps", app)
exec_cmd("{0} install -q -U -e {1}".format(pip_path, app_path), cwd=bench_path)
exec_cmd(f"{env_py} -m pip install -q -U -e {app_path}", cwd=bench_path)

def update_node_packages(bench_path='.'):
@ -607,7 +620,7 @@ def update_node_packages(bench_path='.'):
def update_yarn_packages(bench_path='.'):
apps_dir = os.path.join(bench_path, 'apps')

if not find_executable('yarn'):
if not which('yarn'):
print("Please install yarn using below command and try again.")
print("`npm install -g yarn`")
return
@ -615,7 +628,7 @@ def update_yarn_packages(bench_path='.'):
for app in os.listdir(apps_dir):
app_path = os.path.join(apps_dir, app)
if os.path.exists(os.path.join(app_path, 'package.json')):
print('\n{0}Installing node dependencies for {1}{2}'.format(color.yellow, app, color.nc))
print(f'\n{color.yellow}Installing node dependencies for {app}{color.nc}')
exec_cmd('yarn install', cwd=app_path)

@ -630,7 +643,7 @@ def update_npm_packages(bench_path='.'):
with open(package_json_path, "r") as f:
app_package_json = json.loads(f.read())
# package.json is usually a dict in a dict
for key, value in iteritems(app_package_json):
for key, value in app_package_json.items():
if not key in package_json:
package_json[key] = value
else:
@ -651,14 +664,12 @@ def update_npm_packages(bench_path='.'):
exec_cmd('npm install', cwd=bench_path)

def backup_site(site, bench_path='.'):
bench.set_frappe_version(bench_path=bench_path)
def migrate_site(site, bench_path='.'):
run_frappe_cmd('--site', site, 'migrate', bench_path=bench_path)

if bench.FRAPPE_VERSION == 4:
exec_cmd("{frappe} --backup {site}".format(frappe=get_frappe(bench_path=bench_path), site=site),
cwd=os.path.join(bench_path, 'sites'))
else:
run_frappe_cmd('--site', site, 'backup', bench_path=bench_path)

def backup_site(site, bench_path='.'):
run_frappe_cmd('--site', site, 'backup', bench_path=bench_path)

def backup_all_sites(bench_path='.'):
@ -667,9 +678,7 @@ def backup_all_sites(bench_path='.'):

def is_root():
if os.getuid() == 0:
return True
return False
return os.getuid() == 0

def set_mariadb_host(host, bench_path='.'):
@ -677,15 +686,15 @@ def set_mariadb_host(host, bench_path='.'):

def set_redis_cache_host(host, bench_path='.'):
update_common_site_config({'redis_cache': "redis://{}".format(host)}, bench_path=bench_path)
update_common_site_config({'redis_cache': f"redis://{host}"}, bench_path=bench_path)

def set_redis_queue_host(host, bench_path='.'):
update_common_site_config({'redis_queue': "redis://{}".format(host)}, bench_path=bench_path)
update_common_site_config({'redis_queue': f"redis://{host}"}, bench_path=bench_path)

def set_redis_socketio_host(host, bench_path='.'):
update_common_site_config({'redis_socketio': "redis://{}".format(host)}, bench_path=bench_path)
update_common_site_config({'redis_socketio': f"redis://{host}"}, bench_path=bench_path)

def update_common_site_config(ddict, bench_path='.'):
@ -727,7 +736,8 @@ def drop_privileges(uid_name='nobody', gid_name='nogroup'):

def fix_prod_setup_perms(bench_path='.', frappe_user=None):
from .config.common_site_config import get_config
from glob import glob
from bench.config.common_site_config import get_config

if not frappe_user:
frappe_user = get_config(bench_path).get('frappe_user')
@ -738,19 +748,14 @@ def fix_prod_setup_perms(bench_path='.', frappe_user=None):

globs = ["logs/*", "config/*"]
for glob_name in globs:
for path in glob.glob(glob_name):
for path in glob(glob_name):
uid = pwd.getpwnam(frappe_user).pw_uid
gid = grp.getgrnam(frappe_user).gr_gid
os.chown(path, uid, gid)

def get_current_frappe_version(bench_path='.'):
from .app import get_current_frappe_version as fv
return fv(bench_path=bench_path)

def run_frappe_cmd(*args, **kwargs):
from .cli import from_command_line
from bench.cli import from_command_line

bench_path = kwargs.get('bench_path', '.')
f = get_env_cmd('python', bench_path=bench_path)
@ -776,17 +781,17 @@ def run_frappe_cmd(*args, **kwargs):

def validate_upgrade(from_ver, to_ver, bench_path='.'):
if to_ver >= 6:
if not find_executable('npm') and not (find_executable('node') or find_executable('nodejs')):
if not which('npm') and not (which('node') or which('nodejs')):
raise Exception("Please install nodejs and npm")

def post_upgrade(from_ver, to_ver, bench_path='.'):
from .config.common_site_config import get_config
from .config import redis
from .config.supervisor import generate_supervisor_config
from .config.nginx import make_nginx_conf
from bench.config.common_site_config import get_config
from bench.config import redis
from bench.config.supervisor import generate_supervisor_config
from bench.config.nginx import make_nginx_conf
conf = get_config(bench_path=bench_path)
print("-" * 80 + "Your bench was upgraded to version {0}".format(to_ver))
print("-" * 80 + f"Your bench was upgraded to version {to_ver}")

if conf.get('restart_supervisor_on_update'):
redis.generate_config(bench_path=bench_path)
@ -808,6 +813,8 @@ sudo supervisorctl reload

def update_translations_p(args):
import requests

try:
update_translations(*args)
except requests.exceptions.HTTPError:
@ -815,6 +822,8 @@ def update_translations_p(args):

def download_translations_p():
import multiprocessing

pool = multiprocessing.Pool(multiprocessing.cpu_count())

langs = get_langs()
@ -839,9 +848,11 @@ def get_langs():

def update_translations(app, lang):
import requests

translations_dir = os.path.join('apps', app, app, 'translations')
csv_file = os.path.join(translations_dir, lang + '.csv')
url = "https://translate.erpnext.com/files/{}-{}.csv".format(app, lang)
url = f"https://translate.erpnext.com/files/{app}-{lang}.csv"
r = requests.get(url, stream=True)
r.raise_for_status()

@ -856,8 +867,10 @@ def update_translations(app, lang):

def print_output(p):
from select import select

while p.poll() is None:
readx = select.select([p.stdout.fileno(), p.stderr.fileno()], [], [])[0]
readx = select([p.stdout.fileno(), p.stderr.fileno()], [], [])[0]
send_buffer = []
for fd in readx:
if fd == p.stdout.fileno():
@ -888,6 +901,8 @@ def get_bench_name(bench_path):

def setup_fonts():
import shutil

fonts_path = os.path.join('/tmp', 'fonts')

if os.path.exists('/etc/fonts_backup'):
@ -907,16 +922,16 @@ def set_git_remote_url(git_url, bench_path='.'):
app = git_url.rsplit('/', 1)[1].rsplit('.', 1)[0]

if app not in bench.app.get_apps(bench_path):
print("No app named {0}".format(app))
print(f"No app named {app}")
sys.exit(1)

app_dir = bench.app.get_repo_dir(app, bench_path=bench_path)
if os.path.exists(os.path.join(app_dir, '.git')):
exec_cmd("git remote set-url upstream {}".format(git_url), cwd=app_dir)
exec_cmd(f"git remote set-url upstream {git_url}", cwd=app_dir)

def run_playbook(playbook_name, extra_vars=None, tag=None):
if not find_executable('ansible'):
if not which('ansible'):
print("Ansible is needed to run this command, please install it using 'pip install ansible'")
sys.exit(1)
args = ['ansible-playbook', '-c', 'local', playbook_name, '-vvvv']
@ -943,7 +958,7 @@ def find_benches(directory=None):
if os.path.curdir == directory:
print("You are in a bench directory!")
else:
print("{0} is a bench directory!".format(directory))
print(f"{directory} is a bench directory!")
return

benches = []
@ -951,7 +966,7 @@ def find_benches(directory=None):
sub = os.path.join(directory, sub)
if os.path.isdir(sub) and not os.path.islink(sub):
if is_bench_directory(sub):
print("{} found!".format(sub))
print(f"{sub} found!")
benches.append(sub)
else:
benches.extend(find_benches(sub))
@ -960,6 +975,8 @@ def find_benches(directory=None):

def migrate_env(python, backup=False):
import shutil
from urllib.parse import urlparse
from bench.config.common_site_config import get_config
from bench.app import get_apps

@ -968,24 +985,24 @@ def migrate_env(python, backup=False):
python = which(python)
virtualenv = which('virtualenv')
pvenv = os.path.join(path, nvenv)
pip = os.path.join(pvenv, 'bin', 'pip')

# Clear Cache before Bench Dies.
try:
config = get_config(bench_path=os.getcwd())
rredis = urlparse(config['redis_cache'])

redis = '{redis} -p {port}'.format(redis=which('redis-cli'), port=rredis.port)
redis = f"{which('redis-cli')} -p {rredis.port}"

logger.log('Clearing Redis Cache...')
exec_cmd('{redis} FLUSHALL'.format(redis = redis))
exec_cmd(f'{redis} FLUSHALL')
logger.log('Clearing Redis DataBase...')
exec_cmd('{redis} FLUSHDB'.format(redis = redis))
exec_cmd(f'{redis} FLUSHDB')
except:
logger.warning('Please ensure Redis Connections are running or Daemonized.')

# Backup venv: restore using `virtualenv --relocatable` if needed
if backup:
from datetime import datetime

parch = os.path.join(path, 'archived_envs')
if not os.path.exists(parch):
os.mkdir(parch)
@ -1003,13 +1020,13 @@ def migrate_env(python, backup=False):
# Create virtualenv using specified python
venv_creation, packages_setup = 1, 1
try:
logger.log('Setting up a New Virtual {} Environment'.format(python))
venv_creation = exec_cmd('{virtualenv} --python {python} {pvenv}'.format(virtualenv=virtualenv, python=python, pvenv=pvenv))
logger.log(f'Setting up a New Virtual {python} Environment')
venv_creation = exec_cmd(f'{virtualenv} --python {python} {pvenv}')

apps = ' '.join(["-e {}".format(os.path.join("apps", app)) for app in get_apps()])
packages_setup = exec_cmd('{0} install -q -U {1}'.format(pip, apps))
apps = ' '.join([f"-e {os.path.join('apps', app)}" for app in get_apps()])
packages_setup = exec_cmd(f'{pvenv} -m pip install -q -U {apps}')

logger.log('Migration Successful to {}'.format(python))
logger.log(f'Migration Successful to {python}')
except:
if venv_creation or packages_setup:
logger.warning('Migration Error')
@ -1050,7 +1067,7 @@ def generate_command_cache(bench_path='.'):
os.remove(bench_cache_file)

try:
output = get_cmd_output("{0} -m frappe.utils.bench_helper get-frappe-commands".format(python), cwd=sites_path)
output = get_cmd_output(f"{python} -m frappe.utils.bench_helper get-frappe-commands", cwd=sites_path)
with open(bench_cache_file, 'w') as f:
json.dump(eval(output), f)
return json.loads(output)
@ -72,7 +72,7 @@ These commands belong directly to the bench group so they can be invoked directl

- **init**: Initialize a new bench instance in the specified path. This sets up a complete bench folder with an `apps` folder which contains all the Frappe apps available in the current bench, `sites` folder that stores all site data seperated by individual site folders, `config` folder that contains your redis, NGINX and supervisor configuration files. The `env` folder consists of all python dependencies the current bench and installed Frappe applications have.
- **restart**: Restart web, supervisor, systemd processes units. Used in production setup.
- **update**: Updates bench tool and if executed in a bench directory, without any flags will backup, pull, setup requirements, build, run patches and restart bench. Using specific flags will only do certain tasks instead of all.
- **update**: If executed in a bench directory, without any flags will backup, pull, setup requirements, build, run patches and restart bench. Using specific flags will only do certain tasks instead of all.
- **migrate-env**: Migrate Virtual Environment to desired Python version. This regenerates the `env` folder with the specified Python version.
- **retry-upgrade**: Retry a failed upgrade
- **disable-production**: Disables production environment for the bench.
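As a quick illustration of the flag-driven behaviour described for **update** and **migrate-env** above (a sketch only — the flags are assumed to map onto the `update()` parameters shown in the `bench/utils.py` diff above, and the Python version is a placeholder):

    bench update --pull --patch      # only fetch app updates and run migrations
    bench update --requirements      # only refresh Python and Node dependencies
    bench migrate-env python3.8      # rebuild the env folder against another Python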
@ -101,7 +101,7 @@ These commands belong directly to the bench group so they can be invoked directl
- **set-redis-cache-host**: Set Redis cache host for bench
- **set-redis-queue-host**: Set Redis queue host for bench
- **set-redis-socketio-host**: Set Redis socketio host for bench
- **set-default-site**: Set default site for bench
- **use**: Set default site for bench
- **download-translations**: Download latest translations

@ -2,9 +2,14 @@

* Updating

Currently, `bench update` can be run from any directory however the context of the command changes. If run from a bench directory, the vanilla command itself updates all apps, runs migrations and backs up all sites.
To update the bench CLI tool, depending on your method of installation, you may use

bench update
pip3 install -U frappe-bench

To backup, update all apps and sites on your bench, you may use

bench update

To manually update the bench, run `bench update` to update all the apps, run

@ -34,6 +34,8 @@ If you are on a fresh server and logged in as root, at first create a dedicated

*(it is very common to use "frappe" as frappe-username, but this comes with the security flaw of ["frappe" ranking very high](https://www.reddit.com/r/dataisbeautiful/comments/b3sirt/i_deployed_over_a_dozen_cyber_honeypots_all_over/?st=JTJ0SC0Q&sh=76e05240) in as a username challenged in hacking attempts. So, for production sites it is highly recommended to use a custom username harder to guess)*

*(you can specify the flag --home to specify a directory for your [frappe-user]. Bench will follow the home directory specified by the user's home directory e.g. /data/[frappe-user]/frappe-bench)*

Switch to `[frappe-user]` (using `su [frappe-user]`) and start the setup

wget https://raw.githubusercontent.com/frappe/bench/develop/install.py
@ -71,7 +73,7 @@ use --python flag to specify virtual environments python version, by default scr

## How do I start ERPNext

1. For development: Go to your bench folder (`frappe-bench` by default) and start the bench with `bench start`
1. For development: Go to your bench folder (`~[frappe-user]/frappe-bench` by default) and start the bench with `bench start`
2. For production: Your process will be setup and managed by `nginx` and `supervisor`. Checkout [Setup Production](https://frappe.io/docs/user/en/bench/guides/setup-production.html) for more information.

---
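For reference, a typical invocation of the installer described above might look like the following (a sketch only — the flags correspond to the argparse options visible in the install.py diff below, and the user and site names are placeholders):

    wget https://raw.githubusercontent.com/frappe/bench/develop/install.py
    sudo python3 install.py --user frappe --production --site site1.local --version 13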
56
install.py
@ -1,5 +1,7 @@
#!/usr/bin/env python3

from __future__ import print_function

import os
import sys
import subprocess
@ -157,13 +159,24 @@ def install_prerequisites():
]
})

# until psycopg2-binary is available for aarch64 (Arm 64-bit), we'll need libpq and libssl dev packages to build psycopg2 from source
if platform.machine() == 'aarch64':
log("Installing libpq and libssl dev packages to build psycopg2 for aarch64...")
run_os_command({
'apt-get': ['sudo apt-get install -y libpq-dev libssl-dev'],
'yum': ['sudo yum install -y libpq-devel openssl-devel']
})

install_package('curl')
install_package('wget')
install_package('git')
install_package('pip3', 'python3-pip')

run_os_command({
'python3': "sudo -H python3 -m pip install --upgrade pip setuptools-rust"
})
success = run_os_command({
'python3': "sudo -H python3 -m pip install --upgrade setuptools cryptography ansible==2.8.5 pip"
'python3': "sudo -H python3 -m pip install --upgrade setuptools wheel cryptography ansible~=2.8.15"
})

if not (success or shutil.which('ansible')):
@ -224,9 +237,10 @@ def install_bench(args):
extra_vars = vars(args)
extra_vars.update(frappe_user=args.user)

extra_vars.update(user_directory=get_user_home_directory(args.user))

if os.path.exists(tmp_bench_repo):
repo_path = tmp_bench_repo

else:
repo_path = os.path.join(os.path.expanduser('~'), 'bench')

@ -237,8 +251,8 @@ def install_bench(args):
if args.production:
extra_vars.update(max_worker_connections=multiprocessing.cpu_count() * 1024)

frappe_branch = 'version-12'
erpnext_branch = 'version-12'
frappe_branch = 'version-13'
erpnext_branch = 'version-13'

if args.version:
if args.version <= 10:
@ -247,12 +261,11 @@ def install_bench(args):
else:
frappe_branch = "version-{0}".format(args.version)
erpnext_branch = "version-{0}".format(args.version)
else:
if args.frappe_branch:
frappe_branch = args.frappe_branch

if args.erpnext_branch:
erpnext_branch = args.erpnext_branch
# Allow override of frappe_branch and erpnext_branch, regardless of args.version (which always has a default set)
if args.frappe_branch:
frappe_branch = args.frappe_branch
if args.erpnext_branch:
erpnext_branch = args.erpnext_branch

extra_vars.update(frappe_branch=frappe_branch)
extra_vars.update(erpnext_branch=erpnext_branch)
@ -261,6 +274,10 @@ def install_bench(args):
extra_vars.update(bench_name=bench_name)

# Will install ERPNext production setup by default
if args.without_erpnext:
log("Initializing bench {bench_name}:\n\tFrappe Branch: {frappe_branch}\n\tERPNext will not be installed due to --without-erpnext".format(bench_name=bench_name, frappe_branch=frappe_branch))
else:
log("Initializing bench {bench_name}:\n\tFrappe Branch: {frappe_branch}\n\tERPNext Branch: {erpnext_branch}".format(bench_name=bench_name, frappe_branch=frappe_branch, erpnext_branch=erpnext_branch))
run_playbook('site.yml', sudo=True, extra_vars=extra_vars)

if os.path.exists(tmp_bench_repo):
@ -273,11 +290,15 @@ def clone_bench_repo(args):
repo_url = args.repo_url or 'https://github.com/frappe/bench'

if os.path.exists(tmp_bench_repo):
log('Not cloning already existing Bench repository at {tmp_bench_repo}'.format(tmp_bench_repo=tmp_bench_repo))
return 0
elif args.without_bench_setup:
clone_path = os.path.join(os.path.expanduser('~'), 'bench')
log('--without-bench-setup specified, clone path is: {clone_path}'.format(clone_path=clone_path))
else:
clone_path = tmp_bench_repo
# Not logging repo_url to avoid accidental credential leak in case credential is embedded in URL
log('Cloning bench repository branch {branch} into {clone_path}'.format(branch=branch, clone_path=clone_path))

success = run_os_command(
{'git': 'git clone --quiet {repo_url} {bench_repo} --depth 1 --branch {branch}'.format(
@ -327,8 +348,8 @@ def get_passwords(args):
mysql_root_password = ''
continue

# admin password
if not admin_password:
# admin password, only needed if we're also creating a site
if not admin_password and not args.without_site:
admin_password = getpass.unix_getpass(prompt='Please enter the default Administrator user password: ')
conf_admin_passswd = getpass.unix_getpass(prompt='Re-enter Administrator password: ')

@ -336,6 +357,8 @@ def get_passwords(args):
passwords_didnt_match("Administrator")
admin_password = ''
continue
elif args.without_site:
log("Not creating a new site due to --without-site")

pass_set = False
else:
@ -366,6 +389,11 @@ def get_extra_vars_json(extra_args):

return ('@' + json_path)

def get_user_home_directory(user):
# Return home directory /home/USERNAME or anything else defined as home directory in
# passwd for user.
return os.path.expanduser('~'+user)

def run_playbook(playbook_name, sudo=False, extra_vars=None):
args = ['ansible-playbook', '-c', 'local', playbook_name , '-vvvv']
@ -405,8 +433,8 @@ def parse_commandline_args():

args_group.add_argument('--develop', dest='develop', action='store_true', default=False, help='Install developer setup')
args_group.add_argument('--production', dest='production', action='store_true', default=False, help='Setup Production environment for bench')
parser.add_argument('--site', dest='site', action='store', default='site1.local', help='Specifiy name for your first ERPNext site')
parser.add_argument('--without-site', dest='without_site', action='store_true', default=False)
parser.add_argument('--site', dest='site', action='store', default='site1.local', help='Specify name for your first ERPNext site')
parser.add_argument('--without-site', dest='without_site', action='store_true', default=False, help='Do not create a new site')
parser.add_argument('--verbose', dest='verbose', action='store_true', default=False, help='Run the script in verbose mode')
parser.add_argument('--user', dest='user', help='Install frappe-bench for this user')
parser.add_argument('--bench-branch', dest='bench_branch', help='Clone a particular branch of bench repository')
@ -1,24 +0,0 @@
{
"jasper_erpnext_report": {
"app_url": "http://localhost",
"app_name": "jasper_erpnext_report",
"app_icon": "icon-file-text",
"app_color": "black",
"app_description": "Make your own reports in jasper and print them in pdf, docx, xlsx and other formats.",
"app_publisher": "Luis Fernandes",
"repo_url": "https://github.com/saguas/jasper_erpnext_report.git",
"app_title": "Jasper Erpnext Report",
"app_version": "0.1.0"
},
"base_vat": {
"app_url": "http://localhost",
"app_name": "base_vat",
"app_icon": "icon-credit-card",
"app_color": "#C0C0C0",
"app_description": "Check the VAT number depending of the country.",
"app_publisher": "Luis Fernandes",
"repo_url": "https://github.com/saguas/frappe_base_vat.git",
"app_title": "Base VAT",
"app_version": "0.0.1"
}
}
@ -1,7 +0,0 @@
[
{
"url":"https://github.com/frappe/erpnext",
"name":"erpnext",
"branch": "master"
}
]
@ -1,6 +0,0 @@
[
{
"url":"https://github.com/frappe/erpnext",
"name":"erpnext"
}
]
@ -1,10 +1,9 @@
Click==7.0
GitPython==2.1.15
honcho==1.0.1
Jinja2==2.10.3
Jinja2==2.11.3
python-crontab==2.4.0
requests==2.22.0
semantic-version==2.8.2
setuptools
six
virtualenv
1
setup.py
@ -11,6 +11,7 @@ setup(
author_email='info@frappe.io',
version=VERSION,
packages=find_packages(),
python_requires='~=3.6',
zip_safe=False,
include_package_data=True,
install_requires=install_requires,
@ -6,7 +6,7 @@ message="
Please access ERPNext by going to http://localhost:8080 on the host system.
The username is \"Administrator\" and password is \"admin\"

Do consider donating at https://frappe.io/buy
Consider buying professional support from us at https://erpnext.com/support

To update, login as
username: frappe