Mirror of https://github.com/frappe/bench.git (synced 2025-02-10 14:48:35 +00:00)
[tests] test_init - run using - python -m unittest bench.tests.test_init
parent 14f42180cb
commit 68dc8cbfd4
@@ -88,31 +88,29 @@ def generate_nginx_config(bench='.'):
 	})
 
 	write_config_file(bench, 'nginx.conf', config)
 
-def generate_redis_celery_broker_config(bench='.', pid_db_save_path):
+def generate_redis_celery_broker_config(bench='.'):
 	"""Redis that is used for queueing celery tasks"""
 	_generate_redis_config(
 		template_name='redis_celery_broker.conf',
 		context={
 			"port": get_config().get('redis_async_broker_port', '12311'),
 			"bench_path": os.path.abspath(bench),
-			"pid_db_save_path": pid_db_save_path
 		},
 		bench=bench
 	)
 
-def generate_redis_async_broker_config(bench='.', pid_db_save_path):
+def generate_redis_async_broker_config(bench='.'):
 	"""Redis that is used to do pub/sub"""
 	_generate_redis_config(
 		template_name='redis_async_broker.conf',
 		context={
 			"port": get_config().get('redis_async_broker_port', '12311'),
-			"bench_path": os.path.abspath(bench),
-			"pid_db_save_path": pid_db_save_path
+			"bench_path": os.path.abspath(bench)
 		},
 		bench=bench
 	)
 
-def generate_redis_cache_config(bench='.', pid_db_save_path):
+def generate_redis_cache_config(bench='.'):
 	"""Redis that is used and optimized for caching"""
 	_generate_redis_config(
 		template_name='redis_cache.conf',
@@ -121,12 +119,15 @@ def generate_redis_cache_config(bench='.', pid_db_save_path):
 			"port": get_config().get('redis_cache_port', '11311'),
 			"redis_version": get_redis_version(),
 			"bench_path": os.path.abspath(bench),
-			"pid_db_save_path": pid_db_save_path
 		},
 		bench=bench
 	)
 
 def _generate_redis_config(template_name, context, bench):
 	template = env.get_template(template_name)
+
+	if "process_files_path" not in context:
+		context["process_files_path"] = os.path.abspath(os.path.join(bench, "config", "files"))
+
 	redis_config = template.render(**context)
 	write_config_file(bench, template_name, redis_config)
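Note: the net effect of the two hunks above is that _generate_redis_config now derives the Redis working directory from the bench path itself instead of requiring callers to thread pid_db_save_path through. A minimal standalone sketch of that fallback, using jinja2 directly and a hypothetical inline template in place of the real .conf templates:

import os
from jinja2 import Template

# Hypothetical inline template standing in for templates such as redis_cache.conf
TEMPLATE = "dir {{process_files_path}}\npidfile {{process_files_path}}/redis_cache.pid\nport {{port}}\n"

def render_redis_config(bench='.', context=None):
	context = dict(context or {})
	# Same fallback the commit introduces: default to <bench>/config/files
	if "process_files_path" not in context:
		context["process_files_path"] = os.path.abspath(os.path.join(bench, "config", "files"))
	return Template(TEMPLATE).render(**context)

print(render_redis_config(context={"port": "11311"}))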
@@ -1,5 +1,5 @@
 dbfilename redis_async_broker.rdb
-dir {{pid_db_save_path}}
-pidfile {{pid_db_save_path}}/redis_async_broker.pid
+dir {{process_files_path}}
+pidfile {{process_files_path}}/redis_async_broker.pid
 port {{port}}
 bench_path {{bench_path}}
@@ -1,6 +1,6 @@
 dbfilename redis_cache_dump.rdb
-dir {{pid_db_save_path}}
-pidfile {{pid_db_save_path}}/redis_cache.pid
+dir {{process_files_path}}
+pidfile {{process_files_path}}/redis_cache.pid
 port {{port}}
 maxmemory {{maxmemory}}mb
 maxmemory-policy allkeys-lru
@@ -1,5 +1,5 @@
-dbfilename redis_async_broker.rdb
-dir {{pid_db_save_path}}
-pidfile {{pid_db_save_path}}/redis_async_broker.pid
+dbfilename redis_celery_broker.rdb
+dir {{process_files_path}}
+pidfile {{process_files_path}}/redis_celery_broker.pid
 port {{port}}
 bench_path {{bench_path}}
bench/tests/__init__.py (new file, 0 lines)
bench/tests/test_init.py (new file, 89 lines)
@@ -0,0 +1,89 @@
+from __future__ import unicode_literals
+import unittest
+import bench
+import bench.utils
+import json
+import os
+import shutil
+
+class TestBenchInit(unittest.TestCase):
+	def setUp(self):
+		self.benches_path = "."
+		self.benches = []
+
+	def tearDown(self):
+		for bench_name in self.benches:
+			bench_path = os.path.join(self.benches_path, bench_name)
+			if os.path.exists(bench_path):
+				shutil.rmtree(bench_path)
+
+	def test_init(self, bench_name="test-bench"):
+		self.benches.append(bench_name)
+
+		bench.utils.init(bench_name)
+
+		# logging
+		self.assert_exists(bench_name, "logs", "bench.log")
+
+		self.assert_folders(bench_name)
+
+		self.assert_virtual_env(bench_name)
+
+		self.assert_bench_config(bench_name)
+
+		self.assert_config(bench_name)
+
+		self.assert_socketio(bench_name)
+
+	def test_multiple_benches(self):
+		self.test_init("test-bench-1")
+		self.assert_ports("test-bench-1")
+
+		self.test_init("test-bench-2")
+		self.assert_ports("test-bench-2")
+
+	def assert_folders(self, bench_name):
+		for folder in bench.utils.folders_in_bench:
+			self.assert_exists(bench_name, folder)
+
+		self.assert_exists(bench_name, "sites", "assets")
+		self.assert_exists(bench_name, "apps", "frappe")
+		self.assert_exists(bench_name, "apps", "frappe", "setup.py")
+
+	def assert_virtual_env(self, bench_name):
+		self.assert_exists(bench_name, "env", "lib", "python2.7")
+		self.assert_exists(bench_name, "env", "lib", "python2.7", "site-packages")
+		self.assert_exists(bench_name, "env", "lib", "python2.7", "site-packages", "IPython")
+		self.assert_exists(bench_name, "env", "lib", "python2.7", "site-packages", "MySQL_python-1.2.5.dist-info")
+		self.assert_exists(bench_name, "env", "lib", "python2.7", "site-packages", "pip")
+
+	def assert_bench_config(self, bench_name):
+		config_json = os.path.join(bench_name, "config.json")
+		self.assertTrue(os.path.exists(config_json))
+		with open(config_json, "r") as f:
+			config_dict = json.loads(f.read().decode("utf-8"))
+			for key, value in bench.utils.default_config.items():
+				self.assertEquals(config_dict.get(key), value)
+
+	def assert_config(self, bench_name):
+		for config, search_key in (
+			("redis_celery_broker.conf", "redis_celery_broker.rdb"),
+			("redis_async_broker.conf", "redis_async_broker.rdb"),
+			("redis_cache.conf", "redis_cache_dump.rdb")):
+
+			self.assert_exists(bench_name, "config", config)
+
+			with open(os.path.join(bench_name, "config", config), "r") as f:
+				content = f.read().decode("utf-8")
+				self.assertTrue(search_key in content)
+
+	def assert_socketio(self, bench_name):
+		self.assert_exists(bench_name, "node_modules")
+		self.assert_exists(bench_name, "node_modules", "socket.io")
+
+	def assert_ports(self, bench_name):
+		pass
+
+	def assert_exists(self, *args):
+		self.assertTrue(os.path.exists(os.path.join(*args)))
+
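Note: as the commit message says, the new suite is meant to be run with python -m unittest bench.tests.test_init from the repository root. A rough programmatic equivalent, assuming the bench package is importable in the current environment:

import unittest

# Load and run only the new TestBenchInit cases
suite = unittest.defaultTestLoader.loadTestsFromName("bench.tests.test_init")
unittest.TextTestRunner(verbosity=2).run(suite)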
@@ -33,6 +33,8 @@ default_config = {
 	'shallow_clone': True
 }
 
+folders_in_bench = ('apps', 'sites', 'config', 'logs', 'config/files')
+
 def get_frappe(bench='.'):
 	frappe = get_env_cmd('frappe', bench=bench)
 	if not os.path.exists(frappe):
@@ -56,13 +58,9 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
 	# sys.exit(1)
 
 	os.mkdir(path)
-	for dirname in ('apps', 'sites', 'config', 'logs'):
+	for dirname in folders_in_bench:
 		os.mkdir(os.path.join(path, dirname))
 
-	# This is folder to save the pid and redis db files for each redis process
-	pid_db_save_path = os.path.join(path, 'config', 'files')
-	os.mkdir(pid_db_save_path)
-
 	setup_logging()
 
 	setup_env(bench=path)
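Note: plain os.mkdir stays sufficient for the new 'config/files' entry only because folders_in_bench lists the parent 'config' before it, so the parent directory already exists by the time the child is created. A tiny sketch of that assumption, with a hypothetical bench directory name:

import os

folders_in_bench = ('apps', 'sites', 'config', 'logs', 'config/files')

path = "example-bench"  # hypothetical bench directory
os.mkdir(path)
for dirname in folders_in_bench:
	# 'config' is created on an earlier iteration, so 'config/files' succeeds
	os.mkdir(os.path.join(path, dirname))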
@@ -82,9 +80,9 @@ def init(path, apps_path=None, no_procfile=False, no_backups=False,
 	setup_socketio(bench=path)
 
 	build_assets(bench=path)
-	generate_redis_celery_broker_config(bench=path, pid_db_save_path)
-	generate_redis_cache_config(bench=path, pid_db_save_path)
-	generate_redis_async_broker_config(bench=path, pid_db_save_path)
+	generate_redis_celery_broker_config(bench=path)
+	generate_redis_cache_config(bench=path)
+	generate_redis_async_broker_config(bench=path)
 
 	if not no_procfile:
 		setup_procfile(bench=path)
@@ -122,9 +120,9 @@ def make_ports(benches_path="."):
 	# new port value = max of existing port value + 1
 	ports = {}
 	for key, value in default_ports.items():
-		existing_value = max(existing_ports.get(key, []))
+		existing_value = existing_ports.get(key, [])
 		if existing_value:
-			value = existing_value + 1
+			value = max(existing_value) + 1
 
 		ports[key] = value
 
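Note: the make_ports change above matters because max() on an empty list raises ValueError, which the old code hit whenever existing_ports had no entry for a key. A minimal sketch of the corrected allocation, with hypothetical default_ports/existing_ports values:

# Hypothetical inputs standing in for what make_ports collects from existing benches
default_ports = {"webserver_port": 8000, "redis_cache_port": 11311}
existing_ports = {"webserver_port": [8000, 8001]}  # nothing claims redis_cache_port yet

ports = {}
for key, value in default_ports.items():
	existing_value = existing_ports.get(key, [])
	if existing_value:
		# new port value = max of the existing port values + 1
		value = max(existing_value) + 1
	ports[key] = value

print(ports)  # {'webserver_port': 8002, 'redis_cache_port': 11311}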