
Merge remote-tracking branch 'origin/master' into nightly

Committed by Overhang.IO on 2022-07-29 18:40:39 +00:00
commit 31fdcc60af
8 changed files with 189 additions and 141 deletions


@@ -18,10 +18,14 @@ Every user-facing change should have an entry in this changelog. Please respect
 ## Unreleased
 
+## v14.0.4 (2022-07-29)
+
+- [Feature] Add the `-m/--mount` option to `tutor dev quickstart`.
+- [Bugfix] Fix `tutor dev start -m /path/to/frontend-app-learning` by introducing dev-specific `COMPOSE_DEV_TMP` and `COMPOSE_DEV_JOBS_TMP` filters (by @regisb).
 - [Bugfix] Log the shell commands that Tutor executes more accurately. (by @kdmccormick)
-- [Fix] `tutor dev quickstart` would fail under certain versions of docker-compose due to a bug in the logic that handled volume mounting. (by @kdmccormick)
+- [Bugfix] `tutor dev quickstart` would fail under certain versions of docker-compose due to a bug in the logic that handled volume mounting. (by @kdmccormick)
 - [Bugfix] The `tutor k8s start` command will succeed even when `k8s-override` and `kustomization-patches-strategic-merge` are not specified. (by @edazzocaisser)
-- [Fix] `kubectl wait` checks deployments instead of pods as it could hang indefinitely if there are extra pods in a broken state. (by @keithgg)
+- [BugFix] `kubectl wait` checks deployments instead of pods as it could hang indefinitely if there are extra pods in a broken state. (by @keithgg)
 
 ## v14.0.3 (2022-07-09)
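
As context for the `-m/--mount` entries above, the option accepts the same two argument forms that the `MountParam` refactor later in this diff splits into `convert_implicit_form` and `convert_explicit_form`. A minimal sketch of the parsing, assuming Tutor v14.0.4 is installed (the host paths are hypothetical):

    # Hedged sketch: parse the two --mount argument forms (paths are hypothetical).
    from tutor.commands import compose

    param = compose.MountParam()
    # Implicit form: "/host/path"; target services come from the COMPOSE_MOUNTS filter.
    implicit = param.convert_implicit_form("/path/to/edx-platform")
    # Explicit form: "services:/host/path:/container/path".
    explicit = param.convert_explicit_form(
        "lms,cms:/path/to/edx-platform:/openedx/edx-platform"
    )
    print(implicit, explicit)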


@@ -1,10 +1,13 @@
 import typing as t
 import unittest
+from io import StringIO
+from unittest.mock import patch
 
 from click.exceptions import ClickException
 
 from tutor import hooks
 from tutor.commands import compose
+from tutor.commands.local import LocalContext
 
 
 class ComposeTests(unittest.TestCase):
@@ -42,25 +45,22 @@ class ComposeTests(unittest.TestCase):
         with self.assertRaises(ClickException):
             param("lms,:/path/to/edx-platform:/openedx/edx-platform")
 
-    def test_compose_local_tmp_generation(self) -> None:
+    @patch("sys.stdout", new_callable=StringIO)
+    def test_compose_local_tmp_generation(self, _mock_stdout: StringIO) -> None:
         """
-        Ensure that docker-compose.tmp.yml is correctly generated, even when
-        mounts are processed more than once.
+        Ensure that docker-compose.tmp.yml is correctly generated.
         """
         param = compose.MountParam()
-        mount_args: t.Tuple[t.List[compose.MountParam.MountType], ...] = (
+        mount_args = (
             # Auto-mounting of edx-platform to lms* and cms*
-            param("/path/to/edx-platform"),
+            param.convert_implicit_form("/path/to/edx-platform"),
             # Manual mounting of some other folder to mfe and lms
-            param("mfe,lms:/path/to/something-else:/openedx/something-else"),
+            param.convert_explicit_form(
+                "mfe,lms:/path/to/something-else:/openedx/something-else"
+            ),
         )
-        # In some cases, process_mount_arguments ends up being called more
-        # than once. To ensure that we can handle that situation, we call it
-        # multiple times here.
-        compose.process_mount_arguments(mount_args)
-        compose.process_mount_arguments(mount_args)
-        compose.process_mount_arguments(mount_args)
+        # Mount volumes
+        compose.mount_tmp_volumes(mount_args, LocalContext(""))
         compose_file: t.Dict[str, t.Any] = hooks.Filters.COMPOSE_LOCAL_TMP.apply({})
         actual_services: t.Dict[str, t.Any] = compose_file["services"]
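
For orientation, the dict returned by `COMPOSE_LOCAL_TMP.apply({})` in this test has roughly the following shape. The exact service list comes from the `COMPOSE_MOUNTS` callbacks, so the entries below are illustrative only (worker and job services omitted):

    # Illustrative shape only; real service names depend on COMPOSE_MOUNTS callbacks.
    illustrative_services = {
        "lms": {
            "volumes": [
                "/path/to/edx-platform:/openedx/edx-platform",
                "/path/to/something-else:/openedx/something-else",
            ]
        },
        "cms": {"volumes": ["/path/to/edx-platform:/openedx/edx-platform"]},
        "mfe": {"volumes": ["/path/to/something-else:/openedx/something-else"]},
    }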


@@ -2,7 +2,7 @@ import os
 
 # Increment this version number to trigger a new release. See
 # docs/tutor.html#versioning for information on the versioning scheme.
-__version__ = "14.0.3"
+__version__ = "14.0.4"
 
 # The version suffix will be appended to the actual version, separated by a
 # dash. Use this suffix to differentiate between the actual released version and


@@ -1,6 +1,7 @@
 import os
 import re
 import typing as t
+from copy import deepcopy
 
 import click
@@ -19,8 +20,6 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
         self.project_name = ""
         self.docker_compose_files: t.List[str] = []
         self.docker_compose_job_files: t.List[str] = []
-        self.docker_compose_tmp_path = ""
-        self.docker_compose_jobs_tmp_path = ""
 
     def docker_compose(self, *command: str) -> int:
         """
@@ -32,7 +31,6 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
         hooks.Actions.COMPOSE_PROJECT_STARTED.do(
             self.root, self.config, self.project_name
         )
-        self.__update_docker_compose_tmp()
         args = []
         for docker_compose_path in self.docker_compose_files:
             if os.path.exists(docker_compose_path):
@@ -41,33 +39,45 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
             *args, "--project-name", self.project_name, *command
         )
 
-    def __update_docker_compose_tmp(self) -> None:
+    def update_docker_compose_tmp(
+        self,
+        compose_tmp_filter: hooks.filters.Filter,
+        compose_jobs_tmp_filter: hooks.filters.Filter,
+        docker_compose_tmp_path: str,
+        docker_compose_jobs_tmp_path: str,
+    ) -> None:
         """
-        Update the contents of the docker-compose.tmp.yml file, which is generated at runtime.
+        Update the contents of the docker-compose.tmp.yml and
+        docker-compose.jobs.tmp.yml files, which are generated at runtime.
         """
-        docker_compose_tmp = {
+        compose_base = {
             "version": "{{ DOCKER_COMPOSE_VERSION }}",
             "services": {},
         }
-        docker_compose_jobs_tmp = {
-            "version": "{{ DOCKER_COMPOSE_VERSION }}",
-            "services": {},
-        }
-        docker_compose_tmp = hooks.Filters.COMPOSE_LOCAL_TMP.apply(docker_compose_tmp)
-        docker_compose_jobs_tmp = hooks.Filters.COMPOSE_LOCAL_JOBS_TMP.apply(
-            docker_compose_jobs_tmp
-        )
-        docker_compose_tmp = tutor_env.render_unknown(self.config, docker_compose_tmp)
-        docker_compose_jobs_tmp = tutor_env.render_unknown(
-            self.config, docker_compose_jobs_tmp
+
+        # 1. Apply compose_tmp filter
+        # 2. Render the resulting dict
+        # 3. Serialize to yaml
+        # 4. Save to disk
+        docker_compose_tmp: str = serialize.dumps(
+            tutor_env.render_unknown(
+                self.config, compose_tmp_filter.apply(deepcopy(compose_base))
+            )
         )
         tutor_env.write_to(
-            serialize.dumps(docker_compose_tmp),
-            self.docker_compose_tmp_path,
+            docker_compose_tmp,
+            docker_compose_tmp_path,
+        )
+
+        # Same thing but with tmp jobs
+        docker_compose_jobs_tmp: str = serialize.dumps(
+            tutor_env.render_unknown(
+                self.config, compose_jobs_tmp_filter.apply(deepcopy(compose_base))
+            )
         )
         tutor_env.write_to(
-            serialize.dumps(docker_compose_jobs_tmp),
-            self.docker_compose_jobs_tmp_path,
+            docker_compose_jobs_tmp,
+            docker_compose_jobs_tmp_path,
         )
 
     def run_job(self, service: str, command: str) -> int:
@@ -95,6 +105,9 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
 
 
 class BaseComposeContext(BaseJobContext):
+    COMPOSE_TMP_FILTER: hooks.filters.Filter = NotImplemented
+    COMPOSE_JOBS_TMP_FILTER: hooks.filters.Filter = NotImplemented
+
     def job_runner(self, config: Config) -> ComposeJobRunner:
         raise NotImplementedError
@@ -117,32 +130,43 @@ class MountParam(click.ParamType):
         param: t.Optional["click.Parameter"],
         ctx: t.Optional[click.Context],
     ) -> t.List["MountType"]:
-        mounts: t.List["MountParam.MountType"] = []
+        mounts = self.convert_explicit_form(value) or self.convert_implicit_form(value)
+        return mounts
+
+    def convert_explicit_form(self, value: str) -> t.List["MountParam.MountType"]:
+        """
+        Argument is of the form "containers:/host/path:/container/path".
+        """
         match = re.match(self.PARAM_REGEXP, value)
-        if match:
-            # Argument is of the form "containers:/host/path:/container/path"
-            services: t.List[str] = [
-                service.strip() for service in match["services"].split(",")
-            ]
-            host_path = os.path.abspath(os.path.expanduser(match["host_path"]))
-            host_path = host_path.replace(os.path.sep, "/")
-            container_path = match["container_path"]
-            for service in services:
-                if not service:
-                    self.fail(
-                        f"incorrect services syntax: '{match['services']}'", param, ctx
-                    )
-                mounts.append((service, host_path, container_path))
-        else:
-            # Argument is of the form "/host/path"
-            host_path = os.path.abspath(os.path.expanduser(value))
-            volumes: t.Iterator[
-                t.Tuple[str, str]
-            ] = hooks.Filters.COMPOSE_MOUNTS.iterate(os.path.basename(host_path))
-            for service, container_path in volumes:
-                mounts.append((service, host_path, container_path))
+        if not match:
+            return []
+
+        mounts: t.List["MountParam.MountType"] = []
+        services: t.List[str] = [
+            service.strip() for service in match["services"].split(",")
+        ]
+        host_path = os.path.abspath(os.path.expanduser(match["host_path"]))
+        host_path = host_path.replace(os.path.sep, "/")
+        container_path = match["container_path"]
+        for service in services:
+            if not service:
+                self.fail(f"incorrect services syntax: '{match['services']}'")
+            mounts.append((service, host_path, container_path))
+        return mounts
+
+    def convert_implicit_form(self, value: str) -> t.List["MountParam.MountType"]:
+        """
+        Argument is of the form "/host/path"
+        """
+        mounts: t.List["MountParam.MountType"] = []
+        host_path = os.path.abspath(os.path.expanduser(value))
+        volumes: t.Iterator[t.Tuple[str, str]] = hooks.Filters.COMPOSE_MOUNTS.iterate(
+            os.path.basename(host_path)
+        )
+        for service, container_path in volumes:
+            mounts.append((service, host_path, container_path))
         if not mounts:
-            raise self.fail(f"no mount found for {value}", param, ctx)
+            raise self.fail(f"no mount found for {value}")
         return mounts
@@ -156,6 +180,48 @@ mount_option = click.option(
 )
 
 
+def mount_tmp_volumes(
+    all_mounts: t.Tuple[t.List[MountParam.MountType], ...],
+    context: BaseComposeContext,
+) -> None:
+    for mounts in all_mounts:
+        for service, host_path, container_path in mounts:
+            mount_tmp_volume(service, host_path, container_path, context)
+
+
+def mount_tmp_volume(
+    service: str,
+    host_path: str,
+    container_path: str,
+    context: BaseComposeContext,
+) -> None:
+    """
+    Append user-defined bind-mounted volumes to the docker-compose.tmp file(s).
+
+    The service/host path/container path values are appended to the docker-compose
+    files by mean of two filters. Each dev/local environment is then responsible for
+    generating the files based on the output of these filters.
+
+    Bind-mounts that are associated to "*-job" services will be added to the
+    docker-compose jobs file.
+    """
+    fmt.echo_info(f"Bind-mount: {host_path} -> {container_path} in {service}")
+    compose_tmp_filter: hooks.filters.Filter = (
+        context.COMPOSE_JOBS_TMP_FILTER
+        if service.endswith("-job")
+        else context.COMPOSE_TMP_FILTER
+    )
+
+    @compose_tmp_filter.add()
+    def _add_mounts_to_docker_compose_tmp(
+        docker_compose: t.Dict[str, t.Any],
+    ) -> t.Dict[str, t.Any]:
+        services = docker_compose.setdefault("services", {})
+        services.setdefault(service, {"volumes": []})
+        services[service]["volumes"].append(f"{host_path}:{container_path}")
+        return docker_compose
+
+
 @click.command(
     short_help="Run all or a selection of services.",
     help="Run all or a selection of services. Docker images will be rebuilt where necessary.",
@@ -178,9 +244,8 @@ def start(
     if detach:
         command.append("-d")
 
-    process_mount_arguments(mounts)
-
     # Start services
+    mount_tmp_volumes(mounts, context)
     config = tutor_config.load(context.root)
     context.job_runner(config).docker_compose(*command, *services)
@@ -240,7 +305,7 @@ def init(
     limit: str,
     mounts: t.Tuple[t.List[MountParam.MountType]],
 ) -> None:
-    process_mount_arguments(mounts)
+    mount_tmp_volumes(mounts, context)
     config = tutor_config.load(context.root)
     runner = context.job_runner(config)
     jobs.initialise(runner, limit_to=limit)
@@ -321,11 +386,10 @@ def run(
     mounts: t.Tuple[t.List[MountParam.MountType]],
     args: t.List[str],
 ) -> None:
-    process_mount_arguments(mounts)
-
     extra_args = ["--rm"]
     if not utils.is_a_tty():
         extra_args.append("-T")
-    context.invoke(dc_command, command="run", args=[*extra_args, *args])
+    context.invoke(dc_command, mounts=mounts, command="run", args=[*extra_args, *args])
 
 
 @click.command(
@@ -444,10 +508,17 @@ def status(context: click.Context) -> None:
     context_settings={"ignore_unknown_options": True},
     name="dc",
 )
+@mount_option
 @click.argument("command")
 @click.argument("args", nargs=-1)
 @click.pass_obj
-def dc_command(context: BaseComposeContext, command: str, args: t.List[str]) -> None:
+def dc_command(
+    context: BaseComposeContext,
+    mounts: t.Tuple[t.List[MountParam.MountType]],
+    command: str,
+    args: t.List[str],
+) -> None:
+    mount_tmp_volumes(mounts, context)
     config = tutor_config.load(context.root)
     volumes, non_volume_args = bindmounts.parse_volumes(args)
     volume_args = []
@@ -465,64 +536,6 @@ def dc_command(context: BaseComposeContext, command: str, args: t.List[str]) ->
     context.job_runner(config).docker_compose(command, *volume_args, *non_volume_args)
 
 
-def process_mount_arguments(mounts: t.Tuple[t.List[MountParam.MountType], ...]) -> None:
-    """
-    Process --mount arguments.
-
-    Most docker-compose commands support --mount arguments. This option
-    is used to bind-mount folders from the host. A docker-compose.tmp.yml is
-    generated at runtime and includes the bind-mounted volumes that were passed as CLI
-    arguments.
-
-    Bind-mounts that are associated to "*-job" services will be added to the
-    docker-compose jobs file.
-    """
-    app_mounts: t.List[MountParam.MountType] = []
-    job_mounts: t.List[MountParam.MountType] = []
-    for mount in mounts:
-        for service, host_path, container_path in mount:
-            if service.endswith("-job"):
-                job_mounts.append((service, host_path, container_path))
-            else:
-                app_mounts.append((service, host_path, container_path))
-
-    def _add_mounts(
-        docker_compose: t.Dict[str, t.Any], bind_mounts: t.List[MountParam.MountType]
-    ) -> t.Dict[str, t.Any]:
-        services = docker_compose.setdefault("services", {})
-        for service, host_path, container_path in bind_mounts:
-            fmt.echo_info(f"Bind-mount: {host_path} -> {container_path} in {service}")
-            services.setdefault(service, {"volumes": []})
-            services[service]["volumes"].append(f"{host_path}:{container_path}")
-        return docker_compose
-
-    # Clear filter callbacks already created within the COMPOSE_CLI_MOUNTS context.
-    # This prevents us from redundantly specifying these volume mounts in cases
-    # where process_mount_arguments is called more than once.
-    hooks.Filters.COMPOSE_LOCAL_TMP.clear(
-        context=hooks.Contexts.COMPOSE_CLI_MOUNTS.name
-    )
-    hooks.Filters.COMPOSE_LOCAL_JOBS_TMP.clear(
-        context=hooks.Contexts.COMPOSE_CLI_MOUNTS.name
-    )
-
-    # Now, within that COMPOSE_CLI_MOUNTS context, (re-)create filter callbacks to
-    # specify these volume mounts within the docker-compose[.jobs].tmp.yml files.
-    with hooks.Contexts.COMPOSE_CLI_MOUNTS.enter():
-
-        @hooks.Filters.COMPOSE_LOCAL_TMP.add()
-        def _add_mounts_to_docker_compose_tmp(
-            docker_compose_tmp: t.Dict[str, t.Any]
-        ) -> t.Dict[str, t.Any]:
-            return _add_mounts(docker_compose_tmp, app_mounts)
-
-        @hooks.Filters.COMPOSE_LOCAL_JOBS_TMP.add()
-        def _add_mounts_to_docker_compose_jobs_tmp(
-            docker_compose_jobs_tmp: t.Dict[str, t.Any]
-        ) -> t.Dict[str, t.Any]:
-            return _add_mounts(docker_compose_jobs_tmp, job_mounts)
-
-
 @hooks.Filters.COMPOSE_MOUNTS.add()
 def _mount_edx_platform(
     volumes: t.List[t.Tuple[str, str]], name: str


@@ -18,29 +18,39 @@ class DevJobRunner(compose.ComposeJobRunner):
         """
         super().__init__(root, config)
         self.project_name = get_typed(self.config, "DEV_PROJECT_NAME", str)
-        self.docker_compose_tmp_path = tutor_env.pathjoin(
+        docker_compose_tmp_path = tutor_env.pathjoin(
             self.root, "dev", "docker-compose.tmp.yml"
         )
-        self.docker_compose_jobs_tmp_path = tutor_env.pathjoin(
+        docker_compose_jobs_tmp_path = tutor_env.pathjoin(
             self.root, "dev", "docker-compose.jobs.tmp.yml"
         )
         self.docker_compose_files += [
             tutor_env.pathjoin(self.root, "local", "docker-compose.yml"),
             tutor_env.pathjoin(self.root, "dev", "docker-compose.yml"),
-            self.docker_compose_tmp_path,
+            docker_compose_tmp_path,
             tutor_env.pathjoin(self.root, "local", "docker-compose.override.yml"),
             tutor_env.pathjoin(self.root, "dev", "docker-compose.override.yml"),
         ]
         self.docker_compose_job_files += [
             tutor_env.pathjoin(self.root, "local", "docker-compose.jobs.yml"),
             tutor_env.pathjoin(self.root, "dev", "docker-compose.jobs.yml"),
-            self.docker_compose_jobs_tmp_path,
+            docker_compose_jobs_tmp_path,
             tutor_env.pathjoin(self.root, "local", "docker-compose.jobs.override.yml"),
             tutor_env.pathjoin(self.root, "dev", "docker-compose.jobs.override.yml"),
         ]
+        # Update docker-compose.tmp files
+        self.update_docker_compose_tmp(
+            hooks.Filters.COMPOSE_DEV_TMP,
+            hooks.Filters.COMPOSE_DEV_JOBS_TMP,
+            docker_compose_tmp_path,
+            docker_compose_jobs_tmp_path,
+        )
 
 
 class DevContext(compose.BaseComposeContext):
+    COMPOSE_TMP_FILTER = hooks.Filters.COMPOSE_DEV_TMP
+    COMPOSE_JOBS_TMP_FILTER = hooks.Filters.COMPOSE_DEV_JOBS_TMP
+
     def job_runner(self, config: Config) -> DevJobRunner:
         return DevJobRunner(self.root, config)
@@ -54,8 +64,15 @@ def dev(context: click.Context) -> None:
 @click.command(help="Configure and run Open edX from scratch, for development")
 @click.option("-I", "--non-interactive", is_flag=True, help="Run non-interactively")
 @click.option("-p", "--pullimages", is_flag=True, help="Update docker images")
+@compose.mount_option
 @click.pass_context
-def quickstart(context: click.Context, non_interactive: bool, pullimages: bool) -> None:
+def quickstart(
+    context: click.Context,
+    non_interactive: bool,
+    pullimages: bool,
+    mounts: t.Tuple[t.List[compose.MountParam.MountType]],
+) -> None:
+    compose.mount_tmp_volumes(mounts, context.obj)
     try:
         utils.check_macos_docker_memory()
     except exceptions.TutorError as e:
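
Because the runner now calls `update_docker_compose_tmp()` from its constructor, simply building a `DevJobRunner` regenerates the dev tmp files. A minimal sketch, assuming a working Tutor root (the path is hypothetical):

    # Hedged sketch: constructing the runner rewrites dev/docker-compose.tmp.yml
    # and dev/docker-compose.jobs.tmp.yml as a side effect (root path is hypothetical).
    from tutor import config as tutor_config
    from tutor.commands.dev import DevJobRunner

    root = "/path/to/tutor-root"
    config = tutor_config.load(root)
    runner = DevJobRunner(root, config)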


@@ -20,27 +20,38 @@ class LocalJobRunner(compose.ComposeJobRunner):
         """
         super().__init__(root, config)
         self.project_name = get_typed(self.config, "LOCAL_PROJECT_NAME", str)
-        self.docker_compose_tmp_path = tutor_env.pathjoin(
+        docker_compose_tmp_path = tutor_env.pathjoin(
             self.root, "local", "docker-compose.tmp.yml"
         )
-        self.docker_compose_jobs_tmp_path = tutor_env.pathjoin(
+        docker_compose_jobs_tmp_path = tutor_env.pathjoin(
             self.root, "local", "docker-compose.jobs.tmp.yml"
         )
         self.docker_compose_files += [
             tutor_env.pathjoin(self.root, "local", "docker-compose.yml"),
             tutor_env.pathjoin(self.root, "local", "docker-compose.prod.yml"),
-            self.docker_compose_tmp_path,
+            docker_compose_tmp_path,
             tutor_env.pathjoin(self.root, "local", "docker-compose.override.yml"),
         ]
         self.docker_compose_job_files += [
             tutor_env.pathjoin(self.root, "local", "docker-compose.jobs.yml"),
-            self.docker_compose_jobs_tmp_path,
+            docker_compose_jobs_tmp_path,
             tutor_env.pathjoin(self.root, "local", "docker-compose.jobs.override.yml"),
         ]
+        # Update docker-compose.tmp files
+        self.update_docker_compose_tmp(
+            hooks.Filters.COMPOSE_LOCAL_TMP,
+            hooks.Filters.COMPOSE_LOCAL_JOBS_TMP,
+            docker_compose_tmp_path,
+            docker_compose_jobs_tmp_path,
+        )
 
 
 # pylint: disable=too-few-public-methods
 class LocalContext(compose.BaseComposeContext):
+    COMPOSE_TMP_FILTER = hooks.Filters.COMPOSE_LOCAL_TMP
+    COMPOSE_JOBS_TMP_FILTER = hooks.Filters.COMPOSE_LOCAL_JOBS_TMP
+
     def job_runner(self, config: Config) -> LocalJobRunner:
         return LocalJobRunner(self.root, config)
@@ -62,6 +73,7 @@ def quickstart(
     non_interactive: bool,
     pullimages: bool,
 ) -> None:
+    compose.mount_tmp_volumes(mounts, context.obj)
     try:
         utils.check_macos_docker_memory()
     except exceptions.TutorError as e:
@@ -128,9 +140,9 @@ Press enter when you are ready to continue"""
     click.echo(fmt.title("Docker image updates"))
     context.invoke(compose.dc_command, command="pull")
     click.echo(fmt.title("Starting the platform in detached mode"))
-    context.invoke(compose.start, mounts=mounts, detach=True)
+    context.invoke(compose.start, detach=True)
     click.echo(fmt.title("Database creation and migrations"))
-    context.invoke(compose.init, mounts=mounts)
+    context.invoke(compose.init)
     config = tutor_config.load(context.obj.root)
     fmt.echo_info(


@@ -121,6 +121,12 @@ class Filters:
     #: :parameter list[tuple[str, tuple[str, ...]]] tasks: list of ``(service, path)`` tasks. (see :py:data:`COMMANDS_INIT`).
     COMMANDS_PRE_INIT = filters.get("commands:pre-init")
 
+    #: Same as :py:data:`COMPOSE_LOCAL_TMP` but for the development environment.
+    COMPOSE_DEV_TMP = filters.get("compose:dev:tmp")
+
+    #: Same as :py:data:`COMPOSE_LOCAL_JOBS_TMP` but for the development environment.
+    COMPOSE_DEV_JOBS_TMP = filters.get("compose:dev-jobs:tmp")
+
     #: List of folders to bind-mount in docker-compose containers, either in ``tutor local`` or ``tutor dev``.
     #:
     #: Many ``tutor local`` and ``tutor dev`` commands support ``--mounts`` options
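
The new dev filters follow the same contract as their `local` counterparts: callbacks receive the docker-compose dict and must return it. A hedged sketch of how a plugin might hook into `COMPOSE_DEV_TMP` (the service name and volume are hypothetical):

    # Hedged sketch: add a bind-mounted volume to dev/docker-compose.tmp.yml.
    import typing as t
    from tutor import hooks

    @hooks.Filters.COMPOSE_DEV_TMP.add()
    def _mount_my_code(docker_compose: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
        services = docker_compose.setdefault("services", {})
        services.setdefault("lms", {"volumes": []})
        services["lms"]["volumes"].append("/path/to/my-code:/openedx/my-code")
        return docker_compose
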
@@ -359,7 +365,3 @@ class Contexts:
 
     #: Python entrypoint plugins will be installed within this context.
     PLUGINS_V0_ENTRYPOINT = contexts.Context("plugins:v0:entrypoint")
-
-    #: Docker Compose volumes added via the CLI's ``--mount`` option will
-    #: be installed within this context.
-    COMPOSE_CLI_MOUNTS = contexts.Context("compose:cli:mounts")


@@ -1,5 +1,4 @@
 import base64
-from functools import lru_cache
 import json
 import os
 import random
@@ -9,6 +8,7 @@ import string
 import struct
 import subprocess
 import sys
+from functools import lru_cache
 from typing import List, Tuple
 
 import click