feat: strongly typed hooks
Now that the mypy bugs have been resolved, we are able to define the types of Actions and Filters more precisely and cleanly. Moreover, we can now strongly type the named actions and filters (in consts.py). With such strong typing, we get early alerts whenever hooks are called with incorrect arguments, which is nothing short of awesome :)

This change breaks the hooks API by removing the `context=...` argument. The reason is that we cannot insert arbitrary keyword arguments between `P.args` and `P.kwargs`: https://peps.python.org/pep-0612/#the-components-of-a-paramspec

> A function declared as def inner(a: A, b: B, *args: P.args, **kwargs: P.kwargs) -> R has type Callable[Concatenate[A, B, P], R]. Placing keyword-only parameters between the *args and **kwargs is forbidden.

Getting the documentation to build in nitpicky mode is quite difficult... We had to add `nitpick_ignore` to the docs conf.py, otherwise Sphinx complains about many missing class references -- this despite upgrading almost all doc requirements (except docutils).
parent 8aeeb7e09c
commit 33e4f33afe
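The PEP 612 constraint quoted above is the reason `context=...` had to move out of the hook signatures. The following is a minimal, hypothetical sketch (not code from this commit) of the forbidden versus the allowed placement once callbacks are typed with a ParamSpec:

import typing as t

from typing_extensions import ParamSpec

P = ParamSpec("P")


class TypedAction(t.Generic[P]):
    """Illustrative stand-in for the strongly typed Action class."""

    def __init__(self) -> None:
        self.callbacks: t.List[t.Callable[P, None]] = []

    # Forbidden by PEP 612: a keyword-only parameter between *args and **kwargs.
    # def do(self, *args: P.args, context: t.Optional[str] = None, **kwargs: P.kwargs) -> None:
    #     ...

    # Allowed: extra parameters placed *before* *args/**kwargs, which is what the
    # new do_from_context(context, ...) API does (context filtering omitted here).
    def do_from_context(
        self, context: t.Optional[str], *args: P.args, **kwargs: P.kwargs
    ) -> None:
        for callback in self.callbacks:
            callback(*args, **kwargs)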
@@ -18,6 +18,8 @@ Every user-facing change should have an entry in this changelog. Please respect
 
 ## Unreleased
 
+- 💥[Feature] Strong typing of action and filter hooks: this allows us to detect incorrect calls to `actions.add` or `filters.add` early. Strong typing forces us to break the `do` and `apply` API by removing the `context` named argument. Developers should replace `do(context=...)` with `do_from_context(...)` (and similarly for `apply`).
+
 ## v14.1.2 (2022-11-02)
 
 - [Security] Fix edx-platform XSS vulnerability on "next" parameter. (by @regisb)
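In practice the migration for plugin code is mechanical. A hedged sketch (the hook names and context string below are made-up placeholders, not hooks that ship with Tutor):

from tutor import hooks

# Old API (before this change):
#   hooks.actions.do("myplugin:action", 42, context="myplugin-context")
#   hooks.filters.apply("myplugin:filter", [], context="myplugin-context")

# New API: the context becomes the first positional argument of the
# *_from_context variants; plain do/apply no longer accept `context`.
hooks.actions.do("myplugin:action", 42)
hooks.actions.do_from_context("myplugin-context", "myplugin:action", 42)

hooks.filters.apply("myplugin:filter", [])
hooks.filters.apply_from_context("myplugin-context", "myplugin:filter", [])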
@@ -5,7 +5,7 @@ build:
 	sphinx-build -b html -a -E -n $(BUILD_ARGS) "." "_build/html"
 
 html:
-	$(MAKE) build BUILD_ARGS="-W"
+	$(MAKE) build BUILD_ARGS="-W --keep-going"
 
 browse:
 	sensible-browser _build/html/index.html
docs/conf.py
@@ -23,13 +23,22 @@ extensions = []
 templates_path = ["_templates"]
 source_suffix = ".rst"
 master_doc = "index"
-language = None
+language = "en"
 exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 pygments_style = None
 
 # Autodocumentation of modules
 extensions.append("sphinx.ext.autodoc")
 autodoc_typehints = "description"
+# For the life of me I can't get the docs to compile in nitpicky mode without these
+# ignore statements. You are most welcome to try and remove them.
+nitpick_ignore = [
+    ("py:class", "Config"),
+    ("py:class", "click.Command"),
+    ("py:class", "tutor.hooks.filters.P"),
+    ("py:class", "tutor.hooks.filters.T"),
+    ("py:class", "tutor.hooks.actions.P"),
+]
 
 # -- Sphinx-Click configuration
 # https://sphinx-click.readthedocs.io/
@@ -10,6 +10,7 @@ Actions are one of the two types of hooks (the other being :ref:`filters`) that
 .. autofunction:: tutor.hooks.actions::get_template
 .. autofunction:: tutor.hooks.actions::add
 .. autofunction:: tutor.hooks.actions::do
+.. autofunction:: tutor.hooks.actions::do_from_context
 .. autofunction:: tutor.hooks.actions::clear
 .. autofunction:: tutor.hooks.actions::clear_all
@@ -12,7 +12,9 @@ Filters are one of the two types of hooks (the other being :ref:`actions`) that
 .. autofunction:: tutor.hooks.filters::add_item
 .. autofunction:: tutor.hooks.filters::add_items
 .. autofunction:: tutor.hooks.filters::apply
+.. autofunction:: tutor.hooks.filters::apply_from_context
 .. autofunction:: tutor.hooks.filters::iterate
+.. autofunction:: tutor.hooks.filters::iterate_from_context
 .. autofunction:: tutor.hooks.filters::clear
 .. autofunction:: tutor.hooks.filters::clear_all
Compiled base requirements (requirements/base.txt, from requirements/base.in) -- dependency upgrades:
  cachetools 4.2.4 → 5.2.0
  certifi 2021.10.8 → 2022.9.24
  charset-normalizer 2.0.7 → 2.1.1
  click 8.0.3 → 8.1.3
  google-auth 2.3.1 → 2.14.1
  idna 3.3 → 3.4
  jinja2 3.0.2 → 3.1.2
  kubernetes 18.20.0 → 25.3.0
  markupsafe 2.0.1 → 2.1.1
  mypy 0.942 → 0.990
  oauthlib 3.2.1 → 3.2.2
  pycryptodome 3.11.0 → 3.15.0
  requests 2.26.0 → 2.28.1
  requests-oauthlib 1.3.0 → 1.3.1
  rsa 4.7.2 → 4.9
  typing-extensions 3.10.0.2 → 4.4.0
  urllib3 1.26.7 → 1.26.12
  websocket-client 1.2.1 → 1.4.2
requirements/dev.in
@@ -6,6 +6,10 @@ pyinstaller
 twine
 coverage
 
+# doc requirement is lagging behind
+# https://github.com/readthedocs/sphinx_rtd_theme/issues/1323
+docutils<0.18
+
 # Types packages
 types-docutils
 types-PyYAML
Compiled development requirements (requirements/dev.txt, from requirements/dev.in) -- dependency upgrades:
  altgraph 0.17.2 → 0.17.3
  astroid 2.8.3 → 2.12.12
  black 22.1.0 → 22.10.0
  bleach 4.1.0 → 5.0.1
  build 0.8.0 → 0.9.0
  cachetools 4.2.4 → 5.2.0
  certifi 2021.10.8 → 2022.9.24
  cffi 1.15.0 → 1.15.1
  charset-normalizer 2.0.7 → 2.1.1
  click 8.0.3 → 8.1.3
  coverage 6.2 → 6.5.0
  cryptography 35.0.0 → 38.0.3
  google-auth 2.3.1 → 2.14.1
  idna 3.3 → 3.4
  importlib-metadata 4.8.1 → 5.0.0
  isort 5.9.3 → 5.10.1
  jeepney 0.7.1 → 0.8.0
  jinja2 3.0.2 → 3.1.2
  keyring 23.2.1 → 23.11.0
  kubernetes 18.20.0 → 25.3.0
  lazy-object-proxy 1.6.0 → 1.8.0
  markupsafe 2.0.1 → 2.1.1
  mccabe 0.6.1 → 0.7.0
  mypy 0.942 → 0.990
  oauthlib 3.2.1 → 3.2.2
  packaging 21.0 → 21.3
  pathspec 0.9.0 → 0.10.1
  pep517 0.12.0 → 0.13.0
  pip-tools 6.8.0 → 6.9.0
  pkginfo 1.7.1 → 1.8.3
  platformdirs 2.4.0 → 2.5.3
  pycparser 2.20 → 2.21
  pycryptodome 3.11.0 → 3.15.0
  pygments 2.10.0 → 2.13.0
  pyinstaller 4.5.1 → 5.6.2
  pyinstaller-hooks-contrib 2021.3 → 2022.13
  pylint 2.11.1 → 2.15.5
  pyparsing 3.0.1 → 3.0.9
  readme-renderer 30.0 → 37.3
  requests 2.26.0 → 2.28.1
  requests-oauthlib 1.3.0 → 1.3.1
  requests-toolbelt 0.9.1 → 0.10.1
  rfc3986 1.5.0 → 2.0.0
  rsa 4.7.2 → 4.9
  secretstorage 3.3.1 → 3.3.3
  twine 3.4.2 → 4.0.1
  types-docutils 0.18.0 → 0.19.1.1
  types-pyyaml 6.0.0 → 6.0.12.2
  types-setuptools 57.4.2 → 65.5.0.2
  typing-extensions 3.10.0.2 → 4.4.0
  urllib3 1.26.7 → 1.26.12
  websocket-client 1.2.1 → 1.4.2
  wheel 0.37.0 → 0.38.4
  wrapt 1.13.2 → 1.14.1
  zipp 3.6.0 → 3.10.0
New dependencies: commonmark 0.9.1 (via rich), dill 0.3.6 (via pylint), jaraco-classes 3.2.3 (via keyring), more-itertools 9.0.0 (via jaraco-classes), rich 12.6.0 (via twine), tomlkit 0.11.6 (via pylint).
Removed dependencies: toml 0.10.2, tqdm 4.62.3.
Compiled documentation requirements (requirements/docs.txt, from requirements/docs.in) -- dependency upgrades:
  babel 2.9.1 → 2.11.0
  cachetools 4.2.4 → 5.2.0
  certifi 2021.10.8 → 2022.9.24
  charset-normalizer 2.0.7 → 2.1.1
  click 8.0.3 → 8.1.3
  google-auth 2.3.1 → 2.14.1
  idna 3.3 → 3.4
  imagesize 1.2.0 → 1.4.1
  jinja2 3.0.2 → 3.1.2
  kubernetes 18.20.0 → 25.3.0
  markupsafe 2.0.1 → 2.1.1
  mypy 0.942 → 0.990
  oauthlib 3.2.1 → 3.2.2
  packaging 21.0 → 21.3
  pycryptodome 3.11.0 → 3.15.0
  pygments 2.10.0 → 2.13.0
  pyparsing 3.0.1 → 3.0.9
  pytz 2021.3 → 2022.6
  requests 2.26.0 → 2.28.1
  requests-oauthlib 1.3.0 → 1.3.1
  rsa 4.7.2 → 4.9
  snowballstemmer 2.1.0 → 2.2.0
  sphinx 4.2.0 → 5.3.0
  sphinx-click 3.0.1 → 4.3.0
  sphinx-rtd-theme 1.0.0 → 1.1.1
  typing-extensions 3.10.0.2 → 4.4.0
  urllib3 1.26.7 → 1.26.12
  websocket-client 1.2.1 → 1.4.2
New dependencies: importlib-metadata 5.0.0 (via sphinx), zipp 3.10.0 (via importlib-metadata). docutils stays at 0.17.1 (pinned below 0.18, see requirements/dev.in).
@@ -16,16 +16,18 @@ class PluginActionsTests(unittest.TestCase):
         with hooks.contexts.enter("tests"):
             return super().run(result=result)
 
-    def test_on(self) -> None:
-        @hooks.actions.add("test-action")
+    def test_do(self) -> None:
+        action: hooks.actions.Action[int] = hooks.actions.get("test-action")
+
+        @action.add()
         def _test_action_1(increment: int) -> None:
             self.side_effect_int += increment
 
-        @hooks.actions.add("test-action")
+        @action.add()
         def _test_action_2(increment: int) -> None:
             self.side_effect_int += increment * 2
 
-        hooks.actions.do("test-action", 1)
+        action.do(1)
         self.assertEqual(3, self.side_effect_int)
 
     def test_priority(self) -> None:
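The rewritten test above also shows what the strong typing buys: once an action is annotated as `Action[int]`, mypy can flag callbacks and calls with the wrong signature. A short sketch along those lines (the action name is the same placeholder used by the test):

from tutor import hooks

# Callbacks of this action must accept exactly one integer argument.
action: hooks.actions.Action[int] = hooks.actions.get("test-action")


@action.add()
def _increment_logger(increment: int) -> None:
    print(f"incremented by {increment}")


action.do(1)  # OK: matches Action[int]
# action.do("one")  # rejected by mypy: "str" is not "int"
# action.do(1, 2)   # rejected by mypy: too many arguments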
@@ -38,7 +38,6 @@ class PluginFiltersTests(unittest.TestCase):
         self.assertTrue(callback.is_in_context(None))
         self.assertFalse(callback.is_in_context("customcontext"))
         self.assertEqual(1, callback.apply(0))
-        self.assertEqual(0, callback.apply(0, context="customcontext"))
 
     def test_filter_context(self) -> None:
         with hooks.contexts.enter("testcontext"):
@@ -47,7 +46,7 @@ class PluginFiltersTests(unittest.TestCase):
 
         self.assertEqual([1, 2], hooks.filters.apply("test:sheeps", []))
         self.assertEqual(
-            [1], hooks.filters.apply("test:sheeps", [], context="testcontext")
+            [1], hooks.filters.apply_from_context("testcontext", "test:sheeps", [])
         )
 
     def test_clear_context(self) -> None:
@@ -7,7 +7,7 @@ import click
 
 from tests.helpers import PluginsTestCase, temporary_root
 from tutor import config as tutor_config
-from tutor import hooks, interactive
+from tutor import fmt, hooks, interactive, utils
 from tutor.types import Config, get_typed
 
 
@@ -25,13 +25,13 @@ class ConfigTests(unittest.TestCase):
     def test_merge_not_render(self) -> None:
         config: Config = {}
         base = tutor_config.get_base()
-        with patch.object(tutor_config.utils, "random_string", return_value="abcd"):
+        with patch.object(utils, "random_string", return_value="abcd"):
             tutor_config.merge(config, base)
 
         # Check that merge does not perform a rendering
         self.assertNotEqual("abcd", config["MYSQL_ROOT_PASSWORD"])
 
-    @patch.object(tutor_config.fmt, "echo")
+    @patch.object(fmt, "echo")
     def test_update_twice_should_return_same_config(self, _: Mock) -> None:
         with temporary_root() as root:
             config1 = tutor_config.load_minimal(root)
@@ -60,7 +60,7 @@ class ConfigTests(unittest.TestCase):
         self.assertTrue(tutor_config.is_service_activated(config, "service1"))
         self.assertFalse(tutor_config.is_service_activated(config, "service2"))
 
-    @patch.object(tutor_config.fmt, "echo")
+    @patch.object(fmt, "echo")
     def test_json_config_is_overwritten_by_yaml(self, _: Mock) -> None:
         with temporary_root() as root:
             # Create config from scratch
@@ -84,7 +84,7 @@ class ConfigTests(unittest.TestCase):
 
 
 class ConfigPluginTestCase(PluginsTestCase):
-    @patch.object(tutor_config.fmt, "echo")
+    @patch.object(fmt, "echo")
     def test_removed_entry_is_added_on_save(self, _: Mock) -> None:
         with temporary_root() as root:
             mock_random_string = Mock()
@@ -86,7 +86,7 @@ class EnvTests(PluginsTestCase):
         rendered = env.render_file(config, "hooks", "mysql", "init")
         self.assertIn("testpassword", rendered)
 
-    @patch.object(tutor_config.fmt, "echo")
+    @patch.object(fmt, "echo")
     def test_render_file_missing_configuration(self, _: Mock) -> None:
         self.assertRaises(
             exceptions.TutorError, env.render_file, {}, "local", "docker-compose.yml"
@@ -118,7 +118,7 @@ class EnvTests(PluginsTestCase):
     def test_patch(self) -> None:
         patches = {"plugin1": "abcd", "plugin2": "efgh"}
         with patch.object(
-            env.plugins, "iter_patches", return_value=patches.values()
+            plugins, "iter_patches", return_value=patches.values()
         ) as mock_iter_patches:
            rendered = env.render_str({}, '{{ patch("location") }}')
            mock_iter_patches.assert_called_once_with("location")
@@ -126,7 +126,7 @@ class EnvTests(PluginsTestCase):
 
     def test_patch_separator_suffix(self) -> None:
         patches = {"plugin1": "abcd", "plugin2": "efgh"}
-        with patch.object(env.plugins, "iter_patches", return_value=patches.values()):
+        with patch.object(plugins, "iter_patches", return_value=patches.values()):
             rendered = env.render_str(
                 {}, '{{ patch("location", separator=",\n", suffix=",") }}'
             )
@@ -53,14 +53,12 @@ class TutorCli(click.MultiCommand):
         """
         We enable plugins as soon as possible to have access to commands.
         """
-        if not isinstance(ctx, click.Context):
-            # When generating docs, this function is incorrectly called with a
-            # multicommand object instead of a Context. That's ok, we just
-            # ignore it.
-            # https://github.com/click-contrib/sphinx-click/issues/70
+        if not "root" in ctx.params:
+            # When generating docs, this function is called with empty args.
+            # That's ok, we just ignore it.
             return
         if not cls.IS_ROOT_READY:
-            hooks.Actions.PROJECT_ROOT_READY.do(root=ctx.params["root"])
+            hooks.Actions.PROJECT_ROOT_READY.do(ctx.params["root"])
             cls.IS_ROOT_READY = True
 
     def list_commands(self, ctx: click.Context) -> t.List[str]:
@@ -4,6 +4,7 @@ import typing as t
 from copy import deepcopy
 
 import click
+from typing_extensions import TypeAlias
 
 from tutor import bindmounts
 from tutor import config as tutor_config
@@ -13,6 +14,8 @@ from tutor.commands.context import BaseJobContext
 from tutor.exceptions import TutorError
 from tutor.types import Config
 
+COMPOSE_FILTER_TYPE: TypeAlias = "hooks.filters.Filter[t.Dict[str, t.Any], []]"
+
 
 class ComposeJobRunner(jobs.BaseComposeJobRunner):
     def __init__(self, root: str, config: Config):
@@ -41,8 +44,8 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
 
     def update_docker_compose_tmp(
         self,
-        compose_tmp_filter: hooks.filters.Filter,
-        compose_jobs_tmp_filter: hooks.filters.Filter,
+        compose_tmp_filter: COMPOSE_FILTER_TYPE,
+        compose_jobs_tmp_filter: COMPOSE_FILTER_TYPE,
         docker_compose_tmp_path: str,
         docker_compose_jobs_tmp_path: str,
     ) -> None:
@@ -50,7 +53,7 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
         Update the contents of the docker-compose.tmp.yml and
         docker-compose.jobs.tmp.yml files, which are generated at runtime.
         """
-        compose_base = {
+        compose_base: t.Dict[str, t.Any] = {
             "version": "{{ DOCKER_COMPOSE_VERSION }}",
             "services": {},
         }
@@ -105,8 +108,8 @@ class ComposeJobRunner(jobs.BaseComposeJobRunner):
 
 
 class BaseComposeContext(BaseJobContext):
-    COMPOSE_TMP_FILTER: hooks.filters.Filter = NotImplemented
-    COMPOSE_JOBS_TMP_FILTER: hooks.filters.Filter = NotImplemented
+    COMPOSE_TMP_FILTER: COMPOSE_FILTER_TYPE = NotImplemented
+    COMPOSE_JOBS_TMP_FILTER: COMPOSE_FILTER_TYPE = NotImplemented
 
     def job_runner(self, config: Config) -> ComposeJobRunner:
         raise NotImplementedError
@@ -160,10 +163,9 @@ class MountParam(click.ParamType):
         """
         mounts: t.List["MountParam.MountType"] = []
         host_path = os.path.abspath(os.path.expanduser(value))
-        volumes: t.Iterator[t.Tuple[str, str]] = hooks.Filters.COMPOSE_MOUNTS.iterate(
+        for service, container_path in hooks.Filters.COMPOSE_MOUNTS.iterate(
            os.path.basename(host_path)
-        )
-        for service, container_path in volumes:
+        ):
            mounts.append((service, host_path, container_path))
        if not mounts:
            raise self.fail(f"no mount found for {value}")
@@ -206,7 +208,7 @@ def mount_tmp_volume(
    docker-compose jobs file.
    """
    fmt.echo_info(f"Bind-mount: {host_path} -> {container_path} in {service}")
-    compose_tmp_filter: hooks.filters.Filter = (
+    compose_tmp_filter: COMPOSE_FILTER_TYPE = (
        context.COMPOSE_JOBS_TMP_FILTER
        if service.endswith("-job")
        else context.COMPOSE_TMP_FILTER
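The `COMPOSE_FILTER_TYPE` alias above shows how a module can name a specific filter type once and reuse it in annotations. A small sketch of the same pattern, assuming only the `tutor.hooks` API visible in this diff (the alias and function below are illustrative, not part of Tutor):

import typing as t

from typing_extensions import TypeAlias

from tutor import hooks

# A filter whose value is a docker-compose document (a dict) and whose
# callbacks take no extra arguments: Filter[t.Dict[str, t.Any], []].
MY_COMPOSE_FILTER: TypeAlias = "hooks.filters.Filter[t.Dict[str, t.Any], []]"


def build_compose_file(compose_filter: MY_COMPOSE_FILTER) -> t.Dict[str, t.Any]:
    # Start from a base document and let every registered callback amend it.
    base: t.Dict[str, t.Any] = {"version": "{{ DOCKER_COMPOSE_VERSION }}", "services": {}}
    return compose_filter.apply(base)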
@@ -21,15 +21,15 @@ VENDOR_IMAGES = [
 
 @hooks.Filters.IMAGES_BUILD.add()
 def _add_core_images_to_build(
-    build_images: t.List[t.Tuple[str, t.Tuple[str, str], str, t.List[str]]],
+    build_images: t.List[t.Tuple[str, t.Tuple[str, ...], str, t.Tuple[str, ...]]],
     config: Config,
-) -> t.List[t.Tuple[str, t.Tuple[str, str], str, t.List[str]]]:
+) -> t.List[t.Tuple[str, t.Tuple[str, ...], str, t.Tuple[str, ...]]]:
     """
     Add base images to the list of Docker images to build on `tutor build all`.
     """
     for image in BASE_IMAGE_NAMES:
         tag = images.get_tag(config, image)
-        build_images.append((image, ("build", image), tag, []))
+        build_images.append((image, ("build", image), tag, ()))
     return build_images
 
 
@@ -161,7 +161,7 @@ def printtag(context: Context, image_names: t.List[str]) -> None:
 
 def find_images_to_build(
     config: Config, image: str
-) -> t.Iterator[t.Tuple[str, t.Tuple[str], str, t.List[str]]]:
+) -> t.Iterator[t.Tuple[str, t.Tuple[str, ...], str, t.Tuple[str, ...]]]:
     """
     Iterate over all images to build.
 
@@ -169,11 +169,8 @@
 
     Yield: (name, path, tag, build args)
     """
-    all_images_to_build: t.Iterator[
-        t.Tuple[str, t.Tuple[str], str, t.List[str]]
-    ] = hooks.Filters.IMAGES_BUILD.iterate(config)
     found = False
-    for name, path, tag, args in all_images_to_build:
+    for name, path, tag, args in hooks.Filters.IMAGES_BUILD.iterate(config):
         if image in [name, "all"]:
             found = True
             tag = tutor_env.render_str(config, tag)
@@ -184,7 +181,9 @@
 
 
 def find_remote_image_tags(
-    config: Config, filtre: hooks.filters.Filter, image: str
+    config: Config,
+    filtre: "hooks.filters.Filter[t.List[t.Tuple[str, str]], [Config]]",
+    image: str,
 ) -> t.Iterator[str]:
     """
     Iterate over all images to push or pull.
@@ -193,7 +192,7 @@
 
     Yield: tag
     """
-    all_remote_images: t.Iterator[t.Tuple[str, str]] = filtre.iterate(config)
+    all_remote_images = filtre.iterate(config)
     found = False
     for name, tag in all_remote_images:
         if image in [name, "all"]:
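Because `IMAGES_BUILD` now carries its full element type, consumers such as `find_images_to_build` can rely on the tuple shape without extra annotations. A rough, hypothetical sketch of such a consumer:

import typing as t

from tutor import hooks
from tutor.types import Config


def buildable_image_names(config: Config) -> t.List[str]:
    # Each item yielded by the filter is (name, path, tag, build args).
    names: t.List[str] = []
    for name, _path, _tag, _args in hooks.Filters.IMAGES_BUILD.iterate(config):
        names.append(name)
    return names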
@@ -127,10 +127,7 @@ def get_defaults() -> Config:
     Entries in this configuration are unrendered.
     """
     defaults = get_template("defaults.yml")
-    extra_defaults: t.Iterator[
-        t.Tuple[str, ConfigValue]
-    ] = hooks.Filters.CONFIG_DEFAULTS.iterate()
-    for name, value in extra_defaults:
+    for name, value in hooks.Filters.CONFIG_DEFAULTS.iterate():
         defaults[name] = value
     update_with_env(defaults)
     return defaults
@@ -306,10 +303,9 @@ def _remove_plugin_config_overrides_on_unload(
 ) -> None:
     # Find the configuration entries that were overridden by the plugin and
     # remove them from the current config
-    overriden_config_items: t.Iterator[
-        t.Tuple[str, ConfigValue]
-    ] = hooks.Filters.CONFIG_OVERRIDES.iterate(context=hooks.Contexts.APP(plugin).name)
-    for key, _value in overriden_config_items:
+    for key, _value in hooks.Filters.CONFIG_OVERRIDES.iterate_from_context(
+        hooks.Contexts.APP(plugin).name
+    ):
         value = config.pop(key, None)
         value = env.render_unknown(config, value)
         fmt.echo_info(f"  config - removing entry: {key}={value}")
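The `iterate_from_context` call above replaces the old `iterate(context=...)` form. A hedged usage sketch (the plugin name is a placeholder):

from tutor import hooks

# List the configuration overrides that were declared while the "myplugin"
# plugin context was active, leaving other callbacks untouched.
plugin_context = hooks.Contexts.APP("myplugin").name
for key, value in hooks.Filters.CONFIG_OVERRIDES.iterate_from_context(plugin_context):
    print(f"{key} is overridden by myplugin: {value!r}")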
tutor/env.py
@@ -42,13 +42,13 @@ def _prepare_environment() -> None:
             ("long_to_base64", utils.long_to_base64),
             ("random_string", utils.random_string),
             ("reverse_host", utils.reverse_host),
+            ("rsa_import_key", utils.rsa_import_key),
             ("rsa_private_key", utils.rsa_private_key),
         ],
     )
     # Template variables
     hooks.Filters.ENV_TEMPLATE_VARIABLES.add_items(
         [
-            ("rsa_import_key", utils.rsa_import_key),
             ("HOST_USER_ID", utils.get_user_id()),
             ("TUTOR_APP", __app__.replace("-", "_")),
             ("TUTOR_VERSION", __version__),
@@ -77,9 +77,7 @@ class Renderer:
         self.environment = JinjaEnvironment(self.template_roots)
 
         # Filters
-        plugin_filters: t.Iterator[
-            t.Tuple[str, JinjaFilter]
-        ] = hooks.Filters.ENV_TEMPLATE_FILTERS.iterate()
+        plugin_filters = hooks.Filters.ENV_TEMPLATE_FILTERS.iterate()
         for name, func in plugin_filters:
             if name in self.environment.filters:
                 fmt.echo_alert(f"Found conflicting template filters named '{name}'")
@@ -87,10 +85,7 @@ class Renderer:
         self.environment.filters["walk_templates"] = self.walk_templates
 
         # Globals
-        plugin_globals: t.Iterator[
-            t.Tuple[str, JinjaFilter]
-        ] = hooks.Filters.ENV_TEMPLATE_VARIABLES.iterate()
-        for name, value in plugin_globals:
+        for name, value in hooks.Filters.ENV_TEMPLATE_VARIABLES.iterate():
             if name in self.environment.globals:
                 fmt.echo_alert(f"Found conflicting template variables named '{name}'")
             self.environment.globals[name] = value
@@ -220,12 +215,10 @@ def is_rendered(path: str) -> bool:
     If the path matches an include pattern, it is rendered. If not and it matches an
     ignore pattern, it is not rendered. By default, all files are rendered.
     """
-    include_patterns: t.Iterator[str] = hooks.Filters.ENV_PATTERNS_INCLUDE.iterate()
-    for include_pattern in include_patterns:
+    for include_pattern in hooks.Filters.ENV_PATTERNS_INCLUDE.iterate():
         if re.match(include_pattern, path):
             return True
-    ignore_patterns: t.Iterator[str] = hooks.Filters.ENV_PATTERNS_IGNORE.iterate()
-    for ignore_pattern in ignore_patterns:
+    for ignore_pattern in hooks.Filters.ENV_PATTERNS_IGNORE.iterate():
         if re.match(ignore_pattern, path):
             return False
     return True
@@ -255,10 +248,7 @@ def save(root: str, config: Config) -> None:
     Save the full environment, including version information.
     """
     root_env = pathjoin(root)
-    targets: t.Iterator[
-        t.Tuple[str, str]
-    ] = hooks.Filters.ENV_TEMPLATE_TARGETS.iterate()
-    for src, dst in targets:
+    for src, dst in hooks.Filters.ENV_TEMPLATE_TARGETS.iterate():
         save_all_from(src, os.path.join(root_env, dst), config)
 
     upgrade_obsolete(root)
@@ -458,8 +448,8 @@ def _delete_plugin_templates(plugin: str, root: str, _config: Config) -> None:
     """
     Delete plugin env files on unload.
     """
-    targets: t.Iterator[t.Tuple[str, str]] = hooks.Filters.ENV_TEMPLATE_TARGETS.iterate(
-        context=hooks.Contexts.APP(plugin).name
+    targets = hooks.Filters.ENV_TEMPLATE_TARGETS.iterate_from_context(
+        hooks.Contexts.APP(plugin).name
     )
     for src, dst in targets:
         path = pathjoin(root, dst.replace("/", os.sep), src.replace("/", os.sep))
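A pattern repeated throughout this file: with typed filters, the intermediate `t.Iterator[...]` annotations are no longer needed because `iterate()` already carries the element type. A small illustrative sketch:

from tutor import hooks

# Before, the iterator had to be annotated by hand:
#   targets: t.Iterator[t.Tuple[str, str]] = hooks.Filters.ENV_TEMPLATE_TARGETS.iterate()
# Now the (src, dst) element type is inferred from the filter declaration.
for src, dst in hooks.Filters.ENV_TEMPLATE_TARGETS.iterate():
    print(f"{src} will be rendered under {dst}")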
@@ -4,19 +4,22 @@ __license__ = "Apache 2.0"
 import sys
 import typing as t
 
+from typing_extensions import ParamSpec
+
 from .contexts import Contextualized
 
-# Similarly to CallableFilter, it should be possible to refine the definition of
-# CallableAction in the future.
-CallableAction = t.Callable[..., None]
+P = ParamSpec("P")
+# Similarly to CallableFilter, it should be possible to create a CallableAction alias in
+# the future.
+# CallableAction = t.Callable[P, None]
 
 DEFAULT_PRIORITY = 10
 
 
-class ActionCallback(Contextualized):
+class ActionCallback(Contextualized, t.Generic[P]):
     def __init__(
         self,
-        func: CallableAction,
+        func: t.Callable[P, None],
         priority: t.Optional[int] = None,
     ):
         super().__init__()
@@ -24,29 +27,44 @@ class ActionCallback(Contextualized):
         self.priority = priority or DEFAULT_PRIORITY
 
     def do(
-        self, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
+        self,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> None:
-        if self.is_in_context(context):
-            self.func(*args, **kwargs)
+        self.func(*args, **kwargs)
 
 
-class Action:
+class Action(t.Generic[P]):
     """
-    Each action is associated to a name and a list of callbacks, sorted by
-    priority.
+    Action hooks have callbacks that are triggered independently from one another.
+
+    Several actions are defined across the codebase. Each action is given a unique name.
+    To each action are associated zero or more callbacks, sorted by priority.
+
+    This is the typical action lifecycle:
+
+    1. Create an action with method :py:meth:`get` (or function :py:func:`get`).
+    2. Add callbacks with method :py:meth:`add` (or function :py:func:`add`).
+    3. Call the action callbacks with method :py:meth:`do` (or function :py:func:`do`).
+
+    The `P` type parameter of the Action class corresponds to the expected signature of
+    the action callbacks. For instance, `Action[[str, int]]` means that the action
+    callbacks are expected to take two arguments: one string and one integer.
+
+    This strong typing makes it easier for plugin developers to quickly check whether they are adding and calling action callbacks correctly.
     """
 
-    INDEX: t.Dict[str, "Action"] = {}
+    INDEX: t.Dict[str, "Action[t.Any]"] = {}
 
     def __init__(self, name: str) -> None:
         self.name = name
-        self.callbacks: t.List[ActionCallback] = []
+        self.callbacks: t.List[ActionCallback[P]] = []
 
     def __repr__(self) -> str:
        return f"{self.__class__.__name__}('{self.name}')"
 
    @classmethod
-    def get(cls, name: str) -> "Action":
+    def get(cls, name: str) -> "Action[t.Any]":
        """
        Get an existing action with the given name from the index, or create one.
        """
@@ -54,8 +72,14 @@ class Action:
 
     def add(
         self, priority: t.Optional[int] = None
-    ) -> t.Callable[[CallableAction], CallableAction]:
-        def inner(func: CallableAction) -> CallableAction:
+    ) -> t.Callable[[t.Callable[P, None]], t.Callable[P, None]]:
+        """
+        Add a callback to the action
+
+        This is similar to :py:func:`add`.
+        """
+
+        def inner(func: t.Callable[P, None]) -> t.Callable[P, None]:
             callback = ActionCallback(func, priority=priority)
             # I wish we could use bisect.insort_right here but the `key=` parameter
             # is unsupported in Python 3.9
@@ -71,18 +95,45 @@ class Action:
         return inner
 
     def do(
-        self, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
+        self,
+        *args: P.args,
+        **kwargs: P.kwargs,
     ) -> None:
+        """
+        Run the action callbacks
+
+        This is similar to :py:func:`do`.
+        """
+        self.do_from_context(None, *args, **kwargs)
+
+    def do_from_context(
+        self,
+        context: t.Optional[str],
+        *args: P.args,
+        **kwargs: P.kwargs,
+    ) -> None:
+        """
+        Same as :py:func:`do` but only run the callbacks from a given context.
+        """
         for callback in self.callbacks:
-            try:
-                callback.do(*args, context=context, **kwargs)
-            except:
-                sys.stderr.write(
-                    f"Error applying action '{self.name}': func={callback.func} contexts={callback.contexts}'\n"
-                )
-                raise
+            if callback.is_in_context(context):
+                try:
+                    callback.do(
+                        *args,
+                        **kwargs,
+                    )
+                except:
+                    sys.stderr.write(
+                        f"Error applying action '{self.name}': func={callback.func} contexts={callback.contexts}'\n"
+                    )
+                    raise
 
     def clear(self, context: t.Optional[str] = None) -> None:
         """
         Clear all or part of the callbacks associated to an action
 
         This is similar to :py:func:`clear`.
         """
         self.callbacks = [
             callback
             for callback in self.callbacks
@@ -90,10 +141,13 @@ class Action:
         ]
 
 
-class ActionTemplate:
+class ActionTemplate(t.Generic[P]):
     """
-    Action templates are for actions for which the name needs to be formatted
-    before the action can be applied.
+    Action templates can generate different :py:class:`Action` objects for which the
+    name matches a certain template.
     """
 
     def __init__(self, name: str):
@@ -102,7 +156,7 @@
     def __repr__(self) -> str:
         return f"{self.__class__.__name__}('{self.template}')"
 
-    def __call__(self, *args: t.Any, **kwargs: t.Any) -> Action:
+    def __call__(self, *args: t.Any, **kwargs: t.Any) -> Action[P]:
         return get(self.template.format(*args, **kwargs))
 
 
@@ -110,7 +164,7 @@
 get = Action.get
 
 
-def get_template(name: str) -> ActionTemplate:
+def get_template(name: str) -> ActionTemplate[t.Any]:
     """
     Create an action with a template name.
 
@@ -130,7 +184,7 @@
 def add(
     name: str,
     priority: t.Optional[int] = None,
-) -> t.Callable[[CallableAction], CallableAction]:
+) -> t.Callable[[t.Callable[P, None]], t.Callable[P, None]]:
     """
     Decorator to add a callback action associated to a name.
 
@@ -158,15 +212,14 @@
 
 
 def do(
-    name: str, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
+    name: str,
+    *args: P.args,
+    **kwargs: P.kwargs,
 ) -> None:
     """
     Run action callbacks associated to a name/context.
 
     :param name: name of the action for which callbacks will be run.
-    :param context: limit the set of callback actions to those that
-        were declared within a certain context (see
-        :py:func:`tutor.hooks.contexts.enter`).
 
     Extra ``*args`` and ``*kwargs`` arguments will be passed as-is to
     callback functions.
@@ -175,9 +228,26 @@
     management here: a single exception will cause all following callbacks
     not to be run and the exception to be bubbled up.
     """
-    action = Action.INDEX.get(name)
-    if action:
-        action.do(*args, context=context, **kwargs)
+    action: Action[P] = Action.get(name)
+    action.do(*args, **kwargs)
+
+
+def do_from_context(
+    context: str,
+    name: str,
+    *args: P.args,
+    **kwargs: P.kwargs,
+) -> None:
+    """
+    Same as :py:func:`do` but only run the callbacks that were created in a given context.
+
+    :param name: name of the action for which callbacks will be run.
+    :param context: limit the set of callback actions to those that
+        were declared within a certain context (see
+        :py:func:`tutor.hooks.contexts.enter`).
+    """
+    action: Action[P] = Action.get(name)
+    action.do_from_context(context, *args, **kwargs)
 
 
 def clear_all(context: t.Optional[str] = None) -> None:
@@ -204,6 +274,4 @@
     This function should almost certainly never be called by plugins. It is
     mostly useful to disable some plugins at runtime or in unit tests.
     """
-    action = Action.INDEX.get(name)
-    if action:
-        action.clear(context=context)
+    Action.get(name).clear(context=context)
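To make the new `Action[P]` docstring concrete, here is a short sketch of the documented lifecycle (the action name and callback are hypothetical; `from __future__ import annotations` mirrors what consts.py does, so that the parametrized annotation is not evaluated at runtime):

from __future__ import annotations

from tutor import hooks

# 1. Get (or create) an action whose callbacks take a (str, int) pair.
greeted: hooks.actions.Action[[str, int]] = hooks.actions.get("myplugin:greeted")


# 2. Add a callback; its signature must match [str, int] or mypy will complain.
@greeted.add()
def _on_greeted(name: str, times: int) -> None:
    for _ in range(times):
        print(f"Hello, {name}!")


# 3. Trigger every callback, in priority order.
greeted.do("tutor", 2)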
@@ -2,10 +2,19 @@
 List of all the action, filter and context names used across Tutor. This module is used
 to generate part of the reference documentation.
 """
+from __future__ import annotations
+
 # The Tutor plugin system is licensed under the terms of the Apache 2.0 license.
 __license__ = "Apache 2.0"
 
+from typing import Any, Callable
+
+import click
+
+from tutor.types import Config
+
 from . import actions, contexts, filters
+from .actions import Action, ActionTemplate
+from .filters import Filter, FilterTemplate
 
 __all__ = ["Actions", "Filters", "Contexts"]
@@ -28,9 +37,11 @@ class Actions:
     #: Triggered whenever a "docker-compose start", "up" or "restart" command is executed.
     #:
     #: :parameter: str root: project root.
-    #: :parameter: dict[str, ...] config: project configuration.
+    #: :parameter: dict config: project configuration.
     #: :parameter: str name: docker-compose project name.
-    COMPOSE_PROJECT_STARTED = actions.get("compose:project:started")
+    COMPOSE_PROJECT_STARTED: Action[[str, Config, str]] = actions.get(
+        "compose:project:started"
+    )
 
     #: Called whenever the core project is ready to run. This action is called as soon
     #: as possible. This is the right time to discover plugins, for instance. In
@@ -45,12 +56,12 @@ class Actions:
     #: developers probably don't have to implement this action themselves.
     #:
     #: This action does not have any parameter.
-    CORE_READY = actions.get("core:ready")
+    CORE_READY: Action[[]] = actions.get("core:ready")
 
     #: Called as soon as we have access to the Tutor project root.
     #:
     #: :parameter str root: absolute path to the project root.
-    PROJECT_ROOT_READY = actions.get("project:root:ready")
+    PROJECT_ROOT_READY: Action[str] = actions.get("project:root:ready")
 
     #: Triggered when a single plugin needs to be loaded. Only plugins that have previously been
     #: discovered can be loaded (see :py:data:`CORE_READY`).
@@ -63,13 +74,13 @@ class Actions:
     #: they want to perform a specific action at the moment the plugin is enabled.
     #:
     #: This action does not have any parameter.
-    PLUGIN_LOADED = actions.get_template("plugins:loaded:{0}")
+    PLUGIN_LOADED: ActionTemplate[[]] = actions.get_template("plugins:loaded:{0}")
 
     #: Triggered after all plugins have been loaded. At this point the list of loaded
     #: plugins may be obtained from the :py:data:``Filters.PLUGINS_LOADED`` filter.
     #:
     #: This action does not have any parameter.
-    PLUGINS_LOADED = actions.get("plugins:loaded")
+    PLUGINS_LOADED: Action[[]] = actions.get("plugins:loaded")
 
     #: Triggered when a single plugin is unloaded. Only plugins that have previously been
     #: loaded can be unloaded (see :py:data:`PLUGIN_LOADED`).
@@ -81,8 +92,8 @@ class Actions:
     #:
     #: :parameter str plugin: plugin name.
     #: :parameter str root: absolute path to the project root.
-    #: :parameter dict config: full project configuration
-    PLUGIN_UNLOADED = actions.get("plugins:unloaded")
+    #: :parameter config: full project configuration
+    PLUGIN_UNLOADED: Action[str, str, Config] = actions.get("plugins:unloaded")
 
 
 class Filters:
@@ -113,19 +124,23 @@ class Filters:
     #: - ``path`` is a tuple that corresponds to a template relative path.
     #:   Example: ``("myplugin", "hooks", "myservice", "pre-init")`` (see:py:data:`IMAGES_BUILD`).
     #:   The command to execute will be read from that template, after it is rendered.
-    COMMANDS_INIT = filters.get("commands:init")
+    COMMANDS_INIT: Filter[list[tuple[str, tuple[str, ...]]], str] = filters.get(
+        "commands:init"
+    )
 
     #: List of commands to be executed prior to initialization. These commands are run even
     #: before the mysql databases are created and the migrations are applied.
     #:
     #: :parameter list[tuple[str, tuple[str, ...]]] tasks: list of ``(service, path)`` tasks. (see :py:data:`COMMANDS_INIT`).
-    COMMANDS_PRE_INIT = filters.get("commands:pre-init")
+    COMMANDS_PRE_INIT: Filter[list[tuple[str, tuple[str, ...]]], []] = filters.get(
+        "commands:pre-init"
+    )
 
     #: Same as :py:data:`COMPOSE_LOCAL_TMP` but for the development environment.
-    COMPOSE_DEV_TMP = filters.get("compose:dev:tmp")
+    COMPOSE_DEV_TMP: Filter[Config, []] = filters.get("compose:dev:tmp")
 
     #: Same as :py:data:`COMPOSE_LOCAL_JOBS_TMP` but for the development environment.
-    COMPOSE_DEV_JOBS_TMP = filters.get("compose:dev-jobs:tmp")
+    COMPOSE_DEV_JOBS_TMP: Filter[Config, []] = filters.get("compose:dev-jobs:tmp")
 
     #: List of folders to bind-mount in docker-compose containers, either in ``tutor local`` or ``tutor dev``.
     #:
@@ -146,7 +161,7 @@ class Filters:
     #: :parameter str name: basename of the host-mounted folder. In the example above,
     #:   this is "edx-platform". When implementing this filter you should check this name to
     #:   conditionnally add mounts.
-    COMPOSE_MOUNTS = filters.get("compose:mounts")
+    COMPOSE_MOUNTS: Filter[list[tuple[str, str]], [str]] = filters.get("compose:mounts")
 
     #: Contents of the (local|dev)/docker-compose.tmp.yml files that will be generated at
     #: runtime. This is used for instance to bind-mount folders from the host (see
@@ -154,10 +169,10 @@ class Filters:
     #:
     #: :parameter dict[str, ...] docker_compose_tmp: values which will be serialized to local/docker-compose.tmp.yml.
     #:   Keys and values will be rendered before saving, such that you may include ``{{ ... }}`` statements.
-    COMPOSE_LOCAL_TMP = filters.get("compose:local:tmp")
+    COMPOSE_LOCAL_TMP: Filter[Config, []] = filters.get("compose:local:tmp")
 
     #: Same as :py:data:`COMPOSE_LOCAL_TMP` but for jobs
-    COMPOSE_LOCAL_JOBS_TMP = filters.get("compose:local-jobs:tmp")
+    COMPOSE_LOCAL_JOBS_TMP: Filter[Config, []] = filters.get("compose:local-jobs:tmp")
 
     #: List of images to be built when we run ``tutor images build ...``.
     #:
@@ -171,8 +186,10 @@ class Filters:
     #:   rendered at runtime with the user configuration. Thus, the image tag could
     #:   be ``"{{ DOCKER_REGISTRY }}/myimage:{{ TUTOR_VERSION }}"``.
     #: - ``args`` is a list of arguments that will be passed to ``docker build ...``.
-    #: :parameter dict config: user configuration.
-    IMAGES_BUILD = filters.get("images:build")
+    #: :parameter Config config: user configuration.
+    IMAGES_BUILD: Filter[
+        list[tuple[str, tuple[str, ...], str, tuple[str, ...]]], [Config]
+    ] = filters.get("images:build")
 
     #: List of images to be pulled when we run ``tutor images pull ...``.
     #:
@@ -180,18 +197,18 @@ class Filters:
     #:
     #: - ``name`` is the name of the image, as in ``tutor images pull myimage``.
     #: - ``tag`` is the Docker tag that will be applied to the image. (see :py:data:`IMAGES_BUILD`).
-    #: :parameter dict config: user configuration.
-    IMAGES_PULL = filters.get("images:pull")
+    #: :parameter Config config: user configuration.
+    IMAGES_PULL: Filter[list[tuple[str, str]], [Config]] = filters.get("images:pull")
 
     #: List of images to be pushed when we run ``tutor images push ...``.
     #: Parameters are the same as for :py:data:`IMAGES_PULL`.
-    IMAGES_PUSH = filters.get("images:push")
+    IMAGES_PUSH: Filter[list[tuple[str, str]], [Config]] = filters.get("images:push")
 
     #: List of command line interface (CLI) commands.
     #:
     #: :parameter list commands: commands are instances of ``click.Command``. They will
     #:   all be added as subcommands of the main ``tutor`` command.
-    CLI_COMMANDS = filters.get("cli:commands")
+    CLI_COMMANDS: Filter[list[click.Command], []] = filters.get("cli:commands")
 
     #: Declare new default configuration settings that don't necessarily have to be saved in the user
     #: ``config.yml`` file. Default settings may be overridden with ``tutor config save --set=...``, in which
@@ -199,21 +216,23 @@ class Filters:
     #:
     #: :parameter list[tuple[str, ...]] items: list of (name, value) new settings. All
     #:   new entries must be prefixed with the plugin name in all-caps.
-    CONFIG_DEFAULTS = filters.get("config:defaults")
+    CONFIG_DEFAULTS: Filter[list[tuple[str, Any]], []] = filters.get("config:defaults")
 
     #: Modify existing settings, either from Tutor core or from other plugins. Beware not to override any
     #: important setting, such as passwords! Overridden setting values will be printed to stdout when the plugin
     #: is disabled, such that users have a chance to back them up.
     #:
     #: :parameter list[tuple[str, ...]] items: list of (name, value) settings.
-    CONFIG_OVERRIDES = filters.get("config:overrides")
+    CONFIG_OVERRIDES: Filter[list[tuple[str, Any]], []] = filters.get(
+        "config:overrides"
+    )
 
     #: Declare uniqaue configuration settings that must be saved in the user ``config.yml`` file. This is where
     #: you should declare passwords and randomly-generated values that are different from one environment to the next.
     #:
     #: :parameter list[tuple[str, ...]] items: list of (name, value) new settings. All
     #:   names must be prefixed with the plugin name in all-caps.
-    CONFIG_UNIQUE = filters.get("config:unique")
+    CONFIG_UNIQUE: Filter[list[tuple[str, Any]], []] = filters.get("config:unique")
 
     #: List of patches that should be inserted in a given location of the templates. The
     #: filter name must be formatted with the patch name.
@@ -221,13 +240,13 @@ class Filters:
     #:   prefer :py:data:`ENV_PATCHES`.
     #:
     #: :parameter list[str] patches: each item is the unrendered patch content.
-    ENV_PATCH = filters.get_template("env:patches:{0}")
+    ENV_PATCH: FilterTemplate[list[str], []] = filters.get_template("env:patches:{0}")
 
     #: List of patches that should be inserted in a given location of the templates. This is very similar to :py:data:`ENV_PATCH`, except that the patch is added as a ``(name, content)`` tuple.
     #:
     #: :parameter list[tuple[str, str]] patches: pairs of (name, content) tuples. Use this
     #:   filter to modify the Tutor templates.
-    ENV_PATCHES = filters.get("env:patches")
+    ENV_PATCHES: Filter[list[tuple[str, str]], []] = filters.get("env:patches")
 
     #: List of template path patterns to be ignored when rendering templates to the project root. By default, we ignore:
     #:
@@ -238,20 +257,20 @@ class Filters:
     #: Ignored patterns are overridden by include patterns; see :py:data:`ENV_PATTERNS_INCLUDE`.
     #:
     #: :parameter list[str] patterns: list of regular expression patterns. E.g: ``r"(.*/)?ignored_file_name(/.*)?"``.
-    ENV_PATTERNS_IGNORE = filters.get("env:patterns:ignore")
+    ENV_PATTERNS_IGNORE: Filter[list[str], []] = filters.get("env:patterns:ignore")
 
     #: List of template path patterns to be included when rendering templates to the project root.
     #: Patterns from this list will take priority over the patterns from :py:data:`ENV_PATTERNS_IGNORE`.
     #:
     #: :parameter list[str] patterns: list of regular expression patterns. See :py:data:`ENV_PATTERNS_IGNORE`.
-    ENV_PATTERNS_INCLUDE = filters.get("env:patterns:include")
+    ENV_PATTERNS_INCLUDE: Filter[list[str], []] = filters.get("env:patterns:include")
 
     #: List of all template root folders.
     #:
     #: :parameter list[str] templates_root: absolute paths to folders which contain templates.
     #:   The templates in these folders will then be accessible by the environment
     #:   renderer using paths that are relative to their template root.
-    ENV_TEMPLATE_ROOTS = filters.get("env:templates:roots")
+    ENV_TEMPLATE_ROOTS: Filter[list[str], []] = filters.get("env:templates:roots")
 
     #: List of template source/destination targets.
     #:
@@ -260,7 +279,9 @@ class Filters:
     #:   is a path relative to the environment root. For instance: adding ``("c/d",
     #:   "a/b")`` to the filter will cause all files from "c/d" to be rendered to the ``a/b/c/d``
     #:   subfolder.
-    ENV_TEMPLATE_TARGETS = filters.get("env:templates:targets")
+    ENV_TEMPLATE_TARGETS: Filter[list[tuple[str, str]], []] = filters.get(
+        "env:templates:targets"
+    )
 
     #: List of `Jinja2 filters <https://jinja.palletsprojects.com/en/latest/templates/#filters>`__ that will be
     #: available in templates. Jinja2 filters are basically functions that can be used
@@ -291,12 +312,16 @@ class Filters:
     #:
     #: :parameter filters: list of (name, function) tuples. The function signature
     #:   should correspond to its usage in templates.
-    ENV_TEMPLATE_FILTERS = filters.get("env:templates:filters")
+    ENV_TEMPLATE_FILTERS: Filter[
+        list[tuple[str, Callable[..., Any]]], []
+    ] = filters.get("env:templates:filters")
 
     #: List of extra variables to be included in all templates.
     #:
     #: :parameter filters: list of (name, value) tuples.
-    ENV_TEMPLATE_VARIABLES = filters.get("env:templates:variables")
+    ENV_TEMPLATE_VARIABLES: Filter[list[tuple[str, Any]], []] = filters.get(
+        "env:templates:variables"
+    )
 
     #: List of installed plugins. In order to be added to this list, a plugin must first
     #: be discovered (see :py:data:`Actions.CORE_READY`).
@@ -304,19 +329,21 @@ class Filters:
     #: :param list[str] plugins: plugin developers probably don't have to implement this
     #:   filter themselves, but they can apply it to check for the presence of other
     #:   plugins.
-    PLUGINS_INSTALLED = filters.get("plugins:installed")
+    PLUGINS_INSTALLED: Filter[list[str], []] = filters.get("plugins:installed")
 
     #: Information about each installed plugin, including its version.
     #: Keep this information to a single line for easier parsing by 3rd-party scripts.
     #:
     #: :param list[tuple[str, str]] versions: each pair is a ``(plugin, info)`` tuple.
-    PLUGINS_INFO = filters.get("plugins:installed:versions")
+    PLUGINS_INFO: Filter[list[tuple[str, str]], []] = filters.get(
+        "plugins:installed:versions"
+    )
 
     #: List of loaded plugins.
     #:
     #: :param list[str] plugins: plugin developers probably don't have to modify this
     #:   filter themselves, but they can apply it to check whether other plugins are enabled.
-    PLUGINS_LOADED = filters.get("plugins:loaded")
+    PLUGINS_LOADED: Filter[list[str], []] = filters.get("plugins:loaded")
 
 
 class Contexts:
@@ -348,8 +375,8 @@ class Contexts:
             # do stuff and all created hooks will include MY_CONTEXT
 
         # Apply only the hook callbacks that were created within MY_CONTEXT
-        hooks.Actions.MY_ACTION.do(context=str(hooks.Contexts.MY_CONTEXT))
-        hooks.Filters.MY_FILTER.apply(context=hooks.Contexts.MY_CONTEXT.name)
+        hooks.Actions.MY_ACTION.do_from_context(str(hooks.Contexts.MY_CONTEXT))
+        hooks.Filters.MY_FILTER.apply_from_context(hooks.Contexts.MY_CONTEXT.name)
    """
 
    #: We enter this context whenever we create hooks for a specific application or :
@ -4,82 +4,90 @@ __license__ = "Apache 2.0"
|
||||
import sys
|
||||
import typing as t
|
||||
|
||||
from typing_extensions import Concatenate, ParamSpec
|
||||
|
||||
from . import contexts
|
||||
|
||||
# For now, this signature is not very restrictive. In the future, we could improve it by writing:
|
||||
#
|
||||
# P = ParamSpec("P")
|
||||
# CallableFilter = t.Callable[Concatenate[T, P], T]
|
||||
#
|
||||
# See PEP-612: https://www.python.org/dev/peps/pep-0612/
|
||||
# Unfortunately, this piece of code fails because of a bug in mypy:
|
||||
# https://github.com/python/mypy/issues/11833
|
||||
# https://github.com/python/mypy/issues/8645
|
||||
# https://github.com/python/mypy/issues/5876
|
||||
# https://github.com/python/typing/issues/696
|
||||
T = t.TypeVar("T")
|
||||
CallableFilter = t.Callable[..., t.Any]
|
||||
P = ParamSpec("P")
|
||||
# Specialized typevar for list elements
|
||||
E = t.TypeVar("E")
|
||||
# I wish we could create such an alias, which would greatly simplify the definitions
# below. Unfortunately this does not work, yet. It will once the following issue is
# resolved: https://github.com/python/mypy/issues/11855
# CallableFilter = t.Callable[Concatenate[T, P], T]


class FilterCallback(contexts.Contextualized):
def __init__(self, func: CallableFilter):
class FilterCallback(contexts.Contextualized, t.Generic[T, P]):
def __init__(self, func: t.Callable[Concatenate[T, P], T]):
super().__init__()
self.func = func

def apply(
self, value: T, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
) -> T:
if self.is_in_context(context):
value = self.func(value, *args, **kwargs)
return value
def apply(self, value: T, *args: P.args, **kwargs: P.kwargs) -> T:
return self.func(value, *args, **kwargs)


class Filter:
class Filter(t.Generic[T, P]):
"""
Each filter is associated to a name and a list of callbacks.
Filter hooks have callbacks that are triggered as a chain.

Several filters are defined across the codebase. Each filter is given a unique
name. To each filter are associated zero or more callbacks, sorted by priority.

This is the typical filter lifecycle:

1. Create a filter with method :py:meth:`get` (or function :py:func:`get`).
2. Add callbacks with method :py:meth:`add` (or function :py:func:`add`).
3. Call the filter callbacks with method :py:meth:`apply` (or function :py:func:`apply`).

The result of each callback is passed as the first argument to the next one. Thus,
the type of the first argument must match the callback return type.

The `T` and `P` type parameters of the Filter class correspond to the expected
signature of the filter callbacks. `T` is the type of the first argument (and thus
the return value type as well) and `P` is the signature of the other arguments.

For instance, `Filter[str, [int]]` means that the filter callbacks are expected to
take two arguments: one string and one integer. Each callback must then return a
string.

This strong typing makes it easier for plugin developers to quickly check whether
they are adding and calling filter callbacks correctly.
"""

INDEX: t.Dict[str, "Filter"] = {}
INDEX: t.Dict[str, "Filter[t.Any, t.Any]"] = {}

def __init__(self, name: str) -> None:
self.name = name
self.callbacks: t.List[FilterCallback] = []
self.callbacks: t.List[FilterCallback[T, P]] = []

def __repr__(self) -> str:
return f"{self.__class__.__name__}('{self.name}')"

@classmethod
def get(cls, name: str) -> "Filter":
def get(cls, name: str) -> "Filter[t.Any, t.Any]":
"""
Get an existing filter with the given name from the index, or create one.
"""
|
||||
return cls.INDEX.setdefault(name, cls(name))
|
||||
|
||||
def add(self) -> t.Callable[[CallableFilter], CallableFilter]:
|
||||
def inner(func: CallableFilter) -> CallableFilter:
|
||||
self.callbacks.append(FilterCallback(func))
|
||||
def add(
|
||||
self,
|
||||
) -> t.Callable[
|
||||
[t.Callable[Concatenate[T, P], T]], t.Callable[Concatenate[T, P], T]
|
||||
]:
|
||||
def inner(
|
||||
func: t.Callable[Concatenate[T, P], T]
|
||||
) -> t.Callable[Concatenate[T, P], T]:
|
||||
self.callbacks.append(FilterCallback[T, P](func))
|
||||
return func
|
||||
|
||||
return inner
|
||||
|
||||
def add_item(self, item: T) -> None:
|
||||
self.add_items([item])
|
||||
|
||||
def add_items(self, items: t.List[T]) -> None:
|
||||
@self.add()
|
||||
def callback(value: t.List[T], *_args: t.Any, **_kwargs: t.Any) -> t.List[T]:
|
||||
return value + items
|
||||
|
||||
def iterate(
|
||||
self, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
|
||||
) -> t.Iterator[T]:
|
||||
yield from self.apply([], *args, context=context, **kwargs)
|
||||
|
||||
def apply(
|
||||
self,
|
||||
value: T,
|
||||
*args: t.Any,
|
||||
context: t.Optional[str] = None,
|
||||
**kwargs: t.Any,
|
||||
) -> T:
|
||||
"""
|
||||
@ -94,14 +102,29 @@ class Filter:
|
||||
:type value: object
|
||||
:rtype: same as the type of ``value``.
|
||||
"""
|
||||
return self.apply_from_context(None, value, *args, **kwargs)
|
||||
|
||||
def apply_from_context(
|
||||
self,
|
||||
context: t.Optional[str],
|
||||
value: T,
|
||||
*args: P.args,
|
||||
**kwargs: P.kwargs,
|
||||
) -> T:
|
||||
for callback in self.callbacks:
|
||||
try:
|
||||
value = callback.apply(value, *args, context=context, **kwargs)
|
||||
except:
|
||||
sys.stderr.write(
|
||||
f"Error applying filter '{self.name}': func={callback.func} contexts={callback.contexts}'\n"
|
||||
)
|
||||
raise
|
||||
if callback.is_in_context(context):
|
||||
try:
|
||||
|
||||
value = callback.apply(
|
||||
value,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
except:
|
||||
sys.stderr.write(
|
||||
f"Error applying filter '{self.name}': func={callback.func} contexts={callback.contexts}'\n"
|
||||
)
|
||||
raise
|
||||
return value
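For context-restricted application, a rough sketch of the behaviour implemented above; the filter name is invented, and entering a context is assumed to work via the context's ``enter()`` helper, roughly as suggested by the Contexts docstring earlier in this diff:

from tutor import hooks

greetings = hooks.filters.get("my:greetings")

@greetings.add()
def _always(values):
    return values + ["hello"]

with hooks.Contexts.APP("myplugin").enter():  # .enter() is assumed here
    @greetings.add()
    def _only_my_plugin(values):
        return values + ["bonjour"]

greetings.apply([])                                                    # ["hello", "bonjour"]
greetings.apply_from_context(hooks.Contexts.APP("myplugin").name, [])  # ["bonjour"]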

def clear(self, context: t.Optional[str] = None) -> None:
@ -114,11 +137,45 @@ class Filter:
if not callback.is_in_context(context)
]

# The methods below are specific to filters which take lists as first arguments
def add_item(self: "Filter[t.List[E], P]", item: E) -> None:
self.add_items([item])

class FilterTemplate:
def add_items(self: "Filter[t.List[E], P]", items: t.List[E]) -> None:
# Unfortunately we have to type-ignore this line. If not, mypy complains with:
#
# Argument 1 has incompatible type "Callable[[Arg(List[E], 'values'), **P], List[E]]"; expected "Callable[[List[E], **P], List[E]]"
# This is likely because "callback" has named arguments: "values". Consider marking them positional-only
#
# But we are unable to mark arguments positional-only (by adding / after values arg) in Python 3.7.
# Get rid of this statement after Python 3.7 EOL.
@self.add() # type: ignore
def callback(
values: t.List[E], *_args: P.args, **_kwargs: P.kwargs
) -> t.List[E]:
return values + items

def iterate(
self: "Filter[t.List[E], P]", *args: P.args, **kwargs: P.kwargs
) -> t.Iterator[E]:
yield from self.iterate_from_context(None, *args, **kwargs)

def iterate_from_context(
self: "Filter[t.List[E], P]",
context: t.Optional[str],
*args: P.args,
**kwargs: P.kwargs,
) -> t.Iterator[E]:
yield from self.apply_from_context(context, [], *args, **kwargs)
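A short, hypothetical sketch of the list-specific helpers above (the filter name is invented):

from tutor.hooks import filters

names = filters.get("my:names")
names.add_item("alice")            # append a single element
names.add_items(["bob", "carol"])  # append several elements at once

# iterate() applies the filter to an empty list and yields the accumulated items.
assert list(names.iterate()) == ["alice", "bob", "carol"]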


class FilterTemplate(t.Generic[T, P]):
"""
Filter templates are for filters for which the name needs to be formatted
before the filter can be applied.

Similar to :py:class:`tutor.hooks.actions.ActionTemplate`, filter templates are used to generate
:py:class:`Filter` objects for which the name matches a certain template.
"""

def __init__(self, name: str):
@ -127,7 +184,7 @@ class FilterTemplate:
def __repr__(self) -> str:
return f"{self.__class__.__name__}('{self.template}')"

def __call__(self, *args: t.Any, **kwargs: t.Any) -> Filter:
def __call__(self, *args: t.Any, **kwargs: t.Any) -> Filter[T, P]:
return get(self.template.format(*args, **kwargs))


@ -135,7 +192,7 @@ class FilterTemplate:
get = Filter.get


def get_template(name: str) -> FilterTemplate:
def get_template(name: str) -> FilterTemplate[t.Any, t.Any]:
"""
Create a filter with a template name.

@ -145,15 +202,17 @@ def get_template(name: str) -> FilterTemplate:
named_filter = filter_template("name")

@named_filter.add()
def my_callback():
def my_callback(x: int) -> int:
...

named_filter.do()
named_filter.apply(42)
"""
return FilterTemplate(name)
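Since a FilterTemplate simply formats its name on call (see ``__call__`` above), a hypothetical template would behave like this (names invented):

from tutor.hooks import filters

template = filters.get_template("my:namespace:{}")
blue = template("blue")  # same object as filters.get("my:namespace:blue")
red = template("red")    # same object as filters.get("my:namespace:red")
assert blue is filters.get("my:namespace:blue")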


def add(name: str) -> t.Callable[[CallableFilter], CallableFilter]:
def add(
name: str,
) -> t.Callable[[t.Callable[Concatenate[T, P], T]], t.Callable[Concatenate[T, P], T]]:
"""
Decorator for functions that will be applied to a single named filter.

@ -214,9 +273,7 @@ def add_items(name: str, items: t.List[T]) -> None:
get(name).add_items(items)


def iterate(
name: str, *args: t.Any, context: t.Optional[str] = None, **kwargs: t.Any
) -> t.Iterator[T]:
def iterate(name: str, *args: t.Any, **kwargs: t.Any) -> t.Iterator[T]:
"""
Convenient function to iterate over the items of a filter's result list.

@ -230,16 +287,19 @@ def iterate(

:rtype iterator[T]: iterator over the list items from the filter with the same name.
"""
yield from Filter.get(name).iterate(*args, context=context, **kwargs)
yield from iterate_from_context(None, name, *args, **kwargs)


def apply(
name: str,
value: T,
*args: t.Any,
context: t.Optional[str] = None,
**kwargs: t.Any,
) -> T:
def iterate_from_context(
context: t.Optional[str], name: str, *args: t.Any, **kwargs: t.Any
) -> t.Iterator[T]:
"""
Same as :py:func:`iterate` but apply only callbacks from a given context.
"""
yield from Filter.get(name).iterate_from_context(context, *args, **kwargs)


def apply(name: str, value: T, *args: t.Any, **kwargs: t.Any) -> T:
"""
Apply all declared filters to a single value, passing along the additional arguments.

@ -252,7 +312,17 @@ def apply(
:type value: object
:rtype: same as the type of ``value``.
"""
return Filter.get(name).apply(value, *args, context=context, **kwargs)
return apply_from_context(None, name, value, *args, **kwargs)


def apply_from_context(
context: t.Optional[str], name: str, value: T, *args: P.args, **kwargs: P.kwargs
) -> T:
"""
Same as :py:func:`apply` but only run the callbacks that were created in a given context.
"""
filtre: Filter[T, P] = Filter.get(name)
return filtre.apply_from_context(context, value, *args, **kwargs)
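For completeness, a small sketch of the module-level, function-based API defined above, mirroring the object methods (the filter name is invented):

from tutor.hooks import filters

@filters.add("my:ints")
def _add_ten(value: int) -> int:
    return value + 10

assert filters.apply("my:ints", 1) == 11
# Equivalent call without restricting to any creation context:
assert filters.apply_from_context(None, "my:ints", 1) == 11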


def clear_all(context: t.Optional[str] = None) -> None:

@ -62,18 +62,16 @@ def initialise(runner: BaseJobRunner, limit_to: t.Optional[str] = None) -> None:
filter_context = hooks.Contexts.APP(limit_to).name if limit_to else None

# Pre-init tasks
iter_pre_init_tasks: t.Iterator[
t.Tuple[str, t.Iterable[str]]
] = hooks.Filters.COMMANDS_PRE_INIT.iterate(context=filter_context)
for service, path in iter_pre_init_tasks:
for service, path in hooks.Filters.COMMANDS_PRE_INIT.iterate_from_context(
filter_context
):
fmt.echo_info(f"Running pre-init task: {'/'.join(path)}")
runner.run_job_from_template(service, *path)

# Init tasks
iter_init_tasks: t.Iterator[
t.Tuple[str, t.Iterable[str]]
] = hooks.Filters.COMMANDS_INIT.iterate(context=filter_context)
for service, path in iter_init_tasks:
for service, path in hooks.Filters.COMMANDS_INIT.iterate_from_context(
filter_context
):
fmt.echo_info(f"Running init task: {'/'.join(path)}")
runner.run_job_from_template(service, *path)

@ -41,8 +41,7 @@ def iter_installed() -> t.Iterator[str]:
The CORE_READY action must have been triggered prior to calling this function,
otherwise no installed plugin will be detected.
"""
plugins: t.Iterator[str] = hooks.Filters.PLUGINS_INSTALLED.iterate()
yield from sorted(plugins)
yield from sorted(hooks.Filters.PLUGINS_INSTALLED.iterate())


def iter_info() -> t.Iterator[t.Tuple[str, t.Optional[str]]]:
@ -51,10 +50,11 @@ def iter_info() -> t.Iterator[t.Tuple[str, t.Optional[str]]]:

Yields (<plugin name>, <info>) tuples.
"""
versions: t.Iterator[
t.Tuple[str, t.Optional[str]]
] = hooks.Filters.PLUGINS_INFO.iterate()
yield from sorted(versions, key=lambda v: v[0])

def plugin_info_name(info: t.Tuple[str, t.Optional[str]]) -> str:
return info[0]

yield from sorted(hooks.Filters.PLUGINS_INFO.iterate(), key=plugin_info_name)


def is_loaded(name: str) -> bool:
@ -72,7 +72,7 @@ def load_all(names: t.Iterable[str]) -> None:
for name in names:
try:
load(name)
except Exception as e:
except Exception as e: # pylint: disable=broad-except
fmt.echo_alert(f"Failed to enable plugin '{name}': {e}")
hooks.Actions.PLUGINS_LOADED.do()

@ -57,7 +57,7 @@ class BasePlugin:
hooks.Filters.PLUGINS_INSTALLED.add_item(self.name)

# Add plugin version
hooks.Filters.PLUGINS_INFO.add_item((self.name, self._version()))
hooks.Filters.PLUGINS_INFO.add_item((self.name, self._version() or ""))

# Create actions and filters on load
hooks.Actions.PLUGIN_LOADED(self.name).add()(self.__load)
@ -167,7 +167,7 @@ class BasePlugin:
# We assume that the dockerfile is in the build/myimage folder.
for img, tag in build_image_tasks.items():
hooks.Filters.IMAGES_BUILD.add_item(
(img, ("plugins", self.name, "build", img), tag, []),
(img, ("plugins", self.name, "build", img), tag, ()),
)
# Remote images: hooks = {"remote-image": {"myimage": "myimage:latest"}}
for img, tag in remote_image_tasks.items():

@ -122,7 +122,7 @@ LANGUAGE_COOKIE_NAME = "openedx-language-preference"
# Allow the platform to include itself in an iframe
X_FRAME_OPTIONS = "SAMEORIGIN"

{% set jwt_rsa_key = rsa_import_key(JWT_RSA_PRIVATE_KEY) %}
{% set jwt_rsa_key | rsa_import_key %}{{ JWT_RSA_PRIVATE_KEY }}{% endset %}
JWT_AUTH["JWT_ISSUER"] = "{{ JWT_COMMON_ISSUER }}"
JWT_AUTH["JWT_AUDIENCE"] = "{{ JWT_COMMON_AUDIENCE }}"
JWT_AUTH["JWT_SECRET_KEY"] = "{{ JWT_COMMON_SECRET_KEY }}"

@ -1,6 +1,8 @@
# The Tutor plugin system is licensed under the terms of the Apache 2.0 license.
__license__ = "Apache 2.0"

import typing as t

# https://mypy.readthedocs.io/en/latest/kinds_of_types.html#type-aliases
from typing_extensions import TypeAlias

from . import exceptions

@ -15,6 +17,7 @@ ConfigValue: TypeAlias = t.Union[
t.Dict[str, t.Any],
t.Dict[t.Any, t.Any],
]

Config: TypeAlias = t.Dict[str, ConfigValue]
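For illustration only, a hypothetical config mapping of the kind this alias is meant to describe; the keys are examples, and the full ConfigValue union is partly elided above this hunk:

config: Config = {
    "LMS_HOST": "www.myopenedx.com",                 # plain string value
    "ENABLE_HTTPS": True,                            # boolean value
    "EXTRA_SETTINGS": {"FEATURES": {"FLAG": True}},  # nested dict, matching t.Dict[str, t.Any]
}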