diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..8341019 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,2 @@ +[run] +omit = *_test.py,__main__.py,__init__.py diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..caee7ad --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,34 @@ +name: Publish + +on: + push: + branches: + - "master" + +env: + PYTHON_VERSION: 3.11.3 + +jobs: + publish: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: install poetry + run: pip3 install poetry + + - name: install deps + run: poetry install + + + - name: test + run: poetry run pytest --cache-clear + + - name: publish + run: | + poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN}} + poetry build + poetry publish diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..e69de29 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..059d038 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +__pycache__ +.coverage +htmlcov/ +dist/ diff --git a/.imgs/zapusk.png b/.imgs/zapusk.png new file mode 100644 index 0000000..6c3765b Binary files /dev/null and b/.imgs/zapusk.png differ diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000..9014f12 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,7 @@ +Copyright 2024 Anton Shuvalov + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..a68a3fe --- /dev/null +++ b/README.md @@ -0,0 +1,237 @@ +# Zapusk + +![Zapusk ScreenShot](.imgs/zapusk.png) + +Zapusk is a job runner for desktop environments. It helps you manage background tasks with features like pre-configured job execution, background shell commands, scheduling with cron-like syntax, log tailing, and notifications. It also provides detailed JSON output for easy data manipulation and analysis. + +## Key Features + +- **Preconfigured Jobs:** Run jobs defined in your configuration files. +- **Background Command Execution:** Run shell commands in the background with optional log tailing. +- **Cron-like Scheduling:** Schedule tasks using flexible cron syntax. +- **Log Tailing:** View logs in real-time. +- **Job Management:** Cancel running jobs and check their statuses. +- **Job Groups:** Share settings like callbacks and parallelism between jobs. +- **Colored JSON Output:** Easily readable JSON output. +- **Waybar Integration:** Display job statuses and notifications on Waybar. + +## Installation + +Install Zapusk using `pip`: + +```sh +pip install zapusk +``` + +## Usage + +Zapusk offers a command-line interface for managing and executing jobs. 
Here's a quick reference: + +```sh +Usage: + zapusk -h | --help + zapusk --version + zapusk run <job_config_id> [--colors|--no-colors] [--tail] + zapusk exec <command> [--name=<name>] [--group=<group>] [--tail] [--schedule=<schedule>] [--colors|--no-colors] + zapusk cancel <job_id> [--scheduled] [--colors|--no-colors] + zapusk tail <job_id> + zapusk list [--filter=<status>|--scheduled] [--colors|--no-colors] + zapusk config_jobs [--colors|--no-colors] + zapusk config_groups [--colors|--no-colors] + zapusk waybar + +Options: + -h --help Show this screen. + --version Show version. + --colors Enable colors. + --no-colors Disable colors. + --filter=<status> Filter jobs by status. + -n --name=<name> Name for a command. + -g --group=<group> Job group to run the command in. + -t --tail Tail logfile immediately. + +Examples: + + # Run npm install in the background + zapusk exec "npm i" + + # Run pytest and tail its log + zapusk exec "pytest -v" -t + + # Schedule a command to run every minute + zapusk exec "ping -c4 google.com" --schedule "*/1 * * * *" + + # Run a job defined in ~/.config/zapusk/config.yaml + zapusk run youtube_dl + + # Cancel a job by its ID + zapusk cancel 42 + + # See logs for a job by its ID + zapusk tail 42 +``` + +## Example Configuration + +Here is an example configuration file for Zapusk. It defines job groups and individual jobs, specifying commands, schedules, and notifications. 
+ +```yaml +# Port server starts on and client call to +port: 9876 + +# Enable colored JSON output +colors: True + +job_groups: + - id: unsplash + parallel: 1 + - id: sleep + parallel: 2 + - id: cmd + parallel: 10 + on_finish: notify-send -a "zapusk" "Command Finished" "{job.name} has finished" --icon kitty + on_fail: notify-send -a "zapusk" "Command Failed" "{job.name} has failed" --icon kitty + - id: cronie + parallel: 1 + on_finish: notify-send -a "zapusk" "Scheduled Job Finished" "{job.name} has finished" --icon kitty + on_fail: notify-send -a "zapusk" "Scheduled Job Failed" "{job.name} has failed" --icon kitty + +jobs: + - name: Unsplash Download + id: unsplash + args_command: "zenity --entry --text 'Collection ID'" + command: ~/.bin/jobs/unsplash_dl.sh + + - name: Sleep + id: sleep + group: sleep + args_command: "zenity --entry --text 'Sleep Time'" + command: sleep + on_finish: notify-send -a "zapusk" "Job Finished" "{job.name} has finished" --icon kitty + on_fail: notify-send -a "zapusk" "Job Failed" "{job.name} has failed" --icon kitty + + - name: Cronie + id: cronie + group: cronie + schedule: "*/10 * * * *" + command: sleep 2 +``` + +## Advanced Usage + +### Running Preconfigured Jobs + +Run jobs defined in your configuration file using their `id`. 
+ +```yaml +# Job configuration in ~/.config/zapusk/config.yaml +jobs: + - name: Unsplash Download + id: unsplash + args_command: "zenity --entry --text 'Collection ID'" + command: ~/.bin/jobs/unsplash_wallpaper_collection_download.sh + on_finish: notify-send -a "Zapusk" "Wallpapers downloaded" --icon kitty + on_fail: notify-send -a "Zapusk" "Wallpaper download failed" --icon kitty +``` + +```sh +# Run the `unsplash` job: +zapusk run unsplash +``` + +### Background Command Execution + +Run commands in the background with optional log tailing: + +```sh +zapusk exec "npm i" -t +``` + +### Scheduling Commands + +Schedule commands to run at specific intervals using cron syntax: + +```sh +zapusk exec "ping -c4 google.com" --schedule "*/1 * * * *" +``` + +Pre-configured jobs can also be scheduled: + +```yaml +jobs: + - name: Cronie + id: cronie + group: cronie + schedule: "*/10 * * * *" + command: sleep 2 +``` + +### Managing Jobs + +Cancel a running or scheduled job by its ID: + +```sh +zapusk cancel 42 +``` + +Tail the logs of a running job by its ID: + +```sh +zapusk tail 42 +``` + +List all pending, running, and finished jobs: + +```sh +zapusk list +``` + +### Callbacks + +Use `on_finish` and `on_fail` callbacks for notifications. + +For job group callbacks: + +```yaml +job_groups: + - id: my_group + parallel: 10 + on_finish: notify-send -a "zapusk" "Command Finished" "{job.name} has finished" --icon kitty + on_fail: notify-send -a "zapusk" "Command Failed" "{job.name} has failed" --icon kitty +``` + +For individual job callbacks: + +```yaml +jobs: + - name: Sleep + id: sleep + group: sleep + command: ~/.bin/jobs/sleep + on_finish: notify-send -a "zapusk" "Job Finished" "{job.name} has finished" --icon kitty + on_fail: notify-send -a "zapusk" "Job Failed" "{job.name} has failed" --icon kitty +``` + +### Waybar Integration + +Zapusk integrates with Waybar to display job statuses and notifications directly on your desktop. 
+ +```json +// Example integration with wofi and jq +"custom/zapusk": { + "exec": "zapusk waybar", + "on-click": "zapusk config_jobs --no-colors | jq -r \".[].id\" | wofi --dmenu | xargs -I{} zapusk run {}", + "tooltip": true, + "return-type": "json", + "format": "{}", + "interval": 1 +} +``` + +## Contribution + +We welcome contributions! If you find a bug or have an idea for improvement, please open an issue or submit a pull request on our GitHub repository. + +## License + +Zapusk is licensed under the MIT License. See the [LICENSE](LICENSE) file for more information. diff --git a/config.example.yaml b/config.example.yaml new file mode 100644 index 0000000..f4778af --- /dev/null +++ b/config.example.yaml @@ -0,0 +1,27 @@ +# Example configuration for +port: 9876 + +job_groups: + - id: default + parallel: 10 + - id: sequential + parallel: 1 + - id: parallel + parallel: 2 + + +jobs: + - name: Sleep 10 Seconds + id: sleep_10 + command: sleep 10 + + - name: Sleep 30 Seconds + group: parallel + id: sleep_30 + command: sleep 30 + + - name: Configurable Sleep + id: sleep + group: sequential + args_command: "zenity --entry --text 'Sleep Time'" + command: "sleep $1" diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..305233c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,793 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "black" +version = "24.4.2" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = 
"black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blinker" +version = "1.8.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = 
"sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, +] + +[[package]] +name = "certifi" +version = "2024.6.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file 
= "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = 
"sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.5.3" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = 
"coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + 
{file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = "coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "croniter" +version = "2.0.5" +description = "croniter provides iteration for datetime object with cron like format" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" +files = [ + {file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"}, + {file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"}, +] + +[package.dependencies] +python-dateutil = "*" +pytz = ">2021.1" + +[[package]] +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +optional = false +python-versions = "*" +files = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] + +[[package]] +name = "flask" +version = "3.0.3" +description = "A simple 
framework for building complex web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.3-py3-none-any.whl", hash = "sha256:34e815dfaa43340d1d15a5c3a02b8476004037eb4840b34910c6e21679d288f3"}, + {file = "flask-3.0.3.tar.gz", hash = "sha256:ceb27b0af3823ea2737928a4d99d125a06175b8512c445cbd9a9ce200ef76842"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "human-readable" +version = "1.3.4" +description = "Human Readable" +optional = false +python-versions = ">=3.8" +files = [ + {file = "human_readable-1.3.4-py3-none-any.whl", hash = "sha256:e8b1211ddf98344b45cb4f3ce49957c9ec59639075e749ce6ce641144996a479"}, + {file = "human_readable-1.3.4.tar.gz", hash = "sha256:5726eac89066ec25d14447a173e645a855184645d024eb306705e2bfbb60f0c0"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + 
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pdoc" +version = "14.5.1" +description = "API Documentation for Python Projects" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pdoc-14.5.1-py3-none-any.whl", hash = "sha256:fda6365a06e438b43ca72235b58a2e2ecd66445fcc444313f6ebbde4b0abd94b"}, + {file = "pdoc-14.5.1.tar.gz", hash = "sha256:4ddd9c5123a79f511cedffd7231bf91a6e0bd0968610f768342ec5d00b5eefee"}, +] + +[package.dependencies] +Jinja2 = ">=2.11.0" +MarkupSafe = "*" +pygments = ">=2.12.0" + +[package.extras] +dev = ["hypothesis", "mypy", "pdoc-pyo3-sample-library (==1.0.11)", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" 
+description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "responses" +version = "0.25.3" +description = "A utility library for mocking out the `requests` Python library." +optional = false +python-versions = ">=3.8" +files = [ + {file = "responses-0.25.3-py3-none-any.whl", hash = "sha256:521efcbc82081ab8daa588e08f7e8a64ce79b91c39f6e62199b19159bea7dbcb"}, + {file = "responses-0.25.3.tar.gz", hash = "sha256:617b9247abd9ae28313d57a75880422d55ec63c29d33d629697590a034358dba"}, +] + +[package.dependencies] +pyyaml = "*" +requests = ">=2.30.0,<3.0" +urllib3 = ">=1.25.10,<3.0" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] + +[[package]] +name = "sh" +version = "2.0.7" +description = "Python subprocess replacement" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "sh-2.0.7-py3-none-any.whl", hash = "sha256:2f2f79a65abd00696cf2e9ad26508cf8abb6dba5745f40255f1c0ded2876926d"}, + {file = "sh-2.0.7.tar.gz", hash = "sha256:029d45198902bfb967391eccfd13a88d92f7cebd200411e93f99ebacc6afbb35"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = 
"six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "testfixtures" +version = "8.3.0" +description = "A collection of helpers and mock objects for unit tests and doc tests." +optional = false +python-versions = ">=3.7" +files = [ + {file = "testfixtures-8.3.0-py3-none-any.whl", hash = "sha256:3d1e0e0005c4d6ac2a2ab27916704c6471047f0d2f78f2e54adf20abdacc7b10"}, + {file = "testfixtures-8.3.0.tar.gz", hash = "sha256:d4c0b84af2f267610f908009b50d6f983a4e58ade22c67bab6787b5a402d59c0"}, +] + +[package.extras] +build = ["setuptools-git", "twine", "wheel"] +docs = ["django", "furo", "sphinx", "sybil (>=6)", "twisted"] +test = ["django", "mypy", "pytest (>=7.1)", "pytest-cov", "pytest-django", "sybil (>=6)", "twisted"] + +[[package]] +name = "type-docopt" +version = "0.8.2" +description = "Pythonic argument parser, with type description." +optional = false +python-versions = "*" +files = [ + {file = "type-docopt-0.8.2.tar.gz", hash = "sha256:ecd23359c3e184b9384f664f7a8c0de8550abc538af4f404e97eee99b9320b83"}, + {file = "type_docopt-0.8.2-py3-none-any.whl", hash = "sha256:3a57e97f276f47da06954e08a0a94a89130d8c80dfa18ae22902f0e181b00773"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "werkzeug" +version = "3.0.3" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, + {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "66d26082354a8305dfae429a767a7023f77ed229e953bea4133333ffd4ea9888" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..64d4046 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "zapusk" +version = "0.1.0" +description = "" +authors = ["Anton Shuvalov "] +readme = "README.md" + +[tool.poetry.scripts] +zapusk-server = "zapusk.__main__:main" +zapusk = "zapusk.client.__main__:main" + +[tool.pytest.ini_options] +pythonpath = "." + +[tool.poetry.dependencies] +python = "^3.12" +docopt = "^0.6.2" +pyyaml = "^6.0.1" +flask = "^3.0.3" +requests = "^2.32.3" +pygments = "^2.18.0" +type-docopt = "^0.8.2" +python-dateutil = "^2.9.0.post0" +pdoc = "^14.5.1" +human-readable = "^1.3.4" +sh = "^2.0.7" +croniter = "^2.0.5" + +[tool.poetry.group.dev.dependencies] +pytest = "^7.4.0" +pytest-cov = "^4.1.0" +black = "^24.4.2" +pytest-timeout = "^2.3.1" +testfixtures = "^8.3.0" +responses = "^0.25.3" + + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..503ec49 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +timeout = 20 +; log_cli = 1 +; log_cli_level = DEBUG + diff --git a/zapusk/__init__.py b/zapusk/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zapusk/__main__.py b/zapusk/__main__.py new file mode 100644 index 0000000..252a4b5 --- /dev/null +++ b/zapusk/__main__.py @@ -0,0 +1,73 @@ +#! 
/bin/python +import logging +from docopt import docopt +import importlib.metadata + +from zapusk.server import create_app +from zapusk.logger import set_loglevel +from zapusk.services.config.service import ConfigService +from zapusk.services import ( + ExecutorManagerService, + ExecutorManagerKawkaBackend, +) +from zapusk.services.scheduler_service.service import SchedulerService + +logger = logging.getLogger(__name__) + +doc = """zapusk + +Simple background job runner + +Usage: + zapusk-server -h | --help + zapusk-server --version + zapusk-server [--config=PATH] [--verbose] + +Options: + -h --help Show this screen + --version Show version. + -v --verbose Enable logging + + --config PATH Define custom config + + +Examples: + pusk start --config ~/.config/pusk/pusk.yml +""" + +version = importlib.metadata.version("zapusk") + + +def main(): + args = docopt(doc, version=version) + print(args) + + if "--verbose" in args: + set_loglevel("DEBUG") + logger.info("Verbose logging has been enabled") + + logger.info(f"{args}") + logger.info("Start") + + executor_manager_service = ExecutorManagerService( + backend=ExecutorManagerKawkaBackend(), + ) + + config_service = ConfigService(args["--config"]) + + scheduler_service = SchedulerService( + executor_manager_service=executor_manager_service, + config_service=config_service, + ) + scheduler_service.start() + + app = create_app( + executor_manager_service=executor_manager_service, + config_service=config_service, + scheduler_service=scheduler_service, + ) + app.run(host="0.0.0.0", port=config_service.get_config().port) + + +if __name__ == "__main__": + main() diff --git a/zapusk/client/__init__.py b/zapusk/client/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zapusk/client/__main__.py b/zapusk/client/__main__.py new file mode 100644 index 0000000..c5fcb5c --- /dev/null +++ b/zapusk/client/__main__.py @@ -0,0 +1,128 @@ +from type_docopt import docopt +import importlib.metadata + +from 
zapusk.client.command_manager import CommandManager +from zapusk.models.job import JOB_STATE_ENUM + + +doc = """zapusk-client + + +Usage: + zapusk-client -h | --help + zapusk-client --version + zapusk-client run [--colors|--no-colors] [--tail] + zapusk-client exec [--name=] [--group=] [--tail] [--schedule=] [--colors|--no-colors] + zapusk-client cancel [--scheduled] [--colors|--no-colors] + zapusk-client tail + zapusk-client list [--filter=|--scheduled] [--colors|--no-colors] + zapusk-client config_jobs [--colors|--no-colors] + zapusk-client config_groups [--colors|--no-colors] + zapusk-client waybar + + +Options: + -h --help Show this screen + --version Show version. + --colors Enable colors + --no-colors Disable colors + --filter= Filter running jobs by status [type: JobState] + -n --name= Name for a command + -g --group= Job group to run command in + -t --tail Tail logfile immediately + +Examples: + zapusk run upload_to_s3 + zapusk status +""" + +version = importlib.metadata.version("zapusk") + + +class JobState: + STATES = [e.value for e in JOB_STATE_ENUM] + + def __init__(self, state): + try: + assert state in self.STATES + self.state = state + except AssertionError as e: + print( + f"Status filter has wrong value. 
Possible values are {', '.join(self.STATES)}", + ) + exit(1) + + +def main(): + args = docopt(doc, version=version, types={"JobStatus": JobState}) + + colors = None + + if args["--colors"] == True: + colors = True + + if args["--no-colors"] == True: + colors = False + + command_manager = CommandManager(colors=colors) + + if args["run"] == True: + command_manager.run.run( + job_config_id=str(args[""]), + ) + return + + if args["exec"] == True: + command_manager.exec.run( + command=str(args[""]), + group_id=str(args["--group"]) if args["--group"] else None, + name=str(args["--name"]) if args["--name"] else None, + schedule=str(args["--schedule"]) if args["--schedule"] else None, + tail=bool(args["--tail"]), + ) + return + + if args["cancel"] == True: + command_manager.cancel.run( + job_id=str(args[""]), + scheduled=bool(args["--scheduled"]), + ) + return + + if args["list"] == True: + command_manager.list.run( + scheduled=bool(args["--scheduled"]), + filter=args["--filter"].state if args["--filter"] else None, + ) + return + + if args["list"] == True: + command_manager.list.run( + scheduled=bool(args["--scheduled"]), + filter=args["--filter"], + ) + return + + if args["config_groups"] == True: + command_manager.config_groups.run() + return + + if args["config_jobs"] == True: + command_manager.config_jobs.run() + return + + if args["waybar"] == True: + command_manager.waybar.run() + return + + if args["tail"] == True: + command_manager.tail.run( + job_id=str(args[""]), + ) + return + + command_manager.output.json({"error": "Command not found"}) + + +if __name__ == "__main__": + main() diff --git a/zapusk/client/api_client.py b/zapusk/client/api_client.py new file mode 100644 index 0000000..1fcc775 --- /dev/null +++ b/zapusk/client/api_client.py @@ -0,0 +1,140 @@ +from collections.abc import Mapping +from typing import NotRequired, Optional, TypedDict +from urllib.parse import urljoin +import requests + +from zapusk.lib.json_serdes import JsonSerdes + 
+DEFAULT_ERROR_MESSAGE = "Server respond with an error" + + +class ApiClientError(Exception): + """ + Base class for ApiClient exceptions + """ + + def __init__(self, message, *args, **kwargs) -> None: + self.message = message + super().__init__(*args, **kwargs) + + def __str__(self) -> str: + return f"{self.message}" + + +class JobCreateFromConfigPayload(TypedDict): + job_config_id: str + + +class JobCreateFromCommandPayload(TypedDict): + command: str + name: NotRequired[Optional[str]] + group_id: NotRequired[Optional[str]] + + +class JobCreateScheduledPayload(JobCreateFromCommandPayload): + schedule: str + + +class ApiClient: + http_client = requests + + def __init__(self, base_url: str) -> None: + self.base_url = base_url + + def __filter_none(self, d: Mapping): + return {k: v for k, v in d.items() if v is not None} + + def __handle_error(self, res): + body = res.json() + if "error" in res.json(): + raise ApiClientError(body["error"]) + else: + raise ApiClientError(DEFAULT_ERROR_MESSAGE) + + def get_job(self, job_id: str | int): + res = self.http_client.get(urljoin(self.base_url, f"/jobs/{job_id}")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def list_jobs(self): + res = self.http_client.get(urljoin(self.base_url, f"/jobs/")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def list_scheduled_jobs(self): + res = self.http_client.get(urljoin(self.base_url, f"/scheduled-jobs/")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def create_job( + self, payload: JobCreateFromConfigPayload | JobCreateFromCommandPayload + ): + res = requests.post( + urljoin(self.base_url, "/jobs/"), + json=self.__filter_none(payload), + ) + + if res.status_code != 200: + return self.__handle_error(res) + + return 
JsonSerdes.deserialize(res.json()) + + def create_scheduled_job(self, payload: JobCreateScheduledPayload): + res = requests.post( + urljoin(self.base_url, "/scheduled-jobs/"), + json=self.__filter_none(payload), + ) + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(res.json()) + + def cancel_job(self, job_id: str | int): + res = self.http_client.delete(urljoin(self.base_url, f"/jobs/{job_id}")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def cancel_scheduled_job(self, job_id: str | int): + res = self.http_client.delete( + urljoin(self.base_url, f"/scheduled-jobs/{job_id}") + ) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def get_config_groups(self): + res = self.http_client.get(urljoin(self.base_url, f"/config/groups/")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) + + def get_config_jobs(self): + res = self.http_client.get(urljoin(self.base_url, f"/config/jobs/")) + body = res.json() + + if res.status_code != 200: + return self.__handle_error(res) + + return JsonSerdes.deserialize(body) diff --git a/zapusk/client/api_client_test.py b/zapusk/client/api_client_test.py new file mode 100644 index 0000000..8ddd20f --- /dev/null +++ b/zapusk/client/api_client_test.py @@ -0,0 +1,370 @@ +from unittest import TestCase +import pytest +import responses +from responses import matchers + +from zapusk.client.api_client import DEFAULT_ERROR_MESSAGE, ApiClient, ApiClientError + + +BASE_URL = "http://localhost:4000" +api_client = ApiClient(base_url=BASE_URL) + + +@pytest.fixture(autouse=True) +def setUp(): + api_client = ApiClient(base_url=BASE_URL) + + +@responses.activate +@pytest.mark.parametrize( + ",".join( + [ + "method", + "args", + "uri", + "status", + "http_method", + "matchers", + 
"mocked_json", + "expected_response", + "expected_exception_message", + ] + ), + [ + ( + "get_job", + [1], + "/jobs/1", + 200, + "get", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "get_job", + [1], + "/jobs/1", + 400, + "get", + [], + {"error": "Error"}, + None, + "Error", + ), + ( + "list_jobs", + [], + "/jobs/", + 200, + "get", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "list_jobs", + [], + "/jobs/", + 400, + "get", + [], + {"error": "ERROR"}, + None, + "ERROR", + ), + ( + "list_scheduled_jobs", + [], + "/scheduled-jobs/", + 200, + "get", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "list_scheduled_jobs", + [], + "/scheduled-jobs/", + 400, + "get", + [], + {"error": "ERROR"}, + None, + "ERROR", + ), + ( + "cancel_job", + [1], + "/jobs/1", + 200, + "delete", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "cancel_job", + [1], + "/jobs/1", + 400, + "delete", + [], + {"error": "ERROR"}, + None, + "ERROR", + ), + ( + "cancel_scheduled_job", + [1], + "/scheduled-jobs/1", + 200, + "delete", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "cancel_scheduled_job", + [1], + "/scheduled-jobs/1", + 400, + "delete", + [], + {"error": "ERROR"}, + None, + "ERROR", + ), + ( + "get_config_groups", + [], + "/config/groups/", + 200, + "get", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "get_config_groups", + [], + "/config/groups/", + 400, + "get", + [], + {"error": "Error"}, + None, + "Error", + ), + ( + "get_config_jobs", + [], + "/config/jobs/", + 200, + "get", + [], + {"data": "OK"}, + "OK", + None, + ), + ( + "get_config_jobs", + [], + "/config/jobs/", + 400, + "get", + [], + {"error": "Error"}, + None, + "Error", + ), + ( + "create_job", + [ + { + "job_config_id": "echo", + } + ], + "/jobs/", + 200, + "post", + [ + matchers.json_params_matcher( + { + "job_config_id": "echo", + } + ) + ], + {"data": "OK"}, + "OK", + None, + ), + ( + "create_job", + [ + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + }, + ], + "/jobs/", + 
200, + "post", + [ + matchers.json_params_matcher( + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + } + ) + ], + {"data": "OK"}, + "OK", + None, + ), + ( + "create_job", + [ + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + }, + ], + "/jobs/", + 400, + "post", + [ + matchers.json_params_matcher( + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + } + ) + ], + {"error": "ERROR"}, + None, + "ERROR", + ), + ( + "create_job", + [ + { + "command": "echo 1", + }, + ], + "/jobs/", + 400, + "post", + [], + {}, + None, + DEFAULT_ERROR_MESSAGE, + ), + ( + "create_scheduled_job", + [ + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + "schedule": "*/1 * * * *", + } + ], + "/scheduled-jobs/", + 200, + "post", + [ + matchers.json_params_matcher( + { + "command": "echo 1", + "name": "Echo", + "group_id": "group", + "schedule": "*/1 * * * *", + } + ) + ], + {"data": "OK"}, + "OK", + None, + ), + ( + "create_scheduled_job", + [ + { + "command": "echo 1", + "schedule": "*/1 * * * *", + } + ], + "/scheduled-jobs/", + 400, + "post", + [], + {"error": "ERROR"}, + None, + "ERROR", + ), + ], + ids=[ + "get_job", + "get_job_non_200", + "list_jobs", + "list_jobs_non_200", + "list_scheduled_jobs", + "list_scheduled_jobs_non_200", + "cancel_job", + "cancel_job_non_200", + "cancel_scheduled_job", + "cancel_scheduled_job_non_200", + "get_config_groups", + "get_config_groups_non_200", + "get_config_jobs", + "get_config_jobs_non_200", + "create_job_from_config", + "create_job_from_command", + "create_job_non_200", + "create_job_non_200_without_error_body", + "create_scheduled_job", + "create_scheduled_job_non_200", + ], +) +def test_get_job( + method, + args, + uri, + status, + http_method, + matchers, + mocked_json, + expected_response, + expected_exception_message, +): + try: + mocked_http_method = getattr(responses, http_method) + mocked_http_method( + url=f"{BASE_URL}{uri}", + status=status, + json=mocked_json, + 
match=matchers, + ) + + mocked_method = getattr(api_client, method) + res = mocked_method(*args) + assert res == expected_response + except ApiClientError as ex: + assert ex.message == expected_exception_message + + +def test_exception_str(): + ex = ApiClientError("test") + assert "test" == f"{ex}" diff --git a/zapusk/client/command.py b/zapusk/client/command.py new file mode 100644 index 0000000..c376159 --- /dev/null +++ b/zapusk/client/command.py @@ -0,0 +1,30 @@ +from __future__ import annotations +from typing import TYPE_CHECKING + +from .api_client import ApiClient, ApiClientError +from .output import Output + +if TYPE_CHECKING: # pragma: no cover + from .command_manager import CommandManager + + +class Command: + def __init__( + self, + manager: CommandManager, + api_client: ApiClient, + output: Output, + colors=False, + ) -> None: + self.api_client = api_client + self.colors = colors + self.output = output + self.manager = manager + + def run(self, *args, **kwargs): ... # pragma: no cover + + def print_json(self, json_data, one_line=False): + self.output.json(json_data, colors=self.colors, one_line=one_line) + + def print_error(self, exception): + self.output.error(exception, colors=self.colors) diff --git a/zapusk/client/command_cancel.py b/zapusk/client/command_cancel.py new file mode 100644 index 0000000..c3a209a --- /dev/null +++ b/zapusk/client/command_cancel.py @@ -0,0 +1,17 @@ +from .api_client import ApiClientError +from .command import Command + + +class CommandCancel(Command): + def run(self, job_id: str | int, scheduled: bool = False): + try: + if scheduled: + cancelled_job = self.api_client.cancel_scheduled_job(job_id) + self.print_json(cancelled_job) + return + + cancelled_job = self.api_client.cancel_job(job_id) + self.print_json(cancelled_job) + + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_cancel_test.py b/zapusk/client/command_cancel_test.py new file mode 100644 index 0000000..12003bd --- /dev/null 
+++ b/zapusk/client/command_cancel_test.py @@ -0,0 +1,38 @@ +import json +import responses + +from .command_testcase import CommandTestCase + + +class TestCommandCancel(CommandTestCase): + @responses.activate + def test_should_cancel_job(self): + responses.delete("http://example.com/jobs/1", status=200, json={"data": True}) + + self.command_manager.cancel.run(job_id=1) + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, True) + + @responses.activate + def test_should_cancel_scheduled_job(self): + responses.delete( + "http://example.com/scheduled-jobs/1", status=200, json={"data": True} + ) + + self.command_manager.cancel.run(job_id=1, scheduled=True) + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, True) + + @responses.activate + def test_should_handle_error(self): + responses.delete( + "http://example.com/jobs/1", status=400, json={"error": "ERROR"} + ) + + self.command_manager.cancel.run(job_id=1) + args = self.printer.print.call_args[0] + message = json.loads(args[0]) + + self.assertEqual(message, {"error": {"message": "ERROR"}}) diff --git a/zapusk/client/command_config_groups.py b/zapusk/client/command_config_groups.py new file mode 100644 index 0000000..d21a32e --- /dev/null +++ b/zapusk/client/command_config_groups.py @@ -0,0 +1,11 @@ +from .api_client import ApiClientError +from .command import Command + + +class CommandConfigGroups(Command): + def run(self): + try: + config_groups = self.api_client.get_config_groups() + self.print_json(config_groups) + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_config_groups_test.py b/zapusk/client/command_config_groups_test.py new file mode 100644 index 0000000..8312d6b --- /dev/null +++ b/zapusk/client/command_config_groups_test.py @@ -0,0 +1,34 @@ +import json +import responses + +from .command_testcase import CommandTestCase + + +class TestCommandConfigGroups(CommandTestCase): + 
@responses.activate + def test_should_cancel_job(self): + data = [ + {"id": 1}, + {"id": 2}, + ] + + responses.get( + "http://example.com/config/groups/", status=200, json={"data": data} + ) + + self.command_manager.config_groups.run() + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, data) + + @responses.activate + def test_should_handle_error(self): + responses.get( + "http://example.com/config/groups/", status=400, json={"error": "ERROR"} + ) + + self.command_manager.config_groups.run() + args = self.printer.print.call_args[0] + message = json.loads(args[0]) + + self.assertEqual(message, {"error": {"message": "ERROR"}}) diff --git a/zapusk/client/command_config_jobs.py b/zapusk/client/command_config_jobs.py new file mode 100644 index 0000000..7b1eb69 --- /dev/null +++ b/zapusk/client/command_config_jobs.py @@ -0,0 +1,11 @@ +from .api_client import ApiClientError +from .command import Command + + +class CommandConfigJobs(Command): + def run(self): + try: + config_jobs = self.api_client.get_config_jobs() + self.print_json(config_jobs) + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_config_jobs_test.py b/zapusk/client/command_config_jobs_test.py new file mode 100644 index 0000000..3249894 --- /dev/null +++ b/zapusk/client/command_config_jobs_test.py @@ -0,0 +1,34 @@ +import json +import responses + +from .command_testcase import CommandTestCase + + +class TestCommandConfigJobs(CommandTestCase): + @responses.activate + def test_should_cancel_job(self): + data = [ + {"id": 1}, + {"id": 2}, + ] + + responses.get( + "http://example.com/config/jobs/", status=200, json={"data": data} + ) + + self.command_manager.config_jobs.run() + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, data) + + @responses.activate + def test_should_handle_error(self): + responses.get( + "http://example.com/config/jobs/", status=400, json={"error": "ERROR"} + ) + + 
self.command_manager.config_jobs.run() + args = self.printer.print.call_args[0] + message = json.loads(args[0]) + + self.assertEqual(message, {"error": {"message": "ERROR"}}) diff --git a/zapusk/client/command_exec.py b/zapusk/client/command_exec.py new file mode 100644 index 0000000..1d1a803 --- /dev/null +++ b/zapusk/client/command_exec.py @@ -0,0 +1,48 @@ +from typing import Optional + +from zapusk.client.api_client import ApiClientError + +from .command import Command + + +class CommandExec(Command): + def run( + self, + command: str, + name: Optional[str] = None, + group_id: Optional[str] = None, + schedule: Optional[str] = None, + tail: bool = False, + ): + try: + # exec scheduled job + if schedule: + created_job = self.api_client.create_scheduled_job( + { + "command": command, + "group_id": group_id, + "name": name, + "schedule": schedule, + } + ) + + self.print_json(created_job) + return + + # exec normal job + created_job = self.api_client.create_job( + { + "command": command, + "group_id": group_id, + "name": name, + } + ) + + if tail: + self.manager.tail.run(created_job["id"]) + return + + self.print_json(created_job) + + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_exec_test.py b/zapusk/client/command_exec_test.py new file mode 100644 index 0000000..da831d6 --- /dev/null +++ b/zapusk/client/command_exec_test.py @@ -0,0 +1,97 @@ +import json +from unittest.mock import call, patch +import responses +from responses import matchers + +from .command_testcase import CommandTestCase + + +class TestCommandExec(CommandTestCase): + @responses.activate + def test_should_exec_job(self): + responses.post( + "http://example.com/jobs/", + status=200, + json={"data": {"id": 1}}, + match=[ + matchers.json_params_matcher( + { + "command": "echo 1", + "group_id": "echo", + "name": "Echo", + } + ) + ], + ) + + self.command_manager.exec.run( + command="echo 1", + group_id="echo", + name="Echo", + ) + json_data = 
json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, {"id": 1}) + + @responses.activate + def test_should_exec_scheduled_job(self): + responses.post( + "http://example.com/scheduled-jobs/", + status=200, + json={"data": {"id": 1}}, + match=[ + matchers.json_params_matcher( + { + "command": "echo 1", + "group_id": "echo", + "name": "Echo", + "schedule": "*/1 * * * *", + } + ) + ], + ) + + self.command_manager.exec.run( + command="echo 1", group_id="echo", name="Echo", schedule="*/1 * * * *" + ) + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, {"id": 1}) + + @responses.activate + def test_should_handle_error(self): + responses.post( + "http://example.com/jobs/", + status=400, + json={"error": "ERROR"}, + ) + + self.command_manager.exec.run(command="echo 1") + args = self.printer.print.call_args[0] + message = json.loads(args[0]) + + self.assertEqual(message, {"error": {"message": "ERROR"}}) + + @responses.activate + def test_should_tail_job(self): + responses.post( + "http://example.com/jobs/", + status=200, + json={"data": {"id": 1}}, + ) + responses.get( + "http://example.com/jobs/1", + status=200, + json={"data": {"id": 1, "log": "/var/tail.log"}}, + ) + + with patch( + "zapusk.client.command_tail.tail", return_value=["log line 1", "log line 2"] + ): + self.command_manager.exec.run(command="echo 1", tail=True) + + log_line1 = self.printer.print.call_args_list[0] + log_line2 = self.printer.print.call_args_list[1] + + self.assertEqual(log_line1, call("log line 1", end="")) + self.assertEqual(log_line2, call("log line 2", end="")) diff --git a/zapusk/client/command_list.py b/zapusk/client/command_list.py new file mode 100644 index 0000000..649a313 --- /dev/null +++ b/zapusk/client/command_list.py @@ -0,0 +1,29 @@ +from typing import Any +from zapusk.models.job import JOB_STATE_ENUM + +from .api_client import ApiClientError +from .command import Command + + +class CommandList(Command): + def run( + 
self, + filter: Any = None, + scheduled: bool = False, + ): + try: + if scheduled: + jobs = self.api_client.list_scheduled_jobs() + self.print_json(jobs) + return + + jobs = self.api_client.list_jobs() + + if filter and filter != "ALL": + jobs = [i for i in jobs if i["state"] == filter] + + self.print_json(jobs) + return + + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_list_test.py b/zapusk/client/command_list_test.py new file mode 100644 index 0000000..94bc3bb --- /dev/null +++ b/zapusk/client/command_list_test.py @@ -0,0 +1,74 @@ +import json +import responses + +from zapusk.models.job import JOB_STATE_ENUM + +from .command_testcase import CommandTestCase + + +class TestCommandList(CommandTestCase): + @responses.activate + def test_should_list_jobs(self): + data = [ + { + "id": 1, + "state": JOB_STATE_ENUM.PENDING, + }, + { + "id": 2, + "state": JOB_STATE_ENUM.RUNNING, + }, + ] + + responses.get("http://example.com/jobs/", status=200, json={"data": data}) + + self.command_manager.list.run() + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, data) + + @responses.activate + def test_should_list_jobs_with_filter(self): + data = [ + { + "id": 1, + "state": JOB_STATE_ENUM.PENDING, + }, + { + "id": 2, + "state": JOB_STATE_ENUM.RUNNING, + }, + ] + + responses.get("http://example.com/jobs/", status=200, json={"data": data}) + + self.command_manager.list.run(filter=JOB_STATE_ENUM.PENDING) + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, [data[0]]) + + @responses.activate + def test_should_list_scheduled_jobs(self): + data = [ + { + "id": 1, + }, + ] + + responses.get( + "http://example.com/scheduled-jobs/", status=200, json={"data": data} + ) + + self.command_manager.list.run(scheduled=True) + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, data) + + @responses.activate + def 
test_should_list_jobs_error(self): + responses.get("http://example.com/jobs/", status=400, json={"error": "ERROR"}) + + self.command_manager.list.run() + json_data = json.loads(self.printer.print.call_args[0][0]) + + self.assertEqual(json_data, {"error": {"message": "ERROR"}}) diff --git a/zapusk/client/command_manager.py b/zapusk/client/command_manager.py new file mode 100644 index 0000000..b83e9d4 --- /dev/null +++ b/zapusk/client/command_manager.py @@ -0,0 +1,52 @@ +from typing import Optional +from zapusk.client.api_client import ApiClient +from zapusk.client.command_tail import CommandTail +from zapusk.services.config.service import ConfigService +from .command_exec import CommandExec +from .command_run import CommandRun +from .command_cancel import CommandCancel +from .command_list import CommandList +from .command_waybar import CommandWaybar +from .command_config_jobs import CommandConfigJobs +from .command_config_groups import CommandConfigGroups +from .output import Output + + +class CommandManager: + def __init__( + self, + config_service=ConfigService(), + output=Output(), + colors=None, + api_client: Optional[ApiClient] = None, + ) -> None: + self.output = output + self.config_service = config_service + config = self.config_service.get_config() + + self.api_client = ( + api_client + if api_client + else ApiClient( + base_url=f"http://localhost:{config.port}/", + ) + ) + + self.colors = ( + colors if colors != None else self.config_service.get_config().colors + ) + kwargs = { + "colors": self.colors, + "output": self.output, + "api_client": self.api_client, + "manager": self, + } + + self.exec = CommandExec(**kwargs) + self.run = CommandRun(**kwargs) + self.cancel = CommandCancel(**kwargs) + self.list = CommandList(**kwargs) + self.waybar = CommandWaybar(**kwargs) + self.tail = CommandTail(**kwargs) + self.config_jobs = CommandConfigJobs(**kwargs) + self.config_groups = CommandConfigGroups(**kwargs) diff --git a/zapusk/client/command_run.py 
class CommandRun(Command):
    """CLI command that enqueues a job by its configured id."""

    def run(
        self,
        job_config_id: str,
    ):
        """Ask the server to create a job for `job_config_id`.

        Prints the created job as JSON on success, or the API error
        envelope on failure.
        """
        payload = {"job_config_id": job_config_id}
        try:
            created_job = self.api_client.create_job(payload)
            self.print_json(created_job)
        except ApiClientError as ex:
            self.print_error(ex)
= self.api_client.get_job(job_id) + while not job["log"]: + sleep(1) + job = self.api_client.get_job(job_id) + + for line in tail("-f", "-n", "+1", job["log"], _iter=True): + self.output.text(line, end="") + except KeyboardInterrupt: # pragma: no cover + self.output.text("Tail has been closed") + sys.exit(0) + except ApiClientError as ex: + self.print_error(ex) diff --git a/zapusk/client/command_tail_test.py b/zapusk/client/command_tail_test.py new file mode 100644 index 0000000..be49b04 --- /dev/null +++ b/zapusk/client/command_tail_test.py @@ -0,0 +1,42 @@ +import json +from unittest.mock import call, patch +import responses + +from .command_testcase import CommandTestCase + + +class TestCommandExec(CommandTestCase): + @responses.activate + def test_should_tail_job(self): + responses.get( + "http://example.com/jobs/1", + status=200, + json={"data": {"id": 1, "log": None}}, + ) + + responses.get( + "http://example.com/jobs/1", + status=200, + json={"data": {"id": 1, "log": "/var/tail.log"}}, + ) + + with patch( + "zapusk.client.command_tail.tail", return_value=["log line 1", "log line 2"] + ): + self.command_manager.tail.run(job_id=1) + + log_line1 = self.printer.print.call_args_list[0] + log_line2 = self.printer.print.call_args_list[1] + + self.assertEqual(log_line1, call("log line 1", end="")) + self.assertEqual(log_line2, call("log line 2", end="")) + + @responses.activate + def test_should_handle_error(self): + responses.get("http://example.com/jobs/1", status=400, json={"error": "ERROR"}) + + self.command_manager.tail.run(job_id=1) + args = self.printer.print.call_args[0] + message = json.loads(args[0]) + + self.assertEqual(message, {"error": {"message": "ERROR"}}) diff --git a/zapusk/client/command_testcase.py b/zapusk/client/command_testcase.py new file mode 100644 index 0000000..3ae46e1 --- /dev/null +++ b/zapusk/client/command_testcase.py @@ -0,0 +1,26 @@ +from unittest import TestCase +from unittest.mock import MagicMock +from zapusk.client.api_client 
class CommandWaybar(Command):
    """Renders job statistics as a waybar custom-module JSON payload."""

    def run(self):
        """Print one-line JSON with a summary `text` and a job `tooltip`."""
        try:
            all_jobs = self.api_client.list_jobs()
            self.output.json(
                {
                    "text": self.__build_text(all_jobs),
                    "tooltip": self.__build_tooltip(all_jobs),
                },
                one_line=True,
            )

        except ApiClientError as ex:
            # Waybar still expects JSON on failure, so emit a minimal payload.
            self.output.text("{" + f'"text": "{ex}"' + "}")

    def __build_text(self, all_jobs):
        # One "<icon> <count>" pair per state, in a fixed display order.
        def count(state):
            return len([i for i in all_jobs if i["state"] == state])

        return " ".join(
            f"{STATE_LOOKUP[s]} {count(s)}"
            for s in ("PENDING", "RUNNING", "FINISHED", "FAILED", "CANCELLED")
        )

    def __build_tooltip(self, all_jobs):
        # Show the newest jobs first; the job list is append-ordered.
        LAST_JOBS_AMOUNT = 20
        now = datetime.datetime.now().timestamp()
        last_jobs = islice(reversed(all_jobs), LAST_JOBS_AMOUNT)

        # NOTE: single-quoted keys inside a double-quoted f-string — the
        # previous nested `{i["id"]}` form only parses on Python 3.12+.
        return "\r\n".join(
            f"{i['name']}(id={i['id']}) "
            f"{TIME_PREFIX_LOOKUP[i['state']]} "
            f"{human_readable.date_time(now - self.__parse(i['updated_at']))}"
            for i in last_jobs
        )

    def __parse(self, date_str):
        # BUG FIX: `import dateutil` alone does not load the `parser`
        # submodule, so `dateutil.parser.parse` could raise AttributeError.
        # Import the submodule explicitly at the point of use.
        from dateutil import parser as dateutil_parser

        return dateutil_parser.parse(date_str, ignoretz=True).timestamp()
class Output:
    """
    Manages output to a terminal.

    All printing funnels through a `Printer` so tests can substitute a mock.
    """

    def __init__(self, printer=None) -> None:
        # BUG FIX: `printer=Printer()` as a default argument was evaluated
        # once at definition time, so every Output shared one Printer.
        self.printer = printer if printer is not None else Printer()

    def json(self, json_data, colors=False, one_line=False, **kwargs):
        """
        Prints JSON into stdout or stderr, optionally compact (`one_line`)
        or syntax-highlighted (`colors`).
        """
        if one_line:
            self.printer.print(json.dumps(json_data), **kwargs)
            return

        formatted_json = json.dumps(json_data, indent=2)

        if not colors:
            self.printer.print(formatted_json, **kwargs)
            return

        colorful_json = highlight(
            formatted_json, lexers.JsonLexer(), formatters.TerminalFormatter()
        )
        self.printer.print(colorful_json, **kwargs)

    def error(self, exception, **kwargs):
        """
        Prints an `{"error": {"message": ...}}` envelope as JSON to stderr.
        """
        error = {"error": {"message": exception.message}}
        self.json(error, file=sys.stderr, **kwargs)

    def text(self, *args, **kwargs):
        """
        Prints plain text via the underlying printer.
        """
        self.printer.print(*args, **kwargs)
b/zapusk/client/output_test.py @@ -0,0 +1,76 @@ +import sys +from unittest import TestCase +from unittest.mock import patch + + +from .output import Output +from .printer import Printer + + +class MockPrinter(Printer): + def print(self, *args, **kwargs): + pass + + +class TestOutput(TestCase): + def setUp(self) -> None: + self.mock_printer = MockPrinter() + return super().setUp() + + def test_should_print_to_stdout(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.text("Hello World!") + + mock.assert_called_with("Hello World!") + + def test_should_print_to_stderr(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.text("Hello World!", file=sys.stderr) + + mock.assert_called_with("Hello World!", file=sys.stderr) + + def test_should_print_json(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.json({"key": "val"}) + + mock.assert_called_with('{\n "key": "val"\n}') + + def test_should_print_json_one_line(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.json({"key": "val"}, one_line=True) + + mock.assert_called_with('{"key": "val"}') + + def test_should_print_json_with_colors(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.json({"key": "val"}, colors=True) + + mock.assert_called_with( + '{\x1b[37m\x1b[39;49;00m\n\x1b[37m \x1b[39;49;00m\x1b[94m"key"\x1b[39;49;00m:\x1b[37m \x1b[39;49;00m\x1b[33m"val"\x1b[39;49;00m\x1b[37m\x1b[39;49;00m\n}\x1b[37m\x1b[39;49;00m\n' + ) + + def test_should_print_json_error(self): + with patch.object(self.mock_printer, "print") as mock: + output = Output(printer=self.mock_printer) + output.json({"key": "val"}, file=sys.stderr) + + mock.assert_called_with('{\n "key": "val"\n}', file=sys.stderr) + + def 
class Consumer(Thread):
    """
    A thread that polls a `Producer` and feeds each message to `process`.

    Subclasses override `process` (and optionally `on_end`). With
    `from_head=True` the consumer replays the topic from its first message;
    otherwise it starts from the current tail.
    """

    def __init__(
        self,
        producer: Producer,
        name: Optional[str] = None,
        from_head=False,
        context: Optional[dict[str, Any]] = None,
        *args,
        **kwargs,
    ):
        """
        Args:
            producer: source of messages to consume.
            name: label used in logs; defaults to the subclass name.
            from_head: start at the first message instead of the tail.
            context: arbitrary shared state made available to `process`
                implementations (e.g. a sink producer).
        """
        super(Consumer, self).__init__(*args, **kwargs)
        self.context: dict[str, Any] = context or {}
        self.producer = producer
        self.from_head = from_head
        # Fall back to the concrete class name so logs stay identifiable.
        self.name = name if name else type(self).__name__

    def on_end(self):
        """Hook invoked once the producer's iterator is exhausted."""
        logger.info(f"{self} reached the very end of the {self.producer}")
        pass

    def process(self, msg, *args, **kwargs):
        """Handle one message; subclasses are expected to override this."""
        logger.info(f"{self}: process {msg}")  # pragma: no cover

    def run(self):
        """Thread body: iterate the producer and dispatch to `process`."""
        logger.info(f"{self}: start polling events")
        # `all()` replays from the head; iterating the producer directly
        # yields only messages added after this point.
        iterator = self.producer.all() if self.from_head else self.producer

        for msg in iterator:
            logger.info(f"{self}: message received {msg}")
            self.process(msg)
            logger.info(f"{self}: waiting for upcoming message")

        self.on_end()

    def __str__(self) -> str:
        return f"consumer.{self.name}"
from_head=from_head, producer=self.producer + ) + self.parallel = parallel + self._consumers = [] + self.from_head = from_head + + self.name = name + if not self.name: + self.name = type(self).__name__ + + self._consumers = [ + Consumer( + producer=self.consumerGroupIterator, # type: ignore + name=f"{self.name}_{i}", + context=self.context, + ) + for i in range(self.parallel) + ] + + def start(self): + [c.start() for c in self._consumers] + + def join(self, timeout: int): + [c.join(timeout) for c in self._consumers] diff --git a/zapusk/kawka/consumer_group_test.py b/zapusk/kawka/consumer_group_test.py new file mode 100644 index 0000000..6d27fcc --- /dev/null +++ b/zapusk/kawka/consumer_group_test.py @@ -0,0 +1,89 @@ +from time import sleep +import itertools +from unittest import TestCase + +from .consumer import Consumer +from .consumer_group import ConsumerGroup +from .producer import Producer + + +class DummyConsumer(Consumer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.results = [] + + def process(self, msg): + sleep(0.01) + msg["consumed_by"] = self.name + self.results.append(msg) + + +class ConsumerGroupTest(TestCase): + def test_consumergroup_seq100_parallel1(self): + producer = Producer(name="test_producer", block=True) + + cg = ConsumerGroup(producer=producer, Consumer=DummyConsumer, parallel=1) + cg.start() + + [producer.add({"id": i, "consumed_by": None}) for i in range(100)] + producer.add(Producer.End) + + cg.join(5) + c = cg._consumers[0] + + self.assertEqual(len(c.results), 100) + self.assertEqual( + all(map(lambda x: type(x["consumed_by"]) == str, c.results)), True + ) + + def test_consumergroup_seq100_parallel2(self): + producer = Producer(name="test_producer", block=True) + + cg = ConsumerGroup( + name="DummyGroup", + producer=producer, + Consumer=DummyConsumer, + parallel=2, + ) + cg.start() + + [producer.add({"id": i, "consumed_by": None}) for i in range(100)] + producer.add(Producer.End) + + cg.join(5) + + 
results = [c.results for c in cg._consumers] + results = list(itertools.chain.from_iterable(results)) + + consumed_by = list(map(lambda x: x["consumed_by"], results)) + + self.assertEqual(len(results), 100) + + self.assertEqual(any(map(lambda x: x == "DummyGroup_0", consumed_by)), True) + self.assertEqual(any(map(lambda x: x == "DummyGroup_1", consumed_by)), True) + + def test_consumergroup_sink(self): + input_producer = Producer(name="input_producer", block=True) + sink_producer = Producer(name="sink_producer", block=True) + + class SinkConsumer(Consumer): + def process(self, msg): + self.context["sink"].add(msg) # type: ignore + + cg = ConsumerGroup( + name="SinkGroup", + producer=input_producer, + Consumer=SinkConsumer, + parallel=1, + context={ + "sink": sink_producer, + }, + ) + cg.start() + + [input_producer.add({"id": i}) for i in range(100)] + input_producer.add(Producer.End) + + cg.join(2) + + self.assertEqual(len(list(sink_producer.all(block=False))), 100) diff --git a/zapusk/kawka/consumer_test.py b/zapusk/kawka/consumer_test.py new file mode 100644 index 0000000..094745f --- /dev/null +++ b/zapusk/kawka/consumer_test.py @@ -0,0 +1,91 @@ +from time import sleep +from unittest import TestCase + +from .consumer import Consumer +from .producer import Producer + + +class DummyConsumer(Consumer): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.results = [] + + def process(self, msg): + msg["consumed"] = True + self.results.append(msg) + + +class SleepyConsumer(Consumer): + def __init__(self, sleep=0, *args, **kwargs): + super().__init__(*args, **kwargs) + self.sleep = sleep + self.results = [] + + def process(self, msg): + sleep(self.sleep) + msg["consumed"] = True + self.results.append(msg) + + +class ConsumerTest(TestCase): + def test_read_from_non_block_producer_head(self): + producer = Producer(name="DummyProducer", block=False) + consumer = DummyConsumer(producer=producer, from_head=True) + + [producer.add({"id": i, 
"consumed": False}) for i in range(10)] + + consumer.start() + consumer.join() + + self.assertEqual(len(consumer.results), 10) + self.assertEqual(all(map(lambda x: x["consumed"], consumer.results)), True) + + def test_read_from_block_producer_head(self): + producer = Producer(name="DummyProducer", block=True) + consumer = DummyConsumer(producer=producer, from_head=True) + + [producer.add({"id": i, "consumed": False}) for i in range(10)] + producer.add(Producer.End) + + consumer.start() + consumer.join() + + self.assertEqual(len(consumer.results), 10) + self.assertEqual(all(map(lambda x: x["consumed"], consumer.results)), True) + + def test_read_from_producer_tail(self): + producer = Producer(name="DummyProducer", block=True) + + # This events should be ignored, because no consumer yet + [producer.add({"id": i, "consumed": False}) for i in range(-10, 0)] + + # Now start a consumer. It will take only the last item with id -1 + consumer = DummyConsumer(producer=producer) + consumer.results = [] + consumer.start() + + # And handle this events + [producer.add({"id": i, "consumed": False}) for i in range(10)] + producer.add(Producer.End) + + consumer.join() + self.assertEqual(len(consumer.results), 11) + self.assertEqual(all(map(lambda x: x["consumed"], consumer.results)), True) + self.assertEqual( + list(map(lambda x: x["id"], consumer.results)), list(range(-1, 10)) + ) + + def test_slow_consumer(self): + producer = Producer(name="DummyProducer", block=True) + consumer = SleepyConsumer(producer=producer, sleep=1) + consumer.start() + + producer.add({"id": 1, "consumed": False}) + sleep(0.5) + producer.add({"id": 2, "consumed": False}) + producer.add(Producer.End) + + consumer.join() + + self.assertEqual(len(consumer.results), 2) + self.assertEqual(all(map(lambda x: x["consumed"], consumer.results)), True) diff --git a/zapusk/kawka/linked_list.py b/zapusk/kawka/linked_list.py new file mode 100644 index 0000000..1ee9ea3 --- /dev/null +++ b/zapusk/kawka/linked_list.py @@ 
-0,0 +1,22 @@ +class LinkedList[T]: + """ + Simple linked list implementation + """ + + next: "LinkedList[T] | None" = None + """ + Link to a next element + """ + + def __init__(self, data: T): + self.data = data + + def append(self, data: T): + """ + Appends an element to the linked list + """ + self.next = LinkedList(data) + return self.next + + def __str__(self): + return f"linked_list.{self.data}" diff --git a/zapusk/kawka/linked_list_test.py b/zapusk/kawka/linked_list_test.py new file mode 100644 index 0000000..4b40940 --- /dev/null +++ b/zapusk/kawka/linked_list_test.py @@ -0,0 +1,21 @@ +from unittest import TestCase +from .linked_list import LinkedList + + +class TestLinkedList(TestCase): + def test_linked_list_create(self): + head = LinkedList(1) + + self.assertEqual(head.data, 1) + self.assertEqual(head.next, None) + + def test_linked_list_append(self): + head = LinkedList(1) + item = head.append(2) + + self.assertEqual(head.next, item) + + def test_linked_list_str(self): + head = LinkedList("data") + + self.assertEqual(f"{head}", "linked_list.data") diff --git a/zapusk/kawka/producer.py b/zapusk/kawka/producer.py new file mode 100644 index 0000000..7557dbe --- /dev/null +++ b/zapusk/kawka/producer.py @@ -0,0 +1,51 @@ +import logging +from .topic import Topic, End + +logger = logging.getLogger(__name__) + + +class Producer: + End = End + + terminated = False + + def __init__(self, name, block=True): + self.name = name + + self.__topic = Topic(name=self.name) + self.__block = block + logger.info(f"{self}: initialized") + + def add(self, msg): + # TODO: probably, not needed + logger.info(f"{self}: collected a message {msg}") + + if self.terminated: + return + + if msg == End: + self.terminated = True + logger.info(f"{self}: terminated") + + self.__topic.add(msg) + + def all(self, block=None): + """ + Iterate over all items from head + """ + if block is not None: + return self.__topic.iter(block=block, head=self.__topic.head) + + return 
class ProducerTest(TestCase):
    """Tests for `Producer` message collection and End-termination."""

    def test_collect_messages_non_block(self):
        producer = Producer(name="test_producer", block=False)
        # Plain loop: comprehensions-for-side-effects obscure intent.
        for i in range(10):
            producer.add(i)
        self.assertEqual(len(producer), 10)
        self.assertEqual(list(producer.all()), list(range(10)))

    def test_block_producer_should_terminate_end(self):
        producer = Producer(name="test_producer", block=True)
        for i in [*range(10), Producer.End]:
            producer.add(i)

        # Messages added after End must be ignored.
        for i in range(10):
            producer.add(i)

        # BUG FIX: this previously built `[producer for i in ...]` —
        # a list of references to the producer itself — and never checked
        # the actual messages. Collect and assert the messages instead.
        results = list(producer.all())
        self.assertEqual(results, list(range(10)))
        self.assertEqual(len(producer), 10)

    def test_producer_should_become_terminated_after_receiving_end(self):
        producer = Producer(name="test_producer", block=True)
        for i in [*range(10), Producer.End]:
            producer.add(i)

        length_before = len(producer)
        for i in range(10):
            producer.add(i)

        self.assertEqual(producer.terminated, True)
        self.assertEqual(len(producer), length_before)
+ """ + + head: LinkedList[L[T]] + """ + Link to the first element of the topic + """ + + last: LinkedList[L[T]] + """ + Link to the last element of the topic + """ + + def __init__(self, name: str): + self.name = name + self.mutex = threading.Lock() + self.received = threading.Condition(self.mutex) + + self.head = LinkedList(Start) + self.last = self.head + + def __len__(self): + iter = TopicIterator(block=False, topic=self, head=self.head) + return len(list(iter)) + + def add(self, data: T): + """ + Append a new item to a topic + """ + with self.mutex: + self.last = self.last.append(data) + self.received.notify() + return self + + def iter(self, block=True, head=None): + """ + Creates a blocking or non-blocking iterator over a topic. + """ + return TopicIterator(block=block, topic=self, head=head) + + def __str__(self) -> str: + return f"topic.{self.name}" diff --git a/zapusk/kawka/topic_iterator.py b/zapusk/kawka/topic_iterator.py new file mode 100644 index 0000000..85175db --- /dev/null +++ b/zapusk/kawka/topic_iterator.py @@ -0,0 +1,69 @@ +import logging + +logger = logging.getLogger(__name__) + + +class End: + pass + + +class Start: + pass + + +class TopicIterator: + def __init__(self, topic, head, block=True): + self.topic = topic + self.block = block + + self.prev = None + self.cur = head + + def __iter__(self): + logger.debug(f"{self}: initialized") + return self + + def __next__(self): + # Skip Start node + if self.cur and self.cur.data == Start: + [self.prev, self.cur] = [self.cur, self.cur.next] + + if self.block: + # If iterator reached the end of a topic, let's terminate + if self.cur and self.cur.data is End: + logger.debug(f"{self}: iterator is over. StopIteration") + with self.topic.received: + self.topic.received.notify() + raise StopIteration + + if self.prev and not self.cur: + if not self.prev.next: + logger.debug( + f"{self}: waiting for upcoming message. 
self.prev:{self.prev.data}" + ) + with self.topic.received: + self.topic.received.wait() + + self.cur = self.prev.next + + # if self.cur is @End, terminate iterator and notify all other readers + if self.cur and self.cur.data is End: + logger.debug(f"{self}: StopIteration") + with self.topic.received: + self.topic.received.notify() + raise StopIteration + + [self.prev, self.cur] = [self.cur, self.cur.next] + logger.debug(f"{self}: returns {self.prev.data}") + return self.prev.data + + # Non-block iteration + if not self.cur or self.cur.data is End: + logger.debug(f"{self}: StopIteration") + raise StopIteration + + [self.prev, self.cur] = [self.cur, self.cur.next] + return self.prev.data + + def __str__(self) -> str: + return f"iter.{self.topic.name}" diff --git a/zapusk/kawka/topic_test.py b/zapusk/kawka/topic_test.py new file mode 100644 index 0000000..34ffb39 --- /dev/null +++ b/zapusk/kawka/topic_test.py @@ -0,0 +1,43 @@ +from unittest import TestCase + +from .topic_iterator import End +from .topic import Topic + + +class TestTopic(TestCase): + def test_topic_len_0(self): + topic = Topic(name="test") + + self.assertEqual(len(topic), 0) + + def test_topic_len_3(self): + topic = Topic(name="test") + topic.add(1) + topic.add(2) + topic.add(3) + + self.assertEqual(len(topic), 3) + + def test_topic_str(self): + topic = Topic(name="test") + + self.assertEqual("topic.test", f"{topic}") + + def test_topic_iter_non_block(self): + topic = Topic(name="test") + [topic.add(i) for i in range(10)] + + self.assertEqual( + list(range(10)), + list(topic.iter(head=topic.head, block=False)), + ) + + def test_topic_iter_block(self): + topic = Topic(name="test") + [topic.add(i) for i in range(10)] + topic.add(End) + + self.assertEqual( + list(range(10)), + list(topic.iter(head=topic.head)), + ) diff --git a/zapusk/lib/__init__.py b/zapusk/lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/zapusk/lib/create_jobitem.py b/zapusk/lib/create_jobitem.py new file mode 
def create_jobitem(
    command: str,
    args_command=None,
    args=None,
    state=Job.JOB_STATE_ENUM.PENDING,
    on_finish=None,
    on_fail=None,
    group_on_finish=None,
    group_on_fail=None,
):
    """
    Test helper: build a `Job` from a throwaway config/group pair.

    Args:
        command: shell command for the job config.
        args_command: optional command that produces extra args.
        args: args to assign to the created job (defaults to empty).
        state: initial job state (defaults to PENDING).
        on_finish / on_fail: job-level callbacks.
        group_on_finish / group_on_fail: group-level callbacks.
    """
    # BUG FIX: `args=[]` as a default argument is evaluated once and shared
    # across calls; a caller mutating `item.args` would leak into every
    # later default-args job. Use the None-sentinel idiom instead.
    if args is None:
        args = []

    item = Job.from_config(
        config=JobConfig(
            id="test_config",
            name="Test Job Config",
            command=command,
            group="default",
            args_command=args_command,
            on_finish=on_finish,
            on_fail=on_fail,
        ),
        group_config=JobGroup(
            id="default",
            parallel=2,
            on_finish=group_on_finish,
            on_fail=group_on_fail,
        ),
    )
    item.args = args
    item.state = state
    return item
class BaseModel:
    """
    Mixin for dataclasses adding field-wise equality against both other
    instances of the same type and plain dicts, plus a lenient `from_dict`
    constructor that drops unknown keys.
    """

    def __eq__(self, value: object, /) -> bool:
        """Compare field by field with a same-type instance or a dict."""
        if isinstance(value, type(self)):
            return all(
                getattr(self, fld.name) == getattr(value, fld.name)
                for fld in fields(self)  # type: ignore
            )

        if isinstance(value, dict):
            # Missing dict keys compare as None via `.get`.
            return all(
                getattr(self, fld.name) == value.get(fld.name)
                for fld in fields(self)  # type: ignore
            )

        return False

    @classmethod
    def from_dict(cls, env):
        """Build an instance from `env`, ignoring keys the ctor rejects."""
        accepted = inspect.signature(cls).parameters
        return cls(**{k: v for k, v in env.items() if k in accepted})
class Model(BaseModel): + attr: int = 1 + + model = Model() + + self.assertNotEqual(model, {"attr": 2}) + + def test_base_model_should_compare_dataclasses(self): + @dataclass(eq=False) + class Model(BaseModel): + attr: int + + model1 = Model(attr=1) + model2 = Model(attr=1) + + self.assertEqual(model1, model2) + + def test_base_model_should_compare_dataclasses_fail(self): + @dataclass(eq=False) + class Model(BaseModel): + attr: int + + model1 = Model(attr=1) + model2 = Model(attr=2) + + self.assertNotEqual(model1, model2) + + def test_base_model_should_compare_other_types(self): + @dataclass(eq=False) + class Model(BaseModel): + attr: int = 1 + + model = Model() + + self.assertNotEqual(model, 1) + self.assertNotEqual(model, "1") + self.assertNotEqual(model, None) diff --git a/zapusk/models/config.py b/zapusk/models/config.py new file mode 100644 index 0000000..6788d33 --- /dev/null +++ b/zapusk/models/config.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass, field +from .base_model import BaseModel + +from .job_config import JobConfig +from .job_group import JobGroup + + +@dataclass(eq=False) +class Config(BaseModel): + port: int + colors: bool + job_groups: dict[str, JobGroup] = field(default_factory=dict) + jobs: dict[str, JobConfig] = field(default_factory=dict) diff --git a/zapusk/models/id_field.py b/zapusk/models/id_field.py new file mode 100644 index 0000000..4b928ea --- /dev/null +++ b/zapusk/models/id_field.py @@ -0,0 +1,15 @@ +lookup: dict[str, int] = {} + + +class IdField: + @staticmethod + def next(id: str): + if id not in lookup: + lookup[id] = 0 + + lookup[id] += 1 + return lookup[id] + + @staticmethod + def reset(id): + lookup[id] = 0 diff --git a/zapusk/models/id_field_test.py b/zapusk/models/id_field_test.py new file mode 100644 index 0000000..fcc61b3 --- /dev/null +++ b/zapusk/models/id_field_test.py @@ -0,0 +1,21 @@ +from unittest import TestCase + +from .id_field import IdField + + +class TestCounter(TestCase): + def 
test_should_get_new_id(self): + id = IdField.next("test") + self.assertEqual(id, 1) + IdField.reset("test") + + def test_should_increment_id(self): + ids = [ + IdField.next("test"), + IdField.next("test"), + IdField.next("test"), + IdField.next("test"), + IdField.next("test"), + ] + self.assertEqual(ids, [1, 2, 3, 4, 5]) + IdField.reset("test") diff --git a/zapusk/models/job.py b/zapusk/models/job.py new file mode 100644 index 0000000..b17dd8a --- /dev/null +++ b/zapusk/models/job.py @@ -0,0 +1,149 @@ +from dataclasses import dataclass, field +from datetime import datetime +from enum import Enum +from typing import Optional + +from .id_field import IdField +from .job_config import JobConfig +from .job_group import JobGroup + + +class JOB_STATE_ENUM(str, Enum): + """ + Enum contains possible job states + """ + + PENDING = "PENDING" + """ + Job is added, but hasn't been picked up by any consumer yet. + """ + + RUNNING = "RUNNING" + """ + Job has been picked up by a consumer. + """ + + FINISHED = "FINISHED" + """ + Job has been finished with zero exit code. + """ + + FAILED = "FAILED" + """ + Job has been finished with non-zero exit code. + """ + + CANCELLED = "CANCELLED" + """ + Job has been cancelled before completion + """ + + +@dataclass +class Job: + """ + Job model + """ + + JOB_STATE_ENUM = JOB_STATE_ENUM + + def __str__(self): + return f"job.{self.job_config_id}.{self.id}" + + @staticmethod + def from_config(group_config: JobGroup, config: JobConfig): + """ + returns a new JobItem created from JobConfig object + """ + return Job( + group_config=group_config, + command=config.command, + args_command=config.args_command, + group=config.group, + job_config_id=config.id, + name=config.name, + on_finish=config.on_finish, + on_fail=config.on_fail, + ) + + group_config: JobGroup + """ + Contains jobconfig for job started with it + """ + + command: str + """ + A shell command to be executed when job becomes `RUNNING`. 
+ """ + + name: str + """ + Job human-readable name + """ + + group: str = "default" + """ + job_group id + """ + + job_config_id: Optional[str] = None + """ + job_config id + """ + + args_command: Optional[str] = None + """ + A command to get arguments to execute job with + """ + + args: list[str] = field(default_factory=list) + + id: int = field(default_factory=lambda: IdField.next("job_item")) + """ + Unique Job id generated when it's created + """ + + on_finish: Optional[str] = None + """ + A command to execute after job has been successfuly finished + """ + + on_fail: Optional[str] = None + """ + A command to execute after job has been successfuly finished + """ + + state: JOB_STATE_ENUM = JOB_STATE_ENUM.PENDING + """ + defines current state in the pipeline, such as `PENDING`, `RUNNING`, `FAILED` or `FINISHED`. + """ + + pid: int | None = None + """ + contains Job process PID if job has been started. + """ + + log: str | None = None + """ + contains a logfile path if job has been started. + """ + + exit_code: int | None = None + """ + contains an exit status if job has been finished. 
+ """ + + consumed_by: str | None = None + """ + Identifier of a consumer took this job + """ + + created_at: datetime = field(default_factory=lambda: datetime.now()) + """ + when job has been added to the WorkLog + """ + + updated_at: datetime = field(default_factory=lambda: datetime.now()) + """ + when job has progressed within the pipeline last time + """ diff --git a/zapusk/models/job_config.py b/zapusk/models/job_config.py new file mode 100644 index 0000000..28aebc0 --- /dev/null +++ b/zapusk/models/job_config.py @@ -0,0 +1,50 @@ +from dataclasses import dataclass +from typing import Optional + +from .base_model import BaseModel + + +@dataclass(eq=False) +class JobConfig(BaseModel): + id: str + """ + Job config id + """ + + name: str + """ + Job name + """ + + command: str + """ + shell command for the job + """ + + group: str = "default" + """ + Group id to run job in + """ + + args_command: Optional[str] = None + """ + callback to fetch arguments to run the command with + """ + + on_finish: Optional[str] = None + """ + On finish callback + """ + + on_fail: Optional[str] = None + """ + On fail callback + """ + + schedule: Optional[str] = None + """ + Cron-like string to define scheduling interval + """ + + def __str__(self) -> str: + return f"job_config.{self.id}" diff --git a/zapusk/models/job_group.py b/zapusk/models/job_group.py new file mode 100644 index 0000000..57f32af --- /dev/null +++ b/zapusk/models/job_group.py @@ -0,0 +1,16 @@ +from dataclasses import dataclass +from typing import Optional + +from .base_model import BaseModel + + +@dataclass(eq=False) +class JobGroup(BaseModel): + id: str + parallel: int + on_finish: Optional[str] = None + on_fail: Optional[str] = None + + def __post_init__(self): + if self.parallel <= 0: + raise ValueError("`parallel` must be a positive number") diff --git a/zapusk/models/scheduled_job.py b/zapusk/models/scheduled_job.py new file mode 100644 index 0000000..a8af415 --- /dev/null +++ b/zapusk/models/scheduled_job.py 
@@ -0,0 +1,40 @@ +from dataclasses import dataclass +from croniter import croniter +from datetime import datetime, timezone +from typing import Optional + +from .base_model import BaseModel +from .job_config import JobConfig + + +@dataclass(eq=False) +class ScheduledJob(BaseModel): + job_config: JobConfig + + next: int = 0 + """ + Next execution time + """ + + last_run: Optional[datetime] = None + """ + list time job run + """ + + def __post_init__(self): + now = datetime.now(timezone.utc) + if self.job_config.schedule: + self.__iter = croniter(self.job_config.schedule, start_time=now) + else: + raise ValueError( + "Job config {self.job_config} contains no `schedule` property" + ) + + self.next = self.__iter.get_next(start_time=now) + + def record_run(self, now: datetime): + self.last_run = now + self.next = self.__iter.get_next(start_time=now) + + def __str__(self) -> str: + return f"scheduled.{self.job_config}" diff --git a/zapusk/server/__init__.py b/zapusk/server/__init__.py new file mode 100644 index 0000000..2261839 --- /dev/null +++ b/zapusk/server/__init__.py @@ -0,0 +1,3 @@ +from .api import create_app + +__all__ = ["create_app"] diff --git a/zapusk/server/api.py b/zapusk/server/api.py new file mode 100644 index 0000000..3923474 --- /dev/null +++ b/zapusk/server/api.py @@ -0,0 +1,34 @@ +from flask import Flask + +from .controller_jobs import create_jobs_api +from .controller_config import create_config_api +from .controller_scheduled_jobs import create_scheduled_jobs_api + + +def create_app( + executor_manager_service, + config_service, + scheduler_service, +): + app = Flask(__name__) + + app.register_blueprint( + create_jobs_api( + config_service=config_service, + executor_manager_service=executor_manager_service, + ) + ) + app.register_blueprint( + create_config_api( + config_service=config_service, + ) + ) + + app.register_blueprint( + create_scheduled_jobs_api( + scheduler_service=scheduler_service, + config_service=config_service, + ) + ) + + return 
app diff --git a/zapusk/server/controller_config.py b/zapusk/server/controller_config.py new file mode 100644 index 0000000..6c4caf2 --- /dev/null +++ b/zapusk/server/controller_config.py @@ -0,0 +1,16 @@ +from flask import Blueprint +from zapusk.lib.json_serdes import JsonSerdes + + +def create_config_api(config_service): + jobgroups_api = Blueprint("jobgroups", __name__) + + @jobgroups_api.route("/config/groups/") + def groups_list(): + return JsonSerdes.serialize(config_service.list_jobgroups()) + + @jobgroups_api.route("/config/jobs/") + def job_list(): + return JsonSerdes.serialize(config_service.list_jobs()) + + return jobgroups_api diff --git a/zapusk/server/controller_config_test.py b/zapusk/server/controller_config_test.py new file mode 100644 index 0000000..1d13c27 --- /dev/null +++ b/zapusk/server/controller_config_test.py @@ -0,0 +1,122 @@ +from unittest import TestCase + +from flask import json +from testfixtures import TempDirectory + +from zapusk.services import ( + ConfigService, + SchedulerService, + ExecutorManagerService, + ExecutorManagerKawkaBackend, +) + +from .api import create_app + +CONFIG_DATA = """ +job_groups: + - id: default + parallel: 10 + - id: sequential + parallel: 1 + - id: parallel + parallel: 2 + +jobs: + - id: test1 + name: Test1 + command: test1 + - id: test2 + name: Test2 + command: test2 +""" + + +class TestConfigController(TestCase): + def setUp(self) -> None: + self.temp_dir = TempDirectory() + config_file = self.temp_dir / "config.yml" + config_file.write_text(CONFIG_DATA) + + self.executor_manager_service = ExecutorManagerService( + backend=ExecutorManagerKawkaBackend(), + ) + self.config_service = ConfigService( + config_path=f"{self.temp_dir.path}/config.yml" + ) + self.scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, + ) + self.scheduler_service.start() + + self.app = create_app( + 
executor_manager_service=self.executor_manager_service, + config_service=self.config_service, + scheduler_service=self.scheduler_service, + ) + self.test_client = self.app.test_client() + + def tearDown(self) -> None: + self.executor_manager_service.terminate() + self.scheduler_service.terminate() + self.temp_dir.cleanup() + + def test_config_groups_list(self): + res = self.test_client.get("/config/groups/") + data = json.loads(res.data) + self.assertEqual( + data, + { + "data": [ + { + "id": "default", + "on_fail": None, + "on_finish": None, + "parallel": 10, + }, + { + "id": "sequential", + "on_fail": None, + "on_finish": None, + "parallel": 1, + }, + { + "id": "parallel", + "on_fail": None, + "on_finish": None, + "parallel": 2, + }, + ] + }, + ) + + def test_config_jobs_list(self): + res = self.test_client.get("/config/jobs/") + data = json.loads(res.data) + self.assertEqual( + data, + { + "data": [ + { + "args_command": None, + "command": "test1", + "group": "default", + "id": "test1", + "name": "Test1", + "on_fail": None, + "on_finish": None, + "schedule": None, + }, + { + "args_command": None, + "command": "test2", + "group": "default", + "id": "test2", + "name": "Test2", + "on_fail": None, + "on_finish": None, + "schedule": None, + }, + ] + }, + ) diff --git a/zapusk/server/controller_jobs.py b/zapusk/server/controller_jobs.py new file mode 100644 index 0000000..406503d --- /dev/null +++ b/zapusk/server/controller_jobs.py @@ -0,0 +1,111 @@ +from flask import Blueprint, Response, abort, request +from zapusk.lib.json_serdes import JsonSerdes +from zapusk.models import Job, JobConfig, IdField +from .error_response import error_response + + +def create_jobs_api(config_service, executor_manager_service): + jobs_api = Blueprint("jobs", __name__) + + @jobs_api.route("/jobs/") + def job_get(job_id: str): + job = executor_manager_service.get(int(job_id)) + if not job: + return abort( + error_response(status=404, error=f"Job with id {job_id} not found") + ) + + return 
JsonSerdes.serialize(job) + + @jobs_api.route("/jobs/") + def job_list(): + jobs = executor_manager_service.list() + return JsonSerdes.serialize(jobs) + + @jobs_api.route("/jobs/", methods=["POST"]) + def job_add(): + body = request.json or {} + + job_config_id = body.get("job_config_id", None) + + # if no config id, let's try to execute it as a command + if not job_config_id: + command = body.get("command", None) + if not command: + return abort( + error_response( + status=400, + error="Request body contains no `command` or `job_config_id`", + ) + ) + + group_id = body.get("group_id", None) + name = body.get("name", None) + + job_group = config_service.get_job_group(group_id or "default") + + if not command or not job_group: + return abort( + error_response( + status=404, + error=f'group_id "{group_id}" not found', + ) + ) + + cmd_id = f"command.{IdField.next("command")}" + job_item = Job.from_config( + group_config=job_group, + config=JobConfig( + id=cmd_id, + name=name or f"{job_group.id}.{cmd_id}", + command=command, + ), + ) + executor_manager_service.add(job_item) + + return JsonSerdes.serialize(job_item) + + job_config = config_service.get_job(job_config_id) + + if not job_config: + return abort( + error_response( + status=404, + error=f"Job with id `{job_config_id}` not found", + ) + ) + + job_group = config_service.get_job_group(job_config.group) + + if not job_group: # pragma: no cover + # this technically not possible, because config_parser will fail first + return abort( + error_response( + status=404, + error=f"Job configuration for {job_config.id} contains unknown jobgroup `{job_config.group}`", + ) + ) + + job_item = Job.from_config( + group_config=job_group, + config=job_config, + ) + executor_manager_service.add(job_item) + + return JsonSerdes.serialize(job_item) + + @jobs_api.route("/jobs/", methods=["DELETE"]) + def job_delete(job_id): + job_item = executor_manager_service.get(int(job_id)) + if not job_item: + return abort( + error_response( + 
status=404, + error=f"Job with id `{job_id}` not found", + ) + ) + + cancelled_job = executor_manager_service.cancel(job_item) + return JsonSerdes.serialize(cancelled_job) + + return jobs_api diff --git a/zapusk/server/controller_jobs_test.py b/zapusk/server/controller_jobs_test.py new file mode 100644 index 0000000..1b6d2a9 --- /dev/null +++ b/zapusk/server/controller_jobs_test.py @@ -0,0 +1,299 @@ +import json +from unittest import TestCase +from unittest.mock import ANY + +from testfixtures import TempDirectory + +from zapusk.services import ( + ConfigService, + SchedulerService, + ExecutorManagerService, + ExecutorManagerKawkaBackend, +) + +from .api import create_app + +CONFIG_DATA = """ +job_groups: + - id: default + parallel: 10 + - id: cmd + parallel: 2 + +jobs: + - name: Echo + id: echo + command: echo 1 +""" + + +class TestJobController(TestCase): + def setUp(self) -> None: + self.temp_dir = TempDirectory() + self.config_file = self.temp_dir / "config.yml" + self.config_file.write_text(CONFIG_DATA) + + self.executor_manager_service = ExecutorManagerService( + backend=ExecutorManagerKawkaBackend(), + ) + self.config_service = ConfigService( + config_path=f"{self.temp_dir.path}/config.yml" + ) + self.scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, + ) + self.scheduler_service.start() + + self.app = create_app( + executor_manager_service=self.executor_manager_service, + config_service=self.config_service, + scheduler_service=self.scheduler_service, + ) + self.test_client = self.app.test_client() + + def tearDown(self) -> None: + self.executor_manager_service.terminate() + self.scheduler_service.terminate() + self.temp_dir.cleanup() + + def test_create_job(self): + res = self.test_client.post("/jobs/", json={"job_config_id": "echo"}) + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": { + "args": [], + "args_command": None, + "command": "echo 1", + 
"consumed_by": None, + "created_at": ANY, + "exit_code": None, + "group": "default", + "group_config": { + "id": "default", + "on_fail": None, + "on_finish": None, + "parallel": 10, + }, + "id": ANY, + "job_config_id": "echo", + "log": None, + "name": "Echo", + "on_fail": None, + "on_finish": None, + "pid": None, + "state": "PENDING", + "updated_at": ANY, + } + }, + ) + + def test_create_command(self): + res = self.test_client.post( + "/jobs/", + json={ + "command": "echo 42", + "group_id": "cmd", + "name": "test_command", + }, + ) + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": { + "args": [], + "args_command": None, + "command": "echo 42", + "consumed_by": None, + "created_at": ANY, + "exit_code": None, + "group": "default", + "group_config": { + "id": "cmd", + "on_fail": None, + "on_finish": None, + "parallel": 2, + }, + "id": ANY, + "job_config_id": ANY, + "log": None, + "name": "test_command", + "on_fail": None, + "on_finish": None, + "pid": None, + "state": "PENDING", + "updated_at": ANY, + } + }, + ) + + def test_get_job(self): + res = self.test_client.post("/jobs/", json={"job_config_id": "echo"}) + data = json.loads(res.data) + + job_id = data["data"]["id"] + res = self.test_client.get(f"/jobs/{job_id}") + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": { + "args": [], + "args_command": None, + "command": "echo 1", + "consumed_by": None, + "created_at": ANY, + "exit_code": None, + "group": "default", + "group_config": { + "id": "default", + "on_fail": None, + "on_finish": None, + "parallel": 10, + }, + "id": ANY, + "job_config_id": "echo", + "log": None, + "name": "Echo", + "on_fail": None, + "on_finish": None, + "pid": None, + "state": "PENDING", + "updated_at": ANY, + } + }, + ) + + def test_list_job(self): + res = self.test_client.post("/jobs/", json={"job_config_id": "echo"}) + data = json.loads(res.data) + + job_id = data["data"]["id"] + res = self.test_client.get("/jobs/") + data = 
json.loads(res.data) + + self.assertEqual( + data, + { + "data": [ + { + "args": [], + "args_command": None, + "command": "echo 1", + "consumed_by": None, + "created_at": ANY, + "exit_code": None, + "group": "default", + "group_config": { + "id": "default", + "on_fail": None, + "on_finish": None, + "parallel": 10, + }, + "id": job_id, + "job_config_id": "echo", + "log": None, + "name": "Echo", + "on_fail": None, + "on_finish": None, + "pid": None, + "state": "PENDING", + "updated_at": ANY, + } + ] + }, + ) + + def test_cancel_job(self): + res = self.test_client.post( + "/jobs/", json={"command": "sleep 60", "name": "test_command"} + ) + data = json.loads(res.data) + + job_id = data["data"]["id"] + res = self.test_client.delete(f"/jobs/{job_id}") + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": { + "args": [], + "args_command": None, + "command": "sleep 60", + "consumed_by": ANY, + "created_at": ANY, + "exit_code": None, + "group": "default", + "group_config": { + "id": "default", + "on_fail": None, + "on_finish": None, + "parallel": 10, + }, + "id": ANY, + "job_config_id": ANY, + "log": ANY, + "name": "test_command", + "on_fail": None, + "on_finish": None, + "pid": None, + "state": "CANCELLED", + "updated_at": ANY, + } + }, + ) + + def test_get_unknown(self): + res = self.test_client.get(f"/jobs/420") + data = json.loads(res.data) + + self.assertEqual(res.status, "404 NOT FOUND") + self.assertEqual(data, {"error": "Job with id 420 not found"}) + + def test_create_without_body(self): + res = self.test_client.post(f"/jobs/", json={}) + data = json.loads(res.data) + + self.assertEqual(res.status, "400 BAD REQUEST") + self.assertEqual( + data, {"error": "Request body contains no `command` or `job_config_id`"} + ) + + def test_create_with_unknown_jobgroup(self): + res = self.test_client.post( + f"/jobs/", + json={ + "command": "echo 1", + "group_id": "unknown", + }, + ) + data = json.loads(res.data) + + self.assertEqual(res.status, "404 NOT 
FOUND") + self.assertEqual(data, {"error": 'group_id "unknown" not found'}) + + def test_create_with_unknown_jobconfig_id(self): + res = self.test_client.post( + f"/jobs/", + json={ + "job_config_id": "unknown", + }, + ) + data = json.loads(res.data) + + self.assertEqual(res.status, "404 NOT FOUND") + self.assertEqual(data, {"error": "Job with id `unknown` not found"}) + + def test_cancel_unknown_job(self): + res = self.test_client.delete("/jobs/420") + data = json.loads(res.data) + + self.assertEqual(res.status, "404 NOT FOUND") + self.assertEqual(data, {"error": "Job with id `420` not found"}) diff --git a/zapusk/server/controller_scheduled_jobs.py b/zapusk/server/controller_scheduled_jobs.py new file mode 100644 index 0000000..15fcb59 --- /dev/null +++ b/zapusk/server/controller_scheduled_jobs.py @@ -0,0 +1,81 @@ +from flask import Blueprint, abort, request +from zapusk.lib.json_serdes import JsonSerdes +from zapusk.models import Job, JobConfig, IdField +from zapusk.services.config.service import ConfigService +from zapusk.services.scheduler_service.service import SchedulerService +from .error_response import error_response + + +def create_scheduled_jobs_api( + scheduler_service: SchedulerService, + config_service: ConfigService, +): + scheduled_jobs_api = Blueprint("scheduled_jobs", __name__) + + @scheduled_jobs_api.route("/scheduled-jobs/") + def scheduled_jobs_list(): + scheduled_jobs = scheduler_service.list() + return JsonSerdes.serialize(scheduled_jobs) + + @scheduled_jobs_api.route("/scheduled-jobs/", methods=["POST"]) + def scheduled_jobs_add(): + body = request.json or {} + + command = body.get("command", None) + if not command: + return abort( + error_response( + status=400, + error="Request body contains no `command`", + ) + ) + + name = body.get("name", None) + group_id = body.get("group_id", None) + + if group_id: + group = config_service.get_job_group(group_id) + if not group: + return abort( + error_response( + status=404, + error=f"Unknown group 
`{group_id}`", + ) + ) + + schedule = body.get("schedule", None) + + if not schedule: + return abort( + error_response( + status=400, + error=f"Request body contains no `schedule`", + ) + ) + + cmd_id = f"scheduled.{IdField.next("scheduled")}" + + job_config = JobConfig( + id=cmd_id, + name=name or f"{group_id}.{cmd_id}", + schedule=schedule, + command=command, + ) + + is_added = scheduler_service.add(job_config) + + if not is_added: + return abort( + error_response( + status=500, + error=f"Scheduled job hasn't been added", + ) + ) + + return JsonSerdes.serialize(job_config) + + @scheduled_jobs_api.route("/scheduled-jobs/", methods=["DELETE"]) + def scheduled_jobs_cancel(scheduled_id: str): + return JsonSerdes.serialize(scheduler_service.delete(scheduled_id)) + + return scheduled_jobs_api diff --git a/zapusk/server/controller_scheduled_jobs_test.py b/zapusk/server/controller_scheduled_jobs_test.py new file mode 100644 index 0000000..94e2830 --- /dev/null +++ b/zapusk/server/controller_scheduled_jobs_test.py @@ -0,0 +1,171 @@ +import json +from unittest import TestCase +from unittest.mock import ANY, patch + +from testfixtures import TempDirectory + +from zapusk.services import ( + ConfigService, + SchedulerService, + ExecutorManagerService, + ExecutorManagerKawkaBackend, +) + +from .api import create_app + +CONFIG_DATA = """ +jobs: + - name: Echo + id: scheduled_echo + command: echo 1 + schedule: "0 0 * 1 *" +""" + + +class TestJobController(TestCase): + def setUp(self) -> None: + self.temp_dir = TempDirectory() + self.config_file = self.temp_dir / "config.yml" + self.config_file.write_text(CONFIG_DATA) + + self.executor_manager_service = ExecutorManagerService( + backend=ExecutorManagerKawkaBackend(), + ) + self.config_service = ConfigService( + config_path=f"{self.temp_dir.path}/config.yml" + ) + self.scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, + ) + 
self.scheduler_service.start() + + self.app = create_app( + executor_manager_service=self.executor_manager_service, + config_service=self.config_service, + scheduler_service=self.scheduler_service, + ) + self.test_client = self.app.test_client() + + def tearDown(self) -> None: + self.executor_manager_service.terminate() + self.scheduler_service.terminate() + self.temp_dir.cleanup() + + def test_list(self): + res = self.test_client.get("/scheduled-jobs/") + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": [ + { + "args_command": None, + "command": "echo 1", + "group": "default", + "id": "scheduled_echo", + "name": "Echo", + "on_fail": None, + "on_finish": None, + "schedule": "0 0 * 1 *", + } + ] + }, + ) + + def test_create(self): + res = self.test_client.post( + "/scheduled-jobs/", + json={ + "command": "echo 42", + "name": "echo", + "schedule": "1 * * * *", + }, + ) + data = json.loads(res.data) + + self.assertEqual( + data, + { + "data": { + "args_command": None, + "command": "echo 42", + "group": "default", + "id": "scheduled.1", + "name": "echo", + "on_fail": None, + "on_finish": None, + "schedule": "1 * * * *", + } + }, + ) + + def test_cancel(self): + res = self.test_client.delete( + "/scheduled-jobs/scheduled_echo", + json={ + "command": "echo 42", + "name": "echo", + "schedule": "1 * * * *", + }, + ) + data = json.loads(res.data) + self.assertEqual(data, {"data": True}) + + res = self.test_client.get("/scheduled-jobs/") + data = json.loads(res.data) + + self.assertEqual(data, {"data": []}) + + def test_create_without_command(self): + res = self.test_client.post( + "/scheduled-jobs/", + json={ + "schedule": "1 * * * *", + }, + ) + data = json.loads(res.data) + + self.assertEqual(res.status, "400 BAD REQUEST") + self.assertEqual(data, {"error": "Request body contains no `command`"}) + + def test_create_without_schedule(self): + res = self.test_client.post( + "/scheduled-jobs/", + json={ + "command": "echo 420", + }, + ) + data = 
json.loads(res.data) + + self.assertEqual(res.status, "400 BAD REQUEST") + self.assertEqual(data, {"error": "Request body contains no `schedule`"}) + + def test_create_with_unknown_group(self): + res = self.test_client.post( + "/scheduled-jobs/", + json={ + "command": "echo 420", + "schedule": "1 * * * *", + "group_id": "unknown", + }, + ) + data = json.loads(res.data) + + self.assertEqual(res.status, "404 NOT FOUND") + self.assertEqual(data, {"error": "Unknown group `unknown`"}) + + def test_create_failed_by_scheduler_service(self): + with patch.object(self.scheduler_service, "add", return_value=False): + res = self.test_client.post( + "/scheduled-jobs/", + json={ + "command": "echo 420", + "schedule": "1 * * * *", + }, + ) + data = json.loads(res.data) + + self.assertEqual(res.status, "500 INTERNAL SERVER ERROR") + self.assertEqual(data, {"error": "Scheduled job hasn't been added"}) diff --git a/zapusk/server/error_response.py b/zapusk/server/error_response.py new file mode 100644 index 0000000..18a1dc7 --- /dev/null +++ b/zapusk/server/error_response.py @@ -0,0 +1,9 @@ +import json +from flask import Response + + +def error_response(error: str, status: int): + return Response( + json.dumps({"error": error}), + status=status, + ) diff --git a/zapusk/services/__init__.py b/zapusk/services/__init__.py new file mode 100644 index 0000000..38752ed --- /dev/null +++ b/zapusk/services/__init__.py @@ -0,0 +1,13 @@ +from .config import ConfigService +from .scheduler_service import SchedulerService +from .executor_manager import ( + ExecutorManagerService, + ExecutorManagerKawkaBackend, +) + +__ALL__ = [ + "ConfigService", + "ExecutorManagerService", + "ExecutorManagerKawkaBackend", + "SchedulerService", +] diff --git a/zapusk/services/config/__init__.py b/zapusk/services/config/__init__.py new file mode 100644 index 0000000..33d25ed --- /dev/null +++ b/zapusk/services/config/__init__.py @@ -0,0 +1,6 @@ +from .service import ConfigService + + +__ALL__ = [ + 
"ConfigService", +] diff --git a/zapusk/services/config/config_parser.py b/zapusk/services/config/config_parser.py new file mode 100644 index 0000000..700d808 --- /dev/null +++ b/zapusk/services/config/config_parser.py @@ -0,0 +1,59 @@ +import logging + +from zapusk.models import Config, JobGroup, JobConfig + +from .constants import DEFAULT_JOB_GROUPS, DEFAULT_PORT, DEFAULT_COLORS + + +logger = logging.getLogger(__name__) + + +class ConfigParser: + + @classmethod + def parse(cls, data: dict): + if not data: + data = {} + + port = data.get("port", DEFAULT_PORT) + colors = data.get("colors", DEFAULT_COLORS) + job_groups = cls.__parse_job_groups(data.get("job_groups", {})) + jobs = cls.__parse_jobs(data.get("jobs", []), list(job_groups.keys())) + + return Config( + port=port, + colors=colors, + job_groups=job_groups, + jobs=jobs, + ) + + @classmethod + def __parse_job_groups(cls, data: list): + logger.debug(f"Parsing job groups") + if not data: + return DEFAULT_JOB_GROUPS + + job_groups = {**DEFAULT_JOB_GROUPS} + for v in data: + _id = v["id"] + job_groups[_id] = JobGroup(**v) + logger.debug(f"Parsed {job_groups[_id]}") + + return job_groups + + @classmethod + def __parse_jobs(cls, jobs: list, known_groups: list[str]): + logger.debug(f"Parsing job configs") + logger.debug(f"Known job_groups: {known_groups}") + + retval = {} + for v in jobs: + j = JobConfig.from_dict(v) + + if j.group not in known_groups: + raise ValueError(f"Unknown job_group `{j.group}` in {j}") + + logger.debug(f"Parsed {j}") + retval[v["id"]] = j + + return retval diff --git a/zapusk/services/config/config_parser_test.py b/zapusk/services/config/config_parser_test.py new file mode 100644 index 0000000..15569e4 --- /dev/null +++ b/zapusk/services/config/config_parser_test.py @@ -0,0 +1,220 @@ +import pytest +import yaml + +from zapusk.services.config.constants import DEFAULT_COLORS + +from .config_parser import DEFAULT_JOB_GROUPS, DEFAULT_PORT, ConfigParser + + +@pytest.mark.parametrize( + 
"config_yaml,expected_result", + [ + [ + """ + + """, + { + "port": DEFAULT_PORT, + "colors": DEFAULT_COLORS, + "job_groups": DEFAULT_JOB_GROUPS, + "jobs": {}, + }, + ], + [ + """ +jobs: + - name: Sleep Timer + id: sleep + command: sleep 10 + """, + { + "port": DEFAULT_PORT, + "colors": DEFAULT_COLORS, + "job_groups": DEFAULT_JOB_GROUPS, + "jobs": { + "sleep": { + "name": "Sleep Timer", + "id": "sleep", + "command": "sleep 10", + "group": "default", + "args_command": None, + } + }, + }, + ], + [ + """ +job_groups: + - id: awesome_group + parallel: 4200 +jobs: + - name: Sleep Timer + group: awesome_group + id: sleep + command: sleep 10 + """, + { + "port": DEFAULT_PORT, + "colors": DEFAULT_COLORS, + "job_groups": { + **DEFAULT_JOB_GROUPS, + **{ + "awesome_group": { + "id": "awesome_group", + "parallel": 4200, + }, + }, + }, + "jobs": { + "sleep": { + "name": "Sleep Timer", + "id": "sleep", + "command": "sleep 10", + "group": "awesome_group", + "args_command": None, + } + }, + }, + ], + [ + """ +port: 1234 +colors: True +job_groups: + - id: default + parallel: 1 + """, + { + "port": 1234, + "colors": True, + "job_groups": {"default": {"id": "default", "parallel": 1}}, + "jobs": {}, + }, + ], + [ + """ +job_groups: + - id: default + parallel: 1 + on_fail: echo fail + on_finish: echo finish + +jobs: + - name: Sleep Timer + id: sleep + command: sleep 10 + on_fail: echo job_fail + on_finish: echo job_finish + """, + { + "port": DEFAULT_PORT, + "colors": DEFAULT_COLORS, + "job_groups": { + "default": { + "id": "default", + "parallel": 1, + "on_fail": "echo fail", + "on_finish": "echo finish", + } + }, + "jobs": { + "sleep": { + "name": "Sleep Timer", + "id": "sleep", + "command": "sleep 10", + "group": "default", + "args_command": None, + "on_fail": "echo job_fail", + "on_finish": "echo job_finish", + } + }, + }, + ], + [ + """ +job_groups: + - id: default + parallel: 1 + +jobs: + - name: Sleep Timer + id: sleep + command: sleep 10 + unknown_property: 1 + """, + { + 
"port": DEFAULT_PORT, + "colors": DEFAULT_COLORS, + "job_groups": { + "default": { + "id": "default", + "parallel": 1, + } + }, + "jobs": { + "sleep": { + "name": "Sleep Timer", + "id": "sleep", + "command": "sleep 10", + "group": "default", + "args_command": None, + } + }, + }, + ], + ], + ids=[ + "default_config", + "job_config", + "jobgroups_and_jobs", + "port_and_override_default_jobgroup", + "callbacks", + "unknown_property", + ], +) +def test_config_parser_should_parse_config(config_yaml, expected_result): + config_parser = ConfigParser() + config_data = yaml.safe_load(config_yaml) + res = config_parser.parse(config_data) + + assert res == expected_result + + +#################################### + + +@pytest.mark.parametrize( + "config_yaml,expection_msg", + [ + [ + """ +# Should fail with unknown group id +jobs: + - name: Sleep Timer + group: awesome_group + id: sleep + command: sleep 10 + """, + "Unknown job_group `awesome_group` in job_config.sleep", + ], + [ + """ +# Should fail with parallel config error +job_groups: + - id: awesome_group + parallel: -1 + """, + "`parallel` must be a positive number", + ], + ], + ids=["unknown_id_fail", "negative_parallel_fail"], +) +def test_job_should_fail_parsing_config(config_yaml, expection_msg): + config_parser = ConfigParser() + config_data = yaml.safe_load(config_yaml) + + try: + config_parser.parse(config_data) + raise Exception("Should fail") + except Exception as ex: + assert ex.args[0] == expection_msg diff --git a/zapusk/services/config/constants.py b/zapusk/services/config/constants.py new file mode 100644 index 0000000..a975e5f --- /dev/null +++ b/zapusk/services/config/constants.py @@ -0,0 +1,11 @@ +from zapusk.models import JobGroup + + +DEFAULT_COLORS = False +DEFAULT_PORT = 9876 +DEFAULT_JOB_GROUPS: dict[str, JobGroup] = { + "default": JobGroup( + id="default", + parallel=10, + ) +} diff --git a/zapusk/services/config/service.py b/zapusk/services/config/service.py new file mode 100644 index 
0000000..214e2c1 --- /dev/null +++ b/zapusk/services/config/service.py @@ -0,0 +1,91 @@ +import logging +import os +from os.path import isfile +from typing import Optional, cast + +from zapusk.models.job_group import JobGroup +from .config_parser import ConfigParser +from .yaml_filereader import YamlFileReader + + +logger = logging.getLogger(__name__) + + +class ConfigService: + config_path: str + + def __init__( + self, + config_path: Optional[str] = None, + file_reader=YamlFileReader(), + parser=ConfigParser(), + ): + self.file_reader = file_reader + self.parser = parser + self.config_path = self.__get_config_path(config_path) + + def __get_config_path(self, config_path): + """ + Returns a path to the config file considering environment configuration + """ + if config_path: + return os.path.expanduser(config_path) + + config_dir = os.path.join( + os.environ.get("APPDATA") + or os.environ.get("XDG_CONFIG_HOME") + or os.path.join(os.environ["HOME"], ".config"), + "zapusk", + ) + + logger.info(f"Config Dir: {config_dir}") + + logger.debug(f"Try to load config file: {config_dir}/config.yaml") + if isfile(f"{config_dir}/config.yaml"): + logger.debug(f"Loaded config file: {config_dir}/config.yaml") + return f"{config_dir}/config.yaml" + + logger.debug(f"Try to load config file: {config_dir}/config.yml") + if isfile(f"{config_dir}/config.yml"): + logger.debug(f"Loaded config file: {config_dir}/config.yml") + return f"{config_dir}/config.yml" + else: + raise FileExistsError("Config not found") + + def get_config(self): + config = self.file_reader.read(self.config_path) + return self.parser.parse(config) + + def list_jobs(self): + config = self.get_config() + return list(config.jobs.values()) + + def list_jobgroups(self): + config = self.get_config() + return list(config.job_groups.values()) + + def get_job(self, job_id: str): + config = self.get_config() + + for job in config.jobs.values(): + if job.id == job_id: + return job + + return None + + def get_job_group(self, 
job_group_id: str): + config = self.get_config() + + for job_group in config.job_groups.values(): + if job_group.id == job_group_id: + return job_group + + return None + + def get_job_group_or_default(self, job_group_id: str): + job_group = self.get_job_group(job_group_id) + + if not job_group: + job_group = cast(JobGroup, self.get_job_group("default")) + + return job_group diff --git a/zapusk/services/config/service_test.py b/zapusk/services/config/service_test.py new file mode 100644 index 0000000..0222c92 --- /dev/null +++ b/zapusk/services/config/service_test.py @@ -0,0 +1,178 @@ +from unittest import TestCase + +from testfixtures import TempDirectory, replace_in_environ +from .service import ConfigService + + +class TestConfigService(TestCase): + def test_config_service_should_return_jobs(self): + config_service = ConfigService(config_path="./config.example.yaml") + jobs = config_service.list_jobs() + + self.assertEqual(len(jobs), 3) + self.assertEqual( + jobs[0], + { + "name": "Sleep 10 Seconds", + "id": "sleep_10", + "group": "default", + "command": "sleep 10", + "args_command": None, + }, + ) + + self.assertEqual( + jobs[1], + { + "name": "Sleep 30 Seconds", + "id": "sleep_30", + "group": "parallel", + "command": "sleep 30", + "args_command": None, + }, + ) + + self.assertEqual( + jobs[2], + { + "name": "Configurable Sleep", + "id": "sleep", + "group": "sequential", + "command": "sleep $1", + "args_command": "zenity --entry --text 'Sleep Time'", + }, + ) + + def test_config_service_should_return_job_groups(self): + config_service = ConfigService(config_path="./config.example.yaml") + job_groups = config_service.list_jobgroups() + + self.assertEqual(len(job_groups), 3) + self.assertEqual( + job_groups[0], + { + "id": "default", + "parallel": 10, + }, + ) + + self.assertEqual( + job_groups[1], + { + "id": "sequential", + "parallel": 1, + }, + ) + + self.assertEqual( + job_groups[2], + { + "id": "parallel", + "parallel": 2, + }, + ) + + def 
test_config_service_should_return_full_config(self): + config_service = ConfigService(config_path="./config.example.yaml") + config = config_service.get_config() + + self.assertEqual(len(config.job_groups), 3) + self.assertEqual(len(config.jobs), 3) + self.assertEqual(config.port, 9876) + + def test_config_service_should_return_job_group(self): + config_service = ConfigService(config_path="./config.example.yaml") + job_group = config_service.get_job_group("default") + + self.assertEqual( + job_group, + { + "id": "default", + "parallel": 10, + }, + ) + + def test_config_service_should_return_job_group_none(self): + config_service = ConfigService(config_path="./config.example.yaml") + job_group = config_service.get_job_group("unknown") + + self.assertEqual(job_group, None) + + def test_config_service_should_return_job_group_or_default(self): + config_service = ConfigService(config_path="./config.example.yaml") + job_group = config_service.get_job_group_or_default("unknown") + + self.assertEqual( + job_group, + { + "id": "default", + "parallel": 10, + }, + ) + + def test_config_service_should_return_job(self): + config_service = ConfigService(config_path="./config.example.yaml") + job = config_service.get_job("sleep_10") + + self.assertEqual( + job, + { + "name": "Sleep 10 Seconds", + "id": "sleep_10", + "group": "default", + "command": "sleep 10", + "args_command": None, + }, + ) + + def test_config_service_should_return_job_none(self): + config_service = ConfigService(config_path="./config.example.yaml") + job = config_service.get_job("unknown") + + self.assertEqual(job, None) + + def test_config_path_1(self): + with TempDirectory() as d: + with replace_in_environ("APPDATA", d.path): + d.makedir("zapusk") + config_file = d / "zapusk/config.yml" + config_file.write_text("") + + config_service = ConfigService() + self.assertEqual( + config_service.config_path, f"{d.path}/zapusk/config.yml" + ) + + def test_config_path_2(self): + with TempDirectory() as d: + with 
replace_in_environ("XDG_CONFIG_HOME", d.path): + d.makedir("zapusk") + config_file = d / "zapusk/config.yaml" + config_file.write_text("") + + config_service = ConfigService() + self.assertEqual( + config_service.config_path, f"{d.path}/zapusk/config.yaml" + ) + + def test_config_path_3(self): + with TempDirectory() as d: + with replace_in_environ("HOME", d.path): + with replace_in_environ("XDG_CONFIG_HOME", ""): + d.makedir(".config/zapusk") + config_file = d / ".config/zapusk/config.yaml" + config_file.write_text("") + + config_service = ConfigService() + self.assertEqual( + config_service.config_path, + f"{d.path}/.config/zapusk/config.yaml", + ) + + def test_config_path_fail(self): + with TempDirectory() as d: + with replace_in_environ("XDG_CONFIG_HOME", d.path): + try: + ConfigService() + except FileExistsError as ex: + self.assertEqual(ex.args[0], "Config not found") diff --git a/zapusk/services/config/yaml_filereader.py b/zapusk/services/config/yaml_filereader.py new file mode 100644 index 0000000..bbcc811 --- /dev/null +++ b/zapusk/services/config/yaml_filereader.py @@ -0,0 +1,10 @@ +import yaml + + +class YamlFileReader: + def read(self, file) -> dict: # type: ignore + """ + Reads YAML file as a dict + """ + with open(file) as stream: + return yaml.safe_load(stream) diff --git a/zapusk/services/executor_manager/__init__.py b/zapusk/services/executor_manager/__init__.py new file mode 100644 index 0000000..37d3c85 --- /dev/null +++ b/zapusk/services/executor_manager/__init__.py @@ -0,0 +1,7 @@ +from .service import ExecutorManagerService +from .backends import ExecutorManagerKawkaBackend + +__ALL__ = [ + "ExecutorManagerService", + "ExecutorManagerKawkaBackend", +] diff --git a/zapusk/services/executor_manager/backends/__init__.py b/zapusk/services/executor_manager/backends/__init__.py new file mode 100644 index 0000000..0800624 --- /dev/null +++ b/zapusk/services/executor_manager/backends/__init__.py @@ -0,0 +1,3 @@ +from .kawka import 
ExecutorManagerKawkaBackend + +__ALL__ = ["ExecutorManagerKawkaBackend"] diff --git a/zapusk/services/executor_manager/backends/kawka/__init__.py b/zapusk/services/executor_manager/backends/kawka/__init__.py new file mode 100644 index 0000000..7aab63b --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/__init__.py @@ -0,0 +1,3 @@ +from .backend import ExecutorManagerKawkaBackend + +__ALL__ = ["ExecutorManagerKawkaBackend"] diff --git a/zapusk/services/executor_manager/backends/kawka/args_consumer.py b/zapusk/services/executor_manager/backends/kawka/args_consumer.py new file mode 100644 index 0000000..8d48f86 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/args_consumer.py @@ -0,0 +1,41 @@ +import logging +from datetime import datetime +import subprocess + +from zapusk.kawka import Consumer +from zapusk.models import Job + +logger = logging.getLogger(__name__) + + +class ArgsConsumer(Consumer): + def process(self, job: Job): + logger.info(f"{self}: received a job {job} to get args for") + + sink = (self.context or {})["sink"] + + if not job.args_command: + sink.add(job) + return + + command = job.args_command + proc = subprocess.Popen( + command, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + exit_code = proc.wait() + out, err = proc.communicate() + + if err or exit_code: + logger.warning(f"{exit_code}: {str(err, 'utf-8')}") + job.state = Job.JOB_STATE_ENUM.FAILED + job.updated_at = datetime.now() + return + + arguments = str(out, "utf-8").split() + logger.info(f"{self} recieved arguments for a job {job}: {arguments}") + job.args = arguments + sink.add(job) + return diff --git a/zapusk/services/executor_manager/backends/kawka/args_consumer_test.py b/zapusk/services/executor_manager/backends/kawka/args_consumer_test.py new file mode 100644 index 0000000..1714301 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/args_consumer_test.py @@ -0,0 +1,82 @@ +from unittest import TestCase + +from 
zapusk.kawka import Producer +from zapusk.lib.create_jobitem import create_jobitem +from zapusk.models import Job + +from .args_consumer import ArgsConsumer + + +class ArgsConsumerTest(TestCase): + + def test_should_run_args_command_and_add_arguments_to_a_jobitem(self): + input_producer = Producer("input_producer") + sink_producer = Producer("sink_producer") + + args_consumer = ArgsConsumer( + name="args_consumer", + producer=input_producer, + context={ + "sink": sink_producer, + }, + ) + + args_consumer.start() + + item = create_jobitem(command="echo", args_command="echo 1") + input_producer.add(item) + input_producer.add(Producer.End) + + args_consumer.join() + + self.assertEqual(item.args, ["1"]) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.PENDING) + self.assertEqual(len(list(sink_producer.all(block=False))), 1) + + def test_should_pass_items_without_args_command_to_the_sink(self): + input_producer = Producer("input_producer") + sink_producer = Producer("sink_producer") + + args_consumer = ArgsConsumer( + name="args_consumer", + producer=input_producer, + context={ + "sink": sink_producer, + }, + ) + + args_consumer.start() + + item = create_jobitem(command="echo") + input_producer.add(item) + input_producer.add(Producer.End) + + args_consumer.join() + + self.assertEqual(item.args, []) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.PENDING) + self.assertEqual(len(list(sink_producer.all(block=False))), 1) + + def test_should_set_state_to_failed_args_command_fails(self): + input_producer = Producer("input_producer") + sink_producer = Producer("sink_producer") + + args_consumer = ArgsConsumer( + name="args_consumer", + producer=input_producer, + context={ + "sink": sink_producer, + }, + ) + + args_consumer.start() + + item = create_jobitem(command="echo", args_command="exit 1") + input_producer.add(item) + input_producer.add(Producer.End) + + args_consumer.join() + + self.assertEqual(item.args, []) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FAILED) 
+ self.assertEqual(len(list(sink_producer.all(block=False))), 0) diff --git a/zapusk/services/executor_manager/backends/kawka/backend.py b/zapusk/services/executor_manager/backends/kawka/backend.py new file mode 100644 index 0000000..34c6718 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/backend.py @@ -0,0 +1,53 @@ +import os +import signal +from datetime import datetime +from time import sleep + +from zapusk.kawka import ConsumerGroup, Producer +from zapusk.models import Job + +from .consumer import ExecutorManagerConsumer + + +class ExecutorManagerKawkaBackend: + def start(self): + self._producer = Producer(name="executor_manager_producer") + self._consumer = ConsumerGroup( + producer=self._producer, + Consumer=ExecutorManagerConsumer, + parallel=1, + name="executor_manager", + ) + self._consumer.start() + sleep(0.1) + + def add(self, job_item: Job) -> Job: + self._producer.add(job_item) + return job_item + + def list(self) -> list[Job]: + return list(self._producer.all(block=False)) + + def get(self, job_id: int) -> Job | None: + for job_item in self.list(): + if job_item.id != job_id: + continue + return job_item + return None + + def cancel(self, job_item: Job) -> Job: + if job_item.state in [ + Job.JOB_STATE_ENUM.PENDING, + Job.JOB_STATE_ENUM.RUNNING, + ]: + job_item.state = Job.JOB_STATE_ENUM.CANCELLED + job_item.updated_at = datetime.now() + if job_item.pid: + os.kill(job_item.pid, signal.SIGTERM) + + return job_item + + def terminate(self): + self._producer.add(Producer.End) + sleep(1) + self._consumer.join(1) diff --git a/zapusk/services/executor_manager/backends/kawka/backend_test.py b/zapusk/services/executor_manager/backends/kawka/backend_test.py new file mode 100644 index 0000000..780e8e8 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/backend_test.py @@ -0,0 +1,85 @@ +from time import sleep +from unittest import TestCase + +from zapusk.lib.create_jobitem import create_jobitem +from zapusk.models import Job + 
+from .backend import ExecutorManagerKawkaBackend + + +class TestKawkaBackend(TestCase): + def test_kawka_backend_add(self): + backend = ExecutorManagerKawkaBackend() + backend.start() + + item = create_jobitem(command="echo 1") + backend.add(item) + backend.terminate() + + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FINISHED) + + def test_kawka_backend_get(self): + backend = ExecutorManagerKawkaBackend() + backend.start() + + item = create_jobitem(command="echo 1") + backend.add(item) + + backend.terminate() + + res = backend.get(item.id) + + self.assertEqual(res, item) + + def test_kawka_backend_get_none(self): + backend = ExecutorManagerKawkaBackend() + backend.start() + + item = create_jobitem(command="echo 1") + backend.add(item) + + backend.terminate() + + res = backend.get(999) + + self.assertEqual(res, None) + + def test_kawka_backend_list(self): + backend = ExecutorManagerKawkaBackend() + backend.start() + + backend.add(create_jobitem(command="echo 1")) + backend.add(create_jobitem(command="echo 2")) + backend.add(create_jobitem(command="echo 3")) + + backend.terminate() + + res = backend.list() + + self.assertEqual(len(res), 3) + self.assertEqual(res[0].command, "echo 1") + self.assertEqual(res[1].command, "echo 2") + self.assertEqual(res[2].command, "echo 3") + + def test_kawka_backend_cancel(self): + backend = ExecutorManagerKawkaBackend() + backend.start() + + item = create_jobitem(command="sleep 10") + backend.add(item) + + sleep(1) + + res = backend.get(item.id) + + if not res: + raise Exception("Fail") + + self.assertEqual(res.state, Job.JOB_STATE_ENUM.RUNNING) + + backend.cancel(item) + sleep(1) + + self.assertEqual(item.state, Job.JOB_STATE_ENUM.CANCELLED) + + backend.terminate() diff --git a/zapusk/services/executor_manager/backends/kawka/consumer.py b/zapusk/services/executor_manager/backends/kawka/consumer.py new file mode 100644 index 0000000..f367ff5 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/consumer.py @@ -0,0 
+1,83 @@ +import logging + +from zapusk.kawka import Consumer, ConsumerGroup, Producer +from zapusk.models import Job + +from .args_consumer import ArgsConsumer +from .executor import Executor +from .state import ExecutorManagerState + + +logger = logging.getLogger(__name__) +executorManagerState = ExecutorManagerState() + + +class ExecutorManagerConsumer(Consumer): + state = executorManagerState + + def __init__(self, block=True, *args, **kwargs): + self.state.reset() + self.block = block + super().__init__(*args, **kwargs) + + def join(self, timeout=None, *args, **kwargs): + for cgs in self.state.running_consumergroups.values(): + [args_cg, run_cg] = cgs + args_cg.join(timeout) + run_cg.join(timeout) + + for ps in self.state.running_producers.values(): + [args_ps, run_ps] = ps + args_ps.add(Producer.End) + run_ps.add(Producer.End) + + super().join(*args, **kwargs) + + def process(self, job: Job): + group_config = job.group_config + [args_producer, _] = self.__get_or_create_producers(group_config) + self.__get_or_create_consumergroups(group_config) + args_producer.add(job) + + def __get_or_create_producers(self, group_config): + if group_config.id not in self.state.running_producers: + args_producer = Producer( + name=f"producer_{group_config.id}_args", block=self.block + ) + run_producer = Producer( + name=f"producer_{group_config.id}_run", block=self.block + ) + + self.state.running_producers[group_config.id] = [ + args_producer, + run_producer, + ] + return [args_producer, run_producer] + + return self.state.running_producers[group_config.id] + + def __get_or_create_consumergroups(self, group_config): + if group_config.id not in self.state.running_consumergroups: + [args_producer, run_producer] = self.__get_or_create_producers(group_config) + args_cg = ConsumerGroup( + name=f"{group_config.id}_args", + producer=args_producer, + Consumer=ArgsConsumer, + parallel=1, + context={"sink": run_producer}, + ) + args_cg.start() + + run_cg = ConsumerGroup( + 
name=f"{group_config.id}_run", + producer=run_producer, + Consumer=Executor, + parallel=group_config.parallel, + context={"sink": run_producer}, + ) + run_cg.start() + + self.state.running_consumergroups[group_config.id] = [args_cg, run_cg] + return [args_cg, run_cg] + + return self.state.running_consumergroups[group_config.id] diff --git a/zapusk/services/executor_manager/backends/kawka/consumer_test.py b/zapusk/services/executor_manager/backends/kawka/consumer_test.py new file mode 100644 index 0000000..05f16e3 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/consumer_test.py @@ -0,0 +1,61 @@ +from time import sleep +from unittest import TestCase, mock +from testfixtures.mock import call +from testfixtures import Replacer +from testfixtures.popen import MockPopen + +from zapusk.kawka import Producer +from zapusk.lib.create_jobitem import create_jobitem +from zapusk.models import Job + +from .consumer import ExecutorManagerConsumer + + +class ExecutorManagerTest(TestCase): + def setUp(self): + self.Popen = MockPopen() + self.r = Replacer() + self.r.replace("subprocess.Popen", self.Popen) + self.addCleanup(self.r.restore) + + def test_should_get_args_and_run_job(self): + input_producer = Producer(name="input_producer") + + executor_manager = ExecutorManagerConsumer( + name="run_consumer", + producer=input_producer, + ) + executor_manager.start() + + self.Popen.set_command("get_args", stdout=b"hello world", stderr=b"") + self.Popen.set_command( + "my_command hello world", stdout=b"hello world", stderr=b"" + ) + + item = create_jobitem(command="my_command", args_command="get_args") + + input_producer.add(item) + input_producer.add(Producer.End) + + sleep(1) + executor_manager.join(2) + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "get_args", + shell=True, + stdout=-1, + stderr=-1, + ), + ) + self.assertEqual( + self.Popen.all_calls[3], + call.Popen( + "my_command hello world", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + 
), + ) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FINISHED) diff --git a/zapusk/services/executor_manager/backends/kawka/executor.py b/zapusk/services/executor_manager/backends/kawka/executor.py new file mode 100644 index 0000000..4efab99 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/executor.py @@ -0,0 +1,69 @@ +import logging +import subprocess +from time import time +from datetime import datetime + +from zapusk.kawka import Consumer +from zapusk.models import Job + +logger = logging.getLogger(__name__) + + +class Executor(Consumer): + def process(self, job: Job): + logger.info(f"{self} received a job to run {job}") + + logfile_path = f"/tmp/zapusk-{time()}.log" + + if job.state == Job.JOB_STATE_ENUM.CANCELLED: + logger.info("Skipping cancelled job {job.id}") + return + + job.state = Job.JOB_STATE_ENUM.RUNNING + job.log = logfile_path + job.consumed_by = self.name + job.updated_at = datetime.now() + job.command = " ".join([job.command, *job.args]) + + logger.info(f"Run a command {job.command}") + + with open(logfile_path, "w") as logfile: + proc = subprocess.Popen( + job.command, + shell=True, + stdout=logfile, + stderr=logfile, + ) + job.pid = proc.pid + + exit_code = proc.wait() + + job.exit_code = exit_code + if job.state == Job.JOB_STATE_ENUM.CANCELLED: + logger.info(f"Job {job.id} has been cancelled") + return + + if exit_code == 0: + job.state = Job.JOB_STATE_ENUM.FINISHED + job.updated_at = datetime.now() + logger.info(f"{self.name} finished {job} job") + + on_finish = job.on_finish or job.group_config.on_finish + if on_finish: + subprocess.Popen( + on_finish.format(job=job), + shell=True, + ) + + else: + job.state = Job.JOB_STATE_ENUM.FAILED + job.updated_at = datetime.now() + + on_fail = job.on_fail or job.group_config.on_fail + if on_fail: + subprocess.Popen( + on_fail.format(job=job), + shell=True, + ) + + logger.info(f"{self.name} failed {job} job") diff --git 
a/zapusk/services/executor_manager/backends/kawka/executor_test.py b/zapusk/services/executor_manager/backends/kawka/executor_test.py new file mode 100644 index 0000000..3224c51 --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/executor_test.py @@ -0,0 +1,310 @@ +from unittest import TestCase, mock +from testfixtures.mock import call +from testfixtures import Replacer +from testfixtures.popen import MockPopen + +from zapusk.kawka import Producer +from zapusk.lib.create_jobitem import create_jobitem +from zapusk.models import Job + +from .executor import Executor + + +class ExecutorTest(TestCase): + def setUp(self): + self.Popen = MockPopen() + self.r = Replacer() + self.r.replace("subprocess.Popen", self.Popen) + self.addCleanup(self.r.restore) + + def test_consumer_should_run_command(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("echo 1", stdout=b"1", stderr=b"") + item = create_jobitem(command="echo 1") + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "echo 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FINISHED) + + def test_consumer_should_run_on_finish_callback(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("echo 1", stdout=b"1", stderr=b"") + self.Popen.set_command("echo finish", stdout=b"finish", stderr=b"") + + item = create_jobitem(command="echo 1", on_finish="echo finish") + + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "echo 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + 
call.Popen( + "echo finish", + shell=True, + ), + ) + + def test_consumer_should_run_on_finish_group_callback(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("echo 1", stdout=b"1", stderr=b"") + self.Popen.set_command("echo finish", stdout=b"finish", stderr=b"") + + item = create_jobitem(command="echo 1", group_on_finish="echo finish") + + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "echo 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + call.Popen( + "echo finish", + shell=True, + ), + ) + + def test_consumer_should_run_on_finish_job_callback_if_both_job_and_group_are_defined( + self, + ): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("echo 1", stdout=b"1", stderr=b"") + self.Popen.set_command("echo finish", stdout=b"finish", stderr=b"") + + item = create_jobitem( + command="echo 1", + on_finish="echo finish", + group_on_finish="echo finish_group", + ) + + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "echo 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + call.Popen( + "echo finish", + shell=True, + ), + ) + + def test_consumer_should_run_on_fail_callback(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("exit 1", stdout=b"", stderr=b"1", returncode=1) + self.Popen.set_command("echo fail", stdout=b"fail", stderr=b"") + + item = create_jobitem(command="exit 1", on_fail="echo fail") + + 
input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "exit 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + call.Popen( + "echo fail", + shell=True, + ), + ) + + def test_consumer_should_run_group_on_fail_callback(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("exit 1", stdout=b"", stderr=b"1", returncode=1) + self.Popen.set_command("echo fail", stdout=b"fail", stderr=b"") + + item = create_jobitem(command="exit 1", group_on_fail="echo fail") + + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "exit 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + call.Popen( + "echo fail", + shell=True, + ), + ) + + def test_consumer_should_run_on_fail_job_callback_if_both_job_and_group_callbacks_are_defined( + self, + ): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("exit 1", stdout=b"", stderr=b"1", returncode=1) + self.Popen.set_command("echo fail", stdout=b"fail", stderr=b"") + + item = create_jobitem( + command="exit 1", on_fail="echo fail", group_on_fail="echo group_fail" + ) + + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join() + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "exit 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + + self.assertEqual( + self.Popen.all_calls[2], + call.Popen( + "echo fail", + shell=True, + ), + ) + + def test_consumer_should_run_command_with_args(self): + input_producer = Producer("input_producer") + executor = 
Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("echo 1 2 3", stdout=b"1 2 3", stderr=b"") + item = create_jobitem(command="echo", args=["1", "2", "3"]) + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join(2) + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "echo 1 2 3", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FINISHED) + + def test_consumer_should_fail_command(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("exit 1", stdout=b"1", stderr=b"", returncode=1) + item = create_jobitem(command="exit 1") + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join(2) + + self.assertEqual( + self.Popen.all_calls[0], + call.Popen( + "exit 1", + shell=True, + stdout=mock.ANY, + stderr=mock.ANY, + ), + ) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.FAILED) + + def test_consumer_should_skip_cancelled(self): + input_producer = Producer("input_producer") + executor = Executor(name="run_consumer", producer=input_producer) + executor.start() + + self.Popen.set_command("exit 1", stdout=b"1", stderr=b"", returncode=1) + item = create_jobitem(command="exit 1", state=Job.JOB_STATE_ENUM.CANCELLED) + input_producer.add(item) + input_producer.add(Producer.End) + + executor.join(2) + + self.assertEqual(len(self.Popen.all_calls), 0) + self.assertEqual(item.state, Job.JOB_STATE_ENUM.CANCELLED) diff --git a/zapusk/services/executor_manager/backends/kawka/state.py b/zapusk/services/executor_manager/backends/kawka/state.py new file mode 100644 index 0000000..f5dad4b --- /dev/null +++ b/zapusk/services/executor_manager/backends/kawka/state.py @@ -0,0 +1,11 @@ +class ExecutorManagerState: + running_consumergroups: dict + running_producers: dict + + def __init__(self): + 
self.running_consumergroups = {} + self.running_producers = {} + + def reset(self): + self.running_consumergroups = {} + self.running_producers = {} diff --git a/zapusk/services/executor_manager/service.py b/zapusk/services/executor_manager/service.py new file mode 100644 index 0000000..56444cb --- /dev/null +++ b/zapusk/services/executor_manager/service.py @@ -0,0 +1,47 @@ +import logging +from zapusk.models import Job + +logger = logging.getLogger(__name__) + + +class ExecutorManagerService: + """ + ExecutorManagerService is a generic interface for a given backend to interact + with the pipeline + """ + + def __init__(self, backend=None): + logger.info("Start joblog") + + if not backend: + raise Exception("ExecutorManagerService backend isn't configured") + + self.__backend = backend + self.__backend.start() + + def get(self, job_id: int) -> Job | None: + """ + returns a job by its id + """ + return self.__backend.get(job_id) + + def list(self) -> list[Job]: + """ + returns all jobs in the pipeline + """ + return self.__backend.list() + + def add(self, job_item: Job) -> Job: + """ + adds a job to the pipeline + """ + return self.__backend.add(job_item) + + def cancel(self, job_item: Job) -> Job: + """ + cancels a job + """ + return self.__backend.cancel(job_item) + + def terminate(self) -> None: + self.__backend.terminate() diff --git a/zapusk/services/executor_manager/service_test.py b/zapusk/services/executor_manager/service_test.py new file mode 100644 index 0000000..f52648b --- /dev/null +++ b/zapusk/services/executor_manager/service_test.py @@ -0,0 +1,63 @@ +import pytest +from unittest import mock + +from .service import ExecutorManagerService + + +class MockBackend: + def start(self): + pass + + def get(self): + pass + + def list(self): + pass + + def add(self): + pass + + def cancel(self): + pass + + +@pytest.mark.parametrize( + "service_method_name,backend_method_name,args,return_value", + [ + ("get", "get", [1], {"id": 1}), + ("list", "list", [], [1, 2, 3]), + 
@pytest.mark.parametrize(
    "service_method_name,backend_method_name,args,return_value",
    [
        ("get", "get", [1], {"id": 1}),
        ("list", "list", [], [1, 2, 3]),
        ("add", "add", [1], [1]),
        ("cancel", "cancel", [1], [1]),
    ],
)
def test_method_call_proxied_to_the_backend(
    service_method_name,
    backend_method_name,
    args,
    return_value,
):
    """Each public service method must forward args and return value to the backend."""
    backend = MockBackend()
    setattr(
        backend,
        backend_method_name,
        mock.MagicMock(
            name=backend_method_name,
            return_value=return_value,
        ),
    )

    service = ExecutorManagerService(backend=backend)
    method = getattr(service, service_method_name)
    result = method(*args)

    # The backend must have been called exactly once, with the same args.
    mocked_method = getattr(backend, backend_method_name)
    mocked_method.assert_called_once_with(*args)

    assert result == return_value


def test_executor_manager_service_should_fail_without_backend():
    # pytest.raises guarantees this test FAILS when no exception is raised;
    # the previous bare try/except passed silently in that case.
    with pytest.raises(Exception) as excinfo:
        ExecutorManagerService()
    assert excinfo.value.args[0] == "ExecutorManagerService backend isn't configured"
class SchedulerService:
    """
    Background scheduler that enqueues jobs when their cron schedule is due.

    Job configs exposing a `schedule` are tracked as ScheduledJob entries;
    a polling thread checks them every `interval` seconds and submits due
    jobs to the executor manager.
    """

    # Flag polled by the scheduler loop; flipped by terminate().
    __terminated = False

    def __init__(
        self,
        config_service: ConfigService,
        executor_manager_service: ExecutorManagerService,
        interval: float = 1,
    ) -> None:
        """
        Args:
            config_service: source of job and group configuration.
            executor_manager_service: queue that due jobs are submitted to.
            interval: polling period in seconds.
        """
        self.__interval = interval
        self.__config_service = config_service
        self.__executor_manager_service = executor_manager_service
        # Only configs with a schedule are candidates for the scheduler.
        self.__scheduled_jobs = [j for j in config_service.list_jobs() if j.schedule]
        logger.info(f"Scheduled jobs detected {[i.id for i in self.__scheduled_jobs]}")

        # job_config.id -> ScheduledJob
        self.__data: dict[str, ScheduledJob] = {}

    def start(self) -> None:
        """
        Reads config service, schedules jobs and starts the scheduler thread.
        """
        self.add_from_config()
        thread = Thread(target=self.__start_thread)
        thread.start()

    def add(self, job_config):
        """
        Schedule a job from a JobConfig.

        Returns:
            True when scheduled, False when the config has no valid schedule.
        """
        try:
            scheduled_job = ScheduledJob(
                job_config=job_config,
            )
            self.__data[job_config.id] = scheduled_job
            return True
        except ValueError as ex:
            # ScheduledJob raises ValueError for a missing/invalid schedule
            # (presumably — TODO confirm against ScheduledJob's contract).
            logger.info(ex.args[0])
            return False

    def delete(self, job_config_id):
        """
        Removes a scheduled job by its config id.

        Returns:
            True when removed, False when the id is unknown.
        """
        if job_config_id in self.__data:
            del self.__data[job_config_id]
            return True
        return False

    def list(self):
        """
        Lists all scheduled job configs.
        """
        return [sj.job_config for sj in self.__data.values()]

    def add_from_config(self) -> None:
        """
        Reads config from self.__config_service and schedules all jobs.
        """
        for job_config in self.__scheduled_jobs:
            # Just a type guard, was checked in __init__
            if not job_config.schedule:  # pragma: no cover
                continue
            self.add(job_config)

    def terminate(self) -> None:
        """Signal the polling loop to stop after its current iteration."""
        self.__terminated = True

    def __start_thread(self) -> None:
        while not self.__terminated:
            now = datetime.now(timezone.utc)
            # Iterate over a snapshot: add()/delete() may mutate __data from
            # another thread while this loop runs, and mutating a dict during
            # iteration raises RuntimeError.
            for scheduled_item in list(self.__data.values()):
                logger.debug(f"Checking schedule for {scheduled_item}")
                logger.debug(
                    f"NEXT:{datetime.fromtimestamp(scheduled_item.next)} < NOW:{now}"
                )
                if scheduled_item.next < now.timestamp():
                    self.__run_job(scheduled_item, now)

            sleep(self.__interval)

    def __run_job(self, scheduled_job: ScheduledJob, now: datetime) -> None:
        # Resolve the job's group (falling back to "default"), record the run
        # so the next due time advances, then enqueue the concrete Job.
        job_config = scheduled_job.job_config
        job_group = self.__config_service.get_job_group_or_default(
            job_config.group or "default"
        )

        logger.info(f"Adding a job {scheduled_job.job_config} to the queue")
        scheduled_job.record_run(now)
        self.__executor_manager_service.add(
            Job.from_config(
                group_config=job_group,
                config=job_config,
            )
        )
8, 30, 10) + sleep(1) + scheduler_service.terminate() + + args = mock.call_args.args + + scheduled_job = args[0] + self.assertEqual(type(scheduled_job), Job) + self.assertEqual(scheduled_job.name, "Echo") + + def test_scheduler_service_should_not_add_jobs_without_schedule(self): + scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, # type: ignore + interval=0.1, + ) + + with patch.object(self.executor_manager_service, "add") as mock: + scheduler_service.start() + res = scheduler_service.add( + JobConfig(id="no_schedule", name="No Schedule", command="echo 1") + ) + scheduler_service.terminate() + self.assertEqual(res, False) + + def test_scheduler_service_should_list_all_scheduled_jobs(self): + scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, # type: ignore + ) + + scheduler_service.add( + JobConfig( + id="1", + name="1", + command="echo 1", + schedule="1 * * * *", + ) + ) + scheduler_service.add( + JobConfig( + id="2", + name="2", + command="echo 2", + schedule="1 * * * *", + ) + ) + + res = scheduler_service.list() + + self.assertEqual( + res, + [ + { + "id": "1", + "name": "1", + "command": "echo 1", + "group": "default", + "args_command": None, + "on_finish": None, + "on_fail": None, + "schedule": "1 * * * *", + }, + { + "id": "2", + "name": "2", + "command": "echo 2", + "group": "default", + "args_command": None, + "on_finish": None, + "on_fail": None, + "schedule": "1 * * * *", + }, + ], + ) + + def test_scheduler_service_should_delete_scheduled_jobs(self): + scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, # type: ignore + ) + + scheduler_service.add( + JobConfig( + id="1", + name="1", + command="echo 1", + schedule="1 * * * *", + ) + ) + scheduler_service.add( + JobConfig( + id="2", + name="2", + command="echo 
2", + schedule="1 * * * *", + ) + ) + + scheduler_service.delete("1") + res = scheduler_service.list() + + self.assertEqual( + res, + [ + { + "id": "2", + "name": "2", + "command": "echo 2", + "group": "default", + "args_command": None, + "on_finish": None, + "on_fail": None, + "schedule": "1 * * * *", + }, + ], + ) + + def test_scheduler_service_delete_should_ignore_unknown_jobs(self): + scheduler_service = SchedulerService( + config_service=self.config_service, + executor_manager_service=self.executor_manager_service, # type: ignore + ) + + scheduler_service.add( + JobConfig( + id="1", + name="1", + command="echo 1", + schedule="1 * * * *", + ) + ) + scheduler_service.add( + JobConfig( + id="2", + name="2", + command="echo 2", + schedule="1 * * * *", + ) + ) + + res = scheduler_service.delete("3") + self.assertEqual(res, False) + + res = scheduler_service.list() + + self.assertEqual( + res, + [ + { + "id": "1", + "name": "1", + "command": "echo 1", + "group": "default", + "args_command": None, + "on_finish": None, + "on_fail": None, + "schedule": "1 * * * *", + }, + { + "id": "2", + "name": "2", + "command": "echo 2", + "group": "default", + "args_command": None, + "on_finish": None, + "on_fail": None, + "schedule": "1 * * * *", + }, + ], + )