diff --git a/.bumpversion.cfg b/.bumpversion.cfg index de76d89de6e..8495b360a5f 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 4.0.2 +current_version = 4.1.0 commit = True tag = True parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P[a-z]+)? diff --git a/.circleci/config.yml b/.circleci/config.yml index 38bb7ddb1bd..e4ca7890662 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,16 +1,287 @@ version: 2.1 +workflows: + celery: + jobs: + - build-and-upload + - run-unit-tests: + requires: + - build-and-upload + - code-coverage-report: + requires: + - run-unit-tests + orbs: - python: circleci/python@0.2.1 + gcp-gke: circleci/gcp-gke@0.2.0 + slack: circleci/slack@3.2.0 + +commands: + configure_environment: + description: "Initial environment setup: Configure Gcloud and conditionally installs Docker Compose." + parameters: + with_compose: + description: "If true, sets up Docker Compose, which can then be used to + spin up Docker containers." + type: boolean + default: false + steps: + - run: + name: Authorize gcloud + command: | + echo $GCLOUD_SERVICE_KEY > ${HOME}/gcloud-service-key.json + gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json + gcloud --quiet config set project sightmachine-178216 + gcloud --quiet config set compute/zone us-west1-a + gcloud --quiet container clusters get-credentials dev-k8s-uw1 + ## To authenticate to Container Registry, use gcloud as a Docker credential helper + echo y | gcloud auth configure-docker + + - when: + condition: << parameters.with_compose >> + steps: + - run: + name: Setup Docker Compose + command: | + curl -L https://github.com/docker/compose/releases/download/1.19.0/docker-compose-`uname -s`-`uname -m` > ~/docker-compose + chmod +x ~/docker-compose + mv ~/docker-compose /usr/local/bin/docker-compose + + send_slack_msg: + description: "If the job fails for the master branch, send a message to the Slack channel." + steps: + - slack/status: + failure_message: Uh-oh! *$CIRCLE_PROJECT_REPONAME* (master) - Job Failed <$CIRCLE_BUILD_URL|#$CIRCLE_BUILD_NUM> + include_project_field: false + include_job_number_field: false + fail_only: true + only_for_branches: master + webhook: $SLACK_WEBHOOK_URL jobs: - build-and-test: - executor: python/default + ## ------------------ Build and Upload Celery Docker image to GCP Container Registry ------------------ + + build-and-upload: + machine: + image: ubuntu-1604:201903-01 + docker_layer_caching: false + parameters: + run_merge: # do a prospective merge with target branch + type: boolean + default: false steps: - checkout + - run: + name: Configure Environment Variables + command: | + set -x + echo "export GIT_COMMIT=$(git rev-parse HEAD)" > custom.env + echo "export GIT_BRANCH=$(git symbolic-ref -q HEAD | sed -e 's:^refs/heads/::')" >> custom.env + # Sanitize branch name and Git tag (for docker image tag) + echo "export GIT_TAG=$(git describe --tag | sed -E 's/^[.-]|(^[.-])?[^A-Za-z0-9_.-]+/_/g')" >> custom.env + echo "export ARTIFACT_PATH=\"gcr.io/sightmachine-178216/celery\"" >> custom.env + echo "export BRANCH_NAME=$(echo ${CIRCLE_BRANCH} | sed -E 's/^[.-]|(^[.-])?[^A-Za-z0-9_.-]+/_/g')" >> custom.env + - persist_to_workspace: + root: . + paths: + - custom.env + - .coveragerc + + - configure_environment + + - run: + name: Build Celery Docker Image + command: | + source custom.env + echo "Building ${ARTIFACT_PATH}:${GIT_COMMIT}" + docker build -f docker/Dockerfile \ + --build-arg GIT_BRANCH=${GIT_BRANCH} \ + --build-arg GIT_TAG=${GIT_TAG} \ + --build-arg GIT_COMMIT=${GIT_COMMIT} \ + -t ${ARTIFACT_PATH}:${BRANCH_NAME}-dev \ + -t ${ARTIFACT_PATH}:${GIT_TAG}-dev \ + -t ${ARTIFACT_PATH}:${GIT_COMMIT}-dev . + + - run: + name: Upload Celery Docker Image to GCR + command: | + source custom.env + docker push ${ARTIFACT_PATH}:${BRANCH_NAME}-dev + docker push ${ARTIFACT_PATH}:${GIT_TAG}-dev + docker push ${ARTIFACT_PATH}:${GIT_COMMIT}-dev + + - send_slack_msg + + ## ------------------ Run Unit Tests ------------------ + + run-unit-tests: + parameters: + with_merge: # Part of the PR-merge workflow + type: boolean + default: false + docker: + - image: google/cloud-sdk + parallelism: 6 + steps: + - attach_workspace: + at: /tmp/workspace + + - configure_environment: + with_compose: true + + - setup_remote_docker: + docker_layer_caching: false + + - run: + name: Create the Celery container + command: | + source /tmp/workspace/custom.env + + echo "Spinning up Docker container: Celery ($ARTIFACT_PATH:${GIT_COMMIT}-dev)" + echo -e ' + version: "3" + services: + celery: + container_name: celery + image: ${ARTIFACT_PATH}:${GIT_COMMIT}-dev + environment: + TEST_BROKER: pyamqp://rabbit:5672 + TEST_BACKEND: redis://redis + PYTHONUNBUFFERED: 1 + PYTHONDONTWRITEBYTECODE: 1 + REDIS_HOST: redis + WORKER_LOGLEVEL: DEBUG + AZUREBLOCKBLOB_URL: azureblockblob://DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azurite:10000/devstoreaccount1; + PYTHONPATH: /home/developer/celery + command: + - cat + tty: true + hostname: celery + depends_on: + - rabbit + - redis + - dynamodb + - azurite + + rabbit: + image: gcr.io/sightmachine-178216/rabbitmq:3.8.0 + + redis: + image: gcr.io/sightmachine-178216/redis:5.0.6 + + dynamodb: + image: gcr.io/sightmachine-178216/dwmkerr/dynamodb:38 + + azurite: + image: arafato/azurite:2.6.5 + ' > docker-compose.yml + + docker-compose up -d + docker-compose ps + while [[ "$(docker inspect -f '{{.State.Running}}' celery 2>/dev/null)" != "true" ]]; do echo "Waiting for celery container to start..."; done + - run: + name: Run Unit Tests + command: | + source /tmp/workspace/custom.env + docker-compose exec celery circleci_scripts/execute_unit_tests.sh + - run: + name: Move Unit Test Results Out of Container + when: always + command: docker cp celery:/home/developer/tmp/junit /tmp/workspace/junit + + # Store the test results on each node so we can see failures + - store_test_results: + path: /tmp/workspace/junit/ + + - store_artifacts: + name: Save Unit Test Results + path: /tmp/workspace/junit/ + + - run: + name: Copy Coverage Results to Workspace + command: | + mkdir -p /tmp/workspace/coverage-report-${CIRCLE_BUILD_NUM} + docker cp celery:/home/developer/celery/.coverage /tmp/workspace/coverage-report-${CIRCLE_BUILD_NUM}/.coverage + + - persist_to_workspace: + root: /tmp/workspace + paths: + - "coverage-report-*" + + - send_slack_msg + + ## ------------------ Generate Code Coverage Report ------------------ + + code-coverage-report: + parameters: + with_merge: # Part of the PR-merge workflow + type: boolean + default: false + docker: + - image: circleci/python:3.7 + steps: + - when: + condition: << parameters.with_merge >> + steps: + - run: + name: Abort if not building a pull request + command: | + if [ -z "$CIRCLE_PULL_REQUEST" ]; then + circleci-agent step halt + fi + # Coverage html requires source code to build HTML views + - checkout + + # Need a workspace for all the coverage reports + - attach_workspace: + at: /tmp/workspace + + - run: + name: Compile Coverage Reports + command: | + sudo pip install -r requirements-coveralls.txt + coverage combine /tmp/workspace/coverage-report-*/.coverage + sed "s@/home/developer@/home/circleci/project@g" -i .coverage + coveralls + + ## ------------------ Run nightly build for wheel upload ------------------ + + # upload-wheels: + # machine: + # image: ubuntu-1604:201903-01 + # docker_layer_caching: true + + # steps: + # - checkout + + # - configure_environment: + # with_compose: true + + # - run: + # name: Build pypi dependencies and upload wheels + # command: | + # # Set jfrog cli version to 1.33.1 + # curl -fL https://getcli.jfrog.io | sh -s 1.33.1 + # chmod 755 ./jfrog + # sudo mv ./jfrog /usr/local/bin/ + # jfrog rt c rt-server-sm --url=https://sightmachine.jfrog.io/sightmachine --user=circleci-admin-user --password=$JFROG_PWD --interactive=false + + # docker pull gcr.io/sightmachine-178216/ma:master-dev + # docker run -dit --name ma gcr.io/sightmachine-178216/ma:master-dev /bin/bash + # while [[ "$(docker inspect -f '{{.State.Running}}' ma 2>/dev/null)" != "true" ]]; do echo "Waiting for ma container to start..."; done + # docker ps -f name=ma + # docker cp /home/circleci/project ma:/ + # docker exec -ti ma sh -c " + # cd /project + # mkdir wheel-dir + # pip wheel --wheel-dir=./wheel-dir -r requirements.txt + # pip wheel --wheel-dir=./wheel-dir -r requirements-dev.txt + # pip wheel --wheel-dir=./wheel-dir -r requirements-test.txt + # " + # docker cp ma:/project/wheel-dir wheel-dir + + # echo "Uploading Updated Wheels...." + # jfrog rt u "wheel-dir/*.whl" pypi-dependencies/wheel/ + + # - send_slack_msg -workflows: - main: - jobs: - - build-and-test diff --git a/.editorconfig b/.editorconfig index 22fb1f902a9..38d889273b2 100644 --- a/.editorconfig +++ b/.editorconfig @@ -9,6 +9,7 @@ trim_trailing_whitespace = true insert_final_newline = true charset = utf-8 end_of_line = lf +max_line_length = 78 [Makefile] indent_style = tab diff --git a/.gitignore b/.gitignore index 740e1d9973c..91dd03e759e 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ dist/ *.egg-info *.egg *.egg/ +*.eggs/ build/ .build/ _build/ diff --git a/.travis.yml b/.travis.yml index ef4ef3dd9cb..a8bfb76282c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,7 @@ language: python sudo: required -cache: false +dist: trusty +cache: pip python: - '2.7' - '3.4' @@ -19,13 +20,13 @@ env: matrix: include: - python: '3.5' - env: TOXENV=pypy-unit PYPY_VERSION="5.3" + env: TOXENV=pypy-unit PYPY_VERSION="pypy2.7-5.8.0" - python: '3.5' - env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="5.3" + env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="pypy2.7-5.8.0" - python: '3.5' - env: TOXENV=pypy-integration-redis PYPY_VERSION="5.3" + env: TOXENV=pypy-integration-redis PYPY_VERSION="pypy2.7-5.8.0" - python: '3.5' - env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="5.3" + env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="pypy2.7-5.8.0" - python: '3.5' env: TOXENV=flake8 - python: '3.5' @@ -39,22 +40,23 @@ matrix: before_install: - if [[ -v MATRIX_TOXENV ]]; then export TOXENV=${TRAVIS_PYTHON_VERSION}-${MATRIX_TOXENV}; fi; env - | - if [ "$TOXENV" = "pypy" ]; then + if [[ "$TOXENV" =~ "pypy" ]]; then export PYENV_ROOT="$HOME/.pyenv" if [ -f "$PYENV_ROOT/bin/pyenv" ]; then cd "$PYENV_ROOT" && git pull else - rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT" + rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/pyenv/pyenv.git "$PYENV_ROOT" fi - "$PYENV_ROOT/bin/pyenv" install "pypy-$PYPY_VERSION" - virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION" - source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate" + "$PYENV_ROOT/bin/pyenv" install "$PYPY_VERSION" + virtualenv --python="$PYENV_ROOT/versions/$PYPY_VERSION/bin/python" "$HOME/virtualenvs/$PYPY_VERSION" + source "$HOME/virtualenvs/$PYPY_VERSION/bin/activate" + which python fi - | if [[ "$TOXENV" == *dynamodb ]]; then - sudo apt-get install -y default-jre supervisor + sudo apt-get update && sudo apt-get install -y default-jre supervisor mkdir /opt/dynamodb-local - cd /opt/dynamodb-local && curl -L http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar zx + cd /opt/dynamodb-local && curl --retry 5 --retry-delay 1 -L http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar zx cd - echo '[program:dynamodb-local]' | sudo tee /etc/supervisor/conf.d/dynamodb-local.conf echo 'command=java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -inMemory' | sudo tee -a /etc/supervisor/conf.d/dynamodb-local.conf @@ -67,7 +69,7 @@ before_install: after_success: - .tox/$TRAVIS_PYTHON_VERSION/bin/coverage xml - .tox/$TRAVIS_PYTHON_VERSION/bin/codecov -e TOXENV -install: travis_retry pip install -U tox +install: travis_retry pip install -U tox | cat script: tox -v -- -v notifications: irc: diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f7b1cec4750..8ba80fe5a42 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -39,7 +39,7 @@ meeting or private correspondence. The Code of Conduct is heavily based on the `Ubuntu Code of Conduct`_, and the `Pylons Code of Conduct`_. -.. _`Ubuntu Code of Conduct`: http://www.ubuntu.com/community/conduct +.. _`Ubuntu Code of Conduct`: https://www.ubuntu.com/community/conduct .. _`Pylons Code of Conduct`: http://docs.pylonshq.com/community/conduct.html Be considerate @@ -285,6 +285,7 @@ Branches Current active version branches: * dev (which git calls "master") (https://github.com/celery/celery/tree/master) +* 4.0 (https://github.com/celery/celery/tree/4.0) * 3.1 (https://github.com/celery/celery/tree/3.1) * 3.0 (https://github.com/celery/celery/tree/3.0) @@ -442,10 +443,10 @@ fetch and checkout a remote branch like this:: git checkout --track -b 3.0-devel origin/3.0-devel -.. _`Fork a Repo`: http://help.github.com/fork-a-repo/ +.. _`Fork a Repo`: https://help.github.com/fork-a-repo/ .. _`Rebasing merge commits in git`: - http://notes.envato.com/developers/rebasing-merge-commits-in-git/ -.. _`Rebase`: http://help.github.com/rebase/ + https://notes.envato.com/developers/rebasing-merge-commits-in-git/ +.. _`Rebase`: https://help.github.com/rebase/ .. _contributing-testing: @@ -474,7 +475,7 @@ dependencies, so install these next: $ pip install -U -r requirements/default.txt After installing the dependencies required, you can now execute -the test suite by calling ``py.test ``: :: @@ -514,7 +515,7 @@ of your contribution. Read the `Pull Requests`_ section in the GitHub Guide to learn how this is done. You can also attach pull requests to existing issues by following -the steps outlined here: http://bit.ly/koJoso +the steps outlined here: https://bit.ly/koJoso .. _`Pull Requests`: http://help.github.com/send-pull-requests/ @@ -581,11 +582,12 @@ Building the documentation -------------------------- To build the documentation you need to install the dependencies -listed in ``requirements/docs.txt``: +listed in ``requirements/docs.txt`` and ``requirements/default.txt``: :: $ pip install -U -r requirements/docs.txt + $ pip install -U -r requirements/default.txt After these dependencies are installed you should be able to build the docs by running: @@ -628,7 +630,7 @@ the ``flakes`` target instead: :: - $ make flakes§ + $ make flakes API reference ~~~~~~~~~~~~~ @@ -747,14 +749,14 @@ is following the conventions. * Import order * Python standard library (`import xxx`) - * Python standard library ('from xxx import`) + * Python standard library (`from xxx import`) * Third-party packages. * Other modules from the current package. or in case of code using Django: * Python standard library (`import xxx`) - * Python standard library ('from xxx import`) + * Python standard library (`from xxx import`) * Third-party packages. * Django packages. * Other modules from the current package. @@ -804,7 +806,7 @@ is following the conventions. support for Python 2.5) -* Note that we use "new-style` relative imports when the distribution +* Note that we use "new-style" relative imports when the distribution doesn't support Python versions below 2.5 This requires Python 2.5 or later: @@ -896,7 +898,7 @@ Ask Solem ~~~~~~~~~ :github: https://github.com/ask -:twitter: http://twitter.com/#!/asksol +:twitter: https://twitter.com/#!/asksol Asif Saif Uddin ~~~~~~~~~~~~~~~ @@ -920,7 +922,7 @@ Mher Movsisyan ~~~~~~~~~~~~~~ :github: https://github.com/mher -:twitter: http://twitter.com/#!/movsm +:twitter: https://twitter.com/#!/movsm Omer Katz ~~~~~~~~~ @@ -931,7 +933,7 @@ Steeve Morin ~~~~~~~~~~~~ :github: https://github.com/steeve -:twitter: http://twitter.com/#!/steeve +:twitter: https://twitter.com/#!/steeve Website ------- @@ -950,7 +952,7 @@ Jan Henrik Helmers ~~~~~~~~~~~~~~~~~~ :web: http://www.helmersworks.com -:twitter: http://twitter.com/#!/helmers +:twitter: https://twitter.com/#!/helmers .. _packages: @@ -962,7 +964,7 @@ Packages ---------- :git: https://github.com/celery/celery -:CI: http://travis-ci.org/#!/celery/celery +:CI: https://travis-ci.org/#!/celery/celery :Windows-CI: https://ci.appveyor.com/project/ask/celery :PyPI: ``celery`` :docs: http://docs.celeryproject.org @@ -973,7 +975,7 @@ Packages Messaging library. :git: https://github.com/celery/kombu -:CI: http://travis-ci.org/#!/celery/kombu +:CI: https://travis-ci.org/#!/celery/kombu :Windows-CI: https://ci.appveyor.com/project/ask/kombu :PyPI: ``kombu`` :docs: https://kombu.readthedocs.io @@ -984,7 +986,7 @@ Messaging library. Python AMQP 0.9.1 client. :git: https://github.com/celery/py-amqp -:CI: http://travis-ci.org/#!/celery/py-amqp +:CI: https://travis-ci.org/#!/celery/py-amqp :Windows-CI: https://ci.appveyor.com/project/ask/py-amqp :PyPI: ``amqp`` :docs: https://amqp.readthedocs.io @@ -995,7 +997,7 @@ Python AMQP 0.9.1 client. Promise/deferred implementation. :git: https://github.com/celery/vine/ -:CI: http://travis-ci.org/#!/celery/vine/ +:CI: https://travis-ci.org/#!/celery/vine/ :Windows-CI: https://ci.appveyor.com/project/ask/vine :PyPI: ``vine`` :docs: https://vine.readthedocs.io @@ -1007,7 +1009,7 @@ Fork of multiprocessing containing improvements that'll eventually be merged into the Python stdlib. :git: https://github.com/celery/billiard -:CI: http://travis-ci.org/#!/celery/billiard/ +:CI: https://travis-ci.org/#!/celery/billiard/ :Windows-CI: https://ci.appveyor.com/project/ask/billiard :PyPI: ``billiard`` @@ -1017,7 +1019,7 @@ that'll eventually be merged into the Python stdlib. Database-backed Periodic Tasks with admin interface using the Django ORM. :git: https://github.com/celery/django-celery-beat -:CI: http://travis-ci.org/#!/celery/django-celery-beat +:CI: https://travis-ci.org/#!/celery/django-celery-beat :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-beat :PyPI: ``django-celery-beat`` @@ -1027,7 +1029,7 @@ Database-backed Periodic Tasks with admin interface using the Django ORM. Store task results in the Django ORM, or using the Django Cache Framework. :git: https://github.com/celery/django-celery-results -:CI: http://travis-ci.org/#!/celery/django-celery-results +:CI: https://travis-ci.org/#!/celery/django-celery-results :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-results :PyPI: ``django-celery-results`` @@ -1155,7 +1157,7 @@ If this is a new release series then you also need to do the following: * Go to the Read The Docs management interface at: - http://readthedocs.org/projects/celery/?fromdocs=celery + https://readthedocs.org/projects/celery/?fromdocs=celery * Enter "Edit project" @@ -1164,7 +1166,7 @@ following: * Also add the previous version under the "versions" tab. -.. _`mailing-list`: http://groups.google.com/group/celery-users +.. _`mailing-list`: https://groups.google.com/group/celery-users .. _`irc-channel`: http://docs.celeryproject.org/en/latest/getting-started/resources.html#irc diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 201098db2ab..bf3d4274445 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -231,3 +231,14 @@ Alejandro Pernin, 2016/12/23 Yuval Shalev, 2016/12/27 Morgan Doocy, 2017/01/02 Arcadiy Ivanov, 2017/01/08 +Ryan Hiebert, 2017/01/20 +Jianjian Yu, 2017/04/09 +Brian May, 2017/04/10 +Dmytro Petruk, 2017/04/12 +Joey Wilhelm, 2017/04/12 +Yoichi Nakayama, 2017/04/25 +Simon Schmidt, 2017/05/19 +Anthony Lukach, 2017/05/23 +Samuel Dion-Girardeau, 2017/05/29 +Aydin Sen, 2017/06/14 +Preston Moore, 2017/06/18 diff --git a/Changelog b/Changelog index 0e93749e84a..26a95f6f5d7 100644 --- a/Changelog +++ b/Changelog @@ -5,227 +5,325 @@ ================ This document contains change notes for bugfix releases in -the 4.0.x series (latentcall), please see :ref:`whatsnew-4.0` for -an overview of what's new in Celery 4.0. +the 4.1.x series (latentcall), please see :ref:`whatsnew-4.1` for +an overview of what's new in Celery 4.1. -.. _version-4.0.2: +.. _version-4.1.0: -4.0.2 +4.1.0 ===== -:release-date: 2016-12-15 03:40 PM PST -:release-by: Ask Solem +:release-date: 2017-07-25 00:00 PM PST +:release-by: Omer Katz -- **Requirements** - - Now depends on :ref:`Kombu 4.0.2 `. +- **Configuration**: CELERY_SEND_EVENTS instead of CELERYD_SEND_EVENTS for 3.1.x compatibility (#3997) -- **Tasks**: Fixed problem with JSON serialization of `group` - (``keys must be string`` error, Issue #3688). + Contributed by **abhinav nilaratna**. -- **Worker**: Fixed JSON serialization issue when using ``inspect active`` - and friends (Issue #3667). +- **App**: Restore behavior so Broadcast queues work. (#3934) -- **App**: Fixed saferef errors when using signals (Issue #3670). + Contributed by **Patrick Cloke**. -- **Prefork**: Fixed bug with pack requiring bytes argument - on Python 2.7.5 and earlier (Issue #3674). +- **Sphinx**: Make appstr use standard format (#4134) (#4139) -- **Tasks**: Saferepr did not handle unicode in bytestrings on Python 2 - (Issue #3676). + Contributed by **Preston Moore**. -- **Testing**: Added new ``celery_worker_paremeters`` fixture. +- **App**: Make id, name always accessible from logging.Formatter via extra (#3994) - Contributed by **Michael Howitz**. + Contributed by **Yoichi NAKAYAMA**. -- **Tasks**: Added new ``app`` argument to ``GroupResult.restore`` - (Issue #3669). +- **Worker**: Add worker_shutting_down signal (#3998) - This makes the restore method behave the same way as the ``GroupResult`` - constructor. + Contributed by **Daniel Huang**. - Contributed by **Andreas Pelme**. +- **PyPy**: Support PyPy version 5.8.0 (#4128) -- **Tasks**: Fixed type checking crash when task takes ``*args`` on Python 3 - (Issue #3678). + Contributed by **Omer Katz**. -- Documentation and examples improvements by: +- **Results**: Elasticsearch: Fix serializing keys (#3924) - - **BLAGA Razvan-Paul** - - **Michael Howitz** - - :github_user:`paradox41` + Contributed by :github_user:`staticfox`. -.. _version-4.0.1: +- **Canvas**: Deserialize all tasks in a chain (#4015) -4.0.1 -===== -:release-date: 2016-12-08 05:22 PM PST -:release-by: Ask Solem + Contributed by :github_user:`fcoelho`. -* [Security: `CELERYSA-0003`_] Insecure default configuration +- **Systemd**: Recover loglevel for ExecStart in systemd config (#4023) - The default :setting:`accept_content` setting was set to allow - deserialization of pickled messages in Celery 4.0.0. + Contributed by **Yoichi NAKAYAMA**. - The insecure default has been fixed in 4.0.1, and you can also - configure the 4.0.0 version to explicitly only allow json serialized - messages: +- **Sphinx**: Use the Sphinx add_directive_to_domain API. (#4037) - .. code-block:: python + Contributed by **Patrick Cloke**. - app.conf.accept_content = ['json'] +- **App**: Pass properties to before_task_publish signal (#4035) -.. _`CELERYSA-0003`: - https://github.com/celery/celery/tree/master/docs/sec/CELERYSA-0003.txt + Contributed by **Javier Domingo Cansino**. -- **Tasks**: Added new method to register class-based tasks (Issue #3615). +- **Results**: Add SSL option for Redis backends (#3831) - To register a class based task you should now call ``app.register_task``: + Contributed by **Chris Kuehl**. - .. code-block:: python +- **Beat**: celery.schedule.crontab: fix reduce (#3826) (#3827) - from celery import Celery, Task + Contributed by **Taylor C. Richberger**. - app = Celery() +- **State**: Fix celery issues when using flower REST API - class CustomTask(Task): + Contributed by **Thierry RAMORASOAVINA**. - def run(self): - return 'hello' +- **Results**: Elasticsearch: Fix serializing document id. - app.register_task(CustomTask()) + Contributed by **Acey9**. -- **Tasks**: Argument checking now supports keyword-only arguments on Python3 - (Issue #3658). +- **Beat**: Make shallow copy of schedules dictionary - Contributed by :github_user:`sww`. + Contributed by **Brian May**. -- **Tasks**: The ``task-sent`` event was not being sent even if - configured to do so (Issue #3646). +- **Beat**: Populate heap when periodic tasks are changed -- **Worker**: Fixed AMQP heartbeat support for eventlet/gevent pools - (Issue #3649). + Contributed by **Wojciech Żywno**. -- **App**: ``app.conf.humanize()`` would not work if configuration - not finalized (Issue #3652). +- **Task**: Allow class methods to define tasks (#3952) -- **Utils**: ``saferepr`` attempted to show iterables as lists - and mappings as dicts. + Contributed by **georgepsarakis**. -- **Utils**: ``saferepr`` did not handle unicode-errors - when attempting to format ``bytes`` on Python 3 (Issue #3610). +- **Platforms**: Always return boolean value when checking if signal is supported (#3962). -- **Utils**: ``saferepr`` should now properly represent byte strings - with non-ascii characters (Issue #3600). + Contributed by **Jian Yu**. -- **Results**: Fixed bug in elasticsearch where _index method missed - the body argument (Issue #3606). +- **Canvas**: Avoid duplicating chains in chords (#3779) - Fix contributed by **何翔宇** (Sean Ho). + Contributed by **Ryan Hiebert**. -- **Canvas**: Fixed :exc:`ValueError` in chord with single task header - (Issue #3608). +- **Canvas**: Lookup task only if list has items (#3847) - Fix contributed by **Viktor Holmqvist**. + Contributed by **Marc Gibbons**. -- **Task**: Ensure class-based task has name prior to registration - (Issue #3616). +- **Results**: Allow unicode message for exception raised in task (#3903) - Fix contributed by **Rick Wargo**. + Contributed by **George Psarakis**. -- **Beat**: Fixed problem with strings in shelve (Issue #3644). +- **Python3**: Support for Python 3.6 (#3904, #3903, #3736) - Fix contributed by **Alli**. + Contributed by **Jon Dufresne**, **George Psarakis**, **Asif Saifuddin Auvi**, **Omer Katz**. -- **Worker**: Fixed :exc:`KeyError` in ``inspect stats`` when ``-O`` argument - set to something other than ``fast`` or ``fair`` (Issue #3621). +- **App**: Fix retried tasks with expirations (#3790) -- **Task**: Retried tasks were no longer sent to the original queue - (Issue #3622). + Contributed by **Brendan MacDonell**. -- **Worker**: Python 3: Fixed None/int type comparison in - :file:`apps/worker.py` (Issue #3631). +- * Fixes items format route in docs (#3875) -- **Results**: Redis has a new :setting:`redis_socket_connect_timeout` - setting. + Contributed by **Slam**. -- **Results**: Redis result backend passed the ``socket_connect_timeout`` - argument to UNIX socket based connections by mistake, causing a crash. +- **Utils**: Fix maybe_make_aware (#3850) -- **Worker**: Fixed missing logo in worker splash screen when running on - Python 3.x (Issue #3627). + Contributed by **Taylor C. Richberger**. - Fix contributed by **Brian Luan**. +- **Task**: Fix task ETA issues when timezone is defined in configuration (#3867) -- **Deps**: Fixed ``celery[redis]`` bundle installation (Issue #3643). + Contributed by **George Psarakis**. - Fix contributed by **Rémi Marenco**. +- **Concurrency**: Consumer does not shutdown properly when embedded in gevent application (#3746) -- **Deps**: Bundle ``celery[sqs]`` now also requires :pypi:`pycurl` - (Issue #3619). + Contributed by **Arcadiy Ivanov**. -- **Worker**: Hard time limits were no longer being respected (Issue #3618). +- **Canvas**: Fix #3725: Task replaced with group does not complete (#3731) -- **Worker**: Soft time limit log showed ``Trues`` instead of the number - of seconds. + Contributed by **Morgan Doocy**. -- **App**: ``registry_cls`` argument no longer had any effect (Issue #3613). +- **Task**: Correct order in chains with replaced tasks (#3730) -- **Worker**: Event producer now uses ``connection_for_Write`` (Issue #3525). + Contributed by **Morgan Doocy**. -- **Results**: Redis/memcache backends now uses :setting:`result_expires` - to expire chord counter (Issue #3573). +- **Result**: Enable synchronous execution of sub-tasks (#3696) - Contributed by **Tayfun Sen**. + Contributed by **shalev67**. -- **Django**: Fixed command for upgrading settings with Django (Issue #3563). +- **Task**: Fix request context for blocking task apply (added hostname) (#3716) - Fix contributed by **François Voron**. + Contributed by **Marat Sharafutdinov**. -- **Testing**: Added a ``celery_parameters`` test fixture to be able to use - customized ``Celery`` init parameters. (#3626) +- **Utils**: Fix task argument handling (#3678) (#3693) - Contributed by **Steffen Allner**. + Contributed by **Roman Sichny**. -- Documentation improvements contributed by +- **Beat**: Provide a transparent method to update the Scheduler heap (#3721) - - :github_user:`csfeathers` - - **Moussa Taifi** - - **Yuhannaa** - - **Laurent Peuch** - - **Christian** - - **Bruno Alla** - - **Steven Johns** - - :github_user:`tnir` - - **GDR!** + Contributed by **Alejandro Pernin**. -.. _version-4.0.0: +- **Beat**: Specify default value for pidfile option of celery beat. (#3722) -4.0.0 -===== -:release-date: 2016-11-04 02:00 P.M PDT -:release-by: Ask Solem + Contributed by **Arnaud Rocher**. + +- **Results**: Elasticsearch: Stop generating a new field every time when a new result is being put (#3708) + + Contributed by **Mike Chen**. + +- **Requirements** + + - Now depends on :ref:`Kombu 4.1.0 `. + +- **Results**: Elasticsearch now reuses fields when new results are added. + + Contributed by **Mike Chen**. + +- **Results**: Fixed MongoDB integration when using binary encodings + (Issue #3575). + + Contributed by **Andrew de Quincey**. + +- **Worker**: Making missing ``*args`` and ``**kwargs`` in Task protocol 1 +return empty value in protocol 2 (Issue #3687). + + Contributed by **Roman Sichny**. + +- **App**: Fixed :exc:`TypeError` in AMQP when using deprecated signal + (Issue #3707). + + Contributed by :github_user:`michael-k`. + +- **Beat**: Added a transparent method to update the scheduler heap. + + Contributed by **Alejandro Pernin**. + +- **Task**: Fixed handling of tasks with keyword arguments on Python 3 + (Issue #3657). + + Contributed by **Roman Sichny**. + +- **Task**: Fixed request context for blocking task apply by adding missing + hostname attribute. + + Contributed by **Marat Sharafutdinov**. + +- **Task**: Added option to run subtasks synchronously with + ``disable_sync_subtasks`` argument. + + Contributed by :github_user:`shalev67`. + +- **App**: Fixed chaining of replaced tasks (Issue #3726). + + Contributed by **Morgan Doocy**. + +- **Canvas**: Fixed bug where replaced tasks with groups were not completing + (Issue #3725). -See :ref:`whatsnew-4.0` (in :file:`docs/whatsnew-4.0.rst`). + Contributed by **Morgan Doocy**. -.. _version-4.0.0rc7: +- **Worker**: Fixed problem where consumer does not shutdown properly when + embedded in a gevent application (Issue #3745). -4.0.0rc7 -======== -:release-date: 2016-11-02 01:30 P.M PDT + Contributed by **Arcadiy Ivanov**. -Important notes ---------------- +- **Results**: Added support for using AWS DynamoDB as a result backend (#3736). -- Database result backend related setting names changed from - ``sqlalchemy_*`` -> ``database_*``. + Contributed by **George Psarakis**. - The ``sqlalchemy_`` named settings won't work at all in this - version so you need to rename them. This is a last minute change, - and as they were not supported in 3.1 we will not be providing - aliases. +- **Testing**: Added caching on pip installs. -- ``chain(A, B, C)`` now works the same way as ``A | B | C``. + Contributed by :github_user:`orf`. - This means calling ``chain()`` might not actually return a chain, - it can return a group or any other type depending on how the - workflow can be optimized. +- **Worker**: Prevent consuming queue before ready on startup (Issue #3620). + + Contributed by **Alan Hamlett**. + +- **App**: Fixed task ETA issues when timezone is defined in configuration + (Issue #3753). + + Contributed by **George Psarakis**. + +- **Utils**: ``maybe_make_aware`` should not modify datetime when it is + already timezone-aware (Issue #3849). + + Contributed by **Taylor C. Richberger**. + +- **App**: Fixed retrying tasks with expirations (Issue #3734). + + Contributed by **Brendan MacDonell**. + +- **Results**: Allow unicode message for exceptions raised in task + (Issue #3858). + + Contributed by :github_user:`staticfox`. + +- **Canvas**: Fixed :exc:`IndexError` raised when chord has an empty header. + + Contributed by **Marc Gibbons**. + +- **Canvas**: Avoid duplicating chains in chords (Issue #3771). + + Contributed by **Ryan Hiebert** and **George Psarakis**. + +- **Utils**: Allow class methods to define tasks (Issue #3863). + + Contributed by **George Psarakis**. + +- **Beat**: Populate heap when periodic tasks are changed. + + Contributed by :github_user:`wzywno` and **Brian May**. + +- **Results**: Added support for Elasticsearch backend options settings. + + Contributed by :github_user:`Acey9`. + +- **Events**: Ensure ``Task.as_dict()`` works when not all information about + task is available. + + Contributed by :github_user:`tramora`. + +- **Schedules**: Fixed pickled crontab schedules to restore properly (Issue #3826). + + Contributed by **Taylor C. Richberger**. + +- **Results**: Added SSL option for redis backends (Issue #3830). + + Contributed by **Chris Kuehl**. + +- Documentation and examples improvements by: + + - **Bruno Alla** + - **Jamie Alessio** + - **Vivek Anand** + - **Peter Bittner** + - **Kalle Bronsen** + - **Jon Dufresne** + - **James Michael DuPont** + - **Sergey Fursov** + - **Samuel Dion-Girardeau** + - **Daniel Hahler** + - **Mike Helmick** + - **Marc Hörsken** + - **Christopher Hoskin** + - **Daniel Huang** + - **Primož Kerin** + - **Michal Kuffa** + - **Simon Legner** + - **Anthony Lukach** + - **Ed Morley** + - **Jay McGrath** + - **Rico Moorman** + - **Viraj Navkal** + - **Ross Patterson** + - **Dmytro Petruk** + - **Luke Plant** + - **Eric Poelke** + - **Salvatore Rinchiera** + - **Arnaud Rocher** + - **Kirill Romanov** + - **Simon Schmidt** + - **Tamer Sherif** + - **YuLun Shih** + - **Ask Solem** + - **Tom 'Biwaa' Riat** + - **Arthur Vigil** + - **Joey Wilhelm** + - **Jian Yu** + - **YuLun Shih** + - **Arthur Vigil** + - **Joey Wilhelm** + - :github_user:`baixuexue123` + - :github_user:`bronsen` + - :github_user:`michael-k` + - :github_user:`orf` + - :github_user:`3lnc` diff --git a/LICENSE b/LICENSE index 06221a278e7..c0fdb269466 100644 --- a/LICENSE +++ b/LICENSE @@ -42,7 +42,7 @@ The documentation portion of Celery (the rendered contents of the "docs" directory of a software distribution or checkout) is supplied under the "Creative Commons Attribution-ShareAlike 4.0 International" (CC BY-SA 4.0) License as described by -http://creativecommons.org/licenses/by-sa/4.0/ +https://creativecommons.org/licenses/by-sa/4.0/ Footnotes ========= diff --git a/README.rst b/README.rst index 9c47fce85f2..36b1a364e9b 100644 --- a/README.rst +++ b/README.rst @@ -2,9 +2,9 @@ |build-status| |license| |wheel| |pyversion| |pyimp| -:Version: 4.0.2 (latentcall) +:Version: 4.1.0 (latentcall) :Web: http://celeryproject.org/ -:Download: http://pypi.python.org/pypi/celery/ +:Download: https://pypi.python.org/pypi/celery/ :Source: https://github.com/celery/celery/ :Keywords: task, queue, job, async, rabbitmq, amqp, redis, python, distributed, actors @@ -154,8 +154,8 @@ It supports... .. _`Eventlet`: http://eventlet.net/ .. _`gevent`: http://gevent.org/ -.. _RabbitMQ: http://rabbitmq.com -.. _Redis: http://redis.io +.. _RabbitMQ: https://rabbitmq.com +.. _Redis: https://redis.io .. _SQLAlchemy: http://sqlalchemy.org Framework Integration @@ -182,15 +182,15 @@ The integration packages aren't strictly necessary, but they can make development easier, and sometimes they add important hooks like closing database connections at ``fork``. -.. _`Django`: http://djangoproject.com/ +.. _`Django`: https://djangoproject.com/ .. _`Pylons`: http://pylonsproject.org/ .. _`Flask`: http://flask.pocoo.org/ .. _`web2py`: http://web2py.com/ -.. _`Bottle`: http://bottlepy.org/ +.. _`Bottle`: https://bottlepy.org/ .. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html -.. _`pyramid_celery`: http://pypi.python.org/pypi/pyramid_celery/ -.. _`celery-pylons`: http://pypi.python.org/pypi/celery-pylons -.. _`web2py-celery`: http://code.google.com/p/web2py-celery/ +.. _`pyramid_celery`: https://pypi.python.org/pypi/pyramid_celery/ +.. _`celery-pylons`: https://pypi.python.org/pypi/celery-pylons +.. _`web2py-celery`: https://code.google.com/p/web2py-celery/ .. _`Tornado`: http://www.tornadoweb.org/ .. _`tornado-celery`: https://github.com/mher/tornado-celery/ @@ -323,7 +323,7 @@ Downloading and installing from source Download the latest version of Celery from PyPI: -http://pypi.python.org/pypi/celery/ +https://pypi.python.org/pypi/celery/ You can install it by doing the following,: @@ -364,7 +364,7 @@ pip commands: With git ~~~~~~~~ -Please the Contributing section. +Please see the Contributing section. .. _getting-help: @@ -379,7 +379,7 @@ Mailing list For discussions about the usage, development, and future of Celery, please join the `celery-users`_ mailing list. -.. _`celery-users`: http://groups.google.com/group/celery-users/ +.. _`celery-users`: https://groups.google.com/group/celery-users/ .. _irc-channel: @@ -389,7 +389,7 @@ IRC Come chat with us on IRC. The **#celery** channel is located at the `Freenode`_ network. -.. _`Freenode`: http://freenode.net +.. _`Freenode`: https://freenode.net .. _bug-tracker: @@ -404,7 +404,7 @@ to our issue tracker at https://github.com/celery/celery/issues/ Wiki ==== -http://wiki.github.com/celery/celery/ +https://wiki.github.com/celery/celery/ .. _contributing-short: @@ -446,13 +446,13 @@ file in the top distribution directory for the full license text. .. |wheel| image:: https://img.shields.io/pypi/wheel/celery.svg :alt: Celery can be installed via wheel - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/celery.svg :alt: Supported Python versions. - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/celery.svg :alt: Support Python implementations. - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ diff --git a/TESTING.md b/TESTING.md new file mode 100644 index 00000000000..aa7cd5ec1a8 --- /dev/null +++ b/TESTING.md @@ -0,0 +1,41 @@ +# Testing Setup + +It's been some time since the last Celery version update, so the default versions of packages used +have now progressed considerably and in many cases broke the test system. + +Therefore we strongly suggest using a Python virtual environment with a specific Python version +like so: + +```bash +# If you don't have mkvirtualenv, do `pip install virtualenvwrapper` + +mkvirtualenv --python=/usr/local/bin/python3.6 celery +cd celery +workon celery +``` + +You must run a 3.6.x version of Python, I'm running the official Python 3.6.8 +distribution for Mac computers for example. + +# Package versions + +We must freeze the following packages in time, since they changed considerably since Celery +version 4.0.2 which is what we're running as of this writing. + +```bash +pip install eventlet==0.20.0 celery==4.0.2 kombu==4.1.0 pytest==3.10.1 vine==1.3.0 +``` + +Then proceed to install the regular requirements + +```bash +python setup.py develop +pip install -U -r requirements/default.txt requirements/test.txt requirements/deps/mock.txt +``` + +Then, you should be ready to run the tests from root celery directory: + +```bash +pytest t/unit +``` + diff --git a/appveyor.yml b/appveyor.yml index efcfc1c2703..b71d7007157 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -3,14 +3,14 @@ environment: global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter - # See: http://stackoverflow.com/a/13751649/163740 + # See: https://stackoverflow.com/a/13751649/163740 WITH_COMPILER: "cmd /E:ON /V:ON /C .\\extra\\appveyor\\run_with_compiler.cmd" matrix: # Pre-installed Python versions, which Appveyor may upgrade to # a later point release. - # See: http://www.appveyor.com/docs/installed-software#python + # See: https://www.appveyor.com/docs/installed-software#python - PYTHON: "C:\\Python27" PYTHON_VERSION: "2.7.x" diff --git a/celery/__init__.py b/celery/__init__.py index 13a5639fcee..2179af81118 100644 --- a/celery/__init__.py +++ b/celery/__init__.py @@ -14,7 +14,7 @@ SERIES = 'latentcall' -__version__ = '4.0.2' +__version__ = '4.1.0+sm3' __author__ = 'Ask Solem' __contact__ = 'ask@celeryproject.org' __homepage__ = 'http://celeryproject.org' diff --git a/celery/app/amqp.py b/celery/app/amqp.py index d37a8073717..ff26d4a34be 100644 --- a/celery/app/amqp.py +++ b/celery/app/amqp.py @@ -21,7 +21,7 @@ from celery.utils.nodenames import anon_nodename from celery.utils.saferepr import saferepr from celery.utils.text import indent as textindent -from celery.utils.time import maybe_make_aware, to_utc +from celery.utils.time import maybe_make_aware from . import routes as _routes @@ -331,7 +331,9 @@ def as_task_v2(self, task_id, name, args=None, kwargs=None, now + timedelta(seconds=expires), tz=timezone, ) eta = eta and eta.isoformat() - expires = expires and expires.isoformat() + # If we retry a task `expires` will already be ISO8601-formatted. + if not isinstance(expires, string_t): + expires = expires and expires.isoformat() if argsrepr is None: argsrepr = saferepr(args, self.argsrepr_maxsize) @@ -407,17 +409,11 @@ def as_task_v1(self, task_id, name, args=None, kwargs=None, if countdown: # convert countdown to ETA self._verify_seconds(countdown, 'countdown') now = now or self.app.now() - timezone = timezone or self.app.timezone eta = now + timedelta(seconds=countdown) - if utc: - eta = to_utc(eta).astimezone(timezone) if isinstance(expires, numbers.Real): self._verify_seconds(expires, 'expires') now = now or self.app.now() - timezone = timezone or self.app.timezone expires = now + timedelta(seconds=expires) - if utc: - expires = to_utc(expires).astimezone(timezone) eta = eta and eta.isoformat() expires = expires and expires.isoformat() @@ -525,7 +521,7 @@ def send_task_message(producer, name, message, exchange_type = 'direct' # convert to anon-exchange, when exchange not set and direct ex. - if not exchange or not routing_key and exchange_type == 'direct': + if (not exchange or not routing_key) and exchange_type == 'direct': exchange, routing_key = '', qname elif exchange is None: # not topic exchange, and exchange not undefined @@ -544,7 +540,7 @@ def send_task_message(producer, name, message, sender=name, body=body, exchange=exchange, routing_key=routing_key, declare=declare, headers=headers2, - properties=kwargs, retry_policy=retry_policy, + properties=properties, retry_policy=retry_policy, ) ret = producer.publish( body, diff --git a/celery/app/base.py b/celery/app/base.py index 74a0f441695..d6a23f94317 100644 --- a/celery/app/base.py +++ b/celery/app/base.py @@ -870,7 +870,8 @@ def prepare_config(self, c): def now(self): """Return the current time and date as a datetime.""" - return self.loader.now(utc=self.conf.enable_utc) + from datetime import datetime + return datetime.utcnow().replace(tzinfo=self.timezone) def select_queues(self, queues=None): """Select subset of queues. @@ -1231,6 +1232,10 @@ def tasks(self): def producer_pool(self): return self.amqp.producer_pool + def uses_utc_timezone(self): + """Check if the application uses the UTC timezone.""" + return self.conf.timezone == 'UTC' or self.conf.timezone is None + @cached_property def timezone(self): """Current timezone for this app. @@ -1239,9 +1244,12 @@ def timezone(self): :setting:`timezone` setting. """ conf = self.conf - tz = conf.timezone + tz = conf.timezone or 'UTC' if not tz: - return (timezone.get_timezone('UTC') if conf.enable_utc - else timezone.local) - return timezone.get_timezone(conf.timezone) + if conf.enable_utc: + return timezone.get_timezone('UTC') + else: + if not conf.timezone: + return timezone.local + return timezone.get_timezone(tz) App = Celery # noqa: E305 XXX compat diff --git a/celery/app/defaults.py b/celery/app/defaults.py index ac65d19e9e8..c517dee66ad 100644 --- a/celery/app/defaults.py +++ b/celery/app/defaults.py @@ -152,6 +152,7 @@ def __repr__(self): redis=Namespace( __old__=old_ns('celery_redis'), + backend_use_ssl=Option(type='dict'), db=Option(type='int'), host=Option(type='string'), max_connections=Option(type='int'), @@ -178,6 +179,13 @@ def __repr__(self): persistent=Option(None, type='bool'), serializer=Option('json'), ), + elasticsearch=Namespace( + __old__=old_ns('celery_elasticsearch'), + + retry_on_timeout=Option(type='bool'), + max_retries=Option(type='int'), + timeout=Option(type='float'), + ), riak=Namespace( __old__=old_ns('celery_riak'), @@ -277,7 +285,7 @@ def __repr__(self): 'WARNING', old={'celery_redirect_stdouts_level'}, ), send_task_events=Option( - False, type='bool', old={'celeryd_send_events'}, + False, type='bool', old={'celery_send_events'}, ), state_db=Option(), task_log_format=Option(DEFAULT_TASK_LOG_FMT), diff --git a/celery/app/routes.py b/celery/app/routes.py index ca3e6004724..1543d63e6f3 100644 --- a/celery/app/routes.py +++ b/celery/app/routes.py @@ -14,7 +14,13 @@ from celery.utils.functional import maybe_evaluate, mlazy from celery.utils.imports import symbol_by_name -__all__ = ['MapRoute', 'Router', 'prepare'] +try: + Pattern = re._pattern_type +except AttributeError: # pragma: no cover + # for support Python 3.7 + Pattern = re.Pattern + +__all__ = ('MapRoute', 'Router', 'prepare') def glob_to_re(glob, quote=string.punctuation.replace('*', '')): @@ -30,7 +36,7 @@ def __init__(self, map): self.map = {} self.patterns = OrderedDict() for k, v in map: - if isinstance(k, re._pattern_type): + if isinstance(k, Pattern): self.patterns[k] = v elif '*' in k: self.patterns[re.compile(glob_to_re(k))] = v diff --git a/celery/app/task.py b/celery/app/task.py index 8a5c83168b8..e0fb1f574e1 100644 --- a/celery/app/task.py +++ b/celery/app/task.py @@ -605,7 +605,7 @@ def retry(self, args=None, kwargs=None, exc=None, throw=True, Arguments: args (Tuple): Positional arguments to retry with. kwargs (Dict): Keyword arguments to retry with. - exc (Exception): Custom exception to report when the max restart + exc (Exception): Custom exception to report when the max retry limit has been exceeded (default: :exc:`~@MaxRetriesExceededError`). diff --git a/celery/app/trace.py b/celery/app/trace.py index 362880b0148..09f23714c49 100644 --- a/celery/app/trace.py +++ b/celery/app/trace.py @@ -49,7 +49,6 @@ ] logger = get_logger(__name__) -info = logger.info #: Format string used to log task success. LOG_SUCCESS = """\ @@ -116,6 +115,14 @@ trace_ok_t = namedtuple('trace_ok_t', ('retval', 'info', 'runtime', 'retstr')) +def info(fmt, context): + """Log 'fmt % context' with severity 'INFO'. + + 'context' is also passed in extra with key 'data' for custom handlers. + """ + logger.info(fmt, context, extra={'data': context}) + + def task_has_custom(task, attr): """Return true if the task overrides ``attr``.""" return mro_lookup(task.__class__, attr, stop={BaseTask, object}, @@ -189,13 +196,14 @@ def handle_retry(self, task, req, store_errors=True, **kwargs): def handle_failure(self, task, req, store_errors=True, call_errbacks=True): """Handle exception.""" _, _, tb = sys.exc_info() - try: - exc = self.retval - # make sure we only send pickleable exceptions back to parent. - einfo = ExceptionInfo() - einfo.exception = get_pickleable_exception(einfo.exception) - einfo.type = get_pickleable_etype(einfo.type) + exc = self.retval + # make sure we only send pickleable exceptions back to parent. + einfo = ExceptionInfo() + einfo.exception = get_pickleable_exception(einfo.exception) + einfo.type = get_pickleable_etype(einfo.type) + + try: task.backend.mark_as_failure( req.id, exc, einfo.traceback, request=req, store_result=store_errors, @@ -209,6 +217,16 @@ def handle_failure(self, task, req, store_errors=True, call_errbacks=True): traceback=tb, einfo=einfo) self._log_error(task, req, einfo) + return einfo + except Exception as fail_exc: + _, _, tmp_tb = sys.exc_info() + einfo = ExceptionInfo() + try: + report_internal_error(task, fail_exc) + self._log_error(task, req, einfo) + finally: + del tmp_tb + return einfo finally: del tb diff --git a/celery/app/utils.py b/celery/app/utils.py index 75c88aa9b5e..32aaf44c10b 100644 --- a/celery/app/utils.py +++ b/celery/app/utils.py @@ -70,7 +70,7 @@ def appstr(app): """String used in __repr__ etc, to id app instances.""" - return '{0}:{1:#x}'.format(app.main or '__main__', id(app)) + return '{0} at {1:#x}'.format(app.main or '__main__', id(app)) class Settings(ConfigurationView): diff --git a/celery/apps/worker.py b/celery/apps/worker.py index 1865e3056b5..4cda39bbce2 100644 --- a/celery/apps/worker.py +++ b/celery/apps/worker.py @@ -277,6 +277,10 @@ def _handle_request(*args): if callback: callback(worker) safe_say('worker: {0} shutdown (MainProcess)'.format(how)) + signals.worker_shutting_down.send( + sender=worker.hostname, sig=sig, how=how, + exitcode=exitcode, + ) if active_thread_count() > 1: setattr(state, {'Warm': 'should_stop', 'Cold': 'should_terminate'}[how], exitcode) diff --git a/celery/backends/async.py b/celery/backends/asynchronous.py similarity index 100% rename from celery/backends/async.py rename to celery/backends/asynchronous.py diff --git a/celery/backends/base.py b/celery/backends/base.py index fc799333ac4..b14c7587e75 100644 --- a/celery/backends/base.py +++ b/celery/backends/base.py @@ -29,7 +29,7 @@ from celery.exceptions import ( ChordError, TimeoutError, TaskRevokedError, ImproperlyConfigured, ) -from celery.five import items +from celery.five import items, string from celery.result import ( GroupResult, ResultBase, allow_join_result, result_from_tuple, ) @@ -237,7 +237,7 @@ def prepare_exception(self, exc, serializer=None): serializer = self.serializer if serializer is None else serializer if serializer in EXCEPTION_ABLE_CODECS: return get_pickleable_exception(exc) - return {'exc_type': type(exc).__name__, 'exc_message': str(exc)} + return {'exc_type': type(exc).__name__, 'exc_message': string(exc)} def exception_to_python(self, exc): """Convert serialized exception to Python exception.""" diff --git a/celery/backends/database/__init__.py b/celery/backends/database/__init__.py index ec10c388130..aa67deda60b 100644 --- a/celery/backends/database/__init__.py +++ b/celery/backends/database/__init__.py @@ -24,7 +24,7 @@ except ImportError: # pragma: no cover raise ImproperlyConfigured( 'The database result backend requires SQLAlchemy to be installed.' - 'See http://pypi.python.org/pypi/SQLAlchemy') + 'See https://pypi.python.org/pypi/SQLAlchemy') logger = logging.getLogger(__name__) diff --git a/celery/backends/elasticsearch.py b/celery/backends/elasticsearch.py index fba2246ff08..3f0445db8ab 100644 --- a/celery/backends/elasticsearch.py +++ b/celery/backends/elasticsearch.py @@ -3,7 +3,9 @@ from __future__ import absolute_import, unicode_literals from datetime import datetime from kombu.utils.url import _parse_url +from kombu.utils.encoding import bytes_to_str from celery.exceptions import ImproperlyConfigured +from celery.five import items from .base import KeyValueStoreBackend try: import elasticsearch @@ -31,10 +33,14 @@ class ElasticsearchBackend(KeyValueStoreBackend): scheme = 'http' host = 'localhost' port = 9200 + es_retry_on_timeout = False + es_timeout = 10 + es_max_retries = 3 def __init__(self, url=None, *args, **kwargs): super(ElasticsearchBackend, self).__init__(*args, **kwargs) self.url = url + _get = self.app.conf.get if elasticsearch is None: raise ImproperlyConfigured(E_LIB_MISSING) @@ -53,6 +59,18 @@ def __init__(self, url=None, *args, **kwargs): self.host = host or self.host self.port = port or self.port + self.es_retry_on_timeout = ( + _get('elasticsearch_retry_on_timeout') or self.es_retry_on_timeout + ) + + es_timeout = _get('elasticsearch_timeout') + if es_timeout is not None: + self.es_timeout = es_timeout + + es_max_retries = _get('elasticsearch_max_retries') + if es_max_retries is not None: + self.es_max_retries = es_max_retries + self._server = None def get(self, key): @@ -88,7 +106,9 @@ def set(self, key, value): self._index(key, data, refresh=True) def _index(self, id, body, **kwargs): + body = {bytes_to_str(k): v for k, v in items(body)} return self.server.index( + id=bytes_to_str(id), index=self.index, doc_type=self.doc_type, body=body, @@ -103,7 +123,12 @@ def delete(self, key): def _get_server(self): """Connect to the Elasticsearch server.""" - return elasticsearch.Elasticsearch('%s:%s' % (self.host, self.port)) + return elasticsearch.Elasticsearch( + '%s:%s' % (self.host, self.port), + retry_on_timeout=self.es_retry_on_timeout, + max_retries=self.es_max_retries, + timeout=self.es_timeout + ) @property def server(self): diff --git a/celery/backends/redis.py b/celery/backends/redis.py index 2acab48f903..a390e9f1390 100644 --- a/celery/backends/redis.py +++ b/celery/backends/redis.py @@ -18,8 +18,14 @@ from celery.utils.log import get_logger from celery.utils.time import humanize_seconds -from . import async -from . import base +from .asynchronous import AsyncBackendMixin, BaseResultConsumer +from .base import BaseKeyValueStoreBackend + +try: + from urllib.parse import unquote +except ImportError: + # Python 2 + from urlparse import unquote try: import redis @@ -40,8 +46,8 @@ logger = get_logger(__name__) -class ResultConsumer(async.BaseResultConsumer): +class ResultConsumer(BaseResultConsumer): _pubsub = None def __init__(self, *args, **kwargs): @@ -88,7 +94,7 @@ def cancel_for(self, task_id): self._pubsub.unsubscribe(key) -class RedisBackend(base.BaseKeyValueStoreBackend, async.AsyncBackendMixin): +class RedisBackend(BaseKeyValueStoreBackend, AsyncBackendMixin): """Redis task result store.""" ResultConsumer = ResultConsumer @@ -132,6 +138,15 @@ def __init__(self, host=None, port=None, db=None, password=None, 'socket_connect_timeout': socket_connect_timeout and float(socket_connect_timeout), } + + # "redis_backend_use_ssl" must be a dict with the keys: + # 'ssl_cert_reqs', 'ssl_ca_certs', 'ssl_certfile', 'ssl_keyfile' + # (the same as "broker_use_ssl") + ssl = _get('redis_backend_use_ssl') + if ssl: + self.connparams.update(ssl) + self.connparams['connection_class'] = redis.SSLConnection + if url: self.connparams = self._params_from_url(url, self.connparams) self.url = url diff --git a/celery/backends/rpc.py b/celery/backends/rpc.py index f5a3dcea8ec..41322f98d19 100644 --- a/celery/backends/rpc.py +++ b/celery/backends/rpc.py @@ -17,7 +17,7 @@ from celery.five import items, range from . import base -from .async import AsyncBackendMixin, BaseResultConsumer +from .asynchronous import AsyncBackendMixin, BaseResultConsumer __all__ = ['BacklogLimitExceeded', 'RPCBackend'] diff --git a/celery/beat.py b/celery/beat.py index 36cccb6892e..217a9441aeb 100644 --- a/celery/beat.py +++ b/celery/beat.py @@ -2,6 +2,7 @@ """The periodic task scheduler.""" from __future__ import absolute_import, unicode_literals +import copy import errno import heapq import os @@ -197,6 +198,7 @@ def __init__(self, app, schedule=None, max_interval=None, self.max_interval) self.Producer = Producer or app.amqp.Producer self._heap = None + self.old_schedulers = None self.sync_every_tasks = ( app.conf.beat_sync_every if sync_every_tasks is None else sync_every_tasks) @@ -257,7 +259,9 @@ def tick(self, event_t=event_t, min=min, heappop=heapq.heappop, adjust = self.adjust max_interval = self.max_interval - if self._heap is None: + if (self._heap is None or + not self.schedules_equal(self.old_schedulers, self.schedule)): + self.old_schedulers = copy.copy(self.schedule) self.populate_heap() H = self._heap @@ -281,6 +285,15 @@ def tick(self, event_t=event_t, min=min, heappop=heapq.heappop, return min(verify[0], max_interval) return min(adjust(next_time_to_run) or max_interval, max_interval) + def schedules_equal(self, old_schedules, new_schedules): + if set(old_schedules.keys()) != set(new_schedules.keys()): + return False + for name, old_entry in old_schedules.items(): + new_entry = new_schedules.get(name) + if not new_entry or old_entry.schedule != new_entry.schedule: + return False + return True + def should_sync(self): return ( (not self._last_sync or diff --git a/celery/bin/graph.py b/celery/bin/graph.py index 22fe46c4ad0..9a2841bf98c 100644 --- a/celery/bin/graph.py +++ b/celery/bin/graph.py @@ -161,7 +161,7 @@ def maybe_abbr(l, name, max=Wmax): workers = args['nodes'] threads = args.get('threads') or [] except KeyError: - replies = self.app.control.inspect().stats() + replies = self.app.control.inspect().stats() or {} workers, threads = [], [] for worker, reply in items(replies): workers.append(worker) diff --git a/celery/canvas.py b/celery/canvas.py index 085546a895c..9c393f7e92b 100644 --- a/celery/canvas.py +++ b/celery/canvas.py @@ -531,8 +531,7 @@ def from_dict(cls, d, app=None): if tasks: if isinstance(tasks, tuple): # aaaargh tasks = d['kwargs']['tasks'] = list(tasks) - # First task must be signature object to get app - tasks[0] = maybe_signature(tasks[0], app=app) + tasks = [maybe_signature(task, app=app) for task in tasks] return _upgrade(d, _chain(tasks, app=app, **d['options'])) def __init__(self, *tasks, **options): @@ -926,9 +925,9 @@ class group(Signature): [4, 8] Arguments: - *tasks (Signature): A list of signatures that this group will call. - If there's only one argument, and that argument is an iterable, - then that'll define the list of signatures instead. + *tasks (List[Signature]): A list of signatures that this group will + call. If there's only one argument, and that argument is an + iterable, then that'll define the list of signatures instead. **options (Any): Execution options applied to all tasks in the group. @@ -1282,6 +1281,9 @@ def run(self, header, body, partial_args, app=None, interval=None, group_id=group_id, chord=body, root_id=root_id).results bodyres = body.freeze(task_id, root_id=root_id) + # Chains should not be passed to the header tasks. See #3771 + options.pop('chain', None) + parent = app.backend.apply_chord( header, partial_args, group_id, body, interval=interval, countdown=countdown, @@ -1337,7 +1339,8 @@ def _get_app(self, body=None): tasks = self.tasks.tasks # is a group except AttributeError: tasks = self.tasks - app = tasks[0]._app + if len(tasks): + app = tasks[0]._app if app is None and body is not None: app = body._app return app if app is not None else current_app diff --git a/celery/concurrency/asynpool.py b/celery/concurrency/asynpool.py index 7a0c10841cd..c8a299ef647 100644 --- a/celery/concurrency/asynpool.py +++ b/celery/concurrency/asynpool.py @@ -35,7 +35,7 @@ from billiard import pool as _pool from billiard.compat import buf_t, setblocking, isblocking from billiard.queues import _SimpleQueue -from kombu.async import WRITE, ERR +from kombu.asynchronous import ERR, WRITE from kombu.serialization import pickle as _pickle from kombu.utils.eventio import SELECT_BAD_FD from kombu.utils.functional import fxrange diff --git a/celery/concurrency/eventlet.py b/celery/concurrency/eventlet.py index 893686d8128..9bd1bd0b9a8 100644 --- a/celery/concurrency/eventlet.py +++ b/celery/concurrency/eventlet.py @@ -2,6 +2,8 @@ """Eventlet execution pool.""" from __future__ import absolute_import, unicode_literals import sys + +from kombu.asynchronous import timer as _timer # noqa from kombu.five import monotonic __all__ = ['TaskPool'] @@ -22,7 +24,6 @@ # idiotic pep8.py does not allow expressions before imports # so have to silence errors here -from kombu.async import timer as _timer # noqa from celery import signals # noqa diff --git a/celery/concurrency/gevent.py b/celery/concurrency/gevent.py index d24ac52c7c0..ac037bffc41 100644 --- a/celery/concurrency/gevent.py +++ b/celery/concurrency/gevent.py @@ -1,7 +1,8 @@ # -*- coding: utf-8 -*- """Gevent execution pool.""" from __future__ import absolute_import, unicode_literals -from kombu.async import timer as _timer + +from kombu.asynchronous import timer as _timer from kombu.five import monotonic from . import base try: @@ -31,7 +32,7 @@ def apply_timeout(target, args=(), kwargs={}, callback=None, class Timer(_timer.Timer): def __init__(self, *args, **kwargs): - from gevent.greenlet import Greenlet, GreenletExit + from gevent import Greenlet, GreenletExit class _Greenlet(Greenlet): cancel = Greenlet.kill diff --git a/celery/contrib/abortable.py b/celery/contrib/abortable.py index 989a8550c03..cd2d8e745b3 100644 --- a/celery/contrib/abortable.py +++ b/celery/contrib/abortable.py @@ -107,7 +107,7 @@ def myview(request): class AbortableAsyncResult(AsyncResult): - """Represents a abortable result. + """Represents an abortable result. Specifically, this gives the `AsyncResult` a :meth:`abort()` method, that sets the state of the underlying Task to `'ABORTED'`. diff --git a/celery/contrib/pytest.py b/celery/contrib/pytest.py index 5dbcd6ba68c..810f0a113af 100644 --- a/celery/contrib/pytest.py +++ b/celery/contrib/pytest.py @@ -60,7 +60,7 @@ def celery_session_app(request, use_celery_app_trap): # type: (Any) -> Celery """Session Fixture: Return app for session fixtures.""" - mark = request.node.get_marker('celery') + mark = request.node.get_closest_marker('celery') config = dict(celery_config, **mark.kwargs if mark else {}) with _create_app(request, enable_logging=celery_enable_logging, @@ -161,7 +161,7 @@ def celery_app(request, celery_enable_logging, use_celery_app_trap): """Fixture creating a Celery application instance.""" - mark = request.node.get_marker('celery') + mark = request.node.get_closest_marker('celery') config = dict(celery_config, **mark.kwargs if mark else {}) with _create_app(request, enable_logging=celery_enable_logging, diff --git a/celery/contrib/rdb.py b/celery/contrib/rdb.py index ed6bb5bdc24..4f3cca98485 100644 --- a/celery/contrib/rdb.py +++ b/celery/contrib/rdb.py @@ -5,7 +5,7 @@ ============ This is a remote debugger for Celery tasks running in multiprocessing -pool workers. Inspired by http://snippets.dzone.com/posts/show/7248 +pool workers. Inspired by a lost post on dzone.com. Usage ----- diff --git a/celery/contrib/sphinx.py b/celery/contrib/sphinx.py index d6e8525e3ca..ae17f80be30 100644 --- a/celery/contrib/sphinx.py +++ b/celery/contrib/sphinx.py @@ -69,5 +69,5 @@ def get_signature_prefix(self, sig): def setup(app): """Setup Sphinx extension.""" app.add_autodocumenter(TaskDocumenter) - app.domains['py'].directives['task'] = TaskDirective + app.add_directive_to_domain('py', 'task', TaskDirective) app.add_config_value('celery_task_prefix', '(task)', True) diff --git a/celery/events/state.py b/celery/events/state.py index effdaa63e43..fd6edd48746 100644 --- a/celery/events/state.py +++ b/celery/events/state.py @@ -293,11 +293,14 @@ class Task(object): def __init__(self, uuid=None, cluster_state=None, children=None, **kwargs): self.uuid = uuid self.cluster_state = cluster_state - self.children = WeakSet( - self.cluster_state.tasks.get(task_id) - for task_id in children or () - if task_id in self.cluster_state.tasks - ) + if self.cluster_state is not None: + self.children = WeakSet( + self.cluster_state.tasks.get(task_id) + for task_id in children or () + if task_id in self.cluster_state.tasks + ) + else: + self.children = WeakSet() self._serializer_handlers = { 'children': self._serializable_children, 'root': self._serializable_root, @@ -384,11 +387,19 @@ def ready(self): @cached_property def parent(self): - return self.parent_id and self.cluster_state.tasks[self.parent_id] + # issue github.com/mher/flower/issues/648 + try: + return self.parent_id and self.cluster_state.tasks[self.parent_id] + except KeyError: + return None @cached_property def root(self): - return self.root_id and self.cluster_state.tasks[self.root_id] + # issue github.com/mher/flower/issues/648 + try: + return self.root_id and self.cluster_state.tasks[self.root_id] + except KeyError: + return None class State(object): diff --git a/celery/fixups/django.py b/celery/fixups/django.py index b7935ce4a53..9c92d9d7268 100644 --- a/celery/fixups/django.py +++ b/celery/fixups/django.py @@ -166,7 +166,7 @@ def on_task_prerun(self, sender, **kwargs): self.close_database() def on_task_postrun(self, sender, **kwargs): - # See http://groups.google.com/group/django-users/ + # See https://groups.google.com/group/django-users/ # browse_thread/thread/78200863d0c07c6d/ if not getattr(sender.request, 'is_eager', False): self.close_database() diff --git a/celery/platforms.py b/celery/platforms.py index 9e579353688..bd7ae58ea9f 100644 --- a/celery/platforms.py +++ b/celery/platforms.py @@ -632,9 +632,11 @@ def reset_alarm(self): def supported(self, name): """Return true value if signal by ``name`` exists on this platform.""" try: - return self.signum(name) + self.signum(name) except AttributeError: - pass + return False + else: + return True def signum(self, name): """Get signal number by name.""" diff --git a/celery/result.py b/celery/result.py index 63ec1c73a70..81c9709e256 100644 --- a/celery/result.py +++ b/celery/result.py @@ -273,7 +273,7 @@ def iterdeps(self, intermediate=False): raise IncompleteStream() def ready(self): - """Return :const:`True` if the task started executing. + """Return :const:`True` if the task has executed. If the task is still running, pending, or is waiting for retry then :const:`False` is returned. diff --git a/celery/schedules.py b/celery/schedules.py index 4a72fa4a411..eefe1115817 100644 --- a/celery/schedules.py +++ b/celery/schedules.py @@ -101,6 +101,11 @@ def to_local(self, dt): return timezone.to_local_fallback(dt) return dt + def __eq__(self, other): + if isinstance(other, BaseSchedule): + return other.nowfun == self.nowfun + return NotImplemented + @python_2_unicode_compatible class schedule(BaseSchedule): @@ -398,6 +403,7 @@ def __init__(self, minute='*', hour='*', day_of_week='*', self._orig_day_of_week = cronfield(day_of_week) self._orig_day_of_month = cronfield(day_of_month) self._orig_month_of_year = cronfield(month_of_year) + self._orig_kwargs = kwargs self.hour = self._expand_cronspec(hour, 24) self.minute = self._expand_cronspec(minute, 60) self.day_of_week = self._expand_cronspec(day_of_week, 7) @@ -529,7 +535,12 @@ def __reduce__(self): self._orig_hour, self._orig_day_of_week, self._orig_day_of_month, - self._orig_month_of_year), None) + self._orig_month_of_year), self._orig_kwargs) + + def __setstate__(self, state): + # Calling super's init because the kwargs aren't necessarily passed in + # the same form as they are stored by the superclass + super(crontab, self).__init__(**state) def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd): # pylint: disable=redefined-outer-name @@ -624,7 +635,8 @@ def __eq__(self, other): other.day_of_month == self.day_of_month and other.day_of_week == self.day_of_week and other.hour == self.hour and - other.minute == self.minute + other.minute == self.minute and + super(crontab, self).__eq__(other) ) return NotImplemented diff --git a/celery/signals.py b/celery/signals.py index 22a3e9f7a89..6ab2ec534c4 100644 --- a/celery/signals.py +++ b/celery/signals.py @@ -19,8 +19,8 @@ 'task_prerun', 'task_postrun', 'task_success', 'task_retry', 'task_failure', 'task_revoked', 'celeryd_init', 'celeryd_after_setup', 'worker_init', 'worker_process_init', - 'worker_ready', 'worker_shutdown', 'setup_logging', - 'after_setup_logger', 'after_setup_task_logger', + 'worker_ready', 'worker_shutdown', 'worker_shutting_down', + 'setup_logging', 'after_setup_logger', 'after_setup_task_logger', 'beat_init', 'beat_embedded_init', 'heartbeat_sent', 'eventlet_pool_started', 'eventlet_pool_preshutdown', 'eventlet_pool_postshutdown', 'eventlet_pool_apply', @@ -99,6 +99,7 @@ worker_process_shutdown = Signal(name='worker_process_shutdown') worker_ready = Signal(name='worker_ready') worker_shutdown = Signal(name='worker_shutdown') +worker_shutting_down = Signal(name='worker_shutting_down') heartbeat_sent = Signal(name='heartbeat_sent') # - Logging diff --git a/celery/states.py b/celery/states.py index d2ed814001a..35139129dea 100644 --- a/celery/states.py +++ b/celery/states.py @@ -142,7 +142,6 @@ def __le__(self, other): #: Task is waiting for retry. RETRY = 'RETRY' IGNORED = 'IGNORED' -REJECTED = 'REJECTED' READY_STATES = frozenset({SUCCESS, FAILURE, REVOKED}) UNREADY_STATES = frozenset({PENDING, RECEIVED, STARTED, REJECTED, RETRY}) diff --git a/celery/utils/functional.py b/celery/utils/functional.py index b7ece350593..1a0bb3def4f 100644 --- a/celery/utils/functional.py +++ b/celery/utils/functional.py @@ -266,7 +266,11 @@ def head_from_fun(fun, bound=False, debug=False): # in pure-Python. Instead we use exec to create a new function # with an empty body, meaning it has the same performance as # as just calling a function. - if not inspect.isfunction(fun) and hasattr(fun, '__call__'): + is_function = inspect.isfunction(fun) + is_callable = hasattr(fun, '__call__') + is_method = inspect.ismethod(fun) + + if not is_function and is_callable and not is_method: name, fun = fun.__class__.__name__, fun.__call__ else: name = fun.__name__ diff --git a/celery/utils/iso8601.py b/celery/utils/iso8601.py index e92be3d7efb..8c584651589 100644 --- a/celery/utils/iso8601.py +++ b/celery/utils/iso8601.py @@ -1,7 +1,7 @@ """Parse ISO8601 dates. Originally taken from :pypi:`pyiso8601` -(http://code.google.com/p/pyiso8601/) +(https://bitbucket.org/micktwomey/pyiso8601) Modified to match the behavior of ``dateutil.parser``: diff --git a/celery/utils/time.py b/celery/utils/time.py index 12103addfa2..0d696af0554 100644 --- a/celery/utils/time.py +++ b/celery/utils/time.py @@ -322,9 +322,10 @@ def maybe_make_aware(dt, tz=None): """Convert dt to aware datetime, do nothing if dt is already aware.""" if is_naive(dt): dt = to_utc(dt) - return localize( - dt, timezone.utc if tz is None else timezone.tz_or_local(tz), - ) + return localize( + dt, timezone.utc if tz is None else timezone.tz_or_local(tz), + ) + return dt @python_2_unicode_compatible diff --git a/celery/utils/timer2.py b/celery/utils/timer2.py index 0dd09c8b2c3..1b9550300bc 100644 --- a/celery/utils/timer2.py +++ b/celery/utils/timer2.py @@ -3,7 +3,7 @@ .. note:: This is used for the thread-based worker only, - not for amqp/redis/sqs/qpid where :mod:`kombu.async.timer` is used. + not for amqp/redis/sqs/qpid where :mod:`kombu.asynchronous.timer` is used. """ from __future__ import absolute_import, print_function, unicode_literals @@ -14,9 +14,12 @@ from itertools import count from time import sleep +from kombu.asynchronous.timer import Entry +from kombu.asynchronous.timer import Timer as Schedule +from kombu.asynchronous.timer import logger, to_timestamp + from celery.five import THREAD_TIMEOUT_MAX -from kombu.async.timer import Entry, Timer as Schedule, to_timestamp, logger TIMER_DEBUG = os.environ.get('TIMER_DEBUG') diff --git a/celery/worker/autoscale.py b/celery/worker/autoscale.py index e21f73a0b61..299790f5735 100644 --- a/celery/worker/autoscale.py +++ b/celery/worker/autoscale.py @@ -15,7 +15,7 @@ from time import sleep -from kombu.async.semaphore import DummyLock +from kombu.asynchronous.semaphore import DummyLock from celery import bootsteps from celery.five import monotonic diff --git a/celery/worker/components.py b/celery/worker/components.py index 534c6a0d8b3..9f5427cafd6 100644 --- a/celery/worker/components.py +++ b/celery/worker/components.py @@ -5,9 +5,10 @@ import atexit import warnings -from kombu.async import Hub as _Hub, get_event_loop, set_event_loop -from kombu.async.semaphore import DummyLock, LaxBoundedSemaphore -from kombu.async.timer import Timer as _Timer +from kombu.asynchronous import Hub as _Hub +from kombu.asynchronous import get_event_loop, set_event_loop +from kombu.asynchronous.semaphore import DummyLock, LaxBoundedSemaphore +from kombu.asynchronous.timer import Timer as _Timer from celery import bootsteps from celery._state import _set_task_join_will_block diff --git a/celery/worker/consumer/consumer.py b/celery/worker/consumer/consumer.py index 3788cc195ba..39f30b5c722 100644 --- a/celery/worker/consumer/consumer.py +++ b/celery/worker/consumer/consumer.py @@ -16,7 +16,7 @@ from billiard.common import restart_state from billiard.exceptions import RestartFreqExceeded -from kombu.async.semaphore import DummyLock +from kombu.asynchronous.semaphore import DummyLock from kombu.utils.compat import _detect_environment from kombu.utils.encoding import safe_repr, bytes_t from kombu.utils.limits import TokenBucket diff --git a/celery/worker/consumer/gossip.py b/celery/worker/consumer/gossip.py index 67cf8e3467f..7d92462272f 100644 --- a/celery/worker/consumer/gossip.py +++ b/celery/worker/consumer/gossip.py @@ -7,7 +7,7 @@ from operator import itemgetter from kombu import Consumer -from kombu.async.semaphore import DummyLock +from kombu.asynchronous.semaphore import DummyLock from celery import bootsteps from celery.five import values diff --git a/celery/worker/heartbeat.py b/celery/worker/heartbeat.py index 8bd0dcc5f74..6a19ef62642 100644 --- a/celery/worker/heartbeat.py +++ b/celery/worker/heartbeat.py @@ -16,7 +16,7 @@ class Heart(object): """Timer sending heartbeats at regular intervals. Arguments: - timer (kombu.async.timer.Timer): Timer to use. + timer (kombu.asynchronous.timer.Timer): Timer to use. eventer (celery.events.EventDispatcher): Event dispatcher to use. interval (float): Time in seconds between sending diff --git a/celery/worker/loops.py b/celery/worker/loops.py index 3bc45993b3a..3dec9c3ce96 100644 --- a/celery/worker/loops.py +++ b/celery/worker/loops.py @@ -2,6 +2,7 @@ from __future__ import absolute_import, unicode_literals import errno import socket + from celery import bootsteps from celery.exceptions import WorkerShutdown, WorkerTerminate, WorkerLostError from celery.utils.log import get_logger @@ -25,11 +26,26 @@ def _quick_drain(connection, timeout=0.1): def _enable_amqheartbeats(timer, connection, rate=2.0): - if connection: - tick = connection.heartbeat_check - heartbeat = connection.get_heartbeat_interval() # negotiated - if heartbeat and connection.supports_heartbeats: - timer.call_repeatedly(heartbeat / rate, tick, (rate,)) + # Store the latest heartbeat error or None in a one-element list. Then the + # heartbeat thread and main loop can use it as shared memory; any exception + # caught by the heartbeat thread can be read by asynloop / synloop. + heartbeat_error = [None] + + if not connection: + return heartbeat_error + + heartbeat = connection.get_heartbeat_interval() # negotiated + if not (heartbeat and connection.supports_heartbeats): + return heartbeat_error + + def tick(rate): + try: + connection.heartbeat_check(rate) + except Exception as e: + heartbeat_error[0] = e + + timer.call_repeatedly(heartbeat / rate, tick, (rate,)) + return heartbeat_error def asynloop(obj, connection, consumer, blueprint, hub, qos, @@ -41,13 +57,13 @@ def asynloop(obj, connection, consumer, blueprint, hub, qos, on_task_received = obj.create_task_handler() - _enable_amqheartbeats(hub.timer, connection, rate=hbrate) + heartbeat_error = _enable_amqheartbeats(hub.timer, connection, rate=hbrate) consumer.on_message = on_task_received - consumer.consume() - obj.on_ready() obj.controller.register_with_event_loop(hub) obj.register_with_event_loop(hub) + consumer.consume() + obj.on_ready() # did_start_ok will verify that pool processes were able to start, # but this will only work the first time we start, as @@ -77,6 +93,8 @@ def asynloop(obj, connection, consumer, blueprint, hub, qos, raise WorkerShutdown(should_stop) elif should_terminate is not None and should_stop is not False: raise WorkerTerminate(should_terminate) + elif heartbeat_error[0] is not None: + raise heartbeat_error[0] # We only update QoS when there's no more messages to read. # This groups together qos calls, and makes sure that remote @@ -102,8 +120,9 @@ def synloop(obj, connection, consumer, blueprint, hub, qos, RUN = bootsteps.RUN on_task_received = obj.create_task_handler() perform_pending_operations = obj.perform_pending_operations + heartbeat_error = [None] if getattr(obj.pool, 'is_green', False): - _enable_amqheartbeats(obj.timer, connection, rate=hbrate) + heartbeat_error = _enable_amqheartbeats(obj.timer, connection, rate=hbrate) consumer.on_message = on_task_received consumer.consume() @@ -111,6 +130,8 @@ def synloop(obj, connection, consumer, blueprint, hub, qos, while blueprint.state == RUN and obj.connection: state.maybe_shutdown() + if heartbeat_error[0] is not None: + raise heartbeat_error[0] if qos.prev != qos.value: qos.update() try: diff --git a/celery/worker/state.py b/celery/worker/state.py index 2eba29cd3f2..bea9e1cdbff 100644 --- a/celery/worker/state.py +++ b/celery/worker/state.py @@ -24,7 +24,7 @@ __all__ = [ 'SOFTWARE_INFO', 'reserved_requests', 'active_requests', 'total_count', 'revoked', 'task_reserved', 'maybe_shutdown', - 'task_accepted', 'task_reserved', 'task_ready', 'Persistent', + 'task_accepted', 'task_ready', 'Persistent', ] #: Worker software/platform information. diff --git a/celery/worker/strategy.py b/celery/worker/strategy.py index 94fdf4339e5..c3dc068bc96 100644 --- a/celery/worker/strategy.py +++ b/celery/worker/strategy.py @@ -4,7 +4,7 @@ import logging -from kombu.async.timer import to_timestamp +from kombu.asynchronous.timer import to_timestamp from kombu.five import buffer_t from celery.exceptions import InvalidTaskError @@ -92,7 +92,7 @@ def task_message_handler(message, body, ack, reject, callbacks, to_timestamp=to_timestamp): if body is None: body, headers, decoded, utc = ( - message.body, message.headers, False, True, + message.body, message.headers, False, app.uses_utc_timezone(), ) if not body_can_be_buffer: body = bytes(body) if isinstance(body, buffer_t) else body @@ -126,7 +126,7 @@ def task_message_handler(message, body, ack, reject, callbacks, if req.utc: eta = to_timestamp(to_system_tz(req.eta)) else: - eta = to_timestamp(req.eta, timezone.local) + eta = to_timestamp(req.eta, app.timezone) except (OverflowError, ValueError) as exc: error("Couldn't convert ETA %r to timestamp: %r. Task: %r", req.eta, exc, req.info(safe=True), exc_info=True) diff --git a/celery/worker/worker.py b/celery/worker/worker.py index cde0e9381a8..2e8f4cf8dd3 100644 --- a/celery/worker/worker.py +++ b/celery/worker/worker.py @@ -238,7 +238,7 @@ def signal_consumer_close(self): def should_use_eventloop(self): return (detect_environment() == 'default' and - self._conninfo.transport.implements.async and + self._conninfo.transport.implements.asynchronous and not self.app.IS_WINDOWS) def stop(self, in_sighandler=False, exitcode=None): diff --git a/circleci_scripts/execute_unit_tests.sh b/circleci_scripts/execute_unit_tests.sh new file mode 100755 index 00000000000..b09852cd8c5 --- /dev/null +++ b/circleci_scripts/execute_unit_tests.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +set -ex + +TSTAMP=$(date -u +'%Y%m%d%H%M%S') +WORKSPACE=$(pwd) + +echo "Running on host: ${HOSTNAME}" >&2 + +# We want to record coverage even if the tests fail, so we need to capture and +# store the exit code until after `coverage xml` runs. +cd /home/developer/celery + +XML_RESULT_DIR=/home/developer/tmp/junit/ +mkdir -p ${XML_RESULT_DIR} + +xml_filename="${XML_RESULT_DIR}smcelery-${TSTAMP}.xml" + +echo "python3.6" > /home/developer/.python-version + +CMD="pyenv global python3.6" +${CMD} + +CMD="\ +pyenv exec coverage run --rcfile=/home/developer/celery/.coveragerc \ +/home/developer/.pyenv/versions/python3.6/bin/pytest --color=no -vv --junit-xml=${xml_filename} t/unit \ +" + +if ${CMD} >&2 +then exitcode=0 +else exitcode=$? +fi + +if [ ! -f $xml_filename ]; then + # No result files, assume we core dumped + cat >${xml_filename} < + + + + /home/developer/celery/circleci_scripts/execute_unit_tests.sh: line 77: \${CMD} 1>&2 + (core dumped) + + + +EOF +fi + +exit "${exitcode}" diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000000..3db877cdbdd --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,89 @@ +FROM ubuntu:bionic + +ENV PYTHONIOENCODING UTF-8 + +ARG DEBIAN_FRONTEND=noninteractive + +# Pypy is installed from a package manager because it takes so long to build. +RUN apt-get update && apt-get install -y build-essential \ + libcurl4-openssl-dev \ + libffi-dev \ + tk-dev \ + xz-utils \ + curl \ + lsb-release \ + git \ + libmemcached-dev \ + make \ + liblzma-dev \ + libreadline-dev \ + libbz2-dev \ + llvm \ + libncurses5-dev \ + libsqlite3-dev \ + wget \ + pypy \ + python-openssl \ + libncursesw5-dev \ + zlib1g-dev \ + pkg-config \ + libssl-dev \ + sudo + +# Setup variables. Even though changing these may cause unnecessary invalidation of +# unrelated elements, grouping them together makes the Dockerfile read better. +ENV PROVISIONING /provisioning + +ARG CELERY_USER=developer + +# Check for mandatory build arguments +RUN : "${CELERY_USER:?CELERY_USER build argument needs to be set and non-empty.}" + +ENV HOME /home/$CELERY_USER +ENV PATH="$HOME/.pyenv/bin:$PATH" + +# Copy and run setup scripts +WORKDIR $PROVISIONING +COPY docker/scripts/install-couchbase.sh . +# Scripts will lose thier executable flags on copy. To avoid the extra instructions +# we call the shell directly. +RUN sh install-couchbase.sh +COPY docker/scripts/create-linux-user.sh . +RUN sh create-linux-user.sh + +# Swap to the celery user so packages and celery are not installed as root. +USER $CELERY_USER + +COPY docker/scripts/install-pyenv.sh . +RUN sh install-pyenv.sh + +# Install celery +WORKDIR $HOME +COPY --chown=1019:1019 requirements $HOME/requirements +COPY --chown=1019:1019 docker/entrypoint /entrypoint +RUN chmod gu+x /entrypoint + +# Define the local pyenvs +# RUN pyenv local python3.8 python3.7 python3.6 +RUN pyenv local python3.6 +RUN pyenv global python3.6 + +RUN pyenv exec python3.6 -m pip install --upgrade pip setuptools wheel + +# Setup one celery environment for basic development use +RUN pyenv exec python3.6 -m pip install \ + -r requirements/default.txt \ + -r requirements/test.txt \ + -r requirements/test-ci-default.txt \ + -r requirements/docs.txt \ + -r requirements/test-integration.txt \ + -r requirements/pkgutils.txt + +COPY --chown=1019:1019 . $HOME/celery + +WORKDIR $HOME/celery + +# Setup the entrypoint, this ensures pyenv is initialized when a container is started +# and that any compiled files from earlier steps or from mounts are removed to avoid +# py.test failing with an ImportMismatchError +ENTRYPOINT ["/entrypoint"] diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 00000000000..90623cc106f --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,44 @@ +version: '3' + +# Developer script, may or may not work + +services: + celery: + build: + context: .. + dockerfile: docker/Dockerfile + args: + CELERY_USER: developer + # image: celery/celery:dev + # image: gcr.io/sightmachine-178216/celery:cac0660ab6e023792ee794918a6a23adc46d1904-dev + environment: + TEST_BROKER: pyamqp://rabbit:5672 + TEST_BACKEND: redis://redis + PYTHONUNBUFFERED: 1 + PYTHONDONTWRITEBYTECODE: 1 + REDIS_HOST: redis + WORKER_LOGLEVEL: DEBUG + AZUREBLOCKBLOB_URL: azureblockblob://DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azurite:10000/devstoreaccount1; + PYTHONPATH: /home/developer/celery + command: + - cat + tty: true + volumes: + - ../.:/home/developer/celery + depends_on: + - rabbit + - redis + - dynamodb + - azurite + + rabbit: + image: rabbitmq:3.8.0 + + redis: + image: redis:5.0.6 + + dynamodb: + image: dwmkerr/dynamodb:38 + + azurite: + image: arafato/azurite:2.6.5 diff --git a/docker/entrypoint b/docker/entrypoint new file mode 100755 index 00000000000..e4c810930e2 --- /dev/null +++ b/docker/entrypoint @@ -0,0 +1,8 @@ +#!/bin/bash + +make --quiet --directory="$HOME/celery" clean-pyc + +eval "$(pyenv init -)" +eval "$(pyenv virtualenv-init -)" + +exec "$@" diff --git a/docker/scripts/create-linux-user.sh b/docker/scripts/create-linux-user.sh new file mode 100755 index 00000000000..b231c45ee33 --- /dev/null +++ b/docker/scripts/create-linux-user.sh @@ -0,0 +1,3 @@ +#!/bin/sh +addgroup --gid 1019 $CELERY_USER +adduser --system --disabled-password --uid 1019 --gid 1019 $CELERY_USER diff --git a/docker/scripts/install-couchbase.sh b/docker/scripts/install-couchbase.sh new file mode 100755 index 00000000000..165e6e17322 --- /dev/null +++ b/docker/scripts/install-couchbase.sh @@ -0,0 +1,8 @@ +#!/bin/sh +# Install Couchbase's GPG key +sudo wget -O - http://packages.couchbase.com/ubuntu/couchbase.key | sudo apt-key add - +# Adding Ubuntu 18.04 repo to apt/sources.list of 19.10 or 19.04 +echo "deb http://packages.couchbase.com/ubuntu bionic bionic/main" | sudo tee /etc/apt/sources.list.d/couchbase.list +# To install or upgrade packages +apt-get update +apt-get install -y libcouchbase-dev libcouchbase2-bin build-essential diff --git a/docker/scripts/install-pyenv.sh b/docker/scripts/install-pyenv.sh new file mode 100755 index 00000000000..0a90ac9607a --- /dev/null +++ b/docker/scripts/install-pyenv.sh @@ -0,0 +1,11 @@ +#!/bin/sh +# For managing all the local python installations for testing, use pyenv +curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash + +# To enable testing versions like 3.4.8 as 3.4 in tox, we need to alias +# pyenv python versions +git clone https://github.com/s1341/pyenv-alias.git $(pyenv root)/plugins/pyenv-alias + +# Python versions to test against +VERSION_ALIAS="python3.6" pyenv install 3.6.9 +# Possible to add more versions like this: VERSION_ALIAS="python3.7" pyenv install 3.7.5 diff --git a/docs/AUTHORS.txt b/docs/AUTHORS.txt index e7534eca7cd..67bb80bf422 100644 --- a/docs/AUTHORS.txt +++ b/docs/AUTHORS.txt @@ -7,6 +7,7 @@ Aaron Ross Adam Endicott Adriano Petrich Akira Matsuzaki +Alan Brogan Alec Clowes Ales Zoulek Allan Caffee @@ -64,6 +65,7 @@ Iurii Kriachko Ivan Metzlar Jannis Leidel Jason Baker +Jay McGrath Jeff Balogh Jeff Terrace Jerzy Kozera @@ -94,6 +96,7 @@ Mark Stover Mark Thurman Martin Galpin Martin Melin +Matt Ullman Matt Williamson Matthew J Morrison Matthew Miller @@ -111,7 +114,9 @@ Neil Chintomby Noah Kantrowitz Norman Richards Patrick Altman +Peter Bittner Piotr Sikora +Primož Kerin Remy Noel Reza Lotun Roberto Gaiser diff --git a/docs/community.rst b/docs/community.rst index e2696baa75d..e3cedc46623 100644 --- a/docs/community.rst +++ b/docs/community.rst @@ -21,21 +21,21 @@ Resources Who's using Celery ------------------ -http://wiki.github.com/celery/celery/using +https://wiki.github.com/celery/celery/using .. _res-wiki: Wiki ---- -http://wiki.github.com/celery/celery/ +https://wiki.github.com/celery/celery/ .. _res-stackoverflow: Celery questions on Stack Overflow ---------------------------------- -http://stackoverflow.com/search?q=celery&tab=newest +https://stackoverflow.com/search?q=celery&tab=newest .. _res-mailing-list-archive: diff --git a/docs/contributing.rst b/docs/contributing.rst index 1d9acd4f253..8697180cd57 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -39,7 +39,7 @@ meeting or private correspondence. The Code of Conduct is heavily based on the `Ubuntu Code of Conduct`_, and the `Pylons Code of Conduct`_. -.. _`Ubuntu Code of Conduct`: http://www.ubuntu.com/community/conduct +.. _`Ubuntu Code of Conduct`: https://www.ubuntu.com/community/conduct .. _`Pylons Code of Conduct`: http://docs.pylonshq.com/community/conduct.html Be considerate @@ -285,6 +285,7 @@ Branches Current active version branches: * dev (which git calls "master") (https://github.com/celery/celery/tree/master) +* 4.0 (https://github.com/celery/celery/tree/4.0) * 3.1 (https://github.com/celery/celery/tree/3.1) * 3.0 (https://github.com/celery/celery/tree/3.0) @@ -442,10 +443,10 @@ fetch and checkout a remote branch like this:: git checkout --track -b 3.0-devel origin/3.0-devel -.. _`Fork a Repo`: http://help.github.com/fork-a-repo/ +.. _`Fork a Repo`: https://help.github.com/fork-a-repo/ .. _`Rebasing merge commits in git`: - http://notes.envato.com/developers/rebasing-merge-commits-in-git/ -.. _`Rebase`: http://help.github.com/rebase/ + https://notes.envato.com/developers/rebasing-merge-commits-in-git/ +.. _`Rebase`: https://help.github.com/rebase/ .. _contributing-testing: @@ -514,7 +515,7 @@ of your contribution. Read the `Pull Requests`_ section in the GitHub Guide to learn how this is done. You can also attach pull requests to existing issues by following -the steps outlined here: http://bit.ly/koJoso +the steps outlined here: https://bit.ly/koJoso .. _`Pull Requests`: http://help.github.com/send-pull-requests/ @@ -581,11 +582,12 @@ Building the documentation -------------------------- To build the documentation you need to install the dependencies -listed in :file:`requirements/docs.txt`: +listed in :file:`requirements/docs.txt` and :file:`requirements/default.txt`: .. code-block:: console $ pip install -U -r requirements/docs.txt + $ pip install -U -r requirements/default.txt After these dependencies are installed you should be able to build the docs by running: @@ -628,7 +630,7 @@ the ``flakes`` target instead: .. code-block:: console - $ make flakes§ + $ make flakes API reference ~~~~~~~~~~~~~ @@ -747,14 +749,14 @@ is following the conventions. * Import order * Python standard library (`import xxx`) - * Python standard library ('from xxx import`) + * Python standard library (`from xxx import`) * Third-party packages. * Other modules from the current package. or in case of code using Django: * Python standard library (`import xxx`) - * Python standard library ('from xxx import`) + * Python standard library (`from xxx import`) * Third-party packages. * Django packages. * Other modules from the current package. @@ -804,7 +806,7 @@ is following the conventions. support for Python 2.5) -* Note that we use "new-style` relative imports when the distribution +* Note that we use "new-style" relative imports when the distribution doesn't support Python versions below 2.5 This requires Python 2.5 or later: @@ -896,7 +898,7 @@ Ask Solem ~~~~~~~~~ :github: https://github.com/ask -:twitter: http://twitter.com/#!/asksol +:twitter: https://twitter.com/#!/asksol Asif Saif Uddin ~~~~~~~~~~~~~~~ @@ -920,7 +922,7 @@ Mher Movsisyan ~~~~~~~~~~~~~~ :github: https://github.com/mher -:twitter: http://twitter.com/#!/movsm +:twitter: https://twitter.com/#!/movsm Omer Katz ~~~~~~~~~ @@ -931,7 +933,7 @@ Steeve Morin ~~~~~~~~~~~~ :github: https://github.com/steeve -:twitter: http://twitter.com/#!/steeve +:twitter: https://twitter.com/#!/steeve Website ------- @@ -950,7 +952,7 @@ Jan Henrik Helmers ~~~~~~~~~~~~~~~~~~ :web: http://www.helmersworks.com -:twitter: http://twitter.com/#!/helmers +:twitter: https://twitter.com/#!/helmers .. _packages: @@ -962,7 +964,7 @@ Packages ---------- :git: https://github.com/celery/celery -:CI: http://travis-ci.org/#!/celery/celery +:CI: https://travis-ci.org/#!/celery/celery :Windows-CI: https://ci.appveyor.com/project/ask/celery :PyPI: :pypi:`celery` :docs: http://docs.celeryproject.org @@ -973,7 +975,7 @@ Packages Messaging library. :git: https://github.com/celery/kombu -:CI: http://travis-ci.org/#!/celery/kombu +:CI: https://travis-ci.org/#!/celery/kombu :Windows-CI: https://ci.appveyor.com/project/ask/kombu :PyPI: :pypi:`kombu` :docs: https://kombu.readthedocs.io @@ -984,7 +986,7 @@ Messaging library. Python AMQP 0.9.1 client. :git: https://github.com/celery/py-amqp -:CI: http://travis-ci.org/#!/celery/py-amqp +:CI: https://travis-ci.org/#!/celery/py-amqp :Windows-CI: https://ci.appveyor.com/project/ask/py-amqp :PyPI: :pypi:`amqp` :docs: https://amqp.readthedocs.io @@ -995,7 +997,7 @@ Python AMQP 0.9.1 client. Promise/deferred implementation. :git: https://github.com/celery/vine/ -:CI: http://travis-ci.org/#!/celery/vine/ +:CI: https://travis-ci.org/#!/celery/vine/ :Windows-CI: https://ci.appveyor.com/project/ask/vine :PyPI: :pypi:`vine` :docs: https://vine.readthedocs.io @@ -1007,7 +1009,7 @@ Fork of multiprocessing containing improvements that'll eventually be merged into the Python stdlib. :git: https://github.com/celery/billiard -:CI: http://travis-ci.org/#!/celery/billiard/ +:CI: https://travis-ci.org/#!/celery/billiard/ :Windows-CI: https://ci.appveyor.com/project/ask/billiard :PyPI: :pypi:`billiard` @@ -1017,7 +1019,7 @@ that'll eventually be merged into the Python stdlib. Database-backed Periodic Tasks with admin interface using the Django ORM. :git: https://github.com/celery/django-celery-beat -:CI: http://travis-ci.org/#!/celery/django-celery-beat +:CI: https://travis-ci.org/#!/celery/django-celery-beat :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-beat :PyPI: :pypi:`django-celery-beat` @@ -1027,7 +1029,7 @@ Database-backed Periodic Tasks with admin interface using the Django ORM. Store task results in the Django ORM, or using the Django Cache Framework. :git: https://github.com/celery/django-celery-results -:CI: http://travis-ci.org/#!/celery/django-celery-results +:CI: https://travis-ci.org/#!/celery/django-celery-results :Windows-CI: https://ci.appveyor.com/project/ask/django-celery-results :PyPI: :pypi:`django-celery-results` @@ -1155,7 +1157,7 @@ If this is a new release series then you also need to do the following: * Go to the Read The Docs management interface at: - http://readthedocs.org/projects/celery/?fromdocs=celery + https://readthedocs.org/projects/celery/?fromdocs=celery * Enter "Edit project" diff --git a/docs/copyright.rst b/docs/copyright.rst index ac0d33cbb06..4a7b254fc73 100644 --- a/docs/copyright.rst +++ b/docs/copyright.rst @@ -12,7 +12,7 @@ Copyright |copy| 2009-2016, Ask Solem. All rights reserved. This material may be copied or distributed only subject to the terms and conditions set forth in the `Creative Commons Attribution-ShareAlike 4.0 International` -`_ license. +`_ license. You may share and adapt the material, even for commercial purposes, but you must give the original author credit. diff --git a/docs/django/first-steps-with-django.rst b/docs/django/first-steps-with-django.rst index cf422fd04f5..1d31045aad6 100644 --- a/docs/django/first-steps-with-django.rst +++ b/docs/django/first-steps-with-django.rst @@ -86,7 +86,7 @@ from the Django settings; but you can also separate them if wanted. The uppercase name-space means that all Celery configuration options must be specified in uppercase instead of lowercase, and start with -``CELERY_``, so for example the :setting:`task_always_eager`` setting +``CELERY_``, so for example the :setting:`task_always_eager` setting becomes ``CELERY_TASK_ALWAYS_EAGER``, and the :setting:`broker_url` setting becomes ``CELERY_BROKER_URL``. @@ -171,17 +171,23 @@ To use this with your project you need to follow these steps: $ pip install django-celery-results -2. Add ``django_celery_results`` to ``INSTALLED_APPS``. +#. Add ``django_celery_results`` to ``INSTALLED_APPS`` in your + Django project's :file:`settings.py`:: - Note that there's no dashes in this name, only underscores. + INSTALLED_APPS = ( + ..., + 'django_celery_results', + ) -3. Create the Celery database tables by performing a database migrations: + Note that there is no dash in the module name, only underscores. + +#. Create the Celery database tables by performing a database migrations: .. code-block:: console $ python manage.py migrate django_celery_results -4. Configure Celery to use the :pypi:`django-celery-results` backend. +#. Configure Celery to use the :pypi:`django-celery-results` backend. Assuming you are using Django's :file:`settings.py` to also configure Celery, add the following settings: diff --git a/docs/faq.rst b/docs/faq.rst index 921456a0661..ee963e1182b 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -21,7 +21,7 @@ What kinds of things should I use Celery for? describing why you'd use a queue in a web context. .. _`Queue everything and delight everyone`: - http://decafbad.com/blog/2008/07/04/queue-everything-and-delight-everyone + https://decafbad.com/blog/2008/07/04/queue-everything-and-delight-everyone These are some common use cases: @@ -175,7 +175,7 @@ See :ref:`brokers` for more information. Redis as a broker won't perform as well as an AMQP broker, but the combination RabbitMQ as broker and Redis as a result store is commonly used. If you have strict reliability requirements you're -encouraged to use RabbitMQ or another AMQP broker. Some transports also uses +encouraged to use RabbitMQ or another AMQP broker. Some transports also use polling, so they're likely to consume more resources. However, if you for some reason aren't able to use AMQP, feel free to use these alternatives. They will probably work fine for most use cases, and note that the above @@ -195,7 +195,7 @@ language has an AMQP client, there shouldn't be much work to create a worker in your language. A Celery worker is just a program connecting to the broker to process messages. -Also, there's another way to be language independent, and that's to use REST +Also, there's another way to be language-independent, and that's to use REST tasks, instead of your tasks being functions, they're URLs. With this information you can even create simple web servers that enable preloading of code. Simply expose an endpoint that performs an operation, and create a task @@ -223,15 +223,15 @@ Transaction Model and Locking`_ in the MySQL user manual. (Thanks to Honza Kral and Anton Tsigularov for this solution) -.. _`MySQL - The InnoDB Transaction Model and Locking`: http://dev.mysql.com/doc/refman/5.1/en/innodb-transaction-model.html +.. _`MySQL - The InnoDB Transaction Model and Locking`: https://dev.mysql.com/doc/refman/5.1/en/innodb-transaction-model.html .. _faq-worker-hanging: The worker isn't doing anything, just hanging --------------------------------------------- -**Answer:** See `MySQL is throwing deadlock errors, what can I do?`_. - or `Why is Task.delay/apply\* just hanging?`. +**Answer:** See `MySQL is throwing deadlock errors, what can I do?`_, +or `Why is Task.delay/apply\*/the worker just hanging?`_. .. _faq-results-unreliable: @@ -485,7 +485,7 @@ Why is RabbitMQ crashing? **Answer:** RabbitMQ will crash if it runs out of memory. This will be fixed in a future release of RabbitMQ. please refer to the RabbitMQ FAQ: -http://www.rabbitmq.com/faq.html#node-runs-out-of-memory +https://www.rabbitmq.com/faq.html#node-runs-out-of-memory .. note:: diff --git a/docs/getting-started/brokers/rabbitmq.rst b/docs/getting-started/brokers/rabbitmq.rst index 1a58d666743..6f5d95dd8ab 100644 --- a/docs/getting-started/brokers/rabbitmq.rst +++ b/docs/getting-started/brokers/rabbitmq.rst @@ -16,11 +16,12 @@ the broker instance you want to use: .. code-block:: python - broker_url = 'amqp://guest:guest@localhost:5672//' + broker_url = 'amqp://myuser:mypassword@localhost:5672/myvhost' For a description of broker URLs and a full list of the various broker configuration options available to Celery, -see :ref:`conf-broker-settings`. +see :ref:`conf-broker-settings`, and see below for setting up the +username, password and vhost. .. _installing-rabbitmq: @@ -64,6 +65,8 @@ allow that user access to that virtual host: $ sudo rabbitmqctl set_permissions -p myvhost myuser ".*" ".*" ".*" +Substitute in appropriate values for ``myuser``, ``mypassword`` and ``myvhost`` above. + See the RabbitMQ `Admin Guide`_ for more information about `access control`_. .. _`Admin Guide`: http://www.rabbitmq.com/admin-guide.html diff --git a/docs/getting-started/first-steps-with-celery.rst b/docs/getting-started/first-steps-with-celery.rst index 035e7df4974..093f839caad 100644 --- a/docs/getting-started/first-steps-with-celery.rst +++ b/docs/getting-started/first-steps-with-celery.rst @@ -24,10 +24,10 @@ Learn about; Celery may seem daunting at first - but don't worry - this tutorial will get you started in no time. It's deliberately kept simple, so -to not confuse you with advanced features. -After you have finished this tutorial -it's a good idea to browse the rest of the documentation, -for example the :ref:`next-steps` tutorial will +as to not confuse you with advanced features. +After you have finished this tutorial, +it's a good idea to browse the rest of the documentation. +For example the :ref:`next-steps` tutorial will showcase Celery's capabilities. .. contents:: @@ -61,10 +61,10 @@ command: $ sudo apt-get install rabbitmq-server -When the command completes the broker is already running in the background, +When the command completes, the broker will already be running in the background, ready to move messages for you: ``Starting rabbitmq-server: SUCCESS``. -And don't worry if you're not running Ubuntu or Debian, you can go to this +Don't worry if you're not running Ubuntu or Debian, you can go to this website to find similarly simple installation instructions for other platforms, including Microsoft Windows: @@ -78,7 +78,7 @@ the event of abrupt termination or power failures. Detailed information about us :ref:`broker-redis` -.. _`Redis`: http://redis.io/ +.. _`Redis`: https://redis.io/ Other brokers ------------- @@ -124,8 +124,8 @@ Let's create the file :file:`tasks.py`: def add(x, y): return x + y -The first argument to :class:`~celery.app.Celery` is the name of the current module, -this only needed so names can be automatically generated when the tasks are +The first argument to :class:`~celery.app.Celery` is the name of the current module. +This is only needed so that names can be automatically generated when the tasks are defined in the `__main__` module. The second argument is the broker keyword argument, specifying the URL of the @@ -142,7 +142,7 @@ You defined a single task, called ``add``, returning the sum of two numbers. Running the Celery worker server ================================ -You now run the worker by executing our program with the ``worker`` +You can now run the worker by executing our program with the ``worker`` argument: .. code-block:: console @@ -187,16 +187,16 @@ method that gives greater control of the task execution (see >>> from tasks import add >>> add.delay(4, 4) -The task has now been processed by the worker you started earlier, -and you can verify that by looking at the workers console output. +The task has now been processed by the worker you started earlier. +You can verify this by looking at the worker's console output. -Calling a task returns an :class:`~@AsyncResult` instance: -this can be used to check the state of the task, wait for the task to finish, -or get its return value (or if the task failed, the exception and traceback). +Calling a task returns an :class:`~@AsyncResult` instance. +This can be used to check the state of the task, wait for the task to finish, +or get its return value (or if the task failed, to get the exception and traceback). -Results aren't enabled by default, so if you want to do RPC or keep track -of task results in a database you have to configure Celery to use a result -backend. This is described by the next section. +Results are not enabled by default. In order to do remote procedure calls +or keep track of task results in a database, you will need to configure Celery to use a result +backend. This is described in the next section. .. _celerytut-keeping-results: @@ -265,13 +265,13 @@ the ``propagate`` argument: >>> result.get(propagate=False) -If the task raised an exception you can also gain access to the +If the task raised an exception, you can also gain access to the original traceback: .. code-block:: pycon >>> result.traceback - … + … See :mod:`celery.result` for the complete result object reference. @@ -280,14 +280,14 @@ See :mod:`celery.result` for the complete result object reference. Configuration ============= -Celery, like a consumer appliance, doesn't need much to be operated. -It has an input and an output, where you must connect the input to a broker and maybe -the output to a result backend if so wanted. But if you look closely at the back +Celery, like a consumer appliance, doesn't need much configuration to operate. +It has an input and an output. The input must be connected to a broker, and the output can +be optionally connected to a result backend. However, if you look closely at the back, there's a lid revealing loads of sliders, dials, and buttons: this is the configuration. -The default configuration should be good enough for most uses, but there are -many things to tweak so Celery works just the way you want it to. -Reading about the options available is a good idea to get familiar with what +The default configuration should be good enough for most use cases, but there are +many options that can be configured to make Celery work exactly as needed. +Reading about the options available is a good idea to familiarize yourself with what can be configured. You can read about the options in the :ref:`configuration` reference. @@ -312,15 +312,14 @@ If you're configuring many settings at once you can use ``update``: enable_utc=True, ) -For larger projects using a dedicated configuration module is useful, -in fact you're discouraged from hard coding -periodic task intervals and task routing options, as it's much -better to keep this in a centralized location, and especially for libraries -it makes it possible for users to control how they want your tasks to behave, -you can also imagine your SysAdmin making simple changes to the configuration +For larger projects, a dedicated configuration module is recommended. +Hard coding periodic task intervals and task routing options is discouraged. +It is much better to keep these in a centralized location. This is especially +true for libraries, as it enables users to control how their tasks behave. +A centralized configuration will also allow your SysAdmin to make simple changes in the event of system trouble. -You can tell your Celery instance to use a configuration module, +You can tell your Celery instance to use a configuration module by calling the :meth:`@config_from_object` method: .. code-block:: python @@ -330,8 +329,8 @@ by calling the :meth:`@config_from_object` method: This module is often called "``celeryconfig``", but you can use any module name. -A module named ``celeryconfig.py`` must then be available to load from the -current directory or on the Python path, it could look like this: +In the above case, a module named ``celeryconfig.py`` must be available to load from the +current directory or on the Python path. It could look something like this: :file:`celeryconfig.py`: @@ -346,7 +345,7 @@ current directory or on the Python path, it could look like this: timezone = 'Europe/Oslo' enable_utc = True -To verify that your configuration file works properly, and doesn't +To verify that your configuration file works properly and doesn't contain any syntax errors, you can try to import it: .. code-block:: console @@ -390,7 +389,7 @@ for the task at runtime: See :ref:`guide-routing` to read more about task routing, and the :setting:`task_annotations` setting for more about annotations, -or :ref:`guide-monitoring` for more about remote control commands, +or :ref:`guide-monitoring` for more about remote control commands and how to monitor what your workers are doing. Where to go from here @@ -398,7 +397,7 @@ Where to go from here If you want to learn more you should continue to the :ref:`Next Steps ` tutorial, and after that you -can study the :ref:`User Guide `. +can read the :ref:`User Guide `. .. _celerytut-troubleshooting: @@ -426,16 +425,16 @@ Worker doesn't start: Permission Error If you provide any of the :option:`--pidfile `, :option:`--logfile ` or :option:`--statedb ` arguments, then you must - make sure that they point to a file/directory that's writable and + make sure that they point to a file or directory that's writable and readable by the user starting the worker. Result backend doesn't work or tasks are always in ``PENDING`` state -------------------------------------------------------------------- All tasks are :state:`PENDING` by default, so the state would've been -better named "unknown". Celery doesn't update any state when a task +better named "unknown". Celery doesn't update the state when a task is sent, and any task with no history is assumed to be pending (you know -the task id after all). +the task id, after all). 1) Make sure that the task doesn't have ``ignore_result`` enabled. @@ -447,9 +446,9 @@ the task id after all). 3) Make sure that you don't have any old workers still running. It's easy to start multiple workers by accident, so make sure - that the previous worker is properly shutdown before you start a new one. + that the previous worker is properly shut down before you start a new one. - An old worker that aren't configured with the expected result backend + An old worker that isn't configured with the expected result backend may be running and is hijacking the tasks. The :option:`--pidfile ` argument can be set to @@ -457,9 +456,9 @@ the task id after all). 4) Make sure the client is configured with the right backend. - If for some reason the client is configured to use a different backend - than the worker, you won't be able to receive the result, - so make sure the backend is correct by inspecting it: + If, for some reason, the client is configured to use a different backend + than the worker, you won't be able to receive the result. + Make sure the backend is configured correctly: .. code-block:: pycon diff --git a/docs/getting-started/introduction.rst b/docs/getting-started/introduction.rst index 85f799c147d..831e180536e 100644 --- a/docs/getting-started/introduction.rst +++ b/docs/getting-started/introduction.rst @@ -78,7 +78,7 @@ then you should read our getting started tutorials: Celery is… ========== -.. _`mailing-list`: http://groups.google.com/group/celery-users +.. _`mailing-list`: https://groups.google.com/group/celery-users .. topic:: \ @@ -234,11 +234,11 @@ The integration packages aren't strictly necessary, but they can make development easier, and sometimes they add important hooks like closing database connections at :manpage:`fork(2)`. -.. _`Django`: http://djangoproject.com/ +.. _`Django`: https://djangoproject.com/ .. _`Pylons`: http://pylonshq.com/ .. _`Flask`: http://flask.pocoo.org/ .. _`web2py`: http://web2py.com/ -.. _`Bottle`: http://bottlepy.org/ +.. _`Bottle`: https://bottlepy.org/ .. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html .. _`Tornado`: http://www.tornadoweb.org/ .. _`tornado-celery`: https://github.com/mher/tornado-celery/ diff --git a/docs/history/changelog-4.0.rst b/docs/history/changelog-4.0.rst new file mode 100644 index 00000000000..a3c0935177b --- /dev/null +++ b/docs/history/changelog-4.0.rst @@ -0,0 +1,231 @@ +.. _changelog-4.0: + +================ + Change history +================ + +This document contains change notes for bugfix releases in +the 4.0.x series (latentcall), please see :ref:`whatsnew-4.0` for +an overview of what's new in Celery 4.0. + +.. _version-4.0.2: + +4.0.2 +===== +:release-date: 2016-12-15 03:40 PM PST +:release-by: Ask Solem + +- **Requirements** + + - Now depends on :ref:`Kombu 4.0.2 `. + +- **Tasks**: Fixed problem with JSON serialization of `group` + (``keys must be string`` error, Issue #3688). + +- **Worker**: Fixed JSON serialization issue when using ``inspect active`` + and friends (Issue #3667). + +- **App**: Fixed saferef errors when using signals (Issue #3670). + +- **Prefork**: Fixed bug with pack requiring bytes argument + on Python 2.7.5 and earlier (Issue #3674). + +- **Tasks**: Saferepr did not handle unicode in bytestrings on Python 2 + (Issue #3676). + +- **Testing**: Added new ``celery_worker_paremeters`` fixture. + + Contributed by **Michael Howitz**. + +- **Tasks**: Added new ``app`` argument to ``GroupResult.restore`` + (Issue #3669). + + This makes the restore method behave the same way as the ``GroupResult`` + constructor. + + Contributed by **Andreas Pelme**. + +- **Tasks**: Fixed type checking crash when task takes ``*args`` on Python 3 + (Issue #3678). + +- Documentation and examples improvements by: + + - **BLAGA Razvan-Paul** + - **Michael Howitz** + - :github_user:`paradox41` + +.. _version-4.0.1: + +4.0.1 +===== +:release-date: 2016-12-08 05:22 PM PST +:release-by: Ask Solem + +* [Security: `CELERYSA-0003`_] Insecure default configuration + + The default :setting:`accept_content` setting was set to allow + deserialization of pickled messages in Celery 4.0.0. + + The insecure default has been fixed in 4.0.1, and you can also + configure the 4.0.0 version to explicitly only allow json serialized + messages: + + .. code-block:: python + + app.conf.accept_content = ['json'] + +.. _`CELERYSA-0003`: + https://github.com/celery/celery/tree/master/docs/sec/CELERYSA-0003.txt + +- **Tasks**: Added new method to register class-based tasks (Issue #3615). + + To register a class based task you should now call ``app.register_task``: + + .. code-block:: python + + from celery import Celery, Task + + app = Celery() + + class CustomTask(Task): + + def run(self): + return 'hello' + + app.register_task(CustomTask()) + +- **Tasks**: Argument checking now supports keyword-only arguments on Python3 + (Issue #3658). + + Contributed by :github_user:`sww`. + +- **Tasks**: The ``task-sent`` event was not being sent even if + configured to do so (Issue #3646). + +- **Worker**: Fixed AMQP heartbeat support for eventlet/gevent pools + (Issue #3649). + +- **App**: ``app.conf.humanize()`` would not work if configuration + not finalized (Issue #3652). + +- **Utils**: ``saferepr`` attempted to show iterables as lists + and mappings as dicts. + +- **Utils**: ``saferepr`` did not handle unicode-errors + when attempting to format ``bytes`` on Python 3 (Issue #3610). + +- **Utils**: ``saferepr`` should now properly represent byte strings + with non-ascii characters (Issue #3600). + +- **Results**: Fixed bug in elasticsearch where _index method missed + the body argument (Issue #3606). + + Fix contributed by **何翔宇** (Sean Ho). + +- **Canvas**: Fixed :exc:`ValueError` in chord with single task header + (Issue #3608). + + Fix contributed by **Viktor Holmqvist**. + +- **Task**: Ensure class-based task has name prior to registration + (Issue #3616). + + Fix contributed by **Rick Wargo**. + +- **Beat**: Fixed problem with strings in shelve (Issue #3644). + + Fix contributed by **Alli**. + +- **Worker**: Fixed :exc:`KeyError` in ``inspect stats`` when ``-O`` argument + set to something other than ``fast`` or ``fair`` (Issue #3621). + +- **Task**: Retried tasks were no longer sent to the original queue + (Issue #3622). + +- **Worker**: Python 3: Fixed None/int type comparison in + :file:`apps/worker.py` (Issue #3631). + +- **Results**: Redis has a new :setting:`redis_socket_connect_timeout` + setting. + +- **Results**: Redis result backend passed the ``socket_connect_timeout`` + argument to UNIX socket based connections by mistake, causing a crash. + +- **Worker**: Fixed missing logo in worker splash screen when running on + Python 3.x (Issue #3627). + + Fix contributed by **Brian Luan**. + +- **Deps**: Fixed ``celery[redis]`` bundle installation (Issue #3643). + + Fix contributed by **Rémi Marenco**. + +- **Deps**: Bundle ``celery[sqs]`` now also requires :pypi:`pycurl` + (Issue #3619). + +- **Worker**: Hard time limits were no longer being respected (Issue #3618). + +- **Worker**: Soft time limit log showed ``Trues`` instead of the number + of seconds. + +- **App**: ``registry_cls`` argument no longer had any effect (Issue #3613). + +- **Worker**: Event producer now uses ``connection_for_Write`` (Issue #3525). + +- **Results**: Redis/memcache backends now uses :setting:`result_expires` + to expire chord counter (Issue #3573). + + Contributed by **Tayfun Sen**. + +- **Django**: Fixed command for upgrading settings with Django (Issue #3563). + + Fix contributed by **François Voron**. + +- **Testing**: Added a ``celery_parameters`` test fixture to be able to use + customized ``Celery`` init parameters. (#3626) + + Contributed by **Steffen Allner**. + +- Documentation improvements contributed by + + - :github_user:`csfeathers` + - **Moussa Taifi** + - **Yuhannaa** + - **Laurent Peuch** + - **Christian** + - **Bruno Alla** + - **Steven Johns** + - :github_user:`tnir` + - **GDR!** + +.. _version-4.0.0: + +4.0.0 +===== +:release-date: 2016-11-04 02:00 P.M PDT +:release-by: Ask Solem + +See :ref:`whatsnew-4.0` (in :file:`docs/whatsnew-4.0.rst`). + +.. _version-4.0.0rc7: + +4.0.0rc7 +======== +:release-date: 2016-11-02 01:30 P.M PDT + +Important notes +--------------- + +- Database result backend related setting names changed from + ``sqlalchemy_*`` -> ``database_*``. + + The ``sqlalchemy_`` named settings won't work at all in this + version so you need to rename them. This is a last minute change, + and as they were not supported in 3.1 we will not be providing + aliases. + +- ``chain(A, B, C)`` now works the same way as ``A | B | C``. + + This means calling ``chain()`` might not actually return a chain, + it can return a group or any other type depending on how the + workflow can be optimized. diff --git a/docs/history/index.rst b/docs/history/index.rst index 9e656a15c49..834359be762 100644 --- a/docs/history/index.rst +++ b/docs/history/index.rst @@ -13,6 +13,7 @@ version please visit :ref:`changelog`. .. toctree:: :maxdepth: 2 + changelog-4.0 changelog-3.1 whatsnew-3.0 changelog-3.0 diff --git a/docs/images/celeryevshotsm.jpg b/docs/images/celeryevshotsm.jpg index e49927e098e..8de5f2ba424 100644 Binary files a/docs/images/celeryevshotsm.jpg and b/docs/images/celeryevshotsm.jpg differ diff --git a/docs/images/dashboard.png b/docs/images/dashboard.png index 20a8f7358c4..6951b448d56 100644 Binary files a/docs/images/dashboard.png and b/docs/images/dashboard.png differ diff --git a/docs/images/favicon.ico b/docs/images/favicon.ico index f16149fc5e5..163234f2051 100644 Binary files a/docs/images/favicon.ico and b/docs/images/favicon.ico differ diff --git a/docs/images/monitor.png b/docs/images/monitor.png index 47d7e3b58a5..39ffa529039 100644 Binary files a/docs/images/monitor.png and b/docs/images/monitor.png differ diff --git a/docs/includes/installation.txt b/docs/includes/installation.txt index 28081e2aeee..8eec9a9282a 100644 --- a/docs/includes/installation.txt +++ b/docs/includes/installation.txt @@ -120,7 +120,7 @@ Downloading and installing from source Download the latest version of Celery from PyPI: -http://pypi.python.org/pypi/celery/ +https://pypi.python.org/pypi/celery/ You can install it by doing the following,: diff --git a/docs/includes/introduction.txt b/docs/includes/introduction.txt index 02fd9ad9688..a00bddc4707 100644 --- a/docs/includes/introduction.txt +++ b/docs/includes/introduction.txt @@ -1,6 +1,6 @@ -:Version: 4.0.2 (latentcall) +:Version: 4.1.0 (latentcall) :Web: http://celeryproject.org/ -:Download: http://pypi.python.org/pypi/celery/ +:Download: https://pypi.python.org/pypi/celery/ :Source: https://github.com/celery/celery/ :Keywords: task, queue, job, async, rabbitmq, amqp, redis, python, distributed, actors @@ -150,8 +150,8 @@ It supports… .. _`Eventlet`: http://eventlet.net/ .. _`gevent`: http://gevent.org/ -.. _RabbitMQ: http://rabbitmq.com -.. _Redis: http://redis.io +.. _RabbitMQ: https://rabbitmq.com +.. _Redis: https://redis.io .. _SQLAlchemy: http://sqlalchemy.org Framework Integration @@ -178,15 +178,15 @@ The integration packages aren't strictly necessary, but they can make development easier, and sometimes they add important hooks like closing database connections at ``fork``. -.. _`Django`: http://djangoproject.com/ +.. _`Django`: https://djangoproject.com/ .. _`Pylons`: http://pylonsproject.org/ .. _`Flask`: http://flask.pocoo.org/ .. _`web2py`: http://web2py.com/ -.. _`Bottle`: http://bottlepy.org/ +.. _`Bottle`: https://bottlepy.org/ .. _`Pyramid`: http://docs.pylonsproject.org/en/latest/docs/pyramid.html -.. _`pyramid_celery`: http://pypi.python.org/pypi/pyramid_celery/ -.. _`celery-pylons`: http://pypi.python.org/pypi/celery-pylons -.. _`web2py-celery`: http://code.google.com/p/web2py-celery/ +.. _`pyramid_celery`: https://pypi.python.org/pypi/pyramid_celery/ +.. _`celery-pylons`: https://pypi.python.org/pypi/celery-pylons +.. _`web2py-celery`: https://code.google.com/p/web2py-celery/ .. _`Tornado`: http://www.tornadoweb.org/ .. _`tornado-celery`: https://github.com/mher/tornado-celery/ diff --git a/docs/includes/resources.txt b/docs/includes/resources.txt index 388f78405e0..81caf2420cf 100644 --- a/docs/includes/resources.txt +++ b/docs/includes/resources.txt @@ -11,7 +11,7 @@ Mailing list For discussions about the usage, development, and future of Celery, please join the `celery-users`_ mailing list. -.. _`celery-users`: http://groups.google.com/group/celery-users/ +.. _`celery-users`: https://groups.google.com/group/celery-users/ .. _irc-channel: @@ -21,7 +21,7 @@ IRC Come chat with us on IRC. The **#celery** channel is located at the `Freenode`_ network. -.. _`Freenode`: http://freenode.net +.. _`Freenode`: https://freenode.net .. _bug-tracker: @@ -36,7 +36,7 @@ to our issue tracker at https://github.com/celery/celery/issues/ Wiki ==== -http://wiki.github.com/celery/celery/ +https://wiki.github.com/celery/celery/ .. _contributing-short: diff --git a/docs/internals/guide.rst b/docs/internals/guide.rst index 14ab9272300..2f472969960 100644 --- a/docs/internals/guide.rst +++ b/docs/internals/guide.rst @@ -176,7 +176,7 @@ a large potential user base. In Django there's a global settings object, so multiple Django projects can't co-exist in the same process space, this later posed a problem -for using Celery with frameworks that doesn't have this limitation. +for using Celery with frameworks that don't have this limitation. Therefore the app concept was introduced. When using apps you use 'celery' objects instead of importing things from Celery sub-modules, this diff --git a/docs/internals/protocol.rst b/docs/internals/protocol.rst index a77b5309a30..1d8aa67fc8d 100644 --- a/docs/internals/protocol.rst +++ b/docs/internals/protocol.rst @@ -42,8 +42,8 @@ Definition # optional 'meth': string method_name, 'shadow': string alias_name, - 'eta': iso8601 ETA, - 'expires'; iso8601 expires, + 'eta': iso8601 ETA, + 'expires': iso8601 expires, 'retries': int retries, 'timelimit': (soft, hard), 'argsrepr': str repr(args), diff --git a/docs/internals/reference/celery.backends.async.rst b/docs/internals/reference/celery.backends.asynchronous.rst similarity index 52% rename from docs/internals/reference/celery.backends.async.rst rename to docs/internals/reference/celery.backends.asynchronous.rst index 03d10feb333..fef524294e9 100644 --- a/docs/internals/reference/celery.backends.async.rst +++ b/docs/internals/reference/celery.backends.asynchronous.rst @@ -1,12 +1,12 @@ ===================================== - ``celery.backends.async`` + ``celery.backends.asynchronous`` ===================================== .. contents:: :local: -.. currentmodule:: celery.backends.async +.. currentmodule:: celery.backends.asynchronous -.. automodule:: celery.backends.async +.. automodule:: celery.backends.asynchronous :members: :undoc-members: diff --git a/docs/internals/worker.rst b/docs/internals/worker.rst index 742c0b77b16..8e04202991c 100644 --- a/docs/internals/worker.rst +++ b/docs/internals/worker.rst @@ -38,6 +38,10 @@ When a message is received it's converted into a Tasks with an ETA, or rate-limit are entered into the `timer`, messages that can be immediately processed are sent to the execution pool. +ETA and rate-limit are 2 incompatible parameters, and the ETA is overriding +the rate-limit by default. A task with both will follow its ETA and ignore its +rate-limit. + Timer ----- diff --git a/docs/sec/CELERYSA-0001.txt b/docs/sec/CELERYSA-0001.txt index cdf5a879049..bb892965757 100644 --- a/docs/sec/CELERYSA-0001.txt +++ b/docs/sec/CELERYSA-0001.txt @@ -62,19 +62,19 @@ Users of the 2.4 series should upgrade to 2.4.4: * ``pip install -U celery``, or * ``easy_install -U celery``, or - * http://pypi.python.org/pypi/celery/2.4.4 + * https://pypi.python.org/pypi/celery/2.4.4 Users of the 2.3 series should upgrade to 2.3.4: * ``pip install -U celery==2.3.4``, or * ``easy_install -U celery==2.3.4``, or - * http://pypi.python.org/pypi/celery/2.3.4 + * https://pypi.python.org/pypi/celery/2.3.4 Users of the 2.2 series should upgrade to 2.2.8: * ``pip install -U celery==2.2.8``, or * ``easy_install -U celery==2.2.8``, or - * http://pypi.python.org/pypi/celery/2.2.8 + * https://pypi.python.org/pypi/celery/2.2.8 The 2.1 series is no longer being maintained, so we urge users of that series to upgrade to a more recent version. @@ -84,7 +84,7 @@ with updated packages. Please direct questions to the celery-users mailing-list: -http://groups.google.com/group/celery-users/, +https://groups.google.com/group/celery-users/, or if you're planning to report a security issue we request that you keep the information confidential by contacting diff --git a/docs/sec/CELERYSA-0002.txt b/docs/sec/CELERYSA-0002.txt index 0c44cde594e..5e239bb5f6e 100644 --- a/docs/sec/CELERYSA-0002.txt +++ b/docs/sec/CELERYSA-0002.txt @@ -69,19 +69,19 @@ Or you can upgrade to a more recent version: * ``pip install -U celery``, or * ``easy_install -U celery``, or - * http://pypi.python.org/pypi/celery/3.1.13 + * https://pypi.python.org/pypi/celery/3.1.13 - Users of the 3.0 series should upgrade to 3.0.25: * ``pip install -U celery==3.0.25``, or * ``easy_install -U celery==3.0.25``, or - * http://pypi.python.org/pypi/celery/3.0.25 + * https://pypi.python.org/pypi/celery/3.0.25 Distribution package maintainers are urged to provide their users with updated packages. Please direct questions to the celery-users mailing-list: -http://groups.google.com/group/celery-users/, +https://groups.google.com/group/celery-users/, or if you're planning to report a new security related issue we request that you keep the information confidential by contacting diff --git a/docs/sec/CELERYSA-0003.txt b/docs/sec/CELERYSA-0003.txt index f3eccd0d69e..13e48bc0a27 100644 --- a/docs/sec/CELERYSA-0003.txt +++ b/docs/sec/CELERYSA-0003.txt @@ -50,7 +50,7 @@ Distribution package maintainers are urged to provide their users with updated packages. Please direct questions to the celery-users mailing-list: -http://groups.google.com/group/celery-users/, +https://groups.google.com/group/celery-users/, or if you're planning to report a new security related issue we request that you keep the information confidential by contacting diff --git a/docs/templates/readme.txt b/docs/templates/readme.txt index ad75d66f2f2..3be7fee8ccc 100644 --- a/docs/templates/readme.txt +++ b/docs/templates/readme.txt @@ -21,12 +21,12 @@ .. |wheel| image:: https://img.shields.io/pypi/wheel/celery.svg :alt: Celery can be installed via wheel - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ .. |pyversion| image:: https://img.shields.io/pypi/pyversions/celery.svg :alt: Supported Python versions. - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ .. |pyimp| image:: https://img.shields.io/pypi/implementation/celery.svg :alt: Support Python implementations. - :target: http://pypi.python.org/pypi/celery/ + :target: https://pypi.python.org/pypi/celery/ diff --git a/docs/userguide/calling.rst b/docs/userguide/calling.rst index a21dc06a5f9..3e9ac57f340 100644 --- a/docs/userguide/calling.rst +++ b/docs/userguide/calling.rst @@ -45,10 +45,10 @@ The API defines a standard set of execution options, as well as three methods: - ``T.apply_async((arg,), {'kwarg': value})`` - ``T.apply_async(countdown=10)`` - executes 10 seconds from now. + executes in 10 seconds from now. - ``T.apply_async(eta=now + timedelta(seconds=10))`` - executes 10 seconds from now, specified using ``eta`` + executes in 10 seconds from now, specified using ``eta`` - ``T.apply_async(countdown=60, expires=120)`` executes in one minute from now, but expires after 2 minutes. diff --git a/docs/userguide/canvas.rst b/docs/userguide/canvas.rst index 13f5a505cfc..cf0e49f21df 100644 --- a/docs/userguide/canvas.rst +++ b/docs/userguide/canvas.rst @@ -766,7 +766,8 @@ Chords Tasks used within a chord must *not* ignore their results. If the result backend is disabled for *any* task (header or body) in your chord you - should read ":ref:`chord-important-notes`." + should read ":ref:`chord-important-notes`." Chords are not currently + supported with the RPC result backend. A chord is a task that only executes after all of the tasks in a group have diff --git a/docs/userguide/configuration.rst b/docs/userguide/configuration.rst index 9d531346cc3..1199668ccfa 100644 --- a/docs/userguide/configuration.rst +++ b/docs/userguide/configuration.rst @@ -109,24 +109,24 @@ rush in moving to the new settings format. ``CELERY_SECURITY_CERTIFICATE`` :setting:`security_certificate` ``CELERY_SECURITY_CERT_STORE`` :setting:`security_cert_store` ``CELERY_SECURITY_KEY`` :setting:`security_key` -``CELERY_ACKS_LATE`` :setting:`task_acks_late` -``CELERY_ALWAYS_EAGER`` :setting:`task_always_eager` -``CELERY_ANNOTATIONS`` :setting:`task_annotations` -``CELERY_MESSAGE_COMPRESSION`` :setting:`task_compression` -``CELERY_CREATE_MISSING_QUEUES`` :setting:`task_create_missing_queues` -``CELERY_DEFAULT_DELIVERY_MODE`` :setting:`task_default_delivery_mode` -``CELERY_DEFAULT_EXCHANGE`` :setting:`task_default_exchange` -``CELERY_DEFAULT_EXCHANGE_TYPE`` :setting:`task_default_exchange_type` -``CELERY_DEFAULT_QUEUE`` :setting:`task_default_queue` -``CELERY_DEFAULT_RATE_LIMIT`` :setting:`task_default_rate_limit` -``CELERY_DEFAULT_ROUTING_KEY`` :setting:`task_default_routing_key` -``[...]_EAGER_PROPAGATES_EXCEPTIONS`` :setting:`task_eager_propagates` -``CELERY_IGNORE_RESULT`` :setting:`task_ignore_result` +``CELERY_TASK_ACKS_LATE`` :setting:`task_acks_late` +``CELERY_TASK_ALWAYS_EAGER`` :setting:`task_always_eager` +``CELERY_TASK_ANNOTATIONS`` :setting:`task_annotations` +``CELERY_TASK_COMPRESSION`` :setting:`task_compression` +``CELERY_TASK_CREATE_MISSING_QUEUES`` :setting:`task_create_missing_queues` +``CELERY_TASK_DEFAULT_DELIVERY_MODE`` :setting:`task_default_delivery_mode` +``CELERY_TASK_DEFAULT_EXCHANGE`` :setting:`task_default_exchange` +``CELERY_TASK_DEFAULT_EXCHANGE_TYPE`` :setting:`task_default_exchange_type` +``CELERY_TASK_DEFAULT_QUEUE`` :setting:`task_default_queue` +``CELERY_TASK_DEFAULT_RATE_LIMIT`` :setting:`task_default_rate_limit` +``CELERY_TASK_DEFAULT_ROUTING_KEY`` :setting:`task_default_routing_key` +``CELERY_TASK_EAGER_PROPAGATES`` :setting:`task_eager_propagates` +``CELERY_TASK_IGNORE_RESULT`` :setting:`task_ignore_result` ``CELERY_TASK_PUBLISH_RETRY`` :setting:`task_publish_retry` ``CELERY_TASK_PUBLISH_RETRY_POLICY`` :setting:`task_publish_retry_policy` -``CELERY_QUEUES`` :setting:`task_queues` -``CELERY_ROUTES`` :setting:`task_routes` -``CELERY_SEND_TASK_SENT_EVENT`` :setting:`task_send_sent_event` +``CELERY_TASK_QUEUES`` :setting:`task_queues` +``CELERY_TASK_ROUTES`` :setting:`task_routes` +``CELERY_TASK_SEND_SENT_EVENT`` :setting:`task_send_sent_event` ``CELERY_TASK_SERIALIZER`` :setting:`task_serializer` ``CELERYD_TASK_SOFT_TIME_LIMIT`` :setting:`task_soft_time_limit` ``CELERYD_TASK_TIME_LIMIT`` :setting:`task_time_limit` @@ -584,13 +584,13 @@ Can be one of the following: .. _`SQLAlchemy`: http://sqlalchemy.org .. _`Memcached`: http://memcached.org -.. _`Redis`: http://redis.io +.. _`Redis`: https://redis.io .. _`Cassandra`: http://cassandra.apache.org/ .. _`Elasticsearch`: https://aws.amazon.com/elasticsearch-service/ .. _`IronCache`: http://www.iron.io/cache .. _`CouchDB`: http://www.couchdb.com/ -.. _`Couchbase`: http://www.couchbase.com/ -.. _`Consul`: http://consul.io/ +.. _`Couchbase`: https://www.couchbase.com/ +.. _`Consul`: https://consul.io/ .. setting:: result_serializer @@ -878,6 +878,16 @@ The fields of the URL are defined as follows: Database number to use. Default is 0. The db can include an optional leading slash. +.. setting:: redis_backend_use_ssl + +``redis_backend_use_ssl`` +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: Disabled. + +The Redis backend supports SSL. The valid values of this options are the same +as :setting:`broker_use_ssl`. + .. setting:: redis_max_connections ``redis_max_connections`` @@ -905,7 +915,7 @@ in seconds (int/float) ``redis_socket_timeout`` ~~~~~~~~~~~~~~~~~~~~~~~~ -Default: 5.0 seconds. +Default: 120.0 seconds. Socket timeout for reading/writing operations to the Redis server in seconds (int/float), used by the redis result backend. @@ -1055,6 +1065,33 @@ Example configuration result_backend = 'elasticsearch://example.com:9200/index_name/doc_type' +.. setting:: elasticsearch_retry_on_timeout + +``elasticsearch_retry_on_timeout`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: :const:`False` + +Should timeout trigger a retry on different node? + +.. setting:: elasticsearch_max_retries + +``elasticsearch_max_retries`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: 3. + +Maximum number of retries before an exception is propagated. + +.. setting:: elasticsearch_timeout + +``elasticsearch_timeout`` +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Default: 10.0 seconds. + +Global timeout,used by the elasticsearch result backend. + .. _conf-riak-result-backend: Riak backend settings @@ -1712,7 +1749,7 @@ Example:: # Random failover strategy def random_failover_strategy(servers): - it = list(it) # don't modify callers list + it = list(servers) # don't modify callers list shuffle = random.shuffle for _ in repeat(None): shuffle(it) @@ -1766,6 +1803,11 @@ Default: Disabled. Toggles SSL usage on broker connection and SSL settings. +The valid values for this option vary by transport. + +``pyamqp`` +__________ + If ``True`` the connection will use SSL with default SSL settings. If set to a dict, will configure SSL connection according to the specified policy. The format used is Python's :func:`ssl.wrap_socket` options. @@ -1793,6 +1835,21 @@ certificate authority: `ssl module security considerations `_. +``redis`` +_________ + + +The setting must be a dict the keys: + +* ``ssl_cert_reqs`` (required): one of the ``SSLContext.verify_mode`` values: + * ``ssl.CERT_NONE`` + * ``ssl.CERT_OPTIONAL`` + * ``ssl.CERT_REQUIRED`` +* ``ssl_ca_certs`` (optional): path to the CA certificate +* ``ssl_certfile`` (optional): path to the client certificate +* ``ssl_keyfile`` (optional): path to the client key + + .. setting:: broker_pool_limit ``broker_pool_limit`` @@ -2336,7 +2393,7 @@ Name of the consumer class used by the worker. ``worker_timer`` ~~~~~~~~~~~~~~~~ -Default: ``"kombu.async.hub.timer:Timer"``. +Default: ``"kombu.asynchronous.hub.timer:Timer"``. Name of the ETA scheduler class used by the worker. Default is or set by the pool implementation. diff --git a/docs/userguide/extending.rst b/docs/userguide/extending.rst index b81d39e868a..66fd06aa849 100644 --- a/docs/userguide/extending.rst +++ b/docs/userguide/extending.rst @@ -44,7 +44,7 @@ whenever the connection is established: message.ack() app.steps['consumer'].add(MyConsumerStep) - def send_me_a_message(self, who='world!', producer=None): + def send_me_a_message(who, producer=None): with app.producer_or_acquire(producer) as producer: producer.publish( {'hello': who}, @@ -56,7 +56,7 @@ whenever the connection is established: ) if __name__ == '__main__': - send_me_a_message('celery') + send_me_a_message('world!') .. note:: @@ -148,7 +148,7 @@ Attributes .. attribute:: hub - Event loop object (:class:`~kombu.async.Hub`). You can use + Event loop object (:class:`~kombu.asynchronous.Hub`). You can use this to register callbacks in the event loop. This is only supported by async I/O enabled transports (amqp, redis), @@ -179,7 +179,7 @@ Attributes .. attribute:: timer - :class:`~kombu.async.timer.Timer` used to schedule functions. + :class:`~kombu.asynchronous.timer.Timer` used to schedule functions. Your worker bootstep must require the Timer bootstep to use this: @@ -873,7 +873,7 @@ Worker API ========== -:class:`~kombu.async.Hub` - The workers async event loop +:class:`~kombu.asynchronous.Hub` - The workers async event loop -------------------------------------------------------- :supported transports: amqp, redis diff --git a/docs/userguide/periodic-tasks.rst b/docs/userguide/periodic-tasks.rst index 282a35fb3a6..0d5fa3ce663 100644 --- a/docs/userguide/periodic-tasks.rst +++ b/docs/userguide/periodic-tasks.rst @@ -105,7 +105,7 @@ that we'll not evaluate the app at module level when using ``test.s()``. The :meth:`~@add_periodic_task` function will add the entry to the :setting:`beat_schedule` setting behind the scenes, and the same setting -can also can be used to set up periodic tasks manually: +can also be used to set up periodic tasks manually: Example: Run the `tasks.add` task every 30 seconds. @@ -413,7 +413,7 @@ Using custom scheduler classes ------------------------------ Custom scheduler classes can be specified on the command-line (the -:option:`-S ` argument). +:option:`--scheduler ` argument). The default scheduler is the :class:`celery.beat.PersistentScheduler`, that simply keeps track of the last run times in a local :mod:`shelve` @@ -439,7 +439,7 @@ To install and use this extension: 'django_celery_beat', ) - Note that there is no dash in the module name, only underscores. + Note that there is no dash in the module name, only underscores. #. Apply Django database migrations so that the necessary tables are created: @@ -447,10 +447,12 @@ To install and use this extension: $ python manage.py migrate -#. Start the :program:`celery beat` service using the ``django`` scheduler: +#. Start the :program:`celery beat` service using the ``django_celery_beat.schedulers:DatabaseScheduler`` scheduler: .. code-block:: console - $ celery -A proj beat -l info -S django + $ celery -A proj beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler + + Note: You may also add this as an settings option directly. #. Visit the Django-Admin interface to set up some periodic tasks. diff --git a/docs/userguide/routing.rst b/docs/userguide/routing.rst index 16680bb64a7..f046c1faf35 100644 --- a/docs/userguide/routing.rst +++ b/docs/userguide/routing.rst @@ -54,8 +54,8 @@ specify the router in *items* format instead: .. code-block:: python task_routes = ([ - ('feed.tasks.*': {'queue': 'feeds'}), - ('web.tasks.*': {'queue': 'web'}), + ('feed.tasks.*', {'queue': 'feeds'}), + ('web.tasks.*', {'queue': 'web'}), (re.compile(r'(video|image)\.tasks\..*'), {'queue': 'media'}), ],) @@ -210,7 +210,7 @@ If you're confused about these terms, you should read up on AMQP. .. _`Rabbits and Warrens`: http://blogs.digitar.com/jjww/2009/01/rabbits-and-warrens/ .. _`CloudAMQP tutorial`: amqp in 10 minutes part 3 https://www.cloudamqp.com/blog/2015-09-03-part4-rabbitmq-for-beginners-exchanges-routing-keys-bindings.html -.. _`RabbitMQ FAQ`: http://www.rabbitmq.com/faq.html +.. _`RabbitMQ FAQ`: https://www.rabbitmq.com/faq.html .. _routing-special_options: @@ -676,7 +676,12 @@ copies of tasks to all workers connected to it: from kombu.common import Broadcast app.conf.task_queues = (Broadcast('broadcast_tasks'),) - app.conf.task_routes = {'tasks.reload_cache': {'queue': 'broadcast_tasks'}} + app.conf.task_routes = { + 'tasks.reload_cache': { + 'queue': 'broadcast_tasks', + 'exchange': 'broadcast_tasks' + } + } Now the ``tasks.reload_cache`` task will be sent to every worker consuming from this queue. diff --git a/docs/userguide/signals.rst b/docs/userguide/signals.rst index a54fa9a8ef9..787e3b75c48 100644 --- a/docs/userguide/signals.rst +++ b/docs/userguide/signals.rst @@ -88,8 +88,10 @@ Provides arguments: Task message body. - This is a mapping containing the task message fields - (see :ref:`message-protocol-task-v1`). + This is a mapping containing the task message fields, + see :ref:`message-protocol-task-v2` + and :ref:`message-protocol-task-v1` + for a reference of possible fields that can be defined. * ``exchange`` @@ -133,13 +135,13 @@ Provides arguments: * ``headers`` The task message headers, see :ref:`message-protocol-task-v2` - and :ref:`message-protocol-task-v1`. + and :ref:`message-protocol-task-v1` for a reference of possible fields that can be defined. * ``body`` The task message body, see :ref:`message-protocol-task-v2` - and :ref:`message-protocol-task-v1`. + and :ref:`message-protocol-task-v1` for a reference of possible fields that can be defined. * ``exchange`` @@ -497,6 +499,27 @@ Dispatched when Celery sends a worker heartbeat. Sender is the :class:`celery.worker.heartbeat.Heart` instance. +.. signal:: worker_shutting_down + +``worker_shutting_down`` +~~~~~~~~~~~~~~~~~~~~~~~~ + +Dispatched when the worker begins the shutdown process. + +Provides arguments: + +* ``sig`` + + The POSIX signal that was received. + +* ``how`` + + The shutdown method, warm or cold. + +* ``exitcode`` + + The exitcode that will be used when the main process exits. + .. signal:: worker_process_init ``worker_process_init`` diff --git a/docs/userguide/tasks.rst b/docs/userguide/tasks.rst index 053dc778b24..bddeb6dbf1a 100644 --- a/docs/userguide/tasks.rst +++ b/docs/userguide/tasks.rst @@ -66,10 +66,10 @@ consider enabling the :setting:`task_reject_on_worker_lost` setting. The default prefork pool scheduler is not friendly to long-running tasks, so if you have tasks that run for minutes/hours make sure you enable - the -Ofair`` command-line argument to the :program:`celery worker`. - See :ref:`prefork-pool-prefetch` for more information, and for the - best performance route long-running and short-running tasks to - dedicated workers (:ref:`routing-automatic`). + the :option:`-Ofair ` command-line argument to + the :program:`celery worker`. See :ref:`prefork-pool-prefetch` for more + information, and for the best performance route long-running and + short-running tasks to dedicated workers (:ref:`routing-automatic`). If your worker hangs then please investigate what tasks are running before submitting an issue, as most likely the hanging is caused @@ -173,7 +173,7 @@ The ``base`` argument to the task decorator specifies the base class of the task class MyTask(celery.Task): def on_failure(self, exc, task_id, args, kwargs, einfo): - print('{0!r} failed: {1!r}'.format(task_id, exc) + print('{0!r} failed: {1!r}'.format(task_id, exc)) @task(base=MyTask) def add(x, y): @@ -856,6 +856,10 @@ General maximum number of requests per second), you must restrict to a given queue. + .. note:: + + This attribute is ignored if the task is requested with an ETA. + .. attribute:: Task.time_limit The hard time limit, in seconds, for this task. @@ -1569,6 +1573,7 @@ By default celery will not enable you to run tasks within task synchronously in rare or extreme cases you might have to do so. **WARNING**: enabling subtasks run synchronously is not recommended! + .. code-block:: python @app.task @@ -1739,29 +1744,23 @@ There's a race condition if the task starts executing before the transaction has been committed; The database object doesn't exist yet! -The solution is to use the ``on_commit`` callback to launch your celery task +The solution is to use the ``on_commit`` callback to launch your celery task once all transactions have been committed successfully. .. code-block:: python + from django.db.transaction import on_commit - + def create_article(request): article = Article.objects.create() on_commit(lambda: expand_abbreviations.delay(article.pk)) .. note:: - Django 1.6 (and later) now enables autocommit mode by default, - and ``commit_on_success``/``commit_manually`` are deprecated. - - This means each SQL query is wrapped and executed in individual - transactions, making it less likely to experience the - problem described above. - - However, enabling ``ATOMIC_REQUESTS`` on the database - connection will bring back the transaction-per-request model and the - race condition along with it. In this case, the simple solution is - using the ``on_commit`` callback to launch your task after all - transactions are completed. + ``on_commit`` is available in Django 1.9 and above, if you are using a + version prior to that then the `django-transaction-hooks`_ library + adds support for this. + +.. _`django-transaction-hooks`: https://github.com/carljm/django-transaction-hooks .. _task-example: diff --git a/docs/userguide/testing.rst b/docs/userguide/testing.rst index 32afee70ff7..0782babedf9 100644 --- a/docs/userguide/testing.rst +++ b/docs/userguide/testing.rst @@ -76,8 +76,9 @@ in this example: name='Foo', ) - # set a side effect on the patched method - # so that it raises the error we want. + # Set a side effect on the patched methods + # so that they raise the errors we want. + send_order_retry.side_effect = Retry() product_order.side_effect = OperationalError() with raises(Retry): @@ -154,7 +155,7 @@ Example: .. code-block:: python - # Put this in your confttest.py + # Put this in your conftest.py @pytest.fixture(scope='session') def celery_config(): return { diff --git a/docs/userguide/workers.rst b/docs/userguide/workers.rst index 90d1f1437cf..b926fdb315d 100644 --- a/docs/userguide/workers.rst +++ b/docs/userguide/workers.rst @@ -1159,7 +1159,7 @@ for example one that reads the current prefetch count: @inspect_command def current_prefetch_count(state): - return {'prefetch_count': state.consumer.qos.value} + return {'prefetch_count': state.consumer.qos.value} After restarting the worker you can now query this value using the diff --git a/docs/whatsnew-3.1.rst b/docs/whatsnew-3.1.rst index 453e12cade9..a82faff07d8 100644 --- a/docs/whatsnew-3.1.rst +++ b/docs/whatsnew-3.1.rst @@ -848,7 +848,7 @@ In Other News Contributed by Alain Masiero. - .. _`Couchbase`: http://www.couchbase.com + .. _`Couchbase`: https://www.couchbase.com - CentOS init-script now supports starting multiple worker instances. diff --git a/docs/whatsnew-4.0.rst b/docs/whatsnew-4.0.rst index b7c72312f8a..24029bc6ea9 100644 --- a/docs/whatsnew-4.0.rst +++ b/docs/whatsnew-4.0.rst @@ -51,7 +51,7 @@ Not only does it come with many new features, but it also fixes a massive list of bugs, so in many ways you could call it our "Snow Leopard" release. -The next major version of Celery will support Python 3.5 only, were +The next major version of Celery will support Python 3.5 only, where we are planning to take advantage of the new asyncio library. This release would not have been possible without the support @@ -65,7 +65,7 @@ all the contributors who help make this happen, and my colleagues at `Robinhood`_. .. _`Ty Wilkins`: http://tywilkins.com -.. _`Robinhood`: http://robinhood.com +.. _`Robinhood`: https://robinhood.com Wall of Contributors -------------------- @@ -551,7 +551,7 @@ these manually: class CustomTask(Task): def run(self): print('running') - app.register_task(CustomTask()) + CustomTask = app.register_task(CustomTask()) The best practice is to use custom task classes only for overriding general behavior, and then using the task decorator to realize the task: @@ -1347,7 +1347,7 @@ This allows Celery to store Task results in the K/V store of Consul. Consul also allows to set a TTL on keys using the Sessions from Consul. This way the backend supports auto expiry of Task results. -For more information on Consul visit http://consul.io/ +For more information on Consul visit https://consul.io/ The backend uses :pypi:`python-consul` for talking to the HTTP API. This package is fully Python 3 compliant just as this backend is: @@ -2286,7 +2286,7 @@ Logging Settings ``CELERYD_LOG_LEVEL`` :option:`celery worker --loglevel` ``CELERYD_LOG_FILE`` :option:`celery worker --logfile` ``CELERYBEAT_LOG_LEVEL`` :option:`celery beat --loglevel` -``CELERYBEAT_LOG_FILE`` :option:`celery beat --loglevel` +``CELERYBEAT_LOG_FILE`` :option:`celery beat --logfile` ``CELERYMON_LOG_LEVEL`` celerymon is deprecated, use flower ``CELERYMON_LOG_FILE`` celerymon is deprecated, use flower ``CELERYMON_LOG_FORMAT`` celerymon is deprecated, use flower diff --git a/examples/django/README.rst b/examples/django/README.rst index ce4eb5bd97d..0334ef7df04 100644 --- a/examples/django/README.rst +++ b/examples/django/README.rst @@ -8,7 +8,7 @@ Contents ``proj/`` --------- -This is the project iself, created using +This is a project in itself, created using ``django-admin.py startproject proj``, and then the settings module (``proj/settings.py``) was modified to add ``demoapp`` to ``INSTALLED_APPS`` diff --git a/examples/django/proj/celery.py b/examples/django/proj/celery.py index ede583b4d2f..b7f56d2cf00 100644 --- a/examples/django/proj/celery.py +++ b/examples/django/proj/celery.py @@ -7,7 +7,7 @@ app = Celery('proj') -# Using a string here means the worker don't have to serialize +# Using a string here means the worker doesn't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. diff --git a/extra/appveyor/install.ps1 b/extra/appveyor/install.ps1 index 3f05628255a..7166f65e37a 100644 --- a/extra/appveyor/install.ps1 +++ b/extra/appveyor/install.ps1 @@ -1,6 +1,6 @@ # Sample script to install Python and pip under Windows # Authors: Olivier Grisel and Kyle Kastner -# License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ +# License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/ $BASE_URL = "https://www.python.org/ftp/python/" $GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py" diff --git a/extra/appveyor/run_with_compiler.cmd b/extra/appveyor/run_with_compiler.cmd index 3a472bc836c..31bd205ecbb 100644 --- a/extra/appveyor/run_with_compiler.cmd +++ b/extra/appveyor/run_with_compiler.cmd @@ -13,10 +13,10 @@ :: :: More details at: :: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows -:: http://stackoverflow.com/a/13751649/163740 +:: https://stackoverflow.com/a/13751649/163740 :: :: Author: Olivier Grisel -:: License: CC0 1.0 Universal: http://creativecommons.org/publicdomain/zero/1.0/ +:: License: CC0 1.0 Universal: https://creativecommons.org/publicdomain/zero/1.0/ @ECHO OFF SET COMMAND_TO_RUN=%* diff --git a/extra/generic-init.d/celerybeat b/extra/generic-init.d/celerybeat index 08c815bdf93..8f977903e3a 100755 --- a/extra/generic-init.d/celerybeat +++ b/extra/generic-init.d/celerybeat @@ -6,7 +6,7 @@ # :Usage: /etc/init.d/celerybeat {start|stop|force-reload|restart|try-restart|status} # :Configuration file: /etc/default/celerybeat or /etc/default/celeryd # -# See http://docs.celeryproject.org/en/latest/tutorials/daemonizing.html#generic-init-scripts +# See http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#generic-init-scripts ### BEGIN INIT INFO # Provides: celerybeat diff --git a/extra/generic-init.d/celeryd b/extra/generic-init.d/celeryd index de55414d1a4..1636619452e 100755 --- a/extra/generic-init.d/celeryd +++ b/extra/generic-init.d/celeryd @@ -6,7 +6,7 @@ # :Usage: /etc/init.d/celeryd {start|stop|force-reload|restart|try-restart|status} # :Configuration file: /etc/default/celeryd (or /usr/local/etc/celeryd on BSD) # -# See http://docs.celeryproject.org/en/latest/tutorials/daemonizing.html#generic-init-scripts +# See http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#generic-init-scripts ### BEGIN INIT INFO diff --git a/extra/release/sphinx2rst_config.py b/extra/release/sphinx2rst_config.py index daca1061bb0..3f104caa32f 100644 --- a/extra/release/sphinx2rst_config.py +++ b/extra/release/sphinx2rst_config.py @@ -3,7 +3,7 @@ REFBASE = 'http://docs.celeryproject.org/en/latest' REFS = { 'mailing-list': - 'http://groups.google.com/group/celery-users', + 'https://groups.google.com/group/celery-users', 'irc-channel': 'getting-started/resources.html#irc', 'breakpoint-signal': 'tutorials/debugging.html', 'internals-guide': 'internals/guide.html', diff --git a/extra/systemd/celery.conf b/extra/systemd/celery.conf index 1531c3cbacc..53d5282ce2b 100644 --- a/extra/systemd/celery.conf +++ b/extra/systemd/celery.conf @@ -1,5 +1,5 @@ # See -# http://docs.celeryproject.org/en/latest/tutorials/daemonizing.html#available-options +# http://docs.celeryproject.org/en/latest/userguide/daemonizing.html#usage-systemd CELERY_APP="proj" CELERYD_NODES="worker" diff --git a/extra/systemd/celery.service b/extra/systemd/celery.service index 00649c5cac9..d0c4e6f579b 100644 --- a/extra/systemd/celery.service +++ b/extra/systemd/celery.service @@ -9,8 +9,8 @@ Group=celery EnvironmentFile=-/etc/conf.d/celery WorkingDirectory=/opt/celery ExecStart=/bin/sh -c '${CELERY_BIN} multi start $CELERYD_NODES \ - -A $CELERY_APP --logfile=${CELERYD_LOG_FILE} \ - --pidfile=${CELERYD_PID_FILE} $CELERYD_OPTS' + -A $CELERY_APP --pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \ + --loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS' ExecStop=/bin/sh -c '${CELERY_BIN} multi stopwait $CELERYD_NODES \ --pidfile=${CELERYD_PID_FILE}' ExecReload=/bin/sh -c '${CELERY_BIN} multi restart $CELERYD_NODES \ diff --git a/requirements-coveralls.txt b/requirements-coveralls.txt new file mode 100644 index 00000000000..0dffbee184d --- /dev/null +++ b/requirements-coveralls.txt @@ -0,0 +1,5 @@ +# Use public repo for coverall upload + +coverage==4.3.4 +gevent==1.4.0 +coveralls diff --git a/requirements/default.txt b/requirements/default.txt index fc5d29e6b08..5a65c088e24 100644 --- a/requirements/default.txt +++ b/requirements/default.txt @@ -1,3 +1,4 @@ pytz>dev billiard>=3.5.0.2,<3.6.0 -kombu>=4.0.2,<5.0 +kombu>=4.2.0,<5.0 +vine<1.4.0 diff --git a/requirements/docs.txt b/requirements/docs.txt index c5085cac21f..3714e3dc6e1 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,3 +1,4 @@ -sphinx_celery>=1.3 +sphinx_celery>=1.3,<2.0 +Sphinx<2.0 typing -r extras/sqlalchemy.txt diff --git a/requirements/pkgutils.txt b/requirements/pkgutils.txt index 343d0eb92e6..19824f9d7d3 100644 --- a/requirements/pkgutils.txt +++ b/requirements/pkgutils.txt @@ -2,8 +2,9 @@ setuptools>=20.6.7 wheel>=0.29.0 flake8>=2.5.4 flakeplus>=1.1 -pydocstyle -tox>=2.3.1 +pydocstyle==1.1.1 +tox>=2.3.1,<2.6 +importlib-metadata<2 sphinx2rst>=1.0 cyanide>=1.0.1 bumpversion diff --git a/requirements/test-ci-base.txt b/requirements/test-ci-base.txt index 0bc747a3167..a5ef45aca77 100644 --- a/requirements/test-ci-base.txt +++ b/requirements/test-ci-base.txt @@ -1,4 +1,4 @@ -pytest-cov +pytest-cov==2.4.0 codecov -r extras/redis.txt -r extras/sqlalchemy.txt diff --git a/requirements/test.txt b/requirements/test.txt index 0b61e932537..7d6e8b19cb1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -1,2 +1,3 @@ case>=1.3.1 -pytest>=3.0 +pytest>=4.3.1,<4.4.0 +coverage<5.0 diff --git a/setup.cfg b/setup.cfg index a39fa5b909e..d901610cd70 100644 --- a/setup.cfg +++ b/setup.cfg @@ -20,5 +20,5 @@ requires = pytz >= 2016.7 billiard >= 3.5.0.2 kombu >= 4.0.2 -[wheel] +[bdist_wheel] universal = 1 diff --git a/setup.py b/setup.py index f23574ad005..455da23e1f6 100644 --- a/setup.py +++ b/setup.py @@ -90,6 +90,8 @@ def _pyimp(): Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 Programming Language :: Python :: Implementation :: CPython Programming Language :: Python :: Implementation :: PyPy Operating System :: OS Independent diff --git a/t/integration/tasks.py b/t/integration/tasks.py index f10aaaa46b5..2987529b04f 100644 --- a/t/integration/tasks.py +++ b/t/integration/tasks.py @@ -55,3 +55,20 @@ def collect_ids(self, res, i): """ return res, (self.request.root_id, self.request.parent_id, i) + + +@shared_task(bind=True, expires=60.0, max_retries=1) +def retry_once(self): + """Task that fails and is retried. Returns the number of retries.""" + if self.request.retries: + return self.request.retries + raise self.retry(countdown=0.1) + + +@shared_task +def redis_echo(message): + """Task that appends the message to a redis list""" + from redis import StrictRedis + + redis_connection = StrictRedis() + redis_connection.rpush('redis-echo', message) diff --git a/t/integration/test_canvas.py b/t/integration/test_canvas.py index 3efe6532351..647034d7789 100644 --- a/t/integration/test_canvas.py +++ b/t/integration/test_canvas.py @@ -1,10 +1,11 @@ from __future__ import absolute_import, unicode_literals import pytest +from redis import StrictRedis from celery import chain, chord, group from celery.exceptions import TimeoutError from celery.result import AsyncResult, GroupResult from .conftest import flaky -from .tasks import add, add_replaced, add_to_all, collect_ids, ids +from .tasks import add, add_replaced, add_to_all, collect_ids, ids, redis_echo TIMEOUT = 120 @@ -27,6 +28,33 @@ def test_complex_chain(self, manager): res = c() assert res.get(timeout=TIMEOUT) == [64, 65, 66, 67] + @flaky + def test_group_chord_group_chain(self, manager): + from celery.five import bytes_if_py2 + + if not manager.app.conf.result_backend.startswith('redis'): + raise pytest.skip('Requires redis result backend.') + redis_connection = StrictRedis() + redis_connection.delete('redis-echo') + before = group(redis_echo.si('before {}'.format(i)) for i in range(3)) + connect = redis_echo.si('connect') + after = group(redis_echo.si('after {}'.format(i)) for i in range(2)) + + result = (before | connect | after).delay() + result.get(timeout=TIMEOUT) + redis_messages = list(map( + bytes_if_py2, + redis_connection.lrange('redis-echo', 0, -1) + )) + before_items = \ + set(map(bytes_if_py2, (b'before 0', b'before 1', b'before 2'))) + after_items = set(map(bytes_if_py2, (b'after 0', b'after 1'))) + + assert set(redis_messages[:3]) == before_items + assert redis_messages[3] == b'connect' + assert set(redis_messages[4:]) == after_items + redis_connection.delete('redis-echo') + @flaky def test_parent_ids(self, manager, num=10): assert manager.inspect().ping() diff --git a/t/integration/test_tasks.py b/t/integration/test_tasks.py index 965e79604c1..1b2ba71fd13 100644 --- a/t/integration/test_tasks.py +++ b/t/integration/test_tasks.py @@ -1,7 +1,7 @@ from __future__ import absolute_import, unicode_literals from celery import group from .conftest import flaky -from .tasks import print_unicode, sleeping +from .tasks import print_unicode, retry_once, sleeping class test_tasks: @@ -12,6 +12,11 @@ def test_task_accepted(self, manager, sleep=1): sleeping.delay(sleep) manager.assert_accepted([r1.id]) + @flaky + def test_task_retried(self): + res = retry_once.delay() + assert res.get(timeout=10) == 1 # retried once + @flaky def test_unicode_task(self, manager): manager.join( diff --git a/t/unit/app/test_amqp.py b/t/unit/app/test_amqp.py index 38271076261..d1157ad2a83 100644 --- a/t/unit/app/test_amqp.py +++ b/t/unit/app/test_amqp.py @@ -264,6 +264,41 @@ def test_send_task_message__queue_string(self): assert kwargs['routing_key'] == 'foo' assert kwargs['exchange'] == '' + def test_send_task_message__broadcast_without_exchange(self): + from kombu.common import Broadcast + evd = Mock(name='evd') + self.app.amqp.send_task_message( + Mock(), 'foo', self.simple_message, retry=False, + routing_key='xyz', queue=Broadcast('abc'), + event_dispatcher=evd, + ) + evd.publish.assert_called() + event = evd.publish.call_args[0][1] + assert event['routing_key'] == 'xyz' + assert event['exchange'] == 'abc' + + def test_send_event_exchange_direct_with_exchange(self): + prod = Mock(name='prod') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, queue='bar', + retry=False, exchange_type='direct', exchange='xyz', + ) + prod.publish.assert_called() + pub = prod.publish.call_args[1] + assert pub['routing_key'] == 'bar' + assert pub['exchange'] == '' + + def test_send_event_exchange_direct_with_routing_key(self): + prod = Mock(name='prod') + self.app.amqp.send_task_message( + prod, 'foo', self.simple_message_no_sent_event, queue='bar', + retry=False, exchange_type='direct', routing_key='xyb', + ) + prod.publish.assert_called() + pub = prod.publish.call_args[1] + assert pub['routing_key'] == 'bar' + assert pub['exchange'] == '' + def test_send_event_exchange_string(self): evd = Mock(name='evd') self.app.amqp.send_task_message( diff --git a/t/unit/app/test_app.py b/t/unit/app/test_app.py index 6f676f3f5f3..7524f08b8d7 100644 --- a/t/unit/app/test_app.py +++ b/t/unit/app/test_app.py @@ -1,5 +1,6 @@ from __future__ import absolute_import, unicode_literals +from datetime import datetime, timedelta import gc import itertools import os @@ -23,7 +24,7 @@ from celery.platforms import pyimplementation from celery.utils.collections import DictAttribute from celery.utils.serialization import pickle -from celery.utils.time import timezone +from celery.utils.time import timezone, to_utc, localize from celery.utils.objects import Bunch THIS_IS_A_KEY = 'this is a value' @@ -73,6 +74,36 @@ class test_App: def setup(self): self.app.add_defaults(deepcopy(self.CELERY_TEST_CONFIG)) + def test_now(self): + timezone_setting_value = 'US/Eastern' + tz_utc = timezone.get_timezone('UTC') + tz_us_eastern = timezone.get_timezone(timezone_setting_value) + + now = datetime.utcnow().replace(tzinfo=tz_utc) + app_now = self.app.now() + + assert app_now.tzinfo == tz_utc + assert app_now - now <= timedelta(seconds=1) + + # Check that timezone conversion is applied from configuration + self.app.conf.enable_utc = False + self.app.conf.timezone = timezone_setting_value + # timezone is a cached property + del self.app.timezone + + app_now = self.app.now() + assert app_now.tzinfo == tz_us_eastern + + diff = to_utc(datetime.utcnow()) - localize(app_now, tz_utc) + assert diff <= timedelta(seconds=1) + + # Verify that timezone setting overrides enable_utc=on setting + self.app.conf.enable_utc = True + del self.app.timezone + app_now = self.app.now() + assert self.app.timezone == tz_us_eastern + assert app_now.tzinfo == tz_us_eastern + @patch('celery.app.base.set_default_app') def test_set_default(self, set_default_app): self.app.set_default() @@ -647,7 +678,8 @@ def test_setting__broker_transport_options(self): _args = {'foo': 'bar', 'spam': 'baz'} self.app.config_from_object(Bunch()) - assert self.app.conf.broker_transport_options == {} + assert self.app.conf.broker_transport_options == \ + {'polling_interval': 0.1} self.app.config_from_object(Bunch(broker_transport_options=_args)) assert self.app.conf.broker_transport_options == _args @@ -797,6 +829,16 @@ def test_timezone__none_set(self): tz = self.app.timezone assert tz == timezone.get_timezone('UTC') + def test_uses_utc_timezone(self): + self.app.conf.timezone = None + assert self.app.uses_utc_timezone() is True + + self.app.conf.timezone = 'US/Eastern' + assert self.app.uses_utc_timezone() is False + + self.app.conf.timezone = 'UTC' + assert self.app.uses_utc_timezone() is True + def test_compat_on_configure(self): _on_configure = Mock(name='on_configure') diff --git a/t/unit/app/test_beat.py b/t/unit/app/test_beat.py index 31ba84707d9..dcfe95eb0c9 100644 --- a/t/unit/app/test_beat.py +++ b/t/unit/app/test_beat.py @@ -8,7 +8,7 @@ from celery import uuid from celery.beat import event_t from celery.five import keys, string_t -from celery.schedules import schedule +from celery.schedules import schedule, crontab from celery.utils.objects import Bunch @@ -315,6 +315,23 @@ def test_ticks(self): scheduler.update_from_dict(s) assert scheduler.tick() == min(nums) - 0.010 + def test_ticks_schedule_change(self): + # initialise schedule and check heap is not initialized + scheduler = mScheduler(app=self.app) + assert scheduler._heap is None + + # set initial schedule and check heap is updated + schedule_5 = schedule(5) + scheduler.add(name='test_schedule', schedule=schedule_5) + scheduler.tick() + assert scheduler._heap[0].entry.schedule == schedule_5 + + # update schedule and check heap is updated + schedule_10 = schedule(10) + scheduler.add(name='test_schedule', schedule=schedule(10)) + scheduler.tick() + assert scheduler._heap[0].entry.schedule == schedule_10 + def test_schedule_no_remain(self): scheduler = mScheduler(app=self.app) scheduler.add(name='test_schedule_no_remain', @@ -349,6 +366,50 @@ def test_populate_heap(self, _when): scheduler.populate_heap() assert scheduler._heap == [event_t(1, 5, scheduler.schedule['foo'])] + def create_schedule_entry(self, schedule): + entry = dict( + name='celery.unittest.add', + schedule=schedule, + app=self.app, + ) + return beat.ScheduleEntry(**dict(entry)) + + def test_schedule_equal_schedule_vs_schedule_success(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(schedule(5))} + b = {'a': self.create_schedule_entry(schedule(5))} + assert scheduler.schedules_equal(a, b) + + def test_schedule_equal_schedule_vs_schedule_fail(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(schedule(5))} + b = {'a': self.create_schedule_entry(schedule(10))} + assert not scheduler.schedules_equal(a, b) + + def test_schedule_equal_crontab_vs_crontab_success(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(crontab(minute=5))} + b = {'a': self.create_schedule_entry(crontab(minute=5))} + assert scheduler.schedules_equal(a, b) + + def test_schedule_equal_crontab_vs_crontab_fail(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(crontab(minute=5))} + b = {'a': self.create_schedule_entry(crontab(minute=10))} + assert not scheduler.schedules_equal(a, b) + + def test_schedule_equal_crontab_vs_schedule_fail(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(crontab(minute=5))} + b = {'a': self.create_schedule_entry(schedule(5))} + assert not scheduler.schedules_equal(a, b) + + def test_schedule_equal_different_key_fail(self): + scheduler = beat.Scheduler(app=self.app) + a = {'a': self.create_schedule_entry(schedule(5))} + b = {'b': self.create_schedule_entry(schedule(5))} + assert not scheduler.schedules_equal(a, b) + def create_persistent_scheduler(shelv=None): if shelv is None: diff --git a/t/unit/app/test_routes.py b/t/unit/app/test_routes.py index 045896c4e24..5954ec4f79f 100644 --- a/t/unit/app/test_routes.py +++ b/t/unit/app/test_routes.py @@ -79,12 +79,17 @@ def test_route_for_task(self): assert route('celery.awesome') is None def test_route_for_task__glob(self): + from re import compile + route = routes.MapRoute([ ('proj.tasks.*', 'routeA'), ('demoapp.tasks.bar.*', {'exchange': 'routeB'}), + (compile(r'(video|image)\.tasks\..*'), {'queue': 'media'}), ]) assert route('proj.tasks.foo') == {'queue': 'routeA'} assert route('demoapp.tasks.bar.moo') == {'exchange': 'routeB'} + assert route('video.tasks.foo') == {'queue': 'media'} + assert route('image.tasks.foo') == {'queue': 'media'} assert route('demoapp.foo.bar.moo') is None def test_expand_route_not_found(self): diff --git a/t/unit/app/test_schedules.py b/t/unit/app/test_schedules.py index 16918e3b91f..6732047e910 100644 --- a/t/unit/app/test_schedules.py +++ b/t/unit/app/test_schedules.py @@ -91,13 +91,28 @@ def test_pickle(self): assert s1 == s2 +# This is needed for test_crontab_parser because datetime.utcnow doesn't pickle +# in python 2 +def utcnow(): + return datetime.utcnow() + + class test_crontab_parser: def crontab(self, *args, **kwargs): return crontab(*args, **dict(kwargs, app=self.app)) def test_crontab_reduce(self): - assert loads(dumps(self.crontab('*'))) + c = self.crontab('*') + assert c == loads(dumps(c)) + c = self.crontab( + minute='1', + hour='2', + day_of_week='3', + day_of_month='4', + month_of_year='5', + nowfun=utcnow) + assert c == loads(dumps(c)) def test_range_steps_not_enough(self): with pytest.raises(crontab_parser.ParseException): diff --git a/t/unit/backends/test_base.py b/t/unit/backends/test_base.py index d1a6c9cb7bd..5dc6047e1ae 100644 --- a/t/unit/backends/test_base.py +++ b/t/unit/backends/test_base.py @@ -139,6 +139,11 @@ def test_regular(self): y = self.b.exception_to_python(x) assert isinstance(y, KeyError) + def test_unicode_message(self): + message = u'\u03ac' + x = self.b.prepare_exception(Exception(message)) + assert x == {'exc_message': message, 'exc_type': 'Exception'} + class KVBackend(KeyValueStoreBackend): mget_returns_dict = False @@ -357,19 +362,25 @@ def test_get_many(self): ids = {uuid(): i for i in range(10)} for id, i in items(ids): self.b.mark_as_done(id, i) - it = self.b.get_many(list(ids)) + it = self.b.get_many(list(ids), interval=0.01) for i, (got_id, got_state) in enumerate(it): assert got_state['result'] == ids[got_id] assert i == 9 - assert list(self.b.get_many(list(ids))) + assert list(self.b.get_many(list(ids), interval=0.01)) self.b._cache.clear() callback = Mock(name='callback') - it = self.b.get_many(list(ids), on_message=callback) + it = self.b.get_many( + list(ids), + on_message=callback, + interval=0.05 + ) for i, (got_id, got_state) in enumerate(it): assert got_state['result'] == ids[got_id] assert i == 9 - assert list(self.b.get_many(list(ids))) + assert list( + self.b.get_many(list(ids), interval=0.01) + ) callback.assert_has_calls([ call(ANY) for id in ids ]) diff --git a/t/unit/backends/test_elasticsearch.py b/t/unit/backends/test_elasticsearch.py index 53e02166b41..48f3c56a656 100644 --- a/t/unit/backends/test_elasticsearch.py +++ b/t/unit/backends/test_elasticsearch.py @@ -80,3 +80,64 @@ def test_backend_params_by_url(self): assert x.scheme == 'elasticsearch' assert x.host == 'localhost' assert x.port == 9200 + + def test_index(self): + x = ElasticsearchBackend(app=self.app) + x.doc_type = 'test-doc-type' + x._server = Mock() + x._server.index = Mock() + expected_result = dict( + _id=sentinel.task_id, + _source={'result': sentinel.result} + ) + x._server.index.return_value = expected_result + + body = {"field1": "value1"} + x._index( + id=str(sentinel.task_id).encode(), + body=body, + kwarg1='test1' + ) + x._server.index.assert_called_once_with( + id=str(sentinel.task_id), + doc_type=x.doc_type, + index=x.index, + body=body, + kwarg1='test1' + ) + + def test_index_bytes_key(self): + x = ElasticsearchBackend(app=self.app) + x.doc_type = 'test-doc-type' + x._server = Mock() + x._server.index = Mock() + expected_result = dict( + _id=sentinel.task_id, + _source={'result': sentinel.result} + ) + x._server.index.return_value = expected_result + + body = {b"field1": "value1"} + x._index( + id=str(sentinel.task_id).encode(), + body=body, + kwarg1='test1' + ) + x._server.index.assert_called_once_with( + id=str(sentinel.task_id), + doc_type=x.doc_type, + index=x.index, + body={"field1": "value1"}, + kwarg1='test1' + ) + + def test_config_params(self): + self.app.conf.elasticsearch_max_retries = 10 + self.app.conf.elasticsearch_timeout = 20.0 + self.app.conf.elasticsearch_retry_on_timeout = True + + self.backend = ElasticsearchBackend(app=self.app) + + assert self.backend.es_max_retries == 10 + assert self.backend.es_timeout == 20.0 + assert self.backend.es_retry_on_timeout is True diff --git a/t/unit/bin/test_worker.py b/t/unit/bin/test_worker.py index f7a451861bb..6917398cf7a 100644 --- a/t/unit/bin/test_worker.py +++ b/t/unit/bin/test_worker.py @@ -425,6 +425,7 @@ def test_main(self): class test_signal_handlers: class _Worker(object): + hostname = 'foo' stopped = False terminated = False @@ -642,3 +643,16 @@ def test_worker_term_hard_handler_when_single_threaded(self): handlers = self.psig(cd.install_worker_term_hard_handler, worker) with pytest.raises(WorkerTerminate): handlers['SIGQUIT']('SIGQUIT', object()) + + def test_send_worker_shutting_down_signal(self): + with patch('celery.apps.worker.signals.worker_shutting_down') as wsd: + worker = self._Worker() + handlers = self.psig(cd.install_worker_term_handler, worker) + try: + with pytest.raises(WorkerShutdown): + handlers['SIGTERM']('SIGTERM', object()) + finally: + state.should_stop = None + wsd.send.assert_called_with( + sender='foo', sig='SIGTERM', how='Warm', exitcode=0, + ) diff --git a/t/unit/conftest.py b/t/unit/conftest.py index 32988a42ea4..383c3247ccd 100644 --- a/t/unit/conftest.py +++ b/t/unit/conftest.py @@ -18,7 +18,7 @@ # in case user did not do the `python setup.py develop` yet, # that installs the pytest plugin into the setuptools registry. from celery.contrib.pytest import ( - celery_app, celery_enable_logging, depends_on_current_app, + celery_app, celery_enable_logging, depends_on_current_app, celery_parameters ) from celery.contrib.testing.app import Trap, TestApp from celery.contrib.testing.mocks import ( @@ -48,8 +48,10 @@ class WindowsError(Exception): def celery_config(): return { 'broker_url': 'memory://', + 'broker_transport_options': { + 'polling_interval': 0.1 + }, 'result_backend': 'cache+memory://', - 'task_default_queue': 'testcelery', 'task_default_exchange': 'testcelery', 'task_default_routing_key': 'testcelery', @@ -73,6 +75,12 @@ def celery_config(): } +@pytest.fixture(scope='session') +def celery_parameters(): + # Including this so pytest doesn't complain + return {} + + @pytest.fixture(scope='session') def use_celery_app_trap(): return True @@ -230,9 +238,14 @@ def sanity_stdouts(request): @pytest.fixture(autouse=True) def sanity_logging_side_effects(request): + '''Bug-fix version of this method taken from Celery 4.3: + https://github.com/celery/celery/blob/4.3/t/unit/conftest.py + ''' + from _pytest.logging import LogCaptureHandler root = logging.getLogger() rootlevel = root.level - roothandlers = root.handlers + roothandlers = [ + x for x in root.handlers if not isinstance(x, LogCaptureHandler)] yield @@ -240,7 +253,9 @@ def sanity_logging_side_effects(request): root_now = logging.getLogger() if root_now.level != rootlevel: raise RuntimeError(CASE_LOG_LEVEL_EFFECT.format(this)) - if root_now.handlers != roothandlers: + newhandlers = [x for x in root_now.handlers if not isinstance( + x, LogCaptureHandler)] + if newhandlers != roothandlers: raise RuntimeError(CASE_LOG_HANDLER_EFFECT.format(this)) diff --git a/t/unit/contrib/test_migrate.py b/t/unit/contrib/test_migrate.py index e75e50405e8..55731137f3d 100644 --- a/t/unit/contrib/test_migrate.py +++ b/t/unit/contrib/test_migrate.py @@ -267,8 +267,11 @@ def test_removes_compression_header(self): class test_migrate_tasks: def test_migrate(self, app, name='testcelery'): - x = Connection('memory://foo') - y = Connection('memory://foo') + connection_kwargs = dict( + transport_options={'polling_interval': 0.01} + ) + x = Connection('memory://foo', **connection_kwargs) + y = Connection('memory://foo', **connection_kwargs) # use separate state x.default_channel.queues = {} y.default_channel.queues = {} @@ -281,7 +284,6 @@ def test_migrate(self, app, name='testcelery'): Producer(x).publish('baz', exchange=name, routing_key=name) assert x.default_channel.queues assert not y.default_channel.queues - migrate_tasks(x, y, accept=['text/plain'], app=app) yq = q(y.default_channel) @@ -309,7 +311,7 @@ def effect(*args, **kwargs): qd.side_effect = effect migrate_tasks(x, y, app=app) - x = Connection('memory://') + x = Connection('memory://', **connection_kwargs) x.default_channel.queues = {} y.default_channel.queues = {} callback = Mock() diff --git a/t/unit/tasks/test_canvas.py b/t/unit/tasks/test_canvas.py index 2d5a0a75d74..f8f55669644 100644 --- a/t/unit/tasks/test_canvas.py +++ b/t/unit/tasks/test_canvas.py @@ -1,4 +1,5 @@ from __future__ import absolute_import, unicode_literals +import json import pytest from case import MagicMock, Mock from celery._state import _task_stack @@ -260,6 +261,16 @@ def test_splices_chains(self): def test_from_dict_no_tasks(self): assert chain.from_dict(dict(chain(app=self.app)), app=self.app) + def test_from_dict_full_subtasks(self): + c = chain(self.add.si(1, 2), self.add.si(3, 4), self.add.si(5, 6)) + + serialized = json.loads(json.dumps(c)) + + deserialized = chain.from_dict(serialized) + + for task in deserialized.tasks: + assert isinstance(task, Signature) + @pytest.mark.usefixtures('depends_on_current_app') def test_app_falls_back_to_default(self): from celery._state import current_app @@ -607,6 +618,10 @@ def test_app_when_app_in_task(self): x = chord([t1], body=t2) assert x.app is t2._app + def test_app_when_header_is_empty(self): + x = chord([], self.add.s(4, 4)) + assert x.app is self.add.app + @pytest.mark.usefixtures('depends_on_current_app') def test_app_fallback_to_current(self): from celery._state import current_app diff --git a/t/unit/tasks/test_tasks.py b/t/unit/tasks/test_tasks.py index 0c2d9e067c5..e21ec1012d9 100644 --- a/t/unit/tasks/test_tasks.py +++ b/t/unit/tasks/test_tasks.py @@ -251,6 +251,16 @@ def test_autoretry(self): self.autoretry_task.apply((1, 0)) assert self.autoretry_task.iterations == 6 + def test_retry_wrong_eta_when_not_enable_utc(self): + """Issue #3753""" + self.app.conf.enable_utc = False + self.app.conf.timezone = 'US/Eastern' + self.autoretry_task.iterations = 0 + self.autoretry_task.default_retry_delay = 2 + + self.autoretry_task.apply((1, 0)) + assert self.autoretry_task.iterations == 6 + class test_canvas_utils(TasksCase): diff --git a/t/unit/utils/test_functional.py b/t/unit/utils/test_functional.py index 8be9500e295..301e33c5745 100644 --- a/t/unit/utils/test_functional.py +++ b/t/unit/utils/test_functional.py @@ -205,6 +205,18 @@ def test_from_fun_forced_kwargs(self): g(a=1, b=2) g(a=1, b=2, c=3) + def test_classmethod(self): + class A(object): + @classmethod + def f(cls, x): + return x + + fun = head_from_fun(A.f, bound=False) + assert fun(A, 1) == 1 + + fun = head_from_fun(A.f, bound=True) + assert fun(1) == 1 + class test_fun_takes_argument: diff --git a/t/unit/utils/test_platforms.py b/t/unit/utils/test_platforms.py index 0ecdd41fc3b..2192737d4ce 100644 --- a/t/unit/utils/test_platforms.py +++ b/t/unit/utils/test_platforms.py @@ -2,41 +2,24 @@ import errno import os -import pytest -import sys import signal +import sys import tempfile +import pytest from case import Mock, call, mock, patch, skip -from celery import _find_option_with_arg -from celery import platforms +from celery import _find_option_with_arg, platforms from celery.exceptions import SecurityError from celery.five import WhateverIO -from celery.platforms import ( - get_fdmax, - ignore_errno, - check_privileges, - set_process_title, - set_mp_process_title, - signals, - maybe_drop_privileges, - setuid, - setgid, - initgroups, - parse_uid, - parse_gid, - detached, - DaemonContext, - create_pidlock, - Pidfile, - LockFailed, - setgroups, - _setgroups_hack, - close_open_fds, - fd_by_path, - isatty, -) +from celery.platforms import (DaemonContext, LockFailed, Pidfile, + _setgroups_hack, check_privileges, + close_open_fds, create_pidlock, detached, + fd_by_path, get_fdmax, ignore_errno, initgroups, + isatty, maybe_drop_privileges, parse_gid, + parse_uid, set_mp_process_title, + set_process_title, setgid, setgroups, setuid, + signals) try: import resource diff --git a/t/unit/utils/test_time.py b/t/unit/utils/test_time.py index 475c6e1e435..0aeb0c60b75 100644 --- a/t/unit/utils/test_time.py +++ b/t/unit/utils/test_time.py @@ -171,6 +171,13 @@ def test_maybe_make_aware(self): assert maybe_make_aware(aware) naive = datetime.utcnow() assert maybe_make_aware(naive) + assert maybe_make_aware(naive).tzinfo is pytz.utc + + tz = pytz.timezone('US/Eastern') + eastern = datetime.utcnow().replace(tzinfo=tz) + assert maybe_make_aware(eastern).tzinfo is tz + utcnow = datetime.utcnow() + assert maybe_make_aware(utcnow, 'UTC').tzinfo is pytz.utc class test_localize: diff --git a/t/unit/worker/test_loops.py b/t/unit/worker/test_loops.py index 464d79abca4..3b4ea93fafd 100644 --- a/t/unit/worker/test_loops.py +++ b/t/unit/worker/test_loops.py @@ -5,7 +5,7 @@ import pytest from case import Mock -from kombu.async import Hub, READ, WRITE, ERR +from kombu.asynchronous import Hub, READ, WRITE, ERR from celery.bootsteps import CLOSE, RUN from celery.exceptions import ( @@ -157,9 +157,11 @@ def test_setup_heartbeat(self): asynloop(*x.args) x.consumer.consume.assert_called_with() x.obj.on_ready.assert_called_with() - x.hub.timer.call_repeatedly.assert_called_with( - 10 / 2.0, x.connection.heartbeat_check, (2.0,), - ) + # heartbeat timer is called with a "tick" function + # in _enable_amqheartbeats, but we can't reference it + # from here so we'll just make sure timer was called, + # and trust that it was called with the tick function + x.hub.timer.call_repeatedly.assert_called() def task_context(self, sig, **kwargs): x, on_task = get_task_callback(self.app, **kwargs) diff --git a/t/unit/worker/test_worker.py b/t/unit/worker/test_worker.py index d1ed4438fe1..af4cc196851 100644 --- a/t/unit/worker/test_worker.py +++ b/t/unit/worker/test_worker.py @@ -1049,7 +1049,7 @@ def test_Pool_pool_no_sem(self): assert w.process_task is w._process_task def test_Pool_create(self): - from kombu.async.semaphore import LaxBoundedSemaphore + from kombu.asynchronous.semaphore import LaxBoundedSemaphore w = Mock() w._conninfo.connection_errors = w._conninfo.channel_errors = () w.hub = Mock()