Merge pull request #9 from sightmachine/4.1-sm
4.1 sm to master
amastilovic authored Nov 25, 2020
2 parents b699f6c + 02f9850 commit 86f83cd
Showing 139 changed files with 2,046 additions and 565 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 4.0.2
current_version = 4.1.0
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(?P<releaselevel>[a-z]+)?
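With commit = True and tag = True set here, bumping the version also commits the change and creates a git tag in one step. A minimal sketch of the usual release flow, assuming the bumpversion (or bump2version) CLI is installed and noting that none of these commands are part of this commit:

    pip install bump2version          # also installs the `bumpversion` entry point
    bumpversion minor                 # 4.0.2 -> 4.1.0 per .bumpversion.cfg; commits and tags
    git push && git push --tags       # publish the release commit and its tag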
285 changes: 278 additions & 7 deletions .circleci/config.yml
@@ -1,16 +1,287 @@
version: 2.1

workflows:
celery:
jobs:
- build-and-upload
- run-unit-tests:
requires:
- build-and-upload
- code-coverage-report:
requires:
- run-unit-tests

orbs:
python: circleci/[email protected]
gcp-gke: circleci/[email protected]
slack: circleci/[email protected]

commands:
configure_environment:
description: "Initial environment setup: Configure Gcloud and conditionally installs Docker Compose."
parameters:
with_compose:
description: "If true, sets up Docker Compose, which can then be used to
spin up Docker containers."
type: boolean
default: false
steps:
- run:
name: Authorize gcloud
command: |
echo $GCLOUD_SERVICE_KEY > ${HOME}/gcloud-service-key.json
gcloud auth activate-service-account --key-file=${HOME}/gcloud-service-key.json
gcloud --quiet config set project sightmachine-178216
gcloud --quiet config set compute/zone us-west1-a
gcloud --quiet container clusters get-credentials dev-k8s-uw1
## To authenticate to Container Registry, use gcloud as a Docker credential helper
echo y | gcloud auth configure-docker
- when:
condition: << parameters.with_compose >>
steps:
- run:
name: Setup Docker Compose
command: |
curl -L https://github.com/docker/compose/releases/download/1.19.0/docker-compose-`uname -s`-`uname -m` > ~/docker-compose
chmod +x ~/docker-compose
mv ~/docker-compose /usr/local/bin/docker-compose
send_slack_msg:
description: "If the job fails for the master branch, send a message to the Slack channel."
steps:
- slack/status:
failure_message: Uh-oh! *$CIRCLE_PROJECT_REPONAME* (master) - Job Failed <$CIRCLE_BUILD_URL|#$CIRCLE_BUILD_NUM>
include_project_field: false
include_job_number_field: false
fail_only: true
only_for_branches: master
webhook: $SLACK_WEBHOOK_URL

jobs:
build-and-test:
executor: python/default
## ------------------ Build and Upload Celery Docker image to GCP Container Registry ------------------

build-and-upload:
machine:
image: ubuntu-1604:201903-01
docker_layer_caching: false
parameters:
run_merge: # do a prospective merge with target branch
type: boolean
default: false
steps:
- checkout
- run:
name: Configure Environment Variables
command: |
set -x
echo "export GIT_COMMIT=$(git rev-parse HEAD)" > custom.env
echo "export GIT_BRANCH=$(git symbolic-ref -q HEAD | sed -e 's:^refs/heads/::')" >> custom.env
# Sanitize branch name and Git tag (for docker image tag)
echo "export GIT_TAG=$(git describe --tag | sed -E 's/^[.-]|(^[.-])?[^A-Za-z0-9_.-]+/_/g')" >> custom.env
echo "export ARTIFACT_PATH=\"gcr.io/sightmachine-178216/celery\"" >> custom.env
echo "export BRANCH_NAME=$(echo ${CIRCLE_BRANCH} | sed -E 's/^[.-]|(^[.-])?[^A-Za-z0-9_.-]+/_/g')" >> custom.env
- persist_to_workspace:
root: .
paths:
- custom.env
- .coveragerc

- configure_environment

- run:
name: Build Celery Docker Image
command: |
source custom.env
echo "Building ${ARTIFACT_PATH}:${GIT_COMMIT}"
docker build -f docker/Dockerfile \
--build-arg GIT_BRANCH=${GIT_BRANCH} \
--build-arg GIT_TAG=${GIT_TAG} \
--build-arg GIT_COMMIT=${GIT_COMMIT} \
-t ${ARTIFACT_PATH}:${BRANCH_NAME}-dev \
-t ${ARTIFACT_PATH}:${GIT_TAG}-dev \
-t ${ARTIFACT_PATH}:${GIT_COMMIT}-dev .
- run:
name: Upload Celery Docker Image to GCR
command: |
source custom.env
docker push ${ARTIFACT_PATH}:${BRANCH_NAME}-dev
docker push ${ARTIFACT_PATH}:${GIT_TAG}-dev
docker push ${ARTIFACT_PATH}:${GIT_COMMIT}-dev
- send_slack_msg

## ------------------ Run Unit Tests ------------------

run-unit-tests:
parameters:
with_merge: # Part of the PR-merge workflow
type: boolean
default: false
docker:
- image: google/cloud-sdk
parallelism: 6
steps:
- attach_workspace:
at: /tmp/workspace

- configure_environment:
with_compose: true

- setup_remote_docker:
docker_layer_caching: false

- run:
name: Create the Celery container
command: |
source /tmp/workspace/custom.env
echo "Spinning up Docker container: Celery ($ARTIFACT_PATH:${GIT_COMMIT}-dev)"
echo -e '
version: "3"
services:
celery:
container_name: celery
image: ${ARTIFACT_PATH}:${GIT_COMMIT}-dev
environment:
TEST_BROKER: pyamqp://rabbit:5672
TEST_BACKEND: redis://redis
PYTHONUNBUFFERED: 1
PYTHONDONTWRITEBYTECODE: 1
REDIS_HOST: redis
WORKER_LOGLEVEL: DEBUG
AZUREBLOCKBLOB_URL: azureblockblob://DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://azurite:10000/devstoreaccount1;
PYTHONPATH: /home/developer/celery
command:
- cat
tty: true
hostname: celery
depends_on:
- rabbit
- redis
- dynamodb
- azurite
rabbit:
image: gcr.io/sightmachine-178216/rabbitmq:3.8.0
redis:
image: gcr.io/sightmachine-178216/redis:5.0.6
dynamodb:
image: gcr.io/sightmachine-178216/dwmkerr/dynamodb:38
azurite:
image: arafato/azurite:2.6.5
' > docker-compose.yml
docker-compose up -d
docker-compose ps
while [[ "$(docker inspect -f '{{.State.Running}}' celery 2>/dev/null)" != "true" ]]; do echo "Waiting for celery container to start..."; done
- run:
name: Run Unit Tests
command: |
source /tmp/workspace/custom.env
docker-compose exec celery circleci_scripts/execute_unit_tests.sh
- run:
name: Move Unit Test Results Out of Container
when: always
command: docker cp celery:/home/developer/tmp/junit /tmp/workspace/junit

# Store the test results on each node so we can see failures
- store_test_results:
path: /tmp/workspace/junit/

- store_artifacts:
name: Save Unit Test Results
path: /tmp/workspace/junit/

- run:
name: Copy Coverage Results to Workspace
command: |
mkdir -p /tmp/workspace/coverage-report-${CIRCLE_BUILD_NUM}
docker cp celery:/home/developer/celery/.coverage /tmp/workspace/coverage-report-${CIRCLE_BUILD_NUM}/.coverage
- persist_to_workspace:
root: /tmp/workspace
paths:
- "coverage-report-*"

- send_slack_msg

## ------------------ Generate Code Coverage Report ------------------

code-coverage-report:
parameters:
with_merge: # Part of the PR-merge workflow
type: boolean
default: false
docker:
- image: circleci/python:3.7
steps:
- when:
condition: << parameters.with_merge >>
steps:
- run:
name: Abort if not building a pull request
command: |
if [ -z "$CIRCLE_PULL_REQUEST" ]; then
circleci-agent step halt
fi
# Coverage html requires source code to build HTML views
- checkout

# Need a workspace for all the coverage reports
- attach_workspace:
at: /tmp/workspace

- run:
name: Compile Coverage Reports
command: |
sudo pip install -r requirements-coveralls.txt
coverage combine /tmp/workspace/coverage-report-*/.coverage
sed "s@/home/developer@/home/circleci/project@g" -i .coverage
coveralls
## ------------------ Run nightly build for wheel upload ------------------

# upload-wheels:
# machine:
# image: ubuntu-1604:201903-01
# docker_layer_caching: true

# steps:
# - checkout

# - configure_environment:
# with_compose: true

# - run:
# name: Build pypi dependencies and upload wheels
# command: |
# # Set jfrog cli version to 1.33.1
# curl -fL https://getcli.jfrog.io | sh -s 1.33.1
# chmod 755 ./jfrog
# sudo mv ./jfrog /usr/local/bin/
# jfrog rt c rt-server-sm --url=https://sightmachine.jfrog.io/sightmachine --user=circleci-admin-user --password=$JFROG_PWD --interactive=false

# docker pull gcr.io/sightmachine-178216/ma:master-dev
# docker run -dit --name ma gcr.io/sightmachine-178216/ma:master-dev /bin/bash
# while [[ "$(docker inspect -f '{{.State.Running}}' ma 2>/dev/null)" != "true" ]]; do echo "Waiting for ma container to start..."; done
# docker ps -f name=ma
# docker cp /home/circleci/project ma:/
# docker exec -ti ma sh -c "
# cd /project
# mkdir wheel-dir
# pip wheel --wheel-dir=./wheel-dir -r requirements.txt
# pip wheel --wheel-dir=./wheel-dir -r requirements-dev.txt
# pip wheel --wheel-dir=./wheel-dir -r requirements-test.txt
# "
# docker cp ma:/project/wheel-dir wheel-dir

# echo "Uploading Updated Wheels...."
# jfrog rt u "wheel-dir/*.whl" pypi-dependencies/wheel/

# - send_slack_msg

workflows:
main:
jobs:
- build-and-test
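A reworked config of this size can be sanity-checked locally before pushing; a quick sketch, assuming the CircleCI command-line tool is available (it is not part of this commit):

    circleci config validate .circleci/config.yml   # schema check for orbs, commands, jobs, workflows
    circleci config process .circleci/config.yml    # expand orbs and parameters into the final config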
1 change: 1 addition & 0 deletions .editorconfig
@@ -9,6 +9,7 @@ trim_trailing_whitespace = true
insert_final_newline = true
charset = utf-8
end_of_line = lf
max_line_length = 78

[Makefile]
indent_style = tab
1 change: 1 addition & 0 deletions .gitignore
@@ -7,6 +7,7 @@ dist/
*.egg-info
*.egg
*.egg/
*.eggs/
build/
.build/
_build/
28 changes: 15 additions & 13 deletions .travis.yml
@@ -1,6 +1,7 @@
language: python
sudo: required
cache: false
dist: trusty
cache: pip
python:
- '2.7'
- '3.4'
@@ -19,13 +20,13 @@ env:
matrix:
include:
- python: '3.5'
env: TOXENV=pypy-unit PYPY_VERSION="5.3"
env: TOXENV=pypy-unit PYPY_VERSION="pypy2.7-5.8.0"
- python: '3.5'
env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="5.3"
env: TOXENV=pypy-integration-rabbitmq PYPY_VERSION="pypy2.7-5.8.0"
- python: '3.5'
env: TOXENV=pypy-integration-redis PYPY_VERSION="5.3"
env: TOXENV=pypy-integration-redis PYPY_VERSION="pypy2.7-5.8.0"
- python: '3.5'
env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="5.3"
env: TOXENV=pypy-integration-dynamodb PYPY_VERSION="pypy2.7-5.8.0"
- python: '3.5'
env: TOXENV=flake8
- python: '3.5'
@@ -39,22 +40,23 @@ matrix:
before_install:
- if [[ -v MATRIX_TOXENV ]]; then export TOXENV=${TRAVIS_PYTHON_VERSION}-${MATRIX_TOXENV}; fi; env
- |
if [ "$TOXENV" = "pypy" ]; then
if [[ "$TOXENV" =~ "pypy" ]]; then
export PYENV_ROOT="$HOME/.pyenv"
if [ -f "$PYENV_ROOT/bin/pyenv" ]; then
cd "$PYENV_ROOT" && git pull
else
rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/yyuu/pyenv.git "$PYENV_ROOT"
rm -rf "$PYENV_ROOT" && git clone --depth 1 https://github.com/pyenv/pyenv.git "$PYENV_ROOT"
fi
"$PYENV_ROOT/bin/pyenv" install "pypy-$PYPY_VERSION"
virtualenv --python="$PYENV_ROOT/versions/pypy-$PYPY_VERSION/bin/python" "$HOME/virtualenvs/pypy-$PYPY_VERSION"
source "$HOME/virtualenvs/pypy-$PYPY_VERSION/bin/activate"
"$PYENV_ROOT/bin/pyenv" install "$PYPY_VERSION"
virtualenv --python="$PYENV_ROOT/versions/$PYPY_VERSION/bin/python" "$HOME/virtualenvs/$PYPY_VERSION"
source "$HOME/virtualenvs/$PYPY_VERSION/bin/activate"
which python
fi
- |
if [[ "$TOXENV" == *dynamodb ]]; then
sudo apt-get install -y default-jre supervisor
sudo apt-get update && sudo apt-get install -y default-jre supervisor
mkdir /opt/dynamodb-local
cd /opt/dynamodb-local && curl -L http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar zx
cd /opt/dynamodb-local && curl --retry 5 --retry-delay 1 -L http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar zx
cd -
echo '[program:dynamodb-local]' | sudo tee /etc/supervisor/conf.d/dynamodb-local.conf
echo 'command=java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -inMemory' | sudo tee -a /etc/supervisor/conf.d/dynamodb-local.conf
Expand All @@ -67,7 +69,7 @@ before_install:
after_success:
- .tox/$TRAVIS_PYTHON_VERSION/bin/coverage xml
- .tox/$TRAVIS_PYTHON_VERSION/bin/codecov -e TOXENV
install: travis_retry pip install -U tox
install: travis_retry pip install -U tox | cat
script: tox -v -- -v
notifications:
irc:
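One detail in the .travis.yml hunk above that is easy to miss: the old guard [ "$TOXENV" = "pypy" ] is an exact string comparison, whereas the replacement [[ "$TOXENV" =~ "pypy" ]] matches any TOXENV that merely contains "pypy" (pypy-unit, pypy-integration-redis, and so on), so every PyPy tox environment now triggers the pyenv install branch. A small bash illustration, not taken from the repository:

    TOXENV=pypy-integration-redis
    [ "$TOXENV" = "pypy" ]  && echo "exact match"      # prints nothing
    [[ "$TOXENV" =~ pypy ]] && echo "substring match"  # prints: substring match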