Bump to Airflow 1.10.6

puckel committed Nov 20, 2019
1 parent c654707 commit 48f3f19

Showing 5 changed files with 52 additions and 12 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -20,7 +20,7 @@ jobs:
       - run:
           name: Test docker image
           command: |
-            docker run puckel/docker-airflow version |grep '1.10.4'
+            docker run puckel/docker-airflow version |grep '1.10.6'
 workflows:
   version: 2
   build_and_test:
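
The CI check above can be reproduced locally before pushing: rebuild the image and grep the version string it reports. A minimal sketch, assuming a local Docker daemon and this repository as the build context:

    # Build the image and confirm the bundled Airflow release
    docker build --rm -t puckel/docker-airflow .
    docker run puckel/docker-airflow version | grep '1.10.6'
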
4 changes: 2 additions & 2 deletions Dockerfile
@@ -1,4 +1,4 @@
-# VERSION 1.10.4
+# VERSION 1.10.6
 # AUTHOR: Matthieu "Puckel_" Roisil
 # DESCRIPTION: Basic Airflow container
 # BUILD: docker build --rm -t puckel/docker-airflow .
@@ -12,7 +12,7 @@ ENV DEBIAN_FRONTEND noninteractive
 ENV TERM linux

 # Airflow
-ARG AIRFLOW_VERSION=1.10.4
+ARG AIRFLOW_VERSION=1.10.6
 ARG AIRFLOW_USER_HOME=/usr/local/airflow
 ARG AIRFLOW_DEPS=""
 ARG PYTHON_DEPS=""
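
Since AIRFLOW_VERSION, AIRFLOW_DEPS and PYTHON_DEPS are build arguments, a different release or extra Python dependencies can be pinned at build time without editing the Dockerfile. A sketch using the standard docker CLI; the pandas pin is purely illustrative:

    # Override the ARGs declared above at build time
    docker build --rm \
        --build-arg AIRFLOW_VERSION=1.10.6 \
        --build-arg PYTHON_DEPS="pandas==0.25.3" \
        -t puckel/docker-airflow .
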
48 changes: 44 additions & 4 deletions config/airflow.cfg
@@ -84,9 +84,10 @@ sql_alchemy_max_overflow = 10
 # a lower config value will allow the system to recover faster.
 sql_alchemy_pool_recycle = 1800

-# How many seconds to retry re-establishing a DB connection after
-# disconnects. Setting this to 0 disables retries.
-sql_alchemy_reconnect_timeout = 300
+# Check connection at the start of each connection pool checkout.
+# Typically, this is a simple statement like "SELECT 1".
+# More information here: https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic
+sql_alchemy_pool_pre_ping = True

 # The schema to use for the metadata database
 # SqlAlchemy supports databases with the concept of multiple schemas.
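
Like any airflow.cfg key, the new setting can be overridden per container with Airflow's AIRFLOW__<SECTION>__<KEY> environment variables, which take precedence over the file. A sketch, assuming the setting lives in [core] as above:

    # Disable the pre-ping check for a one-off container
    docker run -e AIRFLOW__CORE__SQL_ALCHEMY_POOL_PRE_PING=False \
        puckel/docker-airflow webserver
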
@@ -127,6 +128,9 @@ donot_pickle = False
 # How long before timing out a python file import while filling the DagBag
 dagbag_import_timeout = 30

+# How long before timing out a DagFileProcessor, which processes a dag file
+dag_file_processor_timeout = 50
+
 # The class to use for running task instances in a subprocess
 task_runner = StandardTaskRunner

@@ -167,6 +171,9 @@ worker_precheck = False
 # When discovering DAGs, ignore any files that don't contain the strings `DAG` and `airflow`.
 dag_discovery_safe_mode = True

+# The number of retries each task is going to have by default. Can be overridden at dag or task level.
+default_task_retries = 0
+
 [cli]
 # In what way should the cli access the API. The LocalClient will use the
 # database directly, while the json_client will use the api running on the
@@ -315,6 +322,10 @@ cookie_samesite =
 # Default setting for wrap toggle on DAG code and TI log views.
 default_wrap = False

+# Send anonymous user activity to your analytics tool
+# analytics_tool = # choose from google_analytics, segment, or metarouter
+# analytics_id = XXXXXXXXXXX
+
 [email]
 email_backend = airflow.utils.email.send_email_smtp

@@ -454,6 +465,13 @@ scheduler_heartbeat_sec = 5
 # -1 indicates to run continuously (see also num_runs)
 run_duration = -1

+# The number of times to try to schedule each DAG file
+# -1 indicates unlimited number
+num_runs = -1
+
+# The number of seconds to wait between consecutive DAG file processing
+processor_poll_interval = 1
+
 # after how much time (seconds) a new DAGs should be picked up from the filesystem
 min_file_process_interval = 0
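
num_runs also has a CLI counterpart in the 1.10 series (-n/--num_runs), commonly used to make the scheduler exit periodically so a supervisor or restart policy can recycle it. A sketch of the equivalent invocation:

    # Exit after 10 scheduling attempts per DAG file instead of running forever
    airflow scheduler --num_runs 10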

@@ -605,6 +623,11 @@ json_format = False
 # Log fields to also attach to the json output, if enabled
 json_fields = asctime, filename, lineno, levelname, message

+[elasticsearch_configs]
+
+use_ssl = False
+verify_certs = True
+
 [kubernetes]
 # The repository, tag and imagePullPolicy of the Kubernetes Image for the Worker to Run
 worker_container_repository =
@@ -697,10 +720,25 @@ git_dags_folder_mount_point =
 git_ssh_key_secret_name =
 git_ssh_known_hosts_configmap_name =

+# To give the git_sync init container credentials via a secret, create a secret
+# with two fields: GIT_SYNC_USERNAME and GIT_SYNC_PASSWORD (example below) and
+# add `git_sync_credentials_secret = <secret_name>` to your airflow config under the kubernetes section
+#
+# Secret Example:
+# apiVersion: v1
+# kind: Secret
+# metadata:
+#   name: git-credentials
+# data:
+#   GIT_SYNC_USERNAME: <base64_encoded_git_username>
+#   GIT_SYNC_PASSWORD: <base64_encoded_git_password>
+git_sync_credentials_secret =
+
 # For cloning DAGs from git repositories into volumes: https://github.com/kubernetes/git-sync
 git_sync_container_repository = k8s.gcr.io/git-sync
 git_sync_container_tag = v3.1.1
 git_sync_init_container_name = git-sync-clone
+git_sync_run_as_user = 65533

 # The name of the Kubernetes service account to be associated with airflow workers, if any.
 # Service accounts are required for workers that require access to secrets or cluster resources.
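
The commented Secret manifest above can equally be created with kubectl, which does the base64 encoding itself; the name git-credentials and the placeholder values come from the example:

    # Create the secret referenced by git_sync_credentials_secret
    kubectl create secret generic git-credentials \
        --from-literal=GIT_SYNC_USERNAME=<git_username> \
        --from-literal=GIT_SYNC_PASSWORD=<git_password>
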
@@ -742,7 +780,9 @@ tolerations =
 # List of supported params in **kwargs are similar for all core_v1_apis, hence a single config variable for all apis
 # See:
 # https://raw.githubusercontent.com/kubernetes-client/python/master/kubernetes/client/apis/core_v1_api.py
-kube_client_request_args =
+# Note that if no _request_timeout is specified, the kubernetes client will wait indefinitely for kubernetes
+# api responses, which will cause the scheduler to hang. The timeout is specified as [connect timeout, read timeout]
+kube_client_request_args = {{"_request_timeout" : [60,60] }}

 # Worker pods security context options
 # See:
8 changes: 4 additions & 4 deletions docker-compose-CeleryExecutor.yml
@@ -16,7 +16,7 @@ services:
         # - ./pgdata:/var/lib/postgresql/data/pgdata

     webserver:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - postgres
@@ -43,7 +43,7 @@ services:
             retries: 3

     flower:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - redis
@@ -55,7 +55,7 @@ services:
         command: flower

     scheduler:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - webserver
@@ -74,7 +74,7 @@ services:
         command: scheduler

     worker:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - scheduler
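
To pick up the new tag, the Celery stack is restarted with docker-compose; the worker pool can optionally be scaled out. A sketch, assuming the compose v1 CLI from the repository root:

    # Start the CeleryExecutor stack on the 1.10.6 images
    docker-compose -f docker-compose-CeleryExecutor.yml up -d
    # Optionally run several Celery workers
    docker-compose -f docker-compose-CeleryExecutor.yml up -d --scale worker=3
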
2 changes: 1 addition & 1 deletion docker-compose-LocalExecutor.yml
@@ -8,7 +8,7 @@ services:
             - POSTGRES_DB=airflow

     webserver:
-        image: puckel/docker-airflow:1.10.4
+        image: puckel/docker-airflow:1.10.6
         restart: always
         depends_on:
             - postgres
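
The LocalExecutor variant starts the same way; an existing deployment only needs to pull the new tag first. A sketch, assuming the image is published on Docker Hub:

    docker pull puckel/docker-airflow:1.10.6
    docker-compose -f docker-compose-LocalExecutor.yml up -d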
