Commit
Merge pull request #8 from puckel/v1.5.2
v1.5.2
Showing 5 changed files with 89 additions and 22 deletions.
@@ -1,46 +1,66 @@
[core]
# The home folder for airflow, default is ~/airflow
-airflow_home = /usr/local/airflow
+airflow_home = /usr/local/airflow

# The folder where your airflow pipelines live, most likely a
# subfolder in a code repository
dags_folder = /usr/local/airflow/dags

# The folder where airflow should store its log files
base_log_folder = /usr/local/airflow/logs

# The executor class that airflow should use. Choices include
# SequentialExecutor, LocalExecutor, CeleryExecutor
-executor = CeleryExecutor
+executor = SequentialExecutor

# The SqlAlchemy connection string to the metadata database.
# SqlAlchemy supports many different database engine, more information
# their website
-sql_alchemy_conn = mysql://airflow:airflow@mysqldb/airflow
+sql_alchemy_conn = mysql://airflow:airflow@mysql/airflow

# The amount of parallelism as a setting to the executor. This defines
# the max number of task instances that should run simultaneously
# on this airflow installation
parallelism = 32

# Whether to load the examples that ship with Airflow. It's good to
# get started, but you probably want to set this to False in a production
# environment
load_examples = True

# Where your Airflow plugins are stored
plugins_folder = /usr/local/airflow/plugins

# Secret key to save connection passwords in the db
-fernet_key = {FERNET_KEY}
+fernet_key = $FERNET_KEY

# Whether to disable pickling dags
donot_pickle = False

[webserver]
# The base url of your website as airflow cannot guess what domain or
# cname you are using. This is use in automated emails that
# airflow sends to point links to the right web server
base_url = http://localhost:8080

# The ip specified when starting the web server
web_server_host = 0.0.0.0

# The port on which to run the web server
web_server_port = 8080

# Secret key used to run your flask app
secret_key = temporary_key

# number of threads to run the Gunicorn web server
thread = 4

# Expose the configuration file in the web server
expose_config = true

# Set to true to turn on authentication : http://pythonhosted.org/airflow/installation.html#web-authentication
authenticate = False

# Filter the list of dags by owner name (requires authentication to be enabled)
filter_by_owner = False
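In this hunk the executor default changes between CeleryExecutor and SequentialExecutor, the metadata database host moves from mysqldb to mysql, and the fernet_key placeholder goes from {FERNET_KEY} to $FERNET_KEY, which reads as if the value is now filled in from an environment variable when the container starts. A minimal sketch of producing such a key with the cryptography package (the script is illustrative only; how the image actually populates $FERNET_KEY is not shown in this diff):

    # generate_fernet_key.py -- print a key suitable for airflow.cfg's fernet_key.
    # Assumes the `cryptography` package is installed (pip install cryptography).
    from cryptography.fernet import Fernet

    if __name__ == "__main__":
        # Fernet.generate_key() returns a url-safe, base64-encoded 32-byte key.
        print(Fernet.generate_key().decode())

The printed value could then be exported as FERNET_KEY before starting the container so Airflow can encrypt connection passwords stored in its metadata database.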
@@ -58,28 +78,35 @@ smtp_mail_from = [email protected]
[celery]
# This section only applies if you are using the CeleryExecutor in
# [core] section above

# The app name that will be used by celery
celery_app_name = airflow.executors.celery_executor

# The concurrency that will be used when starting workers with the
# "airflow worker" command. This defines the number of task instances that
# a worker will take, so size up your workers based on the resources on
# your worker box and the nature of your tasks
celeryd_concurrency = 16

# When you start an airflow worker, airflow starts a tiny web server
# subprocess to serve the workers local log files to the airflow main
# web server, who then builds pages and sends them to users. This defines
# the port on which the logs are served. It needs to be unused, and open
# visible from the main web server to connect into the workers.
worker_log_server_port = 8793

# The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally
# a sqlalchemy database. Refer to the Celery documentation for more
# information.
broker_url = amqp://airflow:airflow@rabbitmq:5672/airflow

# Another key Celery setting
celery_result_backend = amqp://airflow:airflow@rabbitmq:5672/airflow

# Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
# it `airflow flower`. This defines the port that Celery Flower runs on
flower_port = 5555

# Default queue that tasks get assigned to and that worker listen on.
default_queue = default
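The [celery] section points both broker_url and celery_result_backend at the same RabbitMQ vhost, amqp://airflow:airflow@rabbitmq:5672/airflow. A short sketch that builds a Celery app against those URLs and pings whatever workers are listening, useful for checking that the broker named here is reachable (the script is an illustration only; host, credentials and vhost are copied from the config above):

    # celery_ping.py -- sanity-check the broker/backend from airflow.cfg.
    # Assumes `celery` is installed and the hostname "rabbitmq" resolves
    # (e.g. as a linked service in docker-compose).
    from celery import Celery

    app = Celery(
        "airflow_check",
        broker="amqp://airflow:airflow@rabbitmq:5672/airflow",
        backend="amqp://airflow:airflow@rabbitmq:5672/airflow",
    )

    if __name__ == "__main__":
        # control.ping() returns one {worker_name: {"ok": "pong"}} entry per
        # worker that answers within the timeout; an empty list means none did.
        print(app.control.ping(timeout=2.0))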
@@ -88,12 +115,50 @@ default_queue = default
# from the CLI or the UI), this defines the frequency at which they should
# listen (in seconds).
job_heartbeat_sec = 5

# The scheduler constantly tries to trigger new tasks (look at the
# scheduler section in the docs for more information). This defines
# how often the scheduler should run (in seconds).
scheduler_heartbeat_sec = 5

# Statsd (https://github.com/etsy/statsd) integration settings
# statsd_on = False
# statsd_host = localhost
# statsd_port = 8125
# statsd_prefix = airflow

[mesos]
# Mesos master address which MesosExecutor will connect to.
master = localhost:5050

# The framework name which Airflow scheduler will register itself as on mesos
framework_name = Airflow

# Number of cpu cores required for running one task instance using
# 'airflow run <dag_id> <task_id> <execution_date> --local -p <pickle_id>'
# command on a mesos slave
task_cpu = 1

# Memory in MB required for running one task instance using
# 'airflow run <dag_id> <task_id> <execution_date> --local -p <pickle_id>'
# command on a mesos slave
task_memory = 256

# Enable framework checkpointing for mesos
# See http://mesos.apache.org/documentation/latest/slave-recovery/
checkpoint = False

# Failover timeout in milliseconds.
# When checkpointing is enabled and this option is set, Mesos waits until the configured timeout for
# the MesosExecutor framework to re-register after a failover. Mesos shuts down running tasks if the
# MesosExecutor framework fails to re-register within this timeframe.
# failover_timeout = 604800

# Enable framework authentication for mesos
# See http://mesos.apache.org/documentation/latest/configuration/
authenticate = False

# Mesos credentials, if authentication is enabled
# default_principal = admin
# default_secret = admin
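The new [scheduler] and [mesos] entries are plain INI, so the values a running container actually ends up with can be read back with Python's configparser. A small sketch, under the assumptions that the rendered file lives at the airflow_home set in [core] and that section names follow the stock airflow.cfg layout (the [scheduler] header itself sits just above this hunk and is not visible in the diff):

    # check_cfg.py -- print a few of the settings this commit touches.
    import configparser

    # interpolation=None keeps ConfigParser from trying to expand any '%'
    # sequences that may appear elsewhere in the file.
    cfg = configparser.ConfigParser(interpolation=None)
    cfg.read("/usr/local/airflow/airflow.cfg")

    for section, key in [
        ("core", "executor"),
        ("core", "sql_alchemy_conn"),
        ("celery", "broker_url"),
        ("scheduler", "job_heartbeat_sec"),
        ("mesos", "master"),
    ]:
        print(f"{section}.{key} = {cfg.get(section, key)}")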