Skip to content

Commit d5df31d

Browse files
Merge branch 'main' into fernst/update_airflow_210
2 parents 4852eda + 2f14747 commit d5df31d

7 files changed

Lines changed: 28 additions & 79 deletions

File tree

.gitignore

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
/git
22
*.workspace
3-
dags/**/*.pyc
43
/.idea
54
/logs
65
/db-data
6+
dags/*
77
.DS_Store

dags/example_dag_with_taskflow_api.py

Lines changed: 0 additions & 60 deletions
This file was deleted.

docker/config/.env.aws

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,2 @@
1+
# You can add your AWS environment variables here
2+
# We keep that file separate from .env.localrunner so that it can be updated by the startup script

docker/config/airflow.cfg

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -26,7 +26,7 @@ executor = SequentialExecutor
2626
# This defines the maximum number of task instances that can run concurrently in Airflow
2727
# regardless of scheduler count and worker count. Generally, this value is reflective of
2828
# the number of task instances with the running state in the metadata database.
29-
parallelism = 150
29+
parallelism = 32
3030

3131
# The maximum number of task instances allowed to run concurrently in each DAG. To calculate
3232
# the number of tasks that is running concurrently for a DAG, add up the number of running
@@ -35,7 +35,7 @@ parallelism = 150
3535
#
3636
# An example scenario when this would be useful is when you want to stop a new dag with an early
3737
# start date from stealing all the executor slots in a cluster.
38-
max_active_tasks_per_dag = 150
38+
max_active_tasks_per_dag = 16
3939

4040
# Are DAGs paused by default at creation
4141
dags_are_paused_at_creation = True
@@ -336,13 +336,13 @@ statsd_datadog_tags =
336336
[secrets]
337337
# Full class name of secrets backend to enable (will precede env vars and metastore in search path)
338338
# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend
339-
backend =
339+
backend = airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend
340340

341341
# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class.
342342
# See documentation for the secrets backend you are using. JSON is expected.
343343
# Example for AWS Systems Manager ParameterStore:
344344
# ``{{"connections_prefix": "/airflow/connections", "profile_name": "default"}}``
345-
backend_kwargs = '{"connections_lookup_pattern":"^(?!aws_default$).*$"}'
345+
backend_kwargs = {"connections_prefix" : "airflow-prod/connection", "variables_prefix" : "airflow-prod/variable", "config_prefix": "airflow-prod/config", "connections_lookup_pattern":"^(?!aws_default$).*$"}
346346

347347
[cli]
348348
# In what way should the cli access the API. The LocalClient will use the
@@ -457,7 +457,7 @@ reload_on_plugin_change = False
457457
secret_key = $SECRET_KEY
458458

459459
# Number of workers to run the Gunicorn web server
460-
workers = 9
460+
workers = 4
461461

462462
# The worker class gunicorn should use. Choices include
463463
# sync (default), eventlet, gevent
@@ -587,15 +587,15 @@ email_backend = airflow.utils.email.send_email_smtp
587587
# If you want airflow to send emails on retries, failure, and you want to use
588588
# the airflow.utils.email.send_email_smtp function, you have to configure an
589589
# smtp server here
590-
smtp_host = localhost
590+
smtp_host = smtp.sendgrid.net
591591
smtp_starttls = True
592592
smtp_ssl = False
593593
# Example: smtp_user = airflow
594-
# smtp_user =
594+
smtp_user = apikey
595595
# Example: smtp_password = airflow
596-
# smtp_password =
597-
smtp_port = 25
598-
smtp_mail_from = airflow@example.com
596+
smtp_password_secret = pwd/sendgrid_smtp_server
597+
smtp_port = 587
598+
smtp_mail_from = airflow@alan.com
599599
smtp_timeout = 30
600600
smtp_retry_limit = 5
601601

@@ -836,7 +836,7 @@ schedule_after_task_execution = False
836836

837837
# The scheduler can run multiple processes in parallel to parse dags.
838838
# This defines how many processes will run.
839-
parsing_processes = 7
839+
parsing_processes = 2
840840

841841
# One of ``modified_time``, ``random_seeded_by_host`` and ``alphabetical``.
842842
# The scheduler will list and sort the dag files to decide the parsing order.

docker/docker-compose-local.yml

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -21,6 +21,11 @@ services:
2121
environment:
2222
- LOAD_EX=n
2323
- EXECUTOR=Local
24+
- AIRFLOW__WEBSERVER__WORKERS=1
25+
- AIRFLOW__WEBSERVER__EXPOSE_CONFIG=true
26+
- AIRFLOW__SENTRY__SENTRY_ON
27+
- AIRFLOW__SENTRY__SENTRY_DSN
28+
- SENTRY_ENVIRONMENT=dev
2429
logging:
2530
options:
2631
max-size: 10m
@@ -30,6 +35,7 @@ services:
3035
- "${PWD}/plugins:/usr/local/airflow/plugins"
3136
- "${PWD}/requirements:/usr/local/airflow/requirements"
3237
- "${PWD}/startup_script:/usr/local/airflow/startup"
38+
- "${HOME}/.aws:/usr/local/airflow/.aws"
3339
ports:
3440
- "8080:8080"
3541
command: local-runner
@@ -40,3 +46,4 @@ services:
4046
retries: 3
4147
env_file:
4248
- ./config/.env.localrunner
49+
- ./config/.env.aws

docker/script/bootstrap.sh

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -53,13 +53,13 @@ sudo mkdir mariadb_rpm
5353
sudo chown airflow /mariadb_rpm
5454

5555
if [[ $(uname -p) == "aarch64" ]]; then
56-
wget https://mirror.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-common-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
57-
wget https://mirror.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-shared-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
58-
wget https://mirror.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-devel-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
56+
wget https://archive.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-common-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
57+
wget https://archive.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-shared-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
58+
wget https://archive.mariadb.org/yum/11.4/fedora38-aarch64/rpms/MariaDB-devel-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
5959
else
60-
wget https://mirror.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-common-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
61-
wget https://mirror.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-shared-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
62-
wget https://mirror.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-devel-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
60+
wget https://archive.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-common-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
61+
wget https://archive.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-shared-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
62+
wget https://archive.mariadb.org/yum/11.4/fedora38-amd64/rpms/MariaDB-devel-11.4.2-1.fc38.$(uname -p).rpm -P /mariadb_rpm
6363
fi
6464

6565
# install mariadb_devel and its dependencies

mwaa-local-env

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -68,7 +68,7 @@ test-requirements)
6868
echo "Container amazon/mwaa-local:$AIRFLOW_VERSION not built. Building locally."
6969
build_image
7070
fi
71-
docker run -v $(pwd)/dags:/usr/local/airflow/dags -v $(pwd)/plugins:/usr/local/airflow/plugins -v $(pwd)/requirements:/usr/local/airflow/requirements -it amazon/mwaa-local:$AIRFLOW_VERSION test-requirements
71+
docker run -v $(pwd)/dags:/usr/local/airflow/dags -v $(pwd)/plugins:/usr/local/airflow/plugins -v $(pwd)/requirements:/usr/local/airflow/requirements amazon/mwaa-local:$AIRFLOW_VERSION test-requirements
7272
;;
7373
test-startup-script)
7474
BUILT_IMAGE=$(docker images -q amazon/mwaa-local:$AIRFLOW_VERSION)

0 commit comments

Comments (0)