@@ -26,7 +26,7 @@ executor = SequentialExecutor
2626# This defines the maximum number of task instances that can run concurrently in Airflow
2727# regardless of scheduler count and worker count. Generally, this value is reflective of
2828# the number of task instances with the running state in the metadata database.
29- parallelism = 150
29+ parallelism = 32
3030
3131# The maximum number of task instances allowed to run concurrently in each DAG. To calculate
3232# the number of tasks that is running concurrently for a DAG, add up the number of running
@@ -35,7 +35,7 @@ parallelism = 150
3535#
3636# An example scenario when this would be useful is when you want to stop a new dag with an early
3737# start date from stealing all the executor slots in a cluster.
38- max_active_tasks_per_dag = 150
38+ max_active_tasks_per_dag = 16
3939
4040# Are DAGs paused by default at creation
4141dags_are_paused_at_creation = True
@@ -336,13 +336,13 @@ statsd_datadog_tags =
336336[secrets]
337337# Full class name of secrets backend to enable (will precede env vars and metastore in search path)
338338# Example: backend = airflow.providers.amazon.aws.secrets.systems_manager.SystemsManagerParameterStoreBackend
339- backend =
339+ backend = airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend
340340
341341# The backend_kwargs param is loaded into a dictionary and passed to __init__ of secrets backend class.
342342# See documentation for the secrets backend you are using. JSON is expected.
343343# Example for AWS Systems Manager ParameterStore:
344344# ``{{"connections_prefix": "/airflow/connections", "profile_name": "default"}}``
345- backend_kwargs = ' {" connections_lookup_pattern":"^(?!aws_default$).*$"}'
345+ backend_kwargs = {"connections_prefix": "airflow-prod/connection", "variables_prefix": "airflow-prod/variable", "config_prefix": "airflow-prod/config", "connections_lookup_pattern": "^(?!aws_default$).*$"}
346346
347347[cli]
348348# In what way should the cli access the API. The LocalClient will use the
@@ -457,7 +457,7 @@ reload_on_plugin_change = False
457457secret_key = $SECRET_KEY
458458
459459# Number of workers to run the Gunicorn web server
460- workers = 9
460+ workers = 4
461461
462462# The worker class gunicorn should use. Choices include
463463# sync (default), eventlet, gevent
@@ -587,15 +587,15 @@ email_backend = airflow.utils.email.send_email_smtp
587587# If you want airflow to send emails on retries, failure, and you want to use
588588# the airflow.utils.email.send_email_smtp function, you have to configure an
589589# smtp server here
590- smtp_host = localhost
590+ smtp_host = smtp.sendgrid.net
591591smtp_starttls = True
592592smtp_ssl = False
593593# Example: smtp_user = airflow
594- # smtp_user =
594+ smtp_user = apikey
595595# Example: smtp_password = airflow
596- # smtp_password =
597- smtp_port = 25
598- smtp_mail_from = airflow@example .com
596+ smtp_password_secret = pwd/sendgrid_smtp_server
597+ smtp_port = 587
598+ smtp_mail_from = airflow@alan.com
599599smtp_timeout = 30
600600smtp_retry_limit = 5
601601
@@ -836,7 +836,7 @@ schedule_after_task_execution = False
836836
837837# The scheduler can run multiple processes in parallel to parse dags.
838838# This defines how many processes will run.
839- parsing_processes = 7
839+ parsing_processes = 2
840840
841841# One of ``modified_time``, ``random_seeded_by_host`` and ``alphabetical``.
842842# The scheduler will list and sort the dag files to decide the parsing order.
0 commit comments