# unittests.cfg
[core]
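# Core settings: unit_test_mode tells Airflow it is running under tests; the
# SequentialExecutor and a local SQLite metadata DB keep the suite self-contained.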
unit_test_mode = True
dags_folder = ./dags
plugins_folder = ./plugins
base_log_folder = ./logs
logging_level = INFO
fab_logging_level = WARN
log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log
log_processor_filename_template = {{ filename }}.log
dag_processor_manager_log_location = ./logs/dag_processor_manager/dag_processor_manager.log
executor = SequentialExecutor
sql_alchemy_conn = sqlite:///./unittests.db
load_examples = True
donot_pickle = False
load_default_connections = True
dag_concurrency = 16
dags_are_paused_at_creation = False
fernet_key =
enable_xcom_pickling = False
killed_task_cleanup_time = 5
secure_mode = False
hostname_callable = socket:getfqdn
worker_precheck = False
default_task_retries = 0
[cli]
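# The CLI uses the in-process local API client rather than a remote endpoint.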
api_client = airflow.api.client.local_client
endpoint_url = http://localhost:8080
[api]
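# Authentication backend for the experimental REST API; the default backend accepts all requests.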
auth_backend = airflow.api.auth.backend.default
[operators]
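# Owner assigned to tasks that do not set one explicitly.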
default_owner = airflow
[hive]
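# Default MapReduce queue used by Hive tasks.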
default_hive_mapred_queue = airflow
[webserver]
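# Web UI bind address/port and default DAG view settings.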
base_url = http://localhost:8080
web_server_host = 0.0.0.0
web_server_port = 8080
dag_orientation = LR
dag_default_view = tree
log_fetch_timeout_sec = 5
hide_paused_dags_by_default = False
page_size = 100
rbac = False
[email]
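# Task email notifications are sent through the SMTP backend configured below.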
email_backend = airflow.utils.email.send_email_smtp
[smtp]
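# Local SMTP server and credentials used by send_email_smtp.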
smtp_host = localhost
smtp_user = airflow
smtp_port = 25
smtp_password = airflow
smtp_mail_from = airflow@example.com
[celery]
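# CeleryExecutor settings; the broker and result backend point at a local MySQL instance.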
celery_app_name = airflow.executors.celery_executor
worker_concurrency = 16
worker_log_server_port = 8793
broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
flower_host = 0.0.0.0
flower_port = 5555
default_queue = default
sync_parallelism = 0
[mesos]
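# MesosExecutor framework settings (master address and per-task resources).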
master = localhost:5050
framework_name = Airflow
task_cpu = 1
task_memory = 256
checkpoint = False
authenticate = False
docker_image_slave = test/docker-airflow
[scheduler]
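# Scheduler heartbeat intervals, zombie-task threshold, and DAG-parsing limits.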
job_heartbeat_sec = 1
scheduler_heartbeat_sec = 5
scheduler_health_check_threshold = 30
authenticate = true
max_threads = 2
catchup_by_default = True
scheduler_zombie_task_threshold = 300
dag_dir_list_interval = 0
max_tis_per_query = 512
[admin]
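# Mask the values of variables whose keys look sensitive in the UI.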
hide_sensitive_variable_fields = True
[elasticsearch]
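# Remote task-log lookup in Elasticsearch (host left empty, so it is not used here).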
host =
log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
end_of_log_mark = end_of_log
[elasticsearch_configs]
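# Connection options passed to the Elasticsearch client.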
use_ssl = False
verify_certs = True
[kubernetes]
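# KubernetesExecutor: name of the persistent volume claim that holds the DAG files.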
dags_volume_claim = default