[build-system]
requires = ["setuptools", "wheel"]
# uncomment to enable pep517 after versioneer problem is fixed.
# https://github.com/python-versioneer/python-versioneer/issues/193
# build-backend = "setuptools.build_meta"
[tool.black]
target-version = ["py38", "py39", "py310"]
extend-exclude = '''
(
docs/.*
| tests/.*.fixture
| .*.ge_store_backend_id
)
'''
# exclude unparseable notebooks
# https://stackoverflow.com/a/73296261/6304433
force-exclude = '''
(tests/data_context/fixtures/post_init_project_v0.8.0_A/great_expectations/notebooks/.*\.ipynb)
'''
[tool.isort]
profile = "black"
skip_gitignore = true
extend_skip_glob = ['venv/*', 'docs/*']
[tool.mypy]
python_version = "3.8"
plugins = [
"pydantic.mypy",
"sqlalchemy.ext.mypy.plugin"
]
files = [
"great_expectations",
"tests/datasource/fluent",
"tests/integration/cloud",
"tests/integration/docusaurus",
"tests/core/test_batch_config.py",
"tests/validator/test_v1_validator.py",
# "contrib" # ignore entire `contrib` package
]
warn_unused_configs = true
ignore_missing_imports = true
# TODO: change this to 'normal' once we have 'full' type coverage
follow_imports = 'silent'
warn_redundant_casts = true
show_error_codes = true
enable_error_code = [
'ignore-without-code',
'explicit-override'
]
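# Illustrative only (hypothetical names) of what the two codes above enforce:
#     port: int = read_port()       # type: ignore[assignment]   <- ignore-without-code: a bare "# type: ignore" is flagged
#     @override
#     def build(self) -> None: ...  # explicit-override: methods overriding a base class must be decorated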
# The following list of codes are globally ignored, do not add to this list
disable_error_code = [
# annotation-unchecked are 'warning notes', not errors and won't cause mypy to fail
# but it does create a lot of noise in the CI mypy step.
# https://mypy-lang.blogspot.com/2022/11/mypy-0990-released.html
'annotation-unchecked',
]
exclude = [
# BEGIN ALWAYS EXCLUDE SECTION #####################################################
# If a pattern should always be excluded, add a comment explaining why and put it in this section.
# Docs should not be type checked with the rest of the library.
'docs/*',
'docs/adr',
# 'docs/checks', # We ignore docs, but not any CI checks written in python
'docs/docusaurus',
'docs/expectation_gallery',
'docs/readme_assets',
'docs/sphinx_api_docs_source',
'_version\.py', # generated by `versioneer`
'v012', # legacy code
'tests/datasource/fluent/test_metadatasource\.py', # metaprogramming leads to errors
'datasource/data_connector/configured_asset_sql_data_connector\.py', # 37 - This is legacy code and will not be typed.
'cli/suite\.py', # 24 - This is legacy code and will not be typed.
'cli/upgrade_helpers/upgrade_helper_v11\.py', # 59 - This is legacy code and will not be typed.
'cli/upgrade_helpers/upgrade_helper_v13\.py', # 17 - This is legacy code and will not be typed.
'dataset/sparkdf_dataset\.py', # 3 - This is legacy code and will not be typed.
'dataset/sqlalchemy_dataset\.py', # 16 - This is legacy code and will not be typed.
'core/usage_statistics/anonymizers/batch_anonymizer\.py', # 10 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/batch_request_anonymizer\.py', # 16 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/checkpoint_anonymizer\.py', # 16 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/data_docs_anonymizer\.py', # 5 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/datasource_anonymizer\.py', # 9 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/expectation_anonymizer\.py', # 6 - This code will be removed in 1.0
'core/usage_statistics/anonymizers/validation_operator_anonymizer\.py', # 5 - This code will be removed in 1.0
'render/renderer/v3/suite_edit_notebook_renderer\.py', # 11 - This is legacy code and will not be typed.
'render/renderer/v3/suite_profile_notebook_renderer\.py', # 4 - This is legacy code and will not be typed.
'render/renderer/suite_edit_notebook_renderer\.py', # 7 - This is legacy code and will not be typed.
'render/renderer/suite_scaffold_notebook_renderer\.py', # 7 - This is legacy code and will not be typed.
'render/renderer/datasource_new_notebook_renderer\.py', # 4 - This is legacy code and will not be typed.
'render/renderer/checkpoint_new_notebook_renderer\.py', # 9 - This is legacy code and will not be typed.
# END ALWAYS EXCLUDE SECTION ######################################################
#
# #################################################################################
# TODO: complete typing for the following modules and remove from exclude list
# number is the current number of typing errors for the excluded pattern
'expectations/core/expect_column_values_to_be_of_type\.py', # 12
'expectations/core/expect_column_values_to_not_match_regex_list\.py', # 2
'expectations/core/expect_column_values_to_not_match_regex\.py', # 2
'expectations/core/expect_column_values_to_not_match_like_pattern_list\.py', # 3
'expectations/core/expect_column_values_to_not_match_like_pattern\.py', # 2
'expectations/core/expect_column_values_to_not_be_in_set\.py', # 2
'expectations/core/expect_column_values_to_match_strftime_format\.py', # 2
'expectations/core/expect_column_values_to_match_regex_list\.py', # 2
'expectations/core/expect_column_values_to_match_regex\.py', # 1
'expectations/core/expect_column_values_to_match_like_pattern_list\.py', # 3
'expectations/core/expect_column_values_to_match_like_pattern\.py', # 2
'expectations/core/expect_column_values_to_match_json_schema\.py', # 1
'expectations/core/expect_column_values_to_be_null\.py', # 3
'expectations/core/expect_column_values_to_be_json_parseable\.py', # 1
'expectations/core/expect_column_values_to_be_increasing\.py', # 1
'expectations/core/expect_column_values_to_be_in_type_list\.py', # 11
'expectations/core/expect_column_values_to_be_decreasing\.py', # 1
'expectations/core/expect_column_values_to_be_dateutil_parseable\.py', # 1
'expectations/core/expect_column_values_to_be_between\.py', # 3
'expectations/core/expect_column_unique_value_count_to_be_between\.py', # 1
'expectations/core/expect_column_stdev_to_be_between\.py', # 1
'expectations/core/expect_column_quantile_values_to_be_between\.py', # 15
'expectations/core/expect_column_value_lengths_to_equal\.py', # 1
'expectations/core/expect_column_value_lengths_to_be_between\.py', # 5
'expectations/core/expect_column_proportion_of_unique_values_to_be_between\.py', # 1
'expectations/core/expect_column_values_to_be_in_set\.py', # 2
'expectations/core/expect_column_values_to_be_equal\.py', # 3
'expectations/core/expect_column_values_a_to_be_greater_than_b\.py', # 3
'expectations/core/expect_column_pair_cramers_phi_value_to_be_less_than\.py', # 7
'expectations/core/expect_column_most_common_value_to_be_in_set\.py', # 3
'expectations/core/expect_column_kl_divergence_to_be_less_than\.py', # 22
'expectations/core/expect_column_pair_values_to_be_in_set\.py', # 2
'expectations/core/expect_column_pair_values_to_be_equal\.py', # 3
'expectations/core/expect_column_pair_values_a_to_be_greater_than_b\.py', # 3
'expectations/core/expect_column_distinct_values_to_equal_set\.py', # 2
'expectations/core/expect_column_distinct_values_to_contain_set\.py', # 4
'expectations/core/expect_column_distinct_values_to_be_in_set\.py', # 1
'expectations/core/expect_compound_columns_to_be_unique\.py', # 3
'expectations/core/expect_multicolumn_sum_to_equal\.py', # 4
'expectations/core/expect_multicolumn_values_to_be_unique\.py', # 3
'expectations/core/expect_select_column_values_to_be_unique_within_record\.py', # 3
'expectations/core/expect_table_columns_to_match_set\.py', # 8
'expectations/core/expect_table_columns_to_match_ordered_list\.py', # 11
'expectations/core/expect_table_column_count_to_equal\.py', # 5
'expectations/core/expect_table_row_count_to_equal_other_table\.py', # 11
'expectations/regex_based_column_map_expectation\.py', # 3
'expectations/row_conditions\.py', # 4
'expectations/set_based_column_map_expectation\.py', # 3
'render/renderer/content_block/content_block\.py', # 5
'render/renderer/content_block/exception_list_content_block\.py', # 4
'render/renderer/page_renderer\.py', # 10
'render/renderer/profiling_results_overview_section_renderer\.py', # 2
'render/renderer/site_builder\.py', # 3
'render/renderer/slack_renderer\.py', # 9
'rule_based_profiler/domain_builder/map_metric_column_domain_builder\.py', # 8
'rule_based_profiler/estimators/bootstrap_numeric_range_estimator\.py', # 8
'rule_based_profiler/estimators/kde_numeric_range_estimator\.py', # 7
'rule_based_profiler/expectation_configuration_builder', # 13
'rule_based_profiler/helpers/util\.py', # 46
'rule_based_profiler/parameter_builder/unexpected_count_statistics_multi_batch_parameter_builder\.py', # 69
'rule_based_profiler/parameter_builder/mean_unexpected_map_metric_multi_batch_parameter_builder\.py', # 19
'rule_based_profiler/parameter_builder/metric_multi_batch_parameter_builder\.py', # 15
'rule_based_profiler/parameter_builder/numeric_metric_range_multi_batch_parameter_builder\.py', # 27
'rule_based_profiler/parameter_builder/parameter_builder\.py', # 40
'rule_based_profiler/parameter_builder/partition_parameter_builder\.py', # 9
'rule_based_profiler/parameter_builder/regex_pattern_string_parameter_builder\.py', # 21
'rule_based_profiler/parameter_builder/simple_date_format_string_parameter_builder\.py', # 20
'rule_based_profiler/rule_based_profiler\.py', # 40
'validation_operators/types/validation_operator_result\.py', # 35
'validation_operators/validation_operators\.py', # 16
# tests
'tests/datasource/fluent/tasks\.py',
'tests/integration/docusaurus/tutorials',
'tests/integration/docusaurus/connecting_to_your_data',
'tests/integration/docusaurus/deployment_patterns',
'tests/integration/docusaurus/expectations',
'tests/integration/docusaurus/reference',
'tests/integration/docusaurus/setup',
'tests/integration/docusaurus/validation',
]
[[tool.mypy.overrides]]
# need to use override because a mypy bug prevents ignoring an assignment warning inline
# for `from azure import storage`
module = ["great_expectations.compatibility.azure"]
disable_error_code = [
'assignment', # cannot assign NotImported to a ModuleType
]
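# Rough sketch (not the verbatim module) of the pattern in great_expectations/compatibility/azure.py
# that trips the assignment check when the optional dependency is absent:
#     try:
#         from azure import storage
#     except ImportError:
#         storage = NotImported("azure storage is not installed")  # assigning NotImported over a module name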
[tool.pydantic-mypy]
# https://pydantic-docs.helpmanual.io/mypy_plugin/#plugin-settings
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true
[tool.ruff]
target-version = "py38"
line-length = 88
select = [
# https://beta.ruff.rs/docs/rules/#pyflakes-f
"F", # Pyflakes
# https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
"E", # pycodestyle
"W", # Warning
# https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
# https://beta.ruff.rs/docs/rules/#mccabe-c90
"C", # Complexity (mccabe+) & comprehensions
# https://beta.ruff.rs/docs/rules/#pyupgrade-up
"UP", # pyupgrade
# https://beta.ruff.rs/docs/rules/#isort-i
"I", # isort
# https://beta.ruff.rs/docs/rules/#flake8-type-checking-tch
"TCH", # flake8-type-checking-tch
# https://beta.ruff.rs/docs/rules/#flake8-tidy-imports-tid
"TID", # flake8-tidy-imports
# https://beta.ruff.rs/docs/rules/#flake8-pyi-pyi
"PYI", # flake8-pyi - type stub files
# https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth
"PTH", # use-pathlib - use pathlib for os.path and glob operations
# https://beta.ruff.rs/docs/rules/#flake8-bugbear-b
"B", # bugbear - common python bugs & design issues
# https://beta.ruff.rs/docs/rules/#flake8-datetimez-dtz
"DTZ", # flake8-datetimez-dtz - prevent use of tz naive datetimes
# https://beta.ruff.rs/docs/rules/#pylint-pl
"PL", # pylint
# https://beta.ruff.rs/docs/rules/#ruff-specific-rules-ruf
"RUF", # custom ruff rules
]
ignore = [
# https://beta.ruff.rs/docs/rules/#pyflakes-f
"F842", # variable annotated but unused # TODO enable
# https://beta.ruff.rs/docs/rules/#pycodestyle-e-w
"E501", # line-length # TODO: too many violations
"E402", # module level import not at top of file
# https://beta.ruff.rs/docs/rules/#flake8-comprehensions-c4
"C400", # TODO enable
"C408", # TODO enable
"C409", # TODO enable
"C413", # TODO enable
"C414", # TODO enable
"C416", # TODO enable
"C417", # TODO enable
# https://beta.ruff.rs/docs/rules/#pyupgrade-up
"UP006", # use-pep585-annotation
"UP007", # use-pep604-annotation
# https://beta.ruff.rs/docs/rules/#flake8-type-checking-tch
# minimal cost for standard lib imports; keep this disabled
"TCH003", # typing-only-standard-library-import
# gives false positives if we use try imports and type-checking import
"TCH004", # runtime-import-in-type-checking-block
"TID252", # Relative imports from parent modules are banned
# https://beta.ruff.rs/docs/rules/#flake8-use-pathlib-pth
"PTH123", # pathlib-open - this would force pathlib usage anytime open or with open was used.
# https://beta.ruff.rs/docs/rules/#flake8-pyi-pyi
"PYI053", # string-or-bytes-too-long - causes mypy to fail on some of our type stubs
"PYI054", # numeric-literal-too-long - causes mypy to fail on some of our type stubs
# https://beta.ruff.rs/docs/rules/#flake8-bugbear-b
# TODO: enable these
"B904", # raise-without-from-inside-except
"B028", # no-explicit-stacklevel - https://beta.ruff.rs/docs/rules/no-explicit-stacklevel/
"B007", # unused-loop-control-variable
# TODO: enable remaining RUF rules in followup PRs
"RUF005", # collection-literal-concatenation
"RUF012", # mutable-class-default - too many violations
"RUF015", # unnecessary-iterable-allocation-for-first-element - requires more careful review
]
extend-exclude = [
"docs/*",
"build/*",
"versioneer*",
"examples/*",
# TODO: remove the items below and fix linting issues
"tests/data_asset", # 10 warnings
"tests/dataset", # 10 warnings
"tests/test_fixtures/notebook_assets/suite_edit/footer.py", # 7 warnings
]
[tool.ruff.per-file-ignores]
"assets/benchmark/benchmark.py" = [
"DTZ", # flake8-datetimez-dtz - doesn't matter for benchmark tests
]
"assets/scripts/build_gallery.py" = [
"PLR0912", # Too many branches - scripts are not part of the main codebase
"PLR0913", # Too many arguments - scripts are not part of the main codebase
"PLR0915", # Too many statements - scripts are not part of the main codebase
]
"__init__.py" = [
"F401", # unused import
"F403", # star imports
"PTH207", # use glob - __all__ needs to be list of str, not Path
]
"*.pyi" = [
"TID251", # flake8-banned-api - type stubs are not executed
]
"great_expectations/_version.py" = [
"PLR", # pylint - versioneer code
]
"great_expectations/compatibility/*.py" = [
"TID251", # flake8-banned-api
]
[tool.ruff.flake8-type-checking]
# pydantic models use annotations at runtime
runtime-evaluated-base-classes = [
# NOTE: ruff is unable to detect that these are subclasses of pydantic.BaseModel
"pydantic.BaseModel",
"great_expectations.datasource.fluent.fluent_base_model.FluentBaseModel",
"great_expectations.datasource.fluent.interfaces.Datasource",
"great_expectations.datasource.fluent.sql_datasource.SQLDatasource",
]
runtime-evaluated-decorators = ["pydantic.dataclasses.dataclass"]
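# Why runtime evaluation matters, sketched with a hypothetical model: flake8-type-checking would
# otherwise suggest moving annotation-only imports behind `if TYPE_CHECKING:`, but pydantic resolves
# annotations when the class is built, so for the classes above the import must stay at module level:
#     from datetime import datetime      # must NOT be moved into an `if TYPE_CHECKING:` block
#     class MyAsset(FluentBaseModel):
#         created_at: datetime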
[tool.ruff.mccabe]
max-complexity = 15
[tool.ruff.pydocstyle]
convention = "google"
[tool.ruff.flake8-tidy-imports]
[tool.ruff.flake8-tidy-imports.banned-api]
"os.environ".msg = """Please do not use os.environ outside of configuration files.
If you are working in a configuration file you may use the inline comment \
"# noqa: TID251 # os.enrivon allowed in config files" to ignore this error."""
"sqlalchemy".msg = "Please do not import sqlalchemy directly, import from great_expectations.compatibility.sqlalchemy instead."
"pyspark".msg = "Please do not import pyspark directly, import from great_expectations.compatibility.pyspark instead."
"boto3".msg = "Please do not import boto3 directly, import from great_expectations.compatibility.aws instead."
"google".msg = "Please do not import google directly, import from great_expectations.compatibility.google instead."
"azure".msg = "Please do not import azure directly, import from great_expectations.compatibility.azure instead."
"trino".msg = "Please do not import trino directly, import from great_expectations.compatibility.trino instead."
"pyarrow".msg = "Please do not import pyarrow directly, import from great_expectations.compatibility.pyarrow instead."
"typing_extensions.override".msg = "Do not import typing_extensions.override directly, import `override` from great_expectations.compatibility.typing_extensions instead."
# TODO: remove pydantic once our min version is pydantic v2
"pydantic".msg = "Please do not import pydantic directly, import from great_expectations.compatibility.pydantic instead."
# -----------------------------------------------------------------
[tool.pytest.ini_options]
filterwarnings = [
# Turn all warnings not explicitly filtered below into errors
"error",
# This warning is common during testing where we intentionally use a COMPLETE format even in cases that would
# be potentially overly resource intensive in standard operation
"ignore:Setting result format to COMPLETE for a SqlAlchemyDataset:UserWarning",
# This deprecation warning was fixed in moto release 1.3.15, and the filter should be removed once we migrate
# to that minimum version
"ignore:Using or importing the ABCs:DeprecationWarning:moto.cloudformation.parsing",
# This deprecation warning comes from getsentry/responses, a mocking utility for requests. It is a dependency in moto.
"ignore:stream argument is deprecated. Use stream parameter in request directly:DeprecationWarning",
# We likely won't be updating to `marshmallow` 4; keep its removal warnings as errors so they are not silently ignored
"error::marshmallow.warnings.RemovedInMarshmallow4Warning",
# --------------------------------------- Great Expectations Warnings ----------------------------------
# This warning is for configuring the result_format parameter at the Validator-level, which will not be persisted,
# but is still useful for building the configuration.
"ignore:`result_format` configured at the Validator-level will not be persisted:UserWarning",
# This warning is for configuring the result_format parameter at the Expectation-level, which will not be persisted,
# but is still useful for building the configuration.
"ignore:`result_format` configured at the Expectation-level will not be persisted:UserWarning",
# This warning can be emitted when configuring splitters with fluent datasources
"ignore:The same option name is applied for your batch regex and splitter config:UserWarning",
# --------------------------------------- Great Expectations Warnings ----------------------------------
# --------------------------------------- Great Expectations Deprecation Warnings ----------------------------------
# Ignores in this section are for items in Great Expectations that are deprecated but not yet removed. Once the
# relevant code is removed, the warning ignore should also be removed.
# By ignoring these warnings, we will be able to turn on "warnings are errors" in our pipelines.
# Example Actual Warning: great_expectations.exceptions.exceptions.MetricResolutionError: The parameter "parse_strings_as_datetimes" is deprecated as of v0.13.41 in v0.16. As part of the V3 API transition, we've moved away from input transformation. For more information, please see: https://greatexpectations.io/blog/why-we-dont-do-transformations-for-expectations-and-when-we-do
# Example warning location: test_column_partition_metric_pd
'ignore: The parameter "parse_strings_as_datetimes" is deprecated as of v0.13.41 in v0.16.:DeprecationWarning',
# Example Actual Warning: UserWarning: Your query appears to have hard-coded references to your data. By not parameterizing your query with `{active_batch}`, {col}, etc., you may not be validating against your intended data asset, or the expectation may fail.
'ignore: Your query appears to have hard-coded references to your data. By not parameterizing your query with `{active_batch}`, {col}, etc., you may not be validating against your intended data asset, or the expectation may fail.:UserWarning',
# Example Actual Warning: UserWarning: Your query appears to not be parameterized for a data asset. By not parameterizing your query with `{active_batch}`, you may not be validating against your intended data asset, or the expectation may fail.
'ignore: Your query appears to not be parameterized for a data asset. By not parameterizing your query with `{active_batch}`, you may not be validating against your intended data asset, or the expectation may fail.:UserWarning',
# Example Actual Warning: DeprecationWarning: save_expectation_suite is deprecated as of v0.15.48 and will be removed in v0.18. Please use update_expectation_suite or add_or_update_expectation_suite instead.
# Found in test_checkpoint_new_happy_path_generates_a_notebook_and_checkpoint
'ignore: save_expectation_suite is deprecated as of v0.15.48 and will be removed in v0.18. Please use update_expectation_suite or add_or_update_expectation_suite instead.:DeprecationWarning',
# Example Actual Warning: (found in test_expect_queried_column_value_frequency_to_meet_threshold_override_query_sqlite)
# UserWarning: `row_condition` is an experimental feature. Combining this functionality with QueryExpectations may result in unexpected behavior.
'ignore: `row_condition` is an experimental feature. Combining this functionality with QueryExpectations may result in unexpected behavior.:UserWarning',
# Example Actual Warning: (found in test_golden_path_sql_datasource_configuration)
# DeprecationWarning: get_batch is deprecated for the V3 Batch Request API as of v0.13.20 and will be removed in v0.16. Please use get_batch_list instead.
'ignore: get_batch is deprecated for the V3 Batch Request API as of v0.13.20 and will be removed in v0.16.:DeprecationWarning',
# Example Actual Warning: (found in test_data_context)
# UserWarning: Warning. An existing `great_expectations.yml` was found here
'ignore: Warning. An existing `great_expectations.yml` was found here:UserWarning',
# Example Actual Warning: (found in test_data_context)
# UserWarning: Warning. An existing `config_variables.yml` was found here
'ignore: Warning. An existing `config_variables.yml` was found here:UserWarning',
# --------------------------------------- Great Expectations Deprecation Warnings ----------------------------------
# --------------------------------------- TEMPORARY IGNORES --------------------------------------------------------
# The warnings in this section should be addressed (fixed or ignored) but are ignored here temporarily to help allow
# turning on "warnings are errors" so new warnings become errors and are addressed during PRs.
'ignore: The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives:DeprecationWarning',
# python
# Example Actual Warning: RuntimeWarning: divide by zero encountered in divide, RuntimeWarning: divide by zero encountered in true_divide
# Found in test_atomic_prescriptive_summary_expect_column_kl_divergence_to_be_less_than, test_case_runner_v2_api
'ignore: divide by zero encountered:RuntimeWarning',
# Example Actual Warning: Found running tests/test_definitions/test_expectations_v3_api.py
# DeprecationWarning: NotImplemented should not be used in a boolean context
'ignore: NotImplemented should not be used in a boolean context:DeprecationWarning',
# python 3.10
# Example Actual Warning: Found in tests/datasource/fluent/data_asset/data_connector/test_s3_data_connector.py
# DeprecationWarning: ssl.PROTOCOL_TLS is deprecated
'ignore: ssl.PROTOCOL_TLS is deprecated:DeprecationWarning',
# python 3.11
# data_connector/util.py imports deprecated modules that will be removed in Python 3.12
"ignore: module 'sre_constants' is deprecated:DeprecationWarning",
"ignore: module 'sre_parse' is deprecated:DeprecationWarning",
# trino
# example actual warning: found in great_expectations/self_check/util.py:2752: in _create_trino_engine
# sqlalchemy.exc.SADeprecationWarning: The dbapi() classmethod on dialect classes has been renamed to import_dbapi(). Implement an import_dbapi() classmethod directly on class <class 'trino.sqlalchemy.dialect.TrinoDialect'> to remove this warning; the old .dbapi() classmethod may be maintained for backwards compatibility.
'ignore: The dbapi\(\) classmethod on dialect classes has been renamed to import_dbapi\(\):DeprecationWarning',
# six
# Example Actual Warning: Found in ImportError while loading conftest '/great_expectations/tests/conftest.py'.
# ImportWarning: _SixMetaPathImporter.exec_module() not found; falling back to load_module()
'ignore: _SixMetaPathImporter.exec_module\(\) not found:ImportWarning',
# distutils
# Example Actual Warning: Found in tests/datasource/test_batch_generators.py, test spark python 3.10
# DeprecationWarning: distutils Version classes are deprecated. Use packaging.version instead.
'ignore: distutils Version classes are deprecated. Use packaging.version instead.:DeprecationWarning',
# pandas
# Example Actual Warning: FutureWarning: The behavior of `series[i:j]` with an integer-dtype index is deprecated. In a future version, this will be treated as *label-based* indexing, consistent with e.g. `series[i]` lookups. To retain the old behavior, use `series.iloc[i:j]`. To get the future behavior, use `series.loc[i:j]`.
# Found when running pytest tests/test_definitions/test_expectations_v3_api.py
'ignore: The behavior of `series:FutureWarning',
# Example Actual Warning: UserWarning: Unknown extension is not supported and will be removed
# Found when running TestIO.test_read_excel
'ignore: Unknown extension is not supported and will be removed:UserWarning',
# Example Actual Warning: DeprecationWarning: The default dtype for empty Series will be 'object' instead of 'float64' in a future version. Specify a dtype explicitly to silence this warning.
# Found when running test_value_counts_metric_spark
"ignore: The default dtype for empty Series will be 'object' instead of 'float64' in a future version.:DeprecationWarning",
# Example Actual Warning: Found by running pytest tests/rule_based_profiler/data_assistant/test_volume_data_assistant.py
# FutureWarning: In a future version, object-dtype columns with all-bool values will not be included in reductions with bool_only=True. Explicitly cast to bool dtype instead.
'ignore: In a future version, object-dtype columns with all-bool values will not be included:FutureWarning',
# Example Actual Warning: Found by running tests/expectations/metrics/test_core.py::test_value_counts_metric_spark
# FutureWarning: The default dtype for empty Series will be 'object' instead of 'float64' in a future version. Specify a dtype explicitly to silence this warning.
'ignore: The default dtype for empty Series will be:FutureWarning',
# Example Actual Warning: Found by running tests/render/test_column_section_renderer.py::test_ProfilingResultsColumnSectionRenderer_render_bar_chart_table with Pandas 2.0. The warning is emitted through an Altair v5 codepath.
# FutureWarning: the convert_dtype parameter is deprecated and will be removed in a future version. Do ``ser.astype(object).apply()`` instead if you want ``convert_dtype=False``.
# GH Issue: https://github.com/altair-viz/altair/issues/3181
'ignore: the convert_dtype parameter is deprecated and will be removed in a future version:FutureWarning',
# Example Actual Warning: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.
'ignore: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.',
# Example Actual Warning: FutureWarning: Returning a DataFrame from Series.apply when the supplied function returns a Series is deprecated and will be removed in a future version.
'ignore: Returning a DataFrame from Series.apply when the supplied function returns a Series is deprecated and will be removed in a future version.',
# numpy
# Example Actual Warning: RuntimeWarning: Mean of empty slice.
# Found when running test_case_runner_v3_api[spark/column_aggregate_expectations/expect_column_median_to_be_between:test_empty_column_should_be_false_no_observed_value_with_which_to_compare]
'ignore: Mean of empty slice.:RuntimeWarning',
# Example Actual Warning: RuntimeWarning: invalid value encountered in double_scalars
# Found when running test_case_runner_v3_api[spark/column_aggregate_expectations/expect_column_median_to_be_between:test_empty_column_should_be_false_no_observed_value_with_which_to_compare]
'ignore: invalid value encountered:RuntimeWarning',
# spark
# Example Actual Warning: FutureWarning: Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate() instead.
# Found when running test_case_runner_v2_api[SparkDFDataset/column_pair_map_expectations/expect_column_pair_values_to_be_in_set:basic_positive_test_without_nulls]
'ignore: Deprecated in 3.0.0. Use SparkSession.builder.getOrCreate\(\) instead.:FutureWarning',
# Example Actual Warning: FutureWarning: is_datetime64tz_dtype is deprecated and will be removed in a future version. Check `isinstance(dtype, pd.DatetimeTZDtype)` instead.
'ignore: is_datetime64tz_dtype is deprecated and will be removed in a future version. Check `isinstance\(dtype, pd.DatetimeTZDtype\)` instead.',
# pymysql
# Example Actual Warning: pymysql.err.Warning: (1292, "Truncated incorrect DOUBLE value: 'cat'")
# Found in tests/test_definitions/test_expectations_v2_api.py; if it does not appear in the v3 api, remove this ignore directive along with the v2 api code.
'ignore: \(1292, "Truncated incorrect DOUBLE value:Warning',
# numpy and python 3.11
# Found when running tests in tests/profile/test_basic_suite_builder_profiler.py
# Example Actual Warning: RuntimeWarning: invalid value encountered in reduce
"ignore: invalid value encountered in reduce",
# urllib3
"ignore:'urllib3.contrib.pyopenssl' module is deprecated and will be removed in a future release of urllib3 2.x.:DeprecationWarning",
# boto
# Example Actual Warning: Found when importing botocore when collecting tests with 3.10.
# ImportWarning: _SixMetaPathImporter.find_spec() not found; falling back to find_module()
'ignore: _SixMetaPathImporter.find_spec\(\) not found; falling back to find_module\(\):ImportWarning',
# ruamel
'ignore:\nsafe_load will be removed:PendingDeprecationWarning',
'ignore:\nload will be removed:PendingDeprecationWarning',
# Warning found in tests/integration/profiling/rule_based_profiler/test_profiler_basic_workflows.py, among other places.
'ignore:\ndump will be removed:PendingDeprecationWarning',
# Warning found in tests/integration/profiling/rule_based_profiler/test_profiler_basic_workflows.py
'ignore:\ndump_all will be removed:PendingDeprecationWarning',
# pyarrow
# Example Actual Warning:
# UserWarning: You have an incompatible version of 'pyarrow' installed (11.0.0), please install a version that adheres to: 'pyarrow<10.1.0,>=10.0.1; extra == "pandas"'
"ignore: You have an incompatible version of 'pyarrow' installed:UserWarning",
# jupyter
# Example Actual Warning:
# DeprecationWarning: Jupyter is migrating its paths to use standard platformdirs
# given by the platformdirs library. To remove this warning and
# see the appropriate new directories, set the environment variable
# `JUPYTER_PLATFORM_DIRS=1` and then run `jupyter --paths`.
# The use of platformdirs will be the default in `jupyter_core` v6
'ignore: Jupyter is migrating its paths to use standard platformdirs:DeprecationWarning',
# pytest
# Example Actual Warning:
# pytest.PytestCollectionWarning: cannot collect test class 'TestConnectionError' because it has a __init__ constructor (from: tests/datasource/fluent/test_pandas_azure_blob_storage_datasource.py)
"ignore: cannot collect test class 'TestConnectionError' because it has a __init__ constructor:UserWarning",
# Example Actual Warning:
# pytest.PytestUnraisableExceptionWarning: Exception ignored in: <socket.socket fd=-1, family=AddressFamily.AF_INET, type=SocketKind.SOCK_STREAM, proto=6>
"ignore: Exception ignored in:UserWarning",
# jsonschema (altair dependency)
# Example Actual Warning:
# DeprecationWarning: jsonschema.RefResolver is deprecated as of v4.18.0, in favor of the
# https://github.com/python-jsonschema/referencing library,
# which provides more compliant referencing behavior as well as more flexible APIs for customization.
# A future release will remove RefResolver. Please file a feature request (on referencing) if you are missing an API for the kind of customization you need.
"ignore: jsonschema.RefResolver is deprecated as of v4.18.0:DeprecationWarning",
# Example Actual Warning:
# DeprecationWarning: Importing ErrorTree directly from the jsonschema package is deprecated and will become an ImportError. Import it from jsonschema.exceptions instead.
"ignore: Importing ErrorTree directly from the jsonschema package is deprecated and will become an ImportError. Import it from jsonschema.exceptions instead.:DeprecationWarning",
# sqlalchemy
# Example Actual Warning:
# sqlalchemy.exc.RemovedIn20Warning: Deprecated API features detected! These feature(s) are not compatible with SQLAlchemy 2.0. To prevent incompatible upgrades prior to updating applications, ensure requirements files are pinned to "sqlalchemy<2.0". Set environment variable SQLALCHEMY_WARN_20=1 to show all deprecation warnings. Set environment variable SQLALCHEMY_SILENCE_UBER_WARNING=1 to silence this message. (Background on SQLAlchemy 2.0 at: https://sqlalche.me/e/b8d9)
'ignore: Deprecated API features detected! These feature\(s\) are not compatible with SQLAlchemy 2\.0\.',
# --------------------------------------- TEMPORARY IGNORES --------------------------------------------------------
]
junit_family = "xunit2"
markers = [
"all_backends: mark tests that run against all execution engine backends",
"athena: mark a test as AWS Athena-dependent.",
"aws_creds: mark all tests that require aws credentials",
"aws_deps: mark tests that need aws dependencies like boto",
"big: mark tests that don't have external dependencies but aren't unit tests",
"cli: mark all tests in the /cli directory.",
"clickhouse: mark a test as Clickhouse-dependent.",
"cloud: mark test as being relevant to Great Expectations Cloud.",
"docs-basic: mark a test as a docs test, that does not require additional credentials.",
"docs-creds-needed: mark a test as a docs test that needs additional cloud credentials (these will not run on public forks).",
"databricks: mark test as requiring databricks.",
"docs: mark a test as a docs test.",
"docs-spark: temporarily mark a test as a docs test that depends on spark.",
"e2e: mark test as an E2E test.",
"external_sqldialect: mark test as requiring install of an external sql dialect.",
"filesystem: mark tests using the filesystem as the storage backend.",
"mssql: mark a test as mssql-dependent.",
"mysql: mark a test as mysql-dependent.",
"openpyxl: mark a test for openpyxl-dependent, which is for Excel files.",
"performance: mark a test as a performance test for BigQuery. These aren't run in our PR or release pipeline",
"postgresql: mark a test as postgresql-dependent.",
"project: mark a test that verifies properties of the gx project",
"pyarrow: mark a test as PyArrow-dependent.",
"snowflake: mark a test as snowflake-dependent.",
"sqlalchemy_version_compatibility: mark test as required for sqlalchemy version compatibility.",
"sqlite: mark test requiring sqlite",
"slow: mark tests taking longer than 1 second.",
"spark: mark a test as Spark-dependent.",
"trino: mark a test as trino-dependent.",
"unit: mark a test as a unit test.",
"v2_api: mark test as specific to the v2 api (e.g. pre Data Connectors).",
]
testpaths = "tests"
# use `pytest-mock` drop-in replacement for `unittest.mock`
# https://pytest-mock.readthedocs.io/en/latest/configuration.html#use-standalone-mock-package
mock_use_standalone_module = false
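# With the standalone package disabled, pytest-mock's `mocker` fixture wraps the stdlib
# unittest.mock; hypothetical usage (the patch target is illustrative only):
#     def test_loads_config(mocker):
#         loader = mocker.patch("great_expectations.some_module.load_config")
#         ...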
# https://docs.pytest.org/en/7.1.x/how-to/logging.html#how-to-manage-logging
log_level = "info"