From 2e45505341ac7b53904d15283426cae78bce83c1 Mon Sep 17 00:00:00 2001 From: niccolum Date: Sat, 28 Mar 2026 14:29:48 +0300 Subject: [PATCH 01/36] rename file_ to file --- README.md | 2 +- changes/+file-path-expansion.feature | 2 +- docs/advanced/env-expansion.md | 2 +- docs/api-reference.md | 4 +- docs/comparison/why-not-pydantic-settings.md | 2 +- docs/index.md | 2 +- docs/introduction.md | 8 +- .../advanced/configure/advanced_configure.py | 6 +- .../configure/advanced_configure_env.py | 6 +- .../advanced_configure_type_loaders.py | 2 +- .../advanced/custom_types/custom_loader.py | 2 +- .../docs/advanced/custom_types/custom_type.py | 2 +- .../custom_types/custom_type_merge.py | 4 +- .../advanced/debug/advanced_debug_error.py | 4 +- .../advanced/debug/advanced_debug_logging.py | 4 +- .../advanced/debug/advanced_debug_report.py | 4 +- .../env_expansion/advanced_env_expansion.py | 2 +- ...vanced_env_expansion_file_path_combined.py | 2 +- .../advanced_env_expansion_file_path_dir.py | 2 +- .../advanced_env_expansion_file_path_name.py | 2 +- .../advanced_env_expansion_merge.py | 6 +- .../advanced_env_expansion_strict.py | 2 +- .../advanced_field_groups_expansion_error.py | 4 +- .../advanced_field_groups_multiple_error.py | 4 +- .../advanced_field_groups_nested_error.py | 4 +- .../advanced_merge_rules_callable.py | 4 +- .../advanced_merge_rules_conflict.py | 4 +- .../merge_rules/merging_field_append.py | 4 +- .../merging_field_append_unique.py | 4 +- .../merge_rules/merging_field_first_wins.py | 4 +- .../merge_rules/merging_field_groups.py | 4 +- .../merge_rules/merging_field_last_wins.py | 4 +- .../merge_rules/merging_field_prepend.py | 4 +- .../merging_field_prepend_unique.py | 4 +- .../merge_rules/merging_first_found.py | 4 +- .../merge_rules/merging_skip_broken.py | 4 +- .../merging_skip_broken_per_source.py | 6 +- .../merge_rules/merging_skip_invalid.py | 2 +- .../merging_skip_invalid_per_field.py | 4 +- .../nested_resolve_docker_secrets.py | 2 +- 
.../nested_resolve/nested_resolve_envfile.py | 2 +- .../why-not-dynaconf/dynaconf_basic.py | 2 +- .../why-not-dynaconf/dynaconf_merge.py | 4 +- .../dynaconf_root_validators.py | 2 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_dataclass.py | 2 +- .../comparison/why-not-hydra/hydra_merge.py | 4 +- .../why-not-hydra/hydra_validators.py | 2 +- .../pydantic_settings_auto_detect.py | 6 +- .../pydantic_settings_basic.py | 2 +- .../pydantic_settings_merge.py | 4 +- .../docs/features/masking/masking_by_name.py | 2 +- .../features/masking/masking_classic_style.py | 2 +- .../features/masking/masking_heuristic.py | 2 +- .../features/masking/masking_merge_mode.py | 4 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../features/masking/masking_secret_str.py | 2 +- .../docs/features/merging/merging_basic.py | 4 +- .../features/merging/merging_strategies.py | 8 +- .../merging/merging_strategy_first_found.py | 6 +- .../merging/merging_strategy_first_wins.py | 4 +- .../merging/merging_strategy_last_wins.py | 4 +- .../merging_strategy_raise_on_conflict.py | 4 +- .../merging/merging_tuple_shorthand.py | 4 +- .../merging_tuple_shorthand_decorator.py | 2 +- .../features/naming/naming_field_mapping.py | 2 +- .../docs/features/naming/naming_name_style.py | 2 +- .../features/naming/naming_nested_fields.py | 2 +- .../features/naming/naming_prefix_nested.py | 2 +- .../validation/validation_annotated.py | 2 +- .../features/validation/validation_custom.py | 2 +- .../validation/validation_metadata.py | 2 +- .../validation/validation_post_init.py | 2 +- .../features/validation/validation_root.py | 2 +- examples/docs/introduction/format_docker.py | 2 +- examples/docs/introduction/format_env.py | 2 +- examples/docs/introduction/format_ini.py | 2 +- examples/docs/introduction/format_json.py | 2 +- examples/docs/introduction/format_json5.py | 2 +- examples/docs/introduction/format_toml.py | 2 +- 
examples/docs/introduction/format_yaml.py | 2 +- .../docs/introduction/intro_decorator_file.py | 2 +- examples/docs/introduction/intro_file_like.py | 4 +- examples/load_all_formats.py | 18 +- src/dature/errors/exceptions.py | 14 +- src/dature/errors/formatter.py | 12 +- src/dature/errors/location.py | 18 +- src/dature/loading/context.py | 8 +- src/dature/loading/resolver.py | 18 +- src/dature/loading/single.py | 22 +-- src/dature/loading/source_loading.py | 52 +++--- src/dature/main.py | 16 +- src/dature/merging/deep_merge.py | 2 +- src/dature/metadata.py | 14 +- src/dature/protocols.py | 2 +- src/dature/sources_loader/base.py | 16 +- src/dature/sources_loader/docker_secrets.py | 8 +- src/dature/sources_loader/env_.py | 17 +- src/dature/sources_loader/json5_.py | 4 +- src/dature/sources_loader/json_.py | 4 +- src/dature/sources_loader/toml_.py | 4 +- src/dature/sources_loader/yaml_.py | 4 +- tests/errors/test_exceptions.py | 30 ++-- tests/errors/test_fixtures.py | 4 +- tests/errors/test_location.py | 26 +-- tests/expansion/test_expand_file_path.py | 16 +- tests/loading/test_field_merges.py | 162 +++++++++--------- tests/loading/test_multi.py | 140 +++++++-------- tests/loading/test_resolver.py | 52 +++--- tests/loading/test_single.py | 38 ++-- tests/loading/test_skip_invalid_fields.py | 60 +++---- tests/loading/test_source_loading.py | 52 +++--- tests/masking/test_masking.py | 26 +-- tests/merging/test_field_group.py | 104 +++++------ tests/sources_loader/test_base.py | 34 ++-- tests/sources_loader/test_docker_secrets.py | 6 +- tests/sources_loader/test_env_.py | 18 +- tests/sources_loader/test_ini_.py | 12 +- tests/sources_loader/test_json5_.py | 16 +- tests/sources_loader/test_json_.py | 16 +- tests/sources_loader/test_nested_resolve.py | 42 ++--- tests/sources_loader/test_toml10_.py | 16 +- tests/sources_loader/test_toml11_.py | 16 +- tests/sources_loader/test_yaml11_.py | 16 +- tests/sources_loader/test_yaml12_.py | 16 +- tests/test_custom_loader.py | 2 +- 
tests/test_load_report.py | 40 ++--- tests/test_main.py | 24 +-- tests/test_type_loaders.py | 12 +- tests/validators/test_complex.py | 18 +- tests/validators/test_custom_validator.py | 20 +-- tests/validators/test_metadata_validators.py | 36 ++-- tests/validators/test_number.py | 20 +-- .../validators/test_post_init_and_property.py | 26 +-- tests/validators/test_root_validator.py | 14 +- tests/validators/test_sequence.py | 16 +- tests/validators/test_string.py | 16 +- 138 files changed, 834 insertions(+), 845 deletions(-) diff --git a/README.md b/README.md index 30b602f..55a6c66 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ class Config: port: int debug: bool = False -config = load(Source(file_="config.yaml"), Config) +config = load(Source(file="config.yaml"), Config) ``` ## Key Features diff --git a/changes/+file-path-expansion.feature b/changes/+file-path-expansion.feature index 0fa9a0c..1ee561c 100644 --- a/changes/+file-path-expansion.feature +++ b/changes/+file-path-expansion.feature @@ -1 +1 @@ -Environment variables in `Source(file_=...)` are now expanded automatically in strict mode. Both directory paths (`$CONFIG_DIR/config.toml`) and file names (`config.$APP_ENV.toml`) are supported. +Environment variables in `Source(file=...)` are now expanded automatically in strict mode. Both directory paths (`$CONFIG_DIR/config.toml`) and file names (`config.$APP_ENV.toml`) are supported. diff --git a/docs/advanced/env-expansion.md b/docs/advanced/env-expansion.md index cb0d03d..f19058b 100644 --- a/docs/advanced/env-expansion.md +++ b/docs/advanced/env-expansion.md @@ -131,7 +131,7 @@ The `${VAR:-default}` fallback syntax works in all modes. ## File Path Expansion -Environment variables in `Source(file_=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `Source` creation time. 
+Environment variables in `Source(file=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `Source` creation time. This works for both directory paths and file names: diff --git a/docs/api-reference.md b/docs/api-reference.md index c8f9ea4..06d4d01 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -13,13 +13,13 @@ Main entry point. Two calling patterns: **Function mode** — pass `dataclass_`, get an instance back: ```python -config = load(Source(file_="config.yaml"), Config) +config = load(Source(file="config.yaml"), Config) ``` **Decorator mode** — omit `dataclass_`, get a decorator: ```python -@load(Source(file_="config.yaml")) +@load(Source(file="config.yaml")) @dataclass class Config: host: str diff --git a/docs/comparison/why-not-pydantic-settings.md b/docs/comparison/why-not-pydantic-settings.md index 3d7230e..97cb3e4 100644 --- a/docs/comparison/why-not-pydantic-settings.md +++ b/docs/comparison/why-not-pydantic-settings.md @@ -16,7 +16,7 @@ The trade-off is coupling: your config must be a Pydantic model, custom types ne | **Naming conventions** | `alias` / `alias_generator` (`to_camel`, `to_pascal`, `to_snake`) | Built-in `name_style` (6 conventions) + explicit `field_mapping` with multiple aliases | | **CLI** | Built-in `CliSettingsSource` with subcommands, async support | No CLI | | **Secrets** | `SecretStr`, `secrets_dir`, nested secrets directories | `SecretStr`, auto-masking in errors/logs (by type, name pattern, or heuristic) | -| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Source(file_="$DIR/config.toml")`) | +| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Source(file="$DIR/config.toml")`) | | **Error messages** | Pydantic `ValidationError` | Human-readable: source file, line number, context snippet | | **Debug / audit** | No | `debug=True` — which source provided each value | | 
**Validation** | Pydantic `field_validator`, `model_validator` (pre/post), constraints | `Annotated` validators, root validators, custom validators, `__post_init__` | diff --git a/docs/index.md b/docs/index.md index 1885abb..2b926fd 100644 --- a/docs/index.md +++ b/docs/index.md @@ -112,7 +112,7 @@ Load config from YAML, JSON, TOML, INI, ENV files, environment variables and Doc | Environment variables | — | `EnvLoader` | — | | Docker secrets | directory | `DockerSecretsLoader` | — | -The format is auto-detected from the file extension. When `file_` is not specified, environment variables are used. When `file_` points to a directory, `DockerSecretsLoader` is used. `file_` also accepts `Path` objects and file-like objects (`BytesIO`, `StringIO`) — for file-like objects, the `loader` parameter is required. +The format is auto-detected from the file extension. When `file` is not specified, environment variables are used. When `file` points to a directory, `DockerSecretsLoader` is used. `file` also accepts `Path` objects and file-like objects (`BytesIO`, `StringIO`) — for file-like objects, the `loader` parameter is required. ## mypy Plugin diff --git a/docs/introduction.md b/docs/introduction.md index 77cd98f..3e4c8e3 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -123,7 +123,7 @@ Override auto-detection with the `loader` parameter: ```python from dature.sources_loader.yaml_ import Yaml11Loader -Source(file_="config.yaml", loader=Yaml11Loader) +Source(file="config.yaml", loader=Yaml11Loader) ``` ## Source Reference @@ -134,7 +134,7 @@ Source(file_="config.yaml", loader=Yaml11Loader) | Parameter | Description | |-----------|-------------| -| `file_` | Path to config file (`str`, `Path`), file-like object (`BytesIO`, `StringIO`), or directory. `None` → environment variables. File-like objects require explicit `loader` | +| `file` | Path to config file (`str`, `Path`), file-like object (`BytesIO`, `StringIO`), or directory. `None` → environment variables. 
File-like objects require explicit `loader` | | `loader` | Explicit loader class. `None` → auto-detect from extension | | `prefix` | Filter ENV keys (`"APP_"`) or extract nested object (`"app.database"`) | | `split_symbols` | Delimiter for flat→nested conversion. Default: `"__"` | @@ -153,10 +153,10 @@ Source(file_="config.yaml", loader=Yaml11Loader) ### File-Like Objects -`file_` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass). The `loader` parameter is required since there is no file extension to auto-detect from: +`file` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass). The `loader` parameter is required since there is no file extension to auto-detect from: ```python ---8<-- "examples/docs/introduction/intro_file_like.py" +--8<-- "examples/docs/introduction/intro_file_like.py" ``` !!! note diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index 9ba32ad..265a434 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -17,20 +17,20 @@ class Config: # 1. Default config — debug is off, no report -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is None # 2. Enable debug globally via configure() configure(loading=LoadingConfig(debug=True)) -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is not None # 3.
Reset to defaults — debug is off again configure(loading=LoadingConfig()) -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is None diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index 1864519..7c5bde3 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -21,20 +21,20 @@ class Config: # 1. DATURE_LOADING__DEBUG=true — debug is on, report attached -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is not None # 2. Override env with configure() — debug is off configure(loading=LoadingConfig(debug=False)) -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is None # 3. 
Reset to env defaults — debug is on again configure(loading=LoadingConfig(debug=True)) -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) report = get_load_report(config) assert report is not None diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index 8863a55..df66d83 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -29,5 +29,5 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to every load() call configure(type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),)) -config = load(Source(file_=SOURCES_DIR / "custom_type_common.yaml"), AppConfig) +config = load(Source(file=SOURCES_DIR / "custom_type_common.yaml"), AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py index c1f635f..c034cfe 100644 --- a/examples/docs/advanced/custom_types/custom_loader.py +++ b/examples/docs/advanced/custom_types/custom_loader.py @@ -42,7 +42,7 @@ class Config: config = load( Source( - file_=SOURCES_DIR / "custom_loader.xml", + file=SOURCES_DIR / "custom_loader.xml", loader=XmlLoader, ), Config, diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index 4d5f429..5f14977 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -28,7 +28,7 @@ class AppConfig: config = load( Source( - file_=SOURCES_DIR / "custom_type_common.yaml", + file=SOURCES_DIR / "custom_type_common.yaml", type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), AppConfig, diff --git 
a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index 569126a..e0aafc5 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -28,8 +28,8 @@ class AppConfig: config = load( Merge( - Source(file_=SOURCES_DIR / "custom_type_common.yaml"), - Source(file_=SOURCES_DIR / "custom_type_merge_override.yaml"), + Source(file=SOURCES_DIR / "custom_type_common.yaml"), + Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), AppConfig, diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 87fdc39..3707eb2 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -20,8 +20,8 @@ class Config: try: config = load( Merge( - Source(file_=SHARED_DIR / "common_overrides.yaml"), - Source(file_=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), + Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), ), Config, debug=True, diff --git a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py index 3baf018..8cce215 100644 --- a/examples/docs/advanced/debug/advanced_debug_logging.py +++ b/examples/docs/advanced/debug/advanced_debug_logging.py @@ -25,8 +25,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), ), Config, ) diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py index de2b425..cc3099b 100644 --- 
a/examples/docs/advanced/debug/advanced_debug_report.py +++ b/examples/docs/advanced/debug/advanced_debug_report.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), ), Config, debug=True, diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py index ef05feb..2995a5d 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py @@ -24,7 +24,7 @@ class Config: config = load( - Source(file_=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), + Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py index 7dec1d7..b249168 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py @@ -19,7 +19,7 @@ class Config: config = load( - Source(file_="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), + Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py index 2c6960b..11e3121 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py @@ -18,7 +18,7 @@ class Config: config = load( - 
Source(file_="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), + Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py index 794d638..8d9c1cf 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py @@ -18,7 +18,7 @@ class Config: config = load( - Source(file_=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), + Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py index add09de..d78f951 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py @@ -23,9 +23,9 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" - Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), - Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), + Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" + Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), + Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), expand_env_vars="default", # global default for all sources ), Config, diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py index 78f4e02..8a9d9b6 100644 --- 
a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py @@ -18,7 +18,7 @@ class Config: config = load( - Source(file_=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), + Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), Config, ) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 07f50d8..4078a76 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -28,8 +28,8 @@ class Config: try: load( Merge( - Source(file_=SOURCES_DIR / "field_groups_nested_defaults.yaml"), - Source(file_=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), + Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), + Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), field_groups=(FieldGroup(F[Config].database, F[Config].port),), ), Config, diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index e73a05c..f931d41 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -23,8 +23,8 @@ class Config: try: load( Merge( - Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file_=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), + Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), field_groups=( FieldGroup(F[Config].host, F[Config].port), FieldGroup(F[Config].user, 
F[Config].password), diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 7990e4f..6ec796b 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -23,8 +23,8 @@ class Config: try: load( Merge( - Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file_=SOURCES_DIR / "field_groups_partial_overrides.yaml"), + Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), field_groups=( FieldGroup(F[Config].host, F[Config].port), FieldGroup(F[Config].user, F[Config].password), diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index 36828e5..3af886f 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -22,8 +22,8 @@ def merge_tags(values: list[Any]) -> list[str]: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.LAST_WINS, field_merges=(MergeRule(F[Config].tags, merge_tags),), ), diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index 2b9b577..499d429 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / 
"common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=( MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS), diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py index 338e190..1f8f86f 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index 23f5ccd..3a082bc 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index d5d4879..7deec3a 100644 --- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / 
"merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.FIRST_WINS),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/merge_rules/merging_field_groups.py index 23c3c86..185ae0e 100644 --- a/examples/docs/advanced/merge_rules/merging_field_groups.py +++ b/examples/docs/advanced/merge_rules/merging_field_groups.py @@ -19,8 +19,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file_=SHARED_DIR / "common_field_groups_overrides.yaml"), + Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index e4302f0..d166721 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.LAST_WINS),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index 65c36c9..32732db 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( 
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index ee5d59a..e9ad5a8 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -15,8 +15,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_field_base.yaml"), - Source(file_=SOURCES_DIR / "merging_field_override.yaml"), + Source(file=SOURCES_DIR / "merging_field_base.yaml"), + Source(file=SOURCES_DIR / "merging_field_override.yaml"), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index 0f4848e..a155958 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -16,8 +16,8 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_first_found_primary.yaml"), - Source(file_=SOURCES_DIR / "merging_first_found_fallback.yaml"), + Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), + Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), strategy=MergeStrategy.FIRST_FOUND, ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py index 24c8a48..6728a6c 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py @@ -18,8 +18,8 @@ class Config: config 
= load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py index cc19595..fd9801b 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py @@ -18,13 +18,13 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), # uses global + Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global Source( - file_=SOURCES_DIR / "optional.yaml", + file=SOURCES_DIR / "optional.yaml", skip_if_broken=True, ), # always skip if broken Source( - file_=SHARED_DIR / "common_overrides.yaml", + file=SHARED_DIR / "common_overrides.yaml", skip_if_broken=False, ), # never skip, even if global is True skip_broken_sources=True, # global default diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py index b4e4752..ed09ead 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py @@ -15,7 +15,7 @@ class Config: config = load( - Source(file_=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), + Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py index a0e39a2..5f22b2f 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py @@ 
-17,9 +17,9 @@ class Config: config = load( Merge( - Source(file_=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), + Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), Source( - file_=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", + file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", skip_if_invalid=(F[Config].port, F[Config].timeout), ), ), diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py index 0bae70a..4a38050 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py @@ -27,7 +27,7 @@ class Config: config = load( Source( - file_=secrets_path, + file=secrets_path, loader=DockerSecretsLoader, nested_resolve_strategy="json", ), diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py index 6109622..659203b 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py @@ -22,7 +22,7 @@ class Config: config = load( Source( - file_=SOURCES_DIR / "nested_resolve.env", + file=SOURCES_DIR / "nested_resolve.env", loader=EnvFileLoader, prefix="APP__", nested_resolve_strategy="json", diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py index 7390a64..e7c4b1a 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "dynaconf_basic.toml"), Config) +config = load(Source(file=SOURCES_DIR / "dynaconf_basic.toml"), Config) # config.hostt → AttributeError immediately # config.port is 
always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index 3afa371..469eb1c 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -17,8 +17,8 @@ class Config: # --8<-- [start:merge] config = load( Merge( - Source(file_=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), - Source(file_=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), + Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), + Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), strategy=MergeStrategy.LAST_WINS, ), Config, diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index dca7fa3..50fdf62 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -27,7 +27,7 @@ def check_debug_port(config: Config) -> bool: try: load( Source( - file_=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", + file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", root_validators=( RootValidator( func=check_debug_port, diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index ded4216..c0e7480 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -20,7 +20,7 @@ class Config: try: - load(Source(file_=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config) + load(Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml") assert str(exc) == "Config loading errors 
(1)" diff --git a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py index 62e4082..5118a5d 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py +++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py @@ -15,7 +15,7 @@ class Config: # --8<-- [start:dataclass] -config = load(Source(file_=SOURCES_DIR / "hydra_defaults.yaml"), Config) +config = load(Source(file=SOURCES_DIR / "hydra_defaults.yaml"), Config) assert isinstance(config, Config) # Full IDE support, type safety, __post_init__ works # --8<-- [end:dataclass] diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py b/examples/docs/comparison/why-not-hydra/hydra_merge.py index 5a13c26..ae90ed4 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_merge.py +++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py @@ -17,8 +17,8 @@ class Config: # --8<-- [start:merge] config = load( Merge( - Source(file_=SOURCES_DIR / "hydra_defaults.yaml"), - Source(file_=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), + Source(file=SOURCES_DIR / "hydra_defaults.yaml"), + Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), Source(prefix="APP_"), ), Config, diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index 7b5b24c..342a485 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -19,7 +19,7 @@ class Config: try: - load(Source(file_=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config) + load(Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "hydra_validators_invalid.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py 
b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py index 5775f9d..c28d5d0 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py @@ -16,9 +16,9 @@ class Config: # --8<-- [start:auto-detect] # Just change the file — dature picks the right loader -yaml_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config) -toml_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config) -json5_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config) +yaml_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config) +toml_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config) +json5_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config) # --8<-- [end:auto-detect] assert yaml_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py index 12a7624..177d013 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config) +config = load(Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py index bed427e..ecd8040 100644 --- 
a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py @@ -17,8 +17,8 @@ class Config: # --8<-- [start:merge] config = load( Merge( - Source(file_=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), - Source(file_=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), + Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), + Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), Source(prefix="APP_"), ), Config, diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index c1ddff5..d1b20a8 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -17,7 +17,7 @@ class Config: try: - load(Source(file_=SOURCES_DIR / "masking_by_name.yaml"), Config) + load(Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_by_name.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py index 1db8c52..ebd3964 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -22,7 +22,7 @@ class Config: host: str -config = load(Source(file_=SOURCES_DIR / "masking_by_name.yaml"), Config) +config = load(Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index d11f5a2..e2c8a2f 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -18,7 +18,7 @@ class 
Config: try: load( - Source(file_=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), + Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), Config, ) except DatureConfigError as exc: diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index c2a5007..5b61ec8 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -22,9 +22,9 @@ class Config: try: load( Merge( - Source(file_=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), + Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), Source( - file_=SOURCES_DIR / "masking_merge_mode_secrets.yaml", + file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", secret_field_names=("api_key",), ), ), diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index 8c9761e..04421dd 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -21,7 +21,7 @@ class Config: try: load( Source( - file_=SOURCES_DIR / "masking_per_source.yaml", + file=SOURCES_DIR / "masking_per_source.yaml", mask_secrets=False, ), Config, diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index a5a6d72..1425de3 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -21,7 +21,7 @@ class Config: try: load( Source( - file_=SOURCES_DIR / "masking_per_source.yaml", + file=SOURCES_DIR / "masking_per_source.yaml", secret_field_names=("api_key",), ), Config, diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index 4734a7f..13389e2 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -20,7 
+20,7 @@ class Config: try: config = load( - Source(file_=SOURCES_DIR / "masking_secret_str.yaml"), + Source(file=SOURCES_DIR / "masking_secret_str.yaml"), Config, ) except DatureConfigError as exc: diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index 9d5c175..d8d4f55 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.LAST_WINS, ), Config, diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index 05a4d4b..61344ee 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py @@ -17,8 +17,8 @@ class Config: last_wins = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.LAST_WINS, ), Config, @@ -26,8 +26,8 @@ class Config: first_wins = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.FIRST_WINS, ), Config, diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index d17859f..115f298 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -17,9 +17,9 @@ class Config: 
config = load( Merge( - Source(file_=SHARED_DIR / "nonexistent.yaml"), - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "nonexistent.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.FIRST_FOUND, ), Config, diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index 0322dca..347fea2 100644 --- a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.FIRST_WINS, ), Config, diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py index 862f0eb..eef8578 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), strategy=MergeStrategy.LAST_WINS, ), Config, diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 0e9513a..9a4312f 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -17,8 +17,8 @@ class Config: config = load( Merge( - 
Source(file_=SHARED_DIR / "common_raise_on_conflict_a.yaml"), - Source(file_=SHARED_DIR / "common_raise_on_conflict_b.yaml"), + Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), + Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py index 9c176a4..45aebff 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand.py +++ b/examples/docs/features/merging/merging_tuple_shorthand.py @@ -17,8 +17,8 @@ class Config: config = load( ( - Source(file_=SHARED_DIR / "common_defaults.yaml"), - Source(file_=SHARED_DIR / "common_overrides.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_overrides.yaml"), ), Config, ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py index c25a9c8..ddfc51a 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py +++ b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py @@ -13,7 +13,7 @@ @load( ( - Source(file_=SHARED_DIR / "common_defaults.yaml"), + Source(file=SHARED_DIR / "common_defaults.yaml"), Source(prefix="APP_"), ), ) diff --git a/examples/docs/features/naming/naming_field_mapping.py b/examples/docs/features/naming/naming_field_mapping.py index c229d94..831c691 100644 --- a/examples/docs/features/naming/naming_field_mapping.py +++ b/examples/docs/features/naming/naming_field_mapping.py @@ -17,7 +17,7 @@ class DbConfig: config = load( Source( - file_=SOURCES_DIR / "naming_field_mapping.yaml", + file=SOURCES_DIR / "naming_field_mapping.yaml", field_mapping={ F[DbConfig].database_url: "db_url", F[DbConfig].secret_key: "key", diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py index 
de3b575..af77f2d 100644 --- a/examples/docs/features/naming/naming_name_style.py +++ b/examples/docs/features/naming/naming_name_style.py @@ -17,7 +17,7 @@ class ApiConfig: config = load( - Source(file_=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), + Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), ApiConfig, ) diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py index 05d5bd5..f491f4c 100644 --- a/examples/docs/features/naming/naming_nested_fields.py +++ b/examples/docs/features/naming/naming_nested_fields.py @@ -22,7 +22,7 @@ class User: config = load( Source( - file_=SOURCES_DIR / "naming_nested_fields.yaml", + file=SOURCES_DIR / "naming_nested_fields.yaml", field_mapping={ F[User].name: "fullName", F[User].address: "location", diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py index 7003763..5430887 100644 --- a/examples/docs/features/naming/naming_prefix_nested.py +++ b/examples/docs/features/naming/naming_prefix_nested.py @@ -14,7 +14,7 @@ class Database: port: int -db = load(Source(file_=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database) +db = load(Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database) assert db.host == "localhost" assert db.port == 5432 diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index 83b66ef..b5da2c9 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -23,7 +23,7 @@ class ServiceConfig: try: load( - Source(file_=SOURCES_DIR / "validation_annotated_invalid.json5"), + Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), ServiceConfig, ) except DatureConfigError as exc: diff --git 
a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index 5e284cf..032dcb6 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -37,7 +37,7 @@ class ServiceConfig: try: load( - Source(file_=SOURCES_DIR / "validation_custom_invalid.json5"), + Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), ServiceConfig, ) except DatureConfigError as exc: diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 333d35b..0fe7b59 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -21,7 +21,7 @@ class Config: try: load( Source( - file_=SOURCES_DIR / "validation_metadata_invalid.yaml", + file=SOURCES_DIR / "validation_metadata_invalid.yaml", validators={ F[Config].host: MinLength(value=1), F[Config].port: (Ge(value=1), Lt(value=65536)), diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 78c36cf..6d55936 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -25,6 +25,6 @@ def address(self) -> str: try: - load(Source(file_=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config) + load(Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config) except ValueError as exc: assert str(exc) == "port must be between 1 and 65535, got 99999" diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index bf6fc6c..f60f2ea 100644 --- a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -26,7 +26,7 @@ def check_debug_not_on_production(obj: Config) -> bool: try: load( Source( - 
file_=SOURCES_DIR / "validation_root_invalid.yaml", + file=SOURCES_DIR / "validation_root_invalid.yaml", root_validators=( RootValidator( func=check_debug_not_on_production, diff --git a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py index 055b1ad..cb1a1c3 100644 --- a/examples/docs/introduction/format_docker.py +++ b/examples/docs/introduction/format_docker.py @@ -16,7 +16,7 @@ class Config: config = load( - Source(file_=SOURCES_DIR / "intro_app_docker_secrets"), + Source(file=SOURCES_DIR / "intro_app_docker_secrets"), Config, ) diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py index 0a212da..633b864 100644 --- a/examples/docs/introduction/format_env.py +++ b/examples/docs/introduction/format_env.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "intro_app.env"), Config) +config = load(Source(file=SOURCES_DIR / "intro_app.env"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py index 33486ac..d4af00c 100644 --- a/examples/docs/introduction/format_ini.py +++ b/examples/docs/introduction/format_ini.py @@ -16,7 +16,7 @@ class Config: config = load( - Source(file_=SOURCES_DIR / "intro_app.ini", prefix="app"), + Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), Config, ) diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py index 80dfb7c..5293c97 100644 --- a/examples/docs/introduction/format_json.py +++ b/examples/docs/introduction/format_json.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "intro_app.json"), Config) +config = load(Source(file=SOURCES_DIR / "intro_app.json"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_json5.py 
b/examples/docs/introduction/format_json5.py index 97edaad..de205ad 100644 --- a/examples/docs/introduction/format_json5.py +++ b/examples/docs/introduction/format_json5.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "intro_app.json5"), Config) +config = load(Source(file=SOURCES_DIR / "intro_app.json5"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py index ae72c85..a848344 100644 --- a/examples/docs/introduction/format_toml.py +++ b/examples/docs/introduction/format_toml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file_=SOURCES_DIR / "intro_app.toml"), Config) +config = load(Source(file=SOURCES_DIR / "intro_app.toml"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py index babd3e0..d4ba2d0 100644 --- a/examples/docs/introduction/format_yaml.py +++ b/examples/docs/introduction/format_yaml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config) +config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/intro_decorator_file.py b/examples/docs/introduction/intro_decorator_file.py index 8e807a7..d81dc3e 100644 --- a/examples/docs/introduction/intro_decorator_file.py +++ b/examples/docs/introduction/intro_decorator_file.py @@ -8,7 +8,7 @@ SHARED_DIR = Path(__file__).parents[1] / "shared" -@load(Source(file_=SHARED_DIR / "common_app.yaml")) +@load(Source(file=SHARED_DIR / "common_app.yaml")) @dataclass class Config: host: str diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py index ddaccf2..0ced9d7 100644 --- 
a/examples/docs/introduction/intro_file_like.py +++ b/examples/docs/introduction/intro_file_like.py @@ -16,14 +16,14 @@ class Config: # From StringIO text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}') -config = load(Source(file_=text_stream, loader=JsonLoader), Config) +config = load(Source(file=text_stream, loader=JsonLoader), Config) assert config.host == "localhost" assert config.port == 8080 # From BytesIO binary_stream = BytesIO(b'{"host": "0.0.0.0", "port": 3000}') -config = load(Source(file_=binary_stream, loader=JsonLoader), Config) +config = load(Source(file=binary_stream, loader=JsonLoader), Config) assert config.host == "0.0.0.0" assert config.port == 3000 diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index 005936d..4ed8c65 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -12,16 +12,16 @@ SOURCES_DIR = Path(__file__).parent / "sources" FORMATS = { - "json": Source(file_=SOURCES_DIR / "all_types.json"), - "json5": Source(file_=SOURCES_DIR / "all_types.json5"), - "toml10": Source(file_=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader), - "toml11": Source(file_=SOURCES_DIR / "all_types_toml11.toml"), - "ini": Source(file_=SOURCES_DIR / "all_types.ini", prefix="all_types"), - "yaml11": Source(file_=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader), - "yaml12": Source(file_=SOURCES_DIR / "all_types_yaml12.yaml", loader=Yaml12Loader), - "env": Source(file_=SOURCES_DIR / "all_types.env"), + "json": Source(file=SOURCES_DIR / "all_types.json"), + "json5": Source(file=SOURCES_DIR / "all_types.json5"), + "toml10": Source(file=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader), + "toml11": Source(file=SOURCES_DIR / "all_types_toml11.toml"), + "ini": Source(file=SOURCES_DIR / "all_types.ini", prefix="all_types"), + "yaml11": Source(file=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader), + "yaml12": Source(file=SOURCES_DIR / 
"all_types_yaml12.yaml", loader=Yaml12Loader), + "env": Source(file=SOURCES_DIR / "all_types.env"), "docker_secrets": Source( - file_=SOURCES_DIR / "all_types_docker_secrets", + file=SOURCES_DIR / "all_types_docker_secrets", loader=DockerSecretsLoader, ), } diff --git a/src/dature/errors/exceptions.py b/src/dature/errors/exceptions.py index 347adc6..d970e58 100644 --- a/src/dature/errors/exceptions.py +++ b/src/dature/errors/exceptions.py @@ -105,23 +105,23 @@ def _format_location( return [ *_format_content_lines(loc.line_content, prefix=" ├── "), f" │ {' ' * found.pos}{'^' * caret_len}", - *_format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix), + *_format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix), ] if loc.line_content is not None: return [ *_format_content_lines(loc.line_content, prefix=" ├── "), - *_format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix), + *_format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix), ] - return _format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix) + return _format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix) -def _format_file_line(loc: SourceLocation, *, connector: str, suffix: str = "") -> list[str]: - file_main = f" {connector} {loc.display_label} '{loc.file_path}'" +def _format_fileline(loc: SourceLocation, *, connector: str, suffix: str = "") -> list[str]: + filemain = f" {connector} {loc.display_label} '{loc.file_path}'" if loc.line_range is not None: - file_main += f", {loc.line_range!r}" - return [file_main + suffix] + filemain += f", {loc.line_range!r}" + return [filemain + suffix] def _format_path(field_path: list[str]) -> str: diff --git a/src/dature/errors/formatter.py b/src/dature/errors/formatter.py index f202242..0c84339 100644 --- a/src/dature/errors/formatter.py +++ b/src/dature/errors/formatter.py @@ -23,7 +23,7 @@ FieldLoadError, MissingEnvVarError, ) -from dature.errors.location 
import ErrorContext, read_file_content, resolve_source_location +from dature.errors.location import ErrorContext, read_filecontent, resolve_source_location from dature.masking.masking import is_random_string, mask_value if TYPE_CHECKING: @@ -127,17 +127,17 @@ def handle_load_errors[T]( try: return func() except EnvVarExpandError as exc: - file_content = read_file_content(ctx.file_path) + filecontent = read_filecontent(ctx.file_path) enriched_env: list[MissingEnvVarError] = [] for e in exc.exceptions: if not isinstance(e, MissingEnvVarError): continue - locations = resolve_source_location(e.field_path, ctx, file_content) + locations = resolve_source_location(e.field_path, ctx, filecontent) e.location = locations[0] if locations else None enriched_env.append(e) raise EnvVarExpandError(enriched_env, dataclass_name=ctx.dataclass_name) from exc except (AggregateLoadError, LoadError) as exc: - file_content = read_file_content(ctx.file_path) + filecontent = read_filecontent(ctx.file_path) heuristic_paths: set[str] = set() field_errors: list[FieldLoadError] = [] _walk_exception( @@ -153,7 +153,7 @@ def handle_load_errors[T]( location_ctx = replace(ctx, secret_paths=ctx.secret_paths | heuristic_paths) enriched: list[FieldLoadError] = [] for fe in field_errors: - locations = resolve_source_location(fe.field_path, location_ctx, file_content) + locations = resolve_source_location(fe.field_path, location_ctx, filecontent) enriched.append( FieldLoadError( field_path=fe.field_path, @@ -188,7 +188,7 @@ def enrich_skipped_errors( source_reprs = ", ".join(repr(s.metadata) for s in sources) locations = [ - loc for s in sources for loc in resolve_source_location(exc.field_path, s.error_ctx, s.file_content) + loc for s in sources for loc in resolve_source_location(exc.field_path, s.error_ctx, s.filecontent) ] updated.append( FieldLoadError( diff --git a/src/dature/errors/location.py b/src/dature/errors/location.py index 840b2cb..ef095c0 100644 --- a/src/dature/errors/location.py +++ 
b/src/dature/errors/location.py @@ -23,7 +23,7 @@ class ErrorContext: nested_conflicts: NestedConflicts | None = None -def read_file_content(file_path: Path | None) -> str | None: +def read_filecontent(file_path: Path | None) -> str | None: if file_path is None: return None @@ -46,13 +46,13 @@ def _ranges_overlap(a: LineRange, b: LineRange) -> bool: def _secret_overlaps_lines( *, - file_content: str, + filecontent: str, line_range: LineRange, secret_paths: frozenset[str], prefix: str | None, path_finder_class: type, ) -> bool: - finder = path_finder_class(file_content) + finder = path_finder_class(filecontent) for secret_path in secret_paths: search_path = _build_search_path(secret_path.split("."), prefix) secret_range = finder.find_line_range(search_path) @@ -74,7 +74,7 @@ def _resolve_conflict( def _apply_masking( locations: list[SourceLocation], ctx: ErrorContext, - file_content: str | None, + filecontent: str | None, *, is_secret: bool, ) -> list[SourceLocation]: @@ -86,10 +86,10 @@ def _apply_masking( and ctx.secret_paths and location.line_range is not None and ctx.loader_class.path_finder_class is not None - and file_content is not None + and filecontent is not None ): should_mask = _secret_overlaps_lines( - file_content=file_content, + filecontent=filecontent, line_range=location.line_range, secret_paths=ctx.secret_paths, prefix=ctx.prefix, @@ -114,7 +114,7 @@ def _apply_masking( def resolve_source_location( field_path: list[str], ctx: ErrorContext, - file_content: str | None, + filecontent: str | None, ) -> list[SourceLocation]: is_secret = ".".join(field_path) in ctx.secret_paths conflict = _resolve_conflict(field_path, ctx) @@ -122,10 +122,10 @@ def resolve_source_location( locations = ctx.loader_class.resolve_location( field_path, ctx.file_path, - file_content, + filecontent, ctx.prefix, ctx.split_symbols, conflict, ) - return _apply_masking(locations, ctx, file_content, is_secret=is_secret) + return _apply_masking(locations, ctx, filecontent, 
is_secret=is_secret) diff --git a/src/dature/loading/context.py b/src/dature/loading/context.py index 9013f73..ad9bfd1 100644 --- a/src/dature/loading/context.py +++ b/src/dature/loading/context.py @@ -49,11 +49,11 @@ def build_error_ctx( mask_secrets: bool = False, nested_conflicts: NestedConflicts | None = None, ) -> ErrorContext: - loader_class = resolve_loader_class(metadata.loader, metadata.file_) - if isinstance(metadata.file_, FILE_LIKE_TYPES): + loader_class = resolve_loader_class(metadata.loader, metadata.file) + if isinstance(metadata.file, FILE_LIKE_TYPES): error_file_path = None - elif metadata.file_ is not None: - error_file_path = Path(metadata.file_) + elif metadata.file is not None: + error_file_path = Path(metadata.file) else: error_file_path = None return ErrorContext( diff --git a/src/dature/loading/resolver.py b/src/dature/loading/resolver.py index 79715ad..ef33f41 100644 --- a/src/dature/loading/resolver.py +++ b/src/dature/loading/resolver.py @@ -64,16 +64,16 @@ def _resolve_by_extension_inner(extension: str) -> "type[LoaderProtocol]": def resolve_loader_class( loader: "type[LoaderProtocol] | None", - file_: "FileLike | FilePath | None", + file: "FileLike | FilePath | None", ) -> "type[LoaderProtocol]": if loader is not None: - if file_ is not None and not isinstance(file_, FILE_LIKE_TYPES) and loader is EnvLoader: + if file is not None and not isinstance(file, FILE_LIKE_TYPES) and loader is EnvLoader: msg = ( "EnvLoader reads from environment variables and does not use files. " - "Remove file_ or use a file-based loader instead (e.g. EnvFileLoader)." + "Remove file or use a file-based loader instead (e.g. EnvFileLoader)." ) raise ValueError(msg) - if isinstance(file_, FILE_LIKE_TYPES) and loader in (EnvLoader, DockerSecretsLoader): + if isinstance(file, FILE_LIKE_TYPES) and loader in (EnvLoader, DockerSecretsLoader): msg = ( f"{loader.__name__} does not support file-like objects. " "Use a file-based loader (e.g. 
JsonLoader, TomlLoader) with file-like objects." @@ -81,18 +81,18 @@ def resolve_loader_class( raise ValueError(msg) return loader - if isinstance(file_, FILE_LIKE_TYPES): + if isinstance(file, FILE_LIKE_TYPES): msg = ( "Cannot determine loader type for a file-like object. " "Please specify loader explicitly (e.g. loader=JsonLoader)." ) raise TypeError(msg) - if file_ is None: + if file is None: return EnvLoader - # file-like objects are handled above; here file_ is str | Path - file_path = Path(file_) + # file-like objects are handled above; here file is str | Path + file_path = Path(file) if file_path.is_dir(): return DockerSecretsLoader @@ -111,7 +111,7 @@ def resolve_loader( nested_resolve_strategy: NestedResolveStrategy = "flat", nested_resolve: NestedResolve | None = None, ) -> "LoaderProtocol": - loader_class = resolve_loader_class(metadata.loader, metadata.file_) + loader_class = resolve_loader_class(metadata.loader, metadata.file) resolved_expand = expand_env_vars or metadata.expand_env_vars or "default" diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index 6ec5578..d6a858a 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -6,7 +6,7 @@ from dature.config import config from dature.errors.exceptions import DatureConfigError from dature.errors.formatter import enrich_skipped_errors, handle_load_errors -from dature.errors.location import read_file_content +from dature.errors.location import read_filecontent from dature.load_report import FieldOrigin, LoadReport, SourceEntry, attach_load_report from dature.loading.context import ( apply_skip_invalid, @@ -129,7 +129,7 @@ def __init__( self.validating = False self.loading = False - loader_class = resolve_loader_class(metadata.loader, metadata.file_) + loader_class = resolve_loader_class(metadata.loader, metadata.file) self.loader_type = loader_class.display_name mask_secrets = _resolve_single_mask_secrets(metadata) @@ -180,10 +180,10 @@ def 
_load_single_source(ctx: _PatchContext) -> DataclassInstance: raw_data = coerce_flag_fields(raw_data, ctx.cls) skipped_fields: dict[str, list[SkippedFieldSource]] = {} - file_content = read_file_content(ctx.error_ctx.file_path) + filecontent = read_filecontent(ctx.error_ctx.file_path) for path in filter_result.skipped_paths: skipped_fields.setdefault(path, []).append( - SkippedFieldSource(metadata=ctx.metadata, error_ctx=ctx.error_ctx, file_content=file_content), + SkippedFieldSource(metadata=ctx.metadata, error_ctx=ctx.error_ctx, filecontent=filecontent), ) def _transform(rd: JSONValue = raw_data) -> DataclassInstance: @@ -237,7 +237,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq dataclass_name=ctx.cls.__name__, loader_type=ctx.loader_type, file_path=str(ctx.file_path) - if not isinstance(ctx.metadata.file_, (*FILE_LIKE_TYPES, type(None))) + if not isinstance(ctx.metadata.file, (*FILE_LIKE_TYPES, type(None))) else None, raw_data=result_dict, secret_paths=ctx.secret_paths, @@ -258,7 +258,7 @@ def load_as_function( # noqa: C901, PLR0912 metadata: Source, debug: bool, ) -> DataclassInstance: - loader_class = resolve_loader_class(metadata.loader, metadata.file_) + loader_class = resolve_loader_class(metadata.loader, metadata.file) display_name = loader_class.display_name secret_paths: frozenset[str] = frozenset() @@ -293,18 +293,18 @@ def load_as_function( # noqa: C901, PLR0912 raw_data = filter_result.cleaned_dict skipped_fields: dict[str, list[SkippedFieldSource]] = {} - file_content = read_file_content(error_ctx.file_path) + filecontent = read_filecontent(error_ctx.file_path) for path in filter_result.skipped_paths: skipped_fields.setdefault(path, []).append( - SkippedFieldSource(metadata=metadata, error_ctx=error_ctx, file_content=file_content), + SkippedFieldSource(metadata=metadata, error_ctx=error_ctx, filecontent=filecontent), ) report: LoadReport | None = None if debug: - if isinstance(metadata.file_, FILE_LIKE_TYPES): + 
if isinstance(metadata.file, FILE_LIKE_TYPES): report_file_path = None - elif metadata.file_ is not None: - report_file_path = str(metadata.file_) + elif metadata.file is not None: + report_file_path = str(metadata.file) else: report_file_path = None report = _build_single_source_report( diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index f7f23d6..c4ed299 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -5,7 +5,7 @@ from dature.config import config from dature.errors.exceptions import DatureConfigError, SourceLoadError, SourceLocation from dature.errors.formatter import handle_load_errors -from dature.errors.location import ErrorContext, read_file_content +from dature.errors.location import ErrorContext, read_filecontent from dature.field_path import FieldPath from dature.load_report import SourceEntry from dature.loading.context import apply_skip_invalid, build_error_ctx @@ -34,7 +34,7 @@ def resolve_loader_for_source( def should_skip_broken(source_meta: Source, merge_meta: Merge) -> bool: if source_meta.skip_if_broken is not None: - if source_meta.file_ is None: + if source_meta.file is None: logger.warning( "skip_if_broken has no effect on environment variable sources — they cannot be broken", ) @@ -96,14 +96,14 @@ def apply_merge_skip_invalid( @dataclass(frozen=True, slots=True) class SourceContext: error_ctx: ErrorContext - file_content: str | None + filecontent: str | None @dataclass(frozen=True, slots=True) class SkippedFieldSource: metadata: Source error_ctx: ErrorContext - file_content: str | None + filecontent: str | None @dataclass(frozen=True, slots=True) @@ -141,18 +141,18 @@ def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 expand_env_vars=resolved_expand, type_loaders=source_type_loaders, ) - file_or_path: FileOrStream - if isinstance(source_meta.file_, FILE_LIKE_TYPES): - file_or_path = source_meta.file_ - elif source_meta.file_ is not None: - 
file_or_path = Path(source_meta.file_) + fileor_path: FileOrStream + if isinstance(source_meta.file, FILE_LIKE_TYPES): + fileor_path = source_meta.file + elif source_meta.file is not None: + fileor_path = Path(source_meta.file) else: - file_or_path = Path() + fileor_path = Path() error_ctx = build_error_ctx(source_meta, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets) def _load_raw( li: LoaderProtocol = loader_instance, - fp: FileOrStream = file_or_path, + fp: FileOrStream = fileor_path, ) -> LoadRawResult: return li.load_raw(fp) @@ -168,14 +168,14 @@ def _load_raw( "[%s] Source %d skipped (broken): file=%s", dataclass_name, i, - source_meta.file_ - if isinstance(source_meta.file_, (str, Path)) - else ("" if source_meta.file_ is not None else ""), + source_meta.file + if isinstance(source_meta.file, (str, Path)) + else ("" if source_meta.file is not None else ""), ) continue except Exception as exc: if merge_meta.strategy != MergeStrategy.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): - loader_class = resolve_loader_class(source_meta.loader, source_meta.file_) + loader_class = resolve_loader_class(source_meta.loader, source_meta.file) location = SourceLocation( display_label=loader_class.display_label, file_path=error_ctx.file_path, @@ -192,9 +192,9 @@ def _load_raw( "[%s] Source %d skipped (broken): file=%s", dataclass_name, i, - source_meta.file_ - if isinstance(source_meta.file_, (str, Path)) - else ("" if source_meta.file_ is not None else ""), + source_meta.file + if isinstance(source_meta.file, (str, Path)) + else ("" if source_meta.file is not None else ""), ) continue @@ -208,7 +208,7 @@ def _load_raw( nested_conflicts=load_result.nested_conflicts, ) - file_content = read_file_content(error_ctx.file_path) + filecontent = read_filecontent(error_ctx.file_path) filter_result = apply_merge_skip_invalid( raw=raw, @@ -221,13 +221,13 @@ def _load_raw( for path in filter_result.skipped_paths: skipped_fields.setdefault(path, 
[]).append( - SkippedFieldSource(metadata=source_meta, error_ctx=error_ctx, file_content=file_content), + SkippedFieldSource(metadata=source_meta, error_ctx=error_ctx, filecontent=filecontent), ) raw = filter_result.cleaned_dict raw_dicts.append(raw) - loader_class = resolve_loader_class(source_meta.loader, source_meta.file_) + loader_class = resolve_loader_class(source_meta.loader, source_meta.file) display_name = loader_class.display_name logger.debug( @@ -235,9 +235,9 @@ def _load_raw( dataclass_name, i, display_name, - source_meta.file_ - if isinstance(source_meta.file_, (str, Path)) - else ("" if source_meta.file_ is not None else ""), + source_meta.file + if isinstance(source_meta.file, (str, Path)) + else ("" if source_meta.file is not None else ""), sorted(raw.keys()) if isinstance(raw, dict) else "", ) if secret_paths: @@ -254,13 +254,13 @@ def _load_raw( source_entries.append( SourceEntry( index=i, - file_path=str(source_meta.file_) if isinstance(source_meta.file_, (str, Path)) else None, + file_path=str(source_meta.file) if isinstance(source_meta.file, (str, Path)) else None, loader_type=display_name, raw_data=raw, ), ) - source_ctxs.append(SourceContext(error_ctx=error_ctx, file_content=file_content)) + source_ctxs.append(SourceContext(error_ctx=error_ctx, filecontent=filecontent)) last_loader = loader_instance if merge_meta.strategy == MergeStrategy.FIRST_FOUND: diff --git a/src/dature/main.py b/src/dature/main.py index 06e5b07..b60bb1c 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -66,18 +66,18 @@ def load( nested_resolve_strategy=config.loading.nested_resolve_strategy, ) - file_or_path: FileOrStream - if isinstance(metadata.file_, FILE_LIKE_TYPES): - file_or_path = metadata.file_ - elif metadata.file_ is not None: - file_or_path = Path(metadata.file_) + fileor_path: FileOrStream + if isinstance(metadata.file, FILE_LIKE_TYPES): + fileor_path = metadata.file + elif metadata.file is not None: + fileor_path = Path(metadata.file) else: - 
file_or_path = Path() + fileor_path = Path() if dataclass_ is not None: return load_as_function( loader_instance=loader_instance, - file_path=file_or_path, + file_path=fileor_path, dataclass_=dataclass_, metadata=metadata, debug=debug, @@ -85,7 +85,7 @@ def load( return make_decorator( loader_instance=loader_instance, - file_path=file_or_path, + file_path=fileor_path, metadata=metadata, cache=cache, debug=debug, diff --git a/src/dature/merging/deep_merge.py b/src/dature/merging/deep_merge.py index 89a6531..85e3a23 100644 --- a/src/dature/merging/deep_merge.py +++ b/src/dature/merging/deep_merge.py @@ -204,7 +204,7 @@ def raise_on_conflict( locations: list[SourceLocation] = [] for source_idx, _ in sources: source_ctx = source_ctxs[source_idx] - locs = resolve_source_location(field_path, source_ctx.error_ctx, source_ctx.file_content) + locs = resolve_source_location(field_path, source_ctx.error_ctx, source_ctx.filecontent) locations.extend(locs) conflict_errors.append( MergeConflictFieldError( diff --git a/src/dature/metadata.py b/src/dature/metadata.py index 6876f39..d5e7cf0 100644 --- a/src/dature/metadata.py +++ b/src/dature/metadata.py @@ -62,7 +62,7 @@ class FieldMergeStrategy(StrEnum): # --8<-- [start:load-metadata] @dataclass(slots=True, kw_only=True) class Source: - file_: "FileLike | FilePath | None" = None + file: "FileLike | FilePath | None" = None loader: "type[LoaderProtocol] | None" = None prefix: "DotSeparatedPath | None" = None split_symbols: str = "__" @@ -81,16 +81,16 @@ class Source: # --8<-- [end:load-metadata] def __post_init__(self) -> None: - if isinstance(self.file_, (str, Path)): - self.file_ = expand_file_path(str(self.file_), mode="strict") + if isinstance(self.file, (str, Path)): + self.file = expand_file_path(str(self.file), mode="strict") def __repr__(self) -> str: - loader_class = resolve_loader_class(self.loader, self.file_) + loader_class = resolve_loader_class(self.loader, self.file) display = loader_class.display_name - if 
isinstance(self.file_, FILE_LIKE_TYPES): + if isinstance(self.file, FILE_LIKE_TYPES): return f"{display} ''" - if self.file_ is not None: - return f"{display} '{self.file_}'" + if self.file is not None: + return f"{display} '{self.file}'" return display diff --git a/src/dature/protocols.py b/src/dature/protocols.py index d0340d2..49e99a1 100644 --- a/src/dature/protocols.py +++ b/src/dature/protocols.py @@ -43,7 +43,7 @@ def resolve_location( cls, field_path: list[str], file_path: Path | None, - file_content: str | None, + filecontent: str | None, prefix: str | None, split_symbols: str, nested_conflict: NestedConflict | None, diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py index c41a6e5..1fb4ac0 100644 --- a/src/dature/sources_loader/base.py +++ b/src/dature/sources_loader/base.py @@ -296,26 +296,26 @@ def resolve_location( cls, field_path: list[str], file_path: Path | None, - file_content: str | None, + filecontent: str | None, prefix: str | None, split_symbols: str, # noqa: ARG003 nested_conflict: NestedConflict | None, # noqa: ARG003 ) -> list[SourceLocation]: - if file_content is None or not field_path: - return [_empty_file_location(cls.display_label, file_path)] + if filecontent is None or not field_path: + return [_empty_filelocation(cls.display_label, file_path)] if cls.path_finder_class is None: - return [_empty_file_location(cls.display_label, file_path)] + return [_empty_filelocation(cls.display_label, file_path)] search_path = _build_search_path(field_path, prefix) - finder = cls.path_finder_class(file_content) + finder = cls.path_finder_class(filecontent) line_range = finder.find_line_range(search_path) if line_range is None: line_range = _find_parent_line_range(finder, search_path) if line_range is None: - return [_empty_file_location(cls.display_label, file_path)] + return [_empty_filelocation(cls.display_label, file_path)] - lines = file_content.splitlines() + lines = filecontent.splitlines() content_lines: 
list[str] | None = None if 0 < line_range.start <= len(lines): end = min(line_range.end, len(lines)) @@ -358,7 +358,7 @@ def _strip_common_indent(raw_lines: list[str]) -> list[str]: return [line[min_indent:] for line in raw_lines] -def _empty_file_location(display_label: str, file_path: Path | None) -> SourceLocation: +def _empty_filelocation(display_label: str, file_path: Path | None) -> SourceLocation: return SourceLocation( display_label=display_label, file_path=file_path, diff --git a/src/dature/sources_loader/docker_secrets.py b/src/dature/sources_loader/docker_secrets.py index 5a78452..dba42b4 100644 --- a/src/dature/sources_loader/docker_secrets.py +++ b/src/dature/sources_loader/docker_secrets.py @@ -3,11 +3,7 @@ from dature.errors.exceptions import SourceLocation from dature.sources_loader.flat_key import FlatKeyLoader -from dature.types import ( - FileOrStream, - JSONValue, - NestedConflict, -) +from dature.types import FileOrStream, JSONValue, NestedConflict class DockerSecretsLoader(FlatKeyLoader): @@ -19,7 +15,7 @@ def resolve_location( cls, field_path: list[str], file_path: Path | None, - file_content: str | None, # noqa: ARG003 + filecontent: str | None, # noqa: ARG003 prefix: str | None, split_symbols: str, nested_conflict: NestedConflict | None, diff --git a/src/dature/sources_loader/env_.py b/src/dature/sources_loader/env_.py index 124fac2..4d94556 100644 --- a/src/dature/sources_loader/env_.py +++ b/src/dature/sources_loader/env_.py @@ -6,14 +6,7 @@ from dature.errors.exceptions import LineRange, SourceLocation from dature.sources_loader.flat_key import FlatKeyLoader -from dature.types import ( - BINARY_IO_TYPES, - TEXT_IO_TYPES, - FileOrStream, - JSONValue, - NestedConflict, - NestedConflicts, -) +from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue, NestedConflict, NestedConflicts class EnvLoader(FlatKeyLoader): @@ -28,7 +21,7 @@ def resolve_location( cls, field_path: list[str], file_path: Path | None, # noqa: ARG003 
- file_content: str | None, # noqa: ARG003 + filecontent: str | None, # noqa: ARG003 prefix: str | None, split_symbols: str, nested_conflict: NestedConflict | None, @@ -76,7 +69,7 @@ def resolve_location( cls, field_path: list[str], file_path: Path | None, - file_content: str | None, + filecontent: str | None, prefix: str | None, split_symbols: str, nested_conflict: NestedConflict | None, @@ -84,8 +77,8 @@ def resolve_location( var_name = cls._resolve_var_name(field_path, prefix, split_symbols, nested_conflict) line_range: LineRange | None = None line_content: list[str] | None = None - if file_content is not None: - line_range, line_content = _find_env_line(file_content, var_name) + if filecontent is not None: + line_range, line_content = _find_env_line(filecontent, var_name) return [ SourceLocation( display_label=cls.display_label, diff --git a/src/dature/sources_loader/json5_.py b/src/dature/sources_loader/json5_.py index 4562953..c3df659 100644 --- a/src/dature/sources_loader/json5_.py +++ b/src/dature/sources_loader/json5_.py @@ -38,5 +38,5 @@ def _load(self, path: FileOrStream) -> JSONValue: return cast("JSONValue", json5.load(cast("TextIO", path))) if isinstance(path, BINARY_IO_TYPES): return cast("JSONValue", json5.load(io.TextIOWrapper(cast("io.BufferedReader", path)))) - with path.open() as file_: - return cast("JSONValue", json5.load(file_)) + with path.open() as file: + return cast("JSONValue", json5.load(file)) diff --git a/src/dature/sources_loader/json_.py b/src/dature/sources_loader/json_.py index 718b41b..9deb905 100644 --- a/src/dature/sources_loader/json_.py +++ b/src/dature/sources_loader/json_.py @@ -33,5 +33,5 @@ def _additional_loaders(self) -> list[Provider]: def _load(self, path: FileOrStream) -> JSONValue: if isinstance(path, FILE_LIKE_TYPES): return cast("JSONValue", json.load(path)) - with path.open() as file_: - return cast("JSONValue", json.load(file_)) + with path.open() as file: + return cast("JSONValue", json.load(file)) diff --git 
a/src/dature/sources_loader/toml_.py b/src/dature/sources_loader/toml_.py index bfa03a0..06f6647 100644 --- a/src/dature/sources_loader/toml_.py +++ b/src/dature/sources_loader/toml_.py @@ -30,8 +30,8 @@ def _load(self, path: FileOrStream) -> JSONValue: if isinstance(content, bytes): content = content.decode() return cast("JSONValue", toml_rs.loads(content, toml_version=self._toml_version())) - with path.open() as file_: - return cast("JSONValue", toml_rs.loads(file_.read(), toml_version=self._toml_version())) + with path.open() as file: + return cast("JSONValue", toml_rs.loads(file.read(), toml_version=self._toml_version())) def _additional_loaders(self) -> list[Provider]: return [ diff --git a/src/dature/sources_loader/yaml_.py b/src/dature/sources_loader/yaml_.py index 95b1e5a..0f969db 100644 --- a/src/dature/sources_loader/yaml_.py +++ b/src/dature/sources_loader/yaml_.py @@ -28,8 +28,8 @@ def _load(self, path: FileOrStream) -> JSONValue: yaml.version = self._yaml_version() if isinstance(path, FILE_LIKE_TYPES): return cast("JSONValue", yaml.load(path)) - with path.open() as file_: - return cast("JSONValue", yaml.load(file_)) + with path.open() as file: + return cast("JSONValue", yaml.load(file)) class Yaml11Loader(BaseYamlLoader): diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py index 9ba64d7..279f3d0 100644 --- a/tests/errors/test_exceptions.py +++ b/tests/errors/test_exceptions.py @@ -157,7 +157,7 @@ def test_json_type_error_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"timeout": "abc", "name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @load(metadata) @dataclass @@ -192,7 +192,7 @@ class Config: name: str port: int - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -214,7 +214,7 @@ class Config: timeout: int name: str - metadata = 
Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -249,7 +249,7 @@ class DB: class Config: db: DB - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -298,7 +298,7 @@ class Config: name: str timeout: int - metadata = Source(file_=toml_file) + metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -326,7 +326,7 @@ class Config: name: str timeout: int - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -366,7 +366,7 @@ class TestLineTruncation: ), ], ) - def test_file_source_truncation( + def test_filesource_truncation( self, line_content: str, expected_content: str, @@ -413,7 +413,7 @@ def test_file_source_truncation( ), ], ) - def test_envfile_source_truncation( + def test_envfilesource_truncation( self, line_content: str, expected_content: str, @@ -619,7 +619,7 @@ def test_json_multiline_dict(self, tmp_path: Path): class Config: db: int - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -643,7 +643,7 @@ class Config: db: int name: str - metadata = Source(file_=yaml_file) + metadata = Source(file=yaml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -666,7 +666,7 @@ def test_toml_multiline_array(self, tmp_path: Path): class Config: tags: int - metadata = Source(file_=toml_file) + metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -689,7 +689,7 @@ def test_json_multiline_array(self, tmp_path: Path): class Config: tags: int - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: 
load(metadata, Config) @@ -714,7 +714,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file_=array_of_tables_toml_file) + metadata = Source(file=array_of_tables_toml_file) result = load(metadata, Config) assert result == Config( @@ -735,7 +735,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file_=array_of_tables_error_first_toml_file) + metadata = Source(file=array_of_tables_error_first_toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -760,7 +760,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file_=array_of_tables_error_last_toml_file) + metadata = Source(file=array_of_tables_error_last_toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index 48f119b..58d4f66 100644 --- a/tests/errors/test_fixtures.py +++ b/tests/errors/test_fixtures.py @@ -98,7 +98,7 @@ def test_load_error_types( fixture_file: str, metadata_kwargs: dict[str, str], ) -> None: - metadata = Source(file_=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) + metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: load(metadata, LoadErrorConfig) @@ -113,7 +113,7 @@ def test_validation_error_types( fixture_file: str, metadata_kwargs: dict[str, str], ) -> None: - metadata = Source(file_=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) + metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: load(metadata, ValidationErrorConfig) diff --git a/tests/errors/test_location.py b/tests/errors/test_location.py index ae36a41..d7bb2c2 100644 --- a/tests/errors/test_location.py +++ b/tests/errors/test_location.py @@ -16,7 +16,7 @@ def test_env_source(self): prefix="APP_", split_symbols="__", ) - locs = resolve_source_location(["database", 
"port"], ctx, file_content=None) + locs = resolve_source_location(["database", "port"], ctx, filecontent=None) assert len(locs) == 1 assert locs[0].display_label == "ENV" assert locs[0].env_var_name == "APP_DATABASE__PORT" @@ -30,7 +30,7 @@ def test_env_source_no_prefix(self): prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, file_content=None) + locs = resolve_source_location(["timeout"], ctx, filecontent=None) assert locs[0].env_var_name == "TIMEOUT" def test_env_source_custom_split_symbols(self): @@ -41,7 +41,7 @@ def test_env_source_custom_split_symbols(self): prefix="APP_", split_symbols="_", ) - locs = resolve_source_location(["database", "port"], ctx, file_content=None) + locs = resolve_source_location(["database", "port"], ctx, filecontent=None) assert locs[0].env_var_name == "APP_DATABASE_PORT" def test_json_source_with_line(self): @@ -53,7 +53,7 @@ def test_json_source_with_line(self): prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, file_content=content) + locs = resolve_source_location(["timeout"], ctx, filecontent=content) assert locs[0].display_label == "FILE" assert locs[0].line_range == LineRange(start=2, end=2) assert locs[0].line_content == ['"timeout": "30",'] @@ -67,12 +67,12 @@ def test_toml_source_with_line(self): prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, file_content=content) + locs = resolve_source_location(["timeout"], ctx, filecontent=content) assert locs[0].display_label == "FILE" assert locs[0].line_range == LineRange(start=1, end=1) assert locs[0].line_content == ['timeout = "30"'] - def test_envfile_source(self): + def test_envfilesource(self): content = "# comment\nAPP_TIMEOUT=30\nAPP_NAME=test" ctx = ErrorContext( dataclass_name="Config", @@ -81,13 +81,13 @@ def test_envfile_source(self): prefix="APP_", split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, file_content=content) + locs = 
resolve_source_location(["timeout"], ctx, filecontent=content) assert locs[0].display_label == "ENV FILE" assert locs[0].env_var_name == "APP_TIMEOUT" assert locs[0].line_range == LineRange(start=2, end=2) assert locs[0].line_content == ["APP_TIMEOUT=30"] - def test_file_source_does_not_mask_non_secret_field(self): + def test_filesource_does_not_mask_non_secret_field(self): content = '{\n "password": "secret123",\n "timeout": "30"\n}' ctx = ErrorContext( dataclass_name="Config", @@ -97,10 +97,10 @@ def test_file_source_does_not_mask_non_secret_field(self): split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["timeout"], ctx, file_content=content) + locs = resolve_source_location(["timeout"], ctx, filecontent=content) assert locs[0].line_content == ['"timeout": "30"'] - def test_file_source_masks_secret_field(self): + def test_filesource_masks_secret_field(self): content = '{\n "password": "secret123",\n "timeout": "30"\n}' ctx = ErrorContext( dataclass_name="Config", @@ -110,10 +110,10 @@ def test_file_source_masks_secret_field(self): split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["password"], ctx, file_content=content) + locs = resolve_source_location(["password"], ctx, filecontent=content) assert locs[0].line_content == ['"password": "",'] - def test_file_source_masks_line_when_secret_on_same_line(self): + def test_filesource_masks_line_when_secret_on_same_line(self): content = '{"password": "secret123", "timeout": "30"}' ctx = ErrorContext( dataclass_name="Config", @@ -123,5 +123,5 @@ def test_file_source_masks_line_when_secret_on_same_line(self): split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["timeout"], ctx, file_content=content) + locs = resolve_source_location(["timeout"], ctx, filecontent=content) assert locs[0].line_content == ['{"password": "", "timeout": "30"}'] diff --git a/tests/expansion/test_expand_file_path.py 
b/tests/expansion/test_expand_file_path.py index fa2fd23..e03a473 100644 --- a/tests/expansion/test_expand_file_path.py +++ b/tests/expansion/test_expand_file_path.py @@ -100,7 +100,7 @@ def test_disabled_no_expansion(self) -> None: class TestSourceFileExpansion: @pytest.mark.parametrize( - ("file_", "env_vars", "expected"), + ("file", "env_vars", "expected"), [ ("$DATURE_DIR/config.toml", {"DATURE_DIR": "/etc/app"}, "/etc/app/config.toml"), ( @@ -130,10 +130,10 @@ class TestSourceFileExpansion: ], ids=["str-dir", "path-dir", "str-filename-env", "no-vars", "str-windows-percent", "path-dir-and-filename"], ) - def test_file_expanded( + def test_fileexpanded( self, monkeypatch: pytest.MonkeyPatch, - file_: str | Path, + file: str | Path, env_vars: dict[str, str], expected: str, ) -> None: @@ -142,17 +142,17 @@ def test_file_expanded( for key, value in env_vars.items(): monkeypatch.setenv(key, value) - source = Source(file_=file_) + source = Source(file=file) - assert source.file_ == expected + assert source.file == expected - def test_none_file_unchanged(self) -> None: + def test_none_fileunchanged(self) -> None: source = Source() - assert source.file_ is None + assert source.file is None def test_missing_var_raises(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.delenv("DATURE_MISSING", raising=False) with pytest.raises(EnvVarExpandError): - Source(file_="$DATURE_MISSING/config.toml") + Source(file="$DATURE_MISSING/config.toml") diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 67cf939..3c16309 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -25,8 +25,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), strategy=MergeStrategy.LAST_WINS, field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ), @@ -50,8 +50,8 @@ class Config: result = load( Merge( - 
Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_WINS, field_merges=(MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),), ), @@ -75,8 +75,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), ), Config, @@ -98,8 +98,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), ), Config, @@ -120,8 +120,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),), ), Config, @@ -142,8 +142,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), ), Config, @@ -169,8 +169,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.FIRST_WINS),), ), Config, @@ -193,8 +193,8 @@ class Config: with pytest.raises(TypeError, match="APPEND strategy requires both values to be lists"): load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].value, FieldMergeStrategy.APPEND),), ), Config, @@ -215,8 +215,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), strategy=MergeStrategy.LAST_WINS, field_merges=( MergeRule(F[Config].host, 
FieldMergeStrategy.FIRST_WINS), @@ -244,8 +244,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(), ), Config, @@ -264,8 +264,8 @@ def test_decorator_with_field_merges(self, tmp_path: Path): overrides.write_text('{"host": "override-host", "port": 9090, "tags": ["b"]}') meta = Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=( MergeRule(F["Config"].host, FieldMergeStrategy.FIRST_WINS), MergeRule(F["Config"].tags, FieldMergeStrategy.APPEND), @@ -300,8 +300,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), ), @@ -325,8 +325,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ), @@ -351,8 +351,8 @@ class Config: with pytest.raises(MergeConflictError): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), ), @@ -377,8 +377,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),), ), @@ -402,8 +402,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=( MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), @@ -462,8 +462,8 @@ class Config: with pytest.raises(TypeError, match=match): load( Merge( - 
Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -513,8 +513,8 @@ class Config: with pytest.raises(TypeError, match=match): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -554,8 +554,8 @@ class Config: with pytest.raises(TypeError, match=match): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -586,8 +586,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -621,8 +621,8 @@ class Config: with pytest.raises(TypeError, match=match): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -654,8 +654,8 @@ class Config: with pytest.raises(TypeError, match=match): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].value, strategy),), ), Config, @@ -675,8 +675,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ), Config, @@ -701,9 +701,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), ), Config, @@ -727,9 +727,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].priority, max),), ), Config, @@ -753,9 +753,9 @@ class Config: result = load( 
Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].priority, min),), ), Config, @@ -783,8 +783,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=( MergeRule(F[Config].user_name, FieldMergeStrategy.FIRST_WINS), MergeRule(F[Config].inner.user_name, FieldMergeStrategy.LAST_WINS), @@ -814,8 +814,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=( MergeRule(F[Config].user_name, FieldMergeStrategy.LAST_WINS), MergeRule(F[Config].inner.user_name, FieldMergeStrategy.FIRST_WINS), @@ -842,8 +842,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=(MergeRule(F[Config].score, sum),), ), Config, @@ -867,9 +867,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].score, sum),), ), Config, @@ -893,9 +893,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].weight, lambda vals: sum(vals) / len(vals)),), ), Config, @@ -919,9 +919,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].priority, max),), ), Config, @@ -949,9 +949,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].database.port, max),), ), Config, @@ -969,7 +969,7 @@ class Config: result = load( Merge( - 
Source(file_=a), + Source(file=a), field_merges=(MergeRule(F[Config].score, sum),), ), Config, @@ -991,8 +991,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_merges=(MergeRule(F[Config].score, sum),), ), @@ -1017,8 +1017,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), field_merges=( MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), MergeRule(F[Config].score, sum), @@ -1049,9 +1049,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_merges=(MergeRule(F[Config].score, sum),), ), Config, diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index c5856b2..a966692 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -28,8 +28,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Config, ) @@ -51,8 +51,8 @@ class Config: result = load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_WINS, ), Config, @@ -62,11 +62,11 @@ class Config: assert result.port == 3000 def test_partial_sources(self, tmp_path: Path): - file_a = tmp_path / "a.json" - file_a.write_text('{"host": "myhost"}') + filea = tmp_path / "a.json" + filea.write_text('{"host": "myhost"}') - file_b = tmp_path / "b.json" - file_b.write_text('{"port": 9090}') + fileb = tmp_path / "b.json" + fileb.write_text('{"port": 9090}') @dataclass class Config: @@ -75,8 +75,8 @@ class Config: result = load( Merge( - Source(file_=file_a), - Source(file_=file_b), + Source(file=filea), + Source(file=fileb), ), Config, ) @@ -102,8 +102,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), 
+ Source(file=defaults), + Source(file=overrides), ), Config, ) @@ -129,9 +129,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), ), Config, ) @@ -154,8 +154,8 @@ class Config: result = load( ( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Config, ) @@ -177,7 +177,7 @@ class Config: result = load( Merge( - Source(file_=defaults), + Source(file=defaults), Source(prefix="APP_"), ), Config, @@ -201,7 +201,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=defaults), + Source(file=defaults), Source(prefix="APP_"), ), Config, @@ -227,8 +227,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), ), Config, ) @@ -247,7 +247,7 @@ class Config: name: str port: int - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.name == "test" assert result.port == 8080 @@ -273,8 +273,8 @@ def test_decorator_with_merge(self, tmp_path: Path): overrides.write_text('{"port": 9090}') meta = Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ) @load(meta) @@ -291,7 +291,7 @@ def test_decorator_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - meta = Merge(Source(file_=defaults)) + meta = Merge(Source(file=defaults)) @load(meta) @dataclass @@ -310,7 +310,7 @@ def test_decorator_no_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - meta = Merge(Source(file_=defaults)) + meta = Merge(Source(file=defaults)) @load(meta, cache=False) @dataclass @@ -334,8 +334,8 @@ def test_decorator_with_tuple(self, tmp_path: Path): @load( ( - 
Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), ) @dataclass @@ -351,7 +351,7 @@ def test_decorator_init_override(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "localhost", "port": 3000}') - meta = Merge(Source(file_=defaults)) + meta = Merge(Source(file=defaults)) @load(meta) @dataclass @@ -380,8 +380,8 @@ def test_decorator_first_wins(self, tmp_path: Path): second.write_text('{"host": "second-host", "port": 2000}') meta = Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_WINS, ) @@ -412,8 +412,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -443,8 +443,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -467,8 +467,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -496,8 +496,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -527,8 +527,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -558,7 +558,7 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( Merge( - Source(file_=a), + Source(file=a), Source(prefix="APP_"), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), @@ -589,8 +589,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: 
load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), strategy=MergeStrategy.RAISE_ON_CONFLICT, ), Config, @@ -632,8 +632,8 @@ class Config: result = load( Merge( - Source(file_=yaml_file), - Source(file_=env_file), + Source(file=yaml_file), + Source(file=env_file), ), Config, ) @@ -649,7 +649,7 @@ class _Permission(Flag): class TestCoerceFlagFieldsMergeMode: - def test_flag_from_env_file_merge(self, tmp_path: Path): + def test_flag_from_env_filemerge(self, tmp_path: Path): json_file = tmp_path / "defaults.json" json_file.write_text('{"name": "app"}') @@ -663,8 +663,8 @@ class Config: result = load( Merge( - Source(file_=json_file), - Source(file_=env_file), + Source(file=json_file), + Source(file=env_file), ), Config, ) @@ -684,7 +684,7 @@ class Config: result = load( Merge( - Source(file_=json_file), + Source(file=json_file), Source(prefix="APP_"), ), Config, @@ -706,8 +706,8 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), ), Config, ) @@ -727,8 +727,8 @@ class Config: perms: _Permission meta = Merge( - Source(file_=json_file), - Source(file_=env_file), + Source(file=json_file), + Source(file=env_file), ) @load(meta) @@ -756,8 +756,8 @@ class Config: result = load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -778,8 +778,8 @@ class Config: result = load( Merge( - Source(file_=missing), - Source(file_=fallback), + Source(file=missing), + Source(file=fallback), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -802,8 +802,8 @@ class Config: result = load( Merge( - Source(file_=broken), - Source(file_=fallback), + Source(file=broken), + Source(file=fallback), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -824,8 +824,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=missing1), - Source(file_=missing2), + 
Source(file=missing1), + Source(file=missing2), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -851,8 +851,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=partial), - Source(file_=full), + Source(file=partial), + Source(file=full), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -878,8 +878,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=bad_type), - Source(file_=fallback), + Source(file=bad_type), + Source(file=fallback), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -910,8 +910,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_FOUND, ), Config, @@ -936,8 +936,8 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa @load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_FOUND, ), cache=False, diff --git a/tests/loading/test_resolver.py b/tests/loading/test_resolver.py index 3f941e1..5cc77b6 100644 --- a/tests/loading/test_resolver.py +++ b/tests/loading/test_resolver.py @@ -24,10 +24,10 @@ def readinto(self, b: Buffer) -> int: # noqa: ARG002 class TestResolveLoaderClass: def test_explicit_loader(self) -> None: - assert resolve_loader_class(loader=Yaml11Loader, file_="config.json") is Yaml11Loader + assert resolve_loader_class(loader=Yaml11Loader, file="config.json") is Yaml11Loader - def test_no_file_returns_env(self) -> None: - assert resolve_loader_class(loader=None, file_=None) is EnvLoader + def test_no_filereturns_env(self) -> None: + assert resolve_loader_class(loader=None, file=None) is EnvLoader @pytest.mark.parametrize( ("extension", "expected"), @@ -43,36 +43,36 @@ def test_no_file_returns_env(self) -> None: ], ) def test_extension_mapping(self, extension: str, expected: type) -> None: - 
assert resolve_loader_class(loader=None, file_=f"config{extension}") is expected + assert resolve_loader_class(loader=None, file=f"config{extension}") is expected @pytest.mark.parametrize( "filename", [".env.local", ".env.development", ".env.production"], ) def test_dotenv_patterns(self, filename: str) -> None: - assert resolve_loader_class(loader=None, file_=filename) is EnvFileLoader + assert resolve_loader_class(loader=None, file=filename) is EnvFileLoader def test_unknown_extension_raises(self) -> None: with pytest.raises(ValueError, match="Cannot determine loader type"): - resolve_loader_class(loader=None, file_="config.xyz") + resolve_loader_class(loader=None, file="config.xyz") def test_uppercase_extension(self) -> None: - assert resolve_loader_class(loader=None, file_="config.JSON") is JsonLoader + assert resolve_loader_class(loader=None, file="config.JSON") is JsonLoader - def test_env_loader_with_file_raises(self) -> None: + def test_env_loader_with_fileraises(self) -> None: with pytest.raises(ValueError, match="EnvLoader reads from environment variables") as exc_info: - resolve_loader_class(loader=EnvLoader, file_="config.json") + resolve_loader_class(loader=EnvLoader, file="config.json") assert str(exc_info.value) == ( "EnvLoader reads from environment variables and does not use files. " - "Remove file_ or use a file-based loader instead (e.g. EnvFileLoader)." + "Remove file or use a file-based loader instead (e.g. EnvFileLoader)." 
) - def test_env_file_loader_with_file_allowed(self) -> None: - assert resolve_loader_class(loader=EnvFileLoader, file_=".env.local") is EnvFileLoader + def test_env_fileloader_with_fileallowed(self) -> None: + assert resolve_loader_class(loader=EnvFileLoader, file=".env.local") is EnvFileLoader def test_directory_returns_docker_secrets(self, tmp_path) -> None: - assert resolve_loader_class(loader=None, file_=tmp_path) is DockerSecretsLoader + assert resolve_loader_class(loader=None, file=tmp_path) is DockerSecretsLoader class TestMissingOptionalDependency: @@ -94,7 +94,7 @@ def test_missing_extra_raises_helpful_error( ) -> None: with block_import(blocked_module): with pytest.raises(ImportError) as exc_info: - resolve_loader_class(loader=None, file_=f"config{extension}") + resolve_loader_class(loader=None, file=f"config{extension}") assert str(exc_info.value) == ( f"To use '{extension}' files, install the '{extra}' extra: pip install dature[{extra}]" @@ -103,7 +103,7 @@ def test_missing_extra_raises_helpful_error( class TestResolveLoader: def test_returns_correct_loader_type(self) -> None: - metadata = Source(file_="config.json") + metadata = Source(file="config.json") loader = resolve_loader(metadata) @@ -117,7 +117,7 @@ def test_passes_prefix(self) -> None: assert loader._prefix == "APP_" def test_passes_name_style(self) -> None: - metadata = Source(file_="config.json", name_style="lower_snake") + metadata = Source(file="config.json", name_style="lower_snake") loader = resolve_loader(metadata) @@ -129,7 +129,7 @@ class Config: key: str mapping = {F[Config].key: "value"} - metadata = Source(file_="config.json", field_mapping=mapping) + metadata = Source(file="config.json", field_mapping=mapping) loader = resolve_loader(metadata) @@ -145,7 +145,7 @@ def test_default_metadata_returns_env_loader(self) -> None: def test_env_with_file_path(self, tmp_path: Path) -> None: env_file = tmp_path / ".env" env_file.write_text("KEY=VALUE") - metadata = Source(file_=env_file) + 
metadata = Source(file=env_file) loader = resolve_loader(metadata) @@ -154,9 +154,9 @@ def test_env_with_file_path(self, tmp_path: Path) -> None: class TestFilelikeResolverValidation: @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_file_like_without_loader_raises(self, stream) -> None: + def test_filelike_without_loader_raises(self, stream) -> None: with pytest.raises(TypeError) as exc_info: - resolve_loader_class(loader=None, file_=stream) + resolve_loader_class(loader=None, file=stream) assert str(exc_info.value) == ( "Cannot determine loader type for a file-like object. " @@ -164,9 +164,9 @@ def test_file_like_without_loader_raises(self, stream) -> None: ) @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_file_like_with_env_loader_raises(self, stream) -> None: + def test_filelike_with_env_loader_raises(self, stream) -> None: with pytest.raises(ValueError, match="EnvLoader does not support file-like objects") as exc_info: - resolve_loader_class(loader=EnvLoader, file_=stream) + resolve_loader_class(loader=EnvLoader, file=stream) assert str(exc_info.value) == ( "EnvLoader does not support file-like objects. " @@ -174,9 +174,9 @@ def test_file_like_with_env_loader_raises(self, stream) -> None: ) @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_file_like_with_docker_secrets_loader_raises(self, stream) -> None: + def test_filelike_with_docker_secrets_loader_raises(self, stream) -> None: with pytest.raises(ValueError, match="DockerSecretsLoader does not support file-like objects") as exc_info: - resolve_loader_class(loader=DockerSecretsLoader, file_=stream) + resolve_loader_class(loader=DockerSecretsLoader, file=stream) assert str(exc_info.value) == ( "DockerSecretsLoader does not support file-like objects. 
" @@ -184,5 +184,5 @@ def test_file_like_with_docker_secrets_loader_raises(self, stream) -> None: ) @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_file_like_with_explicit_loader_allowed(self, stream) -> None: - assert resolve_loader_class(loader=JsonLoader, file_=stream) is JsonLoader + def test_filelike_with_explicit_loader_allowed(self, stream) -> None: + assert resolve_loader_class(loader=JsonLoader, file=stream) is JsonLoader diff --git a/tests/loading/test_single.py b/tests/loading/test_single.py index 796b60a..0cc6797 100644 --- a/tests/loading/test_single.py +++ b/tests/loading/test_single.py @@ -17,7 +17,7 @@ class TestMakeDecorator: def test_not_dataclass_raises(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) decorator = make_decorator( loader_instance=JsonLoader(), @@ -36,7 +36,7 @@ class NotADataclass: def test_patches_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -57,7 +57,7 @@ class Config: def test_patches_post_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -77,7 +77,7 @@ class Config: def test_loads_on_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "from_file", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -100,7 +100,7 @@ class Config: def test_init_args_override_loaded(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "from_file", "port": 8080}') - metadata = Source(file_=json_file) + metadata = 
Source(file=json_file) @dataclass class Config: @@ -123,7 +123,7 @@ class Config: def test_returns_same_class(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -143,7 +143,7 @@ class Config: def test_preserves_original_post_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) post_init_called = [] @@ -171,7 +171,7 @@ class TestCache: def test_cache_returns_same_data(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -198,7 +198,7 @@ class Config: def test_no_cache_rereads_file(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -225,7 +225,7 @@ class Config: def test_cache_allows_override(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -254,7 +254,7 @@ class TestLoadAsFunction: def test_returns_loaded_dataclass(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "port": 3000}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -275,7 +275,7 @@ class Config: def test_with_prefix(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"app": {"name": "nested"}}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -302,7 
+302,7 @@ class TestCoerceFlagFieldsFunctionMode: def test_flag_from_env_file(self, tmp_path: Path): env_file = tmp_path / "config.env" env_file.write_text("NAME=test\nPERMS=3\n") - metadata = Source(file_=env_file, loader=EnvFileLoader) + metadata = Source(file=env_file, loader=EnvFileLoader) @dataclass class Config: @@ -322,7 +322,7 @@ class Config: def test_flag_from_json_as_int(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "perms": 3}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -344,7 +344,7 @@ class TestCoerceFlagFieldsDecoratorMode: def test_flag_from_env_file(self, tmp_path: Path): env_file = tmp_path / "config.env" env_file.write_text("NAME=test\nPERMS=5\n") - metadata = Source(file_=env_file, loader=EnvFileLoader) + metadata = Source(file=env_file, loader=EnvFileLoader) @dataclass class Config: @@ -366,7 +366,7 @@ class Config: def test_flag_from_json_as_int(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "perms": 7}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: @@ -394,8 +394,8 @@ class TestFilelikeLoadAsFunction: StringIO('{"name": "test", "port": 3000}'), ], ) - def test_json_from_file_like(self, stream) -> None: - metadata = Source(file_=stream, loader=JsonLoader) + def test_json_from_filelike(self, stream) -> None: + metadata = Source(file=stream, loader=JsonLoader) @dataclass class Config: @@ -416,7 +416,7 @@ class Config: def test_path_object_directly(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "direct_path"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @dataclass class Config: diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index 1ec2899..5b7b3a9 100644 --- a/tests/loading/test_skip_invalid_fields.py 
+++ b/tests/loading/test_skip_invalid_fields.py @@ -25,8 +25,8 @@ class Config: result = load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -49,8 +49,8 @@ class Config: result = load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -74,8 +74,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -110,8 +110,8 @@ class Config: result = load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -134,8 +134,8 @@ class Config: result = load( Merge( - Source(file_=source1, skip_if_invalid=True), - Source(file_=source2), + Source(file=source1, skip_if_invalid=True), + Source(file=source2), ), Config, ) @@ -157,8 +157,8 @@ class Config: result = load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -179,7 +179,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=source1), + Source(file=source1), ), Config, ) @@ -208,8 +208,8 @@ class Config: result = load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), strategy=MergeStrategy.RAISE_ON_CONFLICT, skip_invalid_fields=True, ), @@ -235,10 +235,10 @@ class Config: result = load( Merge( Source( - file_=source1, + file=source1, skip_if_invalid=(F[Config].port, F[Config].timeout), ), - Source(file_=source2), + Source(file=source2), ), Config, ) @@ -260,7 +260,7 @@ class Config: load( Merge( Source( - file_=source1, + file=source1, skip_if_invalid=(F[Config].port,), ), ), @@ -297,8 +297,8 
@@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( Merge( - Source(file_=source1), - Source(file_=source2), + Source(file=source1), + Source(file=source2), skip_invalid_fields=True, ), Config, @@ -322,7 +322,7 @@ class Config: port: int = 8080 result = load( - Source(file_=json_file, skip_if_invalid=True), + Source(file=json_file, skip_if_invalid=True), Config, ) @@ -340,7 +340,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=json_file, skip_if_invalid=True), + Source(file=json_file, skip_if_invalid=True), Config, ) @@ -357,7 +357,7 @@ def test_single_source_decorator_skip(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": "abc"}') - @load(Source(file_=json_file, skip_if_invalid=True)) + @load(Source(file=json_file, skip_if_invalid=True)) @dataclass class Config: host: str @@ -379,7 +379,7 @@ class Config: result = load( Source( - file_=json_file, + file=json_file, skip_if_invalid=(F[Config].port,), ), Config, @@ -400,7 +400,7 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( - Source(file_=json_file, skip_if_invalid=True), + Source(file=json_file, skip_if_invalid=True), Config, ) @@ -424,7 +424,7 @@ class Config: result = load( Source( - file_=source, + file=source, skip_if_invalid=(F[Config].port,), ), Config, @@ -452,10 +452,10 @@ class Config: result = load( Merge( Source( - file_=source1, + file=source1, skip_if_invalid=(F[Config].inner.port,), ), - Source(file_=source2), + Source(file=source2), ), Config, ) @@ -482,10 +482,10 @@ class Config: result = load( Merge( Source( - file_=source1, + file=source1, skip_if_invalid=(F[Config].port, F[Config].inner.port), ), - Source(file_=source2), + Source(file=source2), ), Config, ) diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index 55155a2..75e163a 100644 --- a/tests/loading/test_source_loading.py +++ 
b/tests/loading/test_source_loading.py @@ -24,8 +24,8 @@ class Config: result = load( Merge( - Source(file_=valid), - Source(file_=missing), + Source(file=valid), + Source(file=missing), skip_broken_sources=True, ), Config, @@ -48,8 +48,8 @@ class Config: result = load( Merge( - Source(file_=valid), - Source(file_=broken), + Source(file=valid), + Source(file=broken), skip_broken_sources=True, ), Config, @@ -72,8 +72,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=broken_a), - Source(file_=broken_b), + Source(file=broken_a), + Source(file=broken_b), skip_broken_sources=True, ), Config, @@ -96,8 +96,8 @@ class Config: with pytest.raises(DatureConfigError): load( Merge( - Source(file_=valid), - Source(file_=broken), + Source(file=valid), + Source(file=broken), ), Config, ) @@ -119,9 +119,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=broken), - Source(file_=c), + Source(file=a), + Source(file=broken), + Source(file=c), skip_broken_sources=True, ), Config, @@ -144,8 +144,8 @@ class Config: result = load( Merge( - Source(file_=valid), - Source(file_=broken, skip_if_broken=True), + Source(file=valid), + Source(file=broken, skip_if_broken=True), skip_broken_sources=False, ), Config, @@ -169,8 +169,8 @@ class Config: with pytest.raises(DatureConfigError): load( Merge( - Source(file_=valid), - Source(file_=broken, skip_if_broken=False), + Source(file=valid), + Source(file=broken, skip_if_broken=False), skip_broken_sources=True, ), Config, @@ -190,8 +190,8 @@ class Config: result = load( Merge( - Source(file_=valid), - Source(file_=broken, skip_if_broken=None), + Source(file=valid), + Source(file=broken, skip_if_broken=None), skip_broken_sources=True, ), Config, @@ -217,8 +217,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Merge( - Source(file_=missing), - Source(file_=broken), + Source(file=missing), + Source(file=broken), skip_broken_sources=True, ), Config, @@ -241,7 
+241,7 @@ class Config: result = load( Merge( - Source(file_=json_file), + Source(file=json_file), ), Config, ) @@ -260,7 +260,7 @@ class Config: result = load( Merge( - Source(file_=json_file), + Source(file=json_file), expand_env_vars="disabled", ), Config, @@ -281,7 +281,7 @@ class Config: with pytest.raises(EnvVarExpandError): load( Merge( - Source(file_=json_file), + Source(file=json_file), expand_env_vars="strict", ), Config, @@ -299,7 +299,7 @@ class Config: result = load( Merge( - Source(file_=json_file, expand_env_vars="disabled"), + Source(file=json_file, expand_env_vars="disabled"), expand_env_vars="default", ), Config, @@ -319,7 +319,7 @@ class Config: result = load( Merge( - Source(file_=json_file, expand_env_vars=None), + Source(file=json_file, expand_env_vars=None), expand_env_vars="disabled", ), Config, @@ -339,7 +339,7 @@ class Config: result = load( Merge( - Source(file_=json_file), + Source(file=json_file), expand_env_vars="empty", ), Config, @@ -383,7 +383,7 @@ def test_error_format( with pytest.raises(EnvVarExpandError) as exc_info: load( - Source(file_=file, prefix=prefix, expand_env_vars="strict"), + Source(file=file, prefix=prefix, expand_env_vars="strict"), StrictConfig, ) diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index ea87625..b944abb 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -216,7 +216,7 @@ class Cfg: password: str host: str - result = load(Source(file_=json_file), Cfg, debug=True) + result = load(Source(file=json_file), Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -242,8 +242,8 @@ class Cfg: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Cfg, debug=True, @@ -269,7 +269,7 @@ class Cfg: api_key: SecretStr host: str - result = load(Source(file_=json_file), Cfg, debug=True) + result = load(Source(file=json_file), Cfg, debug=True) report = 
get_load_report(result) assert report is not None @@ -290,7 +290,7 @@ class Cfg: host: str with caplog.at_level("DEBUG", logger="dature"): - load(Source(file_=json_file), Cfg, debug=True) + load(Source(file=json_file), Cfg, debug=True) assert _SECRET_VALUE not in caplog.text @@ -309,8 +309,8 @@ class Cfg: with caplog.at_level("DEBUG", logger="dature"): load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Cfg, debug=True, @@ -328,7 +328,7 @@ class Cfg: port: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file), Cfg) + load(Source(file=json_file), Cfg) assert _SECRET_VALUE not in str(exc_info.value) @@ -337,7 +337,7 @@ def test_merge_decorator_error_message_masks_secrets(self, tmp_path: Path): json_file.write_text('{"password": "allowed", "host": "prod"}') meta = Merge( - Source(file_=json_file), + Source(file=json_file), ) @load(meta) @@ -363,7 +363,7 @@ class Cfg: host: str with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file, mask_secrets=True), Cfg) + load(Source(file=json_file, mask_secrets=True), Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -385,7 +385,7 @@ class Cfg: host: str with patch("dature.masking.masking._heuristic_detector", None), pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file, mask_secrets=True), Cfg) + load(Source(file=json_file, mask_secrets=True), Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -418,7 +418,7 @@ class Cfg: host: str configure(masking=MaskingConfig(mask_secrets=mask_secrets)) - result = load(Source(file_=json_file), Cfg, debug=True) + result = load(Source(file=json_file), Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -451,7 +451,7 @@ class Cfg: configure(masking=MaskingConfig(mask_secrets=mask_secrets)) with 
pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file), Cfg) + load(Source(file=json_file), Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" content = f'{{"password": "{expected_password}", "port": "not_a_number"}}' diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index eae4bbc..6774fe4 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -26,8 +26,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), strategy=MergeStrategy.LAST_WINS, field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), @@ -51,8 +51,8 @@ class Config: result = load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_WINS, field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), @@ -78,8 +78,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), Config, @@ -103,8 +103,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), Config, @@ -123,8 +123,8 @@ def test_partial_change_raises(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote"}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Config: @@ -156,8 +156,8 @@ def test_partial_change_field_present_but_equal(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "port": 3000}') - defaults_meta = Source(file_=defaults) - overrides_meta = 
Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Config: @@ -197,8 +197,8 @@ class Config: with pytest.raises(FieldGroupError): load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), strategy=MergeStrategy.FIRST_WINS, field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), @@ -220,8 +220,8 @@ class Config: with pytest.raises(FieldGroupError): load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), strategy=MergeStrategy.RAISE_ON_CONFLICT, field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), @@ -237,8 +237,8 @@ def test_auto_expand_nested_dataclass(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote"}}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Database: @@ -285,8 +285,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].database),), ), Config, @@ -307,9 +307,9 @@ def test_three_sources_violation_on_second(self, tmp_path: Path): c = tmp_path / "c.json" c.write_text('{"host": "c-host", "port": 3000}') - a_meta = Source(file_=a) - b_meta = Source(file_=b) - c_meta = Source(file_=c) + a_meta = Source(file=a) + b_meta = Source(file=b) + c_meta = Source(file=c) @dataclass class Config: @@ -352,9 +352,9 @@ class Config: result = load( Merge( - Source(file_=a), - Source(file_=b), - Source(file_=c), + Source(file=a), + Source(file=b), + Source(file=c), field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), Config, @@ -372,8 +372,8 @@ def test_one_ok_one_violated(self, tmp_path: Path): overrides = tmp_path / "overrides.json" 
overrides.write_text('{"host": "remote", "port": 9090, "user": "root"}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Config: @@ -420,8 +420,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), field_groups=(FieldGroup(F[Config].host, F[Config].port),), ), @@ -442,8 +442,8 @@ def test_decorator_with_field_groups(self, tmp_path: Path): overrides.write_text('{"host": "remote", "port": 9090}') meta = Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), ) @@ -465,8 +465,8 @@ def test_decorator_partial_change_raises(self, tmp_path: Path): overrides.write_text('{"host": "remote"}') meta = Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), ) @@ -488,8 +488,8 @@ def test_error_message_format(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "debug": true}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Config: @@ -522,8 +522,8 @@ def test_multiple_violations_message(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "user": "root"}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Config: @@ -582,8 +582,8 @@ class Config: result = load( Merge( - 
Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ), Config, @@ -616,8 +616,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ), Config, @@ -636,8 +636,8 @@ def test_flat_changed_nested_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"timeout": 60}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Database: @@ -676,8 +676,8 @@ def test_nested_partial_flat_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote"}}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Database: @@ -716,8 +716,8 @@ def test_nested_all_changed_flat_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote", "port": 3306}}') - defaults_meta = Source(file_=defaults) - overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) d = repr(defaults_meta) o = repr(overrides_meta) @@ -773,8 +773,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), ), Config, @@ -792,8 +792,8 @@ def test_only_root_changed_raises(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"user_name": "root-new"}') - defaults_meta = Source(file_=defaults) - 
overrides_meta = Source(file_=overrides) + defaults_meta = Source(file=defaults) + overrides_meta = Source(file=overrides) @dataclass class Inner: diff --git a/tests/sources_loader/test_base.py b/tests/sources_loader/test_base.py index e73869b..5a829dc 100644 --- a/tests/sources_loader/test_base.py +++ b/tests/sources_loader/test_base.py @@ -174,7 +174,7 @@ class Config: json_file.write_text('{"userName": "John", "userAge": 25, "isActive": true}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="lower_camel"), + Source(file=json_file, loader=JsonLoader, name_style="lower_camel"), Config, ) @@ -192,7 +192,7 @@ class Config: json_file.write_text('{"user_name": "Alice", "user_age": 30}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="lower_snake"), + Source(file=json_file, loader=JsonLoader, name_style="lower_snake"), Config, ) @@ -209,7 +209,7 @@ class Config: json_file.write_text('{"UserName": "Bob", "TotalCount": 100}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="upper_camel"), + Source(file=json_file, loader=JsonLoader, name_style="upper_camel"), Config, ) @@ -226,7 +226,7 @@ class Config: json_file.write_text('{"user-name": "Charlie", "api-key": "secret123"}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="lower_kebab"), + Source(file=json_file, loader=JsonLoader, name_style="lower_kebab"), Config, ) @@ -243,7 +243,7 @@ class Config: json_file.write_text('{"USER-NAME": "Dave", "API-KEY": "secret456"}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="upper_kebab"), + Source(file=json_file, loader=JsonLoader, name_style="upper_kebab"), Config, ) @@ -260,7 +260,7 @@ class Config: json_file.write_text('{"USER_NAME": "Eve", "MAX_RETRIES": 3}') result = load( - Source(file_=json_file, loader=JsonLoader, name_style="upper_snake"), + Source(file=json_file, loader=JsonLoader, name_style="upper_snake"), Config, ) @@ -286,7 +286,7 @@ class Config: } 
result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -307,7 +307,7 @@ class Config: field_mapping = {F[Config].name: "userName"} result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -329,7 +329,7 @@ class Config: result = load( Source( - file_=json_file, + file=json_file, loader=JsonLoader, name_style="lower_camel", field_mapping=field_mapping, @@ -365,7 +365,7 @@ class User: } result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), User, ) @@ -384,7 +384,7 @@ class Config: field_mapping = {F[Config].name: ("fullName", "userName")} result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -401,7 +401,7 @@ class Config: field_mapping = {F[Config].name: ("fullName", "userName")} result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -422,7 +422,7 @@ class User: field_mapping = {F[User].address.city: "cityName"} result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), User, ) @@ -439,7 +439,7 @@ class Config: field_mapping = {F["Config"].name: "fullName"} result = load( - Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -456,7 +456,7 @@ class Config: field_mapping = {F[Config].name: "fullName"} result = load( - Source(file_=json_file, loader=JsonLoader, 
field_mapping=field_mapping), + Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), Config, ) @@ -485,7 +485,7 @@ class Config: result = load( Source( - file_=json_file, + file=json_file, loader=JsonLoader, name_style="lower_camel", field_mapping=field_mapping, @@ -520,7 +520,7 @@ class Config: result = load( Source( - file_=json_file, + file=json_file, loader=JsonLoader, name_style="lower_camel", field_mapping=field_mapping, diff --git a/tests/sources_loader/test_docker_secrets.py b/tests/sources_loader/test_docker_secrets.py index c130f70..d789484 100644 --- a/tests/sources_loader/test_docker_secrets.py +++ b/tests/sources_loader/test_docker_secrets.py @@ -10,7 +10,7 @@ class TestDockerSecretsLoader: def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path): result = load( - Source(file_=all_types_docker_secrets_dir, loader=DockerSecretsLoader), + Source(file=all_types_docker_secrets_dir, loader=DockerSecretsLoader), AllPythonTypesCompact, ) @@ -52,7 +52,7 @@ def test_empty_directory(self, tmp_path: Path): assert data == {} - def test_strip_file_content(self, tmp_path: Path): + def test_strip_filecontent(self, tmp_path: Path): (tmp_path / "secret").write_text(" password123\n") loader = DockerSecretsLoader() @@ -72,7 +72,7 @@ class Config: base: str result = load( - Source(file_=tmp_path, loader=DockerSecretsLoader), + Source(file=tmp_path, loader=DockerSecretsLoader), Config, ) diff --git a/tests/sources_loader/test_env_.py b/tests/sources_loader/test_env_.py index 68540c9..0728268 100644 --- a/tests/sources_loader/test_env_.py +++ b/tests/sources_loader/test_env_.py @@ -44,7 +44,7 @@ def test_custom_split_symbols(self, custom_separator_env_file: Path): def test_comprehensive_type_conversion(self, all_types_env_file: Path): """Test loading ENV with full type coercion to dataclass.""" - result = load(Source(file_=all_types_env_file, loader=EnvFileLoader), AllPythonTypesCompact) + result = 
load(Source(file=all_types_env_file, loader=EnvFileLoader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -58,7 +58,7 @@ def test_empty_file(self, tmp_path: Path): assert data == {} - def test_env_file_env_var_substitution(self, tmp_path: Path, monkeypatch): + def test_env_fileenv_var_substitution(self, tmp_path: Path, monkeypatch): monkeypatch.setenv("BASE_URL", "https://api.example.com") env_file = tmp_path / ".env" @@ -69,12 +69,12 @@ class Config: api_url: str base: str - result = load(Source(file_=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), Config) assert result.api_url == "https://api.example.com/v1" assert result.base == "https://api.example.com" - def test_env_file_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): + def test_env_fileenv_var_partial_substitution(self, tmp_path: Path, monkeypatch): monkeypatch.setenv("HOST", "localhost") monkeypatch.setenv("PORT", "8080") @@ -85,11 +85,11 @@ def test_env_file_env_var_partial_substitution(self, tmp_path: Path, monkeypatch class Config: url: str - result = load(Source(file_=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), Config) assert result.url == "http://localhost:8080/api" - def test_env_file_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypatch): + def test_env_filedollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypatch): monkeypatch.setenv("abc", "replaced") env_file = tmp_path / ".env" @@ -99,7 +99,7 @@ def test_env_file_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monk class Config: value: str - result = load(Source(file_=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), Config) assert result.value == "prefixreplaced/suffix" @@ -130,7 +130,7 @@ def test_quote_stripping(self, tmp_path: Path, raw_value: str, expected: str): assert data == {"value": 
expected} - def test_env_file_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypatch): + def test_env_filedollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypatch): monkeypatch.delenv("nonexistent", raising=False) env_file = tmp_path / ".env" @@ -140,7 +140,7 @@ def test_env_file_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monke class Config: value: str - result = load(Source(file_=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), Config) assert result.value == "prefix$nonexistent/suffix" diff --git a/tests/sources_loader/test_ini_.py b/tests/sources_loader/test_ini_.py index 61bc789..1b96a24 100644 --- a/tests/sources_loader/test_ini_.py +++ b/tests/sources_loader/test_ini_.py @@ -18,7 +18,7 @@ class TestIniLoader: def test_comprehensive_type_conversion(self, all_types_ini_file: Path): """Test loading INI with full type coercion to dataclass.""" result = load( - Source(file_=all_types_ini_file, loader=IniLoader, prefix="all_types"), + Source(file=all_types_ini_file, loader=IniLoader, prefix="all_types"), AllPythonTypesCompact, ) @@ -59,7 +59,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_ini_file, loader=IniLoader, prefix="app"), + Source(file=prefixed_ini_file, loader=IniLoader, prefix="app"), PrefixedConfig, ) @@ -88,7 +88,7 @@ class DbConfig: port: int result = load( - Source(file_=ini_file, loader=IniLoader, prefix="database"), + Source(file=ini_file, loader=IniLoader, prefix="database"), DbConfig, ) @@ -107,7 +107,7 @@ class Config: url: str result = load( - Source(file_=ini_file, loader=IniLoader, prefix="section"), + Source(file=ini_file, loader=IniLoader, prefix="section"), Config, ) @@ -124,7 +124,7 @@ class Config: value: str result = load( - Source(file_=ini_file, loader=IniLoader, prefix="section"), + Source(file=ini_file, loader=IniLoader, prefix="section"), Config, ) @@ -141,7 +141,7 @@ class Config: value: str result = load( - 
Source(file_=ini_file, loader=IniLoader, prefix="section"), + Source(file=ini_file, loader=IniLoader, prefix="section"), Config, ) diff --git a/tests/sources_loader/test_json5_.py b/tests/sources_loader/test_json5_.py index de53b5f..34d46f9 100644 --- a/tests/sources_loader/test_json5_.py +++ b/tests/sources_loader/test_json5_.py @@ -17,7 +17,7 @@ class TestJson5Loader: def test_comprehensive_type_conversion(self, all_types_json5_file: Path): """Test loading JSON5 with full type coercion to dataclass.""" - result = load(Source(file_=all_types_json5_file, loader=Json5Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_json5_file, loader=Json5Loader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_json5_file, loader=Json5Loader, prefix="app"), + Source(file=prefixed_json5_file, loader=Json5Loader, prefix="app"), PrefixedConfig, ) @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file_=json5_file, loader=Json5Loader), DbConfig) + result = load(Source(file=json5_file, loader=Json5Loader), DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json5_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file_=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json5_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeyp class Config: value: str - result = load(Source(file_=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json5_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = 
load(Source(file_=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json5_file, loader=Json5Loader), Config) + load(Source(file=json5_file, loader=Json5Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json5_file, loader=Json5Loader), Config) + load(Source(file=json5_file, loader=Json5Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_json_.py b/tests/sources_loader/test_json_.py index fae8cef..41433a2 100644 --- a/tests/sources_loader/test_json_.py +++ b/tests/sources_loader/test_json_.py @@ -17,7 +17,7 @@ class TestJsonLoader: def test_comprehensive_type_conversion(self, all_types_json_file: Path): """Test loading JSON with full type coercion to dataclass.""" - result = load(Source(file_=all_types_json_file, loader=JsonLoader), AllPythonTypesCompact) + result = load(Source(file=all_types_json_file, loader=JsonLoader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_json_file, loader=JsonLoader, prefix="app"), + Source(file=prefixed_json_file, loader=JsonLoader, prefix="app"), PrefixedConfig, ) @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file_=json_file, loader=JsonLoader), DbConfig) + result = load(Source(file=json_file, loader=JsonLoader), DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file_=json_file, loader=JsonLoader), Config) + result = 
load(Source(file=json_file, loader=JsonLoader), Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file_=json_file, loader=JsonLoader), Config) + result = load(Source(file=json_file, loader=JsonLoader), Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file_=json_file, loader=JsonLoader), Config) + result = load(Source(file=json_file, loader=JsonLoader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file, loader=JsonLoader), Config) + load(Source(file=json_file, loader=JsonLoader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=json_file, loader=JsonLoader), Config) + load(Source(file=json_file, loader=JsonLoader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources_loader/test_nested_resolve.py index 892a0ed..6da268d 100644 --- a/tests/sources_loader/test_nested_resolve.py +++ b/tests/sources_loader/test_nested_resolve.py @@ -85,8 +85,8 @@ def make_metadata(**kwargs: Any) -> Source: if loader_type == "env": return Source(loader=EnvLoader, prefix="MYAPP__", **kwargs) if loader_type == "envfile": - return Source(file_=tmp_path / ".env", loader=EnvFileLoader, prefix="MYAPP__", **kwargs) - return Source(file_=tmp_path, loader=DockerSecretsLoader, **kwargs) + return Source(file=tmp_path / ".env", loader=EnvFileLoader, prefix="MYAPP__", **kwargs) + return Source(file=tmp_path, loader=DockerSecretsLoader, **kwargs) return 
FlatLoaderSetup(set_data=set_data, make_metadata=make_metadata) @@ -203,7 +203,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source( - file_=env_file, + file=env_file, loader=EnvFileLoader, prefix="MYAPP__", **_strategy_kwargs("flat", local=local), @@ -226,7 +226,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source( - file_=env_file, + file=env_file, loader=EnvFileLoader, prefix="MYAPP__", **_strategy_kwargs("json", local=local), @@ -256,7 +256,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)), + Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)), NestedConfig, ) @@ -274,7 +274,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)), + Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)), NestedConfig, ) @@ -376,7 +376,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: ) result = load( - Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), NestedIntConfig, ) @@ -390,7 +390,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", 
nested_resolve_strategy="json"), NestedIntConfig, ) @@ -421,7 +421,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: ) result = load( - Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), NestedIntConfig, ) @@ -435,7 +435,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), NestedIntConfig, ) @@ -469,7 +469,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: (tmp_path / "var__bar").write_text("20") result = load( - Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), + Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), NestedIntConfig, ) @@ -482,7 +482,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), NestedIntConfig, ) @@ -508,7 +508,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: (tmp_path / "var__bar").write_text("not_a_number") result = load( - Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), NestedIntConfig, ) @@ -521,7 +521,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file_=tmp_path, loader=DockerSecretsLoader, 
nested_resolve_strategy="flat"), + Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), NestedIntConfig, ) @@ -794,7 +794,7 @@ def test_flat_strategy_deep_envfile(self, tmp_path: Path) -> None: result = load( Source( - file_=env_file, + file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat", @@ -809,7 +809,7 @@ def test_json_strategy_deep_docker_secrets(self, tmp_path: Path) -> None: (tmp_path / "var__sub__key").write_text("from_flat") result = load( - Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), DeepConfig, ) @@ -827,7 +827,7 @@ def test_flat_strategy_error(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source( - file_=tmp_path, + file=tmp_path, loader=DockerSecretsLoader, prefix="myapp__", nested_resolve_strategy="flat", @@ -852,7 +852,7 @@ def test_json_strategy_error(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source( - file_=tmp_path, + file=tmp_path, loader=DockerSecretsLoader, prefix="myapp__", nested_resolve_strategy="json", @@ -890,7 +890,7 @@ def test_flat_first_then_json(self, monkeypatch: pytest.MonkeyPatch) -> None: assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) assert result_json == NestedConfig(var=NestedVar(foo="from_json", bar="from_json")) - def test_envfile_reversed_order(self, tmp_path: Path) -> None: + def test_envfile_reversed_order(self, tmp_path: Path) -> None: env_file = tmp_path / ".env" env_file.write_text( 'MYAPP__VAR__FOO=from_flat\nMYAPP__VAR__BAR=from_flat\nMYAPP__VAR={"foo": "from_json", "bar": "from_json"}', ) result_flat = load( Source( - file_=env_file,
+907,7 @@ def test_envfile_reversed_order(self, tmp_path: Path) -> None: ) result_json = load( Source( - file_=env_file, + file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json", diff --git a/tests/sources_loader/test_toml10_.py b/tests/sources_loader/test_toml10_.py index cb49094..09ddae2 100644 --- a/tests/sources_loader/test_toml10_.py +++ b/tests/sources_loader/test_toml10_.py @@ -17,7 +17,7 @@ class TestToml10Loader: def test_comprehensive_type_conversion(self, all_types_toml10_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file_=all_types_toml10_file, loader=Toml10Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_toml_file, loader=Toml10Loader, prefix="app"), + Source(file=prefixed_toml_file, loader=Toml10Loader, prefix="app"), PrefixedConfig, ) @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file_=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file_=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file_=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, 
tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file_=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=toml_file, loader=Toml10Loader), Config) + load(Source(file=toml_file, loader=Toml10Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=toml_file, loader=Toml10Loader), Config) + load(Source(file=toml_file, loader=Toml10Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources_loader/test_toml11_.py index b96ca89..257f7e8 100644 --- a/tests/sources_loader/test_toml11_.py +++ b/tests/sources_loader/test_toml11_.py @@ -17,7 +17,7 @@ class TestToml11Loader: def test_comprehensive_type_conversion(self, all_types_toml11_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file_=all_types_toml11_file, loader=Toml11Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_toml_file, loader=Toml11Loader, prefix="app"), + Source(file=prefixed_toml_file, loader=Toml11Loader, prefix="app"), PrefixedConfig, ) @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file_=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - 
result = load(Source(file_=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file_=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file_=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=toml_file, loader=Toml11Loader), Config) + load(Source(file=toml_file, loader=Toml11Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=toml_file, loader=Toml11Loader), Config) + load(Source(file=toml_file, loader=Toml11Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources_loader/test_yaml11_.py index 836feb3..f0feb19 100644 --- a/tests/sources_loader/test_yaml11_.py +++ b/tests/sources_loader/test_yaml11_.py @@ -17,7 +17,7 @@ class TestYaml11Loader: def test_comprehensive_type_conversion(self, all_types_yaml11_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file_=all_types_yaml11_file, loader=Yaml11Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ 
class PrefixedConfig: ) result = load( - Source(file_=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"), + Source(file=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"), PrefixedConfig, ) @@ -62,7 +62,7 @@ class EnvConfig: services: Services result = load( - Source(file_=yaml_config_with_env_vars_file, loader=Yaml11Loader), + Source(file=yaml_config_with_env_vars_file, loader=Yaml11Loader), EnvConfig, ) @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=yaml_file, loader=Yaml11Loader), Config) + load(Source(file=yaml_file, loader=Yaml11Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=yaml_file, loader=Yaml11Loader), Config) + load(Source(file=yaml_file, loader=Yaml11Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources_loader/test_yaml12_.py index 
a7fa425..6cb80f6 100644 --- a/tests/sources_loader/test_yaml12_.py +++ b/tests/sources_loader/test_yaml12_.py @@ -17,7 +17,7 @@ class TestYaml12Loader: def test_comprehensive_type_conversion(self, all_types_yaml12_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file_=all_types_yaml12_file, loader=Yaml12Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file_=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"), + Source(file=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"), PrefixedConfig, ) @@ -62,7 +62,7 @@ class EnvConfig: services: Services result = load( - Source(file_=yaml_config_with_env_vars_file, loader=Yaml12Loader), + Source(file=yaml_config_with_env_vars_file, loader=Yaml12Loader), EnvConfig, ) @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - 
load(Source(file_=yaml_file, loader=Yaml12Loader), Config) + load(Source(file=yaml_file, loader=Yaml12Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file_=yaml_file, loader=Yaml12Loader), Config) + load(Source(file=yaml_file, loader=Yaml12Loader), Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/test_custom_loader.py b/tests/test_custom_loader.py index 70a5761..487dc43 100644 --- a/tests/test_custom_loader.py +++ b/tests/test_custom_loader.py @@ -46,7 +46,7 @@ def test_xml_loader(self, tmp_path: Path) -> None: ) result = load( - Source(file_=xml_file, loader=XmlLoader), + Source(file=xml_file, loader=XmlLoader), XmlConfig, ) diff --git a/tests/test_load_report.py b/tests/test_load_report.py index e56f2dc..75fa1a1 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -29,8 +29,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Config, debug=True, @@ -89,8 +89,8 @@ class Config: result = load( Merge( - Source(file_=first), - Source(file_=second), + Source(file=first), + Source(file=second), strategy=MergeStrategy.FIRST_WINS, ), Config, @@ -154,8 +154,8 @@ class Config: result = load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Config, debug=True, @@ -193,7 +193,7 @@ class Config: name: str port: int - result = load(Source(file_=json_file), Config, debug=True) + result = load(Source(file=json_file), Config, debug=True) report = get_load_report(result) @@ -238,8 +238,8 @@ def test_merge_decorator(self, tmp_path: Path): overrides.write_text('{"port": 9090}') meta = Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ) @load(meta, debug=True) @@ -258,7 +258,7 @@ def 
test_single_source_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 3000}') - @load(Source(file_=json_file), debug=True) + @load(Source(file=json_file), debug=True) @dataclass class Config: host: str @@ -290,7 +290,7 @@ class Config: host: str port: int - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") @@ -317,8 +317,8 @@ class Config: with caplog.at_level(logging.DEBUG, logger="dature"): load( Merge( - Source(file_=defaults), - Source(file_=overrides), + Source(file=defaults), + Source(file=overrides), ), Config, ) @@ -353,7 +353,7 @@ class Config: port: int with caplog.at_level(logging.DEBUG, logger="dature"): - load(Source(file_=json_file), Config) + load(Source(file=json_file), Config) messages = [r.message for r in caplog.records if r.name == "dature"] @@ -382,8 +382,8 @@ class Config: with pytest.raises(DatureConfigError): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), ), Config, debug=True, @@ -424,8 +424,8 @@ class Config: with pytest.raises(DatureConfigError): load( Merge( - Source(file_=a), - Source(file_=b), + Source(file=a), + Source(file=b), ), Config, debug=True, @@ -462,7 +462,7 @@ class Config: port: int with pytest.raises(DatureConfigError): - load(Source(file_=json_file), Config, debug=True) + load(Source(file=json_file), Config, debug=True) expected = LoadReport( dataclass_name="Config", @@ -492,7 +492,7 @@ class Config: port: Annotated[int, Ge(value=0)] with pytest.raises(DatureConfigError): - load(Source(file_=json_file), Config, debug=True) + load(Source(file=json_file), Config, debug=True) expected = LoadReport( dataclass_name="Config", diff --git a/tests/test_main.py b/tests/test_main.py index e25ec0e..85be970 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -14,7 +14,7 @@ from 
dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader -def _all_file_loaders() -> list[type]: +def _all_file_loaders() -> list[type]: return [EnvFileLoader, Yaml11Loader, Yaml12Loader, JsonLoader, Json5Loader, Toml10Loader, Toml11Loader, IniLoader] @@ -23,7 +23,7 @@ def test_loads_from_file(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "FromFile", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @load(metadata) @dataclass @@ -66,7 +66,7 @@ def test_explicit_loader_overrides_extension(self, tmp_path: Path) -> None: txt_file = tmp_path / "config.txt" txt_file.write_text('{"app_name": "OverrideApp"}') - metadata = Source(file_=txt_file, loader=JsonLoader) + metadata = Source(file=txt_file, loader=JsonLoader) @load(metadata) @dataclass @@ -107,7 +107,7 @@ def test_cache_enabled_by_default(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @load(metadata) @dataclass @@ -126,7 +126,7 @@ def test_cache_disabled(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) @load(metadata, cache=False) @dataclass @@ -152,7 +152,7 @@ class Config: name: str port: int - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.name == "FromFile" @@ -188,25 +188,25 @@ class Config: class TestFileNotFoundWithLoad: @pytest.mark.parametrize( "loader_class", - _all_file_loaders(), + _all_file_loaders(), ) - def test_load_function_single_source_file_not_found(self, loader_class: type) -> None: + def test_load_function_single_source_file_not_found(self, loader_class: type) -> None: @dataclass class Config: name: str - metadata = 
Source(file_="/non/existent/file.json", loader=loader_class) + metadata = Source(file="/non/existent/file.json", loader=loader_class) with pytest.raises(FileNotFoundError): load(metadata, Config) @pytest.mark.parametrize( "loader_class", - _all_file_loaders(), + _all_file_loaders(), ) - def test_load_decorator_single_source_file_not_found(self, loader_class: type) -> None: - metadata = Source(file_="/non/existent/config.json", loader=loader_class) + def test_load_decorator_single_source_file_not_found(self, loader_class: type) -> None: + metadata = Source(file="/non/existent/config.json", loader=loader_class) @load(metadata) @dataclass diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py index eeba86a..45291cb 100644 --- a/tests/test_type_loaders.py +++ b/tests/test_type_loaders.py @@ -48,7 +48,7 @@ class TestTypeLoadersInSource: def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None: result = load( Source( - file_=yaml_with_rgb, + file=yaml_with_rgb, type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), ConfigWithRgb, @@ -67,7 +67,7 @@ def int_times_two(value: str) -> int: result = load( Source( - file_=p, + file=p, type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), ConfigWithRgb, @@ -81,7 +81,7 @@ def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None: configure( type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ) - result = load(Source(file_=yaml_with_rgb), ConfigWithRgb) + result = load(Source(file=yaml_with_rgb), ConfigWithRgb) assert result.color == Rgb(r=255, g=128, b=0) @@ -94,8 +94,8 @@ def test_merge_metadata_type_loaders(self, tmp_path: Path) -> None: result = load( Merge( - Source(file_=base), - Source(file_=override), + Source(file=base), + Source(file=override), type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), ConfigWithRgb, @@ -124,7 +124,7 @@ def tag_upper(value: str) -> str: result = load( Source( - file_=p, + file=p,
type_loaders=(TypeLoader(type_=str, func=tag_upper),), ), TwoCustom, diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 0362a75..1514f77 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -22,7 +22,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", "coding"]}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.name == "Alice" @@ -40,7 +40,7 @@ class Config: content = '{"name": "AB", "age": 200, "tags": []}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -86,7 +86,7 @@ class User: '{"name": "Alice", "age": 30, "address": {"city": "NYC", "zip_code": "12345"}}', ) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, User) assert result.name == "Alice" @@ -110,7 +110,7 @@ class User: content = '{"name": "Al", "age": 15, "address": {"city": "N", "zip_code": "ABCDE"}}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, User) @@ -154,7 +154,7 @@ class Config: content = '{"age": 15}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -178,7 +178,7 @@ class Config: '{"groups": {"admins": [{"name": "Alice"}]}}', ) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.groups == {"admins": [{"name": "Alice"}]} @@ -192,7 +192,7 @@ class Config: content = '{"groups": {}}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with 
pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -222,7 +222,7 @@ class Config: '{"teams": {"backend": [{"name": "Alice", "role": "admin"}]}}', ) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.teams["backend"][0].name == "Alice" @@ -242,7 +242,7 @@ class Config: content = '{"teams": {"backend": [{"name": "A", "role": "ab"}]}}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index ee7b55a..15d3928 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -48,7 +48,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"count": 10}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.count == 10 @@ -62,7 +62,7 @@ class Config: content = '{"count": 7}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -86,7 +86,7 @@ class Config: content = '{"count": 7}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -111,7 +111,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"url": "https://example.com"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.url == "https://example.com" @@ -125,7 +125,7 @@ class Config: content = '{"url": "http://example.com"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with 
pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -146,7 +146,7 @@ def test_success(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(value=10)] @@ -159,7 +159,7 @@ def test_failure(self, tmp_path: Path): content = '{"port": 8081}' json_file.write_text(content) - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(value=10)] @@ -182,7 +182,7 @@ def test_direct_instantiation_validates(self, tmp_path: Path): content = '{"port": 8080}' json_file.write_text(content) - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(value=10)] @@ -208,7 +208,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"count": 15, "url": "https://example.com"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.count == 15 @@ -224,7 +224,7 @@ class Config: content = '{"count": 7, "url": "http://example.com"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index 2ab49ec..e1ea5b3 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -22,7 +22,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MinLength(value=3), }, @@ -40,7 +40,7 @@ class Config: json_file.write_text('{"port": 8080}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].port: (Gt(value=0), Lt(value=65536)), 
}, @@ -59,7 +59,7 @@ class Config: json_file.write_text('{"name": "Alice", "port": 8080}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MinLength(value=3), F[Config].port: Gt(value=0), @@ -82,7 +82,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MinLength(value=3), }, @@ -111,7 +111,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].port: (Gt(value=0), Lt(value=65536)), }, @@ -146,7 +146,7 @@ class Config: json_file.write_text('{"database": {"host": "localhost", "port": 5432}}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].database.host: MinLength(value=1), F[Config].database.port: Gt(value=0), @@ -172,7 +172,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].database.host: MinLength(value=1), }, @@ -203,7 +203,7 @@ class Config: json_file.write_text('{"name": "Alice", "port": 8080}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MaxLength(value=50), F[Config].port: Gt(value=0), @@ -224,7 +224,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MaxLength(value=50), }, @@ -253,7 +253,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MaxLength(value=10), }, @@ -281,7 +281,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MaxLength(value=10), }, @@ -300,7 +300,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MaxLength(value=50), }, @@ -328,7 +328,7 @@ class Config: 
json_file.write_text('{"port": 8080}') metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].port: Lt(value=65536), }, @@ -347,7 +347,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].port: Lt(value=65536), }, @@ -376,7 +376,7 @@ class Config: json_file.write_text(content) metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].port: Lt(value=65536), }, @@ -405,7 +405,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.name == "Alice" @@ -427,7 +427,7 @@ def validate_config(obj: Config) -> bool: json_file.write_text('{"port": 8080, "user": "admin"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=(RootValidator(func=validate_config),), validators={ F[Config].port: Ge(value=0), @@ -450,7 +450,7 @@ class Config: age: int metadata = Source( - file_=json_file, + file=json_file, validators={ F[Config].name: MinLength(value=2), F[Config].age: Ge(value=0), diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index d4e91e5..a63ff05 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -18,7 +18,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 25}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.age == 25 @@ -32,7 +32,7 @@ class Config: content = '{"age": 18}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 18}') - metadata = Source(file_=json_file) + metadata = 
Source(file=json_file) result = load(metadata, Config) assert result.age == 18 @@ -71,7 +71,7 @@ class Config: content = '{"age": 17}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 99}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.age == 99 @@ -110,7 +110,7 @@ class Config: content = '{"age": 100}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 100}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.age == 100 @@ -149,7 +149,7 @@ class Config: content = '{"age": 101}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -174,7 +174,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 30}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.age == 30 @@ -188,7 +188,7 @@ class Config: content = '{"age": 70}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) diff --git a/tests/validators/test_post_init_and_property.py b/tests/validators/test_post_init_and_property.py index c133189..9fc0f1a 100644 --- a/tests/validators/test_post_init_and_property.py +++ b/tests/validators/test_post_init_and_property.py @@ -21,7 +21,7 @@ def 
__post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.port == 8080 assert result.host == "localhost" @@ -41,7 +41,7 @@ def __post_init__(self) -> None: json_file.write_text('{"port": 99999, "host": "localhost"}') with pytest.raises(ValueError, match="Invalid port: 99999"): - load(Source(file_=json_file), Config) + load(Source(file=json_file), Config) def test_post_init_cross_field_validation(self, tmp_path: Path): @dataclass @@ -58,7 +58,7 @@ def __post_init__(self) -> None: json_file.write_text('{"min_value": 100, "max_value": 10}') with pytest.raises(ValueError, match=r"min_value \(100\) must be less than max_value \(10\)"): - load(Source(file_=json_file), Config) + load(Source(file=json_file), Config) def test_post_init_cross_field_success(self, tmp_path: Path): @dataclass @@ -74,7 +74,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 1, "max_value": 100}') - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.min_value == 1 assert result.max_value == 100 @@ -85,7 +85,7 @@ def test_post_init_success(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: int @@ -105,7 +105,7 @@ def test_post_init_failure_from_file(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 99999, "host": "localhost"}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: int @@ -123,7 +123,7 @@ def test_post_init_failure_from_override(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - 
@load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: port: int @@ -141,7 +141,7 @@ def test_post_init_cross_field(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 50, "max_value": 10}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: min_value: int @@ -170,7 +170,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.base_url == "http://localhost:8080" @@ -178,7 +178,7 @@ def test_computed_field_via_post_init_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "example.com", "port": 443}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: host: str @@ -207,7 +207,7 @@ def address(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.address == "localhost:8080" @@ -215,7 +215,7 @@ def test_property_computed_value_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 3000}') - @load(Source(file_=json_file)) + @load(Source(file=json_file)) @dataclass class Config: host: str @@ -241,6 +241,6 @@ def email(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"_email": " Admin@Example.COM "}') - result = load(Source(file_=json_file), Config) + result = load(Source(file=json_file), Config) assert result.email == "admin@example.com" diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index 1b70fb1..d99e03b 100644 --- a/tests/validators/test_root_validator.py +++ 
b/tests/validators/test_root_validator.py @@ -24,7 +24,7 @@ def validate_config(obj: Config) -> bool: json_file.write_text('{"port": 80, "user": "root"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=(RootValidator(func=validate_config),), ) result = load(metadata, Config) @@ -47,7 +47,7 @@ def validate_config(obj: Config) -> bool: json_file.write_text('{"port": 80, "user": "admin"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=(RootValidator(func=validate_config),), ) @@ -76,7 +76,7 @@ def validate_step(obj: Config) -> bool: json_file.write_text('{"min_value": 10, "max_value": 100, "step": 5}') metadata = Source( - file_=json_file, + file=json_file, root_validators=( RootValidator(func=validate_min_max), RootValidator(func=validate_step), @@ -105,7 +105,7 @@ def validate_step(obj: Config) -> bool: json_file.write_text('{"min_value": 100, "max_value": 10, "step": -5}') metadata = Source( - file_=json_file, + file=json_file, root_validators=( RootValidator(func=validate_min_max), RootValidator(func=validate_step), @@ -133,7 +133,7 @@ def validate_config(obj: Config) -> bool: json_file.write_text('{"port": 80, "host": "localhost"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=(RootValidator(func=validate_config),), ) @@ -155,7 +155,7 @@ def validate_credentials(obj) -> bool: json_file.write_text('{"username": "admin", "password": "short"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=(RootValidator(func=validate_credentials),), ) @@ -188,7 +188,7 @@ def validate_config(obj: Config) -> bool: json_file.write_text('{"port": 80, "user": "admin"}') metadata = Source( - file_=json_file, + file=json_file, root_validators=( RootValidator( func=validate_config, diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index 1b90bea..85cd932 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -18,7 +18,7 
@@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.tags == ["python", "typing"] @@ -32,7 +32,7 @@ class Config: content = '{"tags": ["python"]}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.tags == ["python", "typing"] @@ -71,7 +71,7 @@ class Config: content = '{"tags": ["python", "typing", "validation"]}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing", "validation"]}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.tags == ["python", "typing", "validation"] @@ -110,7 +110,7 @@ class Config: content = '{"tags": ["python", "typing", "python"]}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing", "validation"]}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.tags == ["python", "typing", "validation"] @@ -149,7 +149,7 @@ class Config: content = '{"tags": ["python", "typing", "validation", 
"testing", "coding", "extra"]}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index 6128b9e..7feac88 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -18,7 +18,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.name == "Alice" @@ -32,7 +32,7 @@ class Config: content = '{"name": "Bob"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.name == "Alice" @@ -71,7 +71,7 @@ class Config: content = '{"name": "Alexander"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"email": "test@example.com"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.email == "test@example.com" @@ -110,7 +110,7 @@ class Config: content = '{"email": "invalid-email"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"username": 
"john_doe"}') - metadata = Source(file_=json_file) + metadata = Source(file=json_file) result = load(metadata, Config) assert result.username == "john_doe" @@ -149,7 +149,7 @@ class Config: content = '{"username": "this_is_a_very_long_username_that_exceeds_limit"}' json_file.write_text(content) - metadata = Source(file_=json_file) + metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, Config) From c401af2b95237da03ab28c460d4795660324a240 Mon Sep 17 00:00:00 2001 From: niccolum Date: Sat, 28 Mar 2026 14:32:58 +0300 Subject: [PATCH 02/36] dependabot once per month --- .github/dependabot.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 1eabe43..389ea4c 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,9 +3,8 @@ updates: - package-ecosystem: "pip" directory: "/" schedule: - interval: "weekly" - day: "saturday" - time: "09:00" + interval: "cron" + cronjob: "0 9 1-7 * 6" timezone: "Europe/Moscow" open-pull-requests-limit: 10 labels: @@ -16,9 +15,8 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "weekly" - day: "saturday" - time: "09:00" + interval: "cron" + cronjob: "0 9 1-7 * 6" timezone: "Europe/Moscow" labels: - "ci" From 3c8c5770afda0706f2ff2f067348ddfdfb60805d Mon Sep 17 00:00:00 2001 From: niccolum Date: Sat, 28 Mar 2026 15:03:17 +0300 Subject: [PATCH 03/36] from dature import ... 
-> import dature --- README.md | 7 +++--- docs/advanced/configure.md | 2 +- docs/advanced/env-expansion.md | 2 +- docs/advanced/merge-rules.md | 2 +- docs/api-reference.md | 15 ++++-------- docs/features/naming.md | 2 +- docs/features/validation.md | 8 +++---- docs/index.md | 2 +- docs/introduction.md | 2 +- .../docs/advanced/caching/advanced_caching.py | 6 ++--- .../advanced/configure/advanced_configure.py | 22 +++++++++--------- .../configure/advanced_configure_env.py | 22 +++++++++--------- .../advanced_configure_type_loaders.py | 8 +++---- .../advanced/custom_types/custom_loader.py | 6 ++--- .../docs/advanced/custom_types/custom_type.py | 8 +++---- .../custom_types/custom_type_merge.py | 12 +++++----- .../advanced/debug/advanced_debug_error.py | 14 +++++------ .../advanced/debug/advanced_debug_logging.py | 10 ++++---- .../advanced/debug/advanced_debug_report.py | 14 +++++------ .../env_expansion/advanced_env_expansion.py | 6 ++--- ...vanced_env_expansion_file_path_combined.py | 6 ++--- .../advanced_env_expansion_file_path_dir.py | 6 ++--- .../advanced_env_expansion_file_path_name.py | 6 ++--- .../advanced_env_expansion_merge.py | 12 +++++----- .../advanced_env_expansion_strict.py | 6 ++--- .../advanced_field_groups_expansion_error.py | 14 +++++------ .../advanced_field_groups_multiple_error.py | 14 +++++------ .../advanced_field_groups_nested_error.py | 14 +++++------ .../advanced_merge_rules_callable.py | 14 +++++------ .../advanced_merge_rules_conflict.py | 18 +++++++-------- .../merge_rules/merging_field_append.py | 12 +++++----- .../merging_field_append_unique.py | 12 +++++----- .../merge_rules/merging_field_first_wins.py | 12 +++++----- .../merge_rules/merging_field_groups.py | 12 +++++----- .../merge_rules/merging_field_last_wins.py | 12 +++++----- .../merge_rules/merging_field_prepend.py | 12 +++++----- .../merging_field_prepend_unique.py | 12 +++++----- .../merge_rules/merging_first_found.py | 12 +++++----- .../merge_rules/merging_skip_broken.py | 10 
++++---- .../merging_skip_broken_per_source.py | 12 +++++----- .../merge_rules/merging_skip_invalid.py | 6 ++--- .../merging_skip_invalid_per_field.py | 12 +++++----- .../nested_resolve_docker_secrets.py | 6 ++--- .../nested_resolve/nested_resolve_envfile.py | 6 ++--- .../nested_resolve_global_flat.py | 6 ++--- .../nested_resolve_global_json.py | 6 ++--- .../nested_resolve_no_conflict.py | 6 ++--- .../nested_resolve/nested_resolve_override.py | 8 +++---- .../nested_resolve_per_field.py | 10 ++++---- .../nested_resolve/nested_resolve_problem.py | 4 ++-- .../api_reference_decorator_mode.py | 23 +++++++++++++++++++ .../api_reference/api_reference_field_path.py | 22 ++++++++++++++++++ .../api_reference_function_mode.py | 22 ++++++++++++++++++ .../why-not-dynaconf/dynaconf_basic.py | 4 ++-- .../why-not-dynaconf/dynaconf_merge.py | 12 +++++----- .../dynaconf_root_validators.py | 6 ++--- .../why-not-dynaconf/dynaconf_validators.py | 4 ++-- .../why-not-hydra/hydra_dataclass.py | 4 ++-- .../comparison/why-not-hydra/hydra_merge.py | 12 +++++----- .../why-not-hydra/hydra_validators.py | 4 ++-- .../pydantic_settings_auto_detect.py | 8 +++---- .../pydantic_settings_basic.py | 4 ++-- .../pydantic_settings_merge.py | 12 +++++----- .../docs/features/masking/masking_by_name.py | 4 ++-- .../features/masking/masking_classic_style.py | 10 ++++---- .../features/masking/masking_heuristic.py | 6 ++--- .../features/masking/masking_merge_mode.py | 10 ++++---- .../docs/features/masking/masking_no_mask.py | 6 ++--- .../features/masking/masking_per_source.py | 6 ++--- .../features/masking/masking_secret_str.py | 6 ++--- .../docs/features/merging/merging_basic.py | 12 +++++----- .../features/merging/merging_strategies.py | 22 +++++++++--------- .../merging/merging_strategy_first_found.py | 14 +++++------ .../merging/merging_strategy_first_wins.py | 12 +++++----- .../merging/merging_strategy_last_wins.py | 12 +++++----- .../merging_strategy_raise_on_conflict.py | 12 +++++----- 
.../merging/merging_tuple_shorthand.py | 8 +++---- .../merging_tuple_shorthand_decorator.py | 8 +++---- .../features/naming/naming_field_mapping.py | 12 +++++----- .../docs/features/naming/naming_name_style.py | 6 ++--- .../features/naming/naming_nested_fields.py | 14 +++++------ .../docs/features/naming/naming_prefix.py | 4 ++-- .../features/naming/naming_prefix_nested.py | 4 ++-- .../features/naming/naming_split_symbols.py | 4 ++-- .../validation/validation_annotated.py | 6 ++--- .../features/validation/validation_custom.py | 6 ++--- .../validation/validation_metadata.py | 10 ++++---- .../validation/validation_post_init.py | 4 ++-- .../features/validation/validation_root.py | 6 ++--- examples/docs/index/intro_decorator.py | 4 ++-- examples/docs/index/intro_function.py | 4 ++-- examples/docs/introduction/format_docker.py | 6 ++--- examples/docs/introduction/format_env.py | 4 ++-- examples/docs/introduction/format_ini.py | 6 ++--- examples/docs/introduction/format_json.py | 4 ++-- examples/docs/introduction/format_json5.py | 4 ++-- examples/docs/introduction/format_toml.py | 4 ++-- examples/docs/introduction/format_yaml.py | 4 ++-- .../docs/introduction/intro_decorator_file.py | 4 ++-- examples/docs/introduction/intro_file_like.py | 6 ++--- examples/load_all_formats.py | 22 +++++++++--------- 101 files changed, 482 insertions(+), 419 deletions(-) create mode 100644 examples/docs/api_reference/api_reference_decorator_mode.py create mode 100644 examples/docs/api_reference/api_reference_field_path.py create mode 100644 examples/docs/api_reference/api_reference_function_mode.py diff --git a/README.md b/README.md index 55a6c66..a323ac3 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,8 @@ pip install dature[secure] # Secret detection heuristics ```python from dataclasses import dataclass -from dature import Source, load + +import dature @dataclass class Config: @@ -52,7 +53,7 @@ class Config: port: int debug: bool = False -config = load(Source(file="config.yaml"), 
Config) +config = dature.load(dature.Source(file="config.yaml"), Config) ``` ## Key Features @@ -65,7 +66,7 @@ config = load(Source(file="config.yaml"), Config) - **ENV expansion** — `$VAR`, `${VAR:-default}` substitution in all file formats - **Special types** — `SecretStr`, `ByteSize`, `PaymentCardNumber`, `URL`, `Base64UrlStr` - **Debug report** — `debug=True` shows which source provided each field value -- **Decorator mode** — `@load(meta)` auto-loads config on dataclass instantiation with caching +- **Decorator mode** — `@dature.load(meta)` auto-loads config on dataclass instantiation with caching See the **[documentation](https://dature.readthedocs.io/)** for detailed guides and API reference. diff --git a/docs/advanced/configure.md b/docs/advanced/configure.md index 1aebf26..c508bc1 100644 --- a/docs/advanced/configure.md +++ b/docs/advanced/configure.md @@ -42,7 +42,7 @@ Customize defaults for the entire application — programmatically or via enviro ### type_loaders -Register global custom type loaders that apply to all `load()` calls. See [Custom Types & Loaders](custom_types.md#per-source-vs-global). +Register global custom type loaders that apply to all `dature.load()` calls. See [Custom Types & Loaders](custom_types.md#per-source-vs-global). ## Environment Variables diff --git a/docs/advanced/env-expansion.md b/docs/advanced/env-expansion.md index f19058b..f715bee 100644 --- a/docs/advanced/env-expansion.md +++ b/docs/advanced/env-expansion.md @@ -131,7 +131,7 @@ The `${VAR:-default}` fallback syntax works in all modes. ## File Path Expansion -Environment variables in `Source(file=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `Source` creation time. +Environment variables in `dature.Source(file=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `dature.Source` creation time. 
This works for both directory paths and file names: diff --git a/docs/advanced/merge-rules.md b/docs/advanced/merge-rules.md index ba55516..327174a 100644 --- a/docs/advanced/merge-rules.md +++ b/docs/advanced/merge-rules.md @@ -86,7 +86,7 @@ Each strategy produces a different result: --8<-- "examples/docs/advanced/merge_rules/merging_field_prepend_unique.py" ``` -Nested fields are supported: `F[Config].database.host`. +Nested fields are supported: `dature.F[Config].database.host`. Per-field strategies work with `RAISE_ON_CONFLICT` — fields with an explicit strategy are excluded from conflict detection. diff --git a/docs/api-reference.md b/docs/api-reference.md index 06d4d01..12a137f 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -2,7 +2,7 @@ ## Core -### `load()` +### `dature.load()` ```python --8<-- "src/dature/main.py:load" @@ -13,16 +13,13 @@ Main entry point. Two calling patterns: **Function mode** — pass `dataclass_`, get an instance back: ```python -config = load(Source(file="config.yaml"), Config) +--8<-- "examples/docs/api_reference/api_reference_function_mode.py" ``` **Decorator mode** — omit `dataclass_`, get a decorator: ```python -@load(Source(file="config.yaml")) -@dataclass -class Config: - host: str +--8<-- "examples/docs/api_reference/api_reference_decorator_mode.py" ``` **Parameters:** @@ -98,7 +95,7 @@ See [Introduction — Source Reference](introduction.md#source-reference) for pa --8<-- "src/dature/metadata.py:field-group" ``` -Usage: `FieldGroup(F[Config].host, F[Config].port)` +Usage: `dature.FieldGroup(dature.F[Config].host, dature.F[Config].port)` --- @@ -109,9 +106,7 @@ Usage: `FieldGroup(F[Config].host, F[Config].port)` Factory for building field paths with validation: ```python -F[Config].host # FieldPath with eager validation -F[Config].database.host # nested path -F["Config"].host # string-based, no validation (for decorator mode) +--8<-- "examples/docs/api_reference/api_reference_field_path.py" ``` --- diff --git 
a/docs/features/naming.md b/docs/features/naming.md index 6c75d1b..5c4dc28 100644 --- a/docs/features/naming.md +++ b/docs/features/naming.md @@ -48,7 +48,7 @@ Explicit field renaming using `F` objects. Takes priority over `name_style`: A field can have multiple aliases — the first matching key in the source wins: ```python -field_mapping={F[Config].name: ("fullName", "userName")} +field_mapping={dature.F[Config].name: ("fullName", "userName")} ``` ### Nested Fields diff --git a/docs/features/validation.md b/docs/features/validation.md index 9e23ce2..13e61f9 100644 --- a/docs/features/validation.md +++ b/docs/features/validation.md @@ -90,8 +90,8 @@ A single validator can be passed directly. Multiple validators require a tuple: ```python validators={ - F[Config].port: (Gt(value=0), Lt(value=65536)), # tuple for multiple - F[Config].host: MinLength(value=1), # single, no tuple needed + dature.F[Config].port: (Gt(value=0), Lt(value=65536)), # tuple for multiple + dature.F[Config].host: MinLength(value=1), # single, no tuple needed } ``` @@ -99,8 +99,8 @@ Nested fields are supported: ```python validators={ - F[Config].database.host: MinLength(value=1), - F[Config].database.port: Gt(value=0), + dature.F[Config].database.host: MinLength(value=1), + dature.F[Config].database.port: Gt(value=0), } ``` diff --git a/docs/index.md b/docs/index.md index 2b926fd..cb14873 100644 --- a/docs/index.md +++ b/docs/index.md @@ -116,7 +116,7 @@ The format is auto-detected from the file extension. When `file` is not specifie ## mypy Plugin -When using `@load()` as a decorator, mypy will report `call-arg` errors because the original dataclass `__init__` still requires all fields. dature ships with a mypy plugin that makes all fields optional in decorated classes: +When using `@dature.load()` as a decorator, mypy will report `call-arg` errors because the original dataclass `__init__` still requires all fields. 
dature ships with a mypy plugin that makes all fields optional in decorated classes: ```toml [tool.mypy] diff --git a/docs/introduction.md b/docs/introduction.md index 3e4c8e3..5bf9162 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -123,7 +123,7 @@ Override auto-detection with the `loader` parameter: ```python from dature.sources_loader.yaml_ import Yaml11Loader -Source(file="config.yaml", loader=Yaml11Loader) +dature.Source(file="config.yaml", loader=Yaml11Loader) ``` ## Source Reference diff --git a/examples/docs/advanced/caching/advanced_caching.py b/examples/docs/advanced/caching/advanced_caching.py index 7d84db5..bdb1359 100644 --- a/examples/docs/advanced/caching/advanced_caching.py +++ b/examples/docs/advanced/caching/advanced_caching.py @@ -3,13 +3,13 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature os.environ["CACHE_HOST"] = "localhost" os.environ["CACHE_PORT"] = "6379" -@load(Source(prefix="CACHE_"), cache=True) +@dature.load(dature.Source(prefix="CACHE_"), cache=True) @dataclass class CachedConfig: host: str @@ -26,7 +26,7 @@ class CachedConfig: os.environ["NOCACHE_PORT"] = "6379" -@load(Source(prefix="NOCACHE_"), cache=False) +@dature.load(dature.Source(prefix="NOCACHE_"), cache=False) @dataclass class UncachedConfig: host: str diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index 265a434..aa1c9b4 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -1,9 +1,9 @@ -"""Global configure() — customize masking, error display, loading defaults.""" +"""Global dature.configure() — customize masking, error display, loading defaults.""" from dataclasses import dataclass from pathlib import Path -from dature import Source, configure, get_load_report, load +import dature from dature.config import LoadingConfig SHARED_DIR = Path(__file__).parents[2] / 
"shared" @@ -17,20 +17,20 @@ class Config: # 1. Default config — debug is off, no report -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is None -# 2. Enable debug globally via configure() -configure(loading=LoadingConfig(debug=True)) +# 2. Enable debug globally via dature.configure() +dature.configure(loading=LoadingConfig(debug=True)) -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is not None # 3. Reset to defaults — debug is off again -configure(loading=LoadingConfig()) +dature.configure(loading=LoadingConfig()) -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is None diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index 7c5bde3..4f56d00 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -1,10 +1,10 @@ -"""Global configure() via environment variables — DATURE_ prefix.""" +"""Global dature.configure() via environment variables — DATURE_ prefix.""" import os from dataclasses import dataclass from pathlib import Path -from dature import Source, configure, get_load_report, load +import dature from dature.config import LoadingConfig SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -21,20 +21,20 @@ class Config: # 1. 
DATURE_LOADING__DEBUG=true — debug is on, report attached -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is not None -# 2. Override env with configure() — debug is off -configure(loading=LoadingConfig(debug=False)) +# 2. Override env with dature.configure() — debug is off +dature.configure(loading=LoadingConfig(debug=False)) -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is None # 3. Reset to env defaults — debug is on again -configure(loading=LoadingConfig(debug=True)) +dature.configure(loading=LoadingConfig(debug=True)) -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) -report = get_load_report(config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +report = dature.get_load_report(config) assert report is not None diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index df66d83..5f85d2a 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -1,9 +1,9 @@ -"""Global type_loaders via configure() — register custom type parsers for all load() calls.""" +"""Global type_loaders via dature.configure() — register custom type parsers for all load() calls.""" from dataclasses import dataclass from pathlib import Path -from dature import Source, TypeLoader, configure, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -27,7 +27,7 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to 
every load() call -configure(type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),)) +dature.configure(type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),)) -config = load(Source(file=SOURCES_DIR / "custom_type_common.yaml"), AppConfig) +config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py index c034cfe..ea23344 100644 --- a/examples/docs/advanced/custom_types/custom_loader.py +++ b/examples/docs/advanced/custom_types/custom_loader.py @@ -7,7 +7,7 @@ from adaptix import Provider, loader -from dature import Source, load +import dature from dature.sources_loader.base import BaseLoader from dature.sources_loader.loaders import bool_loader, float_from_string from dature.types import FileOrStream, JSONValue @@ -40,8 +40,8 @@ class Config: debug: bool -config = load( - Source( +config = dature.load( + dature.Source( file=SOURCES_DIR / "custom_loader.xml", loader=XmlLoader, ), diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index 5f14977..f000fc2 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, TypeLoader, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -26,10 +26,10 @@ class AppConfig: color: Rgb -config = load( - Source( +config = dature.load( + dature.Source( file=SOURCES_DIR / "custom_type_common.yaml", - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), ), AppConfig, ) diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py 
b/examples/docs/advanced/custom_types/custom_type_merge.py index e0aafc5..a6fe3e2 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, TypeLoader, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -26,11 +26,11 @@ class AppConfig: color: Rgb -config = load( - Merge( - Source(file=SOURCES_DIR / "custom_type_common.yaml"), - Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), + dature.Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), + type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), ), AppConfig, ) diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 3707eb2..7f51fe3 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -1,9 +1,9 @@ -"""Report on error — get_load_report() from the dataclass type after a failed load.""" +"""Report on error — dature.get_load_report() from the dataclass type after a failed load.""" from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, get_load_report, load +import dature from dature.errors.exceptions import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" @@ -18,16 +18,16 @@ class Config: try: - config = load( - Merge( - Source(file=SHARED_DIR / "common_overrides.yaml"), - Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), + config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), ), Config, 
debug=True, ) except DatureConfigError: - report = get_load_report(Config) + report = dature.get_load_report(Config) assert report is not None assert report.dataclass_name == "Config" diff --git a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py index 8cce215..db22ecb 100644 --- a/examples/docs/advanced/debug/advanced_debug_logging.py +++ b/examples/docs/advanced/debug/advanced_debug_logging.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature log_stream = io.StringIO() handler = logging.StreamHandler(log_stream) @@ -23,10 +23,10 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), ), Config, ) diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py index cc3099b..5c152d1 100644 --- a/examples/docs/advanced/debug/advanced_debug_report.py +++ b/examples/docs/advanced/debug/advanced_debug_report.py @@ -1,9 +1,9 @@ -"""Debug report — get_load_report() to inspect which source provided each field.""" +"""Debug report — dature.get_load_report() to inspect which source provided each field.""" from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, get_load_report, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,16 +15,16 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), 
), Config, debug=True, ) -report = get_load_report(config) +report = dature.get_load_report(config) assert report is not None origins = report.field_origins diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py index 2995a5d..023d0ec 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -23,8 +23,8 @@ class Config: escape_percent: str -config = load( - Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), +config = dature.load( + dature.Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py index b249168..1f6e050 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -18,8 +18,8 @@ class Config: port: int -config = load( - Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), +config = dature.load( + dature.Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py index 11e3121..57d7e69 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py +++ 
b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,8 +17,8 @@ class Config: port: int -config = load( - Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), +config = dature.load( + dature.Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py index 8d9c1cf..5c7f893 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,8 +17,8 @@ class Config: port: int -config = load( - Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), +config = dature.load( + dature.Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py index d78f951..3fd5f10 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -21,11 +21,11 @@ class Config: disabled_unset_url: str -config = load( - Merge( - Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" - Source(file=SOURCES_DIR / 
"advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), - Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), expand_env_vars="default", # global default for all sources ), Config, diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py index 8a9d9b6..38b5c03 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,8 +17,8 @@ class Config: fallback_url: str -config = load( - Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), +config = dature.load( + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), Config, ) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 4078a76..36de173 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -4,7 +4,7 @@ from pathlib import Path from textwrap import dedent -from dature import F, FieldGroup, Merge, Source, load +import dature from dature.errors.exceptions import FieldGroupError SOURCES_DIR = Path(__file__).parent / 
"sources" @@ -23,14 +23,14 @@ class Config: database: Database -# FieldGroup(F[Config].database, F[Config].port) +# dature.FieldGroup(dature.F[Config].database, dature.F[Config].port) # expands to (database.host, database.port, port) try: - load( - Merge( - Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), - Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), - field_groups=(FieldGroup(F[Config].database, F[Config].port),), + dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), + field_groups=(dature.FieldGroup(dature.F[Config].database, dature.F[Config].port),), ), Config, ) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index f931d41..6f339e7 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -4,7 +4,7 @@ from pathlib import Path from textwrap import dedent -from dature import F, FieldGroup, Merge, Source, load +import dature from dature.errors.exceptions import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" @@ -21,13 +21,13 @@ class Config: try: - load( - Merge( - Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), + dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), + dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), + dature.FieldGroup(dature.F[Config].user, 
dature.F[Config].password), ), ), Config, diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 6ec796b..f97a9a6 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -4,7 +4,7 @@ from pathlib import Path from textwrap import dedent -from dature import F, FieldGroup, Merge, Source, load +import dature from dature.errors.exceptions import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" @@ -21,13 +21,13 @@ class Config: try: - load( - Merge( - Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), + dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), + dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), + dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), ), ), Config, diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index 3af886f..4f1a3ec 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any -from dature import F, Merge, MergeRule, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -20,12 +20,12 @@ def merge_tags(values: list[Any]) -> list[str]: return sorted({v for lst in values for v in lst}) -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - 
Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.LAST_WINS, - field_merges=(MergeRule(F[Config].tags, merge_tags),), +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.LAST_WINS, + field_merges=(dature.MergeRule(dature.F[Config].tags, merge_tags),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index 499d429..970985c 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,15 +15,15 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.RAISE_ON_CONFLICT, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS), - MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE), + dature.MergeRule(dature.F[Config].host, dature.FieldMergeStrategy.LAST_WINS), + dature.MergeRule(dature.F[Config].port, dature.FieldMergeStrategy.LAST_WINS), + dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE), ), ), Config, diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py 
b/examples/docs/advanced/merge_rules/merging_field_append.py index 1f8f86f..c46c4bb 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index 3a082bc..3ff0509 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, 
dature.FieldMergeStrategy.APPEND_UNIQUE),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index 7deec3a..627553c 100644 --- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.FIRST_WINS),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.FIRST_WINS),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/merge_rules/merging_field_groups.py index 185ae0e..7e69d35 100644 --- a/examples/docs/advanced/merge_rules/merging_field_groups.py +++ b/examples/docs/advanced/merge_rules/merging_field_groups.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldGroup, Merge, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -17,11 +17,11 @@ class Config: password: str -config = load( - Merge( - Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + 
dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), + field_groups=(dature.FieldGroup(dature.F[Config].host, dature.F[Config].port),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index d166721..8714044 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.LAST_WINS),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.LAST_WINS),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index 32732db..0632cbd 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, 
FieldMergeStrategy.PREPEND),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index e9ad5a8..f10a659 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -13,11 +13,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_field_base.yaml"), - Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND_UNIQUE),), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index a155958..b04e050 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -14,11 +14,11 @@ class Config: port: int -config = load( - Merge( - Source(file=SOURCES_DIR / 
"merging_first_found_primary.yaml"), - Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), - strategy=MergeStrategy.FIRST_FOUND, +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), + dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), + strategy=dature.MergeStrategy.FIRST_FOUND, ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py index 6728a6c..6c28363 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -16,10 +16,10 @@ class Config: debug: bool = False -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), ), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py index fd9801b..5ae1fb1 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -16,14 +16,14 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global - 
Source( +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global + dature.Source( file=SOURCES_DIR / "optional.yaml", skip_if_broken=True, ), # always skip if broken - Source( + dature.Source( file=SHARED_DIR / "common_overrides.yaml", skip_if_broken=False, ), # never skip, even if global is True diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py index ed09ead..8416c54 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -14,8 +14,8 @@ class Config: port: int = 3000 -config = load( - Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), +config = dature.load( + dature.Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py index 5f22b2f..b015586 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,12 +15,12 @@ class Config: timeout: int -config = load( - Merge( - Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), - Source( +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), + dature.Source( file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", - 
skip_if_invalid=(F[Config].port, F[Config].timeout), + skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout), ), ), Config, diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py index 4a38050..ad84ee7 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py @@ -4,7 +4,7 @@ from pathlib import Path from tempfile import TemporaryDirectory -from dature import Source, load +import dature from dature.sources_loader.docker_secrets import DockerSecretsLoader @@ -25,8 +25,8 @@ class Config: (secrets_path / "database__host").write_text("flat-host") (secrets_path / "database__port").write_text("3306") - config = load( - Source( + config = dature.load( + dature.Source( file=secrets_path, loader=DockerSecretsLoader, nested_resolve_strategy="json", diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py index 659203b..abdba11 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature from dature.sources_loader.env_ import EnvFileLoader SOURCES_DIR = Path(__file__).parent / "sources" @@ -20,8 +20,8 @@ class Config: database: Database -config = load( - Source( +config = dature.load( + dature.Source( file=SOURCES_DIR / "nested_resolve.env", loader=EnvFileLoader, prefix="APP__", diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py index 322fae9..b1685b7 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py +++ 
b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -22,8 +22,8 @@ class Config: database: Database -config = load( - Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), +config = dature.load( + dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py index f0ff36b..de2399a 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -22,8 +22,8 @@ class Config: database: Database -config = load( - Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"), +config = dature.load( + dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"), Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py index 702b176..4fa6581 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature from dature.sources_loader.env_ import EnvLoader # Only JSON form, no flat keys @@ -26,8 +26,8 @@ class Config: # Even with strategy="flat", JSON is parsed because there are no flat keys 
-config = load( - Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), +config = dature.load( + dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_override.py b/examples/docs/advanced/nested_resolve/nested_resolve_override.py index fa37a51..ee8bd89 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_override.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_override.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import F, Source, load +import dature from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -33,12 +33,12 @@ class Config: # Global: "flat" for everything, but database overridden to "json" -config = load( - Source( +config = dature.load( + dature.Source( loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat", - nested_resolve={"json": (F[Config].database,)}, + nested_resolve={"json": (dature.F[Config].database,)}, ), Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py index a2f80b7..3be0f92 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import F, Source, load +import dature from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -33,13 +33,13 @@ class Config: # database uses JSON, cache uses flat keys -config = load( - Source( +config = dature.load( + dature.Source( loader=EnvLoader, prefix="APP__", nested_resolve={ - "json": (F[Config].database,), - "flat": (F[Config].cache,), + "json": (dature.F[Config].database,), + "flat": (dature.F[Config].cache,), }, ), 
Config, diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py index d959e8c..dc85a3a 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -23,7 +23,7 @@ class Config: # Without nested_resolve_strategy, flat keys win by default -config = load(Source(loader=EnvLoader, prefix="APP__"), Config) +config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), Config) assert config.database.host == "flat-host" assert config.database.port == 3306 diff --git a/examples/docs/api_reference/api_reference_decorator_mode.py b/examples/docs/api_reference/api_reference_decorator_mode.py new file mode 100644 index 0000000..cfd6463 --- /dev/null +++ b/examples/docs/api_reference/api_reference_decorator_mode.py @@ -0,0 +1,23 @@ +"""Decorator mode — omit dataclass_, get a decorator.""" + +from dataclasses import dataclass +from pathlib import Path + +import dature + +SHARED_DIR = Path(__file__).parents[1] / "shared" + + +@dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml")) +@dataclass +class Config: + host: str + port: int + debug: bool = False + + +config = Config() + +assert config.host == "localhost" +assert config.port == 8080 +assert config.debug is False diff --git a/examples/docs/api_reference/api_reference_field_path.py b/examples/docs/api_reference/api_reference_field_path.py new file mode 100644 index 0000000..9a7c64c --- /dev/null +++ b/examples/docs/api_reference/api_reference_field_path.py @@ -0,0 +1,22 @@ +"""F[] factory for building field paths with validation.""" + +from dataclasses import dataclass + +import dature + + +@dataclass +class 
Database: + host: str + port: int + + +@dataclass +class Config: + database: Database + host: str + + +path_eager = dature.F[Config].host +path_nested = dature.F[Config].database.host +path_string = dature.F["Config"].host diff --git a/examples/docs/api_reference/api_reference_function_mode.py b/examples/docs/api_reference/api_reference_function_mode.py new file mode 100644 index 0000000..8505e84 --- /dev/null +++ b/examples/docs/api_reference/api_reference_function_mode.py @@ -0,0 +1,22 @@ +"""Function mode — pass dataclass_, get an instance back.""" + +from dataclasses import dataclass +from pathlib import Path + +import dature + +SHARED_DIR = Path(__file__).parents[1] / "shared" + + +@dataclass +class Config: + host: str + port: int + debug: bool = False + + +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) + +assert config.host == "localhost" +assert config.port == 8080 +assert config.debug is False diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py index e7c4b1a..e1ed8b9 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "dynaconf_basic.toml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index 469eb1c..4b4942e 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ 
b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,11 +15,11 @@ class Config: # --8<-- [start:merge] -config = load( - Merge( - Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), - Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), - strategy=MergeStrategy.LAST_WINS, +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), + strategy=dature.MergeStrategy.LAST_WINS, ), Config, ) diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index 50fdf62..1a1f9dc 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Gt, Lt from dature.validators.root import RootValidator @@ -25,8 +25,8 @@ def check_debug_port(config: Config) -> bool: try: - load( - Source( + dature.load( + dature.Source( file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", root_validators=( RootValidator( diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index c0e7480..2710822 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load 
+import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Gt, Lt @@ -20,7 +20,7 @@ class Config: try: - load(Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py index 5118a5d..b2ebd40 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py +++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,7 +15,7 @@ class Config: # --8<-- [start:dataclass] -config = load(Source(file=SOURCES_DIR / "hydra_defaults.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), Config) assert isinstance(config, Config) # Full IDE support, type safety, __post_init__ works # --8<-- [end:dataclass] diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py b/examples/docs/comparison/why-not-hydra/hydra_merge.py index ae90ed4..feff9c6 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_merge.py +++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,11 +15,11 @@ class Config: # --8<-- [start:merge] -config = load( - Merge( - Source(file=SOURCES_DIR / "hydra_defaults.yaml"), - Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), - Source(prefix="APP_"), +config = dature.load( + dature.Merge( + 
dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), + dature.Source(prefix="APP_"), ), Config, ) diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index 342a485..d57744d 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Gt, Lt @@ -19,7 +19,7 @@ class Config: try: - load(Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "hydra_validators_invalid.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py index c28d5d0..4ed4c73 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,9 +16,9 @@ class Config: # --8<-- [start:auto-detect] # Just change the file — dature picks the right loader -yaml_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config) -toml_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config) -json5_config = load(Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config) 
+yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config) +toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config) +json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config) # --8<-- [end:auto-detect] assert yaml_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py index 177d013..9e73e3e 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py index ecd8040..4e8dd24 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,11 +15,11 @@ class Config: # --8<-- [start:merge] -config = load( - Merge( - Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), - Source(file=SOURCES_DIR / 
"pydantic_settings_merge_local.yaml", skip_if_broken=True), - Source(prefix="APP_"), +config = dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), + dature.Source(prefix="APP_"), ), Config, ) diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index d1b20a8..1489de1 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Literal -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,7 +17,7 @@ class Config: try: - load(Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_by_name.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py index ebd3964..0b44eab 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -1,16 +1,16 @@ -"""Classic ab*****cd masking style via configure().""" +"""Classic ab*****cd masking style via dature.configure().""" from dataclasses import dataclass from pathlib import Path -from dature import Source, configure, load +import dature from dature.config import MaskingConfig from dature.masking.masking import mask_value SOURCES_DIR = Path(__file__).parent / "sources" # --8<-- [start:classic-style] -configure(masking=MaskingConfig(mask="*****", visible_prefix=2, visible_suffix=2)) +dature.configure(masking=MaskingConfig(mask="*****", visible_prefix=2, 
visible_suffix=2)) # "my_secret_password" → "my*****rd" # "ab" → "ab" (too short — shown as-is) # --8<-- [end:classic-style] @@ -22,8 +22,8 @@ class Config: host: str -config = load(Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" -configure(masking=MaskingConfig()) +dature.configure(masking=MaskingConfig()) diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index e2c8a2f..8564745 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Literal -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,8 +17,8 @@ class Config: try: - load( - Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), + dature.load( + dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), Config, ) except DatureConfigError as exc: diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index 5b61ec8..ef75696 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Merge, Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.string import MinLength @@ -20,10 +20,10 @@ class Config: # --8<-- [start:merge-mode] try: - load( - Merge( - Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), - Source( + dature.load( + dature.Merge( + dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), + 
dature.Source( file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", secret_field_names=("api_key",), ), diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index 04421dd..cc7652a 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.string import MinLength @@ -19,8 +19,8 @@ class Config: # --8<-- [start:no-mask] try: - load( - Source( + dature.load( + dature.Source( file=SOURCES_DIR / "masking_per_source.yaml", mask_secrets=False, ), diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index 1425de3..f5a31e4 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.string import MinLength @@ -19,8 +19,8 @@ class Config: # --8<-- [start:per-source] try: - load( - Source( + dature.load( + dature.Source( file=SOURCES_DIR / "masking_per_source.yaml", secret_field_names=("api_key",), ), diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index 13389e2..ead86bb 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr 
@@ -19,8 +19,8 @@ class Config: try: - config = load( - Source(file=SOURCES_DIR / "masking_secret_str.yaml"), + config = dature.load( + dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), Config, ) except DatureConfigError as exc: diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index d8d4f55..7c6de7b 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,11 +15,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.LAST_WINS, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.LAST_WINS, ), Config, ) diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index 61344ee..e808ad6 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,20 +15,20 @@ class Config: tags: list[str] -last_wins = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.LAST_WINS, +last_wins = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + 
strategy=dature.MergeStrategy.LAST_WINS, ), Config, ) -first_wins = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.FIRST_WINS, +first_wins = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.FIRST_WINS, ), Config, ) diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index 115f298..1db5873 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,12 +15,12 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "nonexistent.yaml"), - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.FIRST_FOUND, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "nonexistent.yaml"), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.FIRST_FOUND, ), Config, ) diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index 347fea2..aa6f6e8 100644 --- a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,11 
+15,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.FIRST_WINS, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.FIRST_WINS, ), Config, ) diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py index eef8578..89d59ea 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,11 +15,11 @@ class Config: tags: list[str] -config = load( - Merge( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=MergeStrategy.LAST_WINS, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + strategy=dature.MergeStrategy.LAST_WINS, ), Config, ) diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 9a4312f..035b348 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Merge, MergeStrategy, Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,11 +15,11 @@ class Config: debug: bool -config = load( - Merge( - Source(file=SHARED_DIR / 
"common_raise_on_conflict_a.yaml"), - Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), - strategy=MergeStrategy.RAISE_ON_CONFLICT, +config = dature.load( + dature.Merge( + dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), + dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), + strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, ), Config, ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py index 45aebff..fd6c9fa 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand.py +++ b/examples/docs/features/merging/merging_tuple_shorthand.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -15,10 +15,10 @@ class Config: tags: list[str] -config = load( +config = dature.load( ( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), ), Config, ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py index ddfc51a..98f0c76 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py +++ b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py @@ -4,17 +4,17 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SHARED_DIR = Path(__file__).parents[2] / "shared" os.environ["APP_HOST"] = "env_localhost" -@load( +@dature.load( ( - Source(file=SHARED_DIR / "common_defaults.yaml"), - Source(prefix="APP_"), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(prefix="APP_"), ), ) @dataclass diff --git a/examples/docs/features/naming/naming_field_mapping.py 
b/examples/docs/features/naming/naming_field_mapping.py index 831c691..ab25a87 100644 --- a/examples/docs/features/naming/naming_field_mapping.py +++ b/examples/docs/features/naming/naming_field_mapping.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,13 +15,13 @@ class DbConfig: pool_size: int -config = load( - Source( +config = dature.load( + dature.Source( file=SOURCES_DIR / "naming_field_mapping.yaml", field_mapping={ - F[DbConfig].database_url: "db_url", - F[DbConfig].secret_key: "key", - F[DbConfig].pool_size: "pool", + dature.F[DbConfig].database_url: "db_url", + dature.F[DbConfig].secret_key: "key", + dature.F[DbConfig].pool_size: "pool", }, ), DbConfig, diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py index af77f2d..ec0de2c 100644 --- a/examples/docs/features/naming/naming_name_style.py +++ b/examples/docs/features/naming/naming_name_style.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,8 +16,8 @@ class ApiConfig: base_url: str -config = load( - Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), +config = dature.load( + dature.Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), ApiConfig, ) diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py index f491f4c..79c109b 100644 --- a/examples/docs/features/naming/naming_nested_fields.py +++ b/examples/docs/features/naming/naming_nested_fields.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -20,14 +20,14 @@ class User: address: 
Address -config = load( - Source( +config = dature.load( + dature.Source( file=SOURCES_DIR / "naming_nested_fields.yaml", field_mapping={ - F[User].name: "fullName", - F[User].address: "location", - F[Address].city: "cityName", - F[Address].street: "streetName", + dature.F[User].name: "fullName", + dature.F[User].address: "location", + dature.F[Address].city: "cityName", + dature.F[Address].street: "streetName", }, ), User, diff --git a/examples/docs/features/naming/naming_prefix.py b/examples/docs/features/naming/naming_prefix.py index fac6ad6..46b26d1 100644 --- a/examples/docs/features/naming/naming_prefix.py +++ b/examples/docs/features/naming/naming_prefix.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature os.environ["MYAPP_HOST"] = "localhost" os.environ["MYAPP_PORT"] = "9090" @@ -17,7 +17,7 @@ class Config: debug: bool = False -config = load(Source(prefix="MYAPP_"), Config) +config = dature.load(dature.Source(prefix="MYAPP_"), Config) assert config.host == "localhost" assert config.port == 9090 diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py index 5430887..36facf8 100644 --- a/examples/docs/features/naming/naming_prefix_nested.py +++ b/examples/docs/features/naming/naming_prefix_nested.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -14,7 +14,7 @@ class Database: port: int -db = load(Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database) +db = dature.load(dature.Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database) assert db.host == "localhost" assert db.port == 5432 diff --git a/examples/docs/features/naming/naming_split_symbols.py b/examples/docs/features/naming/naming_split_symbols.py index 285b83c..a287e26 100644 --- 
a/examples/docs/features/naming/naming_split_symbols.py +++ b/examples/docs/features/naming/naming_split_symbols.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature os.environ["NS_DB__HOST"] = "localhost" os.environ["NS_DB__PORT"] = "5432" @@ -20,7 +20,7 @@ class Config: db: Database -config = load(Source(prefix="NS_", split_symbols="__"), Config) +config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), Config) assert config.db.host == "localhost" assert config.db.port == 5432 diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index b5da2c9..6109e09 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems @@ -22,8 +22,8 @@ class ServiceConfig: try: - load( - Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), + dature.load( + dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), ServiceConfig, ) except DatureConfigError as exc: diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index 032dcb6..5f2f603 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -5,7 +5,7 @@ from pathlib import Path from typing import Annotated -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Ge @@ -36,8 +36,8 @@ class ServiceConfig: try: - load( - Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), + 
dature.load( + dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), ServiceConfig, ) except DatureConfigError as exc: diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 0fe7b59..1ed2bdd 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import F, Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.number import Ge, Lt from dature.validators.string import MinLength @@ -19,12 +19,12 @@ class Config: try: - load( - Source( + dature.load( + dature.Source( file=SOURCES_DIR / "validation_metadata_invalid.yaml", validators={ - F[Config].host: MinLength(value=1), - F[Config].port: (Ge(value=1), Lt(value=65536)), + dature.F[Config].host: MinLength(value=1), + dature.F[Config].port: (Ge(value=1), Lt(value=65536)), }, ), Config, diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 6d55936..24fcd80 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -25,6 +25,6 @@ def address(self) -> str: try: - load(Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config) except ValueError as exc: assert str(exc) == "port must be between 1 and 65535, got 99999" diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index f60f2ea..1aa9fc4 100644 --- 
a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature from dature.errors.exceptions import DatureConfigError from dature.validators.root import RootValidator @@ -24,8 +24,8 @@ def check_debug_not_on_production(obj: Config) -> bool: try: - load( - Source( + dature.load( + dature.Source( file=SOURCES_DIR / "validation_root_invalid.yaml", root_validators=( RootValidator( diff --git a/examples/docs/index/intro_decorator.py b/examples/docs/index/intro_decorator.py index 322be9c..18ad788 100644 --- a/examples/docs/index/intro_decorator.py +++ b/examples/docs/index/intro_decorator.py @@ -3,14 +3,14 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature os.environ["APP_HOST"] = "0.0.0.0" os.environ["APP_PORT"] = "8080" os.environ["APP_DEBUG"] = "true" -@load(Source(prefix="APP_")) +@dature.load(dature.Source(prefix="APP_")) @dataclass class AppConfig: host: str diff --git a/examples/docs/index/intro_function.py b/examples/docs/index/intro_function.py index e1f11c5..bccb2eb 100644 --- a/examples/docs/index/intro_function.py +++ b/examples/docs/index/intro_function.py @@ -3,7 +3,7 @@ import os from dataclasses import dataclass -from dature import Source, load +import dature os.environ["APP_HOST"] = "0.0.0.0" os.environ["APP_PORT"] = "8080" @@ -17,7 +17,7 @@ class AppConfig: debug: bool = False -config = load(Source(prefix="APP_"), AppConfig) +config = dature.load(dature.Source(prefix="APP_"), AppConfig) assert config.host == "0.0.0.0" assert config.port == 8080 diff --git a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py index cb1a1c3..e5c49f5 100644 --- a/examples/docs/introduction/format_docker.py +++ b/examples/docs/introduction/format_docker.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib 
import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,8 +15,8 @@ class Config: debug: bool = False -config = load( - Source(file=SOURCES_DIR / "intro_app_docker_secrets"), +config = dature.load( + dature.Source(file=SOURCES_DIR / "intro_app_docker_secrets"), Config, ) diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py index 633b864..828aa19 100644 --- a/examples/docs/introduction/format_env.py +++ b/examples/docs/introduction/format_env.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "intro_app.env"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py index d4af00c..6998819 100644 --- a/examples/docs/introduction/format_ini.py +++ b/examples/docs/introduction/format_ini.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,8 +15,8 @@ class Config: debug: bool = False -config = load( - Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), +config = dature.load( + dature.Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), Config, ) diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py index 5293c97..7884d3b 100644 --- a/examples/docs/introduction/format_json.py +++ b/examples/docs/introduction/format_json.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" 
@@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "intro_app.json"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_json5.py b/examples/docs/introduction/format_json5.py index de205ad..7c153ca 100644 --- a/examples/docs/introduction/format_json5.py +++ b/examples/docs/introduction/format_json5.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "intro_app.json5"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py index a848344..432b1a9 100644 --- a/examples/docs/introduction/format_toml.py +++ b/examples/docs/introduction/format_toml.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SOURCES_DIR = Path(__file__).parent / "sources" @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = load(Source(file=SOURCES_DIR / "intro_app.toml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py index d4ba2d0..d564a3c 100644 --- a/examples/docs/introduction/format_yaml.py +++ b/examples/docs/introduction/format_yaml.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SHARED_DIR = Path(__file__).parents[1] / "shared" @@ -15,7 +15,7 
@@ class Config: debug: bool = False -config = load(Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/intro_decorator_file.py b/examples/docs/introduction/intro_decorator_file.py index d81dc3e..6ba75b1 100644 --- a/examples/docs/introduction/intro_decorator_file.py +++ b/examples/docs/introduction/intro_decorator_file.py @@ -3,12 +3,12 @@ from dataclasses import dataclass from pathlib import Path -from dature import Source, load +import dature SHARED_DIR = Path(__file__).parents[1] / "shared" -@load(Source(file=SHARED_DIR / "common_app.yaml")) +@dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml")) @dataclass class Config: host: str diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py index 0ced9d7..66913a5 100644 --- a/examples/docs/introduction/intro_file_like.py +++ b/examples/docs/introduction/intro_file_like.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from io import BytesIO, StringIO -from dature import Source, load +import dature from dature.sources_loader.json_ import JsonLoader @@ -16,14 +16,14 @@ class Config: # From StringIO text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}') -config = load(Source(file=text_stream, loader=JsonLoader), Config) +config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), Config) assert config.host == "localhost" assert config.port == 8080 # From BytesIO binary_stream = BytesIO(b'{"host": "0.0.0.0", "port": 3000}') -config = load(Source(file=binary_stream, loader=JsonLoader), Config) +config = dature.load(dature.Source(file=binary_stream, loader=JsonLoader), Config) assert config.host == "0.0.0.0" assert config.port == 3000 diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index 4ed8c65..e72728a 100644 --- 
a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -4,7 +4,7 @@ from all_types_dataclass import AllPythonTypesCompact # type: ignore[import-not-found] -from dature import Source, load +import dature from dature.sources_loader.docker_secrets import DockerSecretsLoader from dature.sources_loader.toml_ import Toml10Loader from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader @@ -12,21 +12,21 @@ SOURCES_DIR = Path(__file__).parent / "sources" FORMATS = { - "json": Source(file=SOURCES_DIR / "all_types.json"), - "json5": Source(file=SOURCES_DIR / "all_types.json5"), - "toml10": Source(file=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader), - "toml11": Source(file=SOURCES_DIR / "all_types_toml11.toml"), - "ini": Source(file=SOURCES_DIR / "all_types.ini", prefix="all_types"), - "yaml11": Source(file=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader), - "yaml12": Source(file=SOURCES_DIR / "all_types_yaml12.yaml", loader=Yaml12Loader), - "env": Source(file=SOURCES_DIR / "all_types.env"), - "docker_secrets": Source( + "json": dature.Source(file=SOURCES_DIR / "all_types.json"), + "json5": dature.Source(file=SOURCES_DIR / "all_types.json5"), + "toml10": dature.Source(file=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader), + "toml11": dature.Source(file=SOURCES_DIR / "all_types_toml11.toml"), + "ini": dature.Source(file=SOURCES_DIR / "all_types.ini", prefix="all_types"), + "yaml11": dature.Source(file=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader), + "yaml12": dature.Source(file=SOURCES_DIR / "all_types_yaml12.yaml", loader=Yaml12Loader), + "env": dature.Source(file=SOURCES_DIR / "all_types.env"), + "docker_secrets": dature.Source( file=SOURCES_DIR / "all_types_docker_secrets", loader=DockerSecretsLoader, ), } for meta in FORMATS.values(): - config = load(meta, AllPythonTypesCompact) + config = dature.load(meta, AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value 
== 42 From 668eff169435527a80f5103363df99b46a30af14 Mon Sep 17 00:00:00 2001 From: niccolum Date: Sat, 28 Mar 2026 22:41:57 +0300 Subject: [PATCH 04/36] no Merge class --- docs/advanced/custom_types.md | 4 +- docs/advanced/env-expansion.md | 2 +- docs/api-reference.md | 35 +- docs/comparison/why-not-hydra.md | 2 +- docs/features/masking.md | 2 +- docs/features/merging.md | 13 +- .../advanced/configure/advanced_configure.py | 6 +- .../configure/advanced_configure_env.py | 6 +- .../advanced_configure_type_loaders.py | 2 +- .../advanced/custom_types/custom_loader.py | 2 +- .../docs/advanced/custom_types/custom_type.py | 2 +- .../custom_types/custom_type_merge.py | 10 +- .../advanced/debug/advanced_debug_error.py | 8 +- .../advanced/debug/advanced_debug_logging.py | 8 +- .../advanced/debug/advanced_debug_report.py | 8 +- .../env_expansion/advanced_env_expansion.py | 2 +- ...vanced_env_expansion_file_path_combined.py | 2 +- .../advanced_env_expansion_file_path_dir.py | 2 +- .../advanced_env_expansion_file_path_name.py | 2 +- .../advanced_env_expansion_merge.py | 12 +- .../advanced_env_expansion_strict.py | 2 +- .../advanced_field_groups_expansion_error.py | 10 +- .../advanced_field_groups_multiple_error.py | 14 +- .../advanced_field_groups_nested_error.py | 14 +- .../advanced_merge_rules_callable.py | 12 +- .../advanced_merge_rules_conflict.py | 18 +- .../merge_rules/merging_field_append.py | 10 +- .../merging_field_append_unique.py | 10 +- .../merge_rules/merging_field_first_wins.py | 10 +- .../merge_rules/merging_field_groups.py | 10 +- .../merge_rules/merging_field_last_wins.py | 10 +- .../merge_rules/merging_field_prepend.py | 10 +- .../merging_field_prepend_unique.py | 10 +- .../merge_rules/merging_first_found.py | 10 +- .../merge_rules/merging_skip_broken.py | 8 +- .../merging_skip_broken_per_source.py | 24 +- .../merge_rules/merging_skip_invalid.py | 2 +- .../merging_skip_invalid_per_field.py | 12 +- .../nested_resolve_docker_secrets.py | 2 +- 
.../nested_resolve/nested_resolve_envfile.py | 2 +- .../nested_resolve_global_flat.py | 2 +- .../nested_resolve_global_json.py | 2 +- .../nested_resolve_no_conflict.py | 2 +- .../nested_resolve/nested_resolve_override.py | 2 +- .../nested_resolve_per_field.py | 2 +- .../nested_resolve/nested_resolve_problem.py | 2 +- .../api_reference_function_mode.py | 2 +- .../why-not-dynaconf/dynaconf_basic.py | 2 +- .../why-not-dynaconf/dynaconf_merge.py | 10 +- .../dynaconf_root_validators.py | 2 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_dataclass.py | 2 +- .../comparison/why-not-hydra/hydra_merge.py | 10 +- .../why-not-hydra/hydra_validators.py | 2 +- .../pydantic_settings_auto_detect.py | 6 +- .../pydantic_settings_basic.py | 2 +- .../pydantic_settings_merge.py | 10 +- .../docs/features/masking/masking_by_name.py | 2 +- .../features/masking/masking_classic_style.py | 2 +- .../features/masking/masking_heuristic.py | 2 +- .../features/masking/masking_merge_mode.py | 12 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../features/masking/masking_secret_str.py | 2 +- .../docs/features/merging/merging_basic.py | 10 +- .../features/merging/merging_strategies.py | 20 +- .../merging/merging_strategy_first_found.py | 12 +- .../merging/merging_strategy_first_wins.py | 10 +- .../merging/merging_strategy_last_wins.py | 10 +- .../merging_strategy_raise_on_conflict.py | 10 +- .../merging/merging_tuple_shorthand.py | 10 +- .../merging_tuple_shorthand_decorator.py | 8 +- .../features/naming/naming_field_mapping.py | 2 +- .../docs/features/naming/naming_name_style.py | 2 +- .../features/naming/naming_nested_fields.py | 2 +- .../docs/features/naming/naming_prefix.py | 2 +- .../features/naming/naming_prefix_nested.py | 5 +- .../features/naming/naming_split_symbols.py | 2 +- .../validation/validation_post_init.py | 2 +- examples/docs/index/intro_function.py | 2 +- 
examples/docs/introduction/format_docker.py | 2 +- examples/docs/introduction/format_env.py | 2 +- examples/docs/introduction/format_ini.py | 2 +- examples/docs/introduction/format_json.py | 2 +- examples/docs/introduction/format_json5.py | 2 +- examples/docs/introduction/format_toml.py | 2 +- examples/docs/introduction/format_yaml.py | 2 +- examples/docs/introduction/intro_file_like.py | 4 +- examples/load_all_formats.py | 2 +- src/dature/__init__.py | 3 +- src/dature/config.py | 2 +- src/dature/loading/multi.py | 16 +- src/dature/loading/resolver.py | 2 +- src/dature/loading/source_loading.py | 18 +- src/dature/main.py | 98 +++- src/dature/metadata.py | 40 +- tests/errors/test_exceptions.py | 24 +- tests/errors/test_fixtures.py | 4 +- tests/loading/test_field_merges.py | 420 ++++++++---------- tests/loading/test_multi.py | 296 +++++------- tests/loading/test_skip_invalid_fields.py | 144 +++--- tests/loading/test_source_loading.py | 144 +++--- tests/masking/test_masking.py | 40 +- tests/merging/test_field_group.py | 262 +++++------ tests/merging/test_predicate.py | 6 +- tests/sources_loader/test_base.py | 40 +- tests/sources_loader/test_docker_secrets.py | 4 +- tests/sources_loader/test_env_.py | 16 +- tests/sources_loader/test_ini_.py | 12 +- tests/sources_loader/test_json5_.py | 16 +- tests/sources_loader/test_json_.py | 16 +- tests/sources_loader/test_nested_resolve.py | 78 ++-- tests/sources_loader/test_toml10_.py | 16 +- tests/sources_loader/test_toml11_.py | 16 +- tests/sources_loader/test_yaml11_.py | 16 +- tests/sources_loader/test_yaml12_.py | 16 +- tests/test_custom_loader.py | 2 +- tests/test_load_report.py | 69 ++- tests/test_main.py | 14 +- tests/test_type_loaders.py | 22 +- tests/validators/test_complex.py | 18 +- tests/validators/test_custom_validator.py | 14 +- tests/validators/test_metadata_validators.py | 34 +- tests/validators/test_number.py | 20 +- .../validators/test_post_init_and_property.py | 14 +- tests/validators/test_root_validator.py | 
12 +- tests/validators/test_sequence.py | 16 +- tests/validators/test_string.py | 16 +- 128 files changed, 1108 insertions(+), 1453 deletions(-) diff --git a/docs/advanced/custom_types.md b/docs/advanced/custom_types.md index 68442f3..b3daa26 100644 --- a/docs/advanced/custom_types.md +++ b/docs/advanced/custom_types.md @@ -16,7 +16,7 @@ Each `TypeLoader` maps a type to a conversion function: ### Per-source vs Global -`type_loaders` can be set per-source in `Source`, per-merge in `Merge`, or globally via `configure()`: +`type_loaders` can be set per-source in `Source`, in `dature.load()` for merge mode, or globally via `configure()`: === "Per-source (Source)" @@ -24,7 +24,7 @@ Each `TypeLoader` maps a type to a conversion function: --8<-- "examples/docs/advanced/custom_types/custom_type.py" ``` -=== "Per-merge (Merge)" +=== "Per-merge (load)" ```python --8<-- "examples/docs/advanced/custom_types/custom_type_merge.py" diff --git a/docs/advanced/env-expansion.md b/docs/advanced/env-expansion.md index f715bee..687e461 100644 --- a/docs/advanced/env-expansion.md +++ b/docs/advanced/env-expansion.md @@ -49,7 +49,7 @@ Set the mode on `Source`: --8<-- "examples/docs/advanced/env_expansion/sources/advanced_env_expansion_strict.yaml" ``` -For merge mode, set on `Merge` as default for all sources: +For merge mode, pass `expand_env_vars` to `dature.load()` as default for all sources: === "Python" diff --git a/docs/api-reference.md b/docs/api-reference.md index 12a137f..2eb31f9 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -26,10 +26,21 @@ Main entry point. Two calling patterns: | Parameter | Type | Description | |-----------|------|-------------| -| `metadata` | `Source \| Merge \| tuple[Source, ...] \| None` | Source descriptor. Tuple is shorthand for `Merge(...)` with `LAST_WINS`. `None` → `Source()` (env vars). | +| `*sources` | `Source` | One or more source descriptors. No sources → `Source()` (env vars). Multiple sources → merge mode. 
| | `dataclass_` | `type[T] \| None` | Target dataclass. If provided → function mode. If `None` → decorator mode. | | `cache` | `bool \| None` | Enable caching in decorator mode. Default from `configure()`. | | `debug` | `bool \| None` | Collect `LoadReport`. Default from `configure()`. | +| `strategy` | `MergeStrategy` | Merge strategy (default `LAST_WINS`). Only used with multiple sources. | +| `field_merges` | `tuple[MergeRule, ...]` | Per-field merge strategy overrides. | +| `field_groups` | `tuple[FieldGroup, ...]` | Groups of fields that must change together. | +| `skip_broken_sources` | `bool` | Skip sources that fail to load (default `False`). | +| `skip_invalid_fields` | `bool` | Skip fields that fail validation (default `False`). | +| `expand_env_vars` | `ExpandEnvVarsMode` | Env var expansion mode for all sources (default `"default"`). | +| `secret_field_names` | `tuple[str, ...] \| None` | Extra secret field name patterns. | +| `mask_secrets` | `bool \| None` | Enable/disable secret masking globally. | +| `type_loaders` | `tuple[TypeLoader, ...] \| None` | Custom type loaders. | +| `nested_resolve_strategy` | `NestedResolveStrategy \| None` | Default priority for JSON vs flat keys. See [Nested Resolve](advanced/nested-resolve.md). | +| `nested_resolve` | `NestedResolve \| None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). 
| --- @@ -43,28 +54,6 @@ See [Introduction — Source Reference](introduction.md#source-reference) for pa --- -### `Merge` - -```python ---8<-- "src/dature/metadata.py:merge-metadata" -``` - -| Parameter | Description | -|-----------|-------------| -| `sources` | Ordered tuple of `Source` to merge | -| `strategy` | Global merge strategy | -| `field_merges` | Per-field strategy overrides | -| `field_groups` | Groups of fields that must change together | -| `skip_broken_sources` | Global default for broken source handling | -| `skip_invalid_fields` | Global default for invalid field handling | -| `expand_env_vars` | Default env expansion mode for all sources | -| `secret_field_names` | Extra secret patterns for all sources | -| `mask_secrets` | Enable/disable masking globally | -| `nested_resolve_strategy` | Default priority for JSON vs flat keys across all sources. See [Nested Resolve](advanced/nested-resolve.md) | -| `nested_resolve` | Default per-field strategy overrides for all sources. 
See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy) | - ---- - ### `MergeStrategy` ```python diff --git a/docs/comparison/why-not-hydra.md b/docs/comparison/why-not-hydra.md index b5bf0f8..5e6b16b 100644 --- a/docs/comparison/why-not-hydra.md +++ b/docs/comparison/why-not-hydra.md @@ -13,7 +13,7 @@ The trade-off is scope: Hydra is a **framework** that takes over your entry poin | **Formats** | YAML only | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets | | **Env variables** | `oc.env` resolver; no `.env` support | First-class: env vars, `.env` files, `${VAR:-default}` expansion in all formats + file paths | | **CLI overrides** | Built-in: `python app.py db.port=3306` + tab completion | No CLI | -| **Composition** | Config groups, defaults list, package overrides | Multi-source `Merge` with explicit strategies | +| **Composition** | Config groups, defaults list, package overrides | Multi-source merge with explicit strategies | | **Parameter sweeps** | Built-in multirun + sweeper plugins (Ax, Optuna, etc.) | No — not a use case | | **Object instantiation** | `instantiate()` — creates objects from config with DI | No — config loading only | | **Variable interpolation** | OmegaConf `${path.to.key}` + custom resolvers | `${VAR:-default}` env expansion in all formats + file paths | diff --git a/docs/features/masking.md b/docs/features/masking.md index 1c63138..1d5d205 100644 --- a/docs/features/masking.md +++ b/docs/features/masking.md @@ -117,7 +117,7 @@ Control masking via `Source`: --8<-- "examples/docs/features/masking/masking_merge_mode.py:merge-mode" ``` -`Source.mask_secrets` overrides `Merge.mask_secrets` when not `None`. `secret_field_names` from both are combined. +`Source.mask_secrets` overrides the `mask_secrets` parameter of `dature.load()` when not `None`. `secret_field_names` from both source and load-level are combined. 
### Global diff --git a/docs/features/merging.md b/docs/features/merging.md index 23897b2..961dd5d 100644 --- a/docs/features/merging.md +++ b/docs/features/merging.md @@ -4,7 +4,7 @@ Load configuration from multiple sources and merge them into one dataclass. ## Basic Merging -Use `Merge` to combine sources: +Pass multiple `Source` objects to `dature.load()`: === "Python" @@ -24,9 +24,9 @@ Use `Merge` to combine sources: --8<-- "examples/docs/shared/common_overrides.yaml" ``` -## Tuple Shorthand +## Multiple Sources -Pass a tuple of `Source` directly — uses `LAST_WINS` by default: +Multiple sources use `LAST_WINS` by default: === "Python" @@ -147,15 +147,12 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc For per-field strategy overrides, see [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies). To enforce that related fields are always overridden together, see [Field Groups](../advanced/merge-rules.md#field-groups). -## Merge Reference +## Merge Parameters -```python ---8<-- "src/dature/metadata.py:merge-metadata" -``` +All merge-related parameters are passed directly to `dature.load()` as keyword arguments: | Parameter | Description | |-----------|-------------| -| `sources` | Tuple of `Source` descriptors — one per source to merge | | `strategy` | Global merge strategy. Default: `LAST_WINS`. See [Merge Strategies](#merge-strategies) | | `field_merges` | Per-field merge strategy overrides. See [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies) | | `field_groups` | Enforce related fields are overridden together. 
See [Field Groups](../advanced/merge-rules.md#field-groups) | diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index aa1c9b4..aa43109 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -17,20 +17,20 @@ class Config: # 1. Default config — debug is off, no report -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is None # 2. Enable debug globally via dature.configure() dature.configure(loading=LoadingConfig(debug=True)) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is not None # 3. Reset to defaults — debug is off again dature.configure(loading=LoadingConfig()) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is None diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index 4f56d00..49eea58 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -21,20 +21,20 @@ class Config: # 1. DATURE_LOADING__DEBUG=true — debug is on, report attached -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is not None # 2. 
Override env with dature.configure() — debug is off dature.configure(loading=LoadingConfig(debug=False)) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is None # 3. Reset to env defaults — debug is on again dature.configure(loading=LoadingConfig(debug=True)) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is not None diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index 5f85d2a..c70ec45 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -29,5 +29,5 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to every load() call dature.configure(type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),)) -config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), AppConfig) +config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), dataclass_=AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py index ea23344..31f9d43 100644 --- a/examples/docs/advanced/custom_types/custom_loader.py +++ b/examples/docs/advanced/custom_types/custom_loader.py @@ -45,7 +45,7 @@ class Config: file=SOURCES_DIR / "custom_loader.xml", loader=XmlLoader, ), - Config, + dataclass_=Config, ) assert config == Config(host="localhost", port=9090, debug=True) diff --git 
a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index f000fc2..4eb75d9 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -31,7 +31,7 @@ class AppConfig: file=SOURCES_DIR / "custom_type_common.yaml", type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), ), - AppConfig, + dataclass_=AppConfig, ) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index a6fe3e2..d0cb0bd 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -27,12 +27,10 @@ class AppConfig: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), - dature.Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), - type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), - ), - AppConfig, + dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), + dature.Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), + dataclass_=AppConfig, + type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), ) assert config == AppConfig(name="my-app", color=Rgb(r=100, g=200, b=50)) diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 7f51fe3..09c4d60 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -19,11 +19,9 @@ class Config: try: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), - ), - Config, + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Source(file=SOURCES_DIR / 
"advanced_debug_error_defaults.yaml"), + dataclass_=Config, debug=True, ) except DatureConfigError: diff --git a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py index db22ecb..e628157 100644 --- a/examples/docs/advanced/debug/advanced_debug_logging.py +++ b/examples/docs/advanced/debug/advanced_debug_logging.py @@ -24,11 +24,9 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, ) log_lines = [line for line in log_stream.getvalue().splitlines() if "[Config]" in line] diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py index 5c152d1..c1d1128 100644 --- a/examples/docs/advanced/debug/advanced_debug_report.py +++ b/examples/docs/advanced/debug/advanced_debug_report.py @@ -16,11 +16,9 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, debug=True, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py index 023d0ec..0f6b609 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py @@ -25,7 +25,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), - Config, + dataclass_=Config, ) assert config.simple == "https://api.example.com" diff --git 
a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py index 1f6e050..4d5f47e 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py @@ -20,7 +20,7 @@ class Config: config = dature.load( dature.Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), - Config, + dataclass_=Config, ) assert config.host == "prod.example.com" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py index 57d7e69..dd9ce7b 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), - Config, + dataclass_=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py index 5c7f893..6e1b502 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), - Config, + dataclass_=Config, ) assert config.host == "prod.example.com" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py index 3fd5f10..9ff14b2 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py +++ 
b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py @@ -22,13 +22,11 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), - expand_env_vars="default", # global default for all sources - ), - Config, + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), + dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), + dataclass_=Config, + expand_env_vars="default", # global default for all sources ) assert config.default_set_url == "https://api.example.com/api" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py index 38b5c03..4d0d5d5 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), - Config, + dataclass_=Config, ) assert config.resolved_url == "https://api.example.com/api/v1" diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 36de173..736ac92 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -27,12 +27,10 @@ class Config: # expands 
to (database.host, database.port, port) try: dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), - field_groups=(dature.FieldGroup(dature.F[Config].database, dature.F[Config].port),), - ), - Config, + dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), + dataclass_=Config, + field_groups=(dature.FieldGroup(dature.F[Config].database, dature.F[Config].port),), ) except FieldGroupError as exc: defaults_path = str(SOURCES_DIR / "field_groups_nested_defaults.yaml") diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index 6f339e7..688a9b8 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -22,15 +22,13 @@ class Config: try: dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), - field_groups=( - dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), - dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), - ), + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), + dataclass_=Config, + field_groups=( + dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), + dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), ), - Config, ) except FieldGroupError as exc: defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml") diff --git 
a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index f97a9a6..46efeb9 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -22,15 +22,13 @@ class Config: try: dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), - field_groups=( - dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), - dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), - ), + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), + dataclass_=Config, + field_groups=( + dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), + dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), ), - Config, ) except FieldGroupError as exc: defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml") diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index 4f1a3ec..84bc899 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -21,13 +21,11 @@ def merge_tags(values: list[Any]) -> list[str]: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.LAST_WINS, - field_merges=(dature.MergeRule(dature.F[Config].tags, merge_tags),), - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + 
strategy=dature.MergeStrategy.LAST_WINS, + field_merges=(dature.MergeRule(dature.F[Config].tags, merge_tags),), ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index 970985c..aefdafc 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -16,17 +16,15 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, - field_merges=( - dature.MergeRule(dature.F[Config].host, dature.FieldMergeStrategy.LAST_WINS), - dature.MergeRule(dature.F[Config].port, dature.FieldMergeStrategy.LAST_WINS), - dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE), - ), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, + field_merges=( + dature.MergeRule(dature.F[Config].host, dature.FieldMergeStrategy.LAST_WINS), + dature.MergeRule(dature.F[Config].port, dature.FieldMergeStrategy.LAST_WINS), + dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE), ), - Config, ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py index c46c4bb..b9de4bd 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / 
"merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND),), - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND),), ) assert config.tags == ["web", "default", "web", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index 3ff0509..ce0b30d 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE),), - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE),), ) assert config.tags == ["web", "default", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index 627553c..d9e6d9c 100644 --- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.FIRST_WINS),), - ), - 
Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.FIRST_WINS),), ) assert config.tags == ["web", "default"] diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/merge_rules/merging_field_groups.py index 7e69d35..c2bf029 100644 --- a/examples/docs/advanced/merge_rules/merging_field_groups.py +++ b/examples/docs/advanced/merge_rules/merging_field_groups.py @@ -18,12 +18,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), - field_groups=(dature.FieldGroup(dature.F[Config].host, dature.F[Config].port),), - ), - Config, + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), + dataclass_=Config, + field_groups=(dature.FieldGroup(dature.F[Config].host, dature.F[Config].port),), ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index 8714044..2f5e4b8 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.LAST_WINS),), - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + 
field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.LAST_WINS),), ) assert config.tags == ["web", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index 0632cbd..3991e33 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND),), - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND),), ) assert config.tags == ["web", "api", "web", "default"] diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index f10a659..d5d31b4 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -14,12 +14,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND_UNIQUE),), - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dataclass_=Config, + field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND_UNIQUE),), ) assert config.tags == ["web", "api", "default"] diff --git 
a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index b04e050..925366c 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -15,12 +15,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), - dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), - strategy=dature.MergeStrategy.FIRST_FOUND, - ), - Config, + dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), + dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.FIRST_FOUND, ) assert config.host == "production-host" diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py index 6c28363..a411de9 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py @@ -17,11 +17,9 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), + dataclass_=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py index 5ae1fb1..5edad78 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py @@ -17,19 +17,17 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global - 
dature.Source( - file=SOURCES_DIR / "optional.yaml", - skip_if_broken=True, - ), # always skip if broken - dature.Source( - file=SHARED_DIR / "common_overrides.yaml", - skip_if_broken=False, - ), # never skip, even if global is True - skip_broken_sources=True, # global default - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global + dature.Source( + file=SOURCES_DIR / "optional.yaml", + skip_if_broken=True, + ), # always skip if broken + dature.Source( + file=SHARED_DIR / "common_overrides.yaml", + skip_if_broken=False, + ), # never skip, even if global is True + dataclass_=Config, + skip_broken_sources=True, # global default ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py index 8416c54..124e9e1 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), - Config, + dataclass_=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py index b015586..658ab5b 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py @@ -16,14 +16,12 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), - dature.Source( - file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", - skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout), - ), + dature.Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), + dature.Source( + file=SOURCES_DIR / 
"merging_skip_invalid_per_field_overrides.yaml", + skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout), ), - Config, + dataclass_=Config, ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py index ad84ee7..1de18a7 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py @@ -31,7 +31,7 @@ class Config: loader=DockerSecretsLoader, nested_resolve_strategy="json", ), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py index abdba11..89fa02c 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py @@ -27,7 +27,7 @@ class Config: prefix="APP__", nested_resolve_strategy="json", ), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py index b1685b7..589b39b 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py @@ -24,7 +24,7 @@ class Config: config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), - Config, + dataclass_=Config, ) assert config.database.host == "flat-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py index de2399a..89ee347 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py +++ 
b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py @@ -24,7 +24,7 @@ class Config: config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py index 4fa6581..ee079bb 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py @@ -28,7 +28,7 @@ class Config: # Even with strategy="flat", JSON is parsed because there are no flat keys config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_override.py b/examples/docs/advanced/nested_resolve/nested_resolve_override.py index ee8bd89..3efc171 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_override.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_override.py @@ -40,7 +40,7 @@ class Config: nested_resolve_strategy="flat", nested_resolve={"json": (dature.F[Config].database,)}, ), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" # per-field override wins diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py index 3be0f92..40efb1c 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py @@ -42,7 +42,7 @@ class Config: "flat": (dature.F[Config].cache,), }, ), - Config, + dataclass_=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py 
b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py index dc85a3a..7514e20 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py @@ -23,7 +23,7 @@ class Config: # Without nested_resolve_strategy, flat keys win by default -config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), Config) +config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), dataclass_=Config) assert config.database.host == "flat-host" assert config.database.port == 3306 diff --git a/examples/docs/api_reference/api_reference_function_mode.py b/examples/docs/api_reference/api_reference_function_mode.py index 8505e84..b8ed56e 100644 --- a/examples/docs/api_reference/api_reference_function_mode.py +++ b/examples/docs/api_reference/api_reference_function_mode.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py index e1ed8b9..66ac1fa 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), dataclass_=Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index 4b4942e..ce34e34 100644 --- 
a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -16,12 +16,10 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), - strategy=dature.MergeStrategy.LAST_WINS, - ), - Config, + dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), + dataclass_=Config, + strategy=dature.MergeStrategy.LAST_WINS, ) # --8<-- [end:merge] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index 1a1f9dc..b1c2844 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -35,7 +35,7 @@ def check_debug_port(config: Config) -> bool: ), ), ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_root_validators_invalid.toml") diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index 2710822..1151358 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -20,7 +20,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), dataclass_=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py 
b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py index b2ebd40..adad32c 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py +++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py @@ -15,7 +15,7 @@ class Config: # --8<-- [start:dataclass] -config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), dataclass_=Config) assert isinstance(config, Config) # Full IDE support, type safety, __post_init__ works # --8<-- [end:dataclass] diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py b/examples/docs/comparison/why-not-hydra/hydra_merge.py index feff9c6..23ceecf 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_merge.py +++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py @@ -16,12 +16,10 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), - dature.Source(prefix="APP_"), - ), - Config, + dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), + dature.Source(prefix="APP_"), + dataclass_=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index d57744d..af6d288 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -19,7 +19,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), dataclass_=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "hydra_validators_invalid.yaml") assert str(exc) == "Config loading errors (1)" 
diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py index 4ed4c73..bc61c0c 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py @@ -16,9 +16,9 @@ class Config: # --8<-- [start:auto-detect] # Just change the file — dature picks the right loader -yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config) -toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config) -json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config) +yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), dataclass_=Config) +toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), dataclass_=Config) +json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), dataclass_=Config) # --8<-- [end:auto-detect] assert yaml_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py index 9e73e3e..7c01174 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), dataclass_=Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git 
a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py index 4e8dd24..3ad214b 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py @@ -16,12 +16,10 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), - dature.Source(prefix="APP_"), - ), - Config, + dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), + dature.Source(prefix="APP_"), + dataclass_=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index 1489de1..7f4ffd9 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -17,7 +17,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), dataclass_=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_by_name.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py index 0b44eab..e74ee72 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -22,7 +22,7 @@ class Config: host: str -config = dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / 
"masking_by_name.yaml"), dataclass_=Config) assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index 8564745..f113e9c 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -19,7 +19,7 @@ class Config: try: dature.load( dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_heuristic.yaml") diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index ef75696..2fd000c 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -21,14 +21,12 @@ class Config: # --8<-- [start:merge-mode] try: dature.load( - dature.Merge( - dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), - dature.Source( - file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", - secret_field_names=("api_key",), - ), + dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), + dature.Source( + file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", + secret_field_names=("api_key",), ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_merge_mode_secrets.yaml") diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index cc7652a..57fb745 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -24,7 +24,7 @@ class Config: file=SOURCES_DIR / "masking_per_source.yaml", mask_secrets=False, ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_per_source.yaml") diff --git 
a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index f5a31e4..0f76244 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -24,7 +24,7 @@ class Config: file=SOURCES_DIR / "masking_per_source.yaml", secret_field_names=("api_key",), ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_per_source.yaml") diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index ead86bb..9de1abc 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -21,7 +21,7 @@ class Config: try: config = dature.load( dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_secret_str.yaml") diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index 7c6de7b..3a703e9 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -16,12 +16,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.LAST_WINS, - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.LAST_WINS, ) assert config.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index e808ad6..beed050 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py 
@@ -16,21 +16,17 @@ class Config: last_wins = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.LAST_WINS, - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.LAST_WINS, ) first_wins = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.FIRST_WINS, - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.FIRST_WINS, ) assert last_wins.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index 1db5873..8118d6b 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -16,13 +16,11 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "nonexistent.yaml"), - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.FIRST_FOUND, - ), - Config, + dature.Source(file=SHARED_DIR / "nonexistent.yaml"), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.FIRST_FOUND, ) # nonexistent.yaml is skipped, common_defaults.yaml is used entirely diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index aa6f6e8..69e24c1 100644 --- 
a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -16,12 +16,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.FIRST_WINS, - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.FIRST_WINS, ) assert config.host == "localhost" diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py index 89d59ea..1c27d36 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -16,12 +16,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - strategy=dature.MergeStrategy.LAST_WINS, - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.LAST_WINS, ) assert config.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 035b348..19e2f45 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -16,12 +16,10 @@ class Config: config = dature.load( - dature.Merge( - dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), - dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), - strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + 
dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), + dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), + dataclass_=Config, + strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, ) # Disjoint keys — no conflict diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py index fd6c9fa..0de73a8 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand.py +++ b/examples/docs/features/merging/merging_tuple_shorthand.py @@ -1,4 +1,4 @@ -"""Tuple shorthand — implicit LAST_WINS merge.""" +"""Multiple sources — implicit LAST_WINS merge.""" from dataclasses import dataclass from pathlib import Path @@ -16,11 +16,9 @@ class Config: config = dature.load( - ( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - ), - Config, + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dataclass_=Config, ) assert config.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py index 98f0c76..6624bc1 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py +++ b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py @@ -1,4 +1,4 @@ -"""Tuple shorthand as a decorator — implicit LAST_WINS merge.""" +"""Multiple sources as a decorator — implicit LAST_WINS merge.""" import os from dataclasses import dataclass @@ -12,10 +12,8 @@ @dature.load( - ( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(prefix="APP_"), - ), + dature.Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Source(prefix="APP_"), ) @dataclass class Config: diff --git a/examples/docs/features/naming/naming_field_mapping.py b/examples/docs/features/naming/naming_field_mapping.py index 
ab25a87..27c3885 100644 --- a/examples/docs/features/naming/naming_field_mapping.py +++ b/examples/docs/features/naming/naming_field_mapping.py @@ -24,7 +24,7 @@ class DbConfig: dature.F[DbConfig].pool_size: "pool", }, ), - DbConfig, + dataclass_=DbConfig, ) assert config.database_url == "postgresql://localhost:5432/mydb" diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py index ec0de2c..7fcac77 100644 --- a/examples/docs/features/naming/naming_name_style.py +++ b/examples/docs/features/naming/naming_name_style.py @@ -18,7 +18,7 @@ class ApiConfig: config = dature.load( dature.Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), - ApiConfig, + dataclass_=ApiConfig, ) assert config.user_name == "admin" diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py index 79c109b..4b4bd0a 100644 --- a/examples/docs/features/naming/naming_nested_fields.py +++ b/examples/docs/features/naming/naming_nested_fields.py @@ -30,7 +30,7 @@ class User: dature.F[Address].street: "streetName", }, ), - User, + dataclass_=User, ) assert config.name == "Alice" diff --git a/examples/docs/features/naming/naming_prefix.py b/examples/docs/features/naming/naming_prefix.py index 46b26d1..ea02ff7 100644 --- a/examples/docs/features/naming/naming_prefix.py +++ b/examples/docs/features/naming/naming_prefix.py @@ -17,7 +17,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(prefix="MYAPP_"), Config) +config = dature.load(dature.Source(prefix="MYAPP_"), dataclass_=Config) assert config.host == "localhost" assert config.port == 9090 diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py index 36facf8..912c54d 100644 --- a/examples/docs/features/naming/naming_prefix_nested.py +++ b/examples/docs/features/naming/naming_prefix_nested.py @@ -14,7 +14,10 @@ 
class Database: port: int -db = dature.load(dature.Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database) +db = dature.load( + dature.Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), + dataclass_=Database, +) assert db.host == "localhost" assert db.port == 5432 diff --git a/examples/docs/features/naming/naming_split_symbols.py b/examples/docs/features/naming/naming_split_symbols.py index a287e26..150d931 100644 --- a/examples/docs/features/naming/naming_split_symbols.py +++ b/examples/docs/features/naming/naming_split_symbols.py @@ -20,7 +20,7 @@ class Config: db: Database -config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), Config) +config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), dataclass_=Config) assert config.db.host == "localhost" assert config.db.port == 5432 diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 24fcd80..811935e 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -25,6 +25,6 @@ def address(self) -> str: try: - dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config) + dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), dataclass_=Config) except ValueError as exc: assert str(exc) == "port must be between 1 and 65535, got 99999" diff --git a/examples/docs/index/intro_function.py b/examples/docs/index/intro_function.py index bccb2eb..3082b92 100644 --- a/examples/docs/index/intro_function.py +++ b/examples/docs/index/intro_function.py @@ -17,7 +17,7 @@ class AppConfig: debug: bool = False -config = dature.load(dature.Source(prefix="APP_"), AppConfig) +config = dature.load(dature.Source(prefix="APP_"), dataclass_=AppConfig) assert config.host == "0.0.0.0" assert config.port == 8080 diff --git 
a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py index e5c49f5..dff867d 100644 --- a/examples/docs/introduction/format_docker.py +++ b/examples/docs/introduction/format_docker.py @@ -17,7 +17,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "intro_app_docker_secrets"), - Config, + dataclass_=Config, ) assert config.host == "localhost" diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py index 828aa19..4c75b1c 100644 --- a/examples/docs/introduction/format_env.py +++ b/examples/docs/introduction/format_env.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py index 6998819..e93449a 100644 --- a/examples/docs/introduction/format_ini.py +++ b/examples/docs/introduction/format_ini.py @@ -17,7 +17,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), - Config, + dataclass_=Config, ) assert config.host == "localhost" diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py index 7884d3b..ee7037c 100644 --- a/examples/docs/introduction/format_json.py +++ b/examples/docs/introduction/format_json.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_json5.py b/examples/docs/introduction/format_json5.py index 7c153ca..d250a9a 100644 --- 
a/examples/docs/introduction/format_json5.py +++ b/examples/docs/introduction/format_json5.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py index 432b1a9..bc31bba 100644 --- a/examples/docs/introduction/format_toml.py +++ b/examples/docs/introduction/format_toml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py index d564a3c..3d9c1c4 100644 --- a/examples/docs/introduction/format_yaml.py +++ b/examples/docs/introduction/format_yaml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py index 66913a5..4e3d63e 100644 --- a/examples/docs/introduction/intro_file_like.py +++ b/examples/docs/introduction/intro_file_like.py @@ -16,14 +16,14 @@ class Config: # From StringIO text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}') -config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), Config) +config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), dataclass_=Config) assert config.host == "localhost" assert 
config.port == 8080 # From BytesIO binary_stream = BytesIO(b'{"host": "0.0.0.0", "port": 3000}') -config = dature.load(dature.Source(file=binary_stream, loader=JsonLoader), Config) +config = dature.load(dature.Source(file=binary_stream, loader=JsonLoader), dataclass_=Config) assert config.host == "0.0.0.0" assert config.port == 3000 diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index e72728a..7539742 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -27,6 +27,6 @@ } for meta in FORMATS.values(): - config = dature.load(meta, AllPythonTypesCompact) + config = dature.load(meta, dataclass_=AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value == 42 diff --git a/src/dature/__init__.py b/src/dature/__init__.py index 874a077..09ee17b 100644 --- a/src/dature/__init__.py +++ b/src/dature/__init__.py @@ -3,13 +3,12 @@ from dature.field_path import F from dature.load_report import get_load_report from dature.main import load -from dature.metadata import FieldGroup, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, TypeLoader +from dature.metadata import FieldGroup, FieldMergeStrategy, MergeRule, MergeStrategy, Source, TypeLoader __all__ = [ "F", "FieldGroup", "FieldMergeStrategy", - "Merge", "MergeRule", "MergeStrategy", "Source", diff --git a/src/dature/config.py b/src/dature/config.py index e107f71..bc76cf5 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -68,7 +68,7 @@ def _load_config() -> DatureConfig: from dature.main import load # noqa: PLC0415 from dature.metadata import Source # noqa: PLC0415 - return load(Source(prefix="DATURE_"), DatureConfig) + return load(Source(prefix="DATURE_"), dataclass_=DatureConfig) class _ConfigProxy: diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py index 6172baf..e013db6 100644 --- a/src/dature/loading/multi.py +++ b/src/dature/loading/multi.py @@ -29,20 +29,20 @@ from dature.merging.deep_merge 
import deep_merge, deep_merge_last_wins, raise_on_conflict from dature.merging.field_group import FieldGroupContext, validate_field_groups from dature.merging.predicate import ResolvedFieldGroup, build_field_group_paths, build_field_merge_map -from dature.metadata import FieldMergeStrategy, Merge, MergeStrategy, Source, TypeLoader +from dature.metadata import FieldMergeStrategy, MergeStrategy, Source, TypeLoader, _MergeConfig from dature.protocols import DataclassInstance, LoaderProtocol from dature.types import FieldMergeCallable, JSONValue logger = logging.getLogger("dature") -def _resolve_merge_mask_secrets(merge_meta: Merge) -> bool: +def _resolve_merge_mask_secrets(merge_meta: _MergeConfig) -> bool: if merge_meta.mask_secrets is not None: return merge_meta.mask_secrets return config.masking.mask_secrets -def _collect_extra_secret_patterns(merge_meta: Merge) -> tuple[str, ...]: +def _collect_extra_secret_patterns(merge_meta: _MergeConfig) -> tuple[str, ...]: merge_names = merge_meta.secret_field_names or () source_names: list[str] = [] for source_meta in merge_meta.sources: @@ -263,7 +263,7 @@ class _MergedData[T: DataclassInstance]: def _load_and_merge[T: DataclassInstance]( # noqa: C901 *, - merge_meta: Merge, + merge_meta: _MergeConfig, dataclass_: type[T], loaders: tuple[LoaderProtocol, ...] | None = None, debug: bool = False, @@ -380,7 +380,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 def merge_load_as_function[T: DataclassInstance]( - merge_meta: Merge, + merge_meta: _MergeConfig, dataclass_: type[T], *, debug: bool, @@ -427,7 +427,7 @@ class _MergePatchContext: def __init__( self, *, - merge_meta: Merge, + merge_meta: _MergeConfig, cls: type[DataclassInstance], cache: bool, debug: bool, @@ -468,7 +468,7 @@ def __init__( @staticmethod def _prepare_loaders( *, - merge_meta: Merge, + merge_meta: _MergeConfig, cls: type[DataclassInstance], type_loaders: tuple[TypeLoader, ...] 
= (), ) -> tuple[LoaderProtocol, ...]: @@ -543,7 +543,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq def merge_make_decorator( - merge_meta: Merge, + merge_meta: _MergeConfig, *, cache: bool, debug: bool, diff --git a/src/dature/loading/resolver.py b/src/dature/loading/resolver.py index ef33f41..4188a7a 100644 --- a/src/dature/loading/resolver.py +++ b/src/dature/loading/resolver.py @@ -113,7 +113,7 @@ def resolve_loader( ) -> "LoaderProtocol": loader_class = resolve_loader_class(metadata.loader, metadata.file) - resolved_expand = expand_env_vars or metadata.expand_env_vars or "default" + resolved_expand = metadata.expand_env_vars or expand_env_vars or "default" kwargs: dict[str, Any] = { "prefix": metadata.prefix, diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index c4ed299..ace5aea 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -11,7 +11,7 @@ from dature.loading.context import apply_skip_invalid, build_error_ctx from dature.loading.resolver import resolve_loader, resolve_loader_class from dature.masking.masking import mask_json_value -from dature.metadata import Merge, MergeStrategy, Source, TypeLoader +from dature.metadata import MergeStrategy, Source, TypeLoader, _MergeConfig from dature.protocols import DataclassInstance, LoaderProtocol from dature.skip_field_provider import FilterResult from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue, LoadRawResult @@ -32,7 +32,7 @@ def resolve_loader_for_source( return resolve_loader(source_meta, expand_env_vars=expand_env_vars, type_loaders=type_loaders) -def should_skip_broken(source_meta: Source, merge_meta: Merge) -> bool: +def should_skip_broken(source_meta: Source, merge_meta: _MergeConfig) -> bool: if source_meta.skip_if_broken is not None: if source_meta.file is None: logger.warning( @@ -42,7 +42,7 @@ def should_skip_broken(source_meta: Source, 
merge_meta: Merge) -> bool: return merge_meta.skip_broken_sources -def resolve_expand_env_vars(source_meta: Source, merge_meta: Merge) -> ExpandEnvVarsMode: +def resolve_expand_env_vars(source_meta: Source, merge_meta: _MergeConfig) -> ExpandEnvVarsMode: if source_meta.expand_env_vars is not None: return source_meta.expand_env_vars return merge_meta.expand_env_vars @@ -50,14 +50,14 @@ def resolve_expand_env_vars(source_meta: Source, merge_meta: Merge) -> ExpandEnv def resolve_skip_invalid( source_meta: Source, - merge_meta: Merge, + merge_meta: _MergeConfig, ) -> bool | tuple[FieldPath, ...]: if source_meta.skip_if_invalid is not None: return source_meta.skip_if_invalid return merge_meta.skip_invalid_fields -def resolve_mask_secrets(source_meta: Source, merge_meta: Merge) -> bool: +def resolve_mask_secrets(source_meta: Source, merge_meta: _MergeConfig) -> bool: if source_meta.mask_secrets is not None: return source_meta.mask_secrets if merge_meta.mask_secrets is not None: @@ -65,7 +65,7 @@ def resolve_mask_secrets(source_meta: Source, merge_meta: Merge) -> bool: return config.masking.mask_secrets -def resolve_secret_field_names(source_meta: Source, merge_meta: Merge) -> tuple[str, ...]: +def resolve_secret_field_names(source_meta: Source, merge_meta: _MergeConfig) -> tuple[str, ...]: source_names = source_meta.secret_field_names or () merge_names = merge_meta.secret_field_names or () return source_names + merge_names @@ -75,7 +75,7 @@ def apply_merge_skip_invalid( *, raw: JSONValue, source_meta: Source, - merge_meta: Merge, + merge_meta: _MergeConfig, loader_instance: LoaderProtocol, dataclass_: type[DataclassInstance], source_index: int, @@ -117,7 +117,7 @@ class LoadedSources: def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 *, - merge_meta: Merge, + merge_meta: _MergeConfig, dataclass_name: str, dataclass_: type[DataclassInstance], loaders: tuple[LoaderProtocol, ...] 
| None = None, @@ -270,7 +270,7 @@ def _load_raw( if merge_meta.sources: msg = f"All {len(merge_meta.sources)} source(s) failed to load" else: - msg = "Merge.sources must not be empty" + msg = "load() requires at least one Source for merge" source_error = SourceLoadError(message=msg) raise DatureConfigError(dataclass_name, [source_error]) diff --git a/src/dature/main.py b/src/dature/main.py index b60bb1c..4447a98 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -6,40 +6,74 @@ from dature.loading.multi import merge_load_as_function, merge_make_decorator from dature.loading.resolver import resolve_loader from dature.loading.single import load_as_function, make_decorator -from dature.metadata import Merge, Source +from dature.metadata import ( + FieldGroup, + MergeRule, + MergeStrategy, + Source, + TypeLoader, + _MergeConfig, +) from dature.protocols import DataclassInstance -from dature.types import FILE_LIKE_TYPES, FileOrStream +from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, NestedResolve, NestedResolveStrategy @overload def load[T]( - metadata: Source | Merge | tuple[Source, ...] | None, - /, + *sources: Source, dataclass_: type[T], - *, debug: bool | None = None, + strategy: MergeStrategy = MergeStrategy.LAST_WINS, + field_merges: tuple[MergeRule, ...] = (), + field_groups: tuple[FieldGroup, ...] = (), + skip_broken_sources: bool = False, + skip_invalid_fields: bool = False, + expand_env_vars: ExpandEnvVarsMode | None = None, + secret_field_names: tuple[str, ...] | None = None, + mask_secrets: bool | None = None, + type_loaders: tuple[TypeLoader, ...] | None = None, + nested_resolve_strategy: NestedResolveStrategy | None = None, + nested_resolve: NestedResolve | None = None, ) -> T: ... @overload def load( - metadata: Source | Merge | tuple[Source, ...] 
| None = None, - /, + *sources: Source, dataclass_: None = None, - *, cache: bool | None = None, debug: bool | None = None, + strategy: MergeStrategy = MergeStrategy.LAST_WINS, + field_merges: tuple[MergeRule, ...] = (), + field_groups: tuple[FieldGroup, ...] = (), + skip_broken_sources: bool = False, + skip_invalid_fields: bool = False, + expand_env_vars: ExpandEnvVarsMode | None = None, + secret_field_names: tuple[str, ...] | None = None, + mask_secrets: bool | None = None, + type_loaders: tuple[TypeLoader, ...] | None = None, + nested_resolve_strategy: NestedResolveStrategy | None = None, + nested_resolve: NestedResolve | None = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: ... # --8<-- [start:load] -def load( - metadata: Source | Merge | tuple[Source, ...] | None = None, - /, +def load( # noqa: PLR0913 + *sources: Source, dataclass_: type[Any] | None = None, - *, cache: bool | None = None, debug: bool | None = None, + strategy: MergeStrategy = MergeStrategy.LAST_WINS, + field_merges: tuple[MergeRule, ...] = (), + field_groups: tuple[FieldGroup, ...] = (), + skip_broken_sources: bool = False, + skip_invalid_fields: bool = False, + expand_env_vars: ExpandEnvVarsMode | None = None, + secret_field_names: tuple[str, ...] | None = None, + mask_secrets: bool | None = None, + type_loaders: tuple[TypeLoader, ...] 
| None = None, + nested_resolve_strategy: NestedResolveStrategy | None = None, + nested_resolve: NestedResolve | None = None, ) -> Any: # --8<-- [end:load] if cache is None: @@ -47,23 +81,39 @@ def load( if debug is None: debug = config.loading.debug - if isinstance(metadata, tuple): - metadata = Merge(*metadata) - - if isinstance(metadata, Merge): - merge_type_loaders = (metadata.type_loaders or ()) + config.type_loaders + if len(sources) > 1: + merge_meta = _MergeConfig( + sources=sources, + strategy=strategy, + field_merges=field_merges, + field_groups=field_groups, + skip_broken_sources=skip_broken_sources, + skip_invalid_fields=skip_invalid_fields, + expand_env_vars=expand_env_vars or "default", + secret_field_names=secret_field_names, + mask_secrets=mask_secrets, + type_loaders=type_loaders, + nested_resolve_strategy=nested_resolve_strategy, + nested_resolve=nested_resolve, + ) + merge_type_loaders = (merge_meta.type_loaders or ()) + config.type_loaders if dataclass_ is not None: - return merge_load_as_function(metadata, dataclass_, debug=debug, type_loaders=merge_type_loaders) - return merge_make_decorator(metadata, cache=cache, debug=debug, type_loaders=merge_type_loaders) + return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders) + return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders) + + if not sources: + msg = "load() requires at least one Source" + raise TypeError(msg) - if metadata is None: - metadata = Source() + metadata = sources[0] - type_loaders = (metadata.type_loaders or ()) + config.type_loaders + source_type_loaders = (metadata.type_loaders or ()) + (type_loaders or ()) + config.type_loaders loader_instance = resolve_loader( metadata, - type_loaders=type_loaders, - nested_resolve_strategy=config.loading.nested_resolve_strategy, + expand_env_vars=expand_env_vars, + type_loaders=source_type_loaders, + nested_resolve_strategy=nested_resolve_strategy or 
config.loading.nested_resolve_strategy, + nested_resolve=nested_resolve, ) fileor_path: FileOrStream diff --git a/src/dature/metadata.py b/src/dature/metadata.py index d5e7cf0..7bc4bf6 100644 --- a/src/dature/metadata.py +++ b/src/dature/metadata.py @@ -116,9 +116,8 @@ def __init__(self, *fields: "FieldPath") -> None: # --8<-- [end:field-group] -# --8<-- [start:merge-metadata] -@dataclass(slots=True) -class Merge: +@dataclass(slots=True, kw_only=True) +class _MergeConfig: sources: tuple[Source, ...] strategy: MergeStrategy = MergeStrategy.LAST_WINS field_merges: tuple[MergeRule, ...] = () @@ -131,38 +130,3 @@ class Merge: type_loaders: "tuple[TypeLoader, ...] | None" = None nested_resolve_strategy: "NestedResolveStrategy | None" = None nested_resolve: "NestedResolve | None" = None - - def __init__( # noqa: PLR0913 - self, - *sources: Source, - strategy: MergeStrategy = MergeStrategy.LAST_WINS, - field_merges: tuple[MergeRule, ...] = (), - field_groups: tuple[FieldGroup, ...] = (), - skip_broken_sources: bool = False, - skip_invalid_fields: bool = False, - expand_env_vars: "ExpandEnvVarsMode" = "default", - secret_field_names: tuple[str, ...] | None = None, - mask_secrets: bool | None = None, - type_loaders: "tuple[TypeLoader, ...] 
| None" = None, - nested_resolve_strategy: "NestedResolveStrategy | None" = None, - nested_resolve: "NestedResolve | None" = None, - ) -> None: - if not sources: - msg = "Merge() requires at least one Source" - raise TypeError(msg) - - self.sources = sources - self.strategy = strategy - self.field_merges = field_merges - self.field_groups = field_groups - self.skip_broken_sources = skip_broken_sources - self.skip_invalid_fields = skip_invalid_fields - self.expand_env_vars = expand_env_vars - self.secret_field_names = secret_field_names - self.mask_secrets = mask_secrets - self.type_loaders = type_loaders - self.nested_resolve_strategy = nested_resolve_strategy - self.nested_resolve = nested_resolve - - -# --8<-- [end:merge-metadata] diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py index 279f3d0..2180616 100644 --- a/tests/errors/test_exceptions.py +++ b/tests/errors/test_exceptions.py @@ -195,7 +195,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -217,7 +217,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 2 @@ -252,7 +252,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -301,7 +301,7 @@ class Config: metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -329,7 +329,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, 
Config) + load(metadata, dataclass_=Config) err = exc_info.value first = err.exceptions[0] @@ -622,7 +622,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -646,7 +646,7 @@ class Config: metadata = Source(file=yaml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -669,7 +669,7 @@ class Config: metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -692,7 +692,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -715,7 +715,7 @@ class Config: product: list[Product] metadata = Source(file=array_of_tables_toml_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result == Config( product=[ @@ -738,7 +738,7 @@ class Config: metadata = Source(file=array_of_tables_error_first_toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -763,7 +763,7 @@ class Config: metadata = Source(file=array_of_tables_error_last_toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index 58d4f66..774a26b 100644 --- a/tests/errors/test_fixtures.py +++ 
b/tests/errors/test_fixtures.py @@ -101,7 +101,7 @@ def test_load_error_types( metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, LoadErrorConfig) + load(metadata, dataclass_=LoadErrorConfig) err = exc_info.value assert str(err) == f"LoadErrorConfig loading errors ({len(EXPECTED_LOAD_ERRORS)})" @@ -116,7 +116,7 @@ def test_validation_error_types( metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, ValidationErrorConfig) + load(metadata, dataclass_=ValidationErrorConfig) err = exc_info.value assert str(err) == f"ValidationErrorConfig loading errors ({len(EXPECTED_VALIDATION_ERRORS)})" diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 3c16309..5e70d5c 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -5,7 +5,7 @@ import pytest -from dature import FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load +from dature import FieldMergeStrategy, MergeRule, MergeStrategy, Source, load from dature.errors.exceptions import MergeConflictError from dature.field_path import F @@ -24,13 +24,11 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - strategy=MergeStrategy.LAST_WINS, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + strategy=MergeStrategy.LAST_WINS, + field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ) assert result.host == "default-host" @@ -49,13 +47,11 @@ class Config: port: int result = load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_WINS, - field_merges=(MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),), - ), - Config, + Source(file=first), + 
Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_WINS, + field_merges=(MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),), ) assert result.host == "first-host" @@ -74,12 +70,10 @@ class Config: name: str result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), ) assert result.tags == ["a", "b", "c", "d"] @@ -97,12 +91,10 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), ) assert result.tags == ["a", "b", "c", "d"] @@ -119,12 +111,10 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),), ) assert result.tags == ["c", "d", "a", "b"] @@ -141,12 +131,10 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), ) assert result.tags == ["b", "c", "d", "a"] @@ -168,12 +156,10 @@ class Config: database: Database result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - 
field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.FIRST_WINS),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.FIRST_WINS),), ) assert result.database.host == "localhost" @@ -192,12 +178,10 @@ class Config: with pytest.raises(TypeError, match="APPEND strategy requires both values to be lists"): load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].value, FieldMergeStrategy.APPEND),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, FieldMergeStrategy.APPEND),), ) def test_multiple_merge_rules(self, tmp_path: Path): @@ -214,16 +198,14 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - strategy=MergeStrategy.LAST_WINS, - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), - ), + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + strategy=MergeStrategy.LAST_WINS, + field_merges=( + MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), + MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), ), - Config, ) assert result.host == "default-host" @@ -243,12 +225,10 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(), ) assert result.host == "localhost" @@ -263,7 +243,7 @@ def test_decorator_with_field_merges(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "override-host", "port": 9090, "tags": ["b"]}') - meta = Merge( + @load( Source(file=defaults), Source(file=overrides), field_merges=( @@ -271,8 +251,6 @@ def 
test_decorator_with_field_merges(self, tmp_path: Path): MergeRule(F["Config"].tags, FieldMergeStrategy.APPEND), ), ) - - @load(meta) @dataclass class Config: host: str @@ -299,13 +277,11 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), ) assert result.host == "host-b" @@ -324,13 +300,11 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ) assert result.host == "host-a" @@ -350,13 +324,11 @@ class Config: with pytest.raises(MergeConflictError): load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), ) def test_nested_field_merge_suppresses_conflict(self, tmp_path: Path): @@ -376,13 +348,11 @@ class Config: name: str result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=(MergeRule(F[Config].database.host, 
FieldMergeStrategy.LAST_WINS),), ) assert result.database.host == "host-b" @@ -401,16 +371,14 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].port, max), - ), + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=( + MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), + MergeRule(F[Config].port, max), ), - Config, ) assert result.host == "host-a" @@ -461,12 +429,10 @@ class Config: with pytest.raises(TypeError, match=match): load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) @pytest.mark.parametrize( @@ -512,12 +478,10 @@ class Config: with pytest.raises(TypeError, match=match): load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) @pytest.mark.parametrize( @@ -553,12 +517,10 @@ class Config: with pytest.raises(TypeError, match=match): load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) @pytest.mark.parametrize( @@ -585,12 +547,10 @@ class Config: value: list[int] result = load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) assert result.value == expected @@ -620,12 +580,10 @@ class Config: with 
pytest.raises(TypeError, match=match): load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) @pytest.mark.parametrize( @@ -653,12 +611,10 @@ class Config: with pytest.raises(TypeError, match=match): load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].value, strategy),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].value, strategy),), ) def test_field_merge_on_missing_key_in_one_source(self, tmp_path: Path): @@ -674,12 +630,10 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), ) assert result.host == "localhost" @@ -700,13 +654,11 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), ) assert result.tags == ["a", "b", "c"] @@ -726,13 +678,11 @@ class Config: priority: int result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].priority, max),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].priority, max),), ) assert result.priority == 15 @@ -752,13 +702,11 @@ class Config: priority: int result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].priority, 
min),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].priority, min),), ) assert result.priority == 5 @@ -782,15 +730,13 @@ class Config: inner: Inner result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=( - MergeRule(F[Config].user_name, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].inner.user_name, FieldMergeStrategy.LAST_WINS), - ), + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=( + MergeRule(F[Config].user_name, FieldMergeStrategy.FIRST_WINS), + MergeRule(F[Config].inner.user_name, FieldMergeStrategy.LAST_WINS), ), - Config, ) assert result.user_name == "root-first" @@ -813,15 +759,13 @@ class Config: inner: Inner result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=( - MergeRule(F[Config].user_name, FieldMergeStrategy.LAST_WINS), - MergeRule(F[Config].inner.user_name, FieldMergeStrategy.FIRST_WINS), - ), + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=( + MergeRule(F[Config].user_name, FieldMergeStrategy.LAST_WINS), + MergeRule(F[Config].inner.user_name, FieldMergeStrategy.FIRST_WINS), ), - Config, ) assert result.user_name == "root-second" @@ -841,12 +785,10 @@ class Config: score: int result = load( - Merge( - Source(file=a), - Source(file=b), - field_merges=(MergeRule(F[Config].score, sum),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=(MergeRule(F[Config].score, sum),), ) assert result.score == 30 @@ -866,13 +808,11 @@ class Config: score: int result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].score, sum),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].score, sum),), ) assert result.score == 30 @@ -892,13 +832,11 @@ class Config: 
weight: float result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].weight, lambda vals: sum(vals) / len(vals)),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].weight, lambda vals: sum(vals) / len(vals)),), ) assert result.weight == 6.0 @@ -918,13 +856,11 @@ class Config: priority: int result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].priority, max),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].priority, max),), ) assert result.priority == 15 @@ -948,13 +884,11 @@ class Config: database: Database result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].database.port, max),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].database.port, max),), ) assert result.database.port == 7000 @@ -968,11 +902,9 @@ class Config: score: int result = load( - Merge( - Source(file=a), - field_merges=(MergeRule(F[Config].score, sum),), - ), - Config, + Source(file=a), + dataclass_=Config, + field_merges=(MergeRule(F[Config].score, sum),), ) assert result.score == 42 @@ -990,13 +922,11 @@ class Config: name: str result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].score, sum),), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_merges=(MergeRule(F[Config].score, sum),), ) assert result.score == 30 @@ -1016,16 +946,14 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=a), - Source(file=b), - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - 
MergeRule(F[Config].score, sum), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), - ), + Source(file=a), + Source(file=b), + dataclass_=Config, + field_merges=( + MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), + MergeRule(F[Config].score, sum), + MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), ), - Config, ) assert result.host == "host-a" @@ -1048,13 +976,11 @@ class Config: name: str result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_merges=(MergeRule(F[Config].score, sum),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_merges=(MergeRule(F[Config].score, sum),), ) assert result.score == 30 diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index a966692..f586ff3 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -8,7 +8,7 @@ import pytest -from dature import Merge, MergeStrategy, Source, load +from dature import MergeStrategy, Source, load from dature.errors.exceptions import DatureConfigError, MergeConflictError from dature.validators.number import Ge @@ -27,11 +27,9 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, ) assert result.host == "localhost" @@ -50,12 +48,10 @@ class Config: port: int result = load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_WINS, - ), - Config, + Source(file=first), + Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_WINS, ) assert result.host == "first-host" @@ -74,11 +70,9 @@ class Config: port: int result = load( - Merge( - Source(file=filea), - Source(file=fileb), - ), - Config, + Source(file=filea), + Source(file=fileb), + dataclass_=Config, ) assert result.host == "myhost" @@ -101,11 +95,9 @@ class Config: database: Database result = load( - Merge( - Source(file=defaults), 
- Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, ) assert result.database.host == "prod-host" @@ -128,12 +120,10 @@ class Config: debug: bool result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, ) assert result.host == "a-host" @@ -153,11 +143,9 @@ class Config: port: int result = load( - ( - Source(file=defaults), - Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, ) assert result.host == "localhost" @@ -176,11 +164,9 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(prefix="APP_"), - ), - Config, + Source(file=defaults), + Source(prefix="APP_"), + dataclass_=Config, ) assert result.host == "env-host" @@ -200,11 +186,9 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=defaults), - Source(prefix="APP_"), - ), - Config, + Source(file=defaults), + Source(prefix="APP_"), + dataclass_=Config, ) err = exc_info.value @@ -226,11 +210,9 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=a), - Source(file=b), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, ) err = exc_info.value @@ -247,7 +229,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.name == "test" assert result.port == 8080 @@ -259,7 +241,7 @@ def test_backward_compat_none_metadata(self, monkeypatch): class Config: my_var: str - result = load(None, Config) + result = load(Source(), dataclass_=Config) assert result.my_var == "from_env" @@ -272,12 +254,10 @@ def test_decorator_with_merge(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"port": 9090}') - meta = Merge( + @load( 
Source(file=defaults), Source(file=overrides), ) - - @load(meta) @dataclass class Config: host: str @@ -291,9 +271,7 @@ def test_decorator_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - meta = Merge(Source(file=defaults)) - - @load(meta) + @load(Source(file=defaults)) @dataclass class Config: host: str @@ -310,9 +288,7 @@ def test_decorator_no_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - meta = Merge(Source(file=defaults)) - - @load(meta, cache=False) + @load(Source(file=defaults), cache=False) @dataclass class Config: host: str @@ -333,10 +309,8 @@ def test_decorator_with_tuple(self, tmp_path: Path): overrides.write_text('{"port": 8080}') @load( - ( - Source(file=defaults), - Source(file=overrides), - ), + Source(file=defaults), + Source(file=overrides), ) @dataclass class Config: @@ -351,9 +325,7 @@ def test_decorator_init_override(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "localhost", "port": 3000}') - meta = Merge(Source(file=defaults)) - - @load(meta) + @load(Source(file=defaults)) @dataclass class Config: host: str @@ -364,11 +336,9 @@ class Config: assert config.port == 3000 def test_decorator_not_dataclass(self): - meta = Merge(Source()) - with pytest.raises(TypeError, match="must be a dataclass"): - @load(meta) + @load(Source()) class NotDataclass: pass @@ -379,13 +349,11 @@ def test_decorator_first_wins(self, tmp_path: Path): second = tmp_path / "second.json" second.write_text('{"host": "second-host", "port": 2000}') - meta = Merge( + @load( Source(file=first), Source(file=second), strategy=MergeStrategy.FIRST_WINS, ) - - @load(meta) @dataclass class Config: host: str @@ -411,12 +379,10 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, 
- ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert str(exc_info.value) == dedent(f"""\ @@ -442,12 +408,10 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert result.host == "localhost" @@ -466,12 +430,10 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert result.host == "same" @@ -495,12 +457,10 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert str(exc_info.value) == dedent(f"""\ @@ -526,12 +486,10 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert str(exc_info.value) == dedent(f"""\ @@ -557,12 +515,10 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Merge( - Source(file=a), - Source(prefix="APP_"), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - Config, + Source(file=a), + Source(prefix="APP_"), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert str(exc_info.value) == dedent(f"""\ @@ -588,12 +544,10 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Merge( - Source(file=a), - Source(file=b), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - ), - 
Config, + Source(file=a), + Source(file=b), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, ) assert len(exc_info.value.exceptions) == 2 @@ -631,11 +585,9 @@ class Config: port: int result = load( - Merge( - Source(file=yaml_file), - Source(file=env_file), - ), - Config, + Source(file=yaml_file), + Source(file=env_file), + dataclass_=Config, ) assert result.host == "localhost" @@ -662,11 +614,9 @@ class Config: perms: _Permission result = load( - Merge( - Source(file=json_file), - Source(file=env_file), - ), - Config, + Source(file=json_file), + Source(file=env_file), + dataclass_=Config, ) assert result.perms == _Permission.READ | _Permission.WRITE @@ -683,11 +633,9 @@ class Config: perms: _Permission result = load( - Merge( - Source(file=json_file), - Source(prefix="APP_"), - ), - Config, + Source(file=json_file), + Source(prefix="APP_"), + dataclass_=Config, ) assert result.perms == _Permission.READ | _Permission.EXECUTE @@ -705,11 +653,9 @@ class Config: perms: _Permission result = load( - Merge( - Source(file=a), - Source(file=b), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, ) assert result.perms == _Permission.READ | _Permission.WRITE | _Permission.EXECUTE @@ -726,12 +672,10 @@ class Config: name: str perms: _Permission - meta = Merge( + @load( Source(file=json_file), Source(file=env_file), ) - - @load(meta) @dataclass class MergedConfig: name: str @@ -755,12 +699,10 @@ class Config: port: int result = load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=first), + Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) assert result.host == "first-host" @@ -777,12 +719,10 @@ class Config: port: int result = load( - Merge( - Source(file=missing), - Source(file=fallback), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=missing), + Source(file=fallback), + dataclass_=Config, + 
strategy=MergeStrategy.FIRST_FOUND, ) assert result.host == "fallback-host" @@ -801,12 +741,10 @@ class Config: port: int result = load( - Merge( - Source(file=broken), - Source(file=fallback), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=broken), + Source(file=fallback), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) assert result.host == "fallback-host" @@ -823,12 +761,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=missing1), - Source(file=missing2), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=missing1), + Source(file=missing2), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) err = exc_info.value @@ -850,12 +786,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=partial), - Source(file=full), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=partial), + Source(file=full), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) err = exc_info.value @@ -877,12 +811,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=bad_type), - Source(file=fallback), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=bad_type), + Source(file=fallback), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) err = exc_info.value @@ -909,12 +841,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_FOUND, - ), - Config, + Source(file=first), + Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_FOUND, ) err = exc_info.value @@ -935,11 +865,9 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa second.write_text("host: second-host\nport: 5000\n") @load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_FOUND, - ), + 
Source(file=first), + Source(file=second), + strategy=MergeStrategy.FIRST_FOUND, cache=False, ) @dataclass diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index 5b7b3a9..7321fa9 100644 --- a/tests/loading/test_skip_invalid_fields.py +++ b/tests/loading/test_skip_invalid_fields.py @@ -6,7 +6,7 @@ import pytest -from dature import F, Merge, MergeStrategy, Source, load +from dature import F, MergeStrategy, Source, load from dature.errors.exceptions import DatureConfigError @@ -24,12 +24,10 @@ class Config: port: int result = load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) assert result.host == "localhost" @@ -48,12 +46,10 @@ class Config: port: int = 9090 result = load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) assert result.host == "localhost" @@ -73,12 +69,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) err = exc_info.value @@ -109,12 +103,10 @@ class Config: db: Database result = load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) assert result.db.host == "s2-host" @@ -133,11 +125,9 @@ class Config: port: int result = load( - Merge( - Source(file=source1, skip_if_invalid=True), - Source(file=source2), - ), - Config, + Source(file=source1, skip_if_invalid=True), + Source(file=source2), + dataclass_=Config, ) assert result.host == 
"localhost" @@ -156,12 +146,10 @@ class Config: port: int result = load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) assert result.host == "localhost" @@ -178,10 +166,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=source1), - ), - Config, + Source(file=source1), + dataclass_=Config, ) err = exc_info.value @@ -207,13 +193,11 @@ class Config: port: int result = load( - Merge( - Source(file=source1), - Source(file=source2), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + skip_invalid_fields=True, ) assert result.host == "localhost" @@ -233,14 +217,12 @@ class Config: timeout: int = 30 result = load( - Merge( - Source( - file=source1, - skip_if_invalid=(F[Config].port, F[Config].timeout), - ), - Source(file=source2), + Source( + file=source1, + skip_if_invalid=(F[Config].port, F[Config].timeout), ), - Config, + Source(file=source2), + dataclass_=Config, ) assert result.host == "localhost" @@ -258,13 +240,11 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source( - file=source1, - skip_if_invalid=(F[Config].port,), - ), + Source( + file=source1, + skip_if_invalid=(F[Config].port,), ), - Config, + dataclass_=Config, ) err = exc_info.value @@ -296,12 +276,10 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( - Merge( - Source(file=source1), - Source(file=source2), - skip_invalid_fields=True, - ), - Config, + Source(file=source1), + Source(file=source2), + dataclass_=Config, + skip_invalid_fields=True, ) warning_messages = [r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING] @@ -323,7 +301,7 @@ class Config: result = load( 
Source(file=json_file, skip_if_invalid=True), - Config, + dataclass_=Config, ) assert result.host == "localhost" @@ -341,7 +319,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=json_file, skip_if_invalid=True), - Config, + dataclass_=Config, ) err = exc_info.value @@ -382,7 +360,7 @@ class Config: file=json_file, skip_if_invalid=(F[Config].port,), ), - Config, + dataclass_=Config, ) assert result.host == "localhost" @@ -401,7 +379,7 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( Source(file=json_file, skip_if_invalid=True), - Config, + dataclass_=Config, ) warning_messages = [r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING] @@ -427,7 +405,7 @@ class Config: file=source, skip_if_invalid=(F[Config].port,), ), - Config, + dataclass_=Config, ) assert result.port == 3000 @@ -450,14 +428,12 @@ class Config: inner: Inner result = load( - Merge( - Source( - file=source1, - skip_if_invalid=(F[Config].inner.port,), - ), - Source(file=source2), + Source( + file=source1, + skip_if_invalid=(F[Config].inner.port,), ), - Config, + Source(file=source2), + dataclass_=Config, ) assert result.port == 8080 @@ -480,14 +456,12 @@ class Config: inner: Inner result = load( - Merge( - Source( - file=source1, - skip_if_invalid=(F[Config].port, F[Config].inner.port), - ), - Source(file=source2), + Source( + file=source1, + skip_if_invalid=(F[Config].port, F[Config].inner.port), ), - Config, + Source(file=source2), + dataclass_=Config, ) assert result.port == 8080 diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index 75e163a..5852071 100644 --- a/tests/loading/test_source_loading.py +++ b/tests/loading/test_source_loading.py @@ -6,7 +6,7 @@ import pytest -from dature import Merge, Source, load +from dature import Source, load from dature.errors.exceptions import DatureConfigError, EnvVarExpandError @@ -23,12 +23,10 @@ class Config: port: int result = load( - 
Merge( - Source(file=valid), - Source(file=missing), - skip_broken_sources=True, - ), - Config, + Source(file=valid), + Source(file=missing), + dataclass_=Config, + skip_broken_sources=True, ) assert result.host == "localhost" @@ -47,12 +45,10 @@ class Config: port: int result = load( - Merge( - Source(file=valid), - Source(file=broken), - skip_broken_sources=True, - ), - Config, + Source(file=valid), + Source(file=broken), + dataclass_=Config, + skip_broken_sources=True, ) assert result.host == "localhost" @@ -71,12 +67,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=broken_a), - Source(file=broken_b), - skip_broken_sources=True, - ), - Config, + Source(file=broken_a), + Source(file=broken_b), + dataclass_=Config, + skip_broken_sources=True, ) assert str(exc_info.value) == "Config loading errors (1)" @@ -95,11 +89,9 @@ class Config: with pytest.raises(DatureConfigError): load( - Merge( - Source(file=valid), - Source(file=broken), - ), - Config, + Source(file=valid), + Source(file=broken), + dataclass_=Config, ) def test_skip_middle_source(self, tmp_path: Path): @@ -118,13 +110,11 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=broken), - Source(file=c), - skip_broken_sources=True, - ), - Config, + Source(file=a), + Source(file=broken), + Source(file=c), + dataclass_=Config, + skip_broken_sources=True, ) assert result.host == "a-host" @@ -143,12 +133,10 @@ class Config: port: int result = load( - Merge( - Source(file=valid), - Source(file=broken, skip_if_broken=True), - skip_broken_sources=False, - ), - Config, + Source(file=valid), + Source(file=broken, skip_if_broken=True), + dataclass_=Config, + skip_broken_sources=False, ) assert result.host == "localhost" @@ -168,12 +156,10 @@ class Config: with pytest.raises(DatureConfigError): load( - Merge( - Source(file=valid), - Source(file=broken, skip_if_broken=False), - skip_broken_sources=True, - ), - Config, + Source(file=valid), + 
Source(file=broken, skip_if_broken=False), + dataclass_=Config, + skip_broken_sources=True, ) def test_per_source_none_uses_global(self, tmp_path: Path): @@ -189,20 +175,18 @@ class Config: port: int result = load( - Merge( - Source(file=valid), - Source(file=broken, skip_if_broken=None), - skip_broken_sources=True, - ), - Config, + Source(file=valid), + Source(file=broken, skip_if_broken=None), + dataclass_=Config, + skip_broken_sources=True, ) assert result.host == "localhost" assert result.port == 3000 def test_empty_sources_raises(self): - with pytest.raises(TypeError, match="Merge\\(\\) requires at least one Source"): - Merge() + with pytest.raises(TypeError, match="load\\(\\) requires at least one Source"): + load(dataclass_=int) def test_all_sources_broken_mixed_errors(self, tmp_path: Path): missing = str(tmp_path / "does_not_exist.json") @@ -216,12 +200,10 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Merge( - Source(file=missing), - Source(file=broken), - skip_broken_sources=True, - ), - Config, + Source(file=missing), + Source(file=broken), + dataclass_=Config, + skip_broken_sources=True, ) assert str(exc_info.value) == "Config loading errors (1)" @@ -240,10 +222,8 @@ class Config: port: int result = load( - Merge( - Source(file=json_file), - ), - Config, + Source(file=json_file), + dataclass_=Config, ) assert result.host == "from-env" @@ -259,11 +239,9 @@ class Config: port: int result = load( - Merge( - Source(file=json_file), - expand_env_vars="disabled", - ), - Config, + Source(file=json_file), + dataclass_=Config, + expand_env_vars="disabled", ) assert result.host == "$DATURE_HOST" @@ -280,11 +258,9 @@ class Config: with pytest.raises(EnvVarExpandError): load( - Merge( - Source(file=json_file), - expand_env_vars="strict", - ), - Config, + Source(file=json_file), + dataclass_=Config, + expand_env_vars="strict", ) def test_source_overrides_merge(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch): @@ -298,11 +274,9 @@ 
class Config: port: int result = load( - Merge( - Source(file=json_file, expand_env_vars="disabled"), - expand_env_vars="default", - ), - Config, + Source(file=json_file, expand_env_vars="disabled"), + dataclass_=Config, + expand_env_vars="default", ) assert result.host == "$DATURE_HOST" @@ -318,11 +292,9 @@ class Config: port: int result = load( - Merge( - Source(file=json_file, expand_env_vars=None), - expand_env_vars="disabled", - ), - Config, + Source(file=json_file, expand_env_vars=None), + dataclass_=Config, + expand_env_vars="disabled", ) assert result.host == "$DATURE_HOST" @@ -338,11 +310,9 @@ class Config: port: int result = load( - Merge( - Source(file=json_file), - expand_env_vars="empty", - ), - Config, + Source(file=json_file), + dataclass_=Config, + expand_env_vars="empty", ) assert result.host == "" @@ -384,7 +354,7 @@ def test_error_format( with pytest.raises(EnvVarExpandError) as exc_info: load( Source(file=file, prefix=prefix, expand_env_vars="strict"), - StrictConfig, + dataclass_=StrictConfig, ) assert str(exc_info.value) == dedent(f"""\ diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index b944abb..0f3ed71 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -5,7 +5,7 @@ import pytest -from dature import Merge, Source, configure, get_load_report, load +from dature import Source, configure, get_load_report, load from dature.config import MaskingConfig from dature.errors.exceptions import DatureConfigError from dature.fields.secret_str import SecretStr @@ -216,7 +216,7 @@ class Cfg: password: str host: str - result = load(Source(file=json_file), Cfg, debug=True) + result = load(Source(file=json_file), dataclass_=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -241,11 +241,9 @@ class Cfg: host: str result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Cfg, + Source(file=defaults), + Source(file=overrides), + dataclass_=Cfg, 
debug=True, ) @@ -269,7 +267,7 @@ class Cfg: api_key: SecretStr host: str - result = load(Source(file=json_file), Cfg, debug=True) + result = load(Source(file=json_file), dataclass_=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -290,7 +288,7 @@ class Cfg: host: str with caplog.at_level("DEBUG", logger="dature"): - load(Source(file=json_file), Cfg, debug=True) + load(Source(file=json_file), dataclass_=Cfg, debug=True) assert _SECRET_VALUE not in caplog.text @@ -308,11 +306,9 @@ class Cfg: with caplog.at_level("DEBUG", logger="dature"): load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Cfg, + Source(file=defaults), + Source(file=overrides), + dataclass_=Cfg, debug=True, ) @@ -328,7 +324,7 @@ class Cfg: port: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), Cfg) + load(Source(file=json_file), dataclass_=Cfg) assert _SECRET_VALUE not in str(exc_info.value) @@ -336,11 +332,7 @@ def test_merge_decorator_error_message_masks_secrets(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"password": "allowed", "host": "prod"}') - meta = Merge( - Source(file=json_file), - ) - - @load(meta) + @load(Source(file=json_file)) @dataclass class Cfg: password: Literal["allowed"] @@ -363,7 +355,7 @@ class Cfg: host: str with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), Cfg) + load(Source(file=json_file, mask_secrets=True), dataclass_=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -385,7 +377,7 @@ class Cfg: host: str with patch("dature.masking.masking._heuristic_detector", None), pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), Cfg) + load(Source(file=json_file, mask_secrets=True), dataclass_=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -418,7 
+410,7 @@ class Cfg: host: str configure(masking=MaskingConfig(mask_secrets=mask_secrets)) - result = load(Source(file=json_file), Cfg, debug=True) + result = load(Source(file=json_file), dataclass_=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -451,7 +443,7 @@ class Cfg: configure(masking=MaskingConfig(mask_secrets=mask_secrets)) with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), Cfg) + load(Source(file=json_file), dataclass_=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" content = f'{{"password": "{expected_password}", "port": "not_a_number"}}' diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index 6774fe4..839b720 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -6,7 +6,7 @@ import pytest -from dature import FieldGroup, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load +from dature import FieldGroup, FieldMergeStrategy, MergeRule, MergeStrategy, Source, load from dature.errors.exceptions import FieldGroupError from dature.field_path import F @@ -25,13 +25,11 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - strategy=MergeStrategy.LAST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + strategy=MergeStrategy.LAST_WINS, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "remote" @@ -50,13 +48,11 @@ class Config: port: int result = load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=first), + Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_WINS, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "first-host" @@ 
-77,12 +73,10 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "localhost" @@ -102,12 +96,10 @@ class Config: debug: bool result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "localhost" @@ -133,12 +125,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -166,12 +156,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -196,13 +184,11 @@ class Config: with pytest.raises(FieldGroupError): load( - Merge( - Source(file=defaults), - Source(file=overrides), - strategy=MergeStrategy.FIRST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + strategy=MergeStrategy.FIRST_WINS, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) def test_partial_change_with_raise_on_conflict(self, tmp_path: Path): @@ -219,13 +205,11 @@ 
class Config: with pytest.raises(FieldGroupError): load( - Merge( - Source(file=defaults), - Source(file=overrides), - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + strategy=MergeStrategy.RAISE_ON_CONFLICT, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) @@ -251,12 +235,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].database),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database),), ) assert str(exc_info.value) == dedent(f"""\ @@ -284,12 +266,10 @@ class Config: database: Database result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].database),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database),), ) assert result.database.host == "remote" @@ -318,13 +298,11 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - a_meta, - b_meta, - c_meta, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + a_meta, + b_meta, + c_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -351,13 +329,11 @@ class Config: port: int result = load( - Merge( - Source(file=a), - Source(file=b), - Source(file=c), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=a), + Source(file=b), + Source(file=c), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "c-host" @@ -384,15 +360,13 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, 
- field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), - ), + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=( + FieldGroup(F[Config].host, F[Config].port), + FieldGroup(F[Config].user, F[Config].password), ), - Config, ) assert str(exc_info.value) == dedent(f"""\ @@ -419,13 +393,11 @@ class Config: tags: list[str] result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert result.host == "remote" @@ -441,13 +413,11 @@ def test_decorator_with_field_groups(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "port": 9090}') - meta = Merge( + @load( Source(file=defaults), Source(file=overrides), field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), ) - - @load(meta) @dataclass class Config: host: str @@ -464,13 +434,11 @@ def test_decorator_partial_change_raises(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote"}') - meta = Merge( + @load( Source(file=defaults), Source(file=overrides), field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), ) - - @load(meta) @dataclass class Config: host: str @@ -499,12 +467,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -534,15 +500,13 @@ class Config: 
with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), - ), + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=( + FieldGroup(F[Config].host, F[Config].port), + FieldGroup(F[Config].user, F[Config].password), ), - Config, ) assert str(exc_info.value) == dedent(f"""\ @@ -581,12 +545,10 @@ class Config: timeout: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ) assert result.database.host == "remote" @@ -615,12 +577,10 @@ class Config: timeout: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ) assert result.database.host == "localhost" @@ -651,12 +611,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == dedent(f"""\ @@ -691,12 +649,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == 
dedent(f"""\ @@ -733,12 +689,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == dedent(f"""\ @@ -772,12 +726,10 @@ class Config: inner: Inner result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, + field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), ) assert result.user_name == "root-new" @@ -806,12 +758,10 @@ class Config: with pytest.raises(FieldGroupError) as exc_info: load( - Merge( - defaults_meta, - overrides_meta, - field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), - ), - Config, + defaults_meta, + overrides_meta, + dataclass_=Config, + field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), ) assert str(exc_info.value) == dedent(f"""\ diff --git a/tests/merging/test_predicate.py b/tests/merging/test_predicate.py index 9b812ca..ed8f1e8 100644 --- a/tests/merging/test_predicate.py +++ b/tests/merging/test_predicate.py @@ -107,7 +107,7 @@ class Other: rules = (MergeRule(F[Other].host, FieldMergeStrategy.FIRST_WINS),) with pytest.raises(TypeError) as exc_info: - build_field_merge_map(rules, Config) + build_field_merge_map(rules, dataclass_=Config) assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'" @@ -118,7 +118,7 @@ class Config: host: str with pytest.raises(TypeError) as exc_info: - extract_field_path(F["Other"].host, Config) + extract_field_path(F["Other"].host, dataclass_=Config) assert str(exc_info.value) == "FieldPath owner 'Other' does not match target 
dataclass 'Config'" def test_passes_with_correct_string_owner(self): @@ -126,4 +126,4 @@ def test_passes_with_correct_string_owner(self): class Config: host: str - assert extract_field_path(F["Config"].host, Config) == "host" + assert extract_field_path(F["Config"].host, dataclass_=Config) == "host" diff --git a/tests/sources_loader/test_base.py b/tests/sources_loader/test_base.py index 5a829dc..ba47b78 100644 --- a/tests/sources_loader/test_base.py +++ b/tests/sources_loader/test_base.py @@ -109,7 +109,7 @@ class Config: data = {"name": "TestApp", "port": 8080} loader = MockLoader(test_data=data) - result = loader.transform_to_dataclass(data, Config) + result = loader.transform_to_dataclass(data, dataclass_=Config) assert result == expected_data @@ -129,7 +129,7 @@ class Config: data = {"database": {"host": "localhost", "port": 5432}} loader = MockLoader(test_data=data) - result = loader.transform_to_dataclass(data, Config) + result = loader.transform_to_dataclass(data, dataclass_=Config) assert result == expected_data @@ -148,7 +148,7 @@ class Config: loader = MockLoader(prefix="app", test_data=data) load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, Config) + result = loader.transform_to_dataclass(load_result.data, dataclass_=Config) assert result == expected_data @@ -175,7 +175,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="lower_camel"), - Config, + dataclass_=Config, ) assert result.user_name == "John" @@ -193,7 +193,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="lower_snake"), - Config, + dataclass_=Config, ) assert result.user_name == "Alice" @@ -210,7 +210,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="upper_camel"), - Config, + dataclass_=Config, ) assert result.user_name == "Bob" @@ -227,7 +227,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, 
name_style="lower_kebab"), - Config, + dataclass_=Config, ) assert result.user_name == "Charlie" @@ -244,7 +244,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="upper_kebab"), - Config, + dataclass_=Config, ) assert result.user_name == "Dave" @@ -261,7 +261,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="upper_snake"), - Config, + dataclass_=Config, ) assert result.user_name == "Eve" @@ -287,7 +287,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "John Doe" @@ -308,7 +308,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "Alice" @@ -334,7 +334,7 @@ class Config: name_style="lower_camel", field_mapping=field_mapping, ), - Config, + dataclass_=Config, ) assert result.user_name == "Bob" @@ -366,7 +366,7 @@ class User: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - User, + dataclass_=User, ) assert result.name == "Charlie" @@ -385,7 +385,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "Alice" @@ -402,7 +402,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "Bob" @@ -423,7 +423,7 @@ class User: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - User, + dataclass_=User, ) assert result.address.city == "LA" @@ -440,7 +440,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "Eve" @@ -457,7 +457,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, 
field_mapping=field_mapping), - Config, + dataclass_=Config, ) assert result.name == "Direct" @@ -490,7 +490,7 @@ class Config: name_style="lower_camel", field_mapping=field_mapping, ), - Config, + dataclass_=Config, ) assert result.user_name == "Alice" @@ -525,7 +525,7 @@ class Config: name_style="lower_camel", field_mapping=field_mapping, ), - Config, + dataclass_=Config, ) assert result.user_name == "Alice" diff --git a/tests/sources_loader/test_docker_secrets.py b/tests/sources_loader/test_docker_secrets.py index d789484..f783d7a 100644 --- a/tests/sources_loader/test_docker_secrets.py +++ b/tests/sources_loader/test_docker_secrets.py @@ -11,7 +11,7 @@ class TestDockerSecretsLoader: def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path): result = load( Source(file=all_types_docker_secrets_dir, loader=DockerSecretsLoader), - AllPythonTypesCompact, + dataclass_=AllPythonTypesCompact, ) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -73,7 +73,7 @@ class Config: result = load( Source(file=tmp_path, loader=DockerSecretsLoader), - Config, + dataclass_=Config, ) assert result.api_url == "https://api.example.com/v1" diff --git a/tests/sources_loader/test_env_.py b/tests/sources_loader/test_env_.py index 0728268..0fc5220 100644 --- a/tests/sources_loader/test_env_.py +++ b/tests/sources_loader/test_env_.py @@ -44,7 +44,7 @@ def test_custom_split_symbols(self, custom_separator_env_file: Path): def test_comprehensive_type_conversion(self, all_types_env_file: Path): """Test loading ENV with full type coercion to dataclass.""" - result = load(Source(file=all_types_env_file, loader=EnvFileLoader), AllPythonTypesCompact) + result = load(Source(file=all_types_env_file, loader=EnvFileLoader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -69,7 +69,7 @@ class Config: api_url: str base: str - result = load(Source(file=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, 
loader=EnvFileLoader), dataclass_=Config) assert result.api_url == "https://api.example.com/v1" assert result.base == "https://api.example.com" @@ -85,7 +85,7 @@ def test_env_fileenv_var_partial_substitution(self, tmp_path: Path, monkeypatch) class Config: url: str - result = load(Source(file=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -99,7 +99,7 @@ def test_env_filedollar_sign_mid_string_existing_var(self, tmp_path: Path, monke class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -140,7 +140,7 @@ def test_env_filedollar_sign_mid_string_missing_var(self, tmp_path: Path, monkey class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), Config) + result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -256,7 +256,7 @@ def test_comprehensive_type_conversion(self, monkeypatch): for key, value in env_vars.items(): monkeypatch.setenv(key, value) - result = load(Source(loader=EnvLoader, prefix="APP_"), AllPythonTypesCompact) + result = load(Source(loader=EnvLoader, prefix="APP_"), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -273,7 +273,7 @@ class TestConfig: expected_data = TestConfig(var="included", key="also_included") - data = load(Source(loader=EnvLoader, prefix="APP_"), TestConfig) + data = load(Source(loader=EnvLoader, prefix="APP_"), dataclass_=TestConfig) assert data == expected_data @@ -295,7 +295,7 @@ class TestConfig: data = load( Source(loader=EnvLoader, prefix="APP_", split_symbols="."), - TestConfig, + dataclass_=TestConfig, ) assert data == expected_data diff --git a/tests/sources_loader/test_ini_.py 
b/tests/sources_loader/test_ini_.py index 1b96a24..fb0b27f 100644 --- a/tests/sources_loader/test_ini_.py +++ b/tests/sources_loader/test_ini_.py @@ -19,7 +19,7 @@ def test_comprehensive_type_conversion(self, all_types_ini_file: Path): """Test loading INI with full type coercion to dataclass.""" result = load( Source(file=all_types_ini_file, loader=IniLoader, prefix="all_types"), - AllPythonTypesCompact, + dataclass_=AllPythonTypesCompact, ) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -60,7 +60,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_ini_file, loader=IniLoader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -89,7 +89,7 @@ class DbConfig: result = load( Source(file=ini_file, loader=IniLoader, prefix="database"), - DbConfig, + dataclass_=DbConfig, ) assert result.host == "db.example.com" @@ -108,7 +108,7 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - Config, + dataclass_=Config, ) assert result.url == "http://localhost:8080/api" @@ -125,7 +125,7 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - Config, + dataclass_=Config, ) assert result.value == "prefixreplaced/suffix" @@ -142,7 +142,7 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - Config, + dataclass_=Config, ) assert result.value == "prefix$nonexistent/suffix" diff --git a/tests/sources_loader/test_json5_.py b/tests/sources_loader/test_json5_.py index 34d46f9..0f2c74b 100644 --- a/tests/sources_loader/test_json5_.py +++ b/tests/sources_loader/test_json5_.py @@ -17,7 +17,7 @@ class TestJson5Loader: def test_comprehensive_type_conversion(self, all_types_json5_file: Path): """Test loading JSON5 with full type coercion to dataclass.""" - result = load(Source(file=all_types_json5_file, loader=Json5Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_json5_file, loader=Json5Loader), 
dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_json5_file, loader=Json5Loader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json5_file, loader=Json5Loader), DbConfig) + result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json5_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json5_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeyp class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json5_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), Config) + result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, loader=Json5Loader), Config) + load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, loader=Json5Loader), Config) + load(Source(file=json5_file, 
loader=Json5Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_json_.py b/tests/sources_loader/test_json_.py index 41433a2..2094b95 100644 --- a/tests/sources_loader/test_json_.py +++ b/tests/sources_loader/test_json_.py @@ -17,7 +17,7 @@ class TestJsonLoader: def test_comprehensive_type_conversion(self, all_types_json_file: Path): """Test loading JSON with full type coercion to dataclass.""" - result = load(Source(file=all_types_json_file, loader=JsonLoader), AllPythonTypesCompact) + result = load(Source(file=all_types_json_file, loader=JsonLoader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_json_file, loader=JsonLoader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json_file, loader=JsonLoader), DbConfig) + result = load(Source(file=json_file, loader=JsonLoader), dataclass_=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json_file, loader=JsonLoader), Config) + result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=json_file, loader=JsonLoader), Config) + result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=json_file, loader=JsonLoader), 
Config) + result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), Config) + load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), Config) + load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources_loader/test_nested_resolve.py index 6da268d..e038402 100644 --- a/tests/sources_loader/test_nested_resolve.py +++ b/tests/sources_loader/test_nested_resolve.py @@ -103,14 +103,14 @@ class TestNestedResolve: def test_json_only(self, flat_loader_setup: FlatLoaderSetup) -> None: flat_loader_setup.set_data({"var": '{"foo": "from_json", "bar": "from_json"}'}) - result = load(flat_loader_setup.make_metadata(), NestedConfig) + result = load(flat_loader_setup.make_metadata(), dataclass_=NestedConfig) assert result == NestedConfig(var=NestedVar(foo="from_json", bar="from_json")) def test_flat_only(self, flat_loader_setup: FlatLoaderSetup) -> None: flat_loader_setup.set_data({"var__foo": "from_flat", "var__bar": "from_flat"}) - result = load(flat_loader_setup.make_metadata(), NestedConfig) + result = load(flat_loader_setup.make_metadata(), dataclass_=NestedConfig) assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -141,7 +141,7 @@ def test_both_sources( result = load( flat_loader_setup.make_metadata(**_strategy_kwargs(strategy, local=local)), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) @@ -181,7 +181,7 @@ def 
test_partial_missing_field( with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", **_strategy_kwargs(strategy, local=local)), - NestedConfig, + dataclass_=NestedConfig, ) err = exc_info.value @@ -208,7 +208,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: prefix="MYAPP__", **_strategy_kwargs("flat", local=local), ), - NestedConfig, + dataclass_=NestedConfig, ) err = exc_info.value @@ -231,7 +231,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: prefix="MYAPP__", **_strategy_kwargs("json", local=local), ), - NestedConfig, + dataclass_=NestedConfig, ) err = exc_info.value @@ -257,7 +257,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)), - NestedConfig, + dataclass_=NestedConfig, ) err = exc_info.value @@ -275,7 +275,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)), - NestedConfig, + dataclass_=NestedConfig, ) err = exc_info.value @@ -296,7 +296,7 @@ def test_json_invalid_flat_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -309,7 +309,7 @@ def test_json_invalid_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -335,7 +335,7 @@ def 
test_flat_invalid_json_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -348,7 +348,7 @@ def test_flat_invalid_flat_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -377,7 +377,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -391,7 +391,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -422,7 +422,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -436,7 +436,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -470,7 +470,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: result = load( 
Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -483,7 +483,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -509,7 +509,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -522,7 +522,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -554,7 +554,7 @@ def test_multiline_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch) - with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -583,7 +583,7 @@ def test_multiline_flat_strategy_ignores_json(self, monkeypatch: pytest.MonkeyPa result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedIntConfig, + dataclass_=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -624,7 +624,7 @@ def test_different_strategies( var2_strategy: (F[TwoNestedConfig].var2,), }, ), - TwoNestedConfig, + dataclass_=TwoNestedConfig, ) assert result == TwoNestedConfig(var1=expected_var1, var2=expected_var2) @@ -659,7 +659,7 @@ def 
test_local_overrides_global( nested_resolve_strategy=global_strategy, nested_resolve={local_strategy: (F[NestedConfig].var,)}, ), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) @@ -680,7 +680,7 @@ def test_flat_strategy_single_underscore(self, monkeypatch: pytest.MonkeyPatch) split_symbols="_", nested_resolve_strategy="flat", ), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -698,7 +698,7 @@ def test_json_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP split_symbols="_", nested_resolve_strategy="json", ), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -722,7 +722,7 @@ def test_flat_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP split_symbols="_", nested_resolve_strategy="flat", ), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -746,7 +746,7 @@ def test_only_json_no_conflict( result = load( flat_loader_setup.make_metadata(nested_resolve_strategy=strategy), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2")) @@ -761,7 +761,7 @@ def test_only_flat_no_conflict( result = load( flat_loader_setup.make_metadata(nested_resolve_strategy=strategy), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2")) @@ -781,7 +781,7 @@ def test_deep_env(self, monkeypatch: pytest.MonkeyPatch, strategy: str, expected result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy=strategy), - DeepConfig, + dataclass_=DeepConfig, ) assert result == DeepConfig(var=DeepVar(sub=DeepSub(key=expected_key))) @@ -799,7 +799,7 @@ def test_flat_strategy_deep_envfile(self, tmp_path: Path) -> None: prefix="MYAPP__", nested_resolve_strategy="flat", ), - DeepConfig, + dataclass_=DeepConfig, ) assert result == 
DeepConfig(var=DeepVar(sub=DeepSub(key="from_flat"))) @@ -810,7 +810,7 @@ def test_json_strategy_deep_docker_secrets(self, tmp_path: Path) -> None: result = load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - DeepConfig, + dataclass_=DeepConfig, ) assert result == DeepConfig(var=DeepVar(sub=DeepSub(key="from_json"))) @@ -832,7 +832,7 @@ def test_flat_strategy_error(self, tmp_path: Path) -> None: prefix="myapp__", nested_resolve_strategy="flat", ), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -857,7 +857,7 @@ def test_json_strategy_error(self, tmp_path: Path) -> None: prefix="myapp__", nested_resolve_strategy="json", ), - NestedIntConfig, + dataclass_=NestedIntConfig, ) err = exc_info.value @@ -880,11 +880,11 @@ def test_flat_first_then_json(self, monkeypatch: pytest.MonkeyPatch) -> None: result_flat = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - NestedConfig, + dataclass_=NestedConfig, ) result_json = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - NestedConfig, + dataclass_=NestedConfig, ) assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -903,7 +903,7 @@ def test_envfilereversed_order(self, tmp_path: Path) -> None: prefix="MYAPP__", nested_resolve_strategy="flat", ), - NestedConfig, + dataclass_=NestedConfig, ) result_json = load( Source( @@ -912,7 +912,7 @@ def test_envfilereversed_order(self, tmp_path: Path) -> None: prefix="MYAPP__", nested_resolve_strategy="json", ), - NestedConfig, + dataclass_=NestedConfig, ) assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -944,7 +944,7 @@ def test_empty_dict_uses_global( nested_resolve_strategy=strategy, nested_resolve={}, ), - NestedConfig, + dataclass_=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) diff --git a/tests/sources_loader/test_toml10_.py 
b/tests/sources_loader/test_toml10_.py index 09ddae2..fac994a 100644 --- a/tests/sources_loader/test_toml10_.py +++ b/tests/sources_loader/test_toml10_.py @@ -17,7 +17,7 @@ class TestToml10Loader: def test_comprehensive_type_conversion(self, all_types_toml10_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_toml_file, loader=Toml10Loader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), Config) + result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) assert result.value == 
"prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), Config) + load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), Config) + load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources_loader/test_toml11_.py index 257f7e8..4a6bf09 100644 --- a/tests/sources_loader/test_toml11_.py +++ b/tests/sources_loader/test_toml11_.py @@ -17,7 +17,7 @@ class TestToml11Loader: def test_comprehensive_type_conversion(self, all_types_toml11_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_toml_file, loader=Toml11Loader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) assert result.url == 
"http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), Config) + result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), Config) + load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), Config) + load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources_loader/test_yaml11_.py index f0feb19..24928f5 100644 --- a/tests/sources_loader/test_yaml11_.py +++ b/tests/sources_loader/test_yaml11_.py @@ -17,7 +17,7 @@ class TestYaml11Loader: def test_comprehensive_type_conversion(self, all_types_yaml11_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"), - 
PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -63,7 +63,7 @@ class EnvConfig: result = load( Source(file=yaml_config_with_env_vars_file, loader=Yaml11Loader), - EnvConfig, + dataclass_=EnvConfig, ) assert result.database_url == "postgresql://localhost/db" @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), Config) + load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), Config) + load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources_loader/test_yaml12_.py index 6cb80f6..c256c37 100644 --- a/tests/sources_loader/test_yaml12_.py +++ 
b/tests/sources_loader/test_yaml12_.py @@ -17,7 +17,7 @@ class TestYaml12Loader: def test_comprehensive_type_conversion(self, all_types_yaml12_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), AllPythonTypesCompact) + result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), dataclass_=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"), - PrefixedConfig, + dataclass_=PrefixedConfig, ) assert result == expected_data @@ -63,7 +63,7 @@ class EnvConfig: result = load( Source(file=yaml_config_with_env_vars_file, loader=Yaml12Loader), - EnvConfig, + dataclass_=EnvConfig, ) assert result.database_url == "postgresql://localhost/db" @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), 
Config) + load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), Config) + load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/test_custom_loader.py b/tests/test_custom_loader.py index 487dc43..e3392a5 100644 --- a/tests/test_custom_loader.py +++ b/tests/test_custom_loader.py @@ -47,7 +47,7 @@ def test_xml_loader(self, tmp_path: Path) -> None: result = load( Source(file=xml_file, loader=XmlLoader), - XmlConfig, + dataclass_=XmlConfig, ) assert result.host == "localhost" diff --git a/tests/test_load_report.py b/tests/test_load_report.py index 75fa1a1..01485ad 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -8,7 +8,7 @@ import pytest -from dature import Merge, MergeStrategy, Source, get_load_report, load +from dature import MergeStrategy, Source, get_load_report, load from dature.errors.exceptions import DatureConfigError from dature.load_report import FieldOrigin, LoadReport, SourceEntry from dature.validators.number import Ge @@ -28,11 +28,9 @@ class Config: port: int result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, debug=True, ) @@ -88,12 +86,10 @@ class Config: port: int result = load( - Merge( - Source(file=first), - Source(file=second), - strategy=MergeStrategy.FIRST_WINS, - ), - Config, + Source(file=first), + Source(file=second), + dataclass_=Config, + strategy=MergeStrategy.FIRST_WINS, debug=True, ) @@ -153,11 +149,9 @@ class Config: database: Database result = load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, debug=True, 
) @@ -193,7 +187,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), Config, debug=True) + result = load(Source(file=json_file), dataclass_=Config, debug=True) report = get_load_report(result) @@ -237,12 +231,7 @@ def test_merge_decorator(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"port": 9090}') - meta = Merge( - Source(file=defaults), - Source(file=overrides), - ) - - @load(meta, debug=True) + @load(Source(file=defaults), Source(file=overrides), debug=True) @dataclass class Config: host: str @@ -290,7 +279,7 @@ class Config: host: str port: int - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") @@ -316,11 +305,9 @@ class Config: with caplog.at_level(logging.DEBUG, logger="dature"): load( - Merge( - Source(file=defaults), - Source(file=overrides), - ), - Config, + Source(file=defaults), + Source(file=overrides), + dataclass_=Config, ) messages = [r.message for r in caplog.records if r.name == "dature"] @@ -353,7 +340,7 @@ class Config: port: int with caplog.at_level(logging.DEBUG, logger="dature"): - load(Source(file=json_file), Config) + load(Source(file=json_file), dataclass_=Config) messages = [r.message for r in caplog.records if r.name == "dature"] @@ -381,11 +368,9 @@ class Config: with pytest.raises(DatureConfigError): load( - Merge( - Source(file=a), - Source(file=b), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, debug=True, ) @@ -423,11 +408,9 @@ class Config: with pytest.raises(DatureConfigError): load( - Merge( - Source(file=a), - Source(file=b), - ), - Config, + Source(file=a), + Source(file=b), + dataclass_=Config, debug=True, ) @@ -462,7 +445,7 @@ class Config: port: int with pytest.raises(DatureConfigError): - load(Source(file=json_file), Config, debug=True) + load(Source(file=json_file), dataclass_=Config, debug=True) 
expected = LoadReport( dataclass_name="Config", @@ -492,7 +475,7 @@ class Config: port: Annotated[int, Ge(value=0)] with pytest.raises(DatureConfigError): - load(Source(file=json_file), Config, debug=True) + load(Source(file=json_file), dataclass_=Config, debug=True) expected = LoadReport( dataclass_name="Config", diff --git a/tests/test_main.py b/tests/test_main.py index 85be970..91c3ac9 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -54,7 +54,7 @@ class Config: def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("MY_VAR", "test_value") - @load() + @load(Source()) @dataclass class Config: my_var: str @@ -80,7 +80,7 @@ def test_priority(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("LOADED_VAR", "loaded") monkeypatch.setenv("OVERRIDDEN_VAR", "loaded") - @load() + @load(Source()) @dataclass class Config: overridden_var: str @@ -97,7 +97,7 @@ def test_invalid_decorator_order(self) -> None: with pytest.raises(TypeError, match="Config must be a dataclass"): @dataclass - @load() + @load(Source()) class Config: pass @@ -153,7 +153,7 @@ class Config: port: int metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "FromFile" assert result.port == 9090 @@ -168,7 +168,7 @@ class Config: debug: bool metadata = Source(prefix="APP_") - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "EnvFunc" assert result.debug is True @@ -180,7 +180,7 @@ def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None: class Config: my_var: str - result = load(None, Config) + result = load(Source(), dataclass_=Config) assert result.my_var == "from_env" @@ -199,7 +199,7 @@ class Config: metadata = Source(file="/non/existent/file.json", loader=loader_class) with pytest.raises(FileNotFoundError): - load(metadata, Config) + load(metadata, dataclass_=Config) @pytest.mark.parametrize( 
"loader_class", diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py index 45291cb..30571d3 100644 --- a/tests/test_type_loaders.py +++ b/tests/test_type_loaders.py @@ -1,4 +1,4 @@ -"""Tests for TypeLoader — custom type loading via Source, configure(), and Merge.""" +"""Tests for TypeLoader — custom type loading via Source, configure(), and load().""" from collections.abc import Generator from dataclasses import dataclass @@ -6,7 +6,7 @@ import pytest -from dature import Merge, Source, TypeLoader, configure, load +from dature import Source, TypeLoader, configure, load from dature.config import _ConfigProxy @@ -51,7 +51,7 @@ def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None: file=yaml_with_rgb, type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), - ConfigWithRgb, + dataclass_=ConfigWithRgb, ) assert result.name == "test" assert result.color == Rgb(r=255, g=128, b=0) @@ -70,7 +70,7 @@ def int_times_two(value: str) -> int: file=p, type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ), - ConfigWithRgb, + dataclass_=ConfigWithRgb, ) assert result.color == Rgb(r=10, g=20, b=30) @@ -81,7 +81,7 @@ def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None: configure( type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ) - result = load(Source(file=yaml_with_rgb), ConfigWithRgb) + result = load(Source(file=yaml_with_rgb), dataclass_=ConfigWithRgb) assert result.color == Rgb(r=255, g=128, b=0) @@ -93,12 +93,10 @@ def test_merge_metadata_type_loaders(self, tmp_path: Path) -> None: override.write_text("name: override\n") result = load( - Merge( - Source(file=base), - Source(file=override), - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), - ), - ConfigWithRgb, + Source(file=base), + Source(file=override), + dataclass_=ConfigWithRgb, + type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), ) assert result.name == "override" assert result.color == Rgb(r=1, g=2, b=3) @@ 
-127,7 +125,7 @@ def tag_upper(value: str) -> str: file=p, type_loaders=(TypeLoader(type_=str, func=tag_upper),), ), - TwoCustom, + dataclass_=TwoCustom, ) assert result.color == Rgb(r=10, g=20, b=30) assert result.tag == "HELLO" diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 1514f77..e14273f 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -23,7 +23,7 @@ class Config: json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", "coding"]}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" assert result.age == 30 @@ -43,7 +43,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 3 @@ -87,7 +87,7 @@ class User: ) metadata = Source(file=json_file) - result = load(metadata, User) + result = load(metadata, dataclass_=User) assert result.name == "Alice" assert result.age == 30 @@ -113,7 +113,7 @@ class User: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, User) + load(metadata, dataclass_=User) e = exc_info.value assert len(e.exceptions) == 4 @@ -157,7 +157,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -179,7 +179,7 @@ class Config: ) metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.groups == {"admins": [{"name": "Alice"}]} @@ -195,7 +195,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert 
len(e.exceptions) == 1 @@ -223,7 +223,7 @@ class Config: ) metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.teams["backend"][0].name == "Alice" assert result.teams["backend"][0].role == "admin" @@ -245,7 +245,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 2 diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index 15d3928..37e23ca 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -49,7 +49,7 @@ class Config: json_file.write_text('{"count": 10}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.count == 10 @@ -65,7 +65,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -89,7 +89,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -112,7 +112,7 @@ class Config: json_file.write_text('{"url": "https://example.com"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.url == "https://example.com" @@ -128,7 +128,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -209,7 +209,7 @@ class Config: json_file.write_text('{"count": 15, "url": "https://example.com"}') metadata = Source(file=json_file) 
- result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.count == 15 assert result.url == "https://example.com" @@ -227,7 +227,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 2 diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index e1ea5b3..a7f8a57 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -27,7 +27,7 @@ class Config: F[Config].name: MinLength(value=3), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" @@ -45,7 +45,7 @@ class Config: F[Config].port: (Gt(value=0), Lt(value=65536)), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.port == 8080 @@ -65,7 +65,7 @@ class Config: F[Config].port: Gt(value=0), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" assert result.port == 8080 @@ -89,7 +89,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -118,7 +118,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -152,7 +152,7 @@ class Config: F[Config].database.port: Gt(value=0), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.database.host == "localhost" assert result.database.port == 5432 @@ -179,7 +179,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert 
len(e.exceptions) == 1 @@ -209,7 +209,7 @@ class Config: F[Config].port: Gt(value=0), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" assert result.port == 8080 @@ -231,7 +231,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -260,7 +260,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -286,7 +286,7 @@ class Config: F[Config].name: MaxLength(value=10), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" @@ -307,7 +307,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -333,7 +333,7 @@ class Config: F[Config].port: Lt(value=65536), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.port == 8080 @@ -354,7 +354,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -383,7 +383,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -406,7 +406,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" @@ -433,7 +433,7 @@ def validate_config(obj: Config) -> bool: F[Config].port: Ge(value=0), }, ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.port == 8080 
assert result.user == "admin" diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index a63ff05..d714f18 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"age": 25}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.age == 25 @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ class Config: json_file.write_text('{"age": 18}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.age == 18 @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"age": 99}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.age == 99 @@ -113,7 +113,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"age": 100}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.age == 100 @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -175,7 +175,7 @@ class Config: 
json_file.write_text('{"age": 30}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.age == 30 @@ -191,7 +191,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_post_init_and_property.py b/tests/validators/test_post_init_and_property.py index 9fc0f1a..07ec73d 100644 --- a/tests/validators/test_post_init_and_property.py +++ b/tests/validators/test_post_init_and_property.py @@ -21,7 +21,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.port == 8080 assert result.host == "localhost" @@ -41,7 +41,7 @@ def __post_init__(self) -> None: json_file.write_text('{"port": 99999, "host": "localhost"}') with pytest.raises(ValueError, match="Invalid port: 99999"): - load(Source(file=json_file), Config) + load(Source(file=json_file), dataclass_=Config) def test_post_init_cross_field_validation(self, tmp_path: Path): @dataclass @@ -58,7 +58,7 @@ def __post_init__(self) -> None: json_file.write_text('{"min_value": 100, "max_value": 10}') with pytest.raises(ValueError, match=r"min_value \(100\) must be less than max_value \(10\)"): - load(Source(file=json_file), Config) + load(Source(file=json_file), dataclass_=Config) def test_post_init_cross_field_success(self, tmp_path: Path): @dataclass @@ -74,7 +74,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 1, "max_value": 100}') - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.min_value == 1 assert result.max_value == 100 @@ -170,7 
+170,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.base_url == "http://localhost:8080" @@ -207,7 +207,7 @@ def address(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.address == "localhost:8080" @@ -241,6 +241,6 @@ def email(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"_email": " Admin@Example.COM "}') - result = load(Source(file=json_file), Config) + result = load(Source(file=json_file), dataclass_=Config) assert result.email == "admin@example.com" diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index d99e03b..d75448b 100644 --- a/tests/validators/test_root_validator.py +++ b/tests/validators/test_root_validator.py @@ -27,7 +27,7 @@ def validate_config(obj: Config) -> bool: file=json_file, root_validators=(RootValidator(func=validate_config),), ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.port == 80 assert result.user == "root" @@ -52,7 +52,7 @@ def validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -82,7 +82,7 @@ def validate_step(obj: Config) -> bool: RootValidator(func=validate_step), ), ) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.min_value == 10 assert result.max_value == 100 @@ -113,7 +113,7 @@ def validate_step(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e 
= exc_info.value assert len(e.exceptions) == 1 @@ -138,7 +138,7 @@ def validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -198,7 +198,7 @@ def validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index 85cd932..8d78c75 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing"]}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.tags == ["python", "typing"] @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing"]}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.tags == ["python", "typing"] @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing", "validation"]}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.tags == ["python", "typing", "validation"] @@ -113,7 +113,7 @@ class Config: metadata = Source(file=json_file) with 
pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing", "validation"]}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.tags == ["python", "typing", "validation"] @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index 7feac88..a0f1e61 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.name == "Alice" @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"email": "test@example.com"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.email == "test@example.com" @@ -113,7 +113,7 @@ class 
Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"username": "john_doe"}') metadata = Source(file=json_file) - result = load(metadata, Config) + result = load(metadata, dataclass_=Config) assert result.username == "john_doe" @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, Config) + load(metadata, dataclass_=Config) e = exc_info.value assert len(e.exceptions) == 1 From 1f00da3a0e22dfe24c3446d3e7ac136c71b50eaf Mon Sep 17 00:00:00 2001 From: niccolum Date: Sun, 29 Mar 2026 13:10:37 +0300 Subject: [PATCH 05/36] fix dature error for loading without sources or with wrong sources --- .../validation/validation_annotated.py | 2 +- .../features/validation/validation_custom.py | 2 +- .../validation/validation_metadata.py | 2 +- .../features/validation/validation_root.py | 2 +- src/dature/main.py | 66 ++++++++++++++++--- 5 files changed, 60 insertions(+), 14 deletions(-) diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index 6109e09..d5babf4 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -24,7 +24,7 @@ class ServiceConfig: try: dature.load( dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), - ServiceConfig, + dataclass_=ServiceConfig, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_annotated_invalid.json5") diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index 5f2f603..d7d541c 100644 --- a/examples/docs/features/validation/validation_custom.py +++ 
b/examples/docs/features/validation/validation_custom.py @@ -38,7 +38,7 @@ class ServiceConfig: try: dature.load( dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), - ServiceConfig, + dataclass_=ServiceConfig, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_custom_invalid.json5") diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 1ed2bdd..968a6c2 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -27,7 +27,7 @@ class Config: dature.F[Config].port: (Ge(value=1), Lt(value=65536)), }, ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_metadata_invalid.yaml") diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index 1aa9fc4..9a37270 100644 --- a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -34,7 +34,7 @@ def check_debug_not_on_production(obj: Config) -> bool: ), ), ), - Config, + dataclass_=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_root_invalid.yaml") diff --git a/src/dature/main.py b/src/dature/main.py index 4447a98..44ebb75 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -81,29 +81,26 @@ def load( # noqa: PLR0913 if debug is None: debug = config.loading.debug + _validate_sources(sources) + if len(sources) > 1: - merge_meta = _MergeConfig( + return _load_multi( sources=sources, + dataclass_=dataclass_, + cache=cache, + debug=debug, strategy=strategy, field_merges=field_merges, field_groups=field_groups, skip_broken_sources=skip_broken_sources, skip_invalid_fields=skip_invalid_fields, - expand_env_vars=expand_env_vars or "default", + expand_env_vars=expand_env_vars, secret_field_names=secret_field_names, 
mask_secrets=mask_secrets, type_loaders=type_loaders, nested_resolve_strategy=nested_resolve_strategy, nested_resolve=nested_resolve, ) - merge_type_loaders = (merge_meta.type_loaders or ()) + config.type_loaders - if dataclass_ is not None: - return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders) - return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders) - - if not sources: - msg = "load() requires at least one Source" - raise TypeError(msg) metadata = sources[0] @@ -140,3 +137,52 @@ def load( # noqa: PLR0913 cache=cache, debug=debug, ) + + +def _validate_sources(sources: tuple[Source, ...]) -> None: + for source in sources: + if not isinstance(source, Source): + msg = f"load() positional arguments must be Source instances, got {source!r}" + raise TypeError(msg) + + if not sources: + msg = "load() requires at least one Source" + raise TypeError(msg) + + +def _load_multi( # noqa: PLR0913 + *, + sources: tuple[Source, ...], + dataclass_: type[DataclassInstance] | None, + cache: bool, + debug: bool, + strategy: MergeStrategy, + field_merges: tuple[MergeRule, ...], + field_groups: tuple[FieldGroup, ...], + skip_broken_sources: bool, + skip_invalid_fields: bool, + expand_env_vars: ExpandEnvVarsMode | None, + secret_field_names: tuple[str, ...] | None, + mask_secrets: bool | None, + type_loaders: tuple[TypeLoader, ...] 
| None, + nested_resolve_strategy: NestedResolveStrategy | None, + nested_resolve: NestedResolve | None, +) -> DataclassInstance | Callable[[type[DataclassInstance]], type[DataclassInstance]]: + merge_meta = _MergeConfig( + sources=sources, + strategy=strategy, + field_merges=field_merges, + field_groups=field_groups, + skip_broken_sources=skip_broken_sources, + skip_invalid_fields=skip_invalid_fields, + expand_env_vars=expand_env_vars or "default", + secret_field_names=secret_field_names, + mask_secrets=mask_secrets, + type_loaders=type_loaders, + nested_resolve_strategy=nested_resolve_strategy, + nested_resolve=nested_resolve, + ) + merge_type_loaders = (merge_meta.type_loaders or ()) + config.type_loaders + if dataclass_ is not None: + return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders) + return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders) From ee883daa7bb057a596efc764863f7bee30f56b69 Mon Sep 17 00:00:00 2001 From: niccolum Date: Sun, 29 Mar 2026 13:14:00 +0300 Subject: [PATCH 06/36] add towncrier changes --- changes/+import-style.refactor | 1 + changes/+remove-merge-class.refactor | 1 + changes/+rename-file-param.refactor | 1 + pyproject.toml | 5 +++++ 4 files changed, 8 insertions(+) create mode 100644 changes/+import-style.refactor create mode 100644 changes/+remove-merge-class.refactor create mode 100644 changes/+rename-file-param.refactor diff --git a/changes/+import-style.refactor b/changes/+import-style.refactor new file mode 100644 index 0000000..2270769 --- /dev/null +++ b/changes/+import-style.refactor @@ -0,0 +1 @@ +Recommended import style changed from `from dature import load, Source` to `import dature` with access via `dature.load()`, `dature.Source()`. 
diff --git a/changes/+remove-merge-class.refactor b/changes/+remove-merge-class.refactor new file mode 100644 index 0000000..03b850d --- /dev/null +++ b/changes/+remove-merge-class.refactor @@ -0,0 +1 @@ +`Merge` class has been removed. Use `load()` with multiple `Source` arguments instead. diff --git a/changes/+rename-file-param.refactor b/changes/+rename-file-param.refactor new file mode 100644 index 0000000..b143f9c --- /dev/null +++ b/changes/+rename-file-param.refactor @@ -0,0 +1 @@ +`Source(file_=...)` has been renamed to `Source(file=...)`. diff --git a/pyproject.toml b/pyproject.toml index 419fa92..f857933 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -181,6 +181,11 @@ directory = "doc" name = "Docs" showcontent = true +[[tool.towncrier.type]] +directory = "refactor" +name = "Refactoring" +showcontent = true + [[tool.towncrier.type]] directory = "removal" name = "Removals" From b684ea12bf8ebe2a06c09f255ad954fa1b2bb924 Mon Sep 17 00:00:00 2001 From: niccolum Date: Mon, 30 Mar 2026 22:17:18 +0300 Subject: [PATCH 07/36] refactor enums --- README.md | 2 +- changes/+internal-enum-hints.refactor | 1 + changes/+simplify-configure.refactor | 1 + changes/+simplify-enums.removal | 1 + changes/+simplify-field-group.removal | 1 + changes/+simplify-merge-rule.removal | 1 + changes/+simplify-type-loader.removal | 1 + docs/advanced/custom_types.md | 15 +- docs/advanced/merge-rules.md | 30 ++-- docs/api-reference.md | 54 +++--- docs/comparison/why-not-dynaconf.md | 2 +- docs/comparison/why-not-pydantic-settings.md | 2 +- docs/features/merging.md | 20 +-- .../advanced/configure/advanced_configure.py | 5 +- .../configure/advanced_configure_env.py | 5 +- .../advanced_configure_type_loaders.py | 2 +- .../docs/advanced/custom_types/custom_type.py | 2 +- .../custom_types/custom_type_merge.py | 2 +- .../advanced_field_groups_expansion_error.py | 4 +- .../advanced_field_groups_multiple_error.py | 4 +- .../advanced_field_groups_nested_error.py | 4 +- 
.../advanced_merge_rules_callable.py | 4 +- .../advanced_merge_rules_conflict.py | 12 +- .../merge_rules/merging_field_append.py | 2 +- .../merging_field_append_unique.py | 2 +- .../merge_rules/merging_field_first_wins.py | 2 +- .../merge_rules/merging_field_groups.py | 2 +- .../merge_rules/merging_field_last_wins.py | 2 +- .../merge_rules/merging_field_prepend.py | 2 +- .../merging_field_prepend_unique.py | 2 +- .../merge_rules/merging_first_found.py | 2 +- .../why-not-dynaconf/dynaconf_merge.py | 2 +- .../features/masking/masking_classic_style.py | 5 +- .../docs/features/merging/merging_basic.py | 2 +- .../features/merging/merging_strategies.py | 4 +- .../merging/merging_strategy_first_found.py | 2 +- .../merging/merging_strategy_first_wins.py | 2 +- .../merging/merging_strategy_last_wins.py | 2 +- .../merging_strategy_raise_on_conflict.py | 2 +- src/dature/__init__.py | 7 +- src/dature/config.py | 73 +++++--- src/dature/load_report.py | 8 +- src/dature/loading/multi.py | 33 ++-- src/dature/loading/resolver.py | 6 +- src/dature/loading/source_loading.py | 17 +- src/dature/main.py | 61 ++++--- src/dature/merging/deep_merge.py | 34 ++-- src/dature/merging/predicate.py | 27 +-- src/dature/merging/strategy.py | 17 ++ src/dature/metadata.py | 75 ++------ src/dature/sources_loader/base.py | 6 +- src/dature/sources_loader/flat_key.py | 4 +- src/dature/types.py | 21 ++- tests/loading/test_field_merges.py | 167 +++++++++--------- tests/loading/test_multi.py | 36 ++-- tests/loading/test_skip_invalid_fields.py | 4 +- tests/masking/test_masking.py | 15 +- tests/merging/test_deep_merge.py | 35 ++-- tests/merging/test_field_group.py | 66 +++---- tests/merging/test_predicate.py | 39 ++-- tests/test_config.py | 22 +-- tests/test_load_report.py | 14 +- tests/test_type_loaders.py | 18 +- 63 files changed, 503 insertions(+), 514 deletions(-) create mode 100644 changes/+internal-enum-hints.refactor create mode 100644 changes/+simplify-configure.refactor create mode 100644 
changes/+simplify-enums.removal create mode 100644 changes/+simplify-field-group.removal create mode 100644 changes/+simplify-merge-rule.removal create mode 100644 changes/+simplify-type-loader.removal create mode 100644 src/dature/merging/strategy.py diff --git a/README.md b/README.md index a323ac3..3241b5b 100644 --- a/README.md +++ b/README.md @@ -59,7 +59,7 @@ config = dature.load(dature.Source(file="config.yaml"), Config) ## Key Features - **Multiple formats** — YAML, JSON, JSON5, TOML, INI, ENV, environment variables, Docker secrets -- **Merging** — combine multiple sources with configurable strategies (`LAST_WINS`, `FIRST_WINS`, `RAISE_ON_CONFLICT`) +- **Merging** — combine multiple sources with configurable strategies (`"last_wins"`, `"first_wins"`, `"raise_on_conflict"`) - **Validation** — `Annotated` field validators, root validators, `__post_init__` support - **Naming** — automatic field name mapping (`snake_case` ↔ `camelCase` ↔ `UPPER_SNAKE` etc.) - **Secret masking** — automatic masking in error messages and logs by field type, name, or heuristic diff --git a/changes/+internal-enum-hints.refactor b/changes/+internal-enum-hints.refactor new file mode 100644 index 0000000..5f09956 --- /dev/null +++ b/changes/+internal-enum-hints.refactor @@ -0,0 +1 @@ +Internal type hints now use `MergeStrategyEnum`/`FieldMergeStrategyEnum` instead of `MergeStrategyName`/`FieldMergeStrategyName` Literal aliases. Public API type hints remain unchanged. diff --git a/changes/+simplify-configure.refactor b/changes/+simplify-configure.refactor new file mode 100644 index 0000000..72620f9 --- /dev/null +++ b/changes/+simplify-configure.refactor @@ -0,0 +1 @@ +`configure()` now accepts dicts instead of dataclass instances: `masking={"mask": "***"}`, `error_display={"max_visible_lines": 5}`, `loading={"debug": True}`, `type_loaders={MyType: my_loader}`. 
diff --git a/changes/+simplify-enums.removal b/changes/+simplify-enums.removal new file mode 100644 index 0000000..8adfc48 --- /dev/null +++ b/changes/+simplify-enums.removal @@ -0,0 +1 @@ +Removed `MergeStrategy` and `FieldMergeStrategy` enums from public API. Use string literals instead: `"last_wins"`, `"first_wins"`, `"first_found"`, `"raise_on_conflict"` for merge strategies; `"first_wins"`, `"last_wins"`, `"append"`, `"append_unique"`, `"prepend"`, `"prepend_unique"` for field merge strategies. diff --git a/changes/+simplify-field-group.removal b/changes/+simplify-field-group.removal new file mode 100644 index 0000000..7c216fc --- /dev/null +++ b/changes/+simplify-field-group.removal @@ -0,0 +1 @@ +Removed `FieldGroup` dataclass from public API. Pass `field_groups` as `tuple[tuple[F[Config].field, ...], ...]` instead. diff --git a/changes/+simplify-merge-rule.removal b/changes/+simplify-merge-rule.removal new file mode 100644 index 0000000..6039352 --- /dev/null +++ b/changes/+simplify-merge-rule.removal @@ -0,0 +1 @@ +Removed `MergeRule` dataclass from public API. Pass `field_merges` as `dict` mapping `F[Config].field` to a strategy string or callable instead. diff --git a/changes/+simplify-type-loader.removal b/changes/+simplify-type-loader.removal new file mode 100644 index 0000000..8e36483 --- /dev/null +++ b/changes/+simplify-type-loader.removal @@ -0,0 +1 @@ +Removed `TypeLoader` dataclass from public API. Pass `type_loaders` as `dict[type, Callable]` instead. diff --git a/docs/advanced/custom_types.md b/docs/advanced/custom_types.md index b3daa26..8fae43f 100644 --- a/docs/advanced/custom_types.md +++ b/docs/advanced/custom_types.md @@ -4,7 +4,7 @@ Use `type_loaders` to teach dature how to parse custom types from strings. 
-Each `TypeLoader` maps a type to a conversion function: +Pass `type_loaders` as a `dict[type, Callable]` mapping types to conversion functions: ```python --8<-- "examples/docs/advanced/custom_types/custom_type.py" @@ -36,18 +36,7 @@ Each `TypeLoader` maps a type to a conversion function: --8<-- "examples/docs/advanced/custom_types/advanced_configure_type_loaders.py" ``` -When both per-source and global `type_loaders` are set, they merge — per-source loaders take priority (placed first in the recipe). - -### TypeLoader Reference - -```python ---8<-- "src/dature/metadata.py:type-loader" -``` - -| Parameter | Description | -|-----------|-------------| -| `type_` | The target type to register a loader for | -| `func` | A callable that converts the raw value to the target type | +When both per-source and global `type_loaders` are set, they merge — per-source loaders take priority. ## Custom Loaders diff --git a/docs/advanced/merge-rules.md b/docs/advanced/merge-rules.md index 327174a..d27d7d5 100644 --- a/docs/advanced/merge-rules.md +++ b/docs/advanced/merge-rules.md @@ -23,16 +23,16 @@ graph TD Override the global strategy for individual fields using `field_merges`. 
-All available `FieldMergeStrategy` values: +Available field merge strategies: | Strategy | Behavior | |----------|----------| -| `FIRST_WINS` | Keep the value from the first source | -| `LAST_WINS` | Keep the value from the last source | -| `APPEND` | Concatenate lists: `base + override` | -| `APPEND_UNIQUE` | Concatenate lists, removing duplicates | -| `PREPEND` | Concatenate lists: `override + base` | -| `PREPEND_UNIQUE` | Concatenate lists in reverse order, removing duplicates | +| `"first_wins"` | Keep the value from the first source | +| `"last_wins"` | Keep the value from the last source | +| `"append"` | Concatenate lists: `base + override` | +| `"append_unique"` | Concatenate lists, removing duplicates | +| `"prepend"` | Concatenate lists: `override + base` | +| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates | Given two sources with overlapping `tags`: @@ -50,37 +50,37 @@ Given two sources with overlapping `tags`: Each strategy produces a different result: -=== "FIRST_WINS" +=== "first_wins" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_first_wins.py" ``` -=== "LAST_WINS" +=== "last_wins" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_last_wins.py" ``` -=== "APPEND" +=== "append" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_append.py" ``` -=== "APPEND_UNIQUE" +=== "append_unique" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_append_unique.py" ``` -=== "PREPEND" +=== "prepend" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_prepend.py" ``` -=== "PREPEND_UNIQUE" +=== "prepend_unique" ```python --8<-- "examples/docs/advanced/merge_rules/merging_field_prepend_unique.py" @@ -88,9 +88,9 @@ Each strategy produces a different result: Nested fields are supported: `dature.F[Config].database.host`. -Per-field strategies work with `RAISE_ON_CONFLICT` — fields with an explicit strategy are excluded from conflict detection. 
+Per-field strategies work with `"raise_on_conflict"` — fields with an explicit strategy are excluded from conflict detection. -## With RAISE_ON_CONFLICT +## With raise_on_conflict Fields with an explicit strategy are excluded from conflict detection: diff --git a/docs/api-reference.md b/docs/api-reference.md index 2eb31f9..56eee2b 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -30,15 +30,15 @@ Main entry point. Two calling patterns: | `dataclass_` | `type[T] \| None` | Target dataclass. If provided → function mode. If `None` → decorator mode. | | `cache` | `bool \| None` | Enable caching in decorator mode. Default from `configure()`. | | `debug` | `bool \| None` | Collect `LoadReport`. Default from `configure()`. | -| `strategy` | `MergeStrategy` | Merge strategy (default `LAST_WINS`). Only used with multiple sources. | -| `field_merges` | `tuple[MergeRule, ...]` | Per-field merge strategy overrides. | -| `field_groups` | `tuple[FieldGroup, ...]` | Groups of fields that must change together. | +| `strategy` | `str` | Merge strategy: `"last_wins"` (default), `"first_wins"`, `"first_found"`, `"raise_on_conflict"`. Only used with multiple sources. | +| `field_merges` | `dict` | Per-field merge strategy overrides. Maps `F[Config].field` to a strategy string or callable. | +| `field_groups` | `tuple[tuple[...], ...]` | Groups of fields that must change together. Each group is a tuple of `F[Config].field` references. | | `skip_broken_sources` | `bool` | Skip sources that fail to load (default `False`). | | `skip_invalid_fields` | `bool` | Skip fields that fail validation (default `False`). | | `expand_env_vars` | `ExpandEnvVarsMode` | Env var expansion mode for all sources (default `"default"`). | | `secret_field_names` | `tuple[str, ...] \| None` | Extra secret field name patterns. | | `mask_secrets` | `bool \| None` | Enable/disable secret masking globally. | -| `type_loaders` | `tuple[TypeLoader, ...] \| None` | Custom type loaders. 
| +| `type_loaders` | `dict[type, Callable] \| None` | Custom type loaders mapping types to conversion functions. | | `nested_resolve_strategy` | `NestedResolveStrategy \| None` | Default priority for JSON vs flat keys. See [Nested Resolve](advanced/nested-resolve.md). | | `nested_resolve` | `NestedResolve \| None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). | @@ -54,37 +54,25 @@ See [Introduction — Source Reference](introduction.md#source-reference) for pa --- -### `MergeStrategy` +### Merge Strategies -```python ---8<-- "src/dature/metadata.py:merge-strategy" -``` - ---- - -### `FieldMergeStrategy` - -```python ---8<-- "src/dature/metadata.py:field-merge-strategy" -``` - ---- +| Strategy | Behavior | +|----------|----------| +| `"last_wins"` | Last source overrides (default) | +| `"first_wins"` | First source wins | +| `"first_found"` | Uses the first source that loads successfully | +| `"raise_on_conflict"` | Raises `MergeConflictError` on conflicting values | -### `MergeRule` - -```python ---8<-- "src/dature/metadata.py:merge-rule" -``` - ---- - -### `FieldGroup` - -```python ---8<-- "src/dature/metadata.py:field-group" -``` +### Field Merge Strategies -Usage: `dature.FieldGroup(dature.F[Config].host, dature.F[Config].port)` +| Strategy | Behavior | +|----------|----------| +| `"first_wins"` | Keep the value from the first source | +| `"last_wins"` | Keep the value from the last source | +| `"append"` | Concatenate lists: `base + override` | +| `"append_unique"` | Concatenate lists, removing duplicates | +| `"prepend"` | Concatenate lists: `override + base` | +| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates | --- @@ -126,7 +114,7 @@ Returns the `LoadReport` attached to a loaded instance (or type on error). Retur --8<-- "src/dature/config.py:configure" ``` -Set global configuration. `None` parameters keep their current values. +Set global configuration. 
Pass dicts to override specific options: `masking={"mask": "***"}`, `loading={"debug": True}`. `None` parameters keep their current values. Empty dict resets the group to defaults. ### `MaskingConfig` diff --git a/docs/comparison/why-not-dynaconf.md b/docs/comparison/why-not-dynaconf.md index f051857..a856576 100644 --- a/docs/comparison/why-not-dynaconf.md +++ b/docs/comparison/why-not-dynaconf.md @@ -15,7 +15,7 @@ The trade-off is **how** it covers it: Dynaconf is powerful and battle-tested, b | **Validation** | Separate `Validator` objects | Both: `Annotated` inline validators + separate root/custom validators | | **Formats** | YAML, TOML, JSON, INI, `.env`, Python files | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets | | **Remote sources** | Vault, Redis + community plugins | Not yet (planned) | -| **Merging** | Layered override + `dynaconf_merge` | 4 strategies + per-field rules (`APPEND`, `PREPEND`, field groups, etc.) | +| **Merging** | Layered override + `dynaconf_merge` | 4 strategies + per-field rules (`"append"`, `"prepend"`, field groups, etc.) | | **Dynamic variables** | `@format`, `@jinja` templates with lazy evaluation | `${VAR:-default}` env expansion in all formats + file paths | | **CLI** | `dynaconf list`, `inspect`, `write`, `validate`, etc. 
| No CLI | | **Per-environment files** | Built-in (`[development]`, `[production]` sections) | Manual via multiple `Source` objects | diff --git a/docs/comparison/why-not-pydantic-settings.md b/docs/comparison/why-not-pydantic-settings.md index 97cb3e4..cc781de 100644 --- a/docs/comparison/why-not-pydantic-settings.md +++ b/docs/comparison/why-not-pydantic-settings.md @@ -10,7 +10,7 @@ The trade-off is coupling: your config must be a Pydantic model, custom types ne |---|---|---| | **Base class** | `BaseSettings` (Pydantic model) | stdlib `@dataclass` | | **Formats** | `.env`, env vars, JSON, YAML, TOML + custom sources | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets — auto-detected | -| **Merging** | Fixed priority order (init > env > dotenv > secrets > defaults) | 4 strategies + per-field rules (`APPEND`, `PREPEND`, field groups, etc.) | +| **Merging** | Fixed priority order (init > env > dotenv > secrets > defaults) | 4 strategies + per-field rules (`"append"`, `"prepend"`, field groups, etc.) 
| | **Skip broken sources** | No | Yes — `skip_if_broken`, `skip_if_invalid` | | **Field groups** | No | Yes — enforce related fields are overridden together | | **Naming conventions** | `alias` / `alias_generator` (`to_camel`, `to_pascal`, `to_snake`) | Built-in `name_style` (6 conventions) + explicit `field_mapping` with multiple aliases | diff --git a/docs/features/merging.md b/docs/features/merging.md index 961dd5d..80da4c2 100644 --- a/docs/features/merging.md +++ b/docs/features/merging.md @@ -26,7 +26,7 @@ Pass multiple `Source` objects to `dature.load()`: ## Multiple Sources -Multiple sources use `LAST_WINS` by default: +Multiple sources use `"last_wins"` by default: === "Python" @@ -64,14 +64,14 @@ Works as a decorator too: | Strategy | Behavior | |----------|----------| -| `LAST_WINS` | Last source overrides (default) | -| `FIRST_WINS` | First source wins | -| `FIRST_FOUND` | Uses the first source that loads successfully, skips broken sources automatically | -| `RAISE_ON_CONFLICT` | Raises `MergeConflictError` if the same key appears in multiple sources with different values | +| `"last_wins"` | Last source overrides (default) | +| `"first_wins"` | First source wins | +| `"first_found"` | Uses the first source that loads successfully, skips broken sources automatically | +| `"raise_on_conflict"` | Raises `MergeConflictError` if the same key appears in multiple sources with different values | Nested dicts are merged recursively. Lists and scalars are replaced entirely according to the strategy. -=== "LAST_WINS" +=== "last_wins" Last source overrides earlier ones. This is the default strategy. @@ -91,7 +91,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc --8<-- "examples/docs/shared/common_overrides.yaml" ``` -=== "FIRST_WINS" +=== "first_wins" First source wins on conflict. Later sources only fill in missing keys. @@ -111,7 +111,7 @@ Nested dicts are merged recursively. 
Lists and scalars are replaced entirely acc --8<-- "examples/docs/shared/common_overrides.yaml" ``` -=== "FIRST_FOUND" +=== "first_found" Uses the first source that loads successfully and ignores the rest. Broken sources (missing file, parse error) are skipped automatically — no `skip_if_broken` needed. Type errors (wrong type, missing field) are **not** skipped. @@ -125,7 +125,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc --8<-- "examples/docs/shared/common_defaults.yaml" ``` -=== "RAISE_ON_CONFLICT" +=== "raise_on_conflict" Raises `MergeConflictError` if the same key appears in multiple sources with different values. Works best when sources have disjoint keys. @@ -153,7 +153,7 @@ All merge-related parameters are passed directly to `dature.load()` as keyword a | Parameter | Description | |-----------|-------------| -| `strategy` | Global merge strategy. Default: `LAST_WINS`. See [Merge Strategies](#merge-strategies) | +| `strategy` | Global merge strategy. Default: `"last_wins"`. See [Merge Strategies](#merge-strategies) | | `field_merges` | Per-field merge strategy overrides. See [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies) | | `field_groups` | Enforce related fields are overridden together. See [Field Groups](../advanced/merge-rules.md#field-groups) | | `skip_broken_sources` | Skip sources that fail to load. See [Skipping Broken Sources](../advanced/merge-rules.md#skipping-broken-sources) | diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index aa43109..c455f52 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -4,7 +4,6 @@ from pathlib import Path import dature -from dature.config import LoadingConfig SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -22,14 +21,14 @@ class Config: assert report is None # 2. 
Enable debug globally via dature.configure() -dature.configure(loading=LoadingConfig(debug=True)) +dature.configure(loading={"debug": True}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is not None # 3. Reset to defaults — debug is off again -dature.configure(loading=LoadingConfig()) +dature.configure(loading={}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index 49eea58..bd1be13 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -5,7 +5,6 @@ from pathlib import Path import dature -from dature.config import LoadingConfig SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -26,14 +25,14 @@ class Config: assert report is not None # 2. Override env with dature.configure() — debug is off -dature.configure(loading=LoadingConfig(debug=False)) +dature.configure(loading={"debug": False}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) assert report is None # 3. 
Reset to env defaults — debug is on again -dature.configure(loading=LoadingConfig(debug=True)) +dature.configure(loading={"debug": True}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) report = dature.get_load_report(config) diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index c70ec45..e0595ea 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -27,7 +27,7 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to every load() call -dature.configure(type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),)) +dature.configure(type_loaders={Rgb: rgb_from_string}) config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), dataclass_=AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index 4eb75d9..8327a55 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -29,7 +29,7 @@ class AppConfig: config = dature.load( dature.Source( file=SOURCES_DIR / "custom_type_common.yaml", - type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ), dataclass_=AppConfig, ) diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index d0cb0bd..0452872 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -30,7 +30,7 @@ class AppConfig: dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), dature.Source(file=SOURCES_DIR / 
"custom_type_merge_override.yaml"), dataclass_=AppConfig, - type_loaders=(dature.TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ) assert config == AppConfig(name="my-app", color=Rgb(r=100, g=200, b=50)) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 736ac92..37897e5 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -23,14 +23,14 @@ class Config: database: Database -# dature.FieldGroup(dature.F[Config].database, dature.F[Config].port) +# (dature.F[Config].database, dature.F[Config].port) # expands to (database.host, database.port, port) try: dature.load( dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), dataclass_=Config, - field_groups=(dature.FieldGroup(dature.F[Config].database, dature.F[Config].port),), + field_groups=((dature.F[Config].database, dature.F[Config].port),), ) except FieldGroupError as exc: defaults_path = str(SOURCES_DIR / "field_groups_nested_defaults.yaml") diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index 688a9b8..ae4a6e4 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -26,8 +26,8 @@ class Config: dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), dataclass_=Config, field_groups=( - dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), - dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), + (dature.F[Config].host, dature.F[Config].port), + 
(dature.F[Config].user, dature.F[Config].password), ), ) except FieldGroupError as exc: diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 46efeb9..7b0eca8 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -26,8 +26,8 @@ class Config: dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), dataclass_=Config, field_groups=( - dature.FieldGroup(dature.F[Config].host, dature.F[Config].port), - dature.FieldGroup(dature.F[Config].user, dature.F[Config].password), + (dature.F[Config].host, dature.F[Config].port), + (dature.F[Config].user, dature.F[Config].password), ), ) except FieldGroupError as exc: diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index 84bc899..1b61004 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -24,8 +24,8 @@ def merge_tags(values: list[Any]) -> list[str]: dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.LAST_WINS, - field_merges=(dature.MergeRule(dature.F[Config].tags, merge_tags),), + strategy="last_wins", + field_merges={dature.F[Config].tags: merge_tags}, ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index aefdafc..a002a21 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -19,12 +19,12 @@ class Config: 
dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, - field_merges=( - dature.MergeRule(dature.F[Config].host, dature.FieldMergeStrategy.LAST_WINS), - dature.MergeRule(dature.F[Config].port, dature.FieldMergeStrategy.LAST_WINS), - dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE), - ), + strategy="raise_on_conflict", + field_merges={ + dature.F[Config].host: "last_wins", + dature.F[Config].port: "last_wins", + dature.F[Config].tags: "append_unique", + }, ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py index b9de4bd..b2c6d4d 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND),), + field_merges={dature.F[Config].tags: "append"}, ) assert config.tags == ["web", "default", "web", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index ce0b30d..83e27cb 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.APPEND_UNIQUE),), + field_merges={dature.F[Config].tags: "append_unique"}, ) 
assert config.tags == ["web", "default", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index d9e6d9c..e65b0c6 100644 --- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.FIRST_WINS),), + field_merges={dature.F[Config].tags: "first_wins"}, ) assert config.tags == ["web", "default"] diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/merge_rules/merging_field_groups.py index c2bf029..fbaccc2 100644 --- a/examples/docs/advanced/merge_rules/merging_field_groups.py +++ b/examples/docs/advanced/merge_rules/merging_field_groups.py @@ -21,7 +21,7 @@ class Config: dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), dataclass_=Config, - field_groups=(dature.FieldGroup(dature.F[Config].host, dature.F[Config].port),), + field_groups=((dature.F[Config].host, dature.F[Config].port),), ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index 2f5e4b8..f332cfb 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.LAST_WINS),), + 
field_merges={dature.F[Config].tags: "last_wins"}, ) assert config.tags == ["web", "api"] diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index 3991e33..18155bb 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND),), + field_merges={dature.F[Config].tags: "prepend"}, ) assert config.tags == ["web", "api", "web", "default"] diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index d5d31b4..c80af2e 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -17,7 +17,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), dataclass_=Config, - field_merges=(dature.MergeRule(dature.F[Config].tags, dature.FieldMergeStrategy.PREPEND_UNIQUE),), + field_merges={dature.F[Config].tags: "prepend_unique"}, ) assert config.tags == ["web", "api", "default"] diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index 925366c..c2e83e5 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -18,7 +18,7 @@ class Config: dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), dataclass_=Config, - 
strategy=dature.MergeStrategy.FIRST_FOUND, + strategy="first_found", ) assert config.host == "production-host" diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index ce34e34..b674f08 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), dataclass_=Config, - strategy=dature.MergeStrategy.LAST_WINS, + strategy="last_wins", ) # --8<-- [end:merge] diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py index e74ee72..2969e12 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -4,13 +4,12 @@ from pathlib import Path import dature -from dature.config import MaskingConfig from dature.masking.masking import mask_value SOURCES_DIR = Path(__file__).parent / "sources" # --8<-- [start:classic-style] -dature.configure(masking=MaskingConfig(mask="*****", visible_prefix=2, visible_suffix=2)) +dature.configure(masking={"mask": "*****", "visible_prefix": 2, "visible_suffix": 2}) # "my_secret_password" → "my*****rd" # "ab" → "ab" (too short — shown as-is) # --8<-- [end:classic-style] @@ -26,4 +25,4 @@ class Config: assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" -dature.configure(masking=MaskingConfig()) +dature.configure(masking={}) diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index 3a703e9..f6f4f5b 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SHARED_DIR / 
"common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.LAST_WINS, + strategy="last_wins", ) assert config.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index beed050..a91785e 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py @@ -19,14 +19,14 @@ class Config: dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.LAST_WINS, + strategy="last_wins", ) first_wins = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.FIRST_WINS, + strategy="first_wins", ) assert last_wins.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index 8118d6b..8c5f3f4 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -20,7 +20,7 @@ class Config: dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.FIRST_FOUND, + strategy="first_found", ) # nonexistent.yaml is skipped, common_defaults.yaml is used entirely diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index 69e24c1..5d65aa3 100644 --- a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SHARED_DIR / 
"common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.FIRST_WINS, + strategy="first_wins", ) assert config.host == "localhost" diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py index 1c27d36..5946ae4 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.LAST_WINS, + strategy="last_wins", ) assert config.host == "production.example.com" diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 19e2f45..47a0540 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), dataclass_=Config, - strategy=dature.MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) # Disjoint keys — no conflict diff --git a/src/dature/__init__.py b/src/dature/__init__.py index 09ee17b..6fe3f9d 100644 --- a/src/dature/__init__.py +++ b/src/dature/__init__.py @@ -3,16 +3,11 @@ from dature.field_path import F from dature.load_report import get_load_report from dature.main import load -from dature.metadata import FieldGroup, FieldMergeStrategy, MergeRule, MergeStrategy, Source, TypeLoader +from dature.metadata import Source __all__ = [ "F", - "FieldGroup", - "FieldMergeStrategy", - "MergeRule", - "MergeStrategy", "Source", - "TypeLoader", "__version__", "configure", 
"get_load_report", diff --git a/src/dature/config.py b/src/dature/config.py index bc76cf5..b77c6cd 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -1,13 +1,10 @@ -from dataclasses import dataclass -from typing import TYPE_CHECKING, Annotated +from dataclasses import asdict, dataclass +from typing import Annotated, Any, ClassVar, TypedDict, cast -from dature.types import NestedResolveStrategy +from dature.types import NestedResolveStrategy, TypeLoaderMap from dature.validators.number import Ge from dature.validators.string import MinLength -if TYPE_CHECKING: - from dature.metadata import TypeLoader - # --8<-- [start:masking-config] @dataclass(frozen=True, slots=True) @@ -71,10 +68,31 @@ def _load_config() -> DatureConfig: return load(Source(prefix="DATURE_"), dataclass_=DatureConfig) +class MaskingOptions(TypedDict, total=False): + mask: str + visible_prefix: int + visible_suffix: int + min_heuristic_length: int + heuristic_threshold: float + secret_field_names: tuple[str, ...] 
+ mask_secrets: bool + + +class ErrorDisplayOptions(TypedDict, total=False): + max_visible_lines: int + max_line_length: int + + +class LoadingOptions(TypedDict, total=False): + cache: bool + debug: bool + nested_resolve_strategy: NestedResolveStrategy + + class _ConfigProxy: _instance: DatureConfig | None = None _loading: bool = False - _type_loaders: "tuple[TypeLoader, ...]" = () + _type_loaders: ClassVar[TypeLoaderMap] = {} @staticmethod def ensure_loaded() -> DatureConfig: @@ -94,7 +112,7 @@ def set_instance(value: DatureConfig | None) -> None: _ConfigProxy._instance = value @staticmethod - def set_type_loaders(value: "tuple[TypeLoader, ...]") -> None: + def set_type_loaders(value: TypeLoaderMap) -> None: _ConfigProxy._type_loaders = value @property @@ -110,7 +128,7 @@ def loading(self) -> LoadingConfig: return self.ensure_loaded().loading @property - def type_loaders(self) -> "tuple[TypeLoader, ...]": + def type_loaders(self) -> TypeLoaderMap: return _ConfigProxy._type_loaders @@ -120,24 +138,35 @@ def type_loaders(self) -> "tuple[TypeLoader, ...]": # --8<-- [start:configure] def configure( *, - masking: MaskingConfig | None = None, - error_display: ErrorDisplayConfig | None = None, - loading: LoadingConfig | None = None, - type_loaders: "tuple[TypeLoader, ...] 
| None" = None, + masking: MaskingOptions | None = None, + error_display: ErrorDisplayOptions | None = None, + loading: LoadingOptions | None = None, + type_loaders: TypeLoaderMap | None = None, ) -> None: # --8<-- [end:configure] current = config.ensure_loaded() - if masking is None: - masking = current.masking - if error_display is None: - error_display = current.error_display - if loading is None: - loading = current.loading + + merged_masking = ( + MaskingConfig(**cast("dict[str, Any]", asdict(MaskingConfig()) | masking)) + if masking is not None + else current.masking + ) + merged_error = ( + ErrorDisplayConfig(**cast("dict[str, Any]", asdict(ErrorDisplayConfig()) | error_display)) + if error_display is not None + else current.error_display + ) + merged_loading = ( + LoadingConfig(**cast("dict[str, Any]", asdict(LoadingConfig()) | loading)) + if loading is not None + else current.loading + ) + config.set_instance( DatureConfig( - masking=masking, - error_display=error_display, - loading=loading, + masking=merged_masking, + error_display=merged_error, + loading=merged_loading, ), ) if type_loaders is not None: diff --git a/src/dature/load_report.py b/src/dature/load_report.py index 62ec1f4..f94f37d 100644 --- a/src/dature/load_report.py +++ b/src/dature/load_report.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from typing import Any -from dature.metadata import MergeStrategy +from dature.merging.strategy import MergeStrategyEnum from dature.types import JSONValue logger = logging.getLogger("dature") @@ -32,7 +32,7 @@ class FieldOrigin: @dataclass(frozen=True, slots=True, kw_only=True) class LoadReport: dataclass_name: str - strategy: MergeStrategy | None + strategy: MergeStrategyEnum | None sources: tuple[SourceEntry, ...] field_origins: tuple[FieldOrigin, ...] 
merged_data: JSONValue @@ -43,7 +43,7 @@ def compute_field_origins( *, raw_dicts: list[JSONValue], source_entries: tuple[SourceEntry, ...], - strategy: MergeStrategy, + strategy: MergeStrategyEnum, ) -> tuple[FieldOrigin, ...]: first_source: dict[str, int] = {} last_source: dict[str, int] = {} @@ -60,7 +60,7 @@ def compute_field_origins( origins: list[FieldOrigin] = [] for key in sorted(last_source): - if strategy in (MergeStrategy.FIRST_WINS, MergeStrategy.FIRST_FOUND): + if strategy in (MergeStrategyEnum.FIRST_WINS, MergeStrategyEnum.FIRST_FOUND): winner_idx = first_source[key] else: winner_idx = last_source[key] diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py index e013db6..8570394 100644 --- a/src/dature/loading/multi.py +++ b/src/dature/loading/multi.py @@ -29,9 +29,10 @@ from dature.merging.deep_merge import deep_merge, deep_merge_last_wins, raise_on_conflict from dature.merging.field_group import FieldGroupContext, validate_field_groups from dature.merging.predicate import ResolvedFieldGroup, build_field_group_paths, build_field_merge_map -from dature.metadata import FieldMergeStrategy, MergeStrategy, Source, TypeLoader, _MergeConfig +from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum +from dature.metadata import Source, _MergeConfig from dature.protocols import DataclassInstance, LoaderProtocol -from dature.types import FieldMergeCallable, JSONValue +from dature.types import FieldMergeCallable, JSONValue, TypeLoaderMap logger = logging.getLogger("dature") @@ -55,7 +56,7 @@ def _log_merge_step( # noqa: PLR0913 *, dataclass_name: str, step_idx: int, - strategy: MergeStrategy, + strategy: MergeStrategyEnum, before: JSONValue, source_data: JSONValue, after: JSONValue, @@ -68,7 +69,7 @@ def _log_merge_step( # noqa: PLR0913 "[%s] Merge step %d (strategy=%s): added=%s, overwritten=%s", dataclass_name, step_idx, - strategy.value, + strategy, sorted(added_keys), sorted(overwritten_keys), ) @@ -115,7 +116,7 @@ def 
_log_field_origins( def _build_merge_report( *, dataclass_name: str, - strategy: MergeStrategy, + strategy: MergeStrategyEnum, source_entries: tuple[SourceEntry, ...], field_origins: tuple[FieldOrigin, ...], merged_data: JSONValue, @@ -217,9 +218,9 @@ def _set_nested_value( def _merge_raw_dicts( *, raw_dicts: list[JSONValue], - strategy: MergeStrategy, + strategy: MergeStrategyEnum, dataclass_name: str, - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, callable_merge_map: dict[str, FieldMergeCallable] | None = None, secret_paths: frozenset[str] = frozenset(), ) -> JSONValue: @@ -227,7 +228,7 @@ def _merge_raw_dicts( for step_idx, raw in enumerate(raw_dicts): before = merged - if strategy == MergeStrategy.RAISE_ON_CONFLICT: + if strategy == MergeStrategyEnum.RAISE_ON_CONFLICT: merged = deep_merge_last_wins(merged, raw, field_merge_map=field_merge_map) else: merged = deep_merge(merged, raw, strategy=strategy, field_merge_map=field_merge_map) @@ -267,7 +268,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 dataclass_: type[T], loaders: tuple[LoaderProtocol, ...] | None = None, debug: bool = False, - type_loaders: tuple[TypeLoader, ...] 
= (), + type_loaders: TypeLoaderMap | None = None, ) -> _MergedData[T]: secret_paths: frozenset[str] = frozenset() if _resolve_merge_mask_secrets(merge_meta): @@ -299,7 +300,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 source_reprs=source_reprs, ) - if merge_meta.strategy == MergeStrategy.RAISE_ON_CONFLICT: + if merge_meta.strategy == MergeStrategyEnum.RAISE_ON_CONFLICT: raise_on_conflict( loaded.raw_dicts, loaded.source_ctxs, @@ -324,7 +325,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 logger.debug( "[%s] Merged result (strategy=%s, %d sources): %s", dataclass_.__name__, - merge_meta.strategy.value, + merge_meta.strategy, len(loaded.raw_dicts), masked_merged, ) @@ -384,7 +385,7 @@ def merge_load_as_function[T: DataclassInstance]( dataclass_: type[T], *, debug: bool, - type_loaders: tuple[TypeLoader, ...] = (), + type_loaders: TypeLoaderMap | None = None, ) -> T: data = _load_and_merge( merge_meta=merge_meta, @@ -431,7 +432,7 @@ def __init__( cls: type[DataclassInstance], cache: bool, debug: bool, - type_loaders: tuple[TypeLoader, ...] = (), + type_loaders: TypeLoaderMap | None = None, ) -> None: self.loaders = self._prepare_loaders(merge_meta=merge_meta, cls=cls, type_loaders=type_loaders) @@ -470,12 +471,12 @@ def _prepare_loaders( *, merge_meta: _MergeConfig, cls: type[DataclassInstance], - type_loaders: tuple[TypeLoader, ...] = (), + type_loaders: TypeLoaderMap | None = None, ) -> tuple[LoaderProtocol, ...]: loaders: list[LoaderProtocol] = [] for source_meta in merge_meta.sources: resolved_expand = resolve_expand_env_vars(source_meta, merge_meta) - source_type_loaders = (source_meta.type_loaders or ()) + type_loaders + source_type_loaders = {**(type_loaders or {}), **(source_meta.type_loaders or {})} resolved_strategy = ( source_meta.nested_resolve_strategy or merge_meta.nested_resolve_strategy @@ -547,7 +548,7 @@ def merge_make_decorator( *, cache: bool, debug: bool, - type_loaders: tuple[TypeLoader, ...] 
= (), + type_loaders: TypeLoaderMap | None = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: if not is_dataclass(cls): diff --git a/src/dature/loading/resolver.py b/src/dature/loading/resolver.py index 4188a7a..9871785 100644 --- a/src/dature/loading/resolver.py +++ b/src/dature/loading/resolver.py @@ -8,9 +8,9 @@ from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, NestedResolve, NestedResolveStrategy if TYPE_CHECKING: - from dature.metadata import Source, TypeLoader + from dature.metadata import Source from dature.protocols import LoaderProtocol - from dature.types import FileLike, FilePath + from dature.types import FileLike, FilePath, TypeLoaderMap SUPPORTED_EXTENSIONS = (".cfg", ".env", ".ini", ".json", ".json5", ".toml", ".yaml", ".yml") @@ -107,7 +107,7 @@ def resolve_loader( metadata: "Source", *, expand_env_vars: ExpandEnvVarsMode | None = None, - type_loaders: "tuple[TypeLoader, ...]" = (), + type_loaders: "TypeLoaderMap | None" = None, nested_resolve_strategy: NestedResolveStrategy = "flat", nested_resolve: NestedResolve | None = None, ) -> "LoaderProtocol": diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index ace5aea..da9b713 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -11,10 +11,11 @@ from dature.loading.context import apply_skip_invalid, build_error_ctx from dature.loading.resolver import resolve_loader, resolve_loader_class from dature.masking.masking import mask_json_value -from dature.metadata import MergeStrategy, Source, TypeLoader, _MergeConfig +from dature.merging.strategy import MergeStrategyEnum +from dature.metadata import Source, _MergeConfig from dature.protocols import DataclassInstance, LoaderProtocol from dature.skip_field_provider import FilterResult -from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue, 
LoadRawResult +from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue, LoadRawResult, TypeLoaderMap logger = logging.getLogger("dature") @@ -25,7 +26,7 @@ def resolve_loader_for_source( index: int, source_meta: Source, expand_env_vars: ExpandEnvVarsMode | None = None, - type_loaders: "tuple[TypeLoader, ...]" = (), + type_loaders: TypeLoaderMap | None = None, ) -> LoaderProtocol: if loaders is not None: return loaders[index] @@ -123,7 +124,7 @@ def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 loaders: tuple[LoaderProtocol, ...] | None = None, secret_paths: frozenset[str] = frozenset(), mask_secrets: bool = False, - type_loaders: "tuple[TypeLoader, ...]" = (), + type_loaders: TypeLoaderMap | None = None, ) -> LoadedSources: raw_dicts: list[JSONValue] = [] source_ctxs: list[SourceContext] = [] @@ -133,7 +134,7 @@ def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 for i, source_meta in enumerate(merge_meta.sources): resolved_expand = resolve_expand_env_vars(source_meta, merge_meta) - source_type_loaders = (source_meta.type_loaders or ()) + type_loaders + source_type_loaders = {**(type_loaders or {}), **(source_meta.type_loaders or {})} loader_instance = resolve_loader_for_source( loaders=loaders, index=i, @@ -162,7 +163,7 @@ def _load_raw( ctx=error_ctx, ) except (DatureConfigError, FileNotFoundError): - if merge_meta.strategy != MergeStrategy.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): + if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): raise logger.warning( "[%s] Source %d skipped (broken): file=%s", @@ -174,7 +175,7 @@ def _load_raw( ) continue except Exception as exc: - if merge_meta.strategy != MergeStrategy.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): + if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): loader_class = resolve_loader_class(source_meta.loader, 
source_meta.file) location = SourceLocation( display_label=loader_class.display_label, @@ -263,7 +264,7 @@ def _load_raw( source_ctxs.append(SourceContext(error_ctx=error_ctx, filecontent=filecontent)) last_loader = loader_instance - if merge_meta.strategy == MergeStrategy.FIRST_FOUND: + if merge_meta.strategy == MergeStrategyEnum.FIRST_FOUND: break if last_loader is None: diff --git a/src/dature/main.py b/src/dature/main.py index 44ebb75..447e4ae 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -6,16 +6,23 @@ from dature.loading.multi import merge_load_as_function, merge_make_decorator from dature.loading.resolver import resolve_loader from dature.loading.single import load_as_function, make_decorator +from dature.merging.strategy import MergeStrategyEnum from dature.metadata import ( - FieldGroup, - MergeRule, - MergeStrategy, Source, - TypeLoader, _MergeConfig, ) from dature.protocols import DataclassInstance -from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, NestedResolve, NestedResolveStrategy +from dature.types import ( + FILE_LIKE_TYPES, + ExpandEnvVarsMode, + FieldGroupTuple, + FieldMergeMap, + FileOrStream, + MergeStrategyName, + NestedResolve, + NestedResolveStrategy, + TypeLoaderMap, +) @overload @@ -23,15 +30,15 @@ def load[T]( *sources: Source, dataclass_: type[T], debug: bool | None = None, - strategy: MergeStrategy = MergeStrategy.LAST_WINS, - field_merges: tuple[MergeRule, ...] = (), - field_groups: tuple[FieldGroup, ...] = (), + strategy: MergeStrategyName = "last_wins", + field_merges: FieldMergeMap | None = None, + field_groups: tuple[FieldGroupTuple, ...] = (), skip_broken_sources: bool = False, skip_invalid_fields: bool = False, expand_env_vars: ExpandEnvVarsMode | None = None, secret_field_names: tuple[str, ...] | None = None, mask_secrets: bool | None = None, - type_loaders: tuple[TypeLoader, ...] 
| None = None, + type_loaders: TypeLoaderMap | None = None, nested_resolve_strategy: NestedResolveStrategy | None = None, nested_resolve: NestedResolve | None = None, ) -> T: ... @@ -43,15 +50,15 @@ def load( dataclass_: None = None, cache: bool | None = None, debug: bool | None = None, - strategy: MergeStrategy = MergeStrategy.LAST_WINS, - field_merges: tuple[MergeRule, ...] = (), - field_groups: tuple[FieldGroup, ...] = (), + strategy: MergeStrategyName = "last_wins", + field_merges: FieldMergeMap | None = None, + field_groups: tuple[FieldGroupTuple, ...] = (), skip_broken_sources: bool = False, skip_invalid_fields: bool = False, expand_env_vars: ExpandEnvVarsMode | None = None, secret_field_names: tuple[str, ...] | None = None, mask_secrets: bool | None = None, - type_loaders: tuple[TypeLoader, ...] | None = None, + type_loaders: TypeLoaderMap | None = None, nested_resolve_strategy: NestedResolveStrategy | None = None, nested_resolve: NestedResolve | None = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: ... @@ -63,15 +70,15 @@ def load( # noqa: PLR0913 dataclass_: type[Any] | None = None, cache: bool | None = None, debug: bool | None = None, - strategy: MergeStrategy = MergeStrategy.LAST_WINS, - field_merges: tuple[MergeRule, ...] = (), - field_groups: tuple[FieldGroup, ...] = (), + strategy: MergeStrategyName = "last_wins", + field_merges: FieldMergeMap | None = None, + field_groups: tuple[FieldGroupTuple, ...] = (), skip_broken_sources: bool = False, skip_invalid_fields: bool = False, expand_env_vars: ExpandEnvVarsMode | None = None, secret_field_names: tuple[str, ...] | None = None, mask_secrets: bool | None = None, - type_loaders: tuple[TypeLoader, ...] 
| None = None, + type_loaders: TypeLoaderMap | None = None, nested_resolve_strategy: NestedResolveStrategy | None = None, nested_resolve: NestedResolve | None = None, ) -> Any: @@ -104,11 +111,11 @@ def load( # noqa: PLR0913 metadata = sources[0] - source_type_loaders = (metadata.type_loaders or ()) + (type_loaders or ()) + config.type_loaders + source_type_loaders = {**(config.type_loaders or {}), **(type_loaders or {}), **(metadata.type_loaders or {})} loader_instance = resolve_loader( metadata, expand_env_vars=expand_env_vars, - type_loaders=source_type_loaders, + type_loaders=source_type_loaders or None, nested_resolve_strategy=nested_resolve_strategy or config.loading.nested_resolve_strategy, nested_resolve=nested_resolve, ) @@ -156,21 +163,21 @@ def _load_multi( # noqa: PLR0913 dataclass_: type[DataclassInstance] | None, cache: bool, debug: bool, - strategy: MergeStrategy, - field_merges: tuple[MergeRule, ...], - field_groups: tuple[FieldGroup, ...], + strategy: MergeStrategyName, + field_merges: FieldMergeMap | None, + field_groups: tuple[FieldGroupTuple, ...], skip_broken_sources: bool, skip_invalid_fields: bool, expand_env_vars: ExpandEnvVarsMode | None, secret_field_names: tuple[str, ...] | None, mask_secrets: bool | None, - type_loaders: tuple[TypeLoader, ...] 
| None, + type_loaders: TypeLoaderMap | None, nested_resolve_strategy: NestedResolveStrategy | None, nested_resolve: NestedResolve | None, ) -> DataclassInstance | Callable[[type[DataclassInstance]], type[DataclassInstance]]: merge_meta = _MergeConfig( sources=sources, - strategy=strategy, + strategy=MergeStrategyEnum(strategy), field_merges=field_merges, field_groups=field_groups, skip_broken_sources=skip_broken_sources, @@ -182,7 +189,7 @@ def _load_multi( # noqa: PLR0913 nested_resolve_strategy=nested_resolve_strategy, nested_resolve=nested_resolve, ) - merge_type_loaders = (merge_meta.type_loaders or ()) + config.type_loaders + merge_type_loaders = {**(config.type_loaders or {}), **(merge_meta.type_loaders or {})} if dataclass_ is not None: - return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders) - return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders) + return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders or None) + return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders or None) diff --git a/src/dature/merging/deep_merge.py b/src/dature/merging/deep_merge.py index 85e3a23..6149c17 100644 --- a/src/dature/merging/deep_merge.py +++ b/src/dature/merging/deep_merge.py @@ -3,7 +3,7 @@ from dature.errors.exceptions import MergeConflictError, MergeConflictFieldError, SourceLocation from dature.errors.location import resolve_source_location from dature.loading.source_loading import SourceContext -from dature.metadata import FieldMergeStrategy, MergeStrategy +from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum from dature.types import JSONValue _MIN_CONFLICT_SOURCES = 2 @@ -44,21 +44,21 @@ def _ensure_both_lists( def _apply_list_merge( base: JSONValue, override: JSONValue, - strategy: FieldMergeStrategy, + strategy: FieldMergeStrategyEnum, ) -> list[JSONValue]: - if 
strategy == FieldMergeStrategy.APPEND: + if strategy == FieldMergeStrategyEnum.APPEND: pair = _ensure_both_lists(base, override, "APPEND") return list(pair.base) + list(pair.override) - if strategy == FieldMergeStrategy.APPEND_UNIQUE: + if strategy == FieldMergeStrategyEnum.APPEND_UNIQUE: pair = _ensure_both_lists(base, override, "APPEND_UNIQUE") return _deduplicate_list(list(pair.base) + list(pair.override)) - if strategy == FieldMergeStrategy.PREPEND: + if strategy == FieldMergeStrategyEnum.PREPEND: pair = _ensure_both_lists(base, override, "PREPEND") return list(pair.override) + list(pair.base) - # PREPEND_UNIQUE + # prepend_unique pair = _ensure_both_lists(base, override, "PREPEND_UNIQUE") return _deduplicate_list(list(pair.override) + list(pair.base)) @@ -66,12 +66,12 @@ def _apply_list_merge( def apply_field_merge( base: JSONValue, override: JSONValue, - strategy: FieldMergeStrategy, + strategy: FieldMergeStrategyEnum, ) -> JSONValue: - if strategy == FieldMergeStrategy.FIRST_WINS: + if strategy == FieldMergeStrategyEnum.FIRST_WINS: return base - if strategy == FieldMergeStrategy.LAST_WINS: + if strategy == FieldMergeStrategyEnum.LAST_WINS: return override return _apply_list_merge(base, override, strategy) @@ -81,7 +81,7 @@ def deep_merge_last_wins( base: JSONValue, override: JSONValue, *, - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, _path: str = "", ) -> JSONValue: if field_merge_map is not None and _path in field_merge_map: @@ -108,7 +108,7 @@ def deep_merge_first_wins( base: JSONValue, override: JSONValue, *, - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, _path: str = "", ) -> JSONValue: if field_merge_map is not None and _path in field_merge_map: @@ -136,7 +136,7 @@ def _collect_conflicts( source_contexts: list[SourceContext], path: list[str], conflicts: list[tuple[list[str], 
list[tuple[int, JSONValue]]]], - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, callable_merge_paths: frozenset[str] | None = None, ) -> None: key_sources: dict[str, list[tuple[int, JSONValue]]] = {} @@ -183,7 +183,7 @@ def raise_on_conflict( dicts: list[JSONValue], source_ctxs: list[SourceContext], dataclass_name: str, - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, callable_merge_paths: frozenset[str] | None = None, ) -> None: conflicts: list[tuple[list[str], list[tuple[int, JSONValue]]]] = [] @@ -221,12 +221,12 @@ def deep_merge( base: JSONValue, override: JSONValue, *, - strategy: MergeStrategy, - field_merge_map: dict[str, FieldMergeStrategy] | None = None, + strategy: MergeStrategyEnum, + field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None, ) -> JSONValue: - if strategy == MergeStrategy.LAST_WINS: + if strategy == MergeStrategyEnum.LAST_WINS: return deep_merge_last_wins(base, override, field_merge_map=field_merge_map) - if strategy in (MergeStrategy.FIRST_WINS, MergeStrategy.FIRST_FOUND): + if strategy in (MergeStrategyEnum.FIRST_WINS, MergeStrategyEnum.FIRST_FOUND): return deep_merge_first_wins(base, override, field_merge_map=field_merge_map) msg = "Use merge_sources for RAISE_ON_CONFLICT strategy" raise ValueError(msg) diff --git a/src/dature/merging/predicate.py b/src/dature/merging/predicate.py index 82d7338..ea013a8 100644 --- a/src/dature/merging/predicate.py +++ b/src/dature/merging/predicate.py @@ -2,12 +2,11 @@ from typing import TYPE_CHECKING, Any, get_type_hints from dature.field_path import FieldPath, resolve_field_type, validate_field_path_owner -from dature.metadata import FieldMergeStrategy +from dature.merging.strategy import FieldMergeStrategyEnum from dature.protocols import DataclassInstance if TYPE_CHECKING: - from dature.metadata import FieldGroup, MergeRule - 
from dature.types import FieldMergeCallable + from dature.types import FieldGroupTuple, FieldMergeCallable, FieldMergeMap @dataclass(frozen=True, slots=True) @@ -17,7 +16,7 @@ class ResolvedFieldGroup: @dataclass(frozen=True, slots=True) class FieldMergeMaps: - enum_map: "dict[str, FieldMergeStrategy]" + enum_map: dict[str, FieldMergeStrategyEnum] callable_map: "dict[str, FieldMergeCallable]" @property @@ -35,17 +34,19 @@ def extract_field_path(predicate: Any, dataclass_: type[DataclassInstance] | Non def build_field_merge_map( - field_merges: "tuple[MergeRule, ...]", + field_merges: "FieldMergeMap | None", dataclass_: type[DataclassInstance] | None = None, ) -> FieldMergeMaps: - enum_map: dict[str, FieldMergeStrategy] = {} + enum_map: dict[str, FieldMergeStrategyEnum] = {} callable_map: dict[str, FieldMergeCallable] = {} - for rule in field_merges: - path = extract_field_path(rule.predicate, dataclass_) - if isinstance(rule.strategy, FieldMergeStrategy): - enum_map[path] = rule.strategy + if not field_merges: + return FieldMergeMaps(enum_map=enum_map, callable_map=callable_map) + for predicate, strategy in field_merges.items(): + path = extract_field_path(predicate, dataclass_) + if isinstance(strategy, str): + enum_map[path] = FieldMergeStrategyEnum(strategy) else: - callable_map[path] = rule.strategy + callable_map[path] = strategy return FieldMergeMaps(enum_map=enum_map, callable_map=callable_map) @@ -63,13 +64,13 @@ def _expand_dataclass_fields(prefix: str, dc_type: type) -> list[str]: def build_field_group_paths( - field_groups: "tuple[FieldGroup, ...]", + field_groups: "tuple[FieldGroupTuple, ...]", dataclass_: type[DataclassInstance], ) -> tuple[ResolvedFieldGroup, ...]: resolved: list[ResolvedFieldGroup] = [] for group in field_groups: paths: list[str] = [] - for field in group.fields: + for field in group: path = extract_field_path(field, dataclass_) if isinstance(field, FieldPath) and isinstance(field.owner, type): resolved_type = 
resolve_field_type(field.owner, field.parts) diff --git a/src/dature/merging/strategy.py b/src/dature/merging/strategy.py new file mode 100644 index 0000000..798cea2 --- /dev/null +++ b/src/dature/merging/strategy.py @@ -0,0 +1,17 @@ +from enum import StrEnum + + +class MergeStrategyEnum(StrEnum): + LAST_WINS = "last_wins" + FIRST_WINS = "first_wins" + FIRST_FOUND = "first_found" + RAISE_ON_CONFLICT = "raise_on_conflict" + + +class FieldMergeStrategyEnum(StrEnum): + FIRST_WINS = "first_wins" + LAST_WINS = "last_wins" + APPEND = "append" + APPEND_UNIQUE = "append_unique" + PREPEND = "prepend" + PREPEND_UNIQUE = "prepend_unique" diff --git a/src/dature/metadata.py b/src/dature/metadata.py index 7bc4bf6..2e3ff45 100644 --- a/src/dature/metadata.py +++ b/src/dature/metadata.py @@ -1,8 +1,5 @@ -from collections.abc import Callable -from dataclasses import dataclass -from enum import StrEnum from pathlib import Path -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from dature.expansion.env_expand import expand_file_path from dature.loading.resolver import resolve_loader_class @@ -14,49 +11,21 @@ from dature.types import ( DotSeparatedPath, ExpandEnvVarsMode, + FieldGroupTuple, FieldMapping, - FieldMergeCallable, + FieldMergeMap, FieldValidators, FileLike, FilePath, NameStyle, NestedResolve, NestedResolveStrategy, + TypeLoaderMap, ) +from dataclasses import dataclass -# --8<-- [start:type-loader] -@dataclass(frozen=True, slots=True) -class TypeLoader: - type_: type - func: Callable[..., Any] - - -# --8<-- [end:type-loader] - - -# --8<-- [start:merge-strategy] -class MergeStrategy(StrEnum): - LAST_WINS = "last_wins" - FIRST_WINS = "first_wins" - FIRST_FOUND = "first_found" - RAISE_ON_CONFLICT = "raise_on_conflict" - - -# --8<-- [end:merge-strategy] - - -# --8<-- [start:field-merge-strategy] -class FieldMergeStrategy(StrEnum): - FIRST_WINS = "first_wins" - LAST_WINS = "last_wins" - APPEND = "append" - APPEND_UNIQUE = "append_unique" - PREPEND = 
"prepend" - PREPEND_UNIQUE = "prepend_unique" - - -# --8<-- [end:field-merge-strategy] +from dature.merging.strategy import MergeStrategyEnum # --8<-- [start:load-metadata] @@ -75,7 +44,7 @@ class Source: skip_if_invalid: "bool | tuple[FieldPath, ...] | None" = None secret_field_names: tuple[str, ...] | None = None mask_secrets: bool | None = None - type_loaders: "tuple[TypeLoader, ...] | None" = None + type_loaders: "TypeLoaderMap | None" = None nested_resolve_strategy: "NestedResolveStrategy | None" = None nested_resolve: "NestedResolve | None" = None # --8<-- [end:load-metadata] @@ -94,39 +63,17 @@ def __repr__(self) -> str: return display -# --8<-- [start:merge-rule] -@dataclass(frozen=True, slots=True) -class MergeRule: - predicate: "FieldPath" - strategy: "FieldMergeStrategy | FieldMergeCallable" - - -# --8<-- [end:merge-rule] - - -# --8<-- [start:field-group] -@dataclass(slots=True) -class FieldGroup: - fields: "tuple[FieldPath, ...]" - - def __init__(self, *fields: "FieldPath") -> None: - self.fields = fields - - -# --8<-- [end:field-group] - - @dataclass(slots=True, kw_only=True) class _MergeConfig: sources: tuple[Source, ...] - strategy: MergeStrategy = MergeStrategy.LAST_WINS - field_merges: tuple[MergeRule, ...] = () - field_groups: tuple[FieldGroup, ...] = () + strategy: MergeStrategyEnum = MergeStrategyEnum.LAST_WINS + field_merges: "FieldMergeMap | None" = None + field_groups: "tuple[FieldGroupTuple, ...]" = () skip_broken_sources: bool = False skip_invalid_fields: bool = False expand_env_vars: "ExpandEnvVarsMode" = "default" secret_field_names: tuple[str, ...] | None = None mask_secrets: bool | None = None - type_loaders: "tuple[TypeLoader, ...] 
| None" = None + type_loaders: "TypeLoaderMap | None" = None nested_resolve_strategy: "NestedResolveStrategy | None" = None nested_resolve: "NestedResolve | None" = None diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py index 1fb4ac0..12c9d10 100644 --- a/src/dature/sources_loader/base.py +++ b/src/dature/sources_loader/base.py @@ -55,7 +55,7 @@ ) if TYPE_CHECKING: - from dature.metadata import TypeLoader + from dature.types import TypeLoaderMap T = TypeVar("T") @@ -76,7 +76,7 @@ def __init__( # noqa: PLR0913 root_validators: tuple[ValidatorProtocol, ...] | None = None, validators: FieldValidators | None = None, expand_env_vars: ExpandEnvVarsMode = "default", - type_loaders: "tuple[TypeLoader, ...]" = (), + type_loaders: "TypeLoaderMap | None" = None, ) -> None: self._prefix = prefix self._name_style = name_style @@ -203,7 +203,7 @@ def _parse_string_values(cls, data: JSONValue, *, infer_scalars: bool = False) - return result def _base_recipe(self) -> list[Provider]: - user_loaders: list[Provider] = [loader(tl.type_, tl.func) for tl in self._type_loaders] + user_loaders: list[Provider] = [loader(type_, func) for type_, func in (self._type_loaders or {}).items()] default_loaders: list[Provider] = [ loader(int, int_from_string), loader(float, float_passthrough), diff --git a/src/dature/sources_loader/flat_key.py b/src/dature/sources_loader/flat_key.py index 2b45397..40c77e8 100644 --- a/src/dature/sources_loader/flat_key.py +++ b/src/dature/sources_loader/flat_key.py @@ -36,7 +36,7 @@ if TYPE_CHECKING: from dature.field_path import FieldPath - from dature.metadata import TypeLoader + from dature.types import TypeLoaderMap def set_nested(d: dict[str, JSONValue], keys: list[str], value: str) -> None: @@ -56,7 +56,7 @@ def __init__( # noqa: PLR0913 root_validators: tuple[ValidatorProtocol, ...] 
| None = None, validators: FieldValidators | None = None, expand_env_vars: ExpandEnvVarsMode = "default", - type_loaders: "tuple[TypeLoader, ...]" = (), + type_loaders: "TypeLoaderMap | None" = None, nested_resolve_strategy: NestedResolveStrategy = "flat", nested_resolve: NestedResolve | None = None, ) -> None: diff --git a/src/dature/types.py b/src/dature/types.py index 8e821da..ac41bb8 100644 --- a/src/dature/types.py +++ b/src/dature/types.py @@ -52,10 +52,14 @@ def __hash__(self) -> int: "upper_kebab", ] -# Keys are FieldPath at runtime, but F[Type].field returns the field's static type (str, int, etc.) -# due to the overload trick for IDE autocompletion, so we accept those types here too. -type _FieldMappingKey = "FieldPath | str | int | float | bool | None" -type FieldMapping = dict[_FieldMappingKey, str | tuple[str, ...]] +# F[Type].field is FieldPath at runtime, but the overload trick makes mypy see the +# field's static type (str, int, list[str], dict, etc.) for IDE autocompletion. +# This union covers all types mypy can infer from F expressions. +type FieldRef = ( + "FieldPath | str | int | float | bool | list[Any] | dict[str, Any] | tuple[Any, ...] 
| set[Any] | bytes | None" +) + +type FieldMapping = dict[FieldRef, str | tuple[str, ...]] type URL = ParseResult @@ -70,11 +74,16 @@ def __hash__(self) -> int: type _NestedResolveValue = "tuple[FieldPath | Any, ...]" type NestedResolve = dict[NestedResolveStrategy, _NestedResolveValue] -type _ValidatorKey = "FieldPath | str | int | float | bool | None" -type FieldValidators = dict[_ValidatorKey, "ValidatorProtocol | tuple[ValidatorProtocol, ...]"] +type FieldValidators = dict[FieldRef, "ValidatorProtocol | tuple[ValidatorProtocol, ...]"] type FieldMergeCallable = Callable[[list[JSONValue]], JSONValue] +type MergeStrategyName = Literal["last_wins", "first_wins", "first_found", "raise_on_conflict"] +type FieldMergeStrategyName = Literal["first_wins", "last_wins", "append", "append_unique", "prepend", "prepend_unique"] +type TypeLoaderMap = dict[type, Callable[..., Any]] +type FieldMergeMap = dict[FieldRef, "FieldMergeStrategyName | Callable[..., Any]"] +type FieldGroupTuple = tuple[FieldRef, ...] 
+ type FileLike = TextIOBase | BufferedIOBase | RawIOBase FILE_LIKE_TYPES: Final = (TextIOBase, BufferedIOBase, RawIOBase) TEXT_IO_TYPES: Final = TextIOBase diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 5e70d5c..2f59859 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -1,13 +1,16 @@ """Tests for per-field merge strategies (field_merges).""" +from collections.abc import Callable from dataclasses import dataclass from pathlib import Path +from typing import Any import pytest -from dature import FieldMergeStrategy, MergeRule, MergeStrategy, Source, load +from dature import Source, load from dature.errors.exceptions import MergeConflictError from dature.field_path import F +from dature.types import FieldMergeStrategyName class TestFieldMergesFunction: @@ -27,8 +30,8 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - strategy=MergeStrategy.LAST_WINS, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), + strategy="last_wins", + field_merges={F[Config].host: "first_wins"}, ) assert result.host == "default-host" @@ -50,8 +53,8 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_WINS, - field_merges=(MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),), + strategy="first_wins", + field_merges={F[Config].port: "last_wins"}, ) assert result.host == "first-host" @@ -73,7 +76,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), + field_merges={F[Config].tags: "append"}, ) assert result.tags == ["a", "b", "c", "d"] @@ -94,7 +97,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),), + field_merges={F[Config].tags: "append_unique"}, ) assert result.tags == ["a", "b", 
"c", "d"] @@ -114,7 +117,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),), + field_merges={F[Config].tags: "prepend"}, ) assert result.tags == ["c", "d", "a", "b"] @@ -134,7 +137,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),), + field_merges={F[Config].tags: "prepend_unique"}, ) assert result.tags == ["b", "c", "d", "a"] @@ -159,7 +162,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.FIRST_WINS),), + field_merges={F[Config].database.host: "first_wins"}, ) assert result.database.host == "localhost" @@ -181,7 +184,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, FieldMergeStrategy.APPEND),), + field_merges={F[Config].value: "append"}, ) def test_multiple_merge_rules(self, tmp_path: Path): @@ -201,11 +204,11 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - strategy=MergeStrategy.LAST_WINS, - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), - ), + strategy="last_wins", + field_merges={ + F[Config].host: "first_wins", + F[Config].tags: "append", + }, ) assert result.host == "default-host" @@ -228,7 +231,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(), + field_merges={}, ) assert result.host == "localhost" @@ -246,10 +249,10 @@ def test_decorator_with_field_merges(self, tmp_path: Path): @load( Source(file=defaults), Source(file=overrides), - field_merges=( - MergeRule(F["Config"].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F["Config"].tags, FieldMergeStrategy.APPEND), - ), + field_merges={ + 
F["Config"].host: "first_wins", + F["Config"].tags: "append", + }, ) @dataclass class Config: @@ -280,8 +283,8 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), + strategy="raise_on_conflict", + field_merges={F[Config].host: "last_wins"}, ) assert result.host == "host-b" @@ -303,8 +306,8 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), + strategy="raise_on_conflict", + field_merges={F[Config].host: "first_wins"}, ) assert result.host == "host-a" @@ -327,8 +330,8 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),), + strategy="raise_on_conflict", + field_merges={F[Config].host: "last_wins"}, ) def test_nested_field_merge_suppresses_conflict(self, tmp_path: Path): @@ -351,8 +354,8 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),), + strategy="raise_on_conflict", + field_merges={F[Config].database.host: "last_wins"}, ) assert result.database.host == "host-b" @@ -374,11 +377,11 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].port, max), - ), + strategy="raise_on_conflict", + field_merges={ + F[Config].host: "first_wins", + F[Config].port: max, + }, ) assert result.host == "host-a" @@ -390,22 +393,22 @@ class TestFieldMergesErrors: ("strategy", "match"), [ pytest.param( - FieldMergeStrategy.APPEND, + "append", "APPEND strategy requires both values to be lists", 
id="append", ), pytest.param( - FieldMergeStrategy.APPEND_UNIQUE, + "append_unique", "APPEND_UNIQUE strategy requires both values to be lists", id="append_unique", ), pytest.param( - FieldMergeStrategy.PREPEND, + "prepend", "PREPEND strategy requires both values to be lists", id="prepend", ), pytest.param( - FieldMergeStrategy.PREPEND_UNIQUE, + "prepend_unique", "PREPEND_UNIQUE strategy requires both values to be lists", id="prepend_unique", ), @@ -414,7 +417,7 @@ class TestFieldMergesErrors: def test_list_strategy_on_strings_raises_type_error( self, tmp_path: Path, - strategy: FieldMergeStrategy, + strategy: FieldMergeStrategyName, match: str, ): a = tmp_path / "a.json" @@ -432,29 +435,29 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) @pytest.mark.parametrize( ("strategy", "match"), [ pytest.param( - FieldMergeStrategy.APPEND, + "append", "APPEND strategy requires both values to be lists", id="append", ), pytest.param( - FieldMergeStrategy.APPEND_UNIQUE, + "append_unique", "APPEND_UNIQUE strategy requires both values to be lists", id="append_unique", ), pytest.param( - FieldMergeStrategy.PREPEND, + "prepend", "PREPEND strategy requires both values to be lists", id="prepend", ), pytest.param( - FieldMergeStrategy.PREPEND_UNIQUE, + "prepend_unique", "PREPEND_UNIQUE strategy requires both values to be lists", id="prepend_unique", ), @@ -463,7 +466,7 @@ class Config: def test_list_strategy_on_integers_raises_type_error( self, tmp_path: Path, - strategy: FieldMergeStrategy, + strategy: FieldMergeStrategyName, match: str, ): a = tmp_path / "a.json" @@ -481,19 +484,19 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) @pytest.mark.parametrize( ("strategy", "match"), [ pytest.param( - FieldMergeStrategy.APPEND, + "append", "APPEND 
strategy requires both values to be lists, got list and str", id="append", ), pytest.param( - FieldMergeStrategy.PREPEND, + "prepend", "PREPEND strategy requires both values to be lists, got list and str", id="prepend", ), @@ -502,7 +505,7 @@ class Config: def test_list_strategy_mixed_types_raises_type_error( self, tmp_path: Path, - strategy: FieldMergeStrategy, + strategy: FieldMergeStrategyName, match: str, ): a = tmp_path / "a.json" @@ -520,7 +523,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) @pytest.mark.parametrize( @@ -533,7 +536,7 @@ class Config: def test_max_min_on_lists_compares_elementwise( self, tmp_path: Path, - strategy: object, + strategy: Callable[..., Any], expected: list[int], ): a = tmp_path / "a.json" @@ -550,7 +553,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) assert result.value == expected @@ -565,7 +568,7 @@ class Config: def test_max_min_on_dicts_raises_type_error( self, tmp_path: Path, - strategy: object, + strategy: Callable[..., Any], match: str, ): a = tmp_path / "a.json" @@ -583,7 +586,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) @pytest.mark.parametrize( @@ -596,7 +599,7 @@ class Config: def test_max_min_on_null_raises_type_error( self, tmp_path: Path, - strategy: object, + strategy: Callable[..., Any], match: str, ): a = tmp_path / "a.json" @@ -614,7 +617,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].value, strategy),), + field_merges={F[Config].value: strategy}, ) def test_field_merge_on_missing_key_in_one_source(self, tmp_path: Path): @@ -633,7 +636,7 @@ class Config: Source(file=a), Source(file=b), 
dataclass_=Config, - field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),), + field_merges={F[Config].host: "first_wins"}, ) assert result.host == "localhost" @@ -658,7 +661,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), + field_merges={F[Config].tags: "append"}, ) assert result.tags == ["a", "b", "c"] @@ -682,7 +685,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].priority, max),), + field_merges={F[Config].priority: max}, ) assert result.priority == 15 @@ -706,7 +709,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].priority, min),), + field_merges={F[Config].priority: min}, ) assert result.priority == 5 @@ -733,10 +736,10 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=( - MergeRule(F[Config].user_name, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].inner.user_name, FieldMergeStrategy.LAST_WINS), - ), + field_merges={ + F[Config].user_name: "first_wins", + F[Config].inner.user_name: "last_wins", + }, ) assert result.user_name == "root-first" @@ -762,10 +765,10 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=( - MergeRule(F[Config].user_name, FieldMergeStrategy.LAST_WINS), - MergeRule(F[Config].inner.user_name, FieldMergeStrategy.FIRST_WINS), - ), + field_merges={ + F[Config].user_name: "last_wins", + F[Config].inner.user_name: "first_wins", + }, ) assert result.user_name == "root-second" @@ -788,7 +791,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=(MergeRule(F[Config].score, sum),), + field_merges={F[Config].score: sum}, ) assert result.score == 30 @@ -812,7 +815,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].score, sum),), + 
field_merges={F[Config].score: sum}, ) assert result.score == 30 @@ -836,7 +839,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].weight, lambda vals: sum(vals) / len(vals)),), + field_merges={F[Config].weight: lambda vals: sum(vals) / len(vals)}, ) assert result.weight == 6.0 @@ -860,7 +863,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].priority, max),), + field_merges={F[Config].priority: max}, ) assert result.priority == 15 @@ -888,7 +891,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].database.port, max),), + field_merges={F[Config].database.port: max}, ) assert result.database.port == 7000 @@ -904,7 +907,7 @@ class Config: result = load( Source(file=a), dataclass_=Config, - field_merges=(MergeRule(F[Config].score, sum),), + field_merges={F[Config].score: sum}, ) assert result.score == 42 @@ -925,8 +928,8 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_merges=(MergeRule(F[Config].score, sum),), + strategy="raise_on_conflict", + field_merges={F[Config].score: sum}, ) assert result.score == 30 @@ -949,11 +952,11 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - field_merges=( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].score, sum), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), - ), + field_merges={ + F[Config].host: "first_wins", + F[Config].score: sum, + F[Config].tags: "append", + }, ) assert result.host == "host-a" @@ -980,7 +983,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_merges=(MergeRule(F[Config].score, sum),), + field_merges={F[Config].score: sum}, ) assert result.score == 30 diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index f586ff3..e3ab0b3 100644 --- a/tests/loading/test_multi.py +++ 
b/tests/loading/test_multi.py @@ -8,7 +8,7 @@ import pytest -from dature import MergeStrategy, Source, load +from dature import Source, load from dature.errors.exceptions import DatureConfigError, MergeConflictError from dature.validators.number import Ge @@ -51,7 +51,7 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_WINS, + strategy="first_wins", ) assert result.host == "first-host" @@ -352,7 +352,7 @@ def test_decorator_first_wins(self, tmp_path: Path): @load( Source(file=first), Source(file=second), - strategy=MergeStrategy.FIRST_WINS, + strategy="first_wins", ) @dataclass class Config: @@ -382,7 +382,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert str(exc_info.value) == dedent(f"""\ @@ -411,7 +411,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert result.host == "localhost" @@ -433,7 +433,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert result.host == "same" @@ -460,7 +460,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert str(exc_info.value) == dedent(f"""\ @@ -489,7 +489,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert str(exc_info.value) == dedent(f"""\ @@ -518,7 +518,7 @@ class Config: Source(file=a), Source(prefix="APP_"), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert str(exc_info.value) == dedent(f"""\ @@ -547,7 +547,7 @@ class Config: Source(file=a), Source(file=b), dataclass_=Config, - 
strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", ) assert len(exc_info.value.exceptions) == 2 @@ -702,7 +702,7 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) assert result.host == "first-host" @@ -722,7 +722,7 @@ class Config: Source(file=missing), Source(file=fallback), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) assert result.host == "fallback-host" @@ -744,7 +744,7 @@ class Config: Source(file=broken), Source(file=fallback), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) assert result.host == "fallback-host" @@ -764,7 +764,7 @@ class Config: Source(file=missing1), Source(file=missing2), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) err = exc_info.value @@ -789,7 +789,7 @@ class Config: Source(file=partial), Source(file=full), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) err = exc_info.value @@ -814,7 +814,7 @@ class Config: Source(file=bad_type), Source(file=fallback), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) err = exc_info.value @@ -844,7 +844,7 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", ) err = exc_info.value @@ -867,7 +867,7 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa @load( Source(file=first), Source(file=second), - strategy=MergeStrategy.FIRST_FOUND, + strategy="first_found", cache=False, ) @dataclass diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index 7321fa9..26394d9 100644 --- a/tests/loading/test_skip_invalid_fields.py +++ b/tests/loading/test_skip_invalid_fields.py @@ -6,7 +6,7 @@ import pytest -from dature import F, MergeStrategy, Source, load 
+from dature import F, Source, load from dature.errors.exceptions import DatureConfigError @@ -196,7 +196,7 @@ class Config: Source(file=source1), Source(file=source2), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, + strategy="raise_on_conflict", skip_invalid_fields=True, ) diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index 0f3ed71..6c6a1ed 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -6,7 +6,6 @@ import pytest from dature import Source, configure, get_load_report, load -from dature.config import MaskingConfig from dature.errors.exceptions import DatureConfigError from dature.fields.secret_str import SecretStr from dature.load_report import FieldOrigin, SourceEntry @@ -58,11 +57,11 @@ def test_mask_value_with_custom_config( expected: str, ): configure( - masking=MaskingConfig( - mask=mask, - visible_prefix=visible_prefix, - visible_suffix=visible_suffix, - ), + masking={ + "mask": mask, + "visible_prefix": visible_prefix, + "visible_suffix": visible_suffix, + }, ) assert mask_value(input_value) == expected @@ -409,7 +408,7 @@ class Cfg: password: str host: str - configure(masking=MaskingConfig(mask_secrets=mask_secrets)) + configure(masking={"mask_secrets": mask_secrets}) result = load(Source(file=json_file), dataclass_=Cfg, debug=True) report = get_load_report(result) @@ -440,7 +439,7 @@ class Cfg: password: str port: int - configure(masking=MaskingConfig(mask_secrets=mask_secrets)) + configure(masking={"mask_secrets": mask_secrets}) with pytest.raises(DatureConfigError) as exc_info: load(Source(file=json_file), dataclass_=Cfg) diff --git a/tests/merging/test_deep_merge.py b/tests/merging/test_deep_merge.py index b502f29..02eeb36 100644 --- a/tests/merging/test_deep_merge.py +++ b/tests/merging/test_deep_merge.py @@ -3,7 +3,6 @@ import pytest from dature.merging.deep_merge import deep_merge, deep_merge_first_wins, deep_merge_last_wins -from dature.metadata import MergeStrategy 
class TestDeepMerge: @@ -13,112 +12,112 @@ class TestDeepMerge: pytest.param( {"a": 1, "b": 2}, {"b": 3, "c": 4}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": 1, "b": 3, "c": 4}, id="flat_last_wins", ), pytest.param( {"a": 1, "b": 2}, {"b": 3, "c": 4}, - MergeStrategy.FIRST_WINS, + "first_wins", {"a": 1, "b": 2, "c": 4}, id="flat_first_wins", ), pytest.param( {"db": {"host": "localhost", "port": 5432}}, {"db": {"host": "prod-host", "name": "mydb"}}, - MergeStrategy.LAST_WINS, + "last_wins", {"db": {"host": "prod-host", "port": 5432, "name": "mydb"}}, id="nested_last_wins", ), pytest.param( {"db": {"host": "localhost", "port": 5432}}, {"db": {"host": "prod-host", "name": "mydb"}}, - MergeStrategy.FIRST_WINS, + "first_wins", {"db": {"host": "localhost", "port": 5432, "name": "mydb"}}, id="nested_first_wins", ), pytest.param( {"a": {"b": {"c": 1, "d": 2}}}, {"a": {"b": {"c": 99, "e": 3}}}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": {"b": {"c": 99, "d": 2, "e": 3}}}, id="deeply_nested", ), pytest.param( {"tags": ["a", "b"]}, {"tags": ["c"]}, - MergeStrategy.LAST_WINS, + "last_wins", {"tags": ["c"]}, id="lists_replaced_entirely", ), pytest.param( {}, {"a": 1}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": 1}, id="empty_base", ), pytest.param( {"a": 1}, {}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": 1}, id="empty_override", ), pytest.param( {}, {}, - MergeStrategy.LAST_WINS, + "last_wins", {}, id="both_empty", ), pytest.param( "old", "new", - MergeStrategy.LAST_WINS, + "last_wins", "new", id="scalar_last_wins", ), pytest.param( "old", "new", - MergeStrategy.FIRST_WINS, + "first_wins", "old", id="scalar_first_wins", ), pytest.param( {"a": None}, {"a": 1}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": 1}, id="none_value_last_wins", ), pytest.param( {"a": None}, {"a": 1}, - MergeStrategy.FIRST_WINS, + "first_wins", {"a": None}, id="none_value_first_wins", ), pytest.param( {"a": {"nested": 1}}, {"a": "scalar"}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": 
"scalar"}, id="dict_vs_scalar_last_wins", ), pytest.param( {"a": "scalar"}, {"a": {"nested": 1}}, - MergeStrategy.LAST_WINS, + "last_wins", {"a": {"nested": 1}}, id="scalar_vs_dict_last_wins", ), pytest.param( {"a": {"nested": 1}}, {"a": "scalar"}, - MergeStrategy.FIRST_WINS, + "first_wins", {"a": {"nested": 1}}, id="dict_vs_scalar_first_wins", ), @@ -129,7 +128,7 @@ def test_merge(self, base, override, strategy, expected): def test_raise_on_conflict_strategy_raises_value_error(self): with pytest.raises(ValueError, match="RAISE_ON_CONFLICT"): - deep_merge({"a": 1}, {"a": 2}, strategy=MergeStrategy.RAISE_ON_CONFLICT) + deep_merge({"a": 1}, {"a": 2}, strategy="raise_on_conflict") class TestDeepMergeLastWins: diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index 839b720..e9f72b4 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -6,7 +6,7 @@ import pytest -from dature import FieldGroup, FieldMergeStrategy, MergeRule, MergeStrategy, Source, load +from dature import Source, load from dature.errors.exceptions import FieldGroupError from dature.field_path import F @@ -28,8 +28,8 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - strategy=MergeStrategy.LAST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + strategy="last_wins", + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == "remote" @@ -51,8 +51,8 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + strategy="first_wins", + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == "first-host" @@ -76,7 +76,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == 
"localhost" @@ -99,7 +99,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == "localhost" @@ -128,7 +128,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -159,7 +159,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -187,8 +187,8 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - strategy=MergeStrategy.FIRST_WINS, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + strategy="first_wins", + field_groups=((F[Config].host, F[Config].port),), ) def test_partial_change_with_raise_on_conflict(self, tmp_path: Path): @@ -208,8 +208,8 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - strategy=MergeStrategy.RAISE_ON_CONFLICT, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + strategy="raise_on_conflict", + field_groups=((F[Config].host, F[Config].port),), ) @@ -238,7 +238,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].database),), + field_groups=((F[Config].database,),), ) assert str(exc_info.value) == dedent(f"""\ @@ -269,7 +269,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].database),), + field_groups=((F[Config].database,),), ) assert result.database.host == "remote" @@ -302,7 +302,7 @@ class Config: b_meta, c_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + 
field_groups=((F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -333,7 +333,7 @@ class Config: Source(file=b), Source(file=c), dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == "c-host" @@ -364,8 +364,8 @@ class Config: overrides_meta, dataclass_=Config, field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), + (F[Config].host, F[Config].port), + (F[Config].user, F[Config].password), ), ) @@ -396,8 +396,8 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),), - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_merges={F[Config].tags: "append"}, + field_groups=((F[Config].host, F[Config].port),), ) assert result.host == "remote" @@ -416,7 +416,7 @@ def test_decorator_with_field_groups(self, tmp_path: Path): @load( Source(file=defaults), Source(file=overrides), - field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), + field_groups=((F["Config"].host, F["Config"].port),), ) @dataclass class Config: @@ -437,7 +437,7 @@ def test_decorator_partial_change_raises(self, tmp_path: Path): @load( Source(file=defaults), Source(file=overrides), - field_groups=(FieldGroup(F["Config"].host, F["Config"].port),), + field_groups=((F["Config"].host, F["Config"].port),), ) @dataclass class Config: @@ -470,7 +470,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].host, F[Config].port),), + field_groups=((F[Config].host, F[Config].port),), ) assert str(exc_info.value) == dedent(f"""\ @@ -504,8 +504,8 @@ class Config: overrides_meta, dataclass_=Config, field_groups=( - FieldGroup(F[Config].host, F[Config].port), - FieldGroup(F[Config].user, F[Config].password), + (F[Config].host, F[Config].port), + (F[Config].user, 
F[Config].password), ), ) @@ -548,7 +548,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), + field_groups=((F[Config].database, F[Config].timeout),), ) assert result.database.host == "remote" @@ -580,7 +580,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), + field_groups=((F[Config].database, F[Config].timeout),), ) assert result.database.host == "localhost" @@ -614,7 +614,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), + field_groups=((F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == dedent(f"""\ @@ -652,7 +652,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), + field_groups=((F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == dedent(f"""\ @@ -692,7 +692,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].database, F[Config].timeout),), + field_groups=((F[Config].database, F[Config].timeout),), ) assert str(exc_info.value) == dedent(f"""\ @@ -729,7 +729,7 @@ class Config: Source(file=defaults), Source(file=overrides), dataclass_=Config, - field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), + field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) assert result.user_name == "root-new" @@ -761,7 +761,7 @@ class Config: defaults_meta, overrides_meta, dataclass_=Config, - field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),), + field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) assert str(exc_info.value) == dedent(f"""\ diff --git a/tests/merging/test_predicate.py b/tests/merging/test_predicate.py index 
ed8f1e8..d186fd2 100644 --- a/tests/merging/test_predicate.py +++ b/tests/merging/test_predicate.py @@ -6,7 +6,6 @@ from dature.field_path import F from dature.merging.predicate import build_field_merge_map, extract_field_path -from dature.metadata import FieldMergeStrategy, MergeRule class TestExtractFieldPath: @@ -45,21 +44,21 @@ class Config: port: int tags: list[str] - rules = ( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].tags, FieldMergeStrategy.APPEND), - ) + field_merges = { + F[Config].host: "first_wins", + F[Config].tags: "append", + } - result = build_field_merge_map(rules) + result = build_field_merge_map(field_merges) assert result.enum_map == { - "host": FieldMergeStrategy.FIRST_WINS, - "tags": FieldMergeStrategy.APPEND, + "host": "first_wins", + "tags": "append", } assert result.callable_map == {} def test_empty_rules(self): - result = build_field_merge_map(()) + result = build_field_merge_map({}) assert result.enum_map == {} assert result.callable_map == {} @@ -72,11 +71,11 @@ class Database: class Config: database: Database - rules = (MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),) + field_merges = {F[Config].database.host: "last_wins"} - result = build_field_merge_map(rules) + result = build_field_merge_map(field_merges) - assert result.enum_map == {"database.host": FieldMergeStrategy.LAST_WINS} + assert result.enum_map == {"database.host": "last_wins"} assert result.callable_map == {} def test_callable_strategy(self): @@ -85,14 +84,14 @@ class Config: host: str score: int - rules = ( - MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS), - MergeRule(F[Config].score, sum), - ) + field_merges = { + F[Config].host: "first_wins", + F[Config].score: sum, + } - result = build_field_merge_map(rules) + result = build_field_merge_map(field_merges) - assert result.enum_map == {"host": FieldMergeStrategy.FIRST_WINS} + assert result.enum_map == {"host": "first_wins"} assert result.callable_map == 
{"score": sum} def test_validates_owner_mismatch(self): @@ -104,10 +103,10 @@ class Config: class Other: host: str - rules = (MergeRule(F[Other].host, FieldMergeStrategy.FIRST_WINS),) + field_merges = {F[Other].host: "first_wins"} with pytest.raises(TypeError) as exc_info: - build_field_merge_map(rules, dataclass_=Config) + build_field_merge_map(field_merges, dataclass_=Config) assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'" diff --git a/tests/test_config.py b/tests/test_config.py index d801687..35a20b9 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,3 +1,5 @@ +from typing import Any + import pytest from dature.config import ( @@ -27,27 +29,27 @@ class TestConfigure: ("kwargs", "attr_path", "expected"), [ ( - {"masking": MaskingConfig(mask="[HIDDEN]")}, + {"masking": {"mask": "[HIDDEN]"}}, ("masking", "mask"), "[HIDDEN]", ), ( - {"masking": MaskingConfig(visible_prefix=3)}, + {"masking": {"visible_prefix": 3}}, ("masking", "visible_prefix"), 3, ), ( - {"error_display": ErrorDisplayConfig(max_visible_lines=10)}, + {"error_display": {"max_visible_lines": 10}}, ("error_display", "max_visible_lines"), 10, ), ( - {"loading": LoadingConfig(cache=False, debug=True)}, + {"loading": {"cache": False, "debug": True}}, ("loading", "cache"), False, ), ( - {"loading": LoadingConfig(cache=False, debug=True)}, + {"loading": {"cache": False, "debug": True}}, ("loading", "debug"), True, ), @@ -61,7 +63,7 @@ class TestConfigure: ], ) def test_configure_overrides( - kwargs: dict[str, MaskingConfig | ErrorDisplayConfig | LoadingConfig], + kwargs: dict[str, Any], attr_path: tuple[str, str], expected: str | int | bool, ) -> None: @@ -74,17 +76,17 @@ def test_configure_overrides( ("kwargs", "unchanged_group", "expected_default"), [ ( - {"masking": MaskingConfig(mask="###")}, + {"masking": {"mask": "###"}}, "error_display", ErrorDisplayConfig(), ), ( - {"masking": MaskingConfig(mask="###")}, + {"masking": {"mask": "###"}}, 
"loading", LoadingConfig(), ), ( - {"error_display": ErrorDisplayConfig(max_visible_lines=10)}, + {"error_display": {"max_visible_lines": 10}}, "masking", MaskingConfig(), ), @@ -96,7 +98,7 @@ def test_configure_overrides( ], ) def test_configure_preserves_other_groups( - kwargs: dict[str, MaskingConfig | ErrorDisplayConfig | LoadingConfig], + kwargs: dict[str, Any], unchanged_group: str, expected_default: MaskingConfig | ErrorDisplayConfig | LoadingConfig, ) -> None: diff --git a/tests/test_load_report.py b/tests/test_load_report.py index 01485ad..8b131b8 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -8,7 +8,7 @@ import pytest -from dature import MergeStrategy, Source, get_load_report, load +from dature import Source, get_load_report, load from dature.errors.exceptions import DatureConfigError from dature.load_report import FieldOrigin, LoadReport, SourceEntry from dature.validators.number import Ge @@ -38,7 +38,7 @@ class Config: expected = LoadReport( dataclass_name="Config", - strategy=MergeStrategy.LAST_WINS, + strategy="last_wins", sources=( SourceEntry( index=0, @@ -89,7 +89,7 @@ class Config: Source(file=first), Source(file=second), dataclass_=Config, - strategy=MergeStrategy.FIRST_WINS, + strategy="first_wins", debug=True, ) @@ -97,7 +97,7 @@ class Config: expected = LoadReport( dataclass_name="Config", - strategy=MergeStrategy.FIRST_WINS, + strategy="first_wins", sources=( SourceEntry( index=0, @@ -240,7 +240,7 @@ class Config: config = Config() report = get_load_report(config) assert report is not None - assert report.strategy == MergeStrategy.LAST_WINS + assert report.strategy == "last_wins" assert len(report.sources) == 2 def test_single_source_decorator(self, tmp_path: Path): @@ -376,7 +376,7 @@ class Config: expected = LoadReport( dataclass_name="Config", - strategy=MergeStrategy.LAST_WINS, + strategy="last_wins", sources=( SourceEntry(index=0, file_path=str(a), loader_type="json", raw_data={"host": "localhost"}), 
SourceEntry(index=1, file_path=str(b), loader_type="json", raw_data={"host": "override"}), @@ -416,7 +416,7 @@ class Config: expected = LoadReport( dataclass_name="Config", - strategy=MergeStrategy.LAST_WINS, + strategy="last_wins", sources=( SourceEntry(index=0, file_path=str(a), loader_type="json", raw_data={"port": -5}), SourceEntry(index=1, file_path=str(b), loader_type="json", raw_data={"host": "localhost"}), diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py index 30571d3..419a690 100644 --- a/tests/test_type_loaders.py +++ b/tests/test_type_loaders.py @@ -6,7 +6,7 @@ import pytest -from dature import Source, TypeLoader, configure, load +from dature import Source, configure, load from dature.config import _ConfigProxy @@ -31,10 +31,10 @@ class ConfigWithRgb: @pytest.fixture def _reset_config() -> Generator[None]: _ConfigProxy.set_instance(None) - _ConfigProxy.set_type_loaders(()) + _ConfigProxy.set_type_loaders({}) yield _ConfigProxy.set_instance(None) - _ConfigProxy.set_type_loaders(()) + _ConfigProxy.set_type_loaders({}) @pytest.fixture @@ -49,7 +49,7 @@ def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None: result = load( Source( file=yaml_with_rgb, - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ), dataclass_=ConfigWithRgb, ) @@ -68,7 +68,7 @@ def int_times_two(value: str) -> int: result = load( Source( file=p, - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ), dataclass_=ConfigWithRgb, ) @@ -79,7 +79,7 @@ class TestTypeLoadersInConfigure: @pytest.mark.usefixtures("_reset_config") def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None: configure( - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ) result = load(Source(file=yaml_with_rgb), dataclass_=ConfigWithRgb) assert result.color == Rgb(r=255, g=128, b=0) @@ -96,7 +96,7 @@ def 
test_merge_metadata_type_loaders(self, tmp_path: Path) -> None: Source(file=base), Source(file=override), dataclass_=ConfigWithRgb, - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ) assert result.name == "override" assert result.color == Rgb(r=1, g=2, b=3) @@ -114,7 +114,7 @@ def tag_upper(value: str) -> str: return value.upper() configure( - type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),), + type_loaders={Rgb: rgb_from_string}, ) p = tmp_path / "cfg.yaml" @@ -123,7 +123,7 @@ def tag_upper(value: str) -> str: result = load( Source( file=p, - type_loaders=(TypeLoader(type_=str, func=tag_upper),), + type_loaders={str: tag_upper}, ), dataclass_=TwoCustom, ) From d031fc471aa22e10408bbbb49d3df8f28d55b2b0 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:11:05 +0300 Subject: [PATCH 08/36] refactor validators --- changes/+simplify-validator-args.refactor | 1 + docs/features/validation.md | 30 +++++----- .../dynaconf_root_validators.py | 4 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_validators.py | 2 +- .../features/masking/masking_merge_mode.py | 2 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../validation/validation_annotated.py | 8 +-- .../features/validation/validation_custom.py | 4 +- .../validation/validation_metadata.py | 4 +- src/dature/validators/number.py | 18 +++--- src/dature/validators/root.py | 6 +- src/dature/validators/sequence.py | 14 ++--- src/dature/validators/string.py | 14 ++--- tests/errors/test_fixtures.py | 20 +++---- tests/loading/test_multi.py | 4 +- tests/test_load_report.py | 4 +- tests/validators/test_complex.py | 42 ++++++------- tests/validators/test_custom_validator.py | 28 ++++----- tests/validators/test_metadata_validators.py | 60 +++++++++---------- tests/validators/test_number.py | 20 +++---- tests/validators/test_root_validator.py | 16 ++--- 
tests/validators/test_sequence.py | 12 ++-- tests/validators/test_string.py | 16 ++--- 25 files changed, 168 insertions(+), 167 deletions(-) create mode 100644 changes/+simplify-validator-args.refactor diff --git a/changes/+simplify-validator-args.refactor b/changes/+simplify-validator-args.refactor new file mode 100644 index 0000000..cb72ee0 --- /dev/null +++ b/changes/+simplify-validator-args.refactor @@ -0,0 +1 @@ +Built-in validators (`Ge`, `Le`, `Gt`, `Lt`, `MinLength`, `MaxLength`, `RegexPattern`, `MinItems`, `MaxItems`, `UniqueItems`) now accept `value` as a positional argument: `Ge(1)` instead of `Ge(value=1)`. `RootValidator` now accepts `func` as a positional argument: `RootValidator(check)` instead of `RootValidator(func=check)`. `error_message` remains keyword-only in all validators. diff --git a/docs/features/validation.md b/docs/features/validation.md index 13e61f9..9af1ed5 100644 --- a/docs/features/validation.md +++ b/docs/features/validation.md @@ -24,32 +24,32 @@ Declare validators using `typing.Annotated`: | Validator | Description | |-----------|-------------| -| `Gt(value=N)` | Greater than N | -| `Ge(value=N)` | Greater than or equal to N | -| `Lt(value=N)` | Less than N | -| `Le(value=N)` | Less than or equal to N | +| `Gt(N)` | Greater than N | +| `Ge(N)` | Greater than or equal to N | +| `Lt(N)` | Less than N | +| `Le(N)` | Less than or equal to N | **Strings** (`dature.validators.string`): | Validator | Description | |-----------|-------------| -| `MinLength(value=N)` | Minimum string length | -| `MaxLength(value=N)` | Maximum string length | -| `RegexPattern(pattern=r"...")` | Match regex pattern | +| `MinLength(N)` | Minimum string length | +| `MaxLength(N)` | Maximum string length | +| `RegexPattern(r"...")` | Match regex pattern | **Sequences** (`dature.validators.sequence`): | Validator | Description | |-----------|-------------| -| `MinItems(value=N)` | Minimum number of items | -| `MaxItems(value=N)` | Maximum number of items | +| 
`MinItems(N)` | Minimum number of items | +| `MaxItems(N)` | Maximum number of items | | `UniqueItems()` | All items must be unique | Multiple validators can be combined: ```python -port: Annotated[int, Ge(value=1), Le(value=65535)] -tags: Annotated[list[str], MinItems(value=1), MaxItems(value=10), UniqueItems()] +port: Annotated[int, Ge(1), Le(65535)] +tags: Annotated[list[str], MinItems(1), MaxItems(10), UniqueItems()] ``` ## Root Validators @@ -90,8 +90,8 @@ A single validator can be passed directly. Multiple validators require a tuple: ```python validators={ - dature.F[Config].port: (Gt(value=0), Lt(value=65536)), # tuple for multiple - dature.F[Config].host: MinLength(value=1), # single, no tuple needed + dature.F[Config].port: (Gt(0), Lt(65536)), # tuple for multiple + dature.F[Config].host: MinLength(1), # single, no tuple needed } ``` @@ -99,8 +99,8 @@ Nested fields are supported: ```python validators={ - dature.F[Config].database.host: MinLength(value=1), - dature.F[Config].database.port: Gt(value=0), + dature.F[Config].database.host: MinLength(1), + dature.F[Config].database.port: Gt(0), } ``` diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index b1c2844..c2354f9 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -15,7 +15,7 @@ @dataclass class Config: host: str - port: Annotated[int, Gt(value=0), Lt(value=65536)] + port: Annotated[int, Gt(0), Lt(65536)] debug: bool = False @@ -30,7 +30,7 @@ def check_debug_port(config: Config) -> bool: file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", root_validators=( RootValidator( - func=check_debug_port, + check_debug_port, error_message="debug mode should not use port 80", ), ), diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py 
b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index 1151358..cbd3d4d 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -15,7 +15,7 @@ @dataclass class Config: host: str - port: Annotated[int, Gt(value=0), Lt(value=65536)] + port: Annotated[int, Gt(0), Lt(65536)] debug: bool = False diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index af6d288..0700d99 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -15,7 +15,7 @@ @dataclass class Config: host: str - port: Annotated[int, Gt(value=0), Lt(value=65536)] = 8080 + port: Annotated[int, Gt(0), Lt(65536)] = 8080 try: diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index 2fd000c..76d09a7 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -15,7 +15,7 @@ class Config: host: str port: int - api_key: Annotated[str, MinLength(value=20)] = "" + api_key: Annotated[str, MinLength(20)] = "" # --8<-- [start:merge-mode] diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index 57fb745..00220cf 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -13,7 +13,7 @@ @dataclass class Config: - api_key: Annotated[str, MinLength(value=20)] + api_key: Annotated[str, MinLength(20)] host: str diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index 0f76244..4d60c8c 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -13,7 +13,7 @@ 
@dataclass class Config: - api_key: Annotated[str, MinLength(value=20)] + api_key: Annotated[str, MinLength(20)] host: str diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index d5babf4..7321b0d 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -15,10 +15,10 @@ @dataclass class ServiceConfig: - port: Annotated[int, Ge(value=1), Le(value=65535)] - name: Annotated[str, MinLength(value=3), MaxLength(value=50)] - tags: Annotated[list[str], MinItems(value=1), UniqueItems()] - workers: Annotated[int, Ge(value=1)] + port: Annotated[int, Ge(1), Le(65535)] + name: Annotated[str, MinLength(3), MaxLength(50)] + tags: Annotated[list[str], MinItems(1), UniqueItems()] + workers: Annotated[int, Ge(1)] try: diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index d7d541c..229a819 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -12,7 +12,7 @@ SOURCES_DIR = Path(__file__).parent / "sources" -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class Divisible: value: int error_message: str = "Value must be divisible by {value}" @@ -32,7 +32,7 @@ class ServiceConfig: port: int name: str tags: list[str] - workers: Annotated[int, Ge(value=1), Divisible(value=2)] + workers: Annotated[int, Ge(1), Divisible(2)] try: diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 968a6c2..be4fe50 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -23,8 +23,8 @@ class Config: dature.Source( file=SOURCES_DIR / "validation_metadata_invalid.yaml", validators={ - dature.F[Config].host: 
MinLength(value=1), - dature.F[Config].port: (Ge(value=1), Lt(value=65536)), + dature.F[Config].host: MinLength(1), + dature.F[Config].port: (Ge(1), Lt(65536)), }, ), dataclass_=Config, diff --git a/src/dature/validators/number.py b/src/dature/validators/number.py index 5198b47..d1ea239 100644 --- a/src/dature/validators/number.py +++ b/src/dature/validators/number.py @@ -1,11 +1,11 @@ from collections.abc import Callable -from dataclasses import dataclass +from dataclasses import dataclass, field -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class Gt: value: int | float - error_message: str = "Value must be greater than {value}" + error_message: str = field(default="Value must be greater than {value}", kw_only=True) def get_validator_func(self) -> Callable[[int | float], bool]: def validate(val: float) -> bool: @@ -17,10 +17,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class Ge: value: int | float - error_message: str = "Value must be greater than or equal to {value}" + error_message: str = field(default="Value must be greater than or equal to {value}", kw_only=True) def get_validator_func(self) -> Callable[[int | float], bool]: def validate(val: float) -> bool: @@ -32,10 +32,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class Lt: value: int | float - error_message: str = "Value must be less than {value}" + error_message: str = field(default="Value must be less than {value}", kw_only=True) def get_validator_func(self) -> Callable[[int | float], bool]: def validate(val: float) -> bool: @@ -47,10 +47,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) 
+@dataclass(frozen=True, slots=True) class Le: value: int | float - error_message: str = "Value must be less than or equal to {value}" + error_message: str = field(default="Value must be less than or equal to {value}", kw_only=True) def get_validator_func(self) -> Callable[[int | float], bool]: def validate(val: float) -> bool: diff --git a/src/dature/validators/root.py b/src/dature/validators/root.py index 6c3aac6..9b5360b 100644 --- a/src/dature/validators/root.py +++ b/src/dature/validators/root.py @@ -1,12 +1,12 @@ from collections.abc import Callable -from dataclasses import dataclass +from dataclasses import dataclass, field # --8<-- [start:root-validator] -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class RootValidator: func: Callable[..., bool] - error_message: str = "Root validation failed" + error_message: str = field(default="Root validation failed", kw_only=True) # --8<-- [end:root-validator] def get_validator_func(self) -> Callable[..., bool]: diff --git a/src/dature/validators/sequence.py b/src/dature/validators/sequence.py index 99a0288..ac67f72 100644 --- a/src/dature/validators/sequence.py +++ b/src/dature/validators/sequence.py @@ -1,12 +1,12 @@ from collections.abc import Callable, Sequence -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Any -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class MinItems: value: int - error_message: str = "Value must have at least {value} items" + error_message: str = field(default="Value must have at least {value} items", kw_only=True) def get_validator_func(self) -> Callable[[Sequence[Any]], bool]: def validate(val: Sequence[Any]) -> bool: @@ -18,10 +18,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class MaxItems: value: int - error_message: str = 
"Value must have at most {value} items" + error_message: str = field(default="Value must have at most {value} items", kw_only=True) def get_validator_func(self) -> Callable[[Sequence[Any]], bool]: def validate(val: Sequence[Any]) -> bool: @@ -33,9 +33,9 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class UniqueItems: - error_message: str = "Value must contain unique items" + error_message: str = field(default="Value must contain unique items", kw_only=True) def get_validator_func(self) -> Callable[[Sequence[Any]], bool]: def validate(val: Sequence[Any]) -> bool: diff --git a/src/dature/validators/string.py b/src/dature/validators/string.py index 6562aa9..482dbf7 100644 --- a/src/dature/validators/string.py +++ b/src/dature/validators/string.py @@ -1,12 +1,12 @@ import re from collections.abc import Callable -from dataclasses import dataclass +from dataclasses import dataclass, field -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class MinLength: value: int - error_message: str = "Value must have at least {value} characters" + error_message: str = field(default="Value must have at least {value} characters", kw_only=True) def get_validator_func(self) -> Callable[[str], bool]: def validate(val: str) -> bool: @@ -18,10 +18,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class MaxLength: value: int - error_message: str = "Value must have at most {value} characters" + error_message: str = field(default="Value must have at most {value} characters", kw_only=True) def get_validator_func(self) -> Callable[[str], bool]: def validate(val: str) -> bool: @@ -33,10 +33,10 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) 
-@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class RegexPattern: pattern: str - error_message: str = "Value must match pattern '{pattern}'" + error_message: str = field(default="Value must match pattern '{pattern}'", kw_only=True) def get_validator_func(self) -> Callable[[str], bool]: def validate(val: str) -> bool: diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index 774a26b..75eb011 100644 --- a/tests/errors/test_fixtures.py +++ b/tests/errors/test_fixtures.py @@ -14,8 +14,8 @@ @dataclass class Address: - city: Annotated[str, MinLength(value=2)] - zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")] + city: Annotated[str, MinLength(2)] + zip_code: Annotated[str, RegexPattern(r"^\d{5}$")] @dataclass @@ -23,10 +23,10 @@ class ErrorConfig: port: int host: str status: Literal["active", "inactive"] - name: Annotated[str, MinLength(value=3), MaxLength(value=50)] - email: Annotated[str, RegexPattern(pattern=r"^[\w.-]+@[\w.-]+\.\w+$")] - age: Annotated[int, Ge(value=0), Le(value=150)] - tags: Annotated[list[str], MinItems(value=1), UniqueItems()] + name: Annotated[str, MinLength(3), MaxLength(50)] + email: Annotated[str, RegexPattern(r"^[\w.-]+@[\w.-]+\.\w+$")] + age: Annotated[int, Ge(0), Le(150)] + tags: Annotated[list[str], MinItems(1), UniqueItems()] address: Address @@ -44,10 +44,10 @@ class LoadErrorConfig: @dataclass class ValidationErrorConfig: - name: Annotated[str, MinLength(value=3), MaxLength(value=50)] - email: Annotated[str, RegexPattern(pattern=r"^[\w.-]+@[\w.-]+\.\w+$")] - age: Annotated[int, Ge(value=0), Le(value=150)] - tags: Annotated[list[str], MinItems(value=1), UniqueItems()] + name: Annotated[str, MinLength(3), MaxLength(50)] + email: Annotated[str, RegexPattern(r"^[\w.-]+@[\w.-]+\.\w+$")] + age: Annotated[int, Ge(0), Le(150)] + tags: Annotated[list[str], MinItems(1), UniqueItems()] address: Address diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py 
index e3ab0b3..918c98f 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -837,7 +837,7 @@ def test_validation_error_references_correct_source(self, tmp_path: Path): @dataclass class Config: host: str - port: Annotated[int, Ge(value=1)] + port: Annotated[int, Ge(1)] with pytest.raises(DatureConfigError) as exc_info: load( @@ -873,7 +873,7 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa @dataclass class Config: host: str - port: Annotated[int, Ge(value=1)] + port: Annotated[int, Ge(1)] with pytest.raises(DatureConfigError) as exc_info: Config() diff --git a/tests/test_load_report.py b/tests/test_load_report.py index 8b131b8..3b22f2a 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -404,7 +404,7 @@ def test_merge_validation_error(self, tmp_path: Path): @dataclass class Config: host: str - port: Annotated[int, Ge(value=0)] + port: Annotated[int, Ge(0)] with pytest.raises(DatureConfigError): load( @@ -472,7 +472,7 @@ def test_single_source_validation_error(self, tmp_path: Path): @dataclass class Config: - port: Annotated[int, Ge(value=0)] + port: Annotated[int, Ge(0)] with pytest.raises(DatureConfigError): load(Source(file=json_file), dataclass_=Config, debug=True) diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index e14273f..0cd8552 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -15,9 +15,9 @@ class TestMultipleFields: def test_success(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3), MaxLength(value=50)] - age: Annotated[int, Ge(value=0), Le(value=150)] - tags: Annotated[list[str], MinItems(value=1), UniqueItems()] + name: Annotated[str, MinLength(3), MaxLength(50)] + age: Annotated[int, Ge(0), Le(150)] + tags: Annotated[list[str], MinItems(1), UniqueItems()] json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", 
"coding"]}') @@ -32,9 +32,9 @@ class Config: def test_all_invalid(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3), MaxLength(value=50)] - age: Annotated[int, Ge(value=0), Le(value=150)] - tags: Annotated[list[str], MinItems(value=1), UniqueItems()] + name: Annotated[str, MinLength(3), MaxLength(50)] + age: Annotated[int, Ge(0), Le(150)] + tags: Annotated[list[str], MinItems(1), UniqueItems()] json_file = tmp_path / "config.json" content = '{"name": "AB", "age": 200, "tags": []}' @@ -72,13 +72,13 @@ class TestNestedDataclass: def test_success(self, tmp_path: Path): @dataclass class Address: - city: Annotated[str, MinLength(value=2)] - zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")] + city: Annotated[str, MinLength(2)] + zip_code: Annotated[str, RegexPattern(r"^\d{5}$")] @dataclass class User: - name: Annotated[str, MinLength(value=3)] - age: Annotated[int, Ge(value=18)] + name: Annotated[str, MinLength(3)] + age: Annotated[int, Ge(18)] address: Address json_file = tmp_path / "config.json" @@ -97,13 +97,13 @@ class User: def test_all_invalid(self, tmp_path: Path): @dataclass class Address: - city: Annotated[str, MinLength(value=2)] - zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")] + city: Annotated[str, MinLength(2)] + zip_code: Annotated[str, RegexPattern(r"^\d{5}$")] @dataclass class User: - name: Annotated[str, MinLength(value=3)] - age: Annotated[int, Ge(value=18)] + name: Annotated[str, MinLength(3)] + age: Annotated[int, Ge(18)] address: Address json_file = tmp_path / "config.json" @@ -148,7 +148,7 @@ class TestCustomErrorMessage: def test_custom_error_message(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Ge(value=18, error_message="Age must be 18 or older")] + age: Annotated[int, Ge(18, error_message="Age must be 18 or older")] json_file = tmp_path / "config.json" content = '{"age": 15}' @@ -171,7 +171,7 @@ class TestDictListDict: def 
test_raw_dict_field_validator_success(self, tmp_path: Path): @dataclass class Config: - groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(value=1)] + groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(1)] json_file = tmp_path / "config.json" json_file.write_text( @@ -186,7 +186,7 @@ class Config: def test_raw_dict_field_validator_failure(self, tmp_path: Path): @dataclass class Config: - groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(value=1)] + groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(1)] json_file = tmp_path / "config.json" content = '{"groups": {}}' @@ -210,8 +210,8 @@ class Config: def test_nested_dataclass_in_dict_list_success(self, tmp_path: Path): @dataclass class Member: - name: Annotated[str, MinLength(value=2)] - role: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(2)] + role: Annotated[str, MinLength(3)] @dataclass class Config: @@ -231,8 +231,8 @@ class Config: def test_nested_dataclass_in_dict_list_validation_fails(self, tmp_path: Path): @dataclass class Member: - name: Annotated[str, MinLength(value=2)] - role: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(2)] + role: Annotated[str, MinLength(3)] @dataclass class Config: diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index 37e23ca..afd8beb 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -9,7 +9,7 @@ from dature.errors.exceptions import DatureConfigError -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class Divisible: value: int error_message: str = "Value must be divisible by {value}" @@ -24,7 +24,7 @@ def get_error_message(self) -> str: return self.error_message.format(value=self.value) -@dataclass(frozen=True, slots=True, kw_only=True) +@dataclass(frozen=True, slots=True) class StartsWith: prefix: str error_message: str = "Value must start with '{prefix}'" @@ 
-43,7 +43,7 @@ class TestCustomFieldValidator: def test_success(self, tmp_path: Path): @dataclass class Config: - count: Annotated[int, Divisible(value=5)] + count: Annotated[int, Divisible(5)] json_file = tmp_path / "config.json" json_file.write_text('{"count": 10}') @@ -56,7 +56,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - count: Annotated[int, Divisible(value=5)] + count: Annotated[int, Divisible(5)] json_file = tmp_path / "config.json" content = '{"count": 7}' @@ -80,7 +80,7 @@ class Config: def test_custom_error_message(self, tmp_path: Path): @dataclass class Config: - count: Annotated[int, Divisible(value=3, error_message="Must be a multiple of {value}")] + count: Annotated[int, Divisible(3, error_message="Must be a multiple of {value}")] json_file = tmp_path / "config.json" content = '{"count": 7}' @@ -106,7 +106,7 @@ class TestCustomStringValidator: def test_success(self, tmp_path: Path): @dataclass class Config: - url: Annotated[str, StartsWith(prefix="https://")] + url: Annotated[str, StartsWith("https://")] json_file = tmp_path / "config.json" json_file.write_text('{"url": "https://example.com"}') @@ -119,7 +119,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - url: Annotated[str, StartsWith(prefix="https://")] + url: Annotated[str, StartsWith("https://")] json_file = tmp_path / "config.json" content = '{"url": "http://example.com"}' @@ -149,7 +149,7 @@ def test_success(self, tmp_path: Path): @load(Source(file=json_file)) @dataclass class Config: - port: Annotated[int, Divisible(value=10)] + port: Annotated[int, Divisible(10)] config = Config() assert config.port == 8080 @@ -162,7 +162,7 @@ def test_failure(self, tmp_path: Path): @load(Source(file=json_file)) @dataclass class Config: - port: Annotated[int, Divisible(value=10)] + port: Annotated[int, Divisible(10)] with pytest.raises(DatureConfigError) as exc_info: Config() @@ -185,7 +185,7 @@ def 
test_direct_instantiation_validates(self, tmp_path: Path): @load(Source(file=json_file)) @dataclass class Config: - port: Annotated[int, Divisible(value=10)] + port: Annotated[int, Divisible(10)] with pytest.raises(DatureConfigError) as exc_info: Config(port=8081) @@ -202,8 +202,8 @@ class TestMultipleCustomValidators: def test_combined_success(self, tmp_path: Path): @dataclass class Config: - count: Annotated[int, Divisible(value=5)] - url: Annotated[str, StartsWith(prefix="https://")] + count: Annotated[int, Divisible(5)] + url: Annotated[str, StartsWith("https://")] json_file = tmp_path / "config.json" json_file.write_text('{"count": 15, "url": "https://example.com"}') @@ -217,8 +217,8 @@ class Config: def test_all_fail(self, tmp_path: Path): @dataclass class Config: - count: Annotated[int, Divisible(value=5)] - url: Annotated[str, StartsWith(prefix="https://")] + count: Annotated[int, Divisible(5)] + url: Annotated[str, StartsWith("https://")] json_file = tmp_path / "config.json" content = '{"count": 7, "url": "http://example.com"}' diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index a7f8a57..20bb0c5 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -24,7 +24,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MinLength(value=3), + F[Config].name: MinLength(3), }, ) result = load(metadata, dataclass_=Config) @@ -42,7 +42,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].port: (Gt(value=0), Lt(value=65536)), + F[Config].port: (Gt(0), Lt(65536)), }, ) result = load(metadata, dataclass_=Config) @@ -61,8 +61,8 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MinLength(value=3), - F[Config].port: Gt(value=0), + F[Config].name: MinLength(3), + F[Config].port: Gt(0), }, ) result = load(metadata, dataclass_=Config) @@ -84,7 +84,7 @@ class Config: metadata 
= Source( file=json_file, validators={ - F[Config].name: MinLength(value=3), + F[Config].name: MinLength(3), }, ) @@ -113,7 +113,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].port: (Gt(value=0), Lt(value=65536)), + F[Config].port: (Gt(0), Lt(65536)), }, ) @@ -148,8 +148,8 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].database.host: MinLength(value=1), - F[Config].database.port: Gt(value=0), + F[Config].database.host: MinLength(1), + F[Config].database.port: Gt(0), }, ) result = load(metadata, dataclass_=Config) @@ -174,7 +174,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].database.host: MinLength(value=1), + F[Config].database.host: MinLength(1), }, ) @@ -196,7 +196,7 @@ class TestMetadataValidatorsComplement: def test_metadata_validators_complement_annotated(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(3)] port: int json_file = tmp_path / "config.json" @@ -205,8 +205,8 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MaxLength(value=50), - F[Config].port: Gt(value=0), + F[Config].name: MaxLength(50), + F[Config].port: Gt(0), }, ) result = load(metadata, dataclass_=Config) @@ -217,7 +217,7 @@ class Config: def test_annotated_still_validates(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=5)] + name: Annotated[str, MinLength(5)] json_file = tmp_path / "config.json" content = '{"name": "Al"}' @@ -226,7 +226,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MaxLength(value=50), + F[Config].name: MaxLength(50), }, ) @@ -246,7 +246,7 @@ class Config: def test_metadata_validator_fails_with_annotated_present(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(3)] json_file = tmp_path / "config.json" content = '{"name": 
"This is a very long name that exceeds the limit"}' @@ -255,7 +255,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MaxLength(value=10), + F[Config].name: MaxLength(10), }, ) @@ -275,7 +275,7 @@ class Config: def test_both_annotated_and_metadata_on_same_field_pass(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(3)] json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') @@ -283,7 +283,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MaxLength(value=10), + F[Config].name: MaxLength(10), }, ) result = load(metadata, dataclass_=Config) @@ -293,7 +293,7 @@ class Config: def test_annotated_fails_while_metadata_would_pass(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=5)] + name: Annotated[str, MinLength(5)] json_file = tmp_path / "config.json" content = '{"name": "AB"}' @@ -302,7 +302,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MaxLength(value=50), + F[Config].name: MaxLength(50), }, ) @@ -322,7 +322,7 @@ class Config: def test_same_validator_type_in_annotated_and_metadata(self, tmp_path: Path): @dataclass class Config: - port: Annotated[int, Ge(value=0)] + port: Annotated[int, Ge(0)] json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080}') @@ -330,7 +330,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].port: Lt(value=65536), + F[Config].port: Lt(65536), }, ) result = load(metadata, dataclass_=Config) @@ -340,7 +340,7 @@ class Config: def test_same_validator_type_in_annotated_and_metadata_fails(self, tmp_path: Path): @dataclass class Config: - port: Annotated[int, Ge(value=1024)] + port: Annotated[int, Ge(1024)] json_file = tmp_path / "config.json" content = '{"port": 80}' @@ -349,7 +349,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].port: 
Lt(value=65536), + F[Config].port: Lt(65536), }, ) @@ -369,7 +369,7 @@ class Config: def test_metadata_fails_while_annotated_passes(self, tmp_path: Path): @dataclass class Config: - port: Annotated[int, Ge(value=0)] + port: Annotated[int, Ge(0)] json_file = tmp_path / "config.json" content = '{"port": 70000}' @@ -378,7 +378,7 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].port: Lt(value=65536), + F[Config].port: Lt(65536), }, ) @@ -428,9 +428,9 @@ def validate_config(obj: Config) -> bool: metadata = Source( file=json_file, - root_validators=(RootValidator(func=validate_config),), + root_validators=(RootValidator(validate_config),), validators={ - F[Config].port: Ge(value=0), + F[Config].port: Ge(0), }, ) result = load(metadata, dataclass_=Config) @@ -452,8 +452,8 @@ class Config: metadata = Source( file=json_file, validators={ - F[Config].name: MinLength(value=2), - F[Config].age: Ge(value=0), + F[Config].name: MinLength(2), + F[Config].age: Ge(0), }, ) diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index d714f18..d2be0b7 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -13,7 +13,7 @@ class TestGt: def test_success(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Gt(value=0)] + age: Annotated[int, Gt(0)] json_file = tmp_path / "config.json" json_file.write_text('{"age": 25}') @@ -26,7 +26,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Gt(value=18)] + age: Annotated[int, Gt(18)] json_file = tmp_path / "config.json" content = '{"age": 18}' @@ -52,7 +52,7 @@ class TestGe: def test_success(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Ge(value=18)] + age: Annotated[int, Ge(18)] json_file = tmp_path / "config.json" json_file.write_text('{"age": 18}') @@ -65,7 +65,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, 
Ge(value=18)] + age: Annotated[int, Ge(18)] json_file = tmp_path / "config.json" content = '{"age": 17}' @@ -91,7 +91,7 @@ class TestLt: def test_success(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Lt(value=100)] + age: Annotated[int, Lt(100)] json_file = tmp_path / "config.json" json_file.write_text('{"age": 99}') @@ -104,7 +104,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Lt(value=100)] + age: Annotated[int, Lt(100)] json_file = tmp_path / "config.json" content = '{"age": 100}' @@ -130,7 +130,7 @@ class TestLe: def test_success(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Le(value=100)] + age: Annotated[int, Le(100)] json_file = tmp_path / "config.json" json_file.write_text('{"age": 100}') @@ -143,7 +143,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Le(value=100)] + age: Annotated[int, Le(100)] json_file = tmp_path / "config.json" content = '{"age": 101}' @@ -169,7 +169,7 @@ class TestCombined: def test_combined_numeric_validators(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Ge(value=18), Le(value=65)] + age: Annotated[int, Ge(18), Le(65)] json_file = tmp_path / "config.json" json_file.write_text('{"age": 30}') @@ -182,7 +182,7 @@ class Config: def test_combined_numeric_validators_failure(self, tmp_path: Path): @dataclass class Config: - age: Annotated[int, Ge(value=18), Le(value=65)] + age: Annotated[int, Ge(18), Le(65)] json_file = tmp_path / "config.json" content = '{"age": 70}' diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index d75448b..1bd7f48 100644 --- a/tests/validators/test_root_validator.py +++ b/tests/validators/test_root_validator.py @@ -25,7 +25,7 @@ def validate_config(obj: Config) -> bool: metadata = Source( file=json_file, - root_validators=(RootValidator(func=validate_config),), + 
root_validators=(RootValidator(validate_config),), ) result = load(metadata, dataclass_=Config) @@ -48,7 +48,7 @@ def validate_config(obj: Config) -> bool: metadata = Source( file=json_file, - root_validators=(RootValidator(func=validate_config),), + root_validators=(RootValidator(validate_config),), ) with pytest.raises(DatureConfigError) as exc_info: @@ -78,8 +78,8 @@ def validate_step(obj: Config) -> bool: metadata = Source( file=json_file, root_validators=( - RootValidator(func=validate_min_max), - RootValidator(func=validate_step), + RootValidator(validate_min_max), + RootValidator(validate_step), ), ) result = load(metadata, dataclass_=Config) @@ -107,8 +107,8 @@ def validate_step(obj: Config) -> bool: metadata = Source( file=json_file, root_validators=( - RootValidator(func=validate_min_max), - RootValidator(func=validate_step), + RootValidator(validate_min_max), + RootValidator(validate_step), ), ) @@ -134,7 +134,7 @@ def validate_config(obj: Config) -> bool: metadata = Source( file=json_file, - root_validators=(RootValidator(func=validate_config),), + root_validators=(RootValidator(validate_config),), ) with pytest.raises(DatureConfigError) as exc_info: @@ -156,7 +156,7 @@ def validate_credentials(obj) -> bool: metadata = Source( file=json_file, - root_validators=(RootValidator(func=validate_credentials),), + root_validators=(RootValidator(validate_credentials),), ) @load(metadata) diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index 8d78c75..3a983af 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -13,7 +13,7 @@ class TestMinItems: def test_success(self, tmp_path: Path): @dataclass class Config: - tags: Annotated[list[str], MinItems(value=2)] + tags: Annotated[list[str], MinItems(2)] json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') @@ -26,7 +26,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - tags: 
Annotated[list[str], MinItems(value=3)] + tags: Annotated[list[str], MinItems(3)] json_file = tmp_path / "config.json" content = '{"tags": ["python"]}' @@ -52,7 +52,7 @@ class TestMaxItems: def test_success(self, tmp_path: Path): @dataclass class Config: - tags: Annotated[list[str], MaxItems(value=5)] + tags: Annotated[list[str], MaxItems(5)] json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') @@ -65,7 +65,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - tags: Annotated[list[str], MaxItems(value=2)] + tags: Annotated[list[str], MaxItems(2)] json_file = tmp_path / "config.json" content = '{"tags": ["python", "typing", "validation"]}' @@ -130,7 +130,7 @@ class TestCombined: def test_combined_list_validators(self, tmp_path: Path): @dataclass class Config: - tags: Annotated[list[str], MinItems(value=2), MaxItems(value=5), UniqueItems()] + tags: Annotated[list[str], MinItems(2), MaxItems(5), UniqueItems()] json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing", "validation"]}') @@ -143,7 +143,7 @@ class Config: def test_combined_list_validators_failure(self, tmp_path: Path): @dataclass class Config: - tags: Annotated[list[str], MinItems(value=2), MaxItems(value=5), UniqueItems()] + tags: Annotated[list[str], MinItems(2), MaxItems(5), UniqueItems()] json_file = tmp_path / "config.json" content = '{"tags": ["python", "typing", "validation", "testing", "coding", "extra"]}' diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index a0f1e61..6a124e6 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -13,7 +13,7 @@ class TestMinLength: def test_success(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=3)] + name: Annotated[str, MinLength(3)] json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') @@ -26,7 +26,7 @@ class Config: def 
test_failure(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MinLength(value=5)] + name: Annotated[str, MinLength(5)] json_file = tmp_path / "config.json" content = '{"name": "Bob"}' @@ -52,7 +52,7 @@ class TestMaxLength: def test_success(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MaxLength(value=10)] + name: Annotated[str, MaxLength(10)] json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') @@ -65,7 +65,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - name: Annotated[str, MaxLength(value=5)] + name: Annotated[str, MaxLength(5)] json_file = tmp_path / "config.json" content = '{"name": "Alexander"}' @@ -91,7 +91,7 @@ class TestRegexPattern: def test_success(self, tmp_path: Path): @dataclass class Config: - email: Annotated[str, RegexPattern(pattern=r"^[\w\.-]+@[\w\.-]+\.\w+$")] + email: Annotated[str, RegexPattern(r"^[\w\.-]+@[\w\.-]+\.\w+$")] json_file = tmp_path / "config.json" json_file.write_text('{"email": "test@example.com"}') @@ -104,7 +104,7 @@ class Config: def test_failure(self, tmp_path: Path): @dataclass class Config: - email: Annotated[str, RegexPattern(pattern=r"^[\w\.-]+@[\w\.-]+\.\w+$")] + email: Annotated[str, RegexPattern(r"^[\w\.-]+@[\w\.-]+\.\w+$")] json_file = tmp_path / "config.json" content = '{"email": "invalid-email"}' @@ -130,7 +130,7 @@ class TestCombined: def test_combined_string_validators(self, tmp_path: Path): @dataclass class Config: - username: Annotated[str, MinLength(value=3), MaxLength(value=20)] + username: Annotated[str, MinLength(3), MaxLength(20)] json_file = tmp_path / "config.json" json_file.write_text('{"username": "john_doe"}') @@ -143,7 +143,7 @@ class Config: def test_combined_string_validators_failure(self, tmp_path: Path): @dataclass class Config: - username: Annotated[str, MinLength(value=3), MaxLength(value=20)] + username: Annotated[str, MinLength(3), MaxLength(20)] json_file = tmp_path / "config.json" 
content = '{"username": "this_is_a_very_long_username_that_exceeds_limit"}' From 342764be18ef086a5cfc2931d6e90994cfc14b69 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:13:46 +0300 Subject: [PATCH 09/36] dature.errors.exceptions -> dature.errors --- .../advanced/debug/advanced_debug_error.py | 2 +- .../advanced_field_groups_expansion_error.py | 2 +- .../advanced_field_groups_multiple_error.py | 2 +- .../advanced_field_groups_nested_error.py | 2 +- .../dynaconf_root_validators.py | 2 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_validators.py | 2 +- .../docs/features/masking/masking_by_name.py | 2 +- .../features/masking/masking_heuristic.py | 2 +- .../features/masking/masking_merge_mode.py | 2 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../features/masking/masking_secret_str.py | 2 +- .../validation/validation_annotated.py | 2 +- .../features/validation/validation_custom.py | 2 +- .../validation/validation_metadata.py | 2 +- .../features/validation/validation_root.py | 2 +- src/dature/errors/__init__.py | 29 +++++++++++++++++++ src/dature/expansion/env_expand.py | 2 +- src/dature/loading/multi.py | 2 +- src/dature/loading/single.py | 2 +- src/dature/loading/source_loading.py | 2 +- src/dature/merging/deep_merge.py | 2 +- src/dature/merging/field_group.py | 2 +- src/dature/path_finders/base.py | 2 +- src/dature/path_finders/ini_.py | 2 +- src/dature/path_finders/json5_.py | 2 +- src/dature/path_finders/json_.py | 2 +- src/dature/path_finders/toml_.py | 2 +- src/dature/path_finders/yaml_.py | 2 +- src/dature/protocols.py | 2 +- src/dature/sources_loader/base.py | 2 +- src/dature/sources_loader/docker_secrets.py | 2 +- src/dature/sources_loader/env_.py | 2 +- tests/errors/test_exceptions.py | 2 +- tests/errors/test_fixtures.py | 2 +- tests/errors/test_location.py | 2 +- tests/expansion/test_env_expand.py | 2 +- tests/expansion/test_expand_file_path.py | 2 
+- tests/loading/test_field_merges.py | 2 +- tests/loading/test_multi.py | 2 +- tests/loading/test_skip_invalid_fields.py | 2 +- tests/loading/test_source_loading.py | 2 +- tests/masking/test_masking.py | 2 +- tests/merging/test_field_group.py | 2 +- tests/path_finders/test_ini.py | 2 +- tests/path_finders/test_json.py | 2 +- tests/path_finders/test_json5.py | 2 +- tests/path_finders/test_toml.py | 2 +- tests/path_finders/test_yaml.py | 2 +- tests/sources_loader/test_base.py | 2 +- tests/sources_loader/test_json5_.py | 2 +- tests/sources_loader/test_json_.py | 2 +- tests/sources_loader/test_nested_resolve.py | 2 +- tests/sources_loader/test_toml10_.py | 2 +- tests/sources_loader/test_toml11_.py | 2 +- tests/sources_loader/test_yaml11_.py | 2 +- tests/sources_loader/test_yaml12_.py | 2 +- tests/test_config.py | 2 +- tests/test_load_report.py | 2 +- tests/validators/test_complex.py | 2 +- tests/validators/test_custom_validator.py | 2 +- tests/validators/test_metadata_validators.py | 2 +- tests/validators/test_number.py | 2 +- tests/validators/test_root_validator.py | 2 +- tests/validators/test_sequence.py | 2 +- tests/validators/test_string.py | 2 +- 67 files changed, 95 insertions(+), 66 deletions(-) diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 09c4d60..88d293f 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -4,7 +4,7 @@ from pathlib import Path import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 37897e5..2a58587 100644 --- 
a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -5,7 +5,7 @@ from textwrap import dedent import dature -from dature.errors.exceptions import FieldGroupError +from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index ae4a6e4..5931c0a 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -5,7 +5,7 @@ from textwrap import dedent import dature -from dature.errors.exceptions import FieldGroupError +from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 7b0eca8..d021524 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -5,7 +5,7 @@ from textwrap import dedent import dature -from dature.errors.exceptions import FieldGroupError +from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index c2354f9..48e84f4 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from 
dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt from dature.validators.root import RootValidator diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index cbd3d4d..a942abf 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index 0700d99..3d59942 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index 7f4ffd9..edbd432 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -5,7 +5,7 @@ from typing import Literal import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index f113e9c..9a070f0 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ 
b/examples/docs/features/masking/masking_heuristic.py @@ -5,7 +5,7 @@ from typing import Literal import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index 76d09a7..5edf6f4 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index 00220cf..b5585c4 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index 4d60c8c..320ead1 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index 9de1abc..d58e0db 100644 --- 
a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -4,7 +4,7 @@ from pathlib import Path import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index 7321b0d..7d1f314 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -5,7 +5,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems from dature.validators.string import MaxLength, MinLength diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index 229a819..3be5e34 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -6,7 +6,7 @@ from typing import Annotated import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Ge SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index be4fe50..53cf3f6 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -4,7 +4,7 @@ from pathlib import Path import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Ge, Lt 
from dature.validators.string import MinLength diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index 9a37270..a62613a 100644 --- a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -4,7 +4,7 @@ from pathlib import Path import dature -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.root import RootValidator SOURCES_DIR = Path(__file__).parent / "sources" diff --git a/src/dature/errors/__init__.py b/src/dature/errors/__init__.py index e69de29..efb7cb7 100644 --- a/src/dature/errors/__init__.py +++ b/src/dature/errors/__init__.py @@ -0,0 +1,29 @@ +from dature.errors.exceptions import ( + DatureConfigError, + DatureError, + EnvVarExpandError, + FieldGroupError, + FieldGroupViolationError, + FieldLoadError, + LineRange, + MergeConflictError, + MergeConflictFieldError, + MissingEnvVarError, + SourceLoadError, + SourceLocation, +) + +__all__ = [ + "DatureConfigError", + "DatureError", + "EnvVarExpandError", + "FieldGroupError", + "FieldGroupViolationError", + "FieldLoadError", + "LineRange", + "MergeConflictError", + "MergeConflictFieldError", + "MissingEnvVarError", + "SourceLoadError", + "SourceLocation", +] diff --git a/src/dature/expansion/env_expand.py b/src/dature/expansion/env_expand.py index bb20e50..b7e2dbe 100644 --- a/src/dature/expansion/env_expand.py +++ b/src/dature/expansion/env_expand.py @@ -1,7 +1,7 @@ import os import re -from dature.errors.exceptions import EnvVarExpandError, MissingEnvVarError +from dature.errors import EnvVarExpandError, MissingEnvVarError from dature.types import ExpandEnvVarsMode, FilePath, JSONValue # $VAR, ${VAR}, ${VAR:-default}, %VAR%, $$, %% diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py index 8570394..4845a64 100644 --- a/src/dature/loading/multi.py +++ b/src/dature/loading/multi.py @@ -5,7 
+5,7 @@ from typing import Any from dature.config import config -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.errors.formatter import enrich_skipped_errors, handle_load_errors from dature.load_report import ( FieldOrigin, diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index d6a858a..69826fe 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any from dature.config import config -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.errors.formatter import enrich_skipped_errors, handle_load_errors from dature.errors.location import read_filecontent from dature.load_report import FieldOrigin, LoadReport, SourceEntry, attach_load_report diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index da9b713..f8c69a8 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -3,7 +3,7 @@ from pathlib import Path from dature.config import config -from dature.errors.exceptions import DatureConfigError, SourceLoadError, SourceLocation +from dature.errors import DatureConfigError, SourceLoadError, SourceLocation from dature.errors.formatter import handle_load_errors from dature.errors.location import ErrorContext, read_filecontent from dature.field_path import FieldPath diff --git a/src/dature/merging/deep_merge.py b/src/dature/merging/deep_merge.py index 6149c17..8278c28 100644 --- a/src/dature/merging/deep_merge.py +++ b/src/dature/merging/deep_merge.py @@ -1,6 +1,6 @@ from dataclasses import dataclass -from dature.errors.exceptions import MergeConflictError, MergeConflictFieldError, SourceLocation +from dature.errors import MergeConflictError, MergeConflictFieldError, SourceLocation from dature.errors.location import resolve_source_location from dature.loading.source_loading import 
SourceContext from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum diff --git a/src/dature/merging/field_group.py b/src/dature/merging/field_group.py index 900fe6f..61c46c6 100644 --- a/src/dature/merging/field_group.py +++ b/src/dature/merging/field_group.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from typing import Any -from dature.errors.exceptions import FieldGroupError, FieldGroupViolationError +from dature.errors import FieldGroupError, FieldGroupViolationError from dature.merging.predicate import ResolvedFieldGroup from dature.types import JSONValue diff --git a/src/dature/path_finders/base.py b/src/dature/path_finders/base.py index bd58bb7..0665b1b 100644 --- a/src/dature/path_finders/base.py +++ b/src/dature/path_finders/base.py @@ -1,6 +1,6 @@ import abc -from dature.errors.exceptions import LineRange +from dature.errors import LineRange class PathFinder(abc.ABC): diff --git a/src/dature/path_finders/ini_.py b/src/dature/path_finders/ini_.py index 827557a..d17c78f 100644 --- a/src/dature/path_finders/ini_.py +++ b/src/dature/path_finders/ini_.py @@ -1,7 +1,7 @@ import configparser import sys -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.base import PathFinder _MIN_INI_PATH_DEPTH = 2 diff --git a/src/dature/path_finders/json5_.py b/src/dature/path_finders/json5_.py index 4e51aa2..f17ccb9 100644 --- a/src/dature/path_finders/json5_.py +++ b/src/dature/path_finders/json5_.py @@ -1,7 +1,7 @@ from json5.model import Identifier, JSONArray, JSONObject, String, Value from json5.parser import parse_source -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.base import PathFinder diff --git a/src/dature/path_finders/json_.py b/src/dature/path_finders/json_.py index eb50a8c..aca276e 100644 --- a/src/dature/path_finders/json_.py +++ b/src/dature/path_finders/json_.py @@ -5,7 +5,7 @@ from json.scanner import 
py_make_scanner # type: ignore[attr-defined] from typing import TYPE_CHECKING -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.base import PathFinder if TYPE_CHECKING: diff --git a/src/dature/path_finders/toml_.py b/src/dature/path_finders/toml_.py index da4c86c..3364f25 100644 --- a/src/dature/path_finders/toml_.py +++ b/src/dature/path_finders/toml_.py @@ -3,7 +3,7 @@ import toml_rs from toml_rs._lib import TomlVersion -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.base import PathFinder if TYPE_CHECKING: diff --git a/src/dature/path_finders/yaml_.py b/src/dature/path_finders/yaml_.py index 104203f..b2a286a 100644 --- a/src/dature/path_finders/yaml_.py +++ b/src/dature/path_finders/yaml_.py @@ -5,7 +5,7 @@ from ruamel.yaml.docinfo import Version from ruamel.yaml.scalarstring import ScalarString -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.base import PathFinder diff --git a/src/dature/protocols.py b/src/dature/protocols.py index 49e99a1..013c7f7 100644 --- a/src/dature/protocols.py +++ b/src/dature/protocols.py @@ -5,7 +5,7 @@ from adaptix import Retort -from dature.errors.exceptions import SourceLocation +from dature.errors import SourceLocation from dature.path_finders.base import PathFinder from dature.types import FileOrStream, JSONValue, LoadRawResult, NestedConflict diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py index 12c9d10..578d276 100644 --- a/src/dature/sources_loader/base.py +++ b/src/dature/sources_loader/base.py @@ -10,7 +10,7 @@ from adaptix import Retort, loader, name_mapping from adaptix.provider import Provider -from dature.errors.exceptions import LineRange, SourceLocation +from dature.errors import LineRange, SourceLocation from dature.expansion.alias_provider import AliasProvider, resolve_nested_owner from 
dature.expansion.env_expand import expand_env_vars from dature.field_path import FieldPath diff --git a/src/dature/sources_loader/docker_secrets.py b/src/dature/sources_loader/docker_secrets.py index dba42b4..a602402 100644 --- a/src/dature/sources_loader/docker_secrets.py +++ b/src/dature/sources_loader/docker_secrets.py @@ -1,7 +1,7 @@ from pathlib import Path from typing import ClassVar -from dature.errors.exceptions import SourceLocation +from dature.errors import SourceLocation from dature.sources_loader.flat_key import FlatKeyLoader from dature.types import FileOrStream, JSONValue, NestedConflict diff --git a/src/dature/sources_loader/env_.py b/src/dature/sources_loader/env_.py index 4d94556..e3750a5 100644 --- a/src/dature/sources_loader/env_.py +++ b/src/dature/sources_loader/env_.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import ClassVar, cast -from dature.errors.exceptions import LineRange, SourceLocation +from dature.errors import LineRange, SourceLocation from dature.sources_loader.flat_key import FlatKeyLoader from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue, NestedConflict, NestedConflicts diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py index 2180616..d2780d3 100644 --- a/tests/errors/test_exceptions.py +++ b/tests/errors/test_exceptions.py @@ -4,7 +4,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError, LineRange, SourceLocation +from dature.errors import DatureConfigError, FieldLoadError, LineRange, SourceLocation class TestDatureConfigErrorFormat: diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index 75eb011..d8f6b89 100644 --- a/tests/errors/test_fixtures.py +++ b/tests/errors/test_fixtures.py @@ -5,7 +5,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, 
FieldLoadError from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems diff --git a/tests/errors/test_location.py b/tests/errors/test_location.py index d7bb2c2..72b23e0 100644 --- a/tests/errors/test_location.py +++ b/tests/errors/test_location.py @@ -1,6 +1,6 @@ from pathlib import Path -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.errors.location import ErrorContext, resolve_source_location from dature.sources_loader.env_ import EnvFileLoader, EnvLoader from dature.sources_loader.json_ import JsonLoader diff --git a/tests/expansion/test_env_expand.py b/tests/expansion/test_env_expand.py index a5d3179..461aa5f 100644 --- a/tests/expansion/test_env_expand.py +++ b/tests/expansion/test_env_expand.py @@ -1,6 +1,6 @@ import pytest -from dature.errors.exceptions import EnvVarExpandError +from dature.errors import EnvVarExpandError from dature.expansion.env_expand import expand_env_vars, expand_string from dature.types import JSONValue diff --git a/tests/expansion/test_expand_file_path.py b/tests/expansion/test_expand_file_path.py index e03a473..7d4371b 100644 --- a/tests/expansion/test_expand_file_path.py +++ b/tests/expansion/test_expand_file_path.py @@ -3,7 +3,7 @@ import pytest -from dature.errors.exceptions import EnvVarExpandError +from dature.errors import EnvVarExpandError from dature.expansion.env_expand import expand_file_path from dature.metadata import Source diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 2f59859..092dbec 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -8,7 +8,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import MergeConflictError +from dature.errors import MergeConflictError from dature.field_path import F from dature.types import FieldMergeStrategyName 
diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index 918c98f..1d79944 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -9,7 +9,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, MergeConflictError +from dature.errors import DatureConfigError, MergeConflictError from dature.validators.number import Ge diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index 26394d9..cff3409 100644 --- a/tests/loading/test_skip_invalid_fields.py +++ b/tests/loading/test_skip_invalid_fields.py @@ -7,7 +7,7 @@ import pytest from dature import F, Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError class TestMergeSkipInvalidFields: diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index 5852071..caf91c9 100644 --- a/tests/loading/test_source_loading.py +++ b/tests/loading/test_source_loading.py @@ -7,7 +7,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, EnvVarExpandError +from dature.errors import DatureConfigError, EnvVarExpandError class TestSkipBrokenSources: diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index 6c6a1ed..192df54 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -6,7 +6,7 @@ import pytest from dature import Source, configure, get_load_report, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.fields.secret_str import SecretStr from dature.load_report import FieldOrigin, SourceEntry from dature.masking.masking import mask_env_line, mask_field_origins, mask_json_value, mask_source_entries, mask_value diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index e9f72b4..9173d25 100644 --- 
a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -7,7 +7,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import FieldGroupError +from dature.errors import FieldGroupError from dature.field_path import F diff --git a/tests/path_finders/test_ini.py b/tests/path_finders/test_ini.py index 8c68d36..2871e61 100644 --- a/tests/path_finders/test_ini.py +++ b/tests/path_finders/test_ini.py @@ -1,4 +1,4 @@ -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.ini_ import TablePathFinder diff --git a/tests/path_finders/test_json.py b/tests/path_finders/test_json.py index 887ffcd..82094a2 100644 --- a/tests/path_finders/test_json.py +++ b/tests/path_finders/test_json.py @@ -1,4 +1,4 @@ -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.json_ import JsonPathFinder diff --git a/tests/path_finders/test_json5.py b/tests/path_finders/test_json5.py index 40727fc..eddb51a 100644 --- a/tests/path_finders/test_json5.py +++ b/tests/path_finders/test_json5.py @@ -1,4 +1,4 @@ -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.json5_ import Json5PathFinder diff --git a/tests/path_finders/test_toml.py b/tests/path_finders/test_toml.py index b7b55dd..d798f0b 100644 --- a/tests/path_finders/test_toml.py +++ b/tests/path_finders/test_toml.py @@ -1,4 +1,4 @@ -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.toml_ import Toml10PathFinder, Toml11PathFinder diff --git a/tests/path_finders/test_yaml.py b/tests/path_finders/test_yaml.py index 2b5a069..83f02cf 100644 --- a/tests/path_finders/test_yaml.py +++ b/tests/path_finders/test_yaml.py @@ -1,4 +1,4 @@ -from dature.errors.exceptions import LineRange +from dature.errors import LineRange from dature.path_finders.yaml_ import Yaml11PathFinder, 
Yaml12PathFinder diff --git a/tests/sources_loader/test_base.py b/tests/sources_loader/test_base.py index ba47b78..c7cc250 100644 --- a/tests/sources_loader/test_base.py +++ b/tests/sources_loader/test_base.py @@ -4,7 +4,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import EnvVarExpandError +from dature.errors import EnvVarExpandError from dature.field_path import F from dature.sources_loader.base import BaseLoader from dature.sources_loader.json_ import JsonLoader diff --git a/tests/sources_loader/test_json5_.py b/tests/sources_loader/test_json5_.py index 0f2c74b..08dcdfa 100644 --- a/tests/sources_loader/test_json5_.py +++ b/tests/sources_loader/test_json5_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.json5_ import Json5Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/sources_loader/test_json_.py b/tests/sources_loader/test_json_.py index 2094b95..7fca045 100644 --- a/tests/sources_loader/test_json_.py +++ b/tests/sources_loader/test_json_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.json_ import JsonLoader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources_loader/test_nested_resolve.py index e038402..f3fdd4c 100644 --- a/tests/sources_loader/test_nested_resolve.py +++ b/tests/sources_loader/test_nested_resolve.py @@ -8,7 +8,7 @@ import pytest from dature import F, Source, load -from 
dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.docker_secrets import DockerSecretsLoader from dature.sources_loader.env_ import EnvFileLoader, EnvLoader diff --git a/tests/sources_loader/test_toml10_.py b/tests/sources_loader/test_toml10_.py index fac994a..b33e978 100644 --- a/tests/sources_loader/test_toml10_.py +++ b/tests/sources_loader/test_toml10_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.toml_ import Toml10Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources_loader/test_toml11_.py index 4a6bf09..3253eb0 100644 --- a/tests/sources_loader/test_toml11_.py +++ b/tests/sources_loader/test_toml11_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.toml_ import Toml11Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources_loader/test_yaml11_.py index 24928f5..50bffad 100644 --- a/tests/sources_loader/test_yaml11_.py +++ b/tests/sources_loader/test_yaml11_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.yaml_ import Yaml11Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact 
from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources_loader/test_yaml12_.py index c256c37..612860a 100644 --- a/tests/sources_loader/test_yaml12_.py +++ b/tests/sources_loader/test_yaml12_.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError, FieldLoadError +from dature.errors import DatureConfigError, FieldLoadError from dature.sources_loader.yaml_ import Yaml12Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact from tests.sources_loader.checker import assert_all_types_equal diff --git a/tests/test_config.py b/tests/test_config.py index 35a20b9..cfe1c47 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -10,7 +10,7 @@ config, configure, ) -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError @pytest.mark.usefixtures("_reset_config") diff --git a/tests/test_load_report.py b/tests/test_load_report.py index 3b22f2a..9714ab9 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -9,7 +9,7 @@ import pytest from dature import Source, get_load_report, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.load_report import FieldOrigin, LoadReport, SourceEntry from dature.validators.number import Ge diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 0cd8552..2822ec7 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -5,7 +5,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems from dature.validators.string import MaxLength, MinLength, RegexPattern diff --git 
a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index afd8beb..30e4c42 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -6,7 +6,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError @dataclass(frozen=True, slots=True) diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index 20bb0c5..9df618b 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -5,7 +5,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.field_path import F from dature.validators.number import Ge, Gt, Lt from dature.validators.root import RootValidator diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index d2be0b7..3a2f2dc 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -5,7 +5,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.number import Ge, Gt, Le, Lt diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index 1bd7f48..f6e0aff 100644 --- a/tests/validators/test_root_validator.py +++ b/tests/validators/test_root_validator.py @@ -4,7 +4,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.root import RootValidator diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index 3a983af..9dc797c 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -5,7 +5,7 @@ import pytest from 
dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.sequence import MaxItems, MinItems, UniqueItems diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index 6a124e6..f2eb75b 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -5,7 +5,7 @@ import pytest from dature import Source, load -from dature.errors.exceptions import DatureConfigError +from dature.errors import DatureConfigError from dature.validators.string import MaxLength, MinLength, RegexPattern From dc4cf1fd1f192749a3c3892611f1f8481ee0932d Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:17:11 +0300 Subject: [PATCH 10/36] fix field group docs --- docs/advanced/field-groups.md | 22 +++++++++++- docs/advanced/merge-rules.md | 34 ------------------- .../field_groups_basic.py} | 0 3 files changed, 21 insertions(+), 35 deletions(-) rename examples/docs/advanced/{merge_rules/merging_field_groups.py => field_groups/field_groups_basic.py} (100%) diff --git a/docs/advanced/field-groups.md b/docs/advanced/field-groups.md index 477059e..59d441d 100644 --- a/docs/advanced/field-groups.md +++ b/docs/advanced/field-groups.md @@ -1,6 +1,26 @@ # Field Groups -Ensure related fields are always overridden together. If a source partially overrides a group, `FieldGroupError` is raised: +Ensure related fields are always overridden together: + +=== "Python" + + ```python + --8<-- "examples/docs/advanced/field_groups/field_groups_basic.py" + ``` + +=== "common_field_groups_defaults.yaml" + + ```yaml + --8<-- "examples/docs/shared/common_field_groups_defaults.yaml" + ``` + +=== "common_field_groups_overrides.yaml" + + ```yaml + --8<-- "examples/docs/shared/common_field_groups_overrides.yaml" + ``` + +If `overrides.yaml` changes `host` and `port` together, the group constraint is satisfied. 
If a source partially overrides a group, `FieldGroupError` is raised: === "Python" diff --git a/docs/advanced/merge-rules.md b/docs/advanced/merge-rules.md index d27d7d5..ea7e319 100644 --- a/docs/advanced/merge-rules.md +++ b/docs/advanced/merge-rules.md @@ -136,40 +136,6 @@ You can also pass a callable as the strategy: The callable receives a `list[JSONValue]` (one value per source) and returns the merged value. -## Field Groups - -Ensure that related fields are always overridden together. If a source changes some fields in a group but not others, `FieldGroupError` is raised: - -=== "Python" - - ```python - --8<-- "examples/docs/advanced/merge_rules/merging_field_groups.py" - ``` - -=== "common_field_groups_defaults.yaml" - - ```yaml - --8<-- "examples/docs/shared/common_field_groups_defaults.yaml" - ``` - -=== "common_field_groups_overrides.yaml" - - ```yaml - --8<-- "examples/docs/shared/common_field_groups_overrides.yaml" - ``` - -If `overrides.yaml` changes `host` and `port` together, the group constraint is satisfied. If it changed only `host` but not `port`, loading would fail: - -``` -Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source yaml 'overrides.yaml') - unchanged: port (from source yaml 'defaults.yaml') -``` - -For nested dataclass expansion and multiple groups, see [Field Groups](field-groups.md). 
- ## Skipping Broken Sources Skip sources that fail to load (missing file, invalid syntax): diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/field_groups/field_groups_basic.py similarity index 100% rename from examples/docs/advanced/merge_rules/merging_field_groups.py rename to examples/docs/advanced/field_groups/field_groups_basic.py From 8a93a2a253a292d217d6d5cfc4f79a6735c61571 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:18:26 +0300 Subject: [PATCH 11/36] fix cache docs --- docs/advanced/caching.md | 14 ++++++++--- ...aching.py => advanced_caching_disabled.py} | 19 +-------------- .../caching/advanced_caching_enabled.py | 23 +++++++++++++++++++ 3 files changed, 35 insertions(+), 21 deletions(-) rename examples/docs/advanced/caching/{advanced_caching.py => advanced_caching_disabled.py} (50%) create mode 100644 examples/docs/advanced/caching/advanced_caching_enabled.py diff --git a/docs/advanced/caching.md b/docs/advanced/caching.md index 0344a0f..5ddaa64 100644 --- a/docs/advanced/caching.md +++ b/docs/advanced/caching.md @@ -2,8 +2,16 @@ In decorator mode, caching is enabled by default: -```python ---8<-- "examples/docs/advanced/caching/advanced_caching.py" -``` +=== "cache=True" + + ```python + --8<-- "examples/docs/advanced/caching/advanced_caching_enabled.py" + ``` + +=== "cache=False" + + ```python + --8<-- "examples/docs/advanced/caching/advanced_caching_disabled.py" + ``` Caching can also be configured globally via `configure()`. 
diff --git a/examples/docs/advanced/caching/advanced_caching.py b/examples/docs/advanced/caching/advanced_caching_disabled.py similarity index 50% rename from examples/docs/advanced/caching/advanced_caching.py rename to examples/docs/advanced/caching/advanced_caching_disabled.py index bdb1359..5497fd1 100644 --- a/examples/docs/advanced/caching/advanced_caching.py +++ b/examples/docs/advanced/caching/advanced_caching_disabled.py @@ -1,27 +1,10 @@ -"""Caching — decorator mode with cache enabled/disabled.""" +"""Caching — decorator mode with cache disabled.""" import os from dataclasses import dataclass import dature -os.environ["CACHE_HOST"] = "localhost" -os.environ["CACHE_PORT"] = "6379" - - -@dature.load(dature.Source(prefix="CACHE_"), cache=True) -@dataclass -class CachedConfig: - host: str - port: int - - -config1 = CachedConfig() -os.environ["CACHE_PORT"] = "9999" -config2 = CachedConfig() -assert config1.port == 6379 -assert config2.port == 6379 - os.environ["NOCACHE_HOST"] = "localhost" os.environ["NOCACHE_PORT"] = "6379" diff --git a/examples/docs/advanced/caching/advanced_caching_enabled.py b/examples/docs/advanced/caching/advanced_caching_enabled.py new file mode 100644 index 0000000..3fe8b2b --- /dev/null +++ b/examples/docs/advanced/caching/advanced_caching_enabled.py @@ -0,0 +1,23 @@ +"""Caching — decorator mode with cache enabled.""" + +import os +from dataclasses import dataclass + +import dature + +os.environ["CACHE_HOST"] = "localhost" +os.environ["CACHE_PORT"] = "6379" + + +@dature.load(dature.Source(prefix="CACHE_"), cache=True) +@dataclass +class CachedConfig: + host: str + port: int + + +config1 = CachedConfig() +os.environ["CACHE_PORT"] = "9999" +config2 = CachedConfig() +assert config1.port == 6379 +assert config2.port == 6379 From 9c3488e15ee70863091c57d067960c95a0ba299e Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:21:19 +0300 Subject: [PATCH 12/36] refactor docs --- docs/advanced/custom_types.md | 16 ++++++++++------ 
.../advanced/custom_types/custom_type_merge.py | 2 +- src/dature/config.py | 12 ++++++------ 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/docs/advanced/custom_types.md b/docs/advanced/custom_types.md index 8fae43f..6202eae 100644 --- a/docs/advanced/custom_types.md +++ b/docs/advanced/custom_types.md @@ -6,13 +6,17 @@ Use `type_loaders` to teach dature how to parse custom types from strings. Pass `type_loaders` as a `dict[type, Callable]` mapping types to conversion functions: -```python ---8<-- "examples/docs/advanced/custom_types/custom_type.py" -``` +=== "Python" -```yaml title="custom_type_common.yaml" ---8<-- "examples/docs/advanced/custom_types/sources/custom_type_common.yaml" -``` + ```python + --8<-- "examples/docs/advanced/custom_types/custom_type.py" + ``` + +=== "custom_type_common.yaml" + + ```yaml + --8<-- "examples/docs/advanced/custom_types/sources/custom_type_common.yaml" + ``` ### Per-source vs Global diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index 0452872..551aa7f 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -1,4 +1,4 @@ -"""Per-merge type_loaders — set type_loaders on Merge for multi-source loads.""" +"""Per-merge type_loaders — set type_loaders on load() for multi-source loads.""" from dataclasses import dataclass from pathlib import Path diff --git a/src/dature/config.py b/src/dature/config.py index b77c6cd..6129f12 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -9,10 +9,10 @@ # --8<-- [start:masking-config] @dataclass(frozen=True, slots=True) class MaskingConfig: - mask: Annotated[str, MinLength(value=1)] = "" - visible_prefix: Annotated[int, Ge(value=0)] = 0 - visible_suffix: Annotated[int, Ge(value=0)] = 0 - min_heuristic_length: Annotated[int, Ge(value=1)] = 8 + mask: Annotated[str, MinLength(1)] = "" + visible_prefix: 
Annotated[int, Ge(0)] = 0 + visible_suffix: Annotated[int, Ge(0)] = 0 + min_heuristic_length: Annotated[int, Ge(1)] = 8 heuristic_threshold: float = 0.5 secret_field_names: tuple[str, ...] = ( "password", @@ -36,8 +36,8 @@ class MaskingConfig: # --8<-- [start:error-display-config] @dataclass(frozen=True, slots=True) class ErrorDisplayConfig: - max_visible_lines: Annotated[int, Ge(value=1)] = 3 - max_line_length: Annotated[int, Ge(value=1)] = 80 + max_visible_lines: Annotated[int, Ge(1)] = 3 + max_line_length: Annotated[int, Ge(1)] = 80 # --8<-- [end:error-display-config] From 976e7211830b874f4a5140553ab62264b2aeb861 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:22:36 +0300 Subject: [PATCH 13/36] fix --- README.md | 6 +++--- changes/+docs-tuning.doc | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changes/+docs-tuning.doc diff --git a/README.md b/README.md index 3241b5b..e0fb9e7 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# dature +
---- +dature -
+--- [![PyPI](https://img.shields.io/pypi/v/dature)](https://pypi.org/project/dature/) [![Supported Python versions](https://img.shields.io/pypi/pyversions/dature)](https://pypi.org/project/dature/) diff --git a/changes/+docs-tuning.doc b/changes/+docs-tuning.doc new file mode 100644 index 0000000..11f3c95 --- /dev/null +++ b/changes/+docs-tuning.doc @@ -0,0 +1 @@ +Improved documentation for Caching, Merge Rules, Configure, Custom Types, and Field Groups sections. From f56e8ec778275db9fd2c205aad48d8a7919f5d6c Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:35:23 +0300 Subject: [PATCH 14/36] dataclass_ -> schema --- docs/api-reference.md | 6 +- .../advanced/configure/advanced_configure.py | 6 +- .../configure/advanced_configure_env.py | 6 +- .../advanced_configure_type_loaders.py | 2 +- .../advanced/custom_types/custom_loader.py | 2 +- .../docs/advanced/custom_types/custom_type.py | 2 +- .../custom_types/custom_type_merge.py | 2 +- .../advanced/debug/advanced_debug_error.py | 2 +- .../advanced/debug/advanced_debug_logging.py | 2 +- .../advanced/debug/advanced_debug_report.py | 2 +- .../env_expansion/advanced_env_expansion.py | 2 +- ...vanced_env_expansion_file_path_combined.py | 2 +- .../advanced_env_expansion_file_path_dir.py | 2 +- .../advanced_env_expansion_file_path_name.py | 2 +- .../advanced_env_expansion_merge.py | 2 +- .../advanced_env_expansion_strict.py | 2 +- .../advanced_field_groups_expansion_error.py | 2 +- .../advanced_field_groups_multiple_error.py | 2 +- .../advanced_field_groups_nested_error.py | 2 +- .../field_groups/field_groups_basic.py | 2 +- .../advanced_merge_rules_callable.py | 2 +- .../advanced_merge_rules_conflict.py | 2 +- .../merge_rules/merging_field_append.py | 2 +- .../merging_field_append_unique.py | 2 +- .../merge_rules/merging_field_first_wins.py | 2 +- .../merge_rules/merging_field_last_wins.py | 2 +- .../merge_rules/merging_field_prepend.py | 2 +- .../merging_field_prepend_unique.py | 2 +- 
.../merge_rules/merging_first_found.py | 2 +- .../merge_rules/merging_skip_broken.py | 2 +- .../merging_skip_broken_per_source.py | 2 +- .../merge_rules/merging_skip_invalid.py | 2 +- .../merging_skip_invalid_per_field.py | 2 +- .../nested_resolve_docker_secrets.py | 2 +- .../nested_resolve/nested_resolve_envfile.py | 2 +- .../nested_resolve_global_flat.py | 2 +- .../nested_resolve_global_json.py | 2 +- .../nested_resolve_no_conflict.py | 2 +- .../nested_resolve/nested_resolve_override.py | 2 +- .../nested_resolve_per_field.py | 2 +- .../nested_resolve/nested_resolve_problem.py | 2 +- .../api_reference_decorator_mode.py | 2 +- .../api_reference_function_mode.py | 4 +- .../why-not-dynaconf/dynaconf_basic.py | 2 +- .../why-not-dynaconf/dynaconf_merge.py | 2 +- .../dynaconf_root_validators.py | 2 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_dataclass.py | 2 +- .../comparison/why-not-hydra/hydra_merge.py | 2 +- .../why-not-hydra/hydra_validators.py | 2 +- .../pydantic_settings_auto_detect.py | 6 +- .../pydantic_settings_basic.py | 2 +- .../pydantic_settings_merge.py | 2 +- .../docs/features/masking/masking_by_name.py | 2 +- .../features/masking/masking_classic_style.py | 2 +- .../features/masking/masking_heuristic.py | 2 +- .../features/masking/masking_merge_mode.py | 2 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../features/masking/masking_secret_str.py | 2 +- .../docs/features/merging/merging_basic.py | 2 +- .../features/merging/merging_strategies.py | 4 +- .../merging/merging_strategy_first_found.py | 2 +- .../merging/merging_strategy_first_wins.py | 2 +- .../merging/merging_strategy_last_wins.py | 2 +- .../merging_strategy_raise_on_conflict.py | 2 +- .../merging/merging_tuple_shorthand.py | 2 +- .../features/naming/naming_field_mapping.py | 2 +- .../docs/features/naming/naming_name_style.py | 2 +- .../features/naming/naming_nested_fields.py | 2 +- 
.../docs/features/naming/naming_prefix.py | 2 +- .../features/naming/naming_prefix_nested.py | 2 +- .../features/naming/naming_split_symbols.py | 2 +- .../validation/validation_annotated.py | 2 +- .../features/validation/validation_custom.py | 2 +- .../validation/validation_metadata.py | 2 +- .../validation/validation_post_init.py | 2 +- .../features/validation/validation_root.py | 2 +- examples/docs/index/intro_function.py | 2 +- examples/docs/introduction/format_docker.py | 2 +- examples/docs/introduction/format_env.py | 2 +- examples/docs/introduction/format_ini.py | 2 +- examples/docs/introduction/format_json.py | 2 +- examples/docs/introduction/format_json5.py | 2 +- examples/docs/introduction/format_toml.py | 2 +- examples/docs/introduction/format_yaml.py | 2 +- examples/docs/introduction/intro_file_like.py | 4 +- examples/load_all_formats.py | 3 +- src/dature/config.py | 2 +- src/dature/field_path.py | 16 ++-- src/dature/loading/context.py | 18 ++--- src/dature/loading/multi.py | 46 +++++------ src/dature/loading/single.py | 30 +++---- src/dature/loading/source_loading.py | 10 +-- src/dature/main.py | 23 +++--- src/dature/merging/predicate.py | 20 +++-- src/dature/protocols.py | 4 +- src/dature/skip_field_provider.py | 4 +- src/dature/sources_loader/base.py | 22 +++--- src/dature/validators/base.py | 8 +- tests/errors/test_exceptions.py | 24 +++--- tests/errors/test_fixtures.py | 4 +- tests/loading/test_field_merges.py | 72 ++++++++--------- tests/loading/test_multi.py | 58 +++++++------- tests/loading/test_single.py | 12 +-- tests/loading/test_skip_invalid_fields.py | 36 ++++----- tests/loading/test_source_loading.py | 34 ++++---- tests/masking/test_masking.py | 20 ++--- tests/merging/test_field_group.py | 46 +++++------ tests/merging/test_predicate.py | 6 +- tests/sources_loader/test_base.py | 40 +++++----- tests/sources_loader/test_docker_secrets.py | 5 +- tests/sources_loader/test_env_.py | 17 ++-- tests/sources_loader/test_ini_.py | 13 ++-- 
tests/sources_loader/test_json5_.py | 16 ++-- tests/sources_loader/test_json_.py | 16 ++-- tests/sources_loader/test_nested_resolve.py | 78 +++++++++---------- tests/sources_loader/test_toml10_.py | 16 ++-- tests/sources_loader/test_toml11_.py | 16 ++-- tests/sources_loader/test_yaml11_.py | 16 ++-- tests/sources_loader/test_yaml12_.py | 16 ++-- tests/test_custom_loader.py | 2 +- tests/test_load_report.py | 22 +++--- tests/test_main.py | 8 +- tests/test_type_loaders.py | 10 +-- tests/validators/test_complex.py | 18 ++--- tests/validators/test_custom_validator.py | 14 ++-- tests/validators/test_metadata_validators.py | 34 ++++---- tests/validators/test_number.py | 20 ++--- .../validators/test_post_init_and_property.py | 14 ++-- tests/validators/test_root_validator.py | 12 +-- tests/validators/test_sequence.py | 16 ++-- tests/validators/test_string.py | 16 ++-- 133 files changed, 577 insertions(+), 572 deletions(-) diff --git a/docs/api-reference.md b/docs/api-reference.md index 56eee2b..887bac8 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -10,13 +10,13 @@ Main entry point. Two calling patterns: -**Function mode** — pass `dataclass_`, get an instance back: +**Function mode** — pass `schema`, get an instance back: ```python --8<-- "examples/docs/api_reference/api_reference_function_mode.py" ``` -**Decorator mode** — omit `dataclass_`, get a decorator: +**Decorator mode** — omit `schema`, get a decorator: ```python --8<-- "examples/docs/api_reference/api_reference_decorator_mode.py" @@ -27,7 +27,7 @@ Main entry point. Two calling patterns: | Parameter | Type | Description | |-----------|------|-------------| | `*sources` | `Source` | One or more source descriptors. No sources → `Source()` (env vars). Multiple sources → merge mode. | -| `dataclass_` | `type[T] \| None` | Target dataclass. If provided → function mode. If `None` → decorator mode. | +| `schema` | `type[T] \| None` | Target dataclass. If provided → function mode. 
If `None` → decorator mode. | | `cache` | `bool \| None` | Enable caching in decorator mode. Default from `configure()`. | | `debug` | `bool \| None` | Collect `LoadReport`. Default from `configure()`. | | `strategy` | `str` | Merge strategy: `"last_wins"` (default), `"first_wins"`, `"first_found"`, `"raise_on_conflict"`. Only used with multiple sources. | diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index c455f52..9669f48 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -16,20 +16,20 @@ class Config: # 1. Default config — debug is off, no report -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None # 2. Enable debug globally via dature.configure() dature.configure(loading={"debug": True}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None # 3. Reset to defaults — debug is off again dature.configure(loading={}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index bd1be13..596aca3 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -20,20 +20,20 @@ class Config: # 1. 
DATURE_LOADING__DEBUG=true — debug is on, report attached -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None # 2. Override env with dature.configure() — debug is off dature.configure(loading={"debug": False}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None # 3. Reset to env defaults — debug is on again dature.configure(loading={"debug": True}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index e0595ea..46ed386 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -29,5 +29,5 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to every load() call dature.configure(type_loaders={Rgb: rgb_from_string}) -config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), dataclass_=AppConfig) +config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), schema=AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py index 31f9d43..51aa01f 100644 --- a/examples/docs/advanced/custom_types/custom_loader.py +++ 
b/examples/docs/advanced/custom_types/custom_loader.py @@ -45,7 +45,7 @@ class Config: file=SOURCES_DIR / "custom_loader.xml", loader=XmlLoader, ), - dataclass_=Config, + schema=Config, ) assert config == Config(host="localhost", port=9090, debug=True) diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index 8327a55..4ece40f 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -31,7 +31,7 @@ class AppConfig: file=SOURCES_DIR / "custom_type_common.yaml", type_loaders={Rgb: rgb_from_string}, ), - dataclass_=AppConfig, + schema=AppConfig, ) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index 551aa7f..ebe34ee 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -29,7 +29,7 @@ class AppConfig: config = dature.load( dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), dature.Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), - dataclass_=AppConfig, + schema=AppConfig, type_loaders={Rgb: rgb_from_string}, ) diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 88d293f..87916c5 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -21,7 +21,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_overrides.yaml"), dature.Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), - dataclass_=Config, + schema=Config, debug=True, ) except DatureConfigError: diff --git a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py index e628157..1efe7e8 100644 --- 
a/examples/docs/advanced/debug/advanced_debug_logging.py +++ b/examples/docs/advanced/debug/advanced_debug_logging.py @@ -26,7 +26,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, ) log_lines = [line for line in log_stream.getvalue().splitlines() if "[Config]" in line] diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py index c1d1128..4de2f6a 100644 --- a/examples/docs/advanced/debug/advanced_debug_report.py +++ b/examples/docs/advanced/debug/advanced_debug_report.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, debug=True, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py index 0f6b609..d16ee46 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py @@ -25,7 +25,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), - dataclass_=Config, + schema=Config, ) assert config.simple == "https://api.example.com" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py index 4d5f47e..f83df73 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py @@ -20,7 +20,7 @@ class Config: config = dature.load( dature.Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), - dataclass_=Config, + 
schema=Config, ) assert config.host == "prod.example.com" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py index dd9ce7b..1ab9f02 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), - dataclass_=Config, + schema=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py index 6e1b502..0fdb28c 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), - dataclass_=Config, + schema=Config, ) assert config.host == "prod.example.com" diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py index 9ff14b2..315fa32 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py @@ -25,7 +25,7 @@ class Config: dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), - dataclass_=Config, + schema=Config, expand_env_vars="default", # global default for all sources ) diff --git 
a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py index 4d0d5d5..ff475de 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), - dataclass_=Config, + schema=Config, ) assert config.resolved_url == "https://api.example.com/api/v1" diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index 2a58587..d0cdd34 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -29,7 +29,7 @@ class Config: dature.load( dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), - dataclass_=Config, + schema=Config, field_groups=((dature.F[Config].database, dature.F[Config].port),), ) except FieldGroupError as exc: diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index 5931c0a..9b991e8 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -24,7 +24,7 @@ class Config: dature.load( dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), - dataclass_=Config, + schema=Config, field_groups=( (dature.F[Config].host, dature.F[Config].port), (dature.F[Config].user, 
dature.F[Config].password), diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index d021524..492fa19 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -24,7 +24,7 @@ class Config: dature.load( dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), - dataclass_=Config, + schema=Config, field_groups=( (dature.F[Config].host, dature.F[Config].port), (dature.F[Config].user, dature.F[Config].password), diff --git a/examples/docs/advanced/field_groups/field_groups_basic.py b/examples/docs/advanced/field_groups/field_groups_basic.py index fbaccc2..79769e7 100644 --- a/examples/docs/advanced/field_groups/field_groups_basic.py +++ b/examples/docs/advanced/field_groups/field_groups_basic.py @@ -20,7 +20,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), - dataclass_=Config, + schema=Config, field_groups=((dature.F[Config].host, dature.F[Config].port),), ) diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index 1b61004..e8032aa 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -23,7 +23,7 @@ def merge_tags(values: list[Any]) -> list[str]: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="last_wins", field_merges={dature.F[Config].tags: merge_tags}, ) diff --git 
a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index a002a21..fd710d5 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={ dature.F[Config].host: "last_wins", diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py index b2c6d4d..545420d 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "append"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index 83e27cb..b29fffe 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "append_unique"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index e65b0c6..e207a7a 100644 --- 
a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "first_wins"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index f332cfb..25b9295 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "last_wins"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index 18155bb..b144515 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "prepend"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index c80af2e..39b3609 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), 
dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), - dataclass_=Config, + schema=Config, field_merges={dature.F[Config].tags: "prepend_unique"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index c2e83e5..b863b1a 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -17,7 +17,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), - dataclass_=Config, + schema=Config, strategy="first_found", ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py index a411de9..d526970 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py @@ -19,7 +19,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), - dataclass_=Config, + schema=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py index 5edad78..5e74810 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py @@ -26,7 +26,7 @@ class Config: file=SHARED_DIR / "common_overrides.yaml", skip_if_broken=False, ), # never skip, even if global is True - dataclass_=Config, + schema=Config, skip_broken_sources=True, # global default ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py index 124e9e1..41bf025 100644 --- 
a/examples/docs/advanced/merge_rules/merging_skip_invalid.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py @@ -16,7 +16,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), - dataclass_=Config, + schema=Config, ) assert config.host == "localhost" diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py index 658ab5b..84d5127 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py @@ -21,7 +21,7 @@ class Config: file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout), ), - dataclass_=Config, + schema=Config, ) assert config.host == "production.example.com" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py index 1de18a7..8176fa3 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py @@ -31,7 +31,7 @@ class Config: loader=DockerSecretsLoader, nested_resolve_strategy="json", ), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py index 89fa02c..5bc82a2 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py @@ -27,7 +27,7 @@ class Config: prefix="APP__", nested_resolve_strategy="json", ), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" diff --git 
a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py index 589b39b..28b6171 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py @@ -24,7 +24,7 @@ class Config: config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), - dataclass_=Config, + schema=Config, ) assert config.database.host == "flat-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py index 89ee347..84eb4a8 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py @@ -24,7 +24,7 @@ class Config: config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py index ee079bb..460f258 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py @@ -28,7 +28,7 @@ class Config: # Even with strategy="flat", JSON is parsed because there are no flat keys config = dature.load( dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_override.py b/examples/docs/advanced/nested_resolve/nested_resolve_override.py index 3efc171..bbd6e36 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_override.py +++ 
b/examples/docs/advanced/nested_resolve/nested_resolve_override.py @@ -40,7 +40,7 @@ class Config: nested_resolve_strategy="flat", nested_resolve={"json": (dature.F[Config].database,)}, ), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" # per-field override wins diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py index 40efb1c..7a8204a 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py @@ -42,7 +42,7 @@ class Config: "flat": (dature.F[Config].cache,), }, ), - dataclass_=Config, + schema=Config, ) assert config.database.host == "json-host" diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py index 7514e20..ae348ba 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py @@ -23,7 +23,7 @@ class Config: # Without nested_resolve_strategy, flat keys win by default -config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), dataclass_=Config) +config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), schema=Config) assert config.database.host == "flat-host" assert config.database.port == 3306 diff --git a/examples/docs/api_reference/api_reference_decorator_mode.py b/examples/docs/api_reference/api_reference_decorator_mode.py index cfd6463..c631a96 100644 --- a/examples/docs/api_reference/api_reference_decorator_mode.py +++ b/examples/docs/api_reference/api_reference_decorator_mode.py @@ -1,4 +1,4 @@ -"""Decorator mode — omit dataclass_, get a decorator.""" +"""Decorator mode — omit schema, get a decorator.""" from dataclasses import dataclass from pathlib import Path diff --git a/examples/docs/api_reference/api_reference_function_mode.py 
b/examples/docs/api_reference/api_reference_function_mode.py index b8ed56e..89611df 100644 --- a/examples/docs/api_reference/api_reference_function_mode.py +++ b/examples/docs/api_reference/api_reference_function_mode.py @@ -1,4 +1,4 @@ -"""Function mode — pass dataclass_, get an instance back.""" +"""Function mode — pass schema, get an instance back.""" from dataclasses import dataclass from pathlib import Path @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py index 66ac1fa..8527e03 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), schema=Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index b674f08..f598311 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), - dataclass_=Config, + schema=Config, strategy="last_wins", ) # --8<-- [end:merge] diff --git 
a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index 48e84f4..6592e53 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -35,7 +35,7 @@ def check_debug_port(config: Config) -> bool: ), ), ), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_root_validators_invalid.toml") diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index a942abf..c2107b0 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -20,7 +20,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), dataclass_=Config) + dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py index adad32c..affd914 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py +++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py @@ -15,7 +15,7 @@ class Config: # --8<-- [start:dataclass] -config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), schema=Config) assert isinstance(config, Config) # Full IDE support, type safety, __post_init__ works # --8<-- [end:dataclass] diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py 
b/examples/docs/comparison/why-not-hydra/hydra_merge.py index 23ceecf..a94986b 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_merge.py +++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), dature.Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), dature.Source(prefix="APP_"), - dataclass_=Config, + schema=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index 3d59942..d128f18 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -19,7 +19,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), dataclass_=Config) + dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), schema=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "hydra_validators_invalid.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py index bc61c0c..6b8f56f 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py @@ -16,9 +16,9 @@ class Config: # --8<-- [start:auto-detect] # Just change the file — dature picks the right loader -yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), dataclass_=Config) -toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), dataclass_=Config) -json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), dataclass_=Config) 
+yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), schema=Config) +toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), schema=Config) +json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), schema=Config) # --8<-- [end:auto-detect] assert yaml_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py index 7c01174..af7c4d4 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), schema=Config) # config.hostt → AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py index 3ad214b..35d0e18 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), dature.Source(prefix="APP_"), - dataclass_=Config, + schema=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index edbd432..ee33b16 100644 --- 
a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -17,7 +17,7 @@ class Config: try: - dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), dataclass_=Config) + dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_by_name.yaml") assert str(exc) == "Config loading errors (1)" diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py index 2969e12..aa09928 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -21,7 +21,7 @@ class Config: host: str -config = dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index 9a070f0..03a0614 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -19,7 +19,7 @@ class Config: try: dature.load( dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_heuristic.yaml") diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index 5edf6f4..be107c3 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -26,7 +26,7 @@ class Config: file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", secret_field_names=("api_key",), ), - dataclass_=Config, + 
schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_merge_mode_secrets.yaml") diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index b5585c4..fbca751 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -24,7 +24,7 @@ class Config: file=SOURCES_DIR / "masking_per_source.yaml", mask_secrets=False, ), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_per_source.yaml") diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index 320ead1..063e98b 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -24,7 +24,7 @@ class Config: file=SOURCES_DIR / "masking_per_source.yaml", secret_field_names=("api_key",), ), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_per_source.yaml") diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index d58e0db..325bfa2 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -21,7 +21,7 @@ class Config: try: config = dature.load( dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "masking_secret_str.yaml") diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index f6f4f5b..2434422 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), 
dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="last_wins", ) diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index a91785e..0bc5317 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py @@ -18,14 +18,14 @@ class Config: last_wins = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="last_wins", ) first_wins = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="first_wins", ) diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index 8c5f3f4..2321cf5 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -19,7 +19,7 @@ class Config: dature.Source(file=SHARED_DIR / "nonexistent.yaml"), dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="first_found", ) diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index 5d65aa3..2dafb5e 100644 --- a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="first_wins", ) diff --git 
a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py index 5946ae4..48fa310 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, strategy="last_wins", ) diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 47a0540..62fc896 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py index 0de73a8..5e5fc30 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand.py +++ b/examples/docs/features/merging/merging_tuple_shorthand.py @@ -18,7 +18,7 @@ class Config: config = dature.load( dature.Source(file=SHARED_DIR / "common_defaults.yaml"), dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dataclass_=Config, + schema=Config, ) assert config.host == "production.example.com" diff --git a/examples/docs/features/naming/naming_field_mapping.py b/examples/docs/features/naming/naming_field_mapping.py index 27c3885..8cbc1b0 100644 --- a/examples/docs/features/naming/naming_field_mapping.py +++ b/examples/docs/features/naming/naming_field_mapping.py @@ -24,7 +24,7 @@ class DbConfig: dature.F[DbConfig].pool_size: 
"pool", }, ), - dataclass_=DbConfig, + schema=DbConfig, ) assert config.database_url == "postgresql://localhost:5432/mydb" diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py index 7fcac77..c72b8b9 100644 --- a/examples/docs/features/naming/naming_name_style.py +++ b/examples/docs/features/naming/naming_name_style.py @@ -18,7 +18,7 @@ class ApiConfig: config = dature.load( dature.Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), - dataclass_=ApiConfig, + schema=ApiConfig, ) assert config.user_name == "admin" diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py index 4b4bd0a..44ea119 100644 --- a/examples/docs/features/naming/naming_nested_fields.py +++ b/examples/docs/features/naming/naming_nested_fields.py @@ -30,7 +30,7 @@ class User: dature.F[Address].street: "streetName", }, ), - dataclass_=User, + schema=User, ) assert config.name == "Alice" diff --git a/examples/docs/features/naming/naming_prefix.py b/examples/docs/features/naming/naming_prefix.py index ea02ff7..2d72039 100644 --- a/examples/docs/features/naming/naming_prefix.py +++ b/examples/docs/features/naming/naming_prefix.py @@ -17,7 +17,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(prefix="MYAPP_"), dataclass_=Config) +config = dature.load(dature.Source(prefix="MYAPP_"), schema=Config) assert config.host == "localhost" assert config.port == 9090 diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py index 912c54d..868fd94 100644 --- a/examples/docs/features/naming/naming_prefix_nested.py +++ b/examples/docs/features/naming/naming_prefix_nested.py @@ -16,7 +16,7 @@ class Database: db = dature.load( dature.Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), - dataclass_=Database, + schema=Database, ) assert db.host == 
"localhost" diff --git a/examples/docs/features/naming/naming_split_symbols.py b/examples/docs/features/naming/naming_split_symbols.py index 150d931..e3e5c42 100644 --- a/examples/docs/features/naming/naming_split_symbols.py +++ b/examples/docs/features/naming/naming_split_symbols.py @@ -20,7 +20,7 @@ class Config: db: Database -config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), dataclass_=Config) +config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), schema=Config) assert config.db.host == "localhost" assert config.db.port == 5432 diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index 7d1f314..54bd771 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -24,7 +24,7 @@ class ServiceConfig: try: dature.load( dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), - dataclass_=ServiceConfig, + schema=ServiceConfig, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_annotated_invalid.json5") diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index 3be5e34..da50e1f 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -38,7 +38,7 @@ class ServiceConfig: try: dature.load( dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), - dataclass_=ServiceConfig, + schema=ServiceConfig, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_custom_invalid.json5") diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 53cf3f6..42f5a30 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -27,7 +27,7 @@ 
class Config: dature.F[Config].port: (Ge(1), Lt(65536)), }, ), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_metadata_invalid.yaml") diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 811935e..8c4d25d 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -25,6 +25,6 @@ def address(self) -> str: try: - dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), dataclass_=Config) + dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config) except ValueError as exc: assert str(exc) == "port must be between 1 and 65535, got 99999" diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index a62613a..e4b98bc 100644 --- a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -34,7 +34,7 @@ def check_debug_not_on_production(obj: Config) -> bool: ), ), ), - dataclass_=Config, + schema=Config, ) except DatureConfigError as exc: source = str(SOURCES_DIR / "validation_root_invalid.yaml") diff --git a/examples/docs/index/intro_function.py b/examples/docs/index/intro_function.py index 3082b92..a044e21 100644 --- a/examples/docs/index/intro_function.py +++ b/examples/docs/index/intro_function.py @@ -17,7 +17,7 @@ class AppConfig: debug: bool = False -config = dature.load(dature.Source(prefix="APP_"), dataclass_=AppConfig) +config = dature.load(dature.Source(prefix="APP_"), schema=AppConfig) assert config.host == "0.0.0.0" assert config.port == 8080 diff --git a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py index dff867d..2725da8 100644 --- a/examples/docs/introduction/format_docker.py +++ 
b/examples/docs/introduction/format_docker.py @@ -17,7 +17,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "intro_app_docker_secrets"), - dataclass_=Config, + schema=Config, ) assert config.host == "localhost" diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py index 4c75b1c..5d63228 100644 --- a/examples/docs/introduction/format_env.py +++ b/examples/docs/introduction/format_env.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py index e93449a..eb92f89 100644 --- a/examples/docs/introduction/format_ini.py +++ b/examples/docs/introduction/format_ini.py @@ -17,7 +17,7 @@ class Config: config = dature.load( dature.Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), - dataclass_=Config, + schema=Config, ) assert config.host == "localhost" diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py index ee7037c..068bab2 100644 --- a/examples/docs/introduction/format_json.py +++ b/examples/docs/introduction/format_json.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_json5.py b/examples/docs/introduction/format_json5.py index d250a9a..b9a7c7c 100644 --- a/examples/docs/introduction/format_json5.py +++ b/examples/docs/introduction/format_json5.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = 
dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py index bc31bba..8c16303 100644 --- a/examples/docs/introduction/format_toml.py +++ b/examples/docs/introduction/format_toml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), dataclass_=Config) +config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py index 3d9c1c4..fc2c9f0 100644 --- a/examples/docs/introduction/format_yaml.py +++ b/examples/docs/introduction/format_yaml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), dataclass_=Config) +config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py index 4e3d63e..80f0ae3 100644 --- a/examples/docs/introduction/intro_file_like.py +++ b/examples/docs/introduction/intro_file_like.py @@ -16,14 +16,14 @@ class Config: # From StringIO text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}') -config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), dataclass_=Config) +config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), schema=Config) assert config.host == "localhost" assert config.port == 8080 # From BytesIO binary_stream = BytesIO(b'{"host": "0.0.0.0", "port": 3000}') -config = 
dature.load(dature.Source(file=binary_stream, loader=JsonLoader), dataclass_=Config) +config = dature.load(dature.Source(file=binary_stream, loader=JsonLoader), schema=Config) assert config.host == "0.0.0.0" assert config.port == 3000 diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index 7539742..d266f21 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -27,6 +27,6 @@ } for meta in FORMATS.values(): - config = dature.load(meta, dataclass_=AllPythonTypesCompact) + config = dature.load(meta, schema=AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value == 42 diff --git a/src/dature/config.py b/src/dature/config.py index 6129f12..adaafa7 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -65,7 +65,7 @@ def _load_config() -> DatureConfig: from dature.main import load  # noqa: PLC0415 from dature.metadata import Source  # noqa: PLC0415 - return load(Source(prefix="DATURE_"), dataclass_=DatureConfig) + return load(Source(prefix="DATURE_"), schema=DatureConfig) class MaskingOptions(TypedDict, total=False): diff --git a/src/dature/field_path.py b/src/dature/field_path.py index acc9d66..ad96e18 100644 --- a/src/dature/field_path.py +++ b/src/dature/field_path.py @@ -57,20 +57,20 @@ def as_path(self) -> str: # --8<-- [end:field-path] -def _validate_field_path_parts(field_path: FieldPath, dataclass_: type) -> None: +def _validate_field_path_parts(field_path: FieldPath, schema: type) -> None: for i, part in enumerate(field_path.parts): - _validate_field(dataclass_, field_path.parts[:i], part) + _validate_field(schema, field_path.parts[:i], part) -def validate_field_path_owner(field_path: FieldPath, dataclass_: type[DataclassInstance]) -> None: +def validate_field_path_owner(field_path: FieldPath, schema: type[DataclassInstance]) -> None: if isinstance(field_path.owner, str): - if field_path.owner != dataclass_.__name__: - msg = 
f"FieldPath owner '{field_path.owner}' does not match target dataclass '{dataclass_.__name__}'" + if field_path.owner != schema.__name__: + msg = f"FieldPath owner '{field_path.owner}' does not match target dataclass '{schema.__name__}'" raise TypeError(msg) - _validate_field_path_parts(field_path, dataclass_) + _validate_field_path_parts(field_path, schema) return - if field_path.owner is not dataclass_: - msg = f"FieldPath owner '{field_path.owner.__name__}' does not match target dataclass '{dataclass_.__name__}'" + if field_path.owner is not schema: + msg = f"FieldPath owner '{field_path.owner.__name__}' does not match target dataclass '{schema.__name__}'" raise TypeError(msg) diff --git a/src/dature/loading/context.py b/src/dature/loading/context.py index ad9bfd1..cea2c68 100644 --- a/src/dature/loading/context.py +++ b/src/dature/loading/context.py @@ -21,13 +21,13 @@ logger = logging.getLogger("dature") -def coerce_flag_fields[T](data: JSONValue, dataclass_: type[T]) -> JSONValue: - if not isinstance(data, dict) or not is_dataclass(dataclass_): +def coerce_flag_fields[T](data: JSONValue, schema: type[T]) -> JSONValue: + if not isinstance(data, dict) or not is_dataclass(schema): return data - type_hints = get_type_hints(dataclass_) + type_hints = get_type_hints(schema) coerced = dict(data) - for field in fields(cast("type[DataclassInstance]", dataclass_)): + for field in fields(cast("type[DataclassInstance]", schema)): hint = type_hints.get(field.name) if hint is None: continue @@ -71,12 +71,12 @@ def build_error_ctx( def get_allowed_fields( *, skip_value: bool | tuple[FieldPath, ...], - dataclass_: type[DataclassInstance] | None = None, + schema: type[DataclassInstance] | None = None, ) -> set[str] | None: if skip_value is True: return None if isinstance(skip_value, tuple): - return {extract_field_path(fp, dataclass_) for fp in skip_value} + return {extract_field_path(fp, schema) for fp in skip_value} return None @@ -85,19 +85,19 @@ def apply_skip_invalid( 
raw: JSONValue, skip_if_invalid: bool | tuple[FieldPath, ...] | None, loader_instance: LoaderProtocol, - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], log_prefix: str, probe_retort: Retort | None = None, ) -> FilterResult: if not skip_if_invalid: return FilterResult(cleaned_dict=raw, skipped_paths=[]) - allowed_fields = get_allowed_fields(skip_value=skip_if_invalid, dataclass_=dataclass_) + allowed_fields = get_allowed_fields(skip_value=skip_if_invalid, schema=schema) if probe_retort is None: probe_retort = loader_instance.create_probe_retort() - result = filter_invalid_fields(raw, probe_retort, dataclass_, allowed_fields) + result = filter_invalid_fields(raw, probe_retort, schema, allowed_fields) for path in result.skipped_paths: logger.warning( "%s Skipped invalid field '%s'", diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py index 4845a64..b46f186 100644 --- a/src/dature/loading/multi.py +++ b/src/dature/loading/multi.py @@ -265,7 +265,7 @@ class _MergedData[T: DataclassInstance]: def _load_and_merge[T: DataclassInstance]( # noqa: C901 *, merge_meta: _MergeConfig, - dataclass_: type[T], + schema: type[T], loaders: tuple[LoaderProtocol, ...] 
| None = None, debug: bool = False, type_loaders: TypeLoaderMap | None = None, @@ -273,30 +273,30 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 secret_paths: frozenset[str] = frozenset() if _resolve_merge_mask_secrets(merge_meta): extra_patterns = _collect_extra_secret_patterns(merge_meta) - secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns) + secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) loaded = load_sources( merge_meta=merge_meta, - dataclass_name=dataclass_.__name__, - dataclass_=dataclass_, + dataclass_name=schema.__name__, + schema=schema, loaders=loaders, secret_paths=secret_paths, mask_secrets=_resolve_merge_mask_secrets(merge_meta), type_loaders=type_loaders, ) - merge_maps = build_field_merge_map(merge_meta.field_merges, dataclass_) + merge_maps = build_field_merge_map(merge_meta.field_merges, schema) field_group_paths: tuple[ResolvedFieldGroup, ...] = () if merge_meta.field_groups: - field_group_paths = build_field_group_paths(merge_meta.field_groups, dataclass_) + field_group_paths = build_field_group_paths(merge_meta.field_groups, schema) if field_group_paths: source_reprs = tuple(repr(merge_meta.sources[entry.index]) for entry in loaded.source_entries) _validate_all_field_groups( raw_dicts=loaded.raw_dicts, field_group_paths=field_group_paths, - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, source_reprs=source_reprs, ) @@ -304,7 +304,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 raise_on_conflict( loaded.raw_dicts, loaded.source_ctxs, - dataclass_.__name__, + schema.__name__, field_merge_map=merge_maps.enum_map or None, callable_merge_paths=merge_maps.callable_paths or None, ) @@ -312,7 +312,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 merged = _merge_raw_dicts( raw_dicts=loaded.raw_dicts, strategy=merge_meta.strategy, - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, field_merge_map=merge_maps.enum_map or 
None, callable_merge_map=merge_maps.callable_map or None, secret_paths=secret_paths, @@ -324,7 +324,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 masked_merged = merged logger.debug( "[%s] Merged result (strategy=%s, %d sources): %s", - dataclass_.__name__, + schema.__name__, merge_meta.strategy, len(loaded.raw_dicts), masked_merged, @@ -338,7 +338,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 ) _log_field_origins( - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, field_origins=field_origins, secret_paths=secret_paths, ) @@ -346,7 +346,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 report: LoadReport | None = None if debug: report = _build_merge_report( - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, strategy=merge_meta.strategy, source_entries=frozen_entries, field_origins=field_origins, @@ -355,15 +355,15 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 ) last_error_ctx = loaded.source_ctxs[-1].error_ctx - merged = coerce_flag_fields(merged, dataclass_) + merged = coerce_flag_fields(merged, schema) try: result = handle_load_errors( - func=lambda: loaded.last_loader.transform_to_dataclass(merged, dataclass_), + func=lambda: loaded.last_loader.transform_to_dataclass(merged, schema), ctx=last_error_ctx, ) except DatureConfigError as exc: if report is not None: - attach_load_report(dataclass_, report) + attach_load_report(schema, report) if loaded.skipped_fields: raise enrich_skipped_errors(exc, loaded.skipped_fields) from exc raise @@ -382,30 +382,30 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 def merge_load_as_function[T: DataclassInstance]( merge_meta: _MergeConfig, - dataclass_: type[T], + schema: type[T], *, debug: bool, type_loaders: TypeLoaderMap | None = None, ) -> T: data = _load_and_merge( merge_meta=merge_meta, - dataclass_=dataclass_, + schema=schema, debug=debug, type_loaders=type_loaders, ) - validating_retort = 
data.last_loader.create_validating_retort(dataclass_) - validation_loader = validating_retort.get_loader(dataclass_) + validating_retort = data.last_loader.create_validating_retort(schema) + validation_loader = validating_retort.get_loader(schema) last_meta = data.last_source_meta mask_secrets = _resolve_merge_mask_secrets(merge_meta) secret_paths: frozenset[str] = frozenset() if mask_secrets: extra_patterns = _collect_extra_secret_patterns(merge_meta) - secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns) + secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) last_error_ctx = build_error_ctx( last_meta, - dataclass_.__name__, + schema.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets, ) @@ -418,7 +418,7 @@ def merge_load_as_function[T: DataclassInstance]( if debug: report = get_load_report(data.result) if report is not None: - attach_load_report(dataclass_, report) + attach_load_report(schema, report) raise return data.result @@ -512,7 +512,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq try: merged_data = _load_and_merge( merge_meta=ctx.merge_meta, - dataclass_=ctx.cls, + schema=ctx.cls, loaders=ctx.loaders, debug=ctx.debug, type_loaders=ctx.type_loaders, diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index 69826fe..aaff90d 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -172,7 +172,7 @@ def _load_single_source(ctx: _PatchContext) -> DataclassInstance: raw=raw_data, skip_if_invalid=ctx.metadata.skip_if_invalid, loader_instance=ctx.loader_instance, - dataclass_=ctx.cls, + schema=ctx.cls, log_prefix=f"[{ctx.cls.__name__}]", probe_retort=ctx.probe_retort, ) @@ -254,7 +254,7 @@ def load_as_function( # noqa: C901, PLR0912 *, loader_instance: LoaderProtocol, file_path: FileOrStream, - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], metadata: Source, debug: bool, ) -> DataclassInstance: @@ -265,8 
+265,8 @@ def load_as_function( # noqa: C901, PLR0912 mask_secrets = _resolve_single_mask_secrets(metadata) if mask_secrets: extra_patterns = metadata.secret_field_names or () - secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns) - error_ctx = build_error_ctx(metadata, dataclass_.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets) + secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) + error_ctx = build_error_ctx(metadata, schema.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets) load_result = handle_load_errors( func=lambda: loader_instance.load_raw(file_path), @@ -277,7 +277,7 @@ def load_as_function( # noqa: C901, PLR0912 if load_result.nested_conflicts: error_ctx = build_error_ctx( metadata, - dataclass_.__name__, + schema.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets, nested_conflicts=load_result.nested_conflicts, @@ -287,8 +287,8 @@ def load_as_function( # noqa: C901, PLR0912 raw=raw_data, skip_if_invalid=metadata.skip_if_invalid, loader_instance=loader_instance, - dataclass_=dataclass_, - log_prefix=f"[{dataclass_.__name__}]", + schema=schema, + log_prefix=f"[{schema.__name__}]", ) raw_data = filter_result.cleaned_dict @@ -308,7 +308,7 @@ def load_as_function( # noqa: C901, PLR0912 else: report_file_path = None report = _build_single_source_report( - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, loader_type=display_name, file_path=report_file_path, raw_data=raw_data, @@ -316,16 +316,16 @@ def load_as_function( # noqa: C901, PLR0912 ) _log_single_source_load( - dataclass_name=dataclass_.__name__, + dataclass_name=schema.__name__, loader_type=display_name, file_path="" if isinstance(file_path, FILE_LIKE_TYPES) else str(file_path), data=raw_data if isinstance(raw_data, dict) else {}, secret_paths=secret_paths, ) - validating_retort = loader_instance.create_validating_retort(dataclass_) - validation_loader = validating_retort.get_loader(dataclass_) - 
raw_data = coerce_flag_fields(raw_data, dataclass_) + validating_retort = loader_instance.create_validating_retort(schema) + validation_loader = validating_retort.get_loader(schema) + raw_data = coerce_flag_fields(raw_data, schema) try: handle_load_errors( @@ -334,19 +334,19 @@ def load_as_function( # noqa: C901, PLR0912 ) except DatureConfigError as exc: if report is not None: - attach_load_report(dataclass_, report) + attach_load_report(schema, report) if skipped_fields: raise enrich_skipped_errors(exc, skipped_fields) from exc raise try: result = handle_load_errors( - func=lambda: loader_instance.transform_to_dataclass(raw_data, dataclass_), + func=lambda: loader_instance.transform_to_dataclass(raw_data, schema), ctx=error_ctx, ) except DatureConfigError as exc: if report is not None: - attach_load_report(dataclass_, report) + attach_load_report(schema, report) if skipped_fields: raise enrich_skipped_errors(exc, skipped_fields) from exc raise diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index f8c69a8..168c4a0 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -78,7 +78,7 @@ def apply_merge_skip_invalid( source_meta: Source, merge_meta: _MergeConfig, loader_instance: LoaderProtocol, - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], source_index: int, ) -> FilterResult: skip_value = resolve_skip_invalid(source_meta, merge_meta) @@ -89,8 +89,8 @@ def apply_merge_skip_invalid( raw=raw, skip_if_invalid=skip_value, loader_instance=loader_instance, - dataclass_=dataclass_, - log_prefix=f"[{dataclass_.__name__}] Source {source_index}:", + schema=schema, + log_prefix=f"[{schema.__name__}] Source {source_index}:", ) @@ -120,7 +120,7 @@ def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 *, merge_meta: _MergeConfig, dataclass_name: str, - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], loaders: tuple[LoaderProtocol, ...] 
| None = None, secret_paths: frozenset[str] = frozenset(), mask_secrets: bool = False, @@ -216,7 +216,7 @@ def _load_raw( source_meta=source_meta, merge_meta=merge_meta, loader_instance=loader_instance, - dataclass_=dataclass_, + schema=schema, source_index=i, ) diff --git a/src/dature/main.py b/src/dature/main.py index 447e4ae..9cc940b 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -7,10 +7,7 @@ from dature.loading.resolver import resolve_loader from dature.loading.single import load_as_function, make_decorator from dature.merging.strategy import MergeStrategyEnum -from dature.metadata import ( - Source, - _MergeConfig, -) +from dature.metadata import Source, _MergeConfig from dature.protocols import DataclassInstance from dature.types import ( FILE_LIKE_TYPES, @@ -28,7 +25,7 @@ @overload def load[T]( *sources: Source, - dataclass_: type[T], + schema: type[T], debug: bool | None = None, strategy: MergeStrategyName = "last_wins", field_merges: FieldMergeMap | None = None, @@ -47,7 +44,7 @@ def load[T]( @overload def load( *sources: Source, - dataclass_: None = None, + schema: None = None, cache: bool | None = None, debug: bool | None = None, strategy: MergeStrategyName = "last_wins", @@ -67,7 +64,7 @@ def load( # --8<-- [start:load] def load( # noqa: PLR0913 *sources: Source, - dataclass_: type[Any] | None = None, + schema: type[Any] | None = None, cache: bool | None = None, debug: bool | None = None, strategy: MergeStrategyName = "last_wins", @@ -93,7 +90,7 @@ def load( # noqa: PLR0913 if len(sources) > 1: return _load_multi( sources=sources, - dataclass_=dataclass_, + schema=schema, cache=cache, debug=debug, strategy=strategy, @@ -128,11 +125,11 @@ def load( # noqa: PLR0913 else: fileor_path = Path() - if dataclass_ is not None: + if schema is not None: return load_as_function( loader_instance=loader_instance, file_path=fileor_path, - dataclass_=dataclass_, + schema=schema, metadata=metadata, debug=debug, ) @@ -160,7 +157,7 @@ def 
_validate_sources(sources: tuple[Source, ...]) -> None: def _load_multi( # noqa: PLR0913 *, sources: tuple[Source, ...], - dataclass_: type[DataclassInstance] | None, + schema: type[DataclassInstance] | None, cache: bool, debug: bool, strategy: MergeStrategyName, @@ -190,6 +187,6 @@ def _load_multi( # noqa: PLR0913 nested_resolve=nested_resolve, ) merge_type_loaders = {**(config.type_loaders or {}), **(merge_meta.type_loaders or {})} - if dataclass_ is not None: - return merge_load_as_function(merge_meta, dataclass_, debug=debug, type_loaders=merge_type_loaders or None) + if schema is not None: + return merge_load_as_function(merge_meta, schema, debug=debug, type_loaders=merge_type_loaders or None) return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders or None) diff --git a/src/dature/merging/predicate.py b/src/dature/merging/predicate.py index ea013a8..95e71c5 100644 --- a/src/dature/merging/predicate.py +++ b/src/dature/merging/predicate.py @@ -24,25 +24,25 @@ def callable_paths(self) -> frozenset[str]: return frozenset(self.callable_map.keys()) -def extract_field_path(predicate: Any, dataclass_: type[DataclassInstance] | None = None) -> str: # noqa: ANN401 +def extract_field_path(predicate: Any, schema: type[DataclassInstance] | None = None) -> str: # noqa: ANN401 if not isinstance(predicate, FieldPath): msg = f"Expected FieldPath, got {type(predicate).__name__}" raise TypeError(msg) - if dataclass_ is not None: - validate_field_path_owner(predicate, dataclass_) + if schema is not None: + validate_field_path_owner(predicate, schema) return predicate.as_path() def build_field_merge_map( field_merges: "FieldMergeMap | None", - dataclass_: type[DataclassInstance] | None = None, + schema: type[DataclassInstance] | None = None, ) -> FieldMergeMaps: enum_map: dict[str, FieldMergeStrategyEnum] = {} callable_map: dict[str, FieldMergeCallable] = {} if not field_merges: return FieldMergeMaps(enum_map=enum_map, 
callable_map=callable_map) for predicate, strategy in field_merges.items(): - path = extract_field_path(predicate, dataclass_) + path = extract_field_path(predicate, schema) if isinstance(strategy, str): enum_map[path] = FieldMergeStrategyEnum(strategy) else: @@ -65,20 +65,20 @@ def _expand_dataclass_fields(prefix: str, dc_type: type) -> list[str]: def build_field_group_paths( field_groups: "tuple[FieldGroupTuple, ...]", - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], ) -> tuple[ResolvedFieldGroup, ...]: resolved: list[ResolvedFieldGroup] = [] for group in field_groups: paths: list[str] = [] for field in group: - path = extract_field_path(field, dataclass_) + path = extract_field_path(field, schema) if isinstance(field, FieldPath) and isinstance(field.owner, type): resolved_type = resolve_field_type(field.owner, field.parts) else: - resolved_type = resolve_field_type(dataclass_, tuple(path.split("."))) + resolved_type = resolve_field_type(schema, tuple(path.split("."))) if resolved_type is not None: paths.extend(_expand_dataclass_fields(path, resolved_type)) else: paths.append(path) resolved.append(ResolvedFieldGroup(paths=tuple(paths))) return tuple(resolved) diff --git a/src/dature/protocols.py b/src/dature/protocols.py index 013c7f7..536f9ea 100644 --- a/src/dature/protocols.py +++ b/src/dature/protocols.py @@ -30,13 +30,13 @@ class LoaderProtocol(Protocol): def load_raw(self, path: FileOrStream) -> LoadRawResult: ... - def transform_to_dataclass(self, data: JSONValue, dataclass_: type[_T]) -> _T: ... + def transform_to_dataclass(self, data: JSONValue, schema: type[_T]) -> _T: ... def create_retort(self) -> Retort: ... def create_probe_retort(self) -> Retort: ... - def create_validating_retort(self, dataclass_: type[_T]) -> Retort: ... + def create_validating_retort(self, schema: type[_T]) -> Retort: ... 
@classmethod def resolve_location( diff --git a/src/dature/skip_field_provider.py b/src/dature/skip_field_provider.py index e73840b..c4e0264 100644 --- a/src/dature/skip_field_provider.py +++ b/src/dature/skip_field_provider.py @@ -100,13 +100,13 @@ class FilterResult: def filter_invalid_fields( raw_dict: JSONValue, probe_retort: Retort, - dataclass_: type[DataclassInstance], + schema: type[DataclassInstance], allowed_fields: set[str] | None, ) -> FilterResult: if not isinstance(raw_dict, dict): return FilterResult(cleaned_dict=raw_dict, skipped_paths=[]) - probed: ProbeDict = probe_retort.load(raw_dict, dataclass_) + probed: ProbeDict = probe_retort.load(raw_dict, schema) all_not_loaded = _collect_not_loaded_paths(probed, "") skipped: list[str] = [] diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py index 578d276..d3f425b 100644 --- a/src/dature/sources_loader/base.py +++ b/src/dature/sources_loader/base.py @@ -134,11 +134,11 @@ def _get_name_mapping_provider(self) -> list[Provider]: return providers - def _get_validator_providers(self, dataclass_: type[T]) -> list[Provider]: + def _get_validator_providers(self, schema: type[T]) -> list[Provider]: providers: list[Provider] = [] - type_hints = get_type_hints(dataclass_, include_extras=True) + type_hints = get_type_hints(schema, include_extras=True) - for field in fields(cast("type[DataclassInstance]", dataclass_)): + for field in fields(cast("type[DataclassInstance]", schema)): if field.name not in type_hints: continue @@ -146,7 +146,7 @@ def _get_validator_providers(self, dataclass_: type[T]) -> list[Provider]: validators = extract_validators_from_type(field_type) if validators: - field_providers = create_validator_providers(dataclass_, field.name, validators) + field_providers = create_validator_providers(schema, field.name, validators) providers.extend(field_providers) for nested_dc in self._find_nested_dataclasses(field_type): @@ -236,9 +236,9 @@ def create_probe_retort(self) -> 
Retort: recipe=[SkipFieldProvider(), ModelToDictProvider(), *self._base_recipe()], ) - def create_validating_retort(self, dataclass_: type[T]) -> Retort: + def create_validating_retort(self, schema: type[T]) -> Retort: root_validator_providers = create_root_validator_providers( - dataclass_, + schema, self._root_validators, ) metadata_validator_providers = create_metadata_validator_providers( @@ -247,7 +247,7 @@ def create_validating_retort(self, dataclass_: type[T]) -> Retort: return Retort( strict_coercion=True, recipe=[ - *self._get_validator_providers(dataclass_), + *self._get_validator_providers(schema), *metadata_validator_providers, *root_validator_providers, *self._base_recipe(), @@ -274,10 +274,10 @@ def _pre_processing(self, data: JSONValue) -> JSONValue: prefixed = self._apply_prefix(data) return expand_env_vars(prefixed, mode=self._expand_env_vars_mode) - def transform_to_dataclass(self, data: JSONValue, dataclass_: type[T]) -> T: - if dataclass_ not in self.retorts: - self.retorts[dataclass_] = self.create_retort() - return self.retorts[dataclass_].load(data, dataclass_) + def transform_to_dataclass(self, data: JSONValue, schema: type[T]) -> T: + if schema not in self.retorts: + self.retorts[schema] = self.create_retort() + return self.retorts[schema].load(data, schema) def load_raw(self, path: FileOrStream) -> LoadRawResult: data = self._load(path) diff --git a/src/dature/validators/base.py b/src/dature/validators/base.py index 9678200..7900202 100644 --- a/src/dature/validators/base.py +++ b/src/dature/validators/base.py @@ -23,7 +23,7 @@ def extract_validators_from_type(field_type: Any) -> list[ValidatorProtocol]: # def create_validator_providers( - dataclass_: type, + schema: type, field_name: str, validators: list[ValidatorProtocol], ) -> list[Provider]: @@ -33,7 +33,7 @@ def create_validator_providers( func = v.get_validator_func() error = v.get_error_message() provider = validator( - P[dataclass_][field_name], + P[schema][field_name], func, 
error, ) @@ -91,14 +91,14 @@ def create_metadata_validator_providers( def create_root_validator_providers( - dataclass_: type, + schema: type, root_validators: tuple[ValidatorProtocol, ...], ) -> list[Provider]: providers = [] for root_validator in root_validators: provider = validator( - P[dataclass_], + P[schema], root_validator.get_validator_func(), root_validator.get_error_message(), ) diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py index d2780d3..241c89f 100644 --- a/tests/errors/test_exceptions.py +++ b/tests/errors/test_exceptions.py @@ -195,7 +195,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -217,7 +217,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 2 @@ -252,7 +252,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -301,7 +301,7 @@ class Config: metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -329,7 +329,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value first = err.exceptions[0] @@ -622,7 +622,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert str(err) == 
"Config loading errors (1)" @@ -646,7 +646,7 @@ class Config: metadata = Source(file=yaml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -669,7 +669,7 @@ class Config: metadata = Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -692,7 +692,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert str(err) == "Config loading errors (1)" @@ -715,7 +715,7 @@ class Config: product: list[Product] metadata = Source(file=array_of_tables_toml_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result == Config( product=[ @@ -738,7 +738,7 @@ class Config: metadata = Source(file=array_of_tables_error_first_toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -763,7 +763,7 @@ class Config: metadata = Source(file=array_of_tables_error_last_toml_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index d8f6b89..cd8be37 100644 --- a/tests/errors/test_fixtures.py +++ b/tests/errors/test_fixtures.py @@ -101,7 +101,7 @@ def test_load_error_types( metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=LoadErrorConfig) + load(metadata, schema=LoadErrorConfig) err = 
exc_info.value assert str(err) == f"LoadErrorConfig loading errors ({len(EXPECTED_LOAD_ERRORS)})" @@ -116,7 +116,7 @@ def test_validation_error_types( metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=ValidationErrorConfig) + load(metadata, schema=ValidationErrorConfig) err = exc_info.value assert str(err) == f"ValidationErrorConfig loading errors ({len(EXPECTED_VALIDATION_ERRORS)})" diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 092dbec..394a7aa 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -29,7 +29,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, strategy="last_wins", field_merges={F[Config].host: "first_wins"}, ) @@ -52,7 +52,7 @@ class Config: result = load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_wins", field_merges={F[Config].port: "last_wins"}, ) @@ -75,7 +75,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "append"}, ) @@ -96,7 +96,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "append_unique"}, ) @@ -116,7 +116,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "prepend"}, ) @@ -136,7 +136,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "prepend_unique"}, ) @@ -161,7 +161,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].database.host: "first_wins"}, ) @@ 
-183,7 +183,7 @@ class Config: load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: "append"}, ) @@ -203,7 +203,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, strategy="last_wins", field_merges={ F[Config].host: "first_wins", @@ -230,7 +230,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={}, ) @@ -282,7 +282,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "last_wins"}, ) @@ -305,7 +305,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "first_wins"}, ) @@ -329,7 +329,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "last_wins"}, ) @@ -353,7 +353,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={F[Config].database.host: "last_wins"}, ) @@ -376,7 +376,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={ F[Config].host: "first_wins", @@ -434,7 +434,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -483,7 +483,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -522,7 +522,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -552,7 +552,7 @@ class Config: result = 
load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -585,7 +585,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -616,7 +616,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].value: strategy}, ) @@ -635,7 +635,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].host: "first_wins"}, ) @@ -660,7 +660,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "append"}, ) @@ -684,7 +684,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].priority: max}, ) @@ -708,7 +708,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].priority: min}, ) @@ -735,7 +735,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={ F[Config].user_name: "first_wins", F[Config].inner.user_name: "last_wins", @@ -764,7 +764,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={ F[Config].user_name: "last_wins", F[Config].inner.user_name: "first_wins", @@ -790,7 +790,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={F[Config].score: sum}, ) @@ -814,7 +814,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].score: sum}, ) @@ -838,7 +838,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].weight: 
lambda vals: sum(vals) / len(vals)}, ) @@ -862,7 +862,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].priority: max}, ) @@ -890,7 +890,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].database.port: max}, ) @@ -906,7 +906,7 @@ class Config: result = load( Source(file=a), - dataclass_=Config, + schema=Config, field_merges={F[Config].score: sum}, ) @@ -927,7 +927,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_merges={F[Config].score: sum}, ) @@ -951,7 +951,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, field_merges={ F[Config].host: "first_wins", F[Config].score: sum, @@ -982,7 +982,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_merges={F[Config].score: sum}, ) diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index 1d79944..86c0267 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -29,7 +29,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -50,7 +50,7 @@ class Config: result = load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_wins", ) @@ -72,7 +72,7 @@ class Config: result = load( Source(file=filea), Source(file=fileb), - dataclass_=Config, + schema=Config, ) assert result.host == "myhost" @@ -97,7 +97,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, ) assert result.database.host == "prod-host" @@ -123,7 +123,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, ) assert 
result.host == "a-host" @@ -145,7 +145,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -166,7 +166,7 @@ class Config: result = load( Source(file=defaults), Source(prefix="APP_"), - dataclass_=Config, + schema=Config, ) assert result.host == "env-host" @@ -188,7 +188,7 @@ class Config: load( Source(file=defaults), Source(prefix="APP_"), - dataclass_=Config, + schema=Config, ) err = exc_info.value @@ -212,7 +212,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, ) err = exc_info.value @@ -229,7 +229,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.name == "test" assert result.port == 8080 @@ -241,7 +241,7 @@ def test_backward_compat_none_metadata(self, monkeypatch): class Config: my_var: str - result = load(Source(), dataclass_=Config) + result = load(Source(), schema=Config) assert result.my_var == "from_env" @@ -381,7 +381,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -410,7 +410,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -432,7 +432,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -459,7 +459,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -488,7 +488,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -517,7 +517,7 @@ class Config: load( Source(file=a), Source(prefix="APP_"), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -546,7 +546,7 @@ class 
Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", ) @@ -587,7 +587,7 @@ class Config: result = load( Source(file=yaml_file), Source(file=env_file), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -616,7 +616,7 @@ class Config: result = load( Source(file=json_file), Source(file=env_file), - dataclass_=Config, + schema=Config, ) assert result.perms == _Permission.READ | _Permission.WRITE @@ -635,7 +635,7 @@ class Config: result = load( Source(file=json_file), Source(prefix="APP_"), - dataclass_=Config, + schema=Config, ) assert result.perms == _Permission.READ | _Permission.EXECUTE @@ -655,7 +655,7 @@ class Config: result = load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, ) assert result.perms == _Permission.READ | _Permission.WRITE | _Permission.EXECUTE @@ -701,7 +701,7 @@ class Config: result = load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -721,7 +721,7 @@ class Config: result = load( Source(file=missing), Source(file=fallback), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -743,7 +743,7 @@ class Config: result = load( Source(file=broken), Source(file=fallback), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -763,7 +763,7 @@ class Config: load( Source(file=missing1), Source(file=missing2), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -788,7 +788,7 @@ class Config: load( Source(file=partial), Source(file=full), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -813,7 +813,7 @@ class Config: load( Source(file=bad_type), Source(file=fallback), - dataclass_=Config, + schema=Config, strategy="first_found", ) @@ -843,7 +843,7 @@ class Config: load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_found", ) diff --git a/tests/loading/test_single.py 
b/tests/loading/test_single.py index 0cc6797..5dc4b3c 100644 --- a/tests/loading/test_single.py +++ b/tests/loading/test_single.py @@ -264,7 +264,7 @@ class Config: result = load_as_function( loader_instance=JsonLoader(), file_path=json_file, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) @@ -284,7 +284,7 @@ class Config: result = load_as_function( loader_instance=JsonLoader(prefix="app"), file_path=json_file, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) @@ -312,7 +312,7 @@ class Config: result = load_as_function( loader_instance=EnvFileLoader(), file_path=env_file, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) @@ -332,7 +332,7 @@ class Config: result = load_as_function( loader_instance=JsonLoader(), file_path=json_file, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) @@ -405,7 +405,7 @@ class Config: result = load_as_function( loader_instance=JsonLoader(), file_path=stream, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) @@ -425,7 +425,7 @@ class Config: result = load_as_function( loader_instance=JsonLoader(), file_path=json_file, - dataclass_=Config, + schema=Config, metadata=metadata, debug=False, ) diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index cff3409..6de1fa0 100644 --- a/tests/loading/test_skip_invalid_fields.py +++ b/tests/loading/test_skip_invalid_fields.py @@ -26,7 +26,7 @@ class Config: result = load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -48,7 +48,7 @@ class Config: result = load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -71,7 +71,7 @@ class Config: load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -105,7 +105,7 @@ class Config: result = load( 
Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -127,7 +127,7 @@ class Config: result = load( Source(file=source1, skip_if_invalid=True), Source(file=source2), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -148,7 +148,7 @@ class Config: result = load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -167,7 +167,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=source1), - dataclass_=Config, + schema=Config, ) err = exc_info.value @@ -195,7 +195,7 @@ class Config: result = load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", skip_invalid_fields=True, ) @@ -222,7 +222,7 @@ class Config: skip_if_invalid=(F[Config].port, F[Config].timeout), ), Source(file=source2), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -244,7 +244,7 @@ class Config: file=source1, skip_if_invalid=(F[Config].port,), ), - dataclass_=Config, + schema=Config, ) err = exc_info.value @@ -278,7 +278,7 @@ class Config: load( Source(file=source1), Source(file=source2), - dataclass_=Config, + schema=Config, skip_invalid_fields=True, ) @@ -301,7 +301,7 @@ class Config: result = load( Source(file=json_file, skip_if_invalid=True), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -319,7 +319,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=json_file, skip_if_invalid=True), - dataclass_=Config, + schema=Config, ) err = exc_info.value @@ -360,7 +360,7 @@ class Config: file=json_file, skip_if_invalid=(F[Config].port,), ), - dataclass_=Config, + schema=Config, ) assert result.host == "localhost" @@ -379,7 +379,7 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( Source(file=json_file, skip_if_invalid=True), - dataclass_=Config, + 
schema=Config, ) warning_messages = [r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING] @@ -405,7 +405,7 @@ class Config: file=source, skip_if_invalid=(F[Config].port,), ), - dataclass_=Config, + schema=Config, ) assert result.port == 3000 @@ -433,7 +433,7 @@ class Config: skip_if_invalid=(F[Config].inner.port,), ), Source(file=source2), - dataclass_=Config, + schema=Config, ) assert result.port == 8080 @@ -461,7 +461,7 @@ class Config: skip_if_invalid=(F[Config].port, F[Config].inner.port), ), Source(file=source2), - dataclass_=Config, + schema=Config, ) assert result.port == 8080 diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index caf91c9..e69abad 100644 --- a/tests/loading/test_source_loading.py +++ b/tests/loading/test_source_loading.py @@ -25,7 +25,7 @@ class Config: result = load( Source(file=valid), Source(file=missing), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -47,7 +47,7 @@ class Config: result = load( Source(file=valid), Source(file=broken), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -69,7 +69,7 @@ class Config: load( Source(file=broken_a), Source(file=broken_b), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -91,7 +91,7 @@ class Config: load( Source(file=valid), Source(file=broken), - dataclass_=Config, + schema=Config, ) def test_skip_middle_source(self, tmp_path: Path): @@ -113,7 +113,7 @@ class Config: Source(file=a), Source(file=broken), Source(file=c), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -135,7 +135,7 @@ class Config: result = load( Source(file=valid), Source(file=broken, skip_if_broken=True), - dataclass_=Config, + schema=Config, skip_broken_sources=False, ) @@ -158,7 +158,7 @@ class Config: load( Source(file=valid), Source(file=broken, skip_if_broken=False), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -177,7 +177,7 @@ class Config: result = load( 
Source(file=valid), Source(file=broken, skip_if_broken=None), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -186,7 +186,7 @@ class Config: def test_empty_sources_raises(self): with pytest.raises(TypeError, match="load\\(\\) requires at least one Source"): - load(dataclass_=int) + load(schema=int) def test_all_sources_broken_mixed_errors(self, tmp_path: Path): missing = str(tmp_path / "does_not_exist.json") @@ -202,7 +202,7 @@ class Config: load( Source(file=missing), Source(file=broken), - dataclass_=Config, + schema=Config, skip_broken_sources=True, ) @@ -223,7 +223,7 @@ class Config: result = load( Source(file=json_file), - dataclass_=Config, + schema=Config, ) assert result.host == "from-env" @@ -240,7 +240,7 @@ class Config: result = load( Source(file=json_file), - dataclass_=Config, + schema=Config, expand_env_vars="disabled", ) @@ -259,7 +259,7 @@ class Config: with pytest.raises(EnvVarExpandError): load( Source(file=json_file), - dataclass_=Config, + schema=Config, expand_env_vars="strict", ) @@ -275,7 +275,7 @@ class Config: result = load( Source(file=json_file, expand_env_vars="disabled"), - dataclass_=Config, + schema=Config, expand_env_vars="default", ) @@ -293,7 +293,7 @@ class Config: result = load( Source(file=json_file, expand_env_vars=None), - dataclass_=Config, + schema=Config, expand_env_vars="disabled", ) @@ -311,7 +311,7 @@ class Config: result = load( Source(file=json_file), - dataclass_=Config, + schema=Config, expand_env_vars="empty", ) @@ -354,7 +354,7 @@ def test_error_format( with pytest.raises(EnvVarExpandError) as exc_info: load( Source(file=file, prefix=prefix, expand_env_vars="strict"), - dataclass_=StrictConfig, + schema=StrictConfig, ) assert str(exc_info.value) == dedent(f"""\ diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index 192df54..e192d0a 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -215,7 +215,7 @@ class Cfg: password: str host: str - 
result = load(Source(file=json_file), dataclass_=Cfg, debug=True) + result = load(Source(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -242,7 +242,7 @@ class Cfg: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Cfg, + schema=Cfg, debug=True, ) @@ -266,7 +266,7 @@ class Cfg: api_key: SecretStr host: str - result = load(Source(file=json_file), dataclass_=Cfg, debug=True) + result = load(Source(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -287,7 +287,7 @@ class Cfg: host: str with caplog.at_level("DEBUG", logger="dature"): - load(Source(file=json_file), dataclass_=Cfg, debug=True) + load(Source(file=json_file), schema=Cfg, debug=True) assert _SECRET_VALUE not in caplog.text @@ -307,7 +307,7 @@ class Cfg: load( Source(file=defaults), Source(file=overrides), - dataclass_=Cfg, + schema=Cfg, debug=True, ) @@ -323,7 +323,7 @@ class Cfg: port: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), dataclass_=Cfg) + load(Source(file=json_file), schema=Cfg) assert _SECRET_VALUE not in str(exc_info.value) @@ -354,7 +354,7 @@ class Cfg: host: str with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), dataclass_=Cfg) + load(Source(file=json_file, mask_secrets=True), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -376,7 +376,7 @@ class Cfg: host: str with patch("dature.masking.masking._heuristic_detector", None), pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), dataclass_=Cfg) + load(Source(file=json_file, mask_secrets=True), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -409,7 +409,7 @@ class Cfg: host: str configure(masking={"mask_secrets": mask_secrets}) - result = 
load(Source(file=json_file), dataclass_=Cfg, debug=True) + result = load(Source(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -442,7 +442,7 @@ class Cfg: configure(masking={"mask_secrets": mask_secrets}) with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), dataclass_=Cfg) + load(Source(file=json_file), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" content = f'{{"password": "{expected_password}", "port": "not_a_number"}}' diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index 9173d25..c998276 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -27,7 +27,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, strategy="last_wins", field_groups=((F[Config].host, F[Config].port),), ) @@ -50,7 +50,7 @@ class Config: result = load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_wins", field_groups=((F[Config].host, F[Config].port),), ) @@ -75,7 +75,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -98,7 +98,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -127,7 +127,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -158,7 +158,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -186,7 +186,7 @@ class Config: load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, strategy="first_wins", 
field_groups=((F[Config].host, F[Config].port),), ) @@ -207,7 +207,7 @@ class Config: load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, strategy="raise_on_conflict", field_groups=((F[Config].host, F[Config].port),), ) @@ -237,7 +237,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].database,),), ) @@ -268,7 +268,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_groups=((F[Config].database,),), ) @@ -301,7 +301,7 @@ class Config: a_meta, b_meta, c_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -332,7 +332,7 @@ class Config: Source(file=a), Source(file=b), Source(file=c), - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -362,7 +362,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=( (F[Config].host, F[Config].port), (F[Config].user, F[Config].password), @@ -395,7 +395,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_merges={F[Config].tags: "append"}, field_groups=((F[Config].host, F[Config].port),), ) @@ -469,7 +469,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -502,7 +502,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=( (F[Config].host, F[Config].port), (F[Config].user, F[Config].password), @@ -547,7 +547,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -579,7 +579,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + 
schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -613,7 +613,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -651,7 +651,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -691,7 +691,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -728,7 +728,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) @@ -760,7 +760,7 @@ class Config: load( defaults_meta, overrides_meta, - dataclass_=Config, + schema=Config, field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) diff --git a/tests/merging/test_predicate.py b/tests/merging/test_predicate.py index d186fd2..5872c91 100644 --- a/tests/merging/test_predicate.py +++ b/tests/merging/test_predicate.py @@ -106,7 +106,7 @@ class Other: field_merges = {F[Other].host: "first_wins"} with pytest.raises(TypeError) as exc_info: - build_field_merge_map(field_merges, dataclass_=Config) + build_field_merge_map(field_merges, schema=Config) assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'" @@ -117,7 +117,7 @@ class Config: host: str with pytest.raises(TypeError) as exc_info: - extract_field_path(F["Other"].host, dataclass_=Config) + extract_field_path(F["Other"].host, schema=Config) assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'" def test_passes_with_correct_string_owner(self): @@ -125,4 +125,4 @@ def test_passes_with_correct_string_owner(self): class Config: host: str - assert extract_field_path(F["Config"].host, dataclass_=Config) == "host" + assert 
extract_field_path(F["Config"].host, schema=Config) == "host" diff --git a/tests/sources_loader/test_base.py b/tests/sources_loader/test_base.py index c7cc250..52cef52 100644 --- a/tests/sources_loader/test_base.py +++ b/tests/sources_loader/test_base.py @@ -109,7 +109,7 @@ class Config: data = {"name": "TestApp", "port": 8080} loader = MockLoader(test_data=data) - result = loader.transform_to_dataclass(data, dataclass_=Config) + result = loader.transform_to_dataclass(data, schema=Config) assert result == expected_data @@ -129,7 +129,7 @@ class Config: data = {"database": {"host": "localhost", "port": 5432}} loader = MockLoader(test_data=data) - result = loader.transform_to_dataclass(data, dataclass_=Config) + result = loader.transform_to_dataclass(data, schema=Config) assert result == expected_data @@ -148,7 +148,7 @@ class Config: loader = MockLoader(prefix="app", test_data=data) load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dataclass_=Config) + result = loader.transform_to_dataclass(load_result.data, schema=Config) assert result == expected_data @@ -175,7 +175,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="lower_camel"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "John" @@ -193,7 +193,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="lower_snake"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Alice" @@ -210,7 +210,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="upper_camel"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Bob" @@ -227,7 +227,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="lower_kebab"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Charlie" @@ -244,7 +244,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, 
name_style="upper_kebab"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Dave" @@ -261,7 +261,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, name_style="upper_snake"), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Eve" @@ -287,7 +287,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "John Doe" @@ -308,7 +308,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "Alice" @@ -334,7 +334,7 @@ class Config: name_style="lower_camel", field_mapping=field_mapping, ), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Bob" @@ -366,7 +366,7 @@ class User: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=User, + schema=User, ) assert result.name == "Charlie" @@ -385,7 +385,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "Alice" @@ -402,7 +402,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "Bob" @@ -423,7 +423,7 @@ class User: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=User, + schema=User, ) assert result.address.city == "LA" @@ -440,7 +440,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "Eve" @@ -457,7 +457,7 @@ class Config: result = load( Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), - dataclass_=Config, + schema=Config, ) assert result.name == "Direct" @@ -490,7 +490,7 @@ 
class Config: name_style="lower_camel", field_mapping=field_mapping, ), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Alice" @@ -525,7 +525,7 @@ class Config: name_style="lower_camel", field_mapping=field_mapping, ), - dataclass_=Config, + schema=Config, ) assert result.user_name == "Alice" diff --git a/tests/sources_loader/test_docker_secrets.py b/tests/sources_loader/test_docker_secrets.py index f783d7a..af2bdf9 100644 --- a/tests/sources_loader/test_docker_secrets.py +++ b/tests/sources_loader/test_docker_secrets.py @@ -11,7 +11,7 @@ class TestDockerSecretsLoader: def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path): result = load( Source(file=all_types_docker_secrets_dir, loader=DockerSecretsLoader), - dataclass_=AllPythonTypesCompact, + schema=AllPythonTypesCompact, ) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -73,9 +73,10 @@ class Config: result = load( Source(file=tmp_path, loader=DockerSecretsLoader), - dataclass_=Config, + schema=Config, ) assert result.api_url == "https://api.example.com/v1" assert result.base == "https://api.example.com" assert result.base == "https://api.example.com" + assert result.base == "https://api.example.com" diff --git a/tests/sources_loader/test_env_.py b/tests/sources_loader/test_env_.py index 0fc5220..d984b57 100644 --- a/tests/sources_loader/test_env_.py +++ b/tests/sources_loader/test_env_.py @@ -44,7 +44,7 @@ def test_custom_split_symbols(self, custom_separator_env_file: Path): def test_comprehensive_type_conversion(self, all_types_env_file: Path): """Test loading ENV with full type coercion to dataclass.""" - result = load(Source(file=all_types_env_file, loader=EnvFileLoader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_env_file, loader=EnvFileLoader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -69,7 +69,7 @@ class Config: api_url: str base: str - result = load(Source(file=env_file, 
loader=EnvFileLoader), dataclass_=Config) + result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) assert result.api_url == "https://api.example.com/v1" assert result.base == "https://api.example.com" @@ -85,7 +85,7 @@ def test_env_fileenv_var_partial_substitution(self, tmp_path: Path, monkeypatch) class Config: url: str - result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) + result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -99,7 +99,7 @@ def test_env_filedollar_sign_mid_string_existing_var(self, tmp_path: Path, monke class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) + result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -140,7 +140,7 @@ def test_env_filedollar_sign_mid_string_missing_var(self, tmp_path: Path, monkey class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), dataclass_=Config) + result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -256,7 +256,7 @@ def test_comprehensive_type_conversion(self, monkeypatch): for key, value in env_vars.items(): monkeypatch.setenv(key, value) - result = load(Source(loader=EnvLoader, prefix="APP_"), dataclass_=AllPythonTypesCompact) + result = load(Source(loader=EnvLoader, prefix="APP_"), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -273,7 +273,7 @@ class TestConfig: expected_data = TestConfig(var="included", key="also_included") - data = load(Source(loader=EnvLoader, prefix="APP_"), dataclass_=TestConfig) + data = load(Source(loader=EnvLoader, prefix="APP_"), schema=TestConfig) assert data == expected_data @@ -295,8 +295,9 @@ class TestConfig: data = load( Source(loader=EnvLoader, prefix="APP_", split_symbols="."), - 
dataclass_=TestConfig, + schema=TestConfig, ) assert data == expected_data assert data == expected_data + assert data == expected_data diff --git a/tests/sources_loader/test_ini_.py b/tests/sources_loader/test_ini_.py index fb0b27f..5a2ac73 100644 --- a/tests/sources_loader/test_ini_.py +++ b/tests/sources_loader/test_ini_.py @@ -19,7 +19,7 @@ def test_comprehensive_type_conversion(self, all_types_ini_file: Path): """Test loading INI with full type coercion to dataclass.""" result = load( Source(file=all_types_ini_file, loader=IniLoader, prefix="all_types"), - dataclass_=AllPythonTypesCompact, + schema=AllPythonTypesCompact, ) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -60,7 +60,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_ini_file, loader=IniLoader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -89,7 +89,7 @@ class DbConfig: result = load( Source(file=ini_file, loader=IniLoader, prefix="database"), - dataclass_=DbConfig, + schema=DbConfig, ) assert result.host == "db.example.com" @@ -108,7 +108,7 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - dataclass_=Config, + schema=Config, ) assert result.url == "http://localhost:8080/api" @@ -125,7 +125,7 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - dataclass_=Config, + schema=Config, ) assert result.value == "prefixreplaced/suffix" @@ -142,7 +142,8 @@ class Config: result = load( Source(file=ini_file, loader=IniLoader, prefix="section"), - dataclass_=Config, + schema=Config, ) assert result.value == "prefix$nonexistent/suffix" + assert result.value == "prefix$nonexistent/suffix" diff --git a/tests/sources_loader/test_json5_.py b/tests/sources_loader/test_json5_.py index 08dcdfa..067fe84 100644 --- a/tests/sources_loader/test_json5_.py +++ b/tests/sources_loader/test_json5_.py @@ -17,7 +17,7 @@ class TestJson5Loader: def 
test_comprehensive_type_conversion(self, all_types_json5_file: Path): """Test loading JSON5 with full type coercion to dataclass.""" - result = load(Source(file=all_types_json5_file, loader=Json5Loader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_json5_file, loader=Json5Loader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_json5_file, loader=Json5Loader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=DbConfig) + result = load(Source(file=json5_file, loader=Json5Loader), schema=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json5_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) + result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json5_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeyp class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) + result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json5_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) + result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, 
loader=Json5Loader), dataclass_=Config) + load(Source(file=json5_file, loader=Json5Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, loader=Json5Loader), dataclass_=Config) + load(Source(file=json5_file, loader=Json5Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_json_.py b/tests/sources_loader/test_json_.py index 7fca045..bbd6294 100644 --- a/tests/sources_loader/test_json_.py +++ b/tests/sources_loader/test_json_.py @@ -17,7 +17,7 @@ class TestJsonLoader: def test_comprehensive_type_conversion(self, all_types_json_file: Path): """Test loading JSON with full type coercion to dataclass.""" - result = load(Source(file=all_types_json_file, loader=JsonLoader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_json_file, loader=JsonLoader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_json_file, loader=JsonLoader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json_file, loader=JsonLoader), dataclass_=DbConfig) + result = load(Source(file=json_file, loader=JsonLoader), schema=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) + result = load(Source(file=json_file, loader=JsonLoader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: 
str - result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) + result = load(Source(file=json_file, loader=JsonLoader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) + result = load(Source(file=json_file, loader=JsonLoader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) + load(Source(file=json_file, loader=JsonLoader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), dataclass_=Config) + load(Source(file=json_file, loader=JsonLoader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources_loader/test_nested_resolve.py index f3fdd4c..519fd5c 100644 --- a/tests/sources_loader/test_nested_resolve.py +++ b/tests/sources_loader/test_nested_resolve.py @@ -103,14 +103,14 @@ class TestNestedResolve: def test_json_only(self, flat_loader_setup: FlatLoaderSetup) -> None: flat_loader_setup.set_data({"var": '{"foo": "from_json", "bar": "from_json"}'}) - result = load(flat_loader_setup.make_metadata(), dataclass_=NestedConfig) + result = load(flat_loader_setup.make_metadata(), schema=NestedConfig) assert result == NestedConfig(var=NestedVar(foo="from_json", bar="from_json")) def test_flat_only(self, flat_loader_setup: FlatLoaderSetup) -> None: flat_loader_setup.set_data({"var__foo": "from_flat", "var__bar": "from_flat"}) - result = load(flat_loader_setup.make_metadata(), dataclass_=NestedConfig) + result = 
load(flat_loader_setup.make_metadata(), schema=NestedConfig) assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -141,7 +141,7 @@ def test_both_sources( result = load( flat_loader_setup.make_metadata(**_strategy_kwargs(strategy, local=local)), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) @@ -181,7 +181,7 @@ def test_partial_missing_field( with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", **_strategy_kwargs(strategy, local=local)), - dataclass_=NestedConfig, + schema=NestedConfig, ) err = exc_info.value @@ -208,7 +208,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: prefix="MYAPP__", **_strategy_kwargs("flat", local=local), ), - dataclass_=NestedConfig, + schema=NestedConfig, ) err = exc_info.value @@ -231,7 +231,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: prefix="MYAPP__", **_strategy_kwargs("json", local=local), ), - dataclass_=NestedConfig, + schema=NestedConfig, ) err = exc_info.value @@ -257,7 +257,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)), - dataclass_=NestedConfig, + schema=NestedConfig, ) err = exc_info.value @@ -275,7 +275,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)), - dataclass_=NestedConfig, + schema=NestedConfig, ) err = exc_info.value @@ -296,7 +296,7 @@ def test_json_invalid_flat_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - 
dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -309,7 +309,7 @@ def test_json_invalid_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -335,7 +335,7 @@ def test_flat_invalid_json_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -348,7 +348,7 @@ def test_flat_invalid_flat_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -377,7 +377,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -391,7 +391,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -422,7 +422,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result 
== NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -436,7 +436,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -470,7 +470,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -483,7 +483,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -509,7 +509,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: result = load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -522,7 +522,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -554,7 +554,7 @@ def test_multiline_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch) - with pytest.raises(DatureConfigError) as exc_info: load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -583,7 +583,7 @@ def 
test_multiline_flat_strategy_ignores_json(self, monkeypatch: pytest.MonkeyPa result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20)) @@ -624,7 +624,7 @@ def test_different_strategies( var2_strategy: (F[TwoNestedConfig].var2,), }, ), - dataclass_=TwoNestedConfig, + schema=TwoNestedConfig, ) assert result == TwoNestedConfig(var1=expected_var1, var2=expected_var2) @@ -659,7 +659,7 @@ def test_local_overrides_global( nested_resolve_strategy=global_strategy, nested_resolve={local_strategy: (F[NestedConfig].var,)}, ), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) @@ -680,7 +680,7 @@ def test_flat_strategy_single_underscore(self, monkeypatch: pytest.MonkeyPatch) split_symbols="_", nested_resolve_strategy="flat", ), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -698,7 +698,7 @@ def test_json_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP split_symbols="_", nested_resolve_strategy="json", ), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -722,7 +722,7 @@ def test_flat_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP split_symbols="_", nested_resolve_strategy="flat", ), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -746,7 +746,7 @@ def test_only_json_no_conflict( result = load( flat_loader_setup.make_metadata(nested_resolve_strategy=strategy), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2")) @@ -761,7 +761,7 @@ def test_only_flat_no_conflict( result = load( flat_loader_setup.make_metadata(nested_resolve_strategy=strategy), - dataclass_=NestedConfig, + 
schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2")) @@ -781,7 +781,7 @@ def test_deep_env(self, monkeypatch: pytest.MonkeyPatch, strategy: str, expected result = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy=strategy), - dataclass_=DeepConfig, + schema=DeepConfig, ) assert result == DeepConfig(var=DeepVar(sub=DeepSub(key=expected_key))) @@ -799,7 +799,7 @@ def test_flat_strategy_deep_envfile(self, tmp_path: Path) -> None: prefix="MYAPP__", nested_resolve_strategy="flat", ), - dataclass_=DeepConfig, + schema=DeepConfig, ) assert result == DeepConfig(var=DeepVar(sub=DeepSub(key="from_flat"))) @@ -810,7 +810,7 @@ def test_json_strategy_deep_docker_secrets(self, tmp_path: Path) -> None: result = load( Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), - dataclass_=DeepConfig, + schema=DeepConfig, ) assert result == DeepConfig(var=DeepVar(sub=DeepSub(key="from_json"))) @@ -832,7 +832,7 @@ def test_flat_strategy_error(self, tmp_path: Path) -> None: prefix="myapp__", nested_resolve_strategy="flat", ), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -857,7 +857,7 @@ def test_json_strategy_error(self, tmp_path: Path) -> None: prefix="myapp__", nested_resolve_strategy="json", ), - dataclass_=NestedIntConfig, + schema=NestedIntConfig, ) err = exc_info.value @@ -880,11 +880,11 @@ def test_flat_first_then_json(self, monkeypatch: pytest.MonkeyPatch) -> None: result_flat = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), - dataclass_=NestedConfig, + schema=NestedConfig, ) result_json = load( Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -903,7 +903,7 @@ def test_envfilereversed_order(self, tmp_path: Path) -> None: prefix="MYAPP__", 
nested_resolve_strategy="flat", ), - dataclass_=NestedConfig, + schema=NestedConfig, ) result_json = load( Source( @@ -912,7 +912,7 @@ def test_envfilereversed_order(self, tmp_path: Path) -> None: prefix="MYAPP__", nested_resolve_strategy="json", ), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat")) @@ -944,7 +944,7 @@ def test_empty_dict_uses_global( nested_resolve_strategy=strategy, nested_resolve={}, ), - dataclass_=NestedConfig, + schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) diff --git a/tests/sources_loader/test_toml10_.py b/tests/sources_loader/test_toml10_.py index b33e978..4c03c71 100644 --- a/tests/sources_loader/test_toml10_.py +++ b/tests/sources_loader/test_toml10_.py @@ -17,7 +17,7 @@ class TestToml10Loader: def test_comprehensive_type_conversion(self, all_types_toml10_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_toml_file, loader=Toml10Loader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + result = load(Source(file=toml_file, 
loader=Toml10Loader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + load(Source(file=toml_file, loader=Toml10Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), dataclass_=Config) + load(Source(file=toml_file, loader=Toml10Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources_loader/test_toml11_.py index 3253eb0..f20e134 100644 --- a/tests/sources_loader/test_toml11_.py +++ b/tests/sources_loader/test_toml11_.py @@ -17,7 +17,7 @@ class TestToml11Loader: def test_comprehensive_type_conversion(self, all_types_toml11_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class 
PrefixedConfig: result = load( Source(file=prefixed_toml_file, loader=Toml11Loader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -65,7 +65,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +81,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + load(Source(file=toml_file, loader=Toml11Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), dataclass_=Config) + load(Source(file=toml_file, loader=Toml11Loader), schema=Config) err = exc_info.value assert 
len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources_loader/test_yaml11_.py index 50bffad..ddd2be3 100644 --- a/tests/sources_loader/test_yaml11_.py +++ b/tests/sources_loader/test_yaml11_.py @@ -17,7 +17,7 @@ class TestYaml11Loader: def test_comprehensive_type_conversion(self, all_types_yaml11_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -63,7 +63,7 @@ class EnvConfig: result = load( Source(file=yaml_config_with_env_vars_file, loader=Yaml11Loader), - dataclass_=EnvConfig, + schema=EnvConfig, ) assert result.database_url == "postgresql://localhost/db" @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) + result = load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) + result = load(Source(file=yaml_file, 
loader=Yaml11Loader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) + load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), dataclass_=Config) + load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources_loader/test_yaml12_.py index 612860a..c1796e3 100644 --- a/tests/sources_loader/test_yaml12_.py +++ b/tests/sources_loader/test_yaml12_.py @@ -17,7 +17,7 @@ class TestYaml12Loader: def test_comprehensive_type_conversion(self, all_types_yaml12_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), dataclass_=AllPythonTypesCompact) + result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -38,7 +38,7 @@ class PrefixedConfig: result = load( Source(file=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"), - dataclass_=PrefixedConfig, + schema=PrefixedConfig, ) assert result == expected_data @@ -63,7 +63,7 @@ class EnvConfig: result = load( Source(file=yaml_config_with_env_vars_file, loader=Yaml12Loader), - dataclass_=EnvConfig, + schema=EnvConfig, ) assert result.database_url == "postgresql://localhost/db" @@ -82,7 +82,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), 
schema=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +96,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +110,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) + result = load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -133,7 +133,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) + load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +156,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), dataclass_=Config) + load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/test_custom_loader.py b/tests/test_custom_loader.py index e3392a5..9be7c63 100644 --- a/tests/test_custom_loader.py +++ b/tests/test_custom_loader.py @@ -47,7 +47,7 @@ def test_xml_loader(self, tmp_path: Path) -> None: result = load( Source(file=xml_file, loader=XmlLoader), - dataclass_=XmlConfig, + schema=XmlConfig, ) assert result.host == "localhost" diff --git a/tests/test_load_report.py b/tests/test_load_report.py index 9714ab9..c86730d 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -30,7 +30,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, debug=True, ) @@ -88,7 +88,7 @@ 
class Config: result = load( Source(file=first), Source(file=second), - dataclass_=Config, + schema=Config, strategy="first_wins", debug=True, ) @@ -151,7 +151,7 @@ class Config: result = load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, debug=True, ) @@ -187,7 +187,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), dataclass_=Config, debug=True) + result = load(Source(file=json_file), schema=Config, debug=True) report = get_load_report(result) @@ -279,7 +279,7 @@ class Config: host: str port: int - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") @@ -307,7 +307,7 @@ class Config: load( Source(file=defaults), Source(file=overrides), - dataclass_=Config, + schema=Config, ) messages = [r.message for r in caplog.records if r.name == "dature"] @@ -340,7 +340,7 @@ class Config: port: int with caplog.at_level(logging.DEBUG, logger="dature"): - load(Source(file=json_file), dataclass_=Config) + load(Source(file=json_file), schema=Config) messages = [r.message for r in caplog.records if r.name == "dature"] @@ -370,7 +370,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, debug=True, ) @@ -410,7 +410,7 @@ class Config: load( Source(file=a), Source(file=b), - dataclass_=Config, + schema=Config, debug=True, ) @@ -445,7 +445,7 @@ class Config: port: int with pytest.raises(DatureConfigError): - load(Source(file=json_file), dataclass_=Config, debug=True) + load(Source(file=json_file), schema=Config, debug=True) expected = LoadReport( dataclass_name="Config", @@ -475,7 +475,7 @@ class Config: port: Annotated[int, Ge(0)] with pytest.raises(DatureConfigError): - load(Source(file=json_file), dataclass_=Config, debug=True) + load(Source(file=json_file), schema=Config, debug=True) expected = LoadReport( dataclass_name="Config", diff 
--git a/tests/test_main.py b/tests/test_main.py index 91c3ac9..69fbedc 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -153,7 +153,7 @@ class Config: port: int metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "FromFile" assert result.port == 9090 @@ -168,7 +168,7 @@ class Config: debug: bool metadata = Source(prefix="APP_") - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "EnvFunc" assert result.debug is True @@ -180,7 +180,7 @@ def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None: class Config: my_var: str - result = load(Source(), dataclass_=Config) + result = load(Source(), schema=Config) assert result.my_var == "from_env" @@ -199,7 +199,7 @@ class Config: metadata = Source(file="/non/existent/file.json", loader=loader_class) with pytest.raises(FileNotFoundError): - load(metadata, dataclass_=Config) + load(metadata, schema=Config) @pytest.mark.parametrize( "loader_class", diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py index 419a690..8492ffb 100644 --- a/tests/test_type_loaders.py +++ b/tests/test_type_loaders.py @@ -51,7 +51,7 @@ def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None: file=yaml_with_rgb, type_loaders={Rgb: rgb_from_string}, ), - dataclass_=ConfigWithRgb, + schema=ConfigWithRgb, ) assert result.name == "test" assert result.color == Rgb(r=255, g=128, b=0) @@ -70,7 +70,7 @@ def int_times_two(value: str) -> int: file=p, type_loaders={Rgb: rgb_from_string}, ), - dataclass_=ConfigWithRgb, + schema=ConfigWithRgb, ) assert result.color == Rgb(r=10, g=20, b=30) @@ -81,7 +81,7 @@ def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None: configure( type_loaders={Rgb: rgb_from_string}, ) - result = load(Source(file=yaml_with_rgb), dataclass_=ConfigWithRgb) + result = load(Source(file=yaml_with_rgb), 
schema=ConfigWithRgb) assert result.color == Rgb(r=255, g=128, b=0) @@ -95,7 +95,7 @@ def test_merge_metadata_type_loaders(self, tmp_path: Path) -> None: result = load( Source(file=base), Source(file=override), - dataclass_=ConfigWithRgb, + schema=ConfigWithRgb, type_loaders={Rgb: rgb_from_string}, ) assert result.name == "override" @@ -125,7 +125,7 @@ def tag_upper(value: str) -> str: file=p, type_loaders={str: tag_upper}, ), - dataclass_=TwoCustom, + schema=TwoCustom, ) assert result.color == Rgb(r=10, g=20, b=30) assert result.tag == "HELLO" diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 2822ec7..473e876 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -23,7 +23,7 @@ class Config: json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", "coding"]}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" assert result.age == 30 @@ -43,7 +43,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 3 @@ -87,7 +87,7 @@ class User: ) metadata = Source(file=json_file) - result = load(metadata, dataclass_=User) + result = load(metadata, schema=User) assert result.name == "Alice" assert result.age == 30 @@ -113,7 +113,7 @@ class User: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=User) + load(metadata, schema=User) e = exc_info.value assert len(e.exceptions) == 4 @@ -157,7 +157,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -179,7 +179,7 @@ class Config: ) metadata = 
Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.groups == {"admins": [{"name": "Alice"}]} @@ -195,7 +195,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -223,7 +223,7 @@ class Config: ) metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.teams["backend"][0].name == "Alice" assert result.teams["backend"][0].role == "admin" @@ -245,7 +245,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 2 diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index 30e4c42..28808d1 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -49,7 +49,7 @@ class Config: json_file.write_text('{"count": 10}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.count == 10 @@ -65,7 +65,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -89,7 +89,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -112,7 +112,7 @@ class Config: json_file.write_text('{"url": "https://example.com"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, 
schema=Config) assert result.url == "https://example.com" @@ -128,7 +128,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -209,7 +209,7 @@ class Config: json_file.write_text('{"count": 15, "url": "https://example.com"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.count == 15 assert result.url == "https://example.com" @@ -227,7 +227,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 2 diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index 9df618b..c729168 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -27,7 +27,7 @@ class Config: F[Config].name: MinLength(3), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" @@ -45,7 +45,7 @@ class Config: F[Config].port: (Gt(0), Lt(65536)), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.port == 8080 @@ -65,7 +65,7 @@ class Config: F[Config].port: Gt(0), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" assert result.port == 8080 @@ -89,7 +89,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -118,7 +118,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = 
exc_info.value assert len(e.exceptions) == 1 @@ -152,7 +152,7 @@ class Config: F[Config].database.port: Gt(0), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.database.host == "localhost" assert result.database.port == 5432 @@ -179,7 +179,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -209,7 +209,7 @@ class Config: F[Config].port: Gt(0), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" assert result.port == 8080 @@ -231,7 +231,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -260,7 +260,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -286,7 +286,7 @@ class Config: F[Config].name: MaxLength(10), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" @@ -307,7 +307,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -333,7 +333,7 @@ class Config: F[Config].port: Lt(65536), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.port == 8080 @@ -354,7 +354,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -383,7 +383,7 @@ class Config: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + 
load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -406,7 +406,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" @@ -433,7 +433,7 @@ def validate_config(obj: Config) -> bool: F[Config].port: Ge(0), }, ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.port == 8080 assert result.user == "admin" diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index 3a2f2dc..c2681ac 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"age": 25}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.age == 25 @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ class Config: json_file.write_text('{"age": 18}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.age == 18 @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"age": 99}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.age == 99 @@ -113,7 +113,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, 
schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"age": 100}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.age == 100 @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -175,7 +175,7 @@ class Config: json_file.write_text('{"age": 30}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.age == 30 @@ -191,7 +191,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_post_init_and_property.py b/tests/validators/test_post_init_and_property.py index 07ec73d..827d35e 100644 --- a/tests/validators/test_post_init_and_property.py +++ b/tests/validators/test_post_init_and_property.py @@ -21,7 +21,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.port == 8080 assert result.host == "localhost" @@ -41,7 +41,7 @@ def __post_init__(self) -> None: json_file.write_text('{"port": 99999, "host": "localhost"}') with pytest.raises(ValueError, match="Invalid port: 99999"): - load(Source(file=json_file), dataclass_=Config) + load(Source(file=json_file), schema=Config) def test_post_init_cross_field_validation(self, tmp_path: Path): @dataclass @@ -58,7 +58,7 @@ def __post_init__(self) -> None: json_file.write_text('{"min_value": 100, "max_value": 10}') with 
pytest.raises(ValueError, match=r"min_value \(100\) must be less than max_value \(10\)"): - load(Source(file=json_file), dataclass_=Config) + load(Source(file=json_file), schema=Config) def test_post_init_cross_field_success(self, tmp_path: Path): @dataclass @@ -74,7 +74,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 1, "max_value": 100}') - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.min_value == 1 assert result.max_value == 100 @@ -170,7 +170,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.base_url == "http://localhost:8080" @@ -207,7 +207,7 @@ def address(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.address == "localhost:8080" @@ -241,6 +241,6 @@ def email(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"_email": " Admin@Example.COM "}') - result = load(Source(file=json_file), dataclass_=Config) + result = load(Source(file=json_file), schema=Config) assert result.email == "admin@example.com" diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index f6e0aff..24f89dc 100644 --- a/tests/validators/test_root_validator.py +++ b/tests/validators/test_root_validator.py @@ -27,7 +27,7 @@ def validate_config(obj: Config) -> bool: file=json_file, root_validators=(RootValidator(validate_config),), ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.port == 80 assert result.user == "root" @@ -52,7 +52,7 @@ def 
validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -82,7 +82,7 @@ def validate_step(obj: Config) -> bool: RootValidator(validate_step), ), ) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.min_value == 10 assert result.max_value == 100 @@ -113,7 +113,7 @@ def validate_step(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -138,7 +138,7 @@ def validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -198,7 +198,7 @@ def validate_config(obj: Config) -> bool: ) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index 9dc797c..dcc10e9 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing"]}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.tags == ["python", "typing"] @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing"]}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = 
load(metadata, schema=Config) assert result.tags == ["python", "typing"] @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing", "validation"]}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.tags == ["python", "typing", "validation"] @@ -113,7 +113,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"tags": ["python", "typing", "validation"]}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.tags == ["python", "typing", "validation"] @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index f2eb75b..5f24a46 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -19,7 +19,7 @@ class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" @@ -35,7 +35,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -58,7 +58,7 @@ 
class Config: json_file.write_text('{"name": "Alice"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.name == "Alice" @@ -74,7 +74,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -97,7 +97,7 @@ class Config: json_file.write_text('{"email": "test@example.com"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.email == "test@example.com" @@ -113,7 +113,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 @@ -136,7 +136,7 @@ class Config: json_file.write_text('{"username": "john_doe"}') metadata = Source(file=json_file) - result = load(metadata, dataclass_=Config) + result = load(metadata, schema=Config) assert result.username == "john_doe" @@ -152,7 +152,7 @@ class Config: metadata = Source(file=json_file) with pytest.raises(DatureConfigError) as exc_info: - load(metadata, dataclass_=Config) + load(metadata, schema=Config) e = exc_info.value assert len(e.exceptions) == 1 From f46e93eb2c9cd5af30c1df0f071eb2e597c6b39b Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 10:40:36 +0300 Subject: [PATCH 15/36] fix version --- docs/javascript/readthedocs.js | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/docs/javascript/readthedocs.js b/docs/javascript/readthedocs.js index a71af0a..580721f 100644 --- a/docs/javascript/readthedocs.js +++ b/docs/javascript/readthedocs.js @@ -54,12 +54,17 @@ function renderVersionItem(version) { // Cached HTML fragments, built once from RTD data let versioningHtml = ""; let olderItemsHtml 
= ""; +// Set when MutationObserver detects missing selector but data isn't ready yet +let pendingInject = false; function injectVersionSelector() { if (versioningHtml === "") { + pendingInject = true; return; } + pendingInject = false; + const topic = document.querySelector(".md-header__topic"); if (topic === null) { return; @@ -120,19 +125,19 @@ document.addEventListener("readthedocs-addons-data-ready", function (event) { injectVersionSelector(); }); -// Re-inject after Material instant navigation replaces the DOM +// Re-inject after Material instant navigation replaces the DOM. +// Debounce via setTimeout so we inject only after Material finishes its +// batch of DOM mutations, not in between them. document.addEventListener("DOMContentLoaded", function () { if (typeof document.body.dataset.mdColorScheme === "undefined") { return; } - let injecting = false; + let timer = 0; new MutationObserver(function () { - if (injecting) return; const topic = document.querySelector(".md-header__topic"); if (topic !== null && topic.querySelector(".md-version") === null) { - injecting = true; - injectVersionSelector(); - injecting = false; + clearTimeout(timer); + timer = setTimeout(injectVersionSelector, 50); } }).observe(document.querySelector(".md-header") || document.body, { childList: true, From abe0a7ae88a2d26fe4356fb3d02bff1d6f2221fa Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 19:12:45 +0300 Subject: [PATCH 16/36] fix --- .github/dependabot.yml | 8 ++++---- examples/load_all_formats.py | 1 - src/dature/config.py | 6 +++--- src/dature/main.py | 14 ++++++++++++++ src/dature/merging/predicate.py | 4 ---- tests/loading/test_field_merges.py | 23 +++++++++++++++++++++++ 6 files changed, 44 insertions(+), 12 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 389ea4c..452aa35 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,8 +3,8 @@ updates: - package-ecosystem: "pip" directory: "/" schedule: - interval: 
"cron" - cronjob: "0 9 1-7 * 6" + interval: "monthly" + time: "09:00" timezone: "Europe/Moscow" open-pull-requests-limit: 10 labels: @@ -15,8 +15,8 @@ updates: - package-ecosystem: "github-actions" directory: "/" schedule: - interval: "cron" - cronjob: "0 9 1-7 * 6" + interval: "monthly" + time: "09:00" timezone: "Europe/Moscow" labels: - "ci" diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index d266f21..3626da0 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -30,4 +30,3 @@ config = dature.load(meta, schema=AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value == 42 - assert config.integer_value == 42 diff --git a/src/dature/config.py b/src/dature/config.py index adaafa7..895841e 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -147,17 +147,17 @@ def configure( current = config.ensure_loaded() merged_masking = ( - MaskingConfig(**cast("dict[str, Any]", asdict(MaskingConfig()) | masking)) + MaskingConfig(**cast("dict[str, Any]", asdict(current.masking) | masking)) if masking is not None else current.masking ) merged_error = ( - ErrorDisplayConfig(**cast("dict[str, Any]", asdict(ErrorDisplayConfig()) | error_display)) + ErrorDisplayConfig(**cast("dict[str, Any]", asdict(current.error_display) | error_display)) if error_display is not None else current.error_display ) merged_loading = ( - LoadingConfig(**cast("dict[str, Any]", asdict(LoadingConfig()) | loading)) + LoadingConfig(**cast("dict[str, Any]", asdict(current.loading) | loading)) if loading is not None else current.loading ) diff --git a/src/dature/main.py b/src/dature/main.py index 9cc940b..25d12ec 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -1,3 +1,4 @@ +import logging from collections.abc import Callable from pathlib import Path from typing import Any, overload @@ -21,6 +22,8 @@ TypeLoaderMap, ) +logger = logging.getLogger("dature") + @overload def load[T]( @@ -87,6 +90,17 @@ def 
load( # noqa: PLR0913 _validate_sources(sources) + if len(sources) == 1 and ( + strategy != "last_wins" + or field_merges is not None + or field_groups != () + or skip_broken_sources + or skip_invalid_fields + or secret_field_names is not None + or mask_secrets is not None + ): + logger.warning("Merge-related parameters have no effect with a single source") + if len(sources) > 1: return _load_multi( sources=sources, diff --git a/src/dature/merging/predicate.py b/src/dature/merging/predicate.py index 95e71c5..116efe5 100644 --- a/src/dature/merging/predicate.py +++ b/src/dature/merging/predicate.py @@ -82,7 +82,3 @@ def build_field_group_paths( paths.append(path) resolved.append(ResolvedFieldGroup(paths=tuple(paths))) return tuple(resolved) - return tuple(resolved) - return tuple(resolved) - return tuple(resolved) - return tuple(resolved) diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 394a7aa..38f5612 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -1,5 +1,6 @@ """Tests for per-field merge strategies (field_merges).""" +import logging from collections.abc import Callable from dataclasses import dataclass from pathlib import Path @@ -912,6 +913,28 @@ class Config: assert result.score == 42 + def test_single_source_merge_params_warning( + self, + tmp_path: Path, + caplog: pytest.LogCaptureFixture, + ) -> None: + a = tmp_path / "a.json" + a.write_text('{"score": 42}') + + @dataclass + class Config: + score: int + + with caplog.at_level(logging.WARNING, logger="dature"): + load( + Source(file=a), + schema=Config, + field_merges={F[Config].score: sum}, + ) + + messages = [r.message for r in caplog.records if r.name == "dature"] + assert messages == ["Merge-related parameters have no effect with a single source"] + def test_callable_with_raise_on_conflict(self, tmp_path: Path): a = tmp_path / "a.json" a.write_text('{"score": 10, "name": "app"}') From 
f6197e8c1e39a0fb06f5f7d608258e756b8a6c16 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 21:16:27 +0300 Subject: [PATCH 17/36] fix test --- examples/docs/advanced/configure/advanced_configure.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index 9669f48..154879b 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -27,8 +27,8 @@ class Config: report = dature.get_load_report(config) assert report is not None -# 3. Reset to defaults — debug is off again -dature.configure(loading={}) +# 3. Disable debug globally via dature.configure() +dature.configure(loading={"debug": False}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) From 2b96cd2de064dae53495162c32d4ea4943c89302 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 21:24:55 +0300 Subject: [PATCH 18/36] fix mypy --- pyproject.toml | 2 +- src/dature/mypy_plugin.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f857933..3d48522 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -126,7 +126,7 @@ warn_unused_ignores = true warn_no_return = true strict_equality = true explicit_package_bases = true -mypy_path = "src" +mypy_path = "src:." 
plugins = ["dature.mypy_plugin"] [[tool.mypy.overrides]] diff --git a/src/dature/mypy_plugin.py b/src/dature/mypy_plugin.py index 5e3f5f5..1c5f93a 100644 --- a/src/dature/mypy_plugin.py +++ b/src/dature/mypy_plugin.py @@ -13,7 +13,7 @@ from collections.abc import Callable -from mypy.nodes import ARG_NAMED_OPT, ARG_STAR, ARG_STAR2 +from mypy.nodes import ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, ArgKind from mypy.options import Options from mypy.plugin import ClassDefContext, FunctionSigContext, Plugin from mypy.types import CallableType, FunctionLike @@ -27,7 +27,7 @@ def _make_args_optional(sig: CallableType) -> CallableType: - new_arg_kinds = [] + new_arg_kinds: list[ArgKind] = [] for kind in sig.arg_kinds: if kind in (ARG_STAR, ARG_STAR2): new_arg_kinds.append(kind) From 9bf07a6f9f74140e7edac706119a80987e64bc79 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 21:38:40 +0300 Subject: [PATCH 19/36] fix global config --- .../advanced/configure/advanced_configure.py | 4 +-- src/dature/config.py | 34 ++++++++++--------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index 154879b..9669f48 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -27,8 +27,8 @@ class Config: report = dature.get_load_report(config) assert report is not None -# 3. Disable debug globally via dature.configure() -dature.configure(loading={"debug": False}) +# 3. 
Reset to defaults — debug is off again +dature.configure(loading={}) config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) diff --git a/src/dature/config.py b/src/dature/config.py index 895841e..6ab9eed 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -1,10 +1,14 @@ +from collections.abc import Mapping from dataclasses import asdict, dataclass -from typing import Annotated, Any, ClassVar, TypedDict, cast +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, TypedDict, TypeVar, cast from dature.types import NestedResolveStrategy, TypeLoaderMap from dature.validators.number import Ge from dature.validators.string import MinLength +if TYPE_CHECKING: + from dature.protocols import DataclassInstance + # --8<-- [start:masking-config] @dataclass(frozen=True, slots=True) @@ -134,6 +138,16 @@ def type_loaders(self) -> TypeLoaderMap: config: _ConfigProxy = _ConfigProxy() +_D = TypeVar("_D", bound="DataclassInstance") + + +def _merge_group(current: _D, options: Mapping[str, Any] | None, cls: type[_D]) -> _D: + if options is None: + return current + if not options: + return cls() + return cls(**cast("dict[str, Any]", asdict(current) | dict(options))) + # --8<-- [start:configure] def configure( @@ -146,21 +160,9 @@ def configure( # --8<-- [end:configure] current = config.ensure_loaded() - merged_masking = ( - MaskingConfig(**cast("dict[str, Any]", asdict(current.masking) | masking)) - if masking is not None - else current.masking - ) - merged_error = ( - ErrorDisplayConfig(**cast("dict[str, Any]", asdict(current.error_display) | error_display)) - if error_display is not None - else current.error_display - ) - merged_loading = ( - LoadingConfig(**cast("dict[str, Any]", asdict(current.loading) | loading)) - if loading is not None - else current.loading - ) + merged_masking = _merge_group(current.masking, masking, MaskingConfig) + merged_error = _merge_group(current.error_display, 
error_display, ErrorDisplayConfig) + merged_loading = _merge_group(current.loading, loading, LoadingConfig) config.set_instance( DatureConfig( From ff718191c1d80e84aba06542fe556ff850cb9c9a Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 21:40:34 +0300 Subject: [PATCH 20/36] fix filename --- docs/introduction.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/introduction.md b/docs/introduction.md index 5bf9162..2830393 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -156,7 +156,7 @@ dature.Source(file="config.yaml", loader=Yaml11Loader) `file` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass). The `loader` parameter is required since there is no file extension to auto-detect from: ```python ---8<-- "examples/docs/introduction/intro_filelike.py" +--8<-- "examples/docs/introduction/intro_file_like.py" ``` !!! note From e00792a924268456ef1aec62c1c15fd95bcaed79 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 21:45:09 +0300 Subject: [PATCH 21/36] fix mypy --- src/dature/config.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/dature/config.py b/src/dature/config.py index 6ab9eed..cbc7eae 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -1,6 +1,6 @@ from collections.abc import Mapping from dataclasses import asdict, dataclass -from typing import TYPE_CHECKING, Annotated, Any, ClassVar, TypedDict, TypeVar, cast +from typing import TYPE_CHECKING, Annotated, Any, ClassVar, TypedDict, cast from dature.types import NestedResolveStrategy, TypeLoaderMap from dature.validators.number import Ge @@ -138,10 +138,8 @@ def type_loaders(self) -> TypeLoaderMap: config: _ConfigProxy = _ConfigProxy() -_D = TypeVar("_D", bound="DataclassInstance") - -def _merge_group(current: _D, options: Mapping[str, Any] | None, cls: type[_D]) -> _D: +def _merge_group[D: DataclassInstance](current: D, options: Mapping[str, Any] | 
None, cls: type[D]) -> D: if options is None: return current if not options: @@ -173,3 +171,4 @@ def configure( ) if type_loaders is not None: config.set_type_loaders(type_loaders) + config.set_type_loaders(type_loaders) From cfdc8bf14f342599ff65329a286e62b89fe53de2 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 31 Mar 2026 23:55:21 +0300 Subject: [PATCH 22/36] Trigger RTD build From c02aba50677569a61a8c591da0c787cea728e06b Mon Sep 17 00:00:00 2001 From: niccolum Date: Wed, 1 Apr 2026 00:08:42 +0300 Subject: [PATCH 23/36] Trigger RTD build From db1e20851b508ecc463f93f091cfa32d83f7dd51 Mon Sep 17 00:00:00 2001 From: niccolum Date: Wed, 1 Apr 2026 00:27:46 +0300 Subject: [PATCH 24/36] fix mask_secrets/secret_field_names behaviour --- src/dature/config.py | 1 - src/dature/loading/single.py | 41 +++++++++++------ src/dature/main.py | 6 ++- tests/masking/test_masking.py | 84 +++++++++++++++++++++++++++++++++++ tests/test_config.py | 45 +++++++++++++++++++ 5 files changed, 161 insertions(+), 16 deletions(-) diff --git a/src/dature/config.py b/src/dature/config.py index cbc7eae..ffd6b79 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -171,4 +171,3 @@ def configure( ) if type_loaders is not None: config.set_type_loaders(type_loaders) - config.set_type_loaders(type_loaders) diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index aaff90d..1836a79 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -30,9 +30,11 @@ logger = logging.getLogger("dature") -def _resolve_single_mask_secrets(metadata: Source) -> bool: +def _resolve_single_mask_secrets(metadata: Source, *, load_level: bool | None = None) -> bool: if metadata.mask_secrets is not None: return metadata.mask_secrets + if load_level is not None: + return load_level return config.masking.mask_secrets @@ -102,7 +104,7 @@ def _build_single_source_report( class _PatchContext: - def __init__( + def __init__( # noqa: PLR0913 self, *, loader_instance: 
LoaderProtocol, @@ -111,6 +113,8 @@ def __init__( metadata: Source, cache: bool, debug: bool, + secret_field_names: tuple[str, ...] | None = None, + mask_secrets: bool | None = None, ) -> None: ensure_retort(loader_instance, cls) validating_retort = loader_instance.create_validating_retort(cls) @@ -132,17 +136,17 @@ def __init__( loader_class = resolve_loader_class(metadata.loader, metadata.file) self.loader_type = loader_class.display_name - mask_secrets = _resolve_single_mask_secrets(metadata) + resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) self.secret_paths: frozenset[str] = frozenset() - if mask_secrets: - extra_patterns = metadata.secret_field_names or () + if resolved_mask_secrets: + extra_patterns = metadata.secret_field_names or secret_field_names or () self.secret_paths = build_secret_paths(cls, extra_patterns=extra_patterns) self.error_ctx = build_error_ctx( metadata, cls.__name__, secret_paths=self.secret_paths, - mask_secrets=mask_secrets, + mask_secrets=resolved_mask_secrets, ) # probe_retort is created early so adaptix sees the original signature @@ -250,23 +254,30 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq return new_init -def load_as_function( # noqa: C901, PLR0912 +def load_as_function( # noqa: C901, PLR0912, PLR0913 *, loader_instance: LoaderProtocol, file_path: FileOrStream, schema: type[DataclassInstance], metadata: Source, debug: bool, + secret_field_names: tuple[str, ...] 
| None = None, + mask_secrets: bool | None = None, ) -> DataclassInstance: loader_class = resolve_loader_class(metadata.loader, metadata.file) display_name = loader_class.display_name secret_paths: frozenset[str] = frozenset() - mask_secrets = _resolve_single_mask_secrets(metadata) - if mask_secrets: - extra_patterns = metadata.secret_field_names or () + resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) + if resolved_mask_secrets: + extra_patterns = metadata.secret_field_names or secret_field_names or () secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) - error_ctx = build_error_ctx(metadata, schema.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets) + error_ctx = build_error_ctx( + metadata, + schema.__name__, + secret_paths=secret_paths, + mask_secrets=resolved_mask_secrets, + ) load_result = handle_load_errors( func=lambda: loader_instance.load_raw(file_path), @@ -279,7 +290,7 @@ def load_as_function( # noqa: C901, PLR0912 metadata, schema.__name__, secret_paths=secret_paths, - mask_secrets=mask_secrets, + mask_secrets=resolved_mask_secrets, nested_conflicts=load_result.nested_conflicts, ) @@ -357,13 +368,15 @@ def load_as_function( # noqa: C901, PLR0912 return result -def make_decorator( +def make_decorator( # noqa: PLR0913 *, loader_instance: LoaderProtocol, file_path: FileOrStream, metadata: Source, cache: bool, debug: bool, + secret_field_names: tuple[str, ...] 
| None = None, + mask_secrets: bool | None = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: if not is_dataclass(cls): @@ -377,6 +390,8 @@ def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: metadata=metadata, cache=cache, debug=debug, + secret_field_names=secret_field_names, + mask_secrets=mask_secrets, ) cls.__init__ = _make_new_init(ctx) # type: ignore[method-assign] cls.__post_init__ = make_validating_post_init(ctx) # type: ignore[attr-defined] diff --git a/src/dature/main.py b/src/dature/main.py index 25d12ec..be60134 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -96,8 +96,6 @@ def load( # noqa: PLR0913 or field_groups != () or skip_broken_sources or skip_invalid_fields - or secret_field_names is not None - or mask_secrets is not None ): logger.warning("Merge-related parameters have no effect with a single source") @@ -146,6 +144,8 @@ def load( # noqa: PLR0913 schema=schema, metadata=metadata, debug=debug, + secret_field_names=secret_field_names, + mask_secrets=mask_secrets, ) return make_decorator( @@ -154,6 +154,8 @@ def load( # noqa: PLR0913 metadata=metadata, cache=cache, debug=debug, + secret_field_names=secret_field_names, + mask_secrets=mask_secrets, ) diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index e192d0a..932f91f 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -453,3 +453,87 @@ class Cfg: f" │ {' ' * caret_pos}{'^^^^^^^^^^^^'}\n" f" └── FILE '{json_file}', line 1" ) + + +@pytest.mark.usefixtures("_reset_config") +class TestLoadLevelMaskingParams: + def test_load_level_mask_secrets(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text(f'{{"password": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}') + + @dataclass + class Cfg: + password: str + host: str + + result = load(Source(file=json_file), schema=Cfg, debug=True, 
mask_secrets=True) + + report = get_load_report(result) + assert report is not None + assert report.merged_data == {"password": _MASKED_SECRET, "host": _PUBLIC_VALUE} + + def test_load_level_secret_field_names(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text(f'{{"my_token": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}') + + @dataclass + class Cfg: + my_token: str + host: str + + result = load( + Source(file=json_file), + schema=Cfg, + debug=True, + mask_secrets=True, + secret_field_names=("my_token",), + ) + + report = get_load_report(result) + assert report is not None + assert report.merged_data == {"my_token": _MASKED_SECRET, "host": _PUBLIC_VALUE} + + def test_source_mask_secrets_overrides_load_level(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text(f'{{"password": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}') + + @dataclass + class Cfg: + password: str + host: str + + result = load( + Source(file=json_file, mask_secrets=False), + schema=Cfg, + debug=True, + mask_secrets=True, + ) + + report = get_load_report(result) + assert report is not None + assert report.merged_data == {"password": _SECRET_VALUE, "host": _PUBLIC_VALUE} + + def test_source_secret_field_names_overrides_load_level(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text( + f'{{"nickname": "{_SECRET_VALUE}", "label": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}', + ) + + @dataclass + class Cfg: + nickname: str + label: str + host: str + + result = load( + Source(file=json_file, secret_field_names=("label",)), + schema=Cfg, + debug=True, + mask_secrets=True, + secret_field_names=("nickname",), + ) + + report = get_load_report(result) + assert report is not None + assert report.merged_data["label"] == _MASKED_SECRET + assert report.merged_data["nickname"] == _SECRET_VALUE diff --git a/tests/test_config.py b/tests/test_config.py index cfe1c47..3f6cb5f 100644 --- a/tests/test_config.py +++ 
b/tests/test_config.py @@ -106,6 +106,51 @@ def test_configure_preserves_other_groups( assert getattr(config, unchanged_group) == expected_default +@pytest.mark.usefixtures("_reset_config") +class TestConfigureEmptyDictReset: + @staticmethod + @pytest.mark.parametrize( + ("group", "override", "expected_default"), + [ + ( + "masking", + {"mask": "*****", "visible_prefix": 2, "visible_suffix": 2}, + MaskingConfig(), + ), + ( + "error_display", + {"max_visible_lines": 10, "max_line_length": 200}, + ErrorDisplayConfig(), + ), + ( + "loading", + {"cache": False, "debug": True}, + LoadingConfig(), + ), + ], + ids=["masking", "error_display", "loading"], + ) + def test_empty_dict_resets_group_to_defaults( + group: str, + override: dict[str, Any], + expected_default: MaskingConfig | ErrorDisplayConfig | LoadingConfig, + ) -> None: + configure(**{group: override}) + assert getattr(config, group) != expected_default + + configure(**{group: {}}) + assert getattr(config, group) == expected_default + + @staticmethod + def test_empty_dict_preserves_other_groups() -> None: + configure(masking={"mask": "*****"}, error_display={"max_visible_lines": 10}) + + configure(masking={}) + + assert config.masking == MaskingConfig() + assert config.error_display.max_visible_lines == 10 + + @pytest.mark.usefixtures("_reset_config") class TestEnvLoading: @staticmethod From 3ffb91753cbf77fd9dd27b124348b18ef0e74d6a Mon Sep 17 00:00:00 2001 From: niccolum Date: Wed, 1 Apr 2026 00:31:11 +0300 Subject: [PATCH 25/36] fix docs for masking parameters --- docs/features/masking.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/features/masking.md b/docs/features/masking.md index 1d5d205..32eebb1 100644 --- a/docs/features/masking.md +++ b/docs/features/masking.md @@ -111,14 +111,16 @@ Control masking via `Source`: --8<-- "examples/docs/features/masking/masking_no_mask.py:no-mask" ``` +### Per-load + +`mask_secrets` and `secret_field_names` can be passed directly to 
`dature.load()`. They apply to both single-source and multi-source modes. `Source`-level values take priority: `Source.mask_secrets` overrides `load(mask_secrets=...)` when not `None`, and `Source.secret_field_names` overrides `load(secret_field_names=...)` when not `None`. + ### In merge mode ```python --8<-- "examples/docs/features/masking/masking_merge_mode.py:merge-mode" ``` -`Source.mask_secrets` overrides the `mask_secrets` parameter of `dature.load()` when not `None`. `secret_field_names` from both source and load-level are combined. - ### Global See [Advanced — Configure](../advanced/configure.md#global-configure) for global masking defaults and all available config options. From d893e3104f851f4a606c7464d52ca354efe2f32c Mon Sep 17 00:00:00 2001 From: niccolum Date: Wed, 1 Apr 2026 09:11:34 +0300 Subject: [PATCH 26/36] secret_field_names combined --- docs/features/masking.md | 2 +- src/dature/loading/single.py | 4 ++-- tests/masking/test_masking.py | 5 +++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/docs/features/masking.md b/docs/features/masking.md index 32eebb1..7368de8 100644 --- a/docs/features/masking.md +++ b/docs/features/masking.md @@ -113,7 +113,7 @@ Control masking via `Source`: ### Per-load -`mask_secrets` and `secret_field_names` can be passed directly to `dature.load()`. They apply to both single-source and multi-source modes. `Source`-level values take priority: `Source.mask_secrets` overrides `load(mask_secrets=...)` when not `None`, and `Source.secret_field_names` overrides `load(secret_field_names=...)` when not `None`. +`mask_secrets` and `secret_field_names` can be passed directly to `dature.load()`. They apply to both single-source and multi-source modes. `Source.mask_secrets` overrides `load(mask_secrets=...)` when not `None`. `Source.secret_field_names` and `load(secret_field_names=...)` are combined. 
### In merge mode diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index 1836a79..f0a59cf 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -139,7 +139,7 @@ def __init__( # noqa: PLR0913 resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) self.secret_paths: frozenset[str] = frozenset() if resolved_mask_secrets: - extra_patterns = metadata.secret_field_names or secret_field_names or () + extra_patterns = (metadata.secret_field_names or ()) + (secret_field_names or ()) self.secret_paths = build_secret_paths(cls, extra_patterns=extra_patterns) self.error_ctx = build_error_ctx( @@ -270,7 +270,7 @@ def load_as_function( # noqa: C901, PLR0912, PLR0913 secret_paths: frozenset[str] = frozenset() resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) if resolved_mask_secrets: - extra_patterns = metadata.secret_field_names or secret_field_names or () + extra_patterns = (metadata.secret_field_names or ()) + (secret_field_names or ()) secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) error_ctx = build_error_ctx( metadata, diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index 932f91f..601e8df 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -513,7 +513,7 @@ class Cfg: assert report is not None assert report.merged_data == {"password": _SECRET_VALUE, "host": _PUBLIC_VALUE} - def test_source_secret_field_names_overrides_load_level(self, tmp_path: Path): + def test_source_and_load_secret_field_names_combined(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text( f'{{"nickname": "{_SECRET_VALUE}", "label": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}', @@ -536,4 +536,5 @@ class Cfg: report = get_load_report(result) assert report is not None assert report.merged_data["label"] == _MASKED_SECRET - assert report.merged_data["nickname"] == _SECRET_VALUE + 
assert report.merged_data["nickname"] == _MASKED_SECRET + assert report.merged_data["host"] == _PUBLIC_VALUE From d01d22b6ab87c6d915867bcdf6473215f0c5fcac Mon Sep 17 00:00:00 2001 From: niccolum Date: Wed, 1 Apr 2026 16:20:38 +0300 Subject: [PATCH 27/36] traceback as separate file --- docs/advanced/field-groups.md | 18 ++ docs/comparison/why-not-dynaconf.md | 12 +- docs/comparison/why-not-hydra.md | 6 +- docs/features/masking.md | 30 +++- docs/features/merging.md | 4 +- docs/features/validation.md | 30 ++++ .../advanced_field_groups_expansion_error.py | 25 +-- ...vanced_field_groups_expansion_error.stderr | 6 + .../advanced_field_groups_multiple_error.py | 35 +--- ...dvanced_field_groups_multiple_error.stderr | 10 ++ .../advanced_field_groups_nested_error.py | 31 +--- .../advanced_field_groups_nested_error.stderr | 6 + .../dynaconf_root_validators.py | 35 ++-- .../dynaconf_root_validators.stderr | 5 + .../why-not-dynaconf/dynaconf_validators.py | 19 +-- .../dynaconf_validators.stderr | 7 + .../why-not-hydra/hydra_validators.py | 19 +-- .../why-not-hydra/hydra_validators.stderr | 7 + .../docs/features/masking/masking_by_name.py | 16 +- .../features/masking/masking_by_name.stderr | 7 + .../features/masking/masking_heuristic.py | 22 +-- .../features/masking/masking_heuristic.stderr | 7 + .../features/masking/masking_merge_mode.py | 31 +--- .../masking/masking_merge_mode.stderr | 7 + .../docs/features/masking/masking_no_mask.py | 29 +--- .../features/masking/masking_no_mask.stderr | 7 + .../features/masking/masking_per_source.py | 29 +--- .../masking/masking_per_source.stderr | 7 + .../features/masking/masking_secret_str.py | 22 +-- .../masking/masking_secret_str.stderr | 7 + .../validation/validation_annotated.py | 38 +---- .../validation/validation_annotated.stderr | 22 +++ .../features/validation/validation_custom.py | 20 +-- .../validation/validation_custom.stderr | 7 + .../validation/validation_metadata.py | 38 ++--- .../validation/validation_metadata.stderr | 12 
++ .../validation/validation_post_init.py | 5 +- .../validation/validation_post_init.stderr | 1 + .../features/validation/validation_root.py | 29 ++-- .../validation/validation_root.stderr | 5 + src/dature/errors/exceptions.py | 6 +- tests/merging/test_field_group.py | 160 +++++++++--------- tests/test_examples.py | 40 ++++- 43 files changed, 432 insertions(+), 447 deletions(-) create mode 100644 examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr create mode 100644 examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr create mode 100644 examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr create mode 100644 examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr create mode 100644 examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr create mode 100644 examples/docs/comparison/why-not-hydra/hydra_validators.stderr create mode 100644 examples/docs/features/masking/masking_by_name.stderr create mode 100644 examples/docs/features/masking/masking_heuristic.stderr create mode 100644 examples/docs/features/masking/masking_merge_mode.stderr create mode 100644 examples/docs/features/masking/masking_no_mask.stderr create mode 100644 examples/docs/features/masking/masking_per_source.stderr create mode 100644 examples/docs/features/masking/masking_secret_str.stderr create mode 100644 examples/docs/features/validation/validation_annotated.stderr create mode 100644 examples/docs/features/validation/validation_custom.stderr create mode 100644 examples/docs/features/validation/validation_metadata.stderr create mode 100644 examples/docs/features/validation/validation_post_init.stderr create mode 100644 examples/docs/features/validation/validation_root.stderr diff --git a/docs/advanced/field-groups.md b/docs/advanced/field-groups.md index 59d441d..8b456f6 100644 --- a/docs/advanced/field-groups.md +++ b/docs/advanced/field-groups.md @@ -40,6 +40,12 @@ If 
`overrides.yaml` changes `host` and `port` together, the group constraint is --8<-- "examples/docs/advanced/field_groups/sources/field_groups_partial_overrides.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr" + ``` + ## Nested Dataclass Expansion Passing a dataclass field expands it into all its leaf fields: @@ -62,6 +68,12 @@ Passing a dataclass field expands it into all its leaf fields: --8<-- "examples/docs/advanced/field_groups/sources/advanced_field_groups_expansion_error_overrides.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr" + ``` + ## Multiple Groups If a source partially overrides multiple groups, all violations are reported: @@ -84,4 +96,10 @@ If a source partially overrides multiple groups, all violations are reported: --8<-- "examples/docs/advanced/field_groups/sources/advanced_field_groups_multiple_error_overrides.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr" + ``` + Field groups work with all merge strategies and can be combined with `field_merges`. diff --git a/docs/comparison/why-not-dynaconf.md b/docs/comparison/why-not-dynaconf.md index a856576..0a2b4fe 100644 --- a/docs/comparison/why-not-dynaconf.md +++ b/docs/comparison/why-not-dynaconf.md @@ -76,13 +76,21 @@ This gives flexibility — validators can be defined in a different module, reus dature supports **both approaches**. 
Inline validators live with the type: ```python ---8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py:validators" +--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py" +``` + +```title="Error" +--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr" ``` And separate validators when you need cross-field checks or decoupled validation logic: ```python ---8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py:root-validators" +--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py" +``` + +```title="Error" +--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr" ``` You choose the style that fits — or mix them. diff --git a/docs/comparison/why-not-hydra.md b/docs/comparison/why-not-hydra.md index 5e6b16b..0e9ac71 100644 --- a/docs/comparison/why-not-hydra.md +++ b/docs/comparison/why-not-hydra.md @@ -89,7 +89,11 @@ class Config: dature uses `Annotated` validators: ```python ---8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.py:validators" +--8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.py" +``` + +```title="Error" +--8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.stderr" ``` Plus root validators for cross-field checks, custom validators, and standard `__post_init__`. 
diff --git a/docs/features/masking.md b/docs/features/masking.md index 7368de8..d2c34d7 100644 --- a/docs/features/masking.md +++ b/docs/features/masking.md @@ -52,6 +52,10 @@ dature uses three methods to identify secrets: --8<-- "examples/docs/features/masking/sources/masking_secret_str.yaml" ``` + ```title="Error" + --8<-- "examples/docs/features/masking/masking_secret_str.stderr" + ``` + === "By name" Fields whose names contain known patterns are automatically masked in error messages: @@ -64,6 +68,10 @@ dature uses three methods to identify secrets: --8<-- "examples/docs/features/masking/sources/masking_by_name.yaml" ``` + ```title="Error" + --8<-- "examples/docs/features/masking/masking_by_name.stderr" + ``` + === "Heuristic" With `dature[secure]`, values that look like random tokens are masked in error messages even if the field name is not a known secret pattern: @@ -76,6 +84,10 @@ dature uses three methods to identify secrets: --8<-- "examples/docs/features/masking/sources/masking_heuristic.yaml" ``` + ```title="Error" + --8<-- "examples/docs/features/masking/masking_heuristic.stderr" + ``` + ## Mask Format By default, the entire value is replaced with ``: @@ -102,13 +114,21 @@ Control masking via `Source`: === "secret_field_names" ```python - --8<-- "examples/docs/features/masking/masking_per_source.py:per-source" + --8<-- "examples/docs/features/masking/masking_per_source.py" + ``` + + ```title="Error" + --8<-- "examples/docs/features/masking/masking_per_source.stderr" ``` === "mask_secrets=False" ```python - --8<-- "examples/docs/features/masking/masking_no_mask.py:no-mask" + --8<-- "examples/docs/features/masking/masking_no_mask.py" + ``` + + ```title="Error" + --8<-- "examples/docs/features/masking/masking_no_mask.stderr" ``` ### Per-load @@ -118,7 +138,11 @@ Control masking via `Source`: ### In merge mode ```python ---8<-- "examples/docs/features/masking/masking_merge_mode.py:merge-mode" +--8<-- "examples/docs/features/masking/masking_merge_mode.py" 
+``` + +```title="Error" +--8<-- "examples/docs/features/masking/masking_merge_mode.stderr" ``` ### Global diff --git a/docs/features/merging.md b/docs/features/merging.md index 80da4c2..9410b95 100644 --- a/docs/features/merging.md +++ b/docs/features/merging.md @@ -145,7 +145,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc --8<-- "examples/docs/shared/common_raise_on_conflict_b.yaml" ``` -For per-field strategy overrides, see [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies). To enforce that related fields are always overridden together, see [Field Groups](../advanced/merge-rules.md#field-groups). +For per-field strategy overrides, see [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies). To enforce that related fields are always overridden together, see [Field Groups](../advanced/field-groups.md). ## Merge Parameters @@ -155,7 +155,7 @@ All merge-related parameters are passed directly to `dature.load()` as keyword a |-----------|-------------| | `strategy` | Global merge strategy. Default: `"last_wins"`. See [Merge Strategies](#merge-strategies) | | `field_merges` | Per-field merge strategy overrides. See [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies) | -| `field_groups` | Enforce related fields are overridden together. See [Field Groups](../advanced/merge-rules.md#field-groups) | +| `field_groups` | Enforce related fields are overridden together. See [Field Groups](../advanced/field-groups.md) | | `skip_broken_sources` | Skip sources that fail to load. See [Skipping Broken Sources](../advanced/merge-rules.md#skipping-broken-sources) | | `skip_invalid_fields` | Drop fields with invalid values. See [Skipping Invalid Fields](../advanced/merge-rules.md#skipping-invalid-fields) | | `expand_env_vars` | ENV variable expansion mode. 
See [ENV Expansion](../advanced/env-expansion.md) | diff --git a/docs/features/validation.md b/docs/features/validation.md index 9af1ed5..7592428 100644 --- a/docs/features/validation.md +++ b/docs/features/validation.md @@ -18,6 +18,12 @@ Declare validators using `typing.Annotated`: --8<-- "examples/docs/features/validation/sources/validation_annotated_invalid.json5" ``` +=== "Error" + + ``` + --8<-- "examples/docs/features/validation/validation_annotated.stderr" + ``` + ### Available Validators **Numbers** (`dature.validators.number`): @@ -68,6 +74,12 @@ Validate the entire object after loading: --8<-- "examples/docs/features/validation/sources/validation_root_invalid.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/features/validation/validation_root.stderr" + ``` + Root validators receive the fully constructed dataclass instance and return `True` if valid. ## Metadata Validators @@ -86,6 +98,12 @@ Field validators can be specified in `Source` using the `validators` parameter. --8<-- "examples/docs/features/validation/sources/validation_metadata_invalid.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/features/validation/validation_metadata.stderr" + ``` + A single validator can be passed directly. Multiple validators require a tuple: ```python @@ -120,6 +138,12 @@ Create your own validators by implementing `get_validator_func()` and `get_error --8<-- "examples/docs/features/validation/sources/validation_custom_invalid.json5" ``` +=== "Error" + + ``` + --8<-- "examples/docs/features/validation/validation_custom.stderr" + ``` + Custom validators can be combined with built-in ones in `Annotated`. 
## `__post_init__` and `@property` @@ -138,6 +162,12 @@ Standard dataclass `__post_init__` and `@property` work as expected — dature p --8<-- "examples/docs/features/validation/sources/validation_post_init_invalid.yaml" ``` +=== "Error" + + ``` + --8<-- "examples/docs/features/validation/validation_post_init.stderr" + ``` + Both approaches work in function mode and decorator mode. ## Error Format diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index d0cdd34..b8f0ab5 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -2,10 +2,8 @@ from dataclasses import dataclass from pathlib import Path -from textwrap import dedent import dature -from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" @@ -25,20 +23,9 @@ class Config: # (dature.F[Config].database, dature.F[Config].port) # expands to (database.host, database.port, port) -try: - dature.load( - dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), - schema=Config, - field_groups=((dature.F[Config].database, dature.F[Config].port),), - ) -except FieldGroupError as exc: - defaults_path = str(SOURCES_DIR / "field_groups_nested_defaults.yaml") - overrides_path = str(SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml") - assert str(exc) == dedent(f"""\ - Config field group errors (1) - - Field group (database.host, database.port, port) partially overridden in source 1 - changed: database.host (from source yaml1.2 '{overrides_path}'), port (from source yaml1.2 '{overrides_path}') - unchanged: database.port (from source yaml1.2 '{defaults_path}') -""") +dature.load( + dature.Source(file=SOURCES_DIR / 
"field_groups_nested_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), + schema=Config, + field_groups=((dature.F[Config].database, dature.F[Config].port),), +) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr new file mode 100644 index 0000000..b690322 --- /dev/null +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr @@ -0,0 +1,6 @@ + | dature.errors.exceptions.FieldGroupError: Config field group errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldGroupViolationError: Field group (database.host, database.port, port) partially overridden in source 1 + | changed: database.host (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_expansion_error_overrides.yaml'), port (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_expansion_error_overrides.yaml') + | unchanged: database.port (from source yaml1.2 '{SOURCES_DIR}/field_groups_nested_defaults.yaml') + +------------------------------------ diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index 9b991e8..df6bb5e 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -2,10 +2,8 @@ from dataclasses import dataclass from pathlib import Path -from textwrap import dedent import dature -from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -20,27 +18,12 @@ class Config: password: str -try: - dature.load( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / 
"advanced_field_groups_multiple_error_overrides.yaml"), - schema=Config, - field_groups=( - (dature.F[Config].host, dature.F[Config].port), - (dature.F[Config].user, dature.F[Config].password), - ), - ) -except FieldGroupError as exc: - defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml") - overrides_path = str(SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml") - assert str(exc) == dedent(f"""\ - Config field group errors (2) - - Field group (host, port) partially overridden in source 1 - changed: host (from source yaml1.2 '{overrides_path}') - unchanged: port (from source yaml1.2 '{defaults_path}') - - Field group (user, password) partially overridden in source 1 - changed: user (from source yaml1.2 '{overrides_path}') - unchanged: password (from source yaml1.2 '{defaults_path}') -""") +dature.load( + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), + schema=Config, + field_groups=( + (dature.F[Config].host, dature.F[Config].port), + (dature.F[Config].user, dature.F[Config].password), + ), +) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr new file mode 100644 index 0000000..1e3a6a3 --- /dev/null +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr @@ -0,0 +1,10 @@ + | dature.errors.exceptions.FieldGroupError: Config field group errors (2) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1 + | changed: host (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_multiple_error_overrides.yaml') + | unchanged: port (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + +---------------- 2 ---------------- + | 
dature.errors.exceptions.FieldGroupViolationError: Field group (user, password) partially overridden in source 1 + | changed: user (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_multiple_error_overrides.yaml') + | unchanged: password (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + +------------------------------------ diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 492fa19..32f67e9 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -2,10 +2,8 @@ from dataclasses import dataclass from pathlib import Path -from textwrap import dedent import dature -from dature.errors import FieldGroupError SOURCES_DIR = Path(__file__).parent / "sources" SHARED_DIR = Path(__file__).parents[2] / "shared" @@ -20,23 +18,12 @@ class Config: password: str -try: - dature.load( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), - schema=Config, - field_groups=( - (dature.F[Config].host, dature.F[Config].port), - (dature.F[Config].user, dature.F[Config].password), - ), - ) -except FieldGroupError as exc: - defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml") - overrides_path = str(SOURCES_DIR / "field_groups_partial_overrides.yaml") - assert str(exc) == dedent(f"""\ - Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source yaml1.2 '{overrides_path}') - unchanged: port (from source yaml1.2 '{defaults_path}') -""") +dature.load( + dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), + schema=Config, + field_groups=( + (dature.F[Config].host, 
dature.F[Config].port), + (dature.F[Config].user, dature.F[Config].password), + ), +) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr new file mode 100644 index 0000000..e427c26 --- /dev/null +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr @@ -0,0 +1,6 @@ + | dature.errors.exceptions.FieldGroupError: Config field group errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1 + | changed: host (from source yaml1.2 '{SOURCES_DIR}/field_groups_partial_overrides.yaml') + | unchanged: port (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + +------------------------------------ diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index 6592e53..da358d6 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -5,7 +5,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt from dature.validators.root import RootValidator @@ -19,33 +18,19 @@ class Config: debug: bool = False -# --8<-- [start:root-validators] def check_debug_port(config: Config) -> bool: return not (config.debug and config.port == 80) -try: - dature.load( - dature.Source( - file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", - root_validators=( - RootValidator( - check_debug_port, - error_message="debug mode should not use port 80", - ), +dature.load( + dature.Source( + file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", + root_validators=( + RootValidator( + check_debug_port, + error_message="debug mode should not use port 
80", ), ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "dynaconf_root_validators_invalid.toml") - assert str(exc) == "Config loading errors (1)" - # fmt: off - assert str(exc.exceptions[0]) == ( - " [] debug mode should not use port 80\n" - f" └── FILE '{source}'" - ) - # fmt: on -else: - raise AssertionError("Expected DatureConfigError") -# --8<-- [end:root-validators] + ), + schema=Config, +) diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr new file mode 100644 index 0000000..c6583ad --- /dev/null +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr @@ -0,0 +1,5 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [] debug mode should not use port 80 + | └── FILE '{SOURCES_DIR}/dynaconf_root_validators_invalid.toml' + +------------------------------------ diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index c2107b0..e5770a8 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -5,13 +5,11 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt SOURCES_DIR = Path(__file__).parent / "sources" -# --8<-- [start:validators] @dataclass class Config: host: str @@ -19,19 +17,4 @@ class Config: debug: bool = False -try: - dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml") - assert str(exc) == "Config loading errors (1)" - # fmt: off - assert str(exc.exceptions[0]) == ( - " 
[port] Value must be greater than 0\n" - " ├── port = -1\n" - " │ ^^\n" - f" └── FILE '{source}', line 2" - ) - # fmt: on -else: - raise AssertionError("Expected DatureConfigError") -# --8<-- [end:validators] +dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config) diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr new file mode 100644 index 0000000..5acfd40 --- /dev/null +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0 + | ├── port = -1 + | │ ^^ + | └── FILE '{SOURCES_DIR}/dynaconf_validators_invalid.toml', line 2 + +------------------------------------ diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index d128f18..fa15302 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -5,32 +5,15 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.number import Gt, Lt SOURCES_DIR = Path(__file__).parent / "sources" -# --8<-- [start:validators] @dataclass class Config: host: str port: Annotated[int, Gt(0), Lt(65536)] = 8080 -try: - dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), schema=Config) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "hydra_validators_invalid.yaml") - assert str(exc) == "Config loading errors (1)" - # fmt: off - assert str(exc.exceptions[0]) == ( - " [port] Value must be greater than 0\n" - " ├── port: -1\n" - " │ ^^\n" - f" └── FILE '{source}', line 2" - ) - # fmt: on -else: - raise 
AssertionError("Expected DatureConfigError") -# --8<-- [end:validators] +dature.load(dature.Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), schema=Config) diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.stderr b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr new file mode 100644 index 0000000..3f028a2 --- /dev/null +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0 + | ├── port: -1 + | │ ^^ + | └── FILE '{SOURCES_DIR}/hydra_validators_invalid.yaml', line 2 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index ee33b16..2e7ff9e 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -5,7 +5,6 @@ from typing import Literal import dature -from dature.errors import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,17 +15,4 @@ class Config: host: str -try: - dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_by_name.yaml") - assert str(exc) == "Config loading errors (1)" - assert len(exc.exceptions) == 1 - assert str(exc.exceptions[0]) == ( - f" [password] Invalid variant: ''\n" - f' ├── password: ""\n' - f" │ ^^^^^^^^^^\n" - f" └── FILE '{source}', line 1" - ) -else: - raise AssertionError("Expected DatureConfigError") +dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) diff --git a/examples/docs/features/masking/masking_by_name.stderr b/examples/docs/features/masking/masking_by_name.stderr new file mode 100644 index 0000000..50473e9 --- /dev/null +++ 
b/examples/docs/features/masking/masking_by_name.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [password] Invalid variant: '' + | ├── password: "" + | │ ^^^^^^^^^^ + | └── FILE '{SOURCES_DIR}/masking_by_name.yaml', line 1 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index 03a0614..f30e560 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -5,7 +5,6 @@ from typing import Literal import dature -from dature.errors import DatureConfigError SOURCES_DIR = Path(__file__).parent / "sources" @@ -16,20 +15,7 @@ class Config: host: str -try: - dature.load( - dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_heuristic.yaml") - assert str(exc) == "Config loading errors (1)" - assert len(exc.exceptions) == 1 - assert str(exc.exceptions[0]) == ( - " [connection_id] Invalid variant: ''\n" - ' ├── connection_id: ""\n' - " │ ^^^^^^^^^^\n" - f" └── FILE '{source}', line 1" - ) -else: - raise AssertionError("Expected DatureConfigError") +dature.load( + dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), + schema=Config, +) diff --git a/examples/docs/features/masking/masking_heuristic.stderr b/examples/docs/features/masking/masking_heuristic.stderr new file mode 100644 index 0000000..67151ba --- /dev/null +++ b/examples/docs/features/masking/masking_heuristic.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [connection_id] Invalid variant: '' + | ├── connection_id: "" + | │ ^^^^^^^^^^ + | 
└── FILE '{SOURCES_DIR}/masking_heuristic.yaml', line 1 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index be107c3..6e30537 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -5,7 +5,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" @@ -18,25 +17,11 @@ class Config: api_key: Annotated[str, MinLength(20)] = "" -# --8<-- [start:merge-mode] -try: - dature.load( - dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), - dature.Source( - file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", - secret_field_names=("api_key",), - ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_merge_mode_secrets.yaml") - assert str(exc) == "Config loading errors (1)" - assert str(exc.exceptions[0]) == ( - " [api_key] Value must have at least 20 characters\n" - ' ├── api_key: ""\n' - " │ ^^^^^^^^^^\n" - f" └── FILE '{source}', line 1" - ) -else: - raise AssertionError("Expected DatureConfigError") -# --8<-- [end:merge-mode] +dature.load( + dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), + dature.Source( + file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", + secret_field_names=("api_key",), + ), + schema=Config, +) diff --git a/examples/docs/features/masking/masking_merge_mode.stderr b/examples/docs/features/masking/masking_merge_mode.stderr new file mode 100644 index 0000000..ce627aa --- /dev/null +++ b/examples/docs/features/masking/masking_merge_mode.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters 
+ | ├── api_key: "" + | │ ^^^^^^^^^^ + | └── FILE '{SOURCES_DIR}/masking_merge_mode_secrets.yaml', line 1 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index fbca751..b7167e5 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -5,7 +5,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,24 +16,10 @@ class Config: host: str -# --8<-- [start:no-mask] -try: - dature.load( - dature.Source( - file=SOURCES_DIR / "masking_per_source.yaml", - mask_secrets=False, - ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_per_source.yaml") - assert str(exc) == "Config loading errors (1)" - assert str(exc.exceptions[0]) == ( - " [api_key] Value must have at least 20 characters\n" - ' ├── api_key: "short"\n' - " │ ^^^^^\n" - f" └── FILE '{source}', line 1" - ) -else: - raise AssertionError("Expected DatureConfigError") -# --8<-- [end:no-mask] +dature.load( + dature.Source( + file=SOURCES_DIR / "masking_per_source.yaml", + mask_secrets=False, + ), + schema=Config, +) diff --git a/examples/docs/features/masking/masking_no_mask.stderr b/examples/docs/features/masking/masking_no_mask.stderr new file mode 100644 index 0000000..bd35ea3 --- /dev/null +++ b/examples/docs/features/masking/masking_no_mask.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters + | ├── api_key: "short" + | │ ^^^^^ + | └── FILE '{SOURCES_DIR}/masking_per_source.yaml', line 1 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_per_source.py 
b/examples/docs/features/masking/masking_per_source.py index 063e98b..7b77817 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -5,7 +5,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.string import MinLength SOURCES_DIR = Path(__file__).parent / "sources" @@ -17,24 +16,10 @@ class Config: host: str -# --8<-- [start:per-source] -try: - dature.load( - dature.Source( - file=SOURCES_DIR / "masking_per_source.yaml", - secret_field_names=("api_key",), - ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_per_source.yaml") - assert str(exc) == "Config loading errors (1)" - assert str(exc.exceptions[0]) == ( - " [api_key] Value must have at least 20 characters\n" - ' ├── api_key: ""\n' - " │ ^^^^^^^^^^\n" - f" └── FILE '{source}', line 1" - ) -else: - raise AssertionError("Expected DatureConfigError") -# --8<-- [end:per-source] +dature.load( + dature.Source( + file=SOURCES_DIR / "masking_per_source.yaml", + secret_field_names=("api_key",), + ), + schema=Config, +) diff --git a/examples/docs/features/masking/masking_per_source.stderr b/examples/docs/features/masking/masking_per_source.stderr new file mode 100644 index 0000000..c4a1368 --- /dev/null +++ b/examples/docs/features/masking/masking_per_source.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters + | ├── api_key: "" + | │ ^^^^^^^^^^ + | └── FILE '{SOURCES_DIR}/masking_per_source.yaml', line 1 + +------------------------------------ diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index 325bfa2..8eebf2b 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ 
b/examples/docs/features/masking/masking_secret_str.py @@ -4,7 +4,6 @@ from pathlib import Path import dature -from dature.errors import DatureConfigError from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr @@ -18,20 +17,7 @@ class Config: host: str -try: - config = dature.load( - dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "masking_secret_str.yaml") - assert str(exc) == "Config loading errors (1)" - assert len(exc.exceptions) == 1 - assert str(exc.exceptions[0]) == ( - " [card_number] Card number must contain only digits\n" - f' ├── card_number: ""\n' - " │ ^^^^^^^^^^\n" - f" └── FILE '{source}', line 2" - ) -else: - raise AssertionError("Expected DatureConfigError") +dature.load( + dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), + schema=Config, +) diff --git a/examples/docs/features/masking/masking_secret_str.stderr b/examples/docs/features/masking/masking_secret_str.stderr new file mode 100644 index 0000000..ec31dac --- /dev/null +++ b/examples/docs/features/masking/masking_secret_str.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [card_number] Card number must contain only digits + | ├── card_number: "" + | │ ^^^^^^^^^^ + | └── FILE '{SOURCES_DIR}/masking_secret_str.yaml', line 2 + +------------------------------------ diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index 54bd771..b52925e 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -5,7 +5,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.number import Ge, Le from 
dature.validators.sequence import MinItems, UniqueItems from dature.validators.string import MaxLength, MinLength @@ -21,36 +20,7 @@ class ServiceConfig: workers: Annotated[int, Ge(1)] -try: - dature.load( - dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), - schema=ServiceConfig, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "validation_annotated_invalid.json5") - assert str(exc) == "ServiceConfig loading errors (4)" - assert len(exc.exceptions) == 4 - assert str(exc.exceptions[0]) == ( - f" [port] Value must be greater than or equal to 1\n" - f" ├── port: 0,\n" - f" │ ^\n" - f" └── FILE '{source}', line 3" - ) - assert str(exc.exceptions[1]) == ( - f" [name] Value must have at least 3 characters\n" - f' ├── name: "ab",\n' - f" │ ^^\n" - f" └── FILE '{source}', line 4" - ) - assert str(exc.exceptions[2]) == ( - f" [tags] Value must contain unique items\n" - f' ├── tags: ["web", "web"],\n' - f" │ ^^^^^^^^^^^^^^\n" - f" └── FILE '{source}', line 5" - ) - assert str(exc.exceptions[3]) == ( - f" [workers] Value must be greater than or equal to 1\n" - f" ├── workers: 0,\n" - f" │ ^\n" - f" └── FILE '{source}', line 6" - ) +dature.load( + dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), + schema=ServiceConfig, +) diff --git a/examples/docs/features/validation/validation_annotated.stderr b/examples/docs/features/validation/validation_annotated.stderr new file mode 100644 index 0000000..b9babba --- /dev/null +++ b/examples/docs/features/validation/validation_annotated.stderr @@ -0,0 +1,22 @@ + | dature.errors.exceptions.DatureConfigError: ServiceConfig loading errors (4) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1 + | ├── port: 0, + | │ ^ + | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 3 + +---------------- 2 ---------------- + | dature.errors.exceptions.FieldLoadError: [name] Value must have at 
least 3 characters + | ├── name: "ab", + | │ ^^ + | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 4 + +---------------- 3 ---------------- + | dature.errors.exceptions.FieldLoadError: [tags] Value must contain unique items + | ├── tags: ["web", "web"], + | │ ^^^^^^^^^^^^^^ + | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 5 + +---------------- 4 ---------------- + | dature.errors.exceptions.FieldLoadError: [workers] Value must be greater than or equal to 1 + | ├── workers: 0, + | │ ^ + | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 6 + +------------------------------------ diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index da50e1f..a334408 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -6,7 +6,6 @@ from typing import Annotated import dature -from dature.errors import DatureConfigError from dature.validators.number import Ge SOURCES_DIR = Path(__file__).parent / "sources" @@ -35,18 +34,7 @@ class ServiceConfig: workers: Annotated[int, Ge(1), Divisible(2)] -try: - dature.load( - dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), - schema=ServiceConfig, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "validation_custom_invalid.json5") - assert str(exc) == "ServiceConfig loading errors (1)" - assert len(exc.exceptions) == 1 - assert str(exc.exceptions[0]) == ( - f" [workers] Value must be divisible by 2\n" - f" ├── workers: 3,\n" - f" │ ^\n" - f" └── FILE '{source}', line 5" - ) +dature.load( + dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), + schema=ServiceConfig, +) diff --git a/examples/docs/features/validation/validation_custom.stderr b/examples/docs/features/validation/validation_custom.stderr new file mode 100644 index 0000000..f9fbfad --- /dev/null +++ 
b/examples/docs/features/validation/validation_custom.stderr @@ -0,0 +1,7 @@ + | dature.errors.exceptions.DatureConfigError: ServiceConfig loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [workers] Value must be divisible by 2 + | ├── workers: 3, + | │ ^ + | └── FILE '{SOURCES_DIR}/validation_custom_invalid.json5', line 5 + +------------------------------------ diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 42f5a30..9d98c42 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -4,7 +4,6 @@ from pathlib import Path import dature -from dature.errors import DatureConfigError from dature.validators.number import Ge, Lt from dature.validators.string import MinLength @@ -18,30 +17,13 @@ class Config: debug: bool = False -try: - dature.load( - dature.Source( - file=SOURCES_DIR / "validation_metadata_invalid.yaml", - validators={ - dature.F[Config].host: MinLength(1), - dature.F[Config].port: (Ge(1), Lt(65536)), - }, - ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "validation_metadata_invalid.yaml") - assert str(exc) == "Config loading errors (2)" - assert len(exc.exceptions) == 2 - assert str(exc.exceptions[0]) == ( - f" [host] Value must have at least 1 characters\n" - f' ├── host: ""\n' - f" │ ^^\n" - f" └── FILE '{source}', line 1" - ) # fmt: skip - assert str(exc.exceptions[1]) == ( - f" [port] Value must be greater than or equal to 1\n" - f" ├── port: 0\n" - f" │ ^\n" - f" └── FILE '{source}', line 2" - ) +dature.load( + dature.Source( + file=SOURCES_DIR / "validation_metadata_invalid.yaml", + validators={ + dature.F[Config].host: MinLength(1), + dature.F[Config].port: (Ge(1), Lt(65536)), + }, + ), + schema=Config, +) diff --git a/examples/docs/features/validation/validation_metadata.stderr 
b/examples/docs/features/validation/validation_metadata.stderr new file mode 100644 index 0000000..db21bb5 --- /dev/null +++ b/examples/docs/features/validation/validation_metadata.stderr @@ -0,0 +1,12 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (2) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [host] Value must have at least 1 characters + | ├── host: "" + | │ ^^ + | └── FILE '{SOURCES_DIR}/validation_metadata_invalid.yaml', line 1 + +---------------- 2 ---------------- + | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1 + | ├── port: 0 + | │ ^ + | └── FILE '{SOURCES_DIR}/validation_metadata_invalid.yaml', line 2 + +------------------------------------ diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 8c4d25d..9673cad 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -24,7 +24,4 @@ def address(self) -> str: return f"{self.host}:{self.port}" -try: - dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config) -except ValueError as exc: - assert str(exc) == "port must be between 1 and 65535, got 99999" +dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config) diff --git a/examples/docs/features/validation/validation_post_init.stderr b/examples/docs/features/validation/validation_post_init.stderr new file mode 100644 index 0000000..5bfa833 --- /dev/null +++ b/examples/docs/features/validation/validation_post_init.stderr @@ -0,0 +1 @@ +ValueError: port must be between 1 and 65535, got 99999 diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index e4b98bc..23a3ca5 100644 --- a/examples/docs/features/validation/validation_root.py +++ 
b/examples/docs/features/validation/validation_root.py @@ -4,7 +4,6 @@ from pathlib import Path import dature -from dature.errors import DatureConfigError from dature.validators.root import RootValidator SOURCES_DIR = Path(__file__).parent / "sources" @@ -23,23 +22,15 @@ def check_debug_not_on_production(obj: Config) -> bool: return True -try: - dature.load( - dature.Source( - file=SOURCES_DIR / "validation_root_invalid.yaml", - root_validators=( - RootValidator( - func=check_debug_not_on_production, - error_message="debug=True is not allowed on non-localhost hosts", - ), +dature.load( + dature.Source( + file=SOURCES_DIR / "validation_root_invalid.yaml", + root_validators=( + RootValidator( + func=check_debug_not_on_production, + error_message="debug=True is not allowed on non-localhost hosts", ), ), - schema=Config, - ) -except DatureConfigError as exc: - source = str(SOURCES_DIR / "validation_root_invalid.yaml") - assert str(exc) == "Config loading errors (1)" - assert len(exc.exceptions) == 1 - assert str(exc.exceptions[0]) == ( - f" [] debug=True is not allowed on non-localhost hosts\n └── FILE '{source}'" - ) + ), + schema=Config, +) diff --git a/examples/docs/features/validation/validation_root.stderr b/examples/docs/features/validation/validation_root.stderr new file mode 100644 index 0000000..cceaa7f --- /dev/null +++ b/examples/docs/features/validation/validation_root.stderr @@ -0,0 +1,5 @@ + | dature.errors.exceptions.DatureConfigError: Config loading errors (1) + +-+---------------- 1 ---------------- + | dature.errors.exceptions.FieldLoadError: [] debug=True is not allowed on non-localhost hosts + | └── FILE '{SOURCES_DIR}/validation_root_invalid.yaml' + +------------------------------------ diff --git a/src/dature/errors/exceptions.py b/src/dature/errors/exceptions.py index d970e58..40e009b 100644 --- a/src/dature/errors/exceptions.py +++ b/src/dature/errors/exceptions.py @@ -344,8 +344,4 @@ def __new__( return super().__new__(cls, dataclass_name, 
errors) def __str__(self) -> str: - lines = [f"{self.dataclass_name} field group errors ({len(self.exceptions)})", ""] - for exc in self.exceptions: - lines.append(str(exc)) - lines.append("") - return "\n".join(lines) + return f"{self.dataclass_name} field group errors ({len(self.exceptions)})" diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index c998276..7f8660e 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -2,7 +2,6 @@ from dataclasses import dataclass from pathlib import Path -from textwrap import dedent import pytest @@ -131,13 +130,12 @@ class Config: field_groups=((F[Config].host, F[Config].port),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source {overrides_meta!r}) - unchanged: port (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (host, port) partially overridden in source 1\n" + f" changed: host (from source {overrides_meta!r})\n" + f" unchanged: port (from source {defaults_meta!r})" + ) def test_partial_change_field_present_but_equal(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -162,13 +160,12 @@ class Config: field_groups=((F[Config].host, F[Config].port),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source {overrides_meta!r}) - unchanged: port (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (host, port) partially overridden in source 1\n" + f" changed: host (from source {overrides_meta!r})\n" + f" unchanged: port (from source {defaults_meta!r})" + ) def 
test_partial_change_with_first_wins(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -241,13 +238,12 @@ class Config: field_groups=((F[Config].database,),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (database.host, database.port) partially overridden in source 1 - changed: database.host (from source {overrides_meta!r}) - unchanged: database.port (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (database.host, database.port) partially overridden in source 1\n" + f" changed: database.host (from source {overrides_meta!r})\n" + f" unchanged: database.port (from source {defaults_meta!r})" + ) def test_auto_expand_all_changed_ok(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -305,13 +301,12 @@ class Config: field_groups=((F[Config].host, F[Config].port),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source {b_meta!r}) - unchanged: port (from source {a_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (host, port) partially overridden in source 1\n" + f" changed: host (from source {b_meta!r})\n" + f" unchanged: port (from source {a_meta!r})" + ) def test_three_sources_all_ok(self, tmp_path: Path): a = tmp_path / "a.json" @@ -369,13 +364,12 @@ class Config: ), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (user, password) partially overridden in source 1 - changed: user (from source {overrides_meta!r}) - unchanged: password (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (user, password) partially 
overridden in source 1\n" + f" changed: user (from source {overrides_meta!r})\n" + f" unchanged: password (from source {defaults_meta!r})" + ) class TestFieldGroupWithFieldMerges: @@ -473,13 +467,12 @@ class Config: field_groups=((F[Config].host, F[Config].port),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (host, port) partially overridden in source 1 - changed: host (from source {overrides_meta!r}) - unchanged: port (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (host, port) partially overridden in source 1\n" + f" changed: host (from source {overrides_meta!r})\n" + f" unchanged: port (from source {defaults_meta!r})" + ) def test_multiple_violations_message(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -509,17 +502,17 @@ class Config: ), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (2) - - Field group (host, port) partially overridden in source 1 - changed: host (from source {overrides_meta!r}) - unchanged: port (from source {defaults_meta!r}) - - Field group (user, password) partially overridden in source 1 - changed: user (from source {overrides_meta!r}) - unchanged: password (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (2)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (host, port) partially overridden in source 1\n" + f" changed: host (from source {overrides_meta!r})\n" + f" unchanged: port (from source {defaults_meta!r})" + ) + assert str(exc_info.value.exceptions[1]) == ( + f" Field group (user, password) partially overridden in source 1\n" + f" changed: user (from source {overrides_meta!r})\n" + f" unchanged: password (from source {defaults_meta!r})" + ) class TestFieldGroupMixedExpandAndFlat: @@ -617,13 +610,14 @@ class Config: field_groups=((F[Config].database, 
F[Config].timeout),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (database.host, database.port, timeout) partially overridden in source 1 - changed: timeout (from source {overrides_meta!r}) - unchanged: database.host (from source {defaults_meta!r}), database.port (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + defaults_repr = repr(defaults_meta) + overrides_repr = repr(overrides_meta) + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (database.host, database.port, timeout) partially overridden in source 1\n" + f" changed: timeout (from source {overrides_repr})\n" + f" unchanged: database.host (from source {defaults_repr}), database.port (from source {defaults_repr})" + ) def test_nested_partial_flat_not(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -655,13 +649,12 @@ class Config: field_groups=((F[Config].database, F[Config].timeout),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (database.host, database.port, timeout) partially overridden in source 1 - changed: database.host (from source {overrides_meta!r}) - unchanged: database.port (from source {defaults_meta!r}), timeout (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (database.host, database.port, timeout) partially overridden in source 1\n" + f" changed: database.host (from source {overrides_meta!r})\n" + f" unchanged: database.port (from source {defaults_meta!r}), timeout (from source {defaults_meta!r})" + ) def test_nested_all_changed_flat_not(self, tmp_path: Path): defaults = tmp_path / "defaults.json" @@ -674,8 +667,8 @@ def test_nested_all_changed_flat_not(self, tmp_path: Path): defaults_meta = Source(file=defaults) overrides_meta = Source(file=overrides) - d = repr(defaults_meta) - o = 
repr(overrides_meta) + defaults_repr = repr(defaults_meta) + overrides_repr = repr(overrides_meta) @dataclass class Database: @@ -695,13 +688,13 @@ class Config: field_groups=((F[Config].database, F[Config].timeout),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (database.host, database.port, timeout) partially overridden in source 1 - changed: database.host (from source {o}), database.port (from source {o}) - unchanged: timeout (from source {d}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + changed = f"database.host (from source {overrides_repr}), database.port (from source {overrides_repr})" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (database.host, database.port, timeout) partially overridden in source 1\n" + f" changed: {changed}\n" + f" unchanged: timeout (from source {defaults_repr})" + ) class TestFieldGroupSameFieldNameNested: @@ -764,10 +757,9 @@ class Config: field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) - assert str(exc_info.value) == dedent(f"""\ - Config field group errors (1) - - Field group (user_name, inner.user_name) partially overridden in source 1 - changed: user_name (from source {overrides_meta!r}) - unchanged: inner.user_name (from source {defaults_meta!r}) - """) + assert str(exc_info.value) == "Config field group errors (1)" + assert str(exc_info.value.exceptions[0]) == ( + f" Field group (user_name, inner.user_name) partially overridden in source 1\n" + f" changed: user_name (from source {overrides_meta!r})\n" + f" unchanged: inner.user_name (from source {defaults_meta!r})" + ) diff --git a/tests/test_examples.py b/tests/test_examples.py index 5343ec3..fa1280e 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -9,18 +9,52 @@ example_scripts = sorted(examples_dir.rglob("*.py")) -@pytest.mark.parametrize("script_path", example_scripts, ids=lambda p: p.name) -def test_example_execution(script_path): +def 
_run_example(script_path: pathlib.Path) -> subprocess.CompletedProcess[str]: env = os.environ.copy() project_root = pathlib.Path(__file__).parent.parent / "src" env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") - result = subprocess.run( # noqa: PLW1510, S603 + return subprocess.run( # noqa: PLW1510, S603 [sys.executable, str(script_path)], capture_output=True, text=True, env=env, ) + +def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: + sources_dir = str(script_path.parent / "sources") + shared_dir = str(script_path.parents[2] / "shared") + + return template.replace("{SOURCES_DIR}", sources_dir).replace("{SHARED_DIR}", shared_dir) + + +def _normalize_trailing_whitespace(text: str) -> str: + return "\n".join(line.rstrip() for line in text.splitlines()) + + +_success_scripts = [s for s in example_scripts if not s.with_suffix(".stderr").exists()] +_error_scripts = [s for s in example_scripts if s.with_suffix(".stderr").exists()] + + +@pytest.mark.parametrize("script_path", _success_scripts, ids=lambda p: p.name) +def test_example_execution(script_path: pathlib.Path) -> None: + result = _run_example(script_path) assert result.returncode == 0, f"Script {script_path.name} Failed!\n\nError:\n{result.stderr}" + + +@pytest.mark.parametrize("script_path", _error_scripts, ids=lambda p: p.name) +def test_example_expected_error(script_path: pathlib.Path) -> None: + result = _run_example(script_path) + assert result.returncode != 0, f"Script {script_path.name} should have failed but exited with 0" + + stderr_file = script_path.with_suffix(".stderr") + expected = _resolve_stderr_placeholders(stderr_file.read_text(), script_path) + normalized_stderr = _normalize_trailing_whitespace(result.stderr) + normalized_expected = _normalize_trailing_whitespace(expected.strip()) + assert normalized_expected in normalized_stderr, ( + f"Script {script_path.name} stderr mismatch.\n\n" + f"Expected fragment:\n{expected.strip()}\n\n" + 
f"Actual stderr:\n{result.stderr}" + ) From 750f14268baf8b41741304a10db2ef123d991a96 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 15:39:22 +0300 Subject: [PATCH 28/36] refactor CustomLoader -> CustomSource --- README.md | 2 +- changes/+common-loading.refactor | 1 + changes/+concrete-sources-exported.feature | 1 + .../+fix-merge-conflict-filecontent.bugfix | 1 + changes/+merge-config-public.refactor | 1 + changes/+normalize-metadata-naming.refactor | 1 + changes/+remove-loader-protocol.removal | 1 + changes/+remove-source-mutations.refactor | 1 + changes/+rename-display-name.refactor | 1 + .../+rename-sources-loader-package.refactor | 1 + changes/+retort-factory.refactor | 1 + changes/+type-utils.refactor | 1 + changes/+unit-tests.misc | 1 + docs/advanced/custom_types.md | 56 +- docs/advanced/env-expansion.md | 2 +- docs/advanced/nested-resolve.md | 6 +- docs/api-reference.md | 613 +++++++++++++++--- docs/comparison/why-not-hydra.md | 4 +- docs/comparison/why-not-pydantic-settings.md | 10 +- docs/features/naming.md | 4 +- docs/features/validation.md | 13 +- docs/index.md | 28 +- docs/introduction.md | 46 +- .../caching/advanced_caching_disabled.py | 2 +- .../caching/advanced_caching_enabled.py | 2 +- .../advanced/configure/advanced_configure.py | 6 +- .../configure/advanced_configure_env.py | 6 +- .../advanced_configure_type_loaders.py | 2 +- .../custom_types/custom_dict_source.py | 31 + .../advanced/custom_types/custom_loader.py | 21 +- .../custom_types/custom_source_import.py | 3 + .../docs/advanced/custom_types/custom_type.py | 2 +- .../custom_types/custom_type_merge.py | 4 +- .../advanced/debug/advanced_debug_error.py | 4 +- .../advanced/debug/advanced_debug_logging.py | 4 +- .../advanced/debug/advanced_debug_report.py | 4 +- .../env_expansion/advanced_env_expansion.py | 2 +- ...vanced_env_expansion_file_path_combined.py | 2 +- .../advanced_env_expansion_file_path_dir.py | 2 +- .../advanced_env_expansion_file_path_name.py | 2 +- 
.../advanced_env_expansion_merge.py | 6 +- .../advanced_env_expansion_strict.py | 2 +- .../advanced_field_groups_expansion_error.py | 4 +- .../advanced_field_groups_multiple_error.py | 4 +- .../advanced_field_groups_nested_error.py | 4 +- .../field_groups/field_groups_basic.py | 4 +- .../advanced_merge_rules_callable.py | 4 +- .../advanced_merge_rules_conflict.py | 4 +- .../merge_rules/merging_field_append.py | 4 +- .../merging_field_append_unique.py | 4 +- .../merge_rules/merging_field_first_wins.py | 4 +- .../merge_rules/merging_field_last_wins.py | 4 +- .../merge_rules/merging_field_prepend.py | 4 +- .../merging_field_prepend_unique.py | 4 +- .../merge_rules/merging_first_found.py | 4 +- .../merge_rules/merging_skip_broken.py | 4 +- .../merging_skip_broken_per_source.py | 6 +- .../merge_rules/merging_skip_invalid.py | 2 +- .../merging_skip_invalid_per_field.py | 4 +- .../nested_resolve_docker_secrets.py | 6 +- .../nested_resolve/nested_resolve_envfile.py | 4 +- .../nested_resolve_global_flat.py | 3 +- .../nested_resolve_global_json.py | 3 +- .../nested_resolve_no_conflict.py | 3 +- .../nested_resolve/nested_resolve_override.py | 4 +- .../nested_resolve_per_field.py | 4 +- .../nested_resolve/nested_resolve_problem.py | 3 +- .../api_reference_decorator_mode.py | 2 +- .../api_reference_function_mode.py | 2 +- .../why-not-dynaconf/dynaconf_basic.py | 2 +- .../why-not-dynaconf/dynaconf_merge.py | 4 +- .../dynaconf_root_validators.py | 2 +- .../why-not-dynaconf/dynaconf_validators.py | 2 +- .../why-not-hydra/hydra_dataclass.py | 2 +- .../comparison/why-not-hydra/hydra_merge.py | 8 +- .../why-not-hydra/hydra_validators.py | 2 +- .../pydantic_settings_auto_detect.py | 26 - .../pydantic_settings_basic.py | 2 +- .../pydantic_settings_formats.py | 25 + .../pydantic_settings_merge.py | 6 +- ....json5 => pydantic_settings_formats.json5} | 0 ...ct.toml => pydantic_settings_formats.toml} | 0 ...ct.yaml => pydantic_settings_formats.yaml} | 0 
.../docs/features/masking/masking_by_name.py | 2 +- .../features/masking/masking_classic_style.py | 2 +- .../features/masking/masking_heuristic.py | 2 +- .../features/masking/masking_merge_mode.py | 4 +- .../docs/features/masking/masking_no_mask.py | 2 +- .../features/masking/masking_per_source.py | 2 +- .../features/masking/masking_secret_str.py | 2 +- .../docs/features/merging/merging_basic.py | 4 +- .../features/merging/merging_strategies.py | 8 +- .../merging/merging_strategy_first_found.py | 6 +- .../merging/merging_strategy_first_wins.py | 4 +- .../merging/merging_strategy_last_wins.py | 4 +- .../merging_strategy_raise_on_conflict.py | 4 +- .../merging/merging_tuple_shorthand.py | 4 +- .../merging_tuple_shorthand_decorator.py | 4 +- .../features/naming/naming_field_mapping.py | 2 +- .../naming/naming_field_mapping_aliases.py | 15 + .../naming/naming_field_mapping_decorator.py | 9 + .../docs/features/naming/naming_name_style.py | 2 +- .../features/naming/naming_nested_fields.py | 2 +- .../docs/features/naming/naming_prefix.py | 2 +- .../features/naming/naming_prefix_nested.py | 2 +- .../features/naming/naming_split_symbols.py | 2 +- .../validation/validation_annotated.py | 2 +- .../validation_annotated_combined.py | 15 + .../features/validation/validation_custom.py | 2 +- .../validation/validation_metadata.py | 2 +- .../validation/validation_metadata_nested.py | 26 + .../validation/validation_metadata_syntax.py | 21 + .../validation/validation_post_init.py | 2 +- .../features/validation/validation_root.py | 2 +- examples/docs/index/intro_decorator.py | 2 +- examples/docs/index/intro_function.py | 2 +- examples/docs/introduction/format_docker.py | 2 +- examples/docs/introduction/format_env.py | 2 +- examples/docs/introduction/format_ini.py | 2 +- examples/docs/introduction/format_json.py | 2 +- examples/docs/introduction/format_json5.py | 2 +- examples/docs/introduction/format_toml.py | 2 +- examples/docs/introduction/format_yaml.py | 2 +- 
.../docs/introduction/intro_decorator_file.py | 2 +- .../introduction/intro_decorator_override.py | 24 + examples/docs/introduction/intro_file_like.py | 5 +- examples/load_all_formats.py | 27 +- pyproject.toml | 2 +- src/dature/__init__.py | 20 +- src/dature/_descriptors.py | 13 + src/dature/config.py | 4 +- src/dature/errors/exceptions.py | 6 +- src/dature/errors/formatter.py | 14 +- src/dature/errors/location.py | 45 +- .../{sources_loader => }/loaders/__init__.py | 4 +- .../{sources_loader => }/loaders/base.py | 0 .../{sources_loader => }/loaders/common.py | 0 .../{sources_loader => }/loaders/json5_.py | 0 .../{sources_loader => }/loaders/toml_.py | 0 .../{sources_loader => }/loaders/yaml_.py | 0 src/dature/loading/common.py | 9 + src/dature/loading/context.py | 36 +- src/dature/loading/merge_config.py | 31 + src/dature/loading/multi.py | 146 ++--- src/dature/loading/resolver.py | 136 ---- src/dature/loading/single.py | 179 ++--- src/dature/loading/source_loading.py | 227 ++++--- src/dature/main.py | 49 +- src/dature/masking/detection.py | 23 +- src/dature/merging/deep_merge.py | 2 +- src/dature/metadata.py | 79 --- src/dature/protocols.py | 39 +- .../{sources_loader => sources}/__init__.py | 0 src/dature/sources/base.py | 473 ++++++++++++++ src/dature/sources/docker_secrets.py | 85 +++ .../{sources_loader => sources}/env_.py | 92 ++- src/dature/sources/ini_.py | 61 ++ .../{sources_loader => sources}/json5_.py | 33 +- .../{sources_loader => sources}/json_.py | 16 +- src/dature/sources/retort.py | 221 +++++++ src/dature/sources/toml_.py | 80 +++ src/dature/sources/yaml_.py | 87 +++ src/dature/sources_loader/base.py | 368 ----------- src/dature/sources_loader/docker_secrets.py | 65 -- src/dature/sources_loader/flat_key.py | 191 ------ src/dature/sources_loader/ini_.py | 77 --- src/dature/sources_loader/toml_.py | 61 -- src/dature/sources_loader/yaml_.py | 64 -- src/dature/type_utils.py | 24 + tests/errors/test_exceptions.py | 58 +- tests/errors/test_fixtures.py | 
32 +- tests/errors/test_location.py | 48 +- tests/expansion/test_expand_file_path.py | 10 +- .../{sources_loader => }/loaders/__init__.py | 0 .../{sources_loader => }/loaders/test_base.py | 40 +- .../loaders/test_common.py | 78 ++- .../loaders/test_json5_.py | 2 +- .../loaders/test_toml_.py | 2 +- .../loaders/test_yaml_.py | 2 +- tests/loading/test_context.py | 147 ++++- tests/loading/test_field_merges.py | 166 ++--- tests/loading/test_loading_common.py | 38 ++ tests/loading/test_multi.py | 144 ++-- tests/loading/test_resolver.py | 188 ------ tests/loading/test_single.py | 112 ++-- tests/loading/test_skip_invalid_fields.py | 62 +- tests/loading/test_source_loading.py | 261 +++++++- tests/masking/test_detection.py | 55 +- tests/masking/test_masking.py | 36 +- tests/merging/test_field_group.py | 106 +-- tests/sources/__init__.py | 1 + tests/{sources_loader => sources}/checker.py | 0 .../{sources_loader => sources}/test_base.py | 394 ++++++++--- tests/sources/test_docker_secrets.py | 141 ++++ .../{sources_loader => sources}/test_env_.py | 157 ++++- .../{sources_loader => sources}/test_ini_.py | 54 +- .../test_json5_.py | 37 +- .../{sources_loader => sources}/test_json_.py | 51 +- .../test_nested_resolve.py | 128 ++-- tests/sources/test_retort.py | 280 ++++++++ .../test_toml10_.py | 37 +- .../test_toml11_.py | 37 +- .../test_yaml11_.py | 37 +- .../test_yaml12_.py | 37 +- tests/sources_loader/__init__.py | 1 - tests/sources_loader/test_docker_secrets.py | 82 --- tests/test_custom_loader.py | 21 +- tests/test_load_report.py | 46 +- tests/test_main.py | 62 +- tests/test_type_loaders.py | 14 +- tests/test_type_utils.py | 58 ++ tests/validators/test_complex.py | 20 +- tests/validators/test_custom_validator.py | 22 +- tests/validators/test_metadata_validators.py | 38 +- tests/validators/test_number.py | 22 +- .../validators/test_post_init_and_property.py | 28 +- tests/validators/test_root_validator.py | 16 +- tests/validators/test_sequence.py | 18 +- 
tests/validators/test_string.py | 18 +- tests/validators/test_validators_base.py | 142 ++++ 220 files changed, 4930 insertions(+), 3042 deletions(-) create mode 100644 changes/+common-loading.refactor create mode 100644 changes/+concrete-sources-exported.feature create mode 100644 changes/+fix-merge-conflict-filecontent.bugfix create mode 100644 changes/+merge-config-public.refactor create mode 100644 changes/+normalize-metadata-naming.refactor create mode 100644 changes/+remove-loader-protocol.removal create mode 100644 changes/+remove-source-mutations.refactor create mode 100644 changes/+rename-display-name.refactor create mode 100644 changes/+rename-sources-loader-package.refactor create mode 100644 changes/+retort-factory.refactor create mode 100644 changes/+type-utils.refactor create mode 100644 changes/+unit-tests.misc create mode 100644 examples/docs/advanced/custom_types/custom_dict_source.py create mode 100644 examples/docs/advanced/custom_types/custom_source_import.py delete mode 100644 examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py create mode 100644 examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py rename examples/docs/comparison/why-not-pydantic-settings/sources/{pydantic_settings_auto_detect.json5 => pydantic_settings_formats.json5} (100%) rename examples/docs/comparison/why-not-pydantic-settings/sources/{pydantic_settings_auto_detect.toml => pydantic_settings_formats.toml} (100%) rename examples/docs/comparison/why-not-pydantic-settings/sources/{pydantic_settings_auto_detect.yaml => pydantic_settings_formats.yaml} (100%) create mode 100644 examples/docs/features/naming/naming_field_mapping_aliases.py create mode 100644 examples/docs/features/naming/naming_field_mapping_decorator.py create mode 100644 examples/docs/features/validation/validation_annotated_combined.py create mode 100644 examples/docs/features/validation/validation_metadata_nested.py create mode 100644 
examples/docs/features/validation/validation_metadata_syntax.py create mode 100644 examples/docs/introduction/intro_decorator_override.py create mode 100644 src/dature/_descriptors.py rename src/dature/{sources_loader => }/loaders/__init__.py (92%) rename src/dature/{sources_loader => }/loaders/base.py (100%) rename src/dature/{sources_loader => }/loaders/common.py (100%) rename src/dature/{sources_loader => }/loaders/json5_.py (100%) rename src/dature/{sources_loader => }/loaders/toml_.py (100%) rename src/dature/{sources_loader => }/loaders/yaml_.py (100%) create mode 100644 src/dature/loading/common.py create mode 100644 src/dature/loading/merge_config.py delete mode 100644 src/dature/loading/resolver.py delete mode 100644 src/dature/metadata.py rename src/dature/{sources_loader => sources}/__init__.py (100%) create mode 100644 src/dature/sources/base.py create mode 100644 src/dature/sources/docker_secrets.py rename src/dature/{sources_loader => sources}/env_.py (59%) create mode 100644 src/dature/sources/ini_.py rename src/dature/{sources_loader => sources}/json5_.py (56%) rename src/dature/{sources_loader => sources}/json_.py (74%) create mode 100644 src/dature/sources/retort.py create mode 100644 src/dature/sources/toml_.py create mode 100644 src/dature/sources/yaml_.py delete mode 100644 src/dature/sources_loader/base.py delete mode 100644 src/dature/sources_loader/docker_secrets.py delete mode 100644 src/dature/sources_loader/flat_key.py delete mode 100644 src/dature/sources_loader/ini_.py delete mode 100644 src/dature/sources_loader/toml_.py delete mode 100644 src/dature/sources_loader/yaml_.py create mode 100644 src/dature/type_utils.py rename tests/{sources_loader => }/loaders/__init__.py (100%) rename tests/{sources_loader => }/loaders/test_base.py (82%) rename tests/{sources_loader => }/loaders/test_common.py (68%) rename tests/{sources_loader => }/loaders/test_json5_.py (84%) rename tests/{sources_loader => }/loaders/test_toml_.py (84%) rename 
tests/{sources_loader => }/loaders/test_yaml_.py (84%) create mode 100644 tests/loading/test_loading_common.py delete mode 100644 tests/loading/test_resolver.py create mode 100644 tests/sources/__init__.py rename tests/{sources_loader => sources}/checker.py (100%) rename tests/{sources_loader => sources}/test_base.py (56%) create mode 100644 tests/sources/test_docker_secrets.py rename tests/{sources_loader => sources}/test_env_.py (68%) rename tests/{sources_loader => sources}/test_ini_.py (73%) rename tests/{sources_loader => sources}/test_json5_.py (80%) rename tests/{sources_loader => sources}/test_json_.py (75%) rename tests/{sources_loader => sources}/test_nested_resolve.py (87%) create mode 100644 tests/sources/test_retort.py rename tests/{sources_loader => sources}/test_toml10_.py (80%) rename tests/{sources_loader => sources}/test_toml11_.py (80%) rename tests/{sources_loader => sources}/test_yaml11_.py (81%) rename tests/{sources_loader => sources}/test_yaml12_.py (81%) delete mode 100644 tests/sources_loader/__init__.py delete mode 100644 tests/sources_loader/test_docker_secrets.py create mode 100644 tests/test_type_utils.py create mode 100644 tests/validators/test_validators_base.py diff --git a/README.md b/README.md index e0fb9e7..6e69dd8 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ class Config: port: int debug: bool = False -config = dature.load(dature.Source(file="config.yaml"), Config) +config = dature.load(dature.Yaml12Source(file="config.yaml"), Config) ``` ## Key Features diff --git a/changes/+common-loading.refactor b/changes/+common-loading.refactor new file mode 100644 index 0000000..84253d2 --- /dev/null +++ b/changes/+common-loading.refactor @@ -0,0 +1 @@ +Extracted shared ``resolve_mask_secrets`` logic from ``single.py`` and ``multi.py`` into ``loading/common.py``. 
\ No newline at end of file diff --git a/changes/+concrete-sources-exported.feature b/changes/+concrete-sources-exported.feature new file mode 100644 index 0000000..cfae81b --- /dev/null +++ b/changes/+concrete-sources-exported.feature @@ -0,0 +1 @@ +All concrete source classes (``EnvSource``, ``JsonSource``, ``Yaml11Source``, ``Yaml12Source``, ``Toml10Source``, ``Toml11Source``, ``IniSource``, ``Json5Source``, ``EnvFileSource``, ``DockerSecretsSource``, ``FileSource``) are now exported from ``dature`` directly. diff --git a/changes/+fix-merge-conflict-filecontent.bugfix b/changes/+fix-merge-conflict-filecontent.bugfix new file mode 100644 index 0000000..2b12276 --- /dev/null +++ b/changes/+fix-merge-conflict-filecontent.bugfix @@ -0,0 +1 @@ +Fixed attribute name typo (``filecontent`` → ``file_content``) in ``raise_on_conflict`` merge strategy that caused ``AttributeError`` when conflicting fields were detected. diff --git a/changes/+merge-config-public.refactor b/changes/+merge-config-public.refactor new file mode 100644 index 0000000..62747cb --- /dev/null +++ b/changes/+merge-config-public.refactor @@ -0,0 +1 @@ +Renamed ``_MergeConfig`` to ``MergeConfig``. \ No newline at end of file diff --git a/changes/+normalize-metadata-naming.refactor b/changes/+normalize-metadata-naming.refactor new file mode 100644 index 0000000..caaa03e --- /dev/null +++ b/changes/+normalize-metadata-naming.refactor @@ -0,0 +1 @@ +Renamed ``metadata``/``source_meta`` parameters to ``source`` throughout the loading module. \ No newline at end of file diff --git a/changes/+remove-loader-protocol.removal b/changes/+remove-loader-protocol.removal new file mode 100644 index 0000000..f298562 --- /dev/null +++ b/changes/+remove-loader-protocol.removal @@ -0,0 +1 @@ +Removed ``LoaderProtocol`` from ``dature.protocols``. Source classes now handle loading internally. 
diff --git a/changes/+remove-source-mutations.refactor b/changes/+remove-source-mutations.refactor new file mode 100644 index 0000000..79b8816 --- /dev/null +++ b/changes/+remove-source-mutations.refactor @@ -0,0 +1 @@ +Source objects are no longer mutated during ``load()``. All parameter resolution happens via ``resolve_source_params()`` in ``source_loading.py``. \ No newline at end of file diff --git a/changes/+rename-display-name.refactor b/changes/+rename-display-name.refactor new file mode 100644 index 0000000..14b6ee8 --- /dev/null +++ b/changes/+rename-display-name.refactor @@ -0,0 +1 @@ +Renamed ``display_name`` to ``format_name`` and ``display_label`` to ``location_label`` across all source classes and error types. \ No newline at end of file diff --git a/changes/+rename-sources-loader-package.refactor b/changes/+rename-sources-loader-package.refactor new file mode 100644 index 0000000..5b0e656 --- /dev/null +++ b/changes/+rename-sources-loader-package.refactor @@ -0,0 +1 @@ +Renamed internal package ``sources_loader`` to ``sources`` (source classes) and ``loaders`` (type conversion). All public imports from ``dature`` are unchanged. diff --git a/changes/+retort-factory.refactor b/changes/+retort-factory.refactor new file mode 100644 index 0000000..73db551 --- /dev/null +++ b/changes/+retort-factory.refactor @@ -0,0 +1 @@ +Extracted retort factory methods from ``Source`` into free functions in ``sources/retort.py``. ``transform_to_dataclass`` is now a free function. \ No newline at end of file diff --git a/changes/+type-utils.refactor b/changes/+type-utils.refactor new file mode 100644 index 0000000..73d7cf6 --- /dev/null +++ b/changes/+type-utils.refactor @@ -0,0 +1 @@ +Deduplicated ``_find_nested_dataclasses`` into shared ``type_utils.find_nested_dataclasses``. 
\ No newline at end of file diff --git a/changes/+unit-tests.misc b/changes/+unit-tests.misc new file mode 100644 index 0000000..0bf969d --- /dev/null +++ b/changes/+unit-tests.misc @@ -0,0 +1 @@ +Added unit tests for ``loading/context``, ``loading/source_loading``, ``masking/detection``, ``validators/base``, ``loaders/common``, ``loaders/base``. diff --git a/docs/advanced/custom_types.md b/docs/advanced/custom_types.md index 6202eae..7abda02 100644 --- a/docs/advanced/custom_types.md +++ b/docs/advanced/custom_types.md @@ -42,12 +42,44 @@ Pass `type_loaders` as a `dict[type, Callable]` mapping types to conversion func When both per-source and global `type_loaders` are set, they merge — per-source loaders take priority. -## Custom Loaders +## Custom Source Classes -For formats that dature doesn't support out of the box, subclass `BaseLoader` and implement two things: +For formats that dature doesn't support out of the box, you can create your own source by subclassing one of the base classes from `dature.sources.base`: -1. `display_name` — a class-level string shown in error messages -2. 
`_load(path)` — returns `JSONValue` (a nested dict) from the source +### Choosing a base class + +| Base class | Use when | You implement | You get for free | +|------------|----------|---------------|------------------| +| [`Source`](../api-reference.md#source) | Non-file data (API, database, custom protocol) | `format_name`, `_load() -> JSONValue` | Prefix filtering, env var expansion, type coercion, validation, merge support | +| [`FileSource`](../api-reference.md#filesourcesource) | File-based format (XML, CSV, HCL, …) | `format_name`, `_load_file(path: FileOrStream) -> JSONValue` | Everything from `Source` + `file` parameter, stream support, `file_display()`, `file_path_for_errors()`, `__repr__` | +| [`FlatKeySource`](../api-reference.md#flatkeysourcesource) | Flat key=value data (custom env store, Consul KV, …) | `format_name`, `_load() -> JSONValue` (flat `dict[str, str]`) | Everything from `Source` + `split_symbols` nesting, `nested_resolve`, automatic string→type parsing (`int`, `bool`, `date`, …) | + +All base classes are in `dature.sources.base`: + +```python +--8<-- "examples/docs/advanced/custom_types/custom_source_import.py" +``` + +### Minimal interface + +Every custom source needs: + +1. **`format_name`** — class-level string shown in `__repr__` and error messages (e.g. `"xml"`, `"consul"`) +2. **A load method** — `_load()` for `Source`/`FlatKeySource`, or `_load_file(path)` for `FileSource`. Must return `JSONValue` (a nested dict). + +### Optional overrides + +| Method | Default | Override when | +|--------|---------|---------------| +| `additional_loaders()` | `[]` (FileSource) or string-value loaders (FlatKeySource) | Your format stores all values as strings and needs extra type parsers (e.g. `bool`, `float`). | +| `file_display()` | `None` | Your source has a meaningful display path (shown in logs and errors). | +| `file_path_for_errors()` | `None` | Your source points to a file on disk (used in error messages). 
| +| `resolve_location(...)` | Empty `SourceLocation` | You want errors to show line numbers or variable names from your source. | +| `location_label` | inherited | Change the label in error messages (e.g. `"FILE"`, `"ENV"`, `"API"`). | + +### Example: FileSource subclass + +The most common case — reading a file format: ```python --8<-- "examples/docs/advanced/custom_types/custom_loader.py" @@ -57,4 +89,18 @@ For formats that dature doesn't support out of the box, subclass `BaseLoader` an --8<-- "examples/docs/advanced/custom_types/sources/custom_loader.xml" ``` -Pass your custom loader via the `loader` parameter in `Source`. All built-in features (type coercion, validation, prefix extraction, ENV expansion) work automatically. +`FileSource` handles the `file` parameter, path expansion, and stream detection. Your `_load_file()` receives a `Path` or file-like object and returns a dict. + +### Example: Source subclass (non-file) + +For sources that don't read files — e.g. an API, a database, or an in-memory dict: + +```python +--8<-- "examples/docs/advanced/custom_types/custom_dict_source.py" +``` + +### Tips + +- All built-in features (type coercion, validation, prefix extraction, ENV expansion, merge support) work automatically with any custom source. +- Override `additional_loaders()` to return `_string_value_loaders()` from `dature.sources.base` if your format stores everything as strings (like INI or ENV). +- Pass your custom source to `dature.load()` the same way as any built-in source. diff --git a/docs/advanced/env-expansion.md b/docs/advanced/env-expansion.md index 687e461..76bb93c 100644 --- a/docs/advanced/env-expansion.md +++ b/docs/advanced/env-expansion.md @@ -131,7 +131,7 @@ The `${VAR:-default}` fallback syntax works in all modes. ## File Path Expansion -Environment variables in `dature.Source(file=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `dature.Source` creation time. 
+Environment variables in the `file=...` parameter of Source subclasses are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at Source creation time. This works for both directory paths and file names: diff --git a/docs/advanced/nested-resolve.md b/docs/advanced/nested-resolve.md index cba3e05..04b2d15 100644 --- a/docs/advanced/nested-resolve.md +++ b/docs/advanced/nested-resolve.md @@ -1,6 +1,6 @@ # Nested Resolve -Flat-key loaders (ENV, `.env` file, Docker secrets) store nested dataclasses as either a single JSON string or as separate flat keys: +Flat-key sources (ENV, `.env` file, Docker secrets) store nested dataclasses as either a single JSON string or as separate flat keys: ``` # JSON form @@ -65,9 +65,9 @@ When both `nested_resolve_strategy` and `nested_resolve` are set, per-field take --8<-- "examples/docs/advanced/nested_resolve/nested_resolve_override.py" ``` -## All Flat-Key Loaders +## All Flat-Key Sources -The mechanism works identically across all flat-key loaders: +The mechanism works identically across all flat-key sources: === "ENV" diff --git a/docs/api-reference.md b/docs/api-reference.md index 887bac8..b64be03 100644 --- a/docs/api-reference.md +++ b/docs/api-reference.md @@ -24,55 +24,137 @@ Main entry point. Two calling patterns: **Parameters:** -| Parameter | Type | Description | -|-----------|------|-------------| -| `*sources` | `Source` | One or more source descriptors. No sources → `Source()` (env vars). Multiple sources → merge mode. | -| `schema` | `type[T] \| None` | Target dataclass. If provided → function mode. If `None` → decorator mode. | -| `cache` | `bool \| None` | Enable caching in decorator mode. Default from `configure()`. | -| `debug` | `bool \| None` | Collect `LoadReport`. Default from `configure()`. | -| `strategy` | `str` | Merge strategy: `"last_wins"` (default), `"first_wins"`, `"first_found"`, `"raise_on_conflict"`. Only used with multiple sources. 
| -| `field_merges` | `dict` | Per-field merge strategy overrides. Maps `F[Config].field` to a strategy string or callable. | -| `field_groups` | `tuple[tuple[...], ...]` | Groups of fields that must change together. Each group is a tuple of `F[Config].field` references. | -| `skip_broken_sources` | `bool` | Skip sources that fail to load (default `False`). | -| `skip_invalid_fields` | `bool` | Skip fields that fail validation (default `False`). | -| `expand_env_vars` | `ExpandEnvVarsMode` | Env var expansion mode for all sources (default `"default"`). | -| `secret_field_names` | `tuple[str, ...] \| None` | Extra secret field name patterns. | -| `mask_secrets` | `bool \| None` | Enable/disable secret masking globally. | -| `type_loaders` | `dict[type, Callable] \| None` | Custom type loaders mapping types to conversion functions. | -| `nested_resolve_strategy` | `NestedResolveStrategy \| None` | Default priority for JSON vs flat keys. See [Nested Resolve](advanced/nested-resolve.md). | -| `nested_resolve` | `NestedResolve \| None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). | +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `*sources` | `Source` | — | One or more source descriptors (e.g. `JsonSource(file=...)`, `EnvSource()`). Multiple sources → merge mode. | +| `schema` | `type[T] \| None` | `None` | Target dataclass. If provided → function mode. If `None` → decorator mode. | +| `cache` | `bool \| None` | `None` | Enable caching in decorator mode. Default from `configure()`. Ignored in function mode. | +| `debug` | `bool \| None` | `None` | Collect `LoadReport` on the result instance. Default from `configure()`. Retrieve with `get_load_report()`. | +| `strategy` | `MergeStrategyName` | `"last_wins"` | Merge strategy. Only used with multiple sources. See [Merge Strategies](#merge-strategies). 
| +| `field_merges` | `FieldMergeMap \| None` | `None` | Per-field merge strategy overrides. Maps `F[Config].field` to a strategy string or callable. See [Field Merge Strategies](#field-merge-strategies). | +| `field_groups` | `tuple[FieldGroupTuple, ...]` | `()` | Groups of fields that must change together. Each group is a tuple of `F[Config].field` references. | +| `skip_broken_sources` | `bool` | `False` | Skip sources that fail to load instead of raising. | +| `skip_invalid_fields` | `bool` | `False` | Skip fields that fail validation instead of raising. | +| `expand_env_vars` | `ExpandEnvVarsMode \| None` | `None` | Env var expansion mode applied to all sources. Source-level setting takes priority. | +| `secret_field_names` | `tuple[str, ...] \| None` | `None` | Extra secret field name patterns for masking. | +| `mask_secrets` | `bool \| None` | `None` | Enable/disable secret masking globally. | +| `type_loaders` | `TypeLoaderMap \| None` | `None` | Custom type loaders mapping types to conversion functions. Merged with source-level and global loaders. | +| `nested_resolve_strategy` | `NestedResolveStrategy \| None` | `None` | Default priority for JSON vs flat keys in `FlatKeySource`. See [Nested Resolve](advanced/nested-resolve.md). | +| `nested_resolve` | `NestedResolve \| None` | `None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). | + +**Returns:** + +- **Function mode** (`schema` provided): an instance of `schema` populated from the sources. +- **Decorator mode** (`schema=None`): a decorator that adds `load()` logic to the decorated dataclass. + +**Raises:** + +- `TypeError` — no sources passed, or a positional argument is not a `Source` instance. +- `DatureConfigError` — aggregated field loading errors. +- `MergeConflictError` — conflicting values with `strategy="raise_on_conflict"`. +- `FieldGroupError` — field group constraint violation. 
+- `EnvVarExpandError` — missing env vars with `expand_env_vars="strict"`. --- ### `Source` ```python ---8<-- "src/dature/metadata.py:load-metadata" +--8<-- "src/dature/sources/base.py:load-metadata" +``` + +Abstract base class for all sources. See [Introduction — Source Reference](introduction.md#source-reference) for parameter descriptions. + +**Parameters:** + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `prefix` | `DotSeparatedPath \| None` | `None` | Filter ENV keys (`"APP_"`) or extract nested object (`"app.database"`). | +| `name_style` | `NameStyle \| None` | `None` | Naming convention mapping: `"lower_snake"`, `"upper_snake"`, `"lower_camel"`, `"upper_camel"`, `"lower_kebab"`, `"upper_kebab"`. | +| `field_mapping` | `FieldMapping \| None` | `None` | Explicit field renaming with `F` objects. | +| `root_validators` | `tuple[ValidatorProtocol, ...] \| None` | `None` | Post-load validation of the entire object. | +| `validators` | `FieldValidators \| None` | `None` | Per-field validators via `Annotated` metadata or explicit mapping. | +| `expand_env_vars` | `ExpandEnvVarsMode \| None` | `None` | ENV variable expansion: `"disabled"`, `"default"`, `"empty"`, `"strict"`. | +| `skip_if_broken` | `bool \| None` | `None` | Skip this source if it fails to load. | +| `skip_if_invalid` | `bool \| tuple[FieldPath, ...] \| None` | `None` | Skip invalid fields from this source. `True` for all, or a tuple of specific fields. | +| `secret_field_names` | `tuple[str, ...] \| None` | `None` | Extra secret name patterns for masking. | +| `mask_secrets` | `bool \| None` | `None` | Enable/disable secret masking for this source. | +| `type_loaders` | `TypeLoaderMap \| None` | `None` | Custom type converters `{type: callable}` for this source. 
| + +**Public methods:** + +| Method | Return type | Description | +|--------|-------------|-------------| +| `load_raw()` | `LoadRawResult` | Load raw data, apply prefix filtering and env var expansion. Returns `LoadRawResult(data, nested_conflicts)`. | +| `transform_to_dataclass(data, schema)` | `T` | Convert a `JSONValue` dict into a dataclass instance using adaptix. Caches the retort per schema type. | +| `create_retort()` | `Retort` | Build an adaptix `Retort` with base loaders, name mapping, and type loaders. | +| `create_validating_retort(schema)` | `Retort` | Like `create_retort()`, plus field and root validators extracted from `schema`. | +| `create_probe_retort()` | `Retort` | Retort that skips missing fields — used internally for partial loading in merge mode. | +| `file_display()` | `str \| None` | Human-readable file identifier for logging. Returns `None` by default. | +| `file_path_for_errors()` | `Path \| None` | File path used in error messages. Returns `None` by default. | +| `resolve_location(...)` | `list[SourceLocation]` | Locate a field in the source content for error reporting. Class method. Returns `SourceLocation` with line range, env var name, etc. | + +### `FileSource(Source)` + +Base class for file-based sources (`JsonSource`, `Yaml11Source`, `Toml10Source`, `IniSource`, etc.). + +```python +--8<-- "src/dature/sources/base.py:file-source" +``` + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `file` | `FileLike \| FilePath \| None` | `None` | Path to the config file (`str`, `Path`), or an open file-like object (`StringIO`, `BytesIO`, any `TextIOBase`/`BufferedIOBase`/`RawIOBase`). If `None`, the path defaults to the current directory. | + +**Overridden methods:** + +| Method | Behavior | +|--------|----------| +| `file_display()` | Returns the path as string, `""` for file-like objects, or `None` when `file=None`. 
| +| `file_path_for_errors()` | Returns `Path` for string/Path inputs, `None` for streams or `None`. | +| `__repr__()` | Returns `"format_name 'file_path'"` or just `"format_name"`. | + +### `FlatKeySource(Source)` + +Base class for flat key=value sources (`EnvSource`, `EnvFileSource`, `DockerSecretsSource`). + +```python +--8<-- "src/dature/sources/base.py:flat-key-source" ``` -See [Introduction — Source Reference](introduction.md#source-reference) for parameter descriptions. +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `split_symbols` | `str` | `"__"` | Separator for nested key splitting. `APP__DB__HOST` → `{"db": {"host": ...}}` | +| `nested_resolve_strategy` | `NestedResolveStrategy` | `"flat"` | Default priority when both flat and JSON keys exist: `"flat"` or `"json"`. See [Nested Resolve](advanced/nested-resolve.md). | +| `nested_resolve` | `NestedResolve \| None` | `None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). | + +**Behavior:** All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str | None`. Nested JSON in values (`[...]`, `{...}`) is inferred. `load_raw()` returns `LoadRawResult` with `nested_conflicts` populated when both flat and JSON keys exist for the same field. --- ### Merge Strategies +Strategies for resolving field values across multiple sources. Set via `strategy` parameter of `load()`. + | Strategy | Behavior | |----------|----------| -| `"last_wins"` | Last source overrides (default) | -| `"first_wins"` | First source wins | -| `"first_found"` | Uses the first source that loads successfully | -| `"raise_on_conflict"` | Raises `MergeConflictError` on conflicting values | +| `"last_wins"` | Last source overrides (default). | +| `"first_wins"` | First source wins. | +| `"first_found"` | Uses the first source that loads successfully. 
| +| `"raise_on_conflict"` | Raises `MergeConflictError` on conflicting values. | ### Field Merge Strategies +Per-field overrides via `field_merges` parameter. Maps `F[Config].field` to a strategy name or a `Callable[[list[JSONValue]], JSONValue]`. + | Strategy | Behavior | |----------|----------| -| `"first_wins"` | Keep the value from the first source | -| `"last_wins"` | Keep the value from the last source | -| `"append"` | Concatenate lists: `base + override` | -| `"append_unique"` | Concatenate lists, removing duplicates | -| `"prepend"` | Concatenate lists: `override + base` | -| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates | +| `"first_wins"` | Keep the value from the first source. | +| `"last_wins"` | Keep the value from the last source. | +| `"append"` | Concatenate lists: `base + override`. | +| `"append_unique"` | Concatenate lists, removing duplicates. | +| `"prepend"` | Concatenate lists: `override + base`. | +| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates. | --- @@ -80,12 +162,28 @@ See [Introduction — Source Reference](introduction.md#source-reference) for pa ### `F` -Factory for building field paths with validation: +Factory for building type-safe field paths. Used for `field_mapping`, `field_merges`, `field_groups`, `validators`, `skip_if_invalid`, and `nested_resolve`. ```python --8<-- "examples/docs/api_reference/api_reference_field_path.py" ``` +### `FieldPath` + +Immutable dataclass (`frozen=True, slots=True`) created via `F[Config].field_name`. + +| Field | Type | Description | +|-------|------|-------------| +| `owner` | `type \| str` | The dataclass type (or its string name) this path belongs to. | +| `parts` | `tuple[str, ...]` | Sequence of field names forming the path. | + +**Methods:** + +| Method | Return type | Description | +|--------|-------------|-------------| +| `__getattr__(name)` | `FieldPath` | Chain to nested fields. 
Validates that the field exists on the owner dataclass. Returns a new `FieldPath` with extended parts. | +| `as_path()` | `str` | Dot-separated string representation (e.g. `"database.host"`). Raises `ValueError` if parts is empty. | + --- ## Report @@ -96,7 +194,11 @@ Factory for building field paths with validation: --8<-- "src/dature/load_report.py:get-load-report" ``` -Returns the `LoadReport` attached to a loaded instance (or type on error). Returns `None` and emits a warning if `debug=True` was not passed. +Retrieves the `LoadReport` attached to a loaded instance. Returns `None` and emits a warning if `debug=True` was not passed to `load()`. + +| Parameter | Type | Description | +|-----------|------|-------------| +| `instance` | `Any` | The loaded dataclass instance (or the type in decorator mode on error). | ### `LoadReport`, `SourceEntry`, `FieldOrigin` @@ -104,6 +206,41 @@ Returns the `LoadReport` attached to a loaded instance (or type on error). Retur --8<-- "src/dature/load_report.py:report-structure" ``` +#### `SourceEntry` + +Frozen dataclass describing one source in the load pipeline. + +| Field | Type | Description | +|-------|------|-------------| +| `index` | `int` | Source position (0-based) in the `load()` call. | +| `file_path` | `str \| None` | File path string, or `None` for non-file sources. | +| `loader_type` | `str` | Source class name (e.g. `"JsonSource"`, `"EnvSource"`). | +| `raw_data` | `JSONValue` | Raw data loaded from this source before merging. | + +#### `FieldOrigin` + +Frozen dataclass describing which source provided a specific field value. + +| Field | Type | Description | +|-------|------|-------------| +| `key` | `str` | Dot-separated field path (e.g. `"database.host"`). | +| `value` | `JSONValue` | The value that was used. | +| `source_index` | `int` | Index of the winning source. | +| `source_file` | `str \| None` | File path of the winning source. | +| `source_loader_type` | `str` | Class name of the winning source. 
| + +#### `LoadReport` + +Frozen dataclass with full load diagnostics. + +| Field | Type | Description | +|-------|------|-------------| +| `dataclass_name` | `str` | Name of the target dataclass. | +| `strategy` | `MergeStrategyEnum \| None` | Merge strategy used, or `None` for single source. | +| `sources` | `tuple[SourceEntry, ...]` | All sources in order. | +| `field_origins` | `tuple[FieldOrigin, ...]` | Per-field origin info, sorted by key. | +| `merged_data` | `JSONValue` | Final merged data dict before dataclass conversion. | + --- ## Configuration @@ -114,7 +251,16 @@ Returns the `LoadReport` attached to a loaded instance (or type on error). Retur --8<-- "src/dature/config.py:configure" ``` -Set global configuration. Pass dicts to override specific options: `masking={"mask": "***"}`, `loading={"debug": True}`. `None` parameters keep their current values. Empty dict resets the group to defaults. +Set global configuration. Pass dicts to override specific options: `masking={"mask": "***"}`, `loading={"debug": True}`. `None` parameters keep their current values. Empty dict `{}` resets the group to defaults. + +Global config is also loaded from `DATURE_*` environment variables on first access. + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `masking` | `MaskingOptions \| None` | `None` | Secret masking options. | +| `error_display` | `ErrorDisplayOptions \| None` | `None` | Error formatting options. | +| `loading` | `LoadingOptions \| None` | `None` | Loading behavior options. | +| `type_loaders` | `TypeLoaderMap \| None` | `None` | Global custom type loaders `{type: callable}`. Merged with source-level loaders (source takes priority). | ### `MaskingConfig` @@ -122,48 +268,82 @@ Set global configuration. Pass dicts to override specific options: `masking={"ma --8<-- "src/dature/config.py:masking-config" ``` +Frozen dataclass controlling secret masking behavior. 
+ +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `mask` | `str` | `""` | Replacement string for masked values. Must be non-empty. | +| `visible_prefix` | `int` | `0` | Number of leading characters to keep visible. | +| `visible_suffix` | `int` | `0` | Number of trailing characters to keep visible. | +| `min_heuristic_length` | `int` | `8` | Minimum string length for heuristic-based detection. | +| `heuristic_threshold` | `float` | `0.5` | Entropy threshold for heuristic secret detection. | +| `secret_field_names` | `tuple[str, ...]` | `("password", "passwd", ...)` | Field name patterns that trigger masking. | +| `mask_secrets` | `bool` | `True` | Global on/off switch for masking. | + ### `ErrorDisplayConfig` ```python --8<-- "src/dature/config.py:error-display-config" ``` +Frozen dataclass controlling error message formatting. + +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `max_visible_lines` | `int` | `3` | Maximum lines of source content shown in errors. | +| `max_line_length` | `int` | `80` | Maximum characters per line before truncation. | + ### `LoadingConfig` ```python --8<-- "src/dature/config.py:loading-config" ``` +Frozen dataclass controlling load behavior defaults. + +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `cache` | `bool` | `True` | Default caching in decorator mode. | +| `debug` | `bool` | `False` | Default debug mode (collect `LoadReport`). | +| `nested_resolve_strategy` | `NestedResolveStrategy` | `"flat"` | Default nested resolve strategy for `FlatKeySource`. | + --- ## Validators -All validators are frozen dataclasses implementing `get_validator_func()` and `get_error_message()`. 
+All validators are frozen dataclasses (`frozen=True, slots=True`) with two methods: + +| Method | Return type | Description | +|--------|-------------|-------------| +| `get_validator_func()` | `Callable` | Returns a function that takes the field value and returns `bool`. | +| `get_error_message()` | `str` | Returns the formatted error message. | + +All validators accept an optional `error_message` parameter to override the default message. Use `{value}` / `{pattern}` placeholders in custom messages. ### Number Validators (`dature.validators.number`) -| Class | Parameter | Description | -|-------|-----------|-------------| -| `Gt` | `value: int \| float` | Greater than | -| `Ge` | `value: int \| float` | Greater than or equal | -| `Lt` | `value: int \| float` | Less than | -| `Le` | `value: int \| float` | Less than or equal | +| Class | Parameter | Default message | Description | +|-------|-----------|-----------------|-------------| +| `Gt` | `value: int \| float` | `"Value must be greater than {value}"` | Strictly greater than. | +| `Ge` | `value: int \| float` | `"Value must be greater than or equal to {value}"` | Greater than or equal. | +| `Lt` | `value: int \| float` | `"Value must be less than {value}"` | Strictly less than. | +| `Le` | `value: int \| float` | `"Value must be less than or equal to {value}"` | Less than or equal. | ### String Validators (`dature.validators.string`) -| Class | Parameter | Description | -|-------|-----------|-------------| -| `MinLength` | `value: int` | Minimum length | -| `MaxLength` | `value: int` | Maximum length | -| `RegexPattern` | `pattern: str` | Regex match | +| Class | Parameter | Default message | Description | +|-------|-----------|-----------------|-------------| +| `MinLength` | `value: int` | `"Value must have at least {value} characters"` | Minimum string length. | +| `MaxLength` | `value: int` | `"Value must have at most {value} characters"` | Maximum string length. 
| +| `RegexPattern` | `pattern: str` | `"Value must match pattern '{pattern}'"` | Full regex match (`re.match`). | ### Sequence Validators (`dature.validators.sequence`) -| Class | Parameter | Description | -|-------|-----------|-------------| -| `MinItems` | `value: int` | Minimum items | -| `MaxItems` | `value: int` | Maximum items | -| `UniqueItems` | — | All items unique | +| Class | Parameter | Default message | Description | +|-------|-----------|-----------------|-------------| +| `MinItems` | `value: int` | `"Value must have at least {value} items"` | Minimum number of items. | +| `MaxItems` | `value: int` | `"Value must have at most {value} items"` | Maximum number of items. | +| `UniqueItems` | — | `"Value must contain unique items"` | All items must be unique. | ### Root Validator (`dature.validators.root`) @@ -171,51 +351,313 @@ All validators are frozen dataclasses implementing `get_validator_func()` and `g --8<-- "src/dature/validators/root.py:root-validator" ``` +| Field | Type | Default | Description | +|-------|------|---------|-------------| +| `func` | `Callable[..., bool]` | — | Validation function. Receives the loaded dataclass instance, returns `True` if valid. | +| `error_message` | `str` | `"Root validation failed"` | Error message on failure. | + +**Methods:** `get_validator_func()` → returns `func`. `get_error_message()` → returns `error_message`. + --- ## Special Types +### `SecretStr` + +Module: `dature.fields.secret_str`. A string wrapper that hides its value in `str()` and `repr()`. + +| Method / Property | Return type | Description | +|-------------------|-------------|-------------| +| `SecretStr(secret_value)` | — | Constructor. Takes the raw secret string. | +| `get_secret_value()` | `str` | Returns the actual secret value. | +| `__str__()` | `str` | Returns `"**********"`. | +| `__repr__()` | `str` | Returns `"SecretStr('**********')"`. | +| `__len__()` | `int` | Length of the underlying secret. 
| +| `__eq__()`, `__hash__()` | — | Equality and hashing based on the secret value. | + +### `ByteSize` + +Module: `dature.fields.byte_size`. Parses human-readable byte sizes (`"1.5 GB"`, `"512 KiB"`) into an integer byte count. + +**Accepted formats:** `<number><unit>` where unit is one of: `B`, `KB`, `MB`, `GB`, `TB`, `PB` (decimal) or `KiB`, `MiB`, `GiB`, `TiB`, `PiB` (binary). Case-insensitive. Whitespace between number and unit is allowed. + +| Method / Property | Return type | Description | +|-------------------|-------------|-------------| +| `ByteSize(value)` | — | Constructor. Accepts `int` (raw bytes) or `str` (e.g. `"1.5 GB"`). | +| `human_readable(*, decimal=False)` | `str` | Format as human-readable string. `decimal=True` for KB/MB/GB, `False` for KiB/MiB/GiB. | +| `__int__()` | `int` | Raw byte count. | +| `__str__()` | `str` | Same as `human_readable()`. | +| `__repr__()` | `str` | Returns `"ByteSize(<size>)"`. | +| `__eq__()`, `__hash__()` | — | Equality and hashing based on byte count. | +| `__lt__()`, `__le__()`, `__gt__()`, `__ge__()` | `bool` | Comparison operators based on byte count. | + +### `PaymentCardNumber` + +Module: `dature.fields.payment_card`. Luhn-validated payment card number with brand detection. + +Constructor strips spaces and dashes, validates digit-only 12–19 chars, and runs Luhn check. Raises `ValueError` on invalid input. + +| Method / Property | Return type | Description | +|-------------------|-------------|-------------| +| `PaymentCardNumber(card_number)` | — | Constructor. Accepts string with digits, spaces, dashes. | +| `get_raw_number()` | `str` | Returns the cleaned digit-only number. | +| `masked` | `str` | Property. Returns `"************1234"` (last 4 digits visible). | +| `brand` | `str` | Property. Detected brand: `"Visa"`, `"Mastercard"`, `"American Express"`, `"Discover"`, `"JCB"`, `"Diners Club"`, `"UnionPay"`, `"Maestro"`, `"Mir"`, `"Troy"`, `"RuPay"`, `"Verve"`, or `"Unknown"`. | +| `__str__()` | `str` | Same as `masked`. 
| +| `__repr__()` | `str` | Returns `"PaymentCardNumber('<masked>')"`. | +| `__eq__()`, `__hash__()` | — | Equality and hashing based on the raw number. | + +### Other Type Aliases + | Type | Module | Description | |------|--------|-------------| -| `SecretStr` | `dature.fields.secret_str` | Masked string with `get_secret_value()` | -| `ByteSize` | `dature.fields.byte_size` | Human-readable byte sizes | -| `PaymentCardNumber` | `dature.fields.payment_card` | Luhn-validated card with brand detection | -| `URL` | `dature.types` | Alias for `urllib.parse.ParseResult` | -| `Base64UrlStr` | `dature.types` | Base64-decoded string | -| `Base64UrlBytes` | `dature.types` | Base64-decoded bytes | +| `URL` | `dature.types` | Alias for `urllib.parse.ParseResult`. Parsed from URL strings. | +| `Base64UrlStr` | `dature.types` | `NewType` over `str`. Decoded from base64url-encoded strings. | +| `Base64UrlBytes` | `dature.types` | `NewType` over `bytes`. Decoded from base64url-encoded strings. | + --- -## Loaders - -| Loader | Module | Format | -|--------|--------|--------| -| `JsonLoader` | `dature.sources_loader.json_` | JSON | -| `Json5Loader` | `dature.sources_loader.json5_` | JSON5 | -| `Yaml11Loader` | `dature.sources_loader.yaml_` | YAML 1.1 | -| `Yaml12Loader` | `dature.sources_loader.yaml_` | YAML 1.2 | -| `Toml10Loader` | `dature.sources_loader.toml_` | TOML 1.0 | -| `Toml11Loader` | `dature.sources_loader.toml_` | TOML 1.1 | -| `IniLoader` | `dature.sources_loader.ini_` | INI | -| `EnvLoader` | `dature.sources_loader.env_` | Environment variables | -| `EnvFileLoader` | `dature.sources_loader.env_` | .env files | -| `DockerSecretsLoader` | `dature.sources_loader.docker_secrets` | Docker secrets directory | +## Source Classes + +### File-based sources (inherit `FileSource`) + +All file-based sources accept the `file` parameter from [`FileSource`](#filesourcesource) plus all common parameters from [`Source`](#source). 
+ +`file` accepts `str`, `Path`, or file-like objects (`StringIO`, `BytesIO`, any `TextIOBase`/`BufferedIOBase`/`RawIOBase`). When `file=None`, the path defaults to the current directory. + +`file_display()` returns the path as string, `""` for file-like objects, or `None` when `file=None`. + +#### `JsonSource(FileSource)` + +| | | +|---|---| +| **Format** | JSON | +| **Module** | `dature.sources.json_` | +| **Dependencies** | stdlib `json` | +| **Error label** | `FILE` | +| **String parsing** | `float`, `date`, `datetime`, `time`, `bytearray` from strings | + +#### `Json5Source(FileSource)` + +| | | +|---|---| +| **Format** | JSON5 (comments, trailing commas, unquoted keys) | +| **Module** | `dature.sources.json5_` | +| **Dependencies** | `json5` | +| **Error label** | `FILE` | +| **String parsing** | `str` (from JSON5 identifiers), `float`, `date`, `datetime`, `time`, `bytearray` from strings | + +#### `Yaml11Source(FileSource)` + +| | | +|---|---| +| **Format** | YAML 1.1 | +| **Module** | `dature.sources.yaml_` | +| **Dependencies** | `ruamel.yaml` | +| **Error label** | `FILE` | +| **Native types** | `date`, `datetime` parsed natively by YAML. `time` from int, `bytearray` from strings | + +#### `Yaml12Source(FileSource)` + +| | | +|---|---| +| **Format** | YAML 1.2 | +| **Module** | `dature.sources.yaml_` | +| **Dependencies** | `ruamel.yaml` | +| **Error label** | `FILE` | +| **Native types** | `date`, `datetime` parsed natively by YAML. `time`, `bytearray` from strings | + +#### `Toml10Source(FileSource)` + +| | | +|---|---| +| **Format** | TOML 1.0 | +| **Module** | `dature.sources.toml_` | +| **Dependencies** | `toml_rs` | +| **Error label** | `FILE` | +| **Native types** | `date`, `datetime`, `time` parsed natively by TOML. 
`bytearray`, `None`, `str \| None` from strings | + +#### `Toml11Source(FileSource)` + +| | | +|---|---| +| **Format** | TOML 1.1 | +| **Module** | `dature.sources.toml_` | +| **Dependencies** | `toml_rs` | +| **Error label** | `FILE` | +| **Native types** | `date`, `datetime`, `time` parsed natively by TOML. `bytearray`, `None`, `str \| None` from strings | + +#### `IniSource(FileSource)` + +| | | +|---|---| +| **Format** | INI (stdlib `configparser`) | +| **Module** | `dature.sources.ini_` | +| **Dependencies** | stdlib `configparser` | +| **Error label** | `FILE` | +| **String parsing** | All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str \| None`. Nested JSON in values (`[...]`, `{...}`) is inferred. | + +Section headers become top-level dict keys. Dotted sections (`database.pool`) create nested dicts. `prefix` selects a single section. + +### Flat key-value sources (inherit `FlatKeySource`) + +All flat key-value sources accept `split_symbols`, `nested_resolve_strategy` and `nested_resolve` from [`FlatKeySource`](#flatkeysourcesource) plus all common parameters from [`Source`](#source). + +All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str | None`. Nested JSON in values (`[...]`, `{...}`) is inferred. + +Nesting is built from `split_symbols` (default `"__"`): `APP__DB__HOST=x` → `{"db": {"host": "x"}}`. + +#### `EnvSource(FlatKeySource)` + +| | | +|---|---| +| **Format** | Environment variables (`os.environ`) | +| **Module** | `dature.sources.env_` | +| **Dependencies** | — | +| **Error label** | `ENV` | + +Keys are lowercased after stripping `prefix`. `resolve_location()` returns `env_var_name` instead of file/line info. 
+ +#### `EnvFileSource(FlatKeySource)` + +| | | +|---|---| +| **Format** | `.env` files (`KEY=value`, `#` comments, quoted values) | +| **Module** | `dature.sources.env_` | +| **Dependencies** | — | +| **Error label** | `ENV FILE` | + +Inherits from both `FileFieldMixin` and `EnvSource`, so accepts the `file` parameter. `resolve_location()` returns line range within the `.env` file. + +#### `DockerSecretsSource(FlatKeySource)` + +| | | +|---|---| +| **Format** | Docker secrets directory (one file per secret) | +| **Module** | `dature.sources.docker_secrets` | +| **Dependencies** | — | +| **Error label** | `SECRET FILE` | + +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `dir_` | `FilePath` | — | Path to the Docker secrets directory (e.g. `/run/secrets`). Required. | + +Each file in `dir_` becomes a key (filename, lowercased) with the file content (stripped) as value. Subdirectories are skipped. `resolve_location()` returns the path `dir_/secret_name` as `file_path`. --- ## Exceptions -| Exception | Description | -|-----------|-------------| -| `DatureError` | Base exception | -| `DatureConfigError` | Aggregated config loading errors | -| `MergeConflictError` | Merge conflict between sources | -| `FieldGroupError` | Field group constraint violation | -| `EnvVarExpandError` | Missing environment variables in strict mode | -| `FieldLoadError` | Single field loading error | -| `SourceLoadError` | Source loading failure | +All exceptions are in `dature.errors`. + +### `DatureError` + +Base exception for all dature errors. + +### `DatureConfigError(ExceptionGroup[DatureError])` + +Aggregated config loading errors. Contains one or more `FieldLoadError` sub-exceptions. + +| Field | Type | Description | +|-------|------|-------------| +| `dataclass_name` | `str` | Name of the target dataclass. | +| `exceptions` | `tuple[DatureError, ...]` | Individual errors (inherited from `ExceptionGroup`). 
| + +`str()` returns `"<dataclass_name> loading errors (<count>)"`. + +### `FieldLoadError(DatureError)` + +Single field loading error with source location. + +| Field | Type | Description | +|-------|------|-------------| +| `field_path` | `list[str]` | Path to the field (e.g. `["database", "host"]`). | +| `message` | `str` | Human-readable error description. | +| `input_value` | `JSONValue` | The raw value that failed to load. | +| `locations` | `list[SourceLocation]` | Source locations for error reporting (file path, line range, env var name). | + +`str()` returns a formatted multi-line message with source context and caret pointing at the value. + +### `SourceLoadError(DatureError)` + +Source-level loading failure (e.g. file not found, parse error). + +| Field | Type | Description | +|-------|------|-------------| +| `message` | `str` | Error description. | +| `location` | `SourceLocation \| None` | Source location, if available. | + +### `MergeConflictError(DatureConfigError)` + +Raised with `strategy="raise_on_conflict"` when sources provide different values. Contains `MergeConflictFieldError` sub-exceptions. + +### `MergeConflictFieldError(DatureError)` + +Per-field merge conflict. + +| Field | Type | Description | +|-------|------|-------------| +| `field_path` | `list[str]` | Path to the conflicting field. | +| `message` | `str` | Conflict description. | +| `locations` | `list[SourceLocation]` | Conflicting source locations. | + +### `FieldGroupError(DatureConfigError)` + +Field group constraint violation. Contains `FieldGroupViolationError` sub-exceptions. + +### `FieldGroupViolationError(DatureError)` + +Single field group violation. + +| Field | Type | Description | +|-------|------|-------------| +| `group_fields` | `tuple[str, ...]` | All fields in the group. | +| `changed_fields` | `tuple[str, ...]` | Fields that were overridden. | +| `unchanged_fields` | `tuple[str, ...]` | Fields that were not overridden. 
| +| `changed_sources` | `tuple[str, ...]` | Source names for changed fields. | +| `unchanged_sources` | `tuple[str, ...]` | Source names for unchanged fields. | +| `source_index` | `int` | Index of the source that caused the violation. | + +### `EnvVarExpandError(DatureConfigError)` + +Missing environment variables in `expand_env_vars="strict"` mode. Contains `MissingEnvVarError` sub-exceptions. + +### `MissingEnvVarError(DatureError)` + +Single missing env var. + +| Field | Type | Description | +|-------|------|-------------| +| `var_name` | `str` | Name of the missing variable. | +| `position` | `int` | Character position in the source string. | +| `source_text` | `str` | The original string containing `$VAR`. | +| `field_path` | `list[str]` | Field path, if known. | +| `location` | `SourceLocation \| None` | Source location, if available. | + +### `SourceLocation` + +Frozen dataclass used in error messages to point at the source of a value. + +| Field | Type | Description | +|-------|------|-------------| +| `location_label` | `str` | Source type label: `"FILE"`, `"ENV"`, `"ENV FILE"`, `"SECRET FILE"`. | +| `file_path` | `Path \| None` | File path, or `None` for env vars. | +| `line_range` | `LineRange \| None` | Start/end line numbers in the file. | +| `line_content` | `list[str] \| None` | Relevant source lines for context. | +| `env_var_name` | `str \| None` | Environment variable name, for ENV sources. | +| `annotation` | `str \| None` | Extra annotation (e.g. merge conflict info). | +| `env_var_value` | `str \| None` | Raw env var value for conflict reporting. | + +### `LineRange` + +Frozen dataclass for file line ranges. + +| Field | Type | Description | +|-------|------|-------------| +| `start` | `int` | Start line (1-based). | +| `end` | `int` | End line (1-based, inclusive). | -All exceptions are in `dature.errors.exceptions`. +`repr()` returns `"line 5"` or `"line 5-8"`. --- @@ -228,7 +670,16 @@ All exceptions are in `dature.errors.exceptions`. 
| `FileOrStream` | `Path \| FileLike` | `dature.types` | | `NameStyle` | `Literal["lower_snake", "upper_snake", "lower_camel", "upper_camel", "lower_kebab", "upper_kebab"]` | `dature.types` | | `ExpandEnvVarsMode` | `Literal["disabled", "default", "empty", "strict"]` | `dature.types` | -| `FieldMapping` | `dict[FieldPath, str \| tuple[str, ...]]` | `dature.types` | -| `FieldValidators` | `dict[FieldPath, ValidatorProtocol \| tuple[ValidatorProtocol, ...]]` | `dature.types` | +| `FieldRef` | `FieldPath \| str \| int \| float \| bool \| list \| dict \| tuple \| set \| bytes \| None` | `dature.types` | +| `FieldMapping` | `dict[FieldRef, str \| tuple[str, ...]]` | `dature.types` | +| `FieldValidators` | `dict[FieldRef, ValidatorProtocol \| tuple[ValidatorProtocol, ...]]` | `dature.types` | +| `FieldMergeMap` | `dict[FieldRef, FieldMergeStrategyName \| Callable[..., Any]]` | `dature.types` | | `FieldMergeCallable` | `Callable[[list[JSONValue]], JSONValue]` | `dature.types` | +| `FieldMergeStrategyName` | `Literal["first_wins", "last_wins", "append", "append_unique", "prepend", "prepend_unique"]` | `dature.types` | +| `FieldGroupTuple` | `tuple[FieldRef, ...]` | `dature.types` | +| `TypeLoaderMap` | `dict[type, Callable[..., Any]]` | `dature.types` | +| `MergeStrategyName` | `Literal["last_wins", "first_wins", "first_found", "raise_on_conflict"]` | `dature.types` | +| `NestedResolveStrategy` | `Literal["flat", "json"]` | `dature.types` | +| `NestedResolve` | `dict[NestedResolveStrategy, tuple[FieldPath \| Any, ...]]` | `dature.types` | | `JSONValue` | `dict[str, JSONValue] \| list[JSONValue] \| str \| int \| float \| bool \| None` | `dature.types` | +| `LoadRawResult` | `dataclass(data: JSONValue, nested_conflicts: NestedConflicts)` | `dature.types` | diff --git a/docs/comparison/why-not-hydra.md b/docs/comparison/why-not-hydra.md index 0e9ac71..f613ce7 100644 --- a/docs/comparison/why-not-hydra.md +++ b/docs/comparison/why-not-hydra.md @@ -22,7 +22,7 @@ The trade-off 
is scope: Hydra is a **framework** that takes over your entry poin | **Error messages** | OmegaConf exceptions | Human-readable: source file, line number, context snippet | | **Secret masking** | No | Auto-masks secrets in errors and logs | | **Debug / audit** | Output dir with saved config + logs | `debug=True` — which source provided each value | -| **Plugin system** | Sweepers, launchers, config sources, search path | Custom loaders via `BaseLoader` subclass | +| **Plugin system** | Sweepers, launchers, config sources, search path | Custom loaders via `Source` subclass | | **Dependencies** | `hydra-core` + `omegaconf` + `antlr4-runtime` | `adaptix` (pure Python) | | **Config result** | `OmegaConf.DictConfig` (dict-like wrapper) | Your actual `@dataclass` instance | | **Maintenance** | Last release: Dec 2022. [Acknowledged as unmaintained](https://github.com/facebookresearch/xformers/issues/848) by other Meta teams | Active development | @@ -36,7 +36,7 @@ Hydra reads YAML exclusively. You can reference env vars via OmegaConf's `${oc.e - **JSON / JSON5** — common in web services and JavaScript-adjacent tooling. Not supported. - **INI** — legacy format still common in enterprise. Not supported. 
-dature handles all of these out of the box, with auto-detection from file extension: +dature handles all of these out of the box: ```python --8<-- "examples/docs/comparison/why-not-hydra/hydra_merge.py:merge" diff --git a/docs/comparison/why-not-pydantic-settings.md b/docs/comparison/why-not-pydantic-settings.md index cc781de..c6a8f50 100644 --- a/docs/comparison/why-not-pydantic-settings.md +++ b/docs/comparison/why-not-pydantic-settings.md @@ -9,14 +9,14 @@ The trade-off is coupling: your config must be a Pydantic model, custom types ne | | pydantic-settings | dature | |---|---|---| | **Base class** | `BaseSettings` (Pydantic model) | stdlib `@dataclass` | -| **Formats** | `.env`, env vars, JSON, YAML, TOML + custom sources | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets — auto-detected | +| **Formats** | `.env`, env vars, JSON, YAML, TOML + custom sources | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets | | **Merging** | Fixed priority order (init > env > dotenv > secrets > defaults) | 4 strategies + per-field rules (`"append"`, `"prepend"`, field groups, etc.) 
| | **Skip broken sources** | No | Yes — `skip_if_broken`, `skip_if_invalid` | | **Field groups** | No | Yes — enforce related fields are overridden together | | **Naming conventions** | `alias` / `alias_generator` (`to_camel`, `to_pascal`, `to_snake`) | Built-in `name_style` (6 conventions) + explicit `field_mapping` with multiple aliases | | **CLI** | Built-in `CliSettingsSource` with subcommands, async support | No CLI | | **Secrets** | `SecretStr`, `secrets_dir`, nested secrets directories | `SecretStr`, auto-masking in errors/logs (by type, name pattern, or heuristic) | -| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Source(file="$DIR/config.toml")`) | +| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Toml11Source(file="$DIR/config.toml")`) | | **Error messages** | Pydantic `ValidationError` | Human-readable: source file, line number, context snippet | | **Debug / audit** | No | `debug=True` — which source provided each value | | **Validation** | Pydantic `field_validator`, `model_validator` (pre/post), constraints | `Annotated` validators, root validators, custom validators, `__post_init__` | @@ -73,15 +73,15 @@ class Settings(BaseSettings): ) ``` -dature **auto-detects** the format from the file extension — no boilerplate: +dature uses explicit Source subclasses — no boilerplate: ```python ---8<-- "examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py:auto-detect" +--8<-- "examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py:formats" ``` dature also supports INI, JSON5, and YAML 1.1/1.2 + TOML 1.0/1.1 version variants — formats that pydantic-settings doesn't cover. -Need a custom format? Subclass `BaseLoader` — one method to implement, not an entire `SettingsSource`. +Need a custom format? Subclass `Source` — one method to implement, not an entire `SettingsSource`. 
## Error Messages You Can Actually Read diff --git a/docs/features/naming.md b/docs/features/naming.md index 5c4dc28..e3e6864 100644 --- a/docs/features/naming.md +++ b/docs/features/naming.md @@ -48,7 +48,7 @@ Explicit field renaming using `F` objects. Takes priority over `name_style`: A field can have multiple aliases — the first matching key in the source wins: ```python -field_mapping={dature.F[Config].name: ("fullName", "userName")} +--8<-- "examples/docs/features/naming/naming_field_mapping_aliases.py:aliases" ``` ### Nested Fields @@ -72,7 +72,7 @@ Nested fields are supported via `F[Owner].field` syntax on inner dataclasses: In decorator mode where the class is not yet defined, use a string: ```python -F["Config"].name # autocomplete doesn't work here +--8<-- "examples/docs/features/naming/naming_field_mapping_decorator.py:decorator" ``` ## prefix diff --git a/docs/features/validation.md b/docs/features/validation.md index 7592428..0862141 100644 --- a/docs/features/validation.md +++ b/docs/features/validation.md @@ -54,8 +54,7 @@ Declare validators using `typing.Annotated`: Multiple validators can be combined: ```python -port: Annotated[int, Ge(1), Le(65535)] -tags: Annotated[list[str], MinItems(1), MaxItems(10), UniqueItems()] +--8<-- "examples/docs/features/validation/validation_annotated_combined.py:combined" ``` ## Root Validators @@ -107,19 +106,13 @@ Field validators can be specified in `Source` using the `validators` parameter. A single validator can be passed directly. 
Multiple validators require a tuple: ```python -validators={ - dature.F[Config].port: (Gt(0), Lt(65536)), # tuple for multiple - dature.F[Config].host: MinLength(1), # single, no tuple needed -} +--8<-- "examples/docs/features/validation/validation_metadata_syntax.py:syntax" ``` Nested fields are supported: ```python -validators={ - dature.F[Config].database.host: MinLength(1), - dature.F[Config].database.port: Gt(0), -} +--8<-- "examples/docs/features/validation/validation_metadata_nested.py:nested" ``` ## Custom Validators diff --git a/docs/index.md b/docs/index.md index cb14873..b2e6793 100644 --- a/docs/index.md +++ b/docs/index.md @@ -99,20 +99,20 @@ Load config from YAML, JSON, TOML, INI, ENV files, environment variables and Doc ## Supported Formats -| Format | Extension | Loader | Extra dependency | -|--------|-----------|--------|------------------| -| YAML 1.1 | `.yaml`, `.yml` | `Yaml11Loader` | `ruamel.yaml` | -| YAML 1.2 | `.yaml`, `.yml` | `Yaml12Loader` | `ruamel.yaml` | -| JSON | `.json` | `JsonLoader` | — | -| JSON5 | `.json5` | `Json5Loader` | `json-five` | -| TOML 1.0 | `.toml` | `Toml10Loader` | `toml-rs` | -| TOML 1.1 | `.toml` | `Toml11Loader` | `toml-rs` | -| INI | `.ini`, `.cfg` | `IniLoader` | — | -| ENV file | `.env` | `EnvFileLoader` | — | -| Environment variables | — | `EnvLoader` | — | -| Docker secrets | directory | `DockerSecretsLoader` | — | - -The format is auto-detected from the file extension. When `file` is not specified, environment variables are used. When `file` points to a directory, `DockerSecretsLoader` is used. `file` also accepts `Path` objects and file-like objects (`BytesIO`, `StringIO`) — for file-like objects, the `loader` parameter is required. 
+| Format | Source Class | Extra dependency | +|--------|--------------|------------------| +| YAML 1.1 | `Yaml11Source` | `ruamel.yaml` | +| YAML 1.2 | `Yaml12Source` | `ruamel.yaml` | +| JSON | `JsonSource` | — | +| JSON5 | `Json5Source` | `json-five` | +| TOML 1.0 | `Toml10Source` | `toml-rs` | +| TOML 1.1 | `Toml11Source` | `toml-rs` | +| INI | `IniSource` | — | +| ENV file | `EnvFileSource` | — | +| Environment variables | `EnvSource` | — | +| Docker secrets | `DockerSecretsSource` | — | + +Use the specific Source subclass for your format. File-based sources (`FileSource` subclasses) accept `file` as `str`, `Path`, or file-like object (`BytesIO`, `StringIO`). `EnvSource` reads from environment variables (no `file` parameter). `DockerSecretsSource` accepts `dir` pointing to a secrets directory. ## mypy Plugin diff --git a/docs/introduction.md b/docs/introduction.md index 2830393..0ca4c96 100644 --- a/docs/introduction.md +++ b/docs/introduction.md @@ -25,12 +25,12 @@ dature offers two ways to load configuration: **function mode** and **decorator Explicit arguments to `__init__` take priority over loaded values: ```python - config = Config(port=9090) # host from source, port overridden + --8<-- "examples/docs/introduction/intro_decorator_override.py:override" ``` ## All Formats -dature auto-detects the format from the file extension. Here's the same config loaded from every supported format: +Use the specific Source subclass for your format. Here's the same config loaded from every supported format: === "YAML" @@ -105,39 +105,17 @@ dature auto-detects the format from the file extension. 
Here's the same config l --8<-- "examples/docs/introduction/format_docker.py" ``` -### Auto-Detection - -| Extension | Loader | -|-----------|--------| -| `.yaml`, `.yml` | `Yaml12Loader` (default) | -| `.json` | `JsonLoader` | -| `.json5` | `Json5Loader` | -| `.toml` | `Toml11Loader` (default) | -| `.ini`, `.cfg` | `IniLoader` | -| `.env` | `EnvFileLoader` | -| directory | `DockerSecretsLoader` | -| not specified | `EnvLoader` (environment variables) | - -Override auto-detection with the `loader` parameter: - -```python -from dature.sources_loader.yaml_ import Yaml11Loader - -dature.Source(file="config.yaml", loader=Yaml11Loader) -``` +See the full list of Source classes and their extra dependencies on the [main page](index.md#supported-formats). ## Source Reference ```python ---8<-- "src/dature/metadata.py:load-metadata" +--8<-- "src/dature/sources/base.py:load-metadata" ``` | Parameter | Description | |-----------|-------------| -| `file` | Path to config file (`str`, `Path`), file-like object (`BytesIO`, `StringIO`), or directory. `None` → environment variables. File-like objects require explicit `loader` | -| `loader` | Explicit loader class. `None` → auto-detect from extension | | `prefix` | Filter ENV keys (`"APP_"`) or extract nested object (`"app.database"`) | -| `split_symbols` | Delimiter for flat→nested conversion. Default: `"__"` | | `name_style` | Naming convention mapping. See [Naming](features/naming.md) | | `field_mapping` | Explicit field renaming with `F` objects. See [Naming](features/naming.md) | | `root_validators` | Post-load validation of the entire object. See [Validation](features/validation.md) | @@ -148,19 +126,31 @@ dature.Source(file="config.yaml", loader=Yaml11Loader) | `secret_field_names` | Extra secret name patterns for masking. See [Masking](features/masking.md) | | `mask_secrets` | Enable/disable secret masking for this source. See [Masking](features/masking.md) | | `type_loaders` | Custom type converters for this source. 
See [Custom Types & Loaders](advanced/custom_types.md#custom-types) | + +**FileSource** subclasses (`JsonSource`, `Yaml*Source`, `Toml*Source`, `IniSource`, `Json5Source`) also have: + +| Parameter | Description | +|-----------|-------------| +| `file` | Path to config file (`str`, `Path`) or file-like object (`BytesIO`, `StringIO`). `None` → empty path | + +**FlatKeySource** subclasses (`EnvSource`, `EnvFileSource`, `DockerSecretsSource`) also have: + +| Parameter | Description | +|-----------|-------------| +| `split_symbols` | Delimiter for flat→nested conversion. Default: `"__"` | | `nested_resolve_strategy` | Priority when both JSON and flat keys exist for a nested field: `"flat"` (default) or `"json"`. See [Nested Resolve](advanced/nested-resolve.md) | | `nested_resolve` | Per-field strategy overrides using `F` objects. Takes priority over `nested_resolve_strategy`. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy) | ### File-Like Objects -`file` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass). The `loader` parameter is required since there is no file extension to auto-detect from: +`file` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass): ```python --8<-- "examples/docs/introduction/intro_file_like.py" ``` !!! note - `EnvLoader` and `DockerSecretsLoader` do not support file-like objects — they read from environment variables and directories respectively. + `EnvSource` and `DockerSecretsSource` do not support file-like objects — they read from environment variables and directories respectively. 
## Type Coercion diff --git a/examples/docs/advanced/caching/advanced_caching_disabled.py b/examples/docs/advanced/caching/advanced_caching_disabled.py index 5497fd1..8b54b98 100644 --- a/examples/docs/advanced/caching/advanced_caching_disabled.py +++ b/examples/docs/advanced/caching/advanced_caching_disabled.py @@ -9,7 +9,7 @@ os.environ["NOCACHE_PORT"] = "6379" -@dature.load(dature.Source(prefix="NOCACHE_"), cache=False) +@dature.load(dature.EnvSource(prefix="NOCACHE_"), cache=False) @dataclass class UncachedConfig: host: str diff --git a/examples/docs/advanced/caching/advanced_caching_enabled.py b/examples/docs/advanced/caching/advanced_caching_enabled.py index 3fe8b2b..bec4cc6 100644 --- a/examples/docs/advanced/caching/advanced_caching_enabled.py +++ b/examples/docs/advanced/caching/advanced_caching_enabled.py @@ -9,7 +9,7 @@ os.environ["CACHE_PORT"] = "6379" -@dature.load(dature.Source(prefix="CACHE_"), cache=True) +@dature.load(dature.EnvSource(prefix="CACHE_"), cache=True) @dataclass class CachedConfig: host: str diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py index 9669f48..1db8677 100644 --- a/examples/docs/advanced/configure/advanced_configure.py +++ b/examples/docs/advanced/configure/advanced_configure.py @@ -16,20 +16,20 @@ class Config: # 1. Default config — debug is off, no report -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None # 2. Enable debug globally via dature.configure() dature.configure(loading={"debug": True}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None # 3. 
Reset to defaults — debug is off again dature.configure(loading={}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py index 596aca3..31af68d 100644 --- a/examples/docs/advanced/configure/advanced_configure_env.py +++ b/examples/docs/advanced/configure/advanced_configure_env.py @@ -20,20 +20,20 @@ class Config: # 1. DATURE_LOADING__DEBUG=true — debug is on, report attached -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None # 2. Override env with dature.configure() — debug is off dature.configure(loading={"debug": False}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is None # 3. 
Reset to env defaults — debug is on again dature.configure(loading={"debug": True}) -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) report = dature.get_load_report(config) assert report is not None diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py index 46ed386..f7d56a3 100644 --- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py +++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py @@ -29,5 +29,5 @@ class AppConfig: # Register Rgb parser globally — no need to pass type_loaders to every load() call dature.configure(type_loaders={Rgb: rgb_from_string}) -config = dature.load(dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), schema=AppConfig) +config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "custom_type_common.yaml"), schema=AppConfig) assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0)) diff --git a/examples/docs/advanced/custom_types/custom_dict_source.py b/examples/docs/advanced/custom_types/custom_dict_source.py new file mode 100644 index 0000000..ad6cd3e --- /dev/null +++ b/examples/docs/advanced/custom_types/custom_dict_source.py @@ -0,0 +1,31 @@ +"""Custom source — subclass Source to load from a plain dict.""" + +from dataclasses import dataclass +from typing import Any, cast + +import dature +from dature.sources.base import Source +from dature.types import JSONValue + + +@dataclass(kw_only=True, repr=False) +class DictSource(Source): + format_name = "dict" + data: dict[str, Any] + + def _load(self) -> JSONValue: + return cast("JSONValue", self.data) + + +@dataclass +class Config: + host: str + port: int + + +config = dature.load( + DictSource(data={"host": "localhost", "port": 8080}), + schema=Config, +) + +assert config == 
Config(host="localhost", port=8080) diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py index 51aa01f..6be884c 100644 --- a/examples/docs/advanced/custom_types/custom_loader.py +++ b/examples/docs/advanced/custom_types/custom_loader.py @@ -1,32 +1,32 @@ -"""Custom loader — subclass BaseLoader to read XML files.""" +"""Custom source — subclass Source to read XML files.""" import xml.etree.ElementTree as ET from dataclasses import dataclass from pathlib import Path -from typing import ClassVar from adaptix import Provider, loader import dature -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import bool_loader, float_from_string +from dature.loaders import bool_loader, float_from_string +from dature.sources.base import FileSource from dature.types import FileOrStream, JSONValue SOURCES_DIR = Path(__file__).parent / "sources" -class XmlLoader(BaseLoader): - display_name: ClassVar[str] = "xml" +@dataclass(kw_only=True, repr=False) +class XmlSource(FileSource): + format_name = "xml" - def _load(self, path: FileOrStream) -> JSONValue: + def _load_file(self, path: FileOrStream) -> JSONValue: if not isinstance(path, Path): - msg = "XmlLoader only supports file paths" + msg = "XmlSource only supports file paths" raise TypeError(msg) tree = ET.parse(path) # noqa: S314 root = tree.getroot() return {child.tag: child.text or "" for child in root} - def _additional_loaders(self) -> list[Provider]: + def additional_loaders(self) -> list[Provider]: return [ loader(bool, bool_loader), loader(float, float_from_string), @@ -41,9 +41,8 @@ class Config: config = dature.load( - dature.Source( + XmlSource( file=SOURCES_DIR / "custom_loader.xml", - loader=XmlLoader, ), schema=Config, ) diff --git a/examples/docs/advanced/custom_types/custom_source_import.py b/examples/docs/advanced/custom_types/custom_source_import.py new file mode 100644 index 0000000..6ea548f --- /dev/null +++ 
b/examples/docs/advanced/custom_types/custom_source_import.py @@ -0,0 +1,3 @@ +from dature.sources.base import FileSource, FlatKeySource, Source + +__all__ = ["FileSource", "FlatKeySource", "Source"] diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py index 4ece40f..a1f88a6 100644 --- a/examples/docs/advanced/custom_types/custom_type.py +++ b/examples/docs/advanced/custom_types/custom_type.py @@ -27,7 +27,7 @@ class AppConfig: config = dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "custom_type_common.yaml", type_loaders={Rgb: rgb_from_string}, ), diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py index ebe34ee..ea5fee0 100644 --- a/examples/docs/advanced/custom_types/custom_type_merge.py +++ b/examples/docs/advanced/custom_types/custom_type_merge.py @@ -27,8 +27,8 @@ class AppConfig: config = dature.load( - dature.Source(file=SOURCES_DIR / "custom_type_common.yaml"), - dature.Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "custom_type_common.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"), schema=AppConfig, type_loaders={Rgb: rgb_from_string}, ) diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py index 87916c5..ce18d6c 100644 --- a/examples/docs/advanced/debug/advanced_debug_error.py +++ b/examples/docs/advanced/debug/advanced_debug_error.py @@ -19,8 +19,8 @@ class Config: try: config = dature.load( - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"), schema=Config, debug=True, ) diff --git 
a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py index 1efe7e8..4a03917 100644 --- a/examples/docs/advanced/debug/advanced_debug_logging.py +++ b/examples/docs/advanced/debug/advanced_debug_logging.py @@ -24,8 +24,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, ) diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py index 4de2f6a..fde3641 100644 --- a/examples/docs/advanced/debug/advanced_debug_report.py +++ b/examples/docs/advanced/debug/advanced_debug_report.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, debug=True, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py index d16ee46..65d028f 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py @@ -24,7 +24,7 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"), schema=Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py index f83df73..414a321 100644 --- 
a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py @@ -19,7 +19,7 @@ class Config: config = dature.load( - dature.Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), + dature.Yaml12Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"), schema=Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py index 1ab9f02..17a2206 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py @@ -18,7 +18,7 @@ class Config: config = dature.load( - dature.Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), + dature.Yaml12Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"), schema=Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py index 0fdb28c..5518ce9 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py @@ -18,7 +18,7 @@ class Config: config = dature.load( - dature.Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), + dature.Yaml12Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")), schema=Config, ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py index 315fa32..bd2dfa8 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py @@ -22,9 +22,9 @@ class Config: config = dature.load( - 
dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default" + dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"), schema=Config, expand_env_vars="default", # global default for all sources ) diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py index ff475de..918ae87 100644 --- a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py +++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py @@ -18,7 +18,7 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"), schema=Config, ) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py index b8f0ab5..62ac98a 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py @@ -24,8 +24,8 @@ class Config: # (dature.F[Config].database, dature.F[Config].port) # expands to (database.host, database.port, port) dature.load( - dature.Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), - dature.Source(file=SOURCES_DIR / 
"advanced_field_groups_expansion_error_overrides.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"), schema=Config, field_groups=((dature.F[Config].database, dature.F[Config].port),), ) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py index df6bb5e..699b0ec 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py @@ -19,8 +19,8 @@ class Config: dature.load( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"), schema=Config, field_groups=( (dature.F[Config].host, dature.F[Config].port), diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py index 32f67e9..e77bf78 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py @@ -19,8 +19,8 @@ class Config: dature.load( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"), schema=Config, field_groups=( (dature.F[Config].host, dature.F[Config].port), diff --git a/examples/docs/advanced/field_groups/field_groups_basic.py 
b/examples/docs/advanced/field_groups/field_groups_basic.py index 79769e7..af2f0f6 100644 --- a/examples/docs/advanced/field_groups/field_groups_basic.py +++ b/examples/docs/advanced/field_groups/field_groups_basic.py @@ -18,8 +18,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"), schema=Config, field_groups=((dature.F[Config].host, dature.F[Config].port),), ) diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py index e8032aa..5143a28 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py @@ -21,8 +21,8 @@ def merge_tags(values: list[Any]) -> list[str]: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="last_wins", field_merges={dature.F[Config].tags: merge_tags}, diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py index fd710d5..4aafc20 100644 --- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py +++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + 
dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="raise_on_conflict", field_merges={ diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py index 545420d..bee8872 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append.py +++ b/examples/docs/advanced/merge_rules/merging_field_append.py @@ -14,8 +14,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "append"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py index b29fffe..8ea9f38 100644 --- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py @@ -14,8 +14,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "append_unique"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py index e207a7a..ebd5466 100644 --- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py @@ -14,8 +14,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / 
"merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "first_wins"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py index 25b9295..1a0544c 100644 --- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py +++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py @@ -14,8 +14,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "last_wins"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py index b144515..6d448a3 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py @@ -14,8 +14,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "prepend"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py index 39b3609..f9a98c2 100644 --- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py +++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py @@ -14,8 +14,8 @@ class Config: config = 
dature.load( - dature.Source(file=SOURCES_DIR / "merging_field_base.yaml"), - dature.Source(file=SOURCES_DIR / "merging_field_override.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"), schema=Config, field_merges={dature.F[Config].tags: "prepend_unique"}, ) diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py index b863b1a..d8571c9 100644 --- a/examples/docs/advanced/merge_rules/merging_first_found.py +++ b/examples/docs/advanced/merge_rules/merging_first_found.py @@ -15,8 +15,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), - dature.Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"), schema=Config, strategy="first_found", ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py index d526970..0b0b7c7 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken.py +++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py @@ -17,8 +17,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True), schema=Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py index 5e74810..72a399d 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py +++ 
b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py @@ -17,12 +17,12 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global - dature.Source( + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global + dature.Yaml12Source( file=SOURCES_DIR / "optional.yaml", skip_if_broken=True, ), # always skip if broken - dature.Source( + dature.Yaml12Source( file=SHARED_DIR / "common_overrides.yaml", skip_if_broken=False, ), # never skip, even if global is True diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py index 41bf025..16ec80f 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py @@ -15,7 +15,7 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), + dature.Yaml12Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True), schema=Config, ) diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py index 84d5127..d78b72e 100644 --- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py +++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), - dature.Source( + dature.Yaml12Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"), + dature.Yaml12Source( file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml", skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout), ), diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py 
b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py index 8176fa3..fb6fe84 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py @@ -5,7 +5,6 @@ from tempfile import TemporaryDirectory import dature -from dature.sources_loader.docker_secrets import DockerSecretsLoader @dataclass @@ -26,9 +25,8 @@ class Config: (secrets_path / "database__port").write_text("3306") config = dature.load( - dature.Source( - file=secrets_path, - loader=DockerSecretsLoader, + dature.DockerSecretsSource( + dir_=secrets_path, nested_resolve_strategy="json", ), schema=Config, diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py index 5bc82a2..9f9386a 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py @@ -4,7 +4,6 @@ from pathlib import Path import dature -from dature.sources_loader.env_ import EnvFileLoader SOURCES_DIR = Path(__file__).parent / "sources" @@ -21,9 +20,8 @@ class Config: config = dature.load( - dature.Source( + dature.EnvFileSource( file=SOURCES_DIR / "nested_resolve.env", - loader=EnvFileLoader, prefix="APP__", nested_resolve_strategy="json", ), diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py index 28b6171..7f37668 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' os.environ["APP__DATABASE__HOST"] = "flat-host" @@ -23,7 +22,7 @@ class Config: config = dature.load( - 
dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), + dature.EnvSource(prefix="APP__", nested_resolve_strategy="flat"), schema=Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py index 84eb4a8..0f26fc1 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' os.environ["APP__DATABASE__HOST"] = "flat-host" @@ -23,7 +22,7 @@ class Config: config = dature.load( - dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"), + dature.EnvSource(prefix="APP__", nested_resolve_strategy="json"), schema=Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py index 460f258..144c7a0 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader # Only JSON form, no flat keys os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' @@ -27,7 +26,7 @@ class Config: # Even with strategy="flat", JSON is parsed because there are no flat keys config = dature.load( - dature.Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"), + dature.EnvSource(prefix="APP__", nested_resolve_strategy="flat"), schema=Config, ) diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_override.py b/examples/docs/advanced/nested_resolve/nested_resolve_override.py index bbd6e36..b63c675 100644 --- 
a/examples/docs/advanced/nested_resolve/nested_resolve_override.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_override.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' os.environ["APP__DATABASE__HOST"] = "flat-host" @@ -34,8 +33,7 @@ class Config: # Global: "flat" for everything, but database overridden to "json" config = dature.load( - dature.Source( - loader=EnvLoader, + dature.EnvSource( prefix="APP__", nested_resolve_strategy="flat", nested_resolve={"json": (dature.F[Config].database,)}, diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py index 7a8204a..ce0d384 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' os.environ["APP__DATABASE__HOST"] = "flat-host" @@ -34,8 +33,7 @@ class Config: # database uses JSON, cache uses flat keys config = dature.load( - dature.Source( - loader=EnvLoader, + dature.EnvSource( prefix="APP__", nested_resolve={ "json": (dature.F[Config].database,), diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py index ae348ba..d7b668e 100644 --- a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py +++ b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py @@ -4,7 +4,6 @@ from dataclasses import dataclass import dature -from dature.sources_loader.env_ import EnvLoader os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}' os.environ["APP__DATABASE__HOST"] = "flat-host" @@ -23,7 +22,7 @@ class 
Config: # Without nested_resolve_strategy, flat keys win by default -config = dature.load(dature.Source(loader=EnvLoader, prefix="APP__"), schema=Config) +config = dature.load(dature.EnvSource(prefix="APP__"), schema=Config) assert config.database.host == "flat-host" assert config.database.port == 3306 diff --git a/examples/docs/api_reference/api_reference_decorator_mode.py b/examples/docs/api_reference/api_reference_decorator_mode.py index c631a96..5bb6acb 100644 --- a/examples/docs/api_reference/api_reference_decorator_mode.py +++ b/examples/docs/api_reference/api_reference_decorator_mode.py @@ -8,7 +8,7 @@ SHARED_DIR = Path(__file__).parents[1] / "shared" -@dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml")) +@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml")) @dataclass class Config: host: str diff --git a/examples/docs/api_reference/api_reference_function_mode.py b/examples/docs/api_reference/api_reference_function_mode.py index 89611df..c86999f 100644 --- a/examples/docs/api_reference/api_reference_function_mode.py +++ b/examples/docs/api_reference/api_reference_function_mode.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py index 8527e03..b275722 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_basic.toml"), schema=Config) +config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "dynaconf_basic.toml"), schema=Config) # config.hostt → 
AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py index f598311..dc61315 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py @@ -16,8 +16,8 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), + dature.Yaml12Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True), schema=Config, strategy="last_wins", ) diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py index da358d6..02ebda1 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py @@ -23,7 +23,7 @@ def check_debug_port(config: Config) -> bool: dature.load( - dature.Source( + dature.Toml11Source( file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml", root_validators=( RootValidator( diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py index e5770a8..f119529 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py @@ -17,4 +17,4 @@ class Config: debug: bool = False -dature.load(dature.Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config) +dature.load(dature.Toml11Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config) diff --git 
a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py index affd914..3a634a8 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py +++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py @@ -15,7 +15,7 @@ class Config: # --8<-- [start:dataclass] -config = dature.load(dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "hydra_defaults.yaml"), schema=Config) assert isinstance(config, Config) # Full IDE support, type safety, __post_init__ works # --8<-- [end:dataclass] diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py b/examples/docs/comparison/why-not-hydra/hydra_merge.py index a94986b..ed004ea 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_merge.py +++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py @@ -1,4 +1,4 @@ -"""dature vs Hydra — multi-format merge with auto-detection.""" +"""dature vs Hydra — multi-format merge.""" from dataclasses import dataclass from pathlib import Path @@ -16,9 +16,9 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Source(file=SOURCES_DIR / "hydra_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), - dature.Source(prefix="APP_"), + dature.Yaml12Source(file=SOURCES_DIR / "hydra_defaults.yaml"), + dature.Toml11Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True), + dature.EnvSource(prefix="APP_"), schema=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py index fa15302..6166b9f 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.py +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py @@ -16,4 +16,4 @@ class Config: port: Annotated[int, Gt(0), Lt(65536)] = 8080 -dature.load(dature.Source(file=SOURCES_DIR / 
"hydra_validators_invalid.yaml"), schema=Config) +dature.load(dature.Yaml12Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), schema=Config) diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py deleted file mode 100644 index 6b8f56f..0000000 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py +++ /dev/null @@ -1,26 +0,0 @@ -"""dature vs pydantic-settings — auto-detection of file format.""" - -from dataclasses import dataclass -from pathlib import Path - -import dature - -SOURCES_DIR = Path(__file__).parent / "sources" - - -@dataclass -class Config: - host: str - port: int - - -# --8<-- [start:auto-detect] -# Just change the file — dature picks the right loader -yaml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), schema=Config) -toml_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), schema=Config) -json5_config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), schema=Config) -# --8<-- [end:auto-detect] - -assert yaml_config.host == "localhost" -assert toml_config.host == "localhost" -assert json5_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py index af7c4d4..ebc2ccf 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py @@ -16,7 +16,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), schema=Config) # config.hostt → 
AttributeError immediately # config.port is always int — guaranteed # --8<-- [end:basic] diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py new file mode 100644 index 0000000..9412f49 --- /dev/null +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py @@ -0,0 +1,25 @@ +"""dature vs pydantic-settings — multiple file formats.""" + +from dataclasses import dataclass +from pathlib import Path + +import dature + +SOURCES_DIR = Path(__file__).parent / "sources" + + +@dataclass +class Config: + host: str + port: int + + +# --8<-- [start:formats] +yaml_config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_formats.yaml"), schema=Config) +toml_config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "pydantic_settings_formats.toml"), schema=Config) +json5_config = dature.load(dature.Json5Source(file=SOURCES_DIR / "pydantic_settings_formats.json5"), schema=Config) +# --8<-- [end:formats] + +assert yaml_config.host == "localhost" +assert toml_config.host == "localhost" +assert json5_config.host == "localhost" diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py index 35d0e18..23392de 100644 --- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py +++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py @@ -16,9 +16,9 @@ class Config: # --8<-- [start:merge] config = dature.load( - dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), - dature.Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True), - dature.Source(prefix="APP_"), + dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / 
"pydantic_settings_merge_local.yaml", skip_if_broken=True), + dature.EnvSource(prefix="APP_"), schema=Config, ) # --8<-- [end:merge] diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.json5 b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.json5 similarity index 100% rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.json5 rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.json5 diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.toml b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.toml similarity index 100% rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.toml rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.toml diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.yaml b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.yaml similarity index 100% rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.yaml rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.yaml diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py index 2e7ff9e..e78e57b 100644 --- a/examples/docs/features/masking/masking_by_name.py +++ b/examples/docs/features/masking/masking_by_name.py @@ -15,4 +15,4 @@ class Config: host: str -dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) +dature.load(dature.Yaml12Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) diff --git a/examples/docs/features/masking/masking_classic_style.py 
b/examples/docs/features/masking/masking_classic_style.py index aa09928..e1b267d 100644 --- a/examples/docs/features/masking/masking_classic_style.py +++ b/examples/docs/features/masking/masking_classic_style.py @@ -21,7 +21,7 @@ class Config: host: str -config = dature.load(dature.Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config) assert mask_value("my_secret_password") == "my*****rd" assert mask_value("ab") == "ab" diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py index f30e560..26ee856 100644 --- a/examples/docs/features/masking/masking_heuristic.py +++ b/examples/docs/features/masking/masking_heuristic.py @@ -16,6 +16,6 @@ class Config: dature.load( - dature.Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), + dature.Yaml12Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True), schema=Config, ) diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py index 6e30537..0355297 100644 --- a/examples/docs/features/masking/masking_merge_mode.py +++ b/examples/docs/features/masking/masking_merge_mode.py @@ -18,8 +18,8 @@ class Config: dature.load( - dature.Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), - dature.Source( + dature.Yaml12Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"), + dature.Yaml12Source( file=SOURCES_DIR / "masking_merge_mode_secrets.yaml", secret_field_names=("api_key",), ), diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py index b7167e5..ad63725 100644 --- a/examples/docs/features/masking/masking_no_mask.py +++ b/examples/docs/features/masking/masking_no_mask.py @@ -17,7 +17,7 @@ class Config: dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / 
"masking_per_source.yaml", mask_secrets=False, ), diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py index 7b77817..0c969a4 100644 --- a/examples/docs/features/masking/masking_per_source.py +++ b/examples/docs/features/masking/masking_per_source.py @@ -17,7 +17,7 @@ class Config: dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "masking_per_source.yaml", secret_field_names=("api_key",), ), diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py index 8eebf2b..fed1b28 100644 --- a/examples/docs/features/masking/masking_secret_str.py +++ b/examples/docs/features/masking/masking_secret_str.py @@ -18,6 +18,6 @@ class Config: dature.load( - dature.Source(file=SOURCES_DIR / "masking_secret_str.yaml"), + dature.Yaml12Source(file=SOURCES_DIR / "masking_secret_str.yaml"), schema=Config, ) diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py index 2434422..536f512 100644 --- a/examples/docs/features/merging/merging_basic.py +++ b/examples/docs/features/merging/merging_basic.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="last_wins", ) diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py index 0bc5317..00b3af1 100644 --- a/examples/docs/features/merging/merging_strategies.py +++ b/examples/docs/features/merging/merging_strategies.py @@ -16,15 +16,15 @@ class Config: last_wins = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + 
dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="last_wins", ) first_wins = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="first_wins", ) diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py index 2321cf5..f78905a 100644 --- a/examples/docs/features/merging/merging_strategy_first_found.py +++ b/examples/docs/features/merging/merging_strategy_first_found.py @@ -16,9 +16,9 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "nonexistent.yaml"), - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "nonexistent.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="first_found", ) diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py index 2dafb5e..6331d63 100644 --- a/examples/docs/features/merging/merging_strategy_first_wins.py +++ b/examples/docs/features/merging/merging_strategy_first_wins.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="first_wins", ) diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py 
b/examples/docs/features/merging/merging_strategy_last_wins.py index 48fa310..7e458d4 100644 --- a/examples/docs/features/merging/merging_strategy_last_wins.py +++ b/examples/docs/features/merging/merging_strategy_last_wins.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, strategy="last_wins", ) diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py index 62fc896..aa22dac 100644 --- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py +++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), - dature.Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"), schema=Config, strategy="raise_on_conflict", ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py index 5e5fc30..b82b153 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand.py +++ b/examples/docs/features/merging/merging_tuple_shorthand.py @@ -16,8 +16,8 @@ class Config: config = dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(file=SHARED_DIR / "common_overrides.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"), schema=Config, ) diff --git a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py 
b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py index 6624bc1..e4b73cf 100644 --- a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py +++ b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py @@ -12,8 +12,8 @@ @dature.load( - dature.Source(file=SHARED_DIR / "common_defaults.yaml"), - dature.Source(prefix="APP_"), + dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), + dature.EnvSource(prefix="APP_"), ) @dataclass class Config: diff --git a/examples/docs/features/naming/naming_field_mapping.py b/examples/docs/features/naming/naming_field_mapping.py index 8cbc1b0..aa046ef 100644 --- a/examples/docs/features/naming/naming_field_mapping.py +++ b/examples/docs/features/naming/naming_field_mapping.py @@ -16,7 +16,7 @@ class DbConfig: config = dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "naming_field_mapping.yaml", field_mapping={ dature.F[DbConfig].database_url: "db_url", diff --git a/examples/docs/features/naming/naming_field_mapping_aliases.py b/examples/docs/features/naming/naming_field_mapping_aliases.py new file mode 100644 index 0000000..9b6aaaa --- /dev/null +++ b/examples/docs/features/naming/naming_field_mapping_aliases.py @@ -0,0 +1,15 @@ +"""field_mapping — multiple aliases for a single field.""" + +from dataclasses import dataclass + +import dature + + +@dataclass +class Config: + name: str + + +# --8<-- [start:aliases] +field_mapping = {dature.F[Config].name: ("fullName", "userName")} +# --8<-- [end:aliases] diff --git a/examples/docs/features/naming/naming_field_mapping_decorator.py b/examples/docs/features/naming/naming_field_mapping_decorator.py new file mode 100644 index 0000000..a4df74d --- /dev/null +++ b/examples/docs/features/naming/naming_field_mapping_decorator.py @@ -0,0 +1,9 @@ +"""Decorator mode — use string instead of class reference in F[].""" + +from dature import F + +# --8<-- [start:decorator] +field_ref = F["Config"].name # autocomplete doesn't 
work here +# --8<-- [end:decorator] + +assert field_ref is not None diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py index c72b8b9..b8e0878 100644 --- a/examples/docs/features/naming/naming_name_style.py +++ b/examples/docs/features/naming/naming_name_style.py @@ -17,7 +17,7 @@ class ApiConfig: config = dature.load( - dature.Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), + dature.Yaml12Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"), schema=ApiConfig, ) diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py index 44ea119..9845e5b 100644 --- a/examples/docs/features/naming/naming_nested_fields.py +++ b/examples/docs/features/naming/naming_nested_fields.py @@ -21,7 +21,7 @@ class User: config = dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "naming_nested_fields.yaml", field_mapping={ dature.F[User].name: "fullName", diff --git a/examples/docs/features/naming/naming_prefix.py b/examples/docs/features/naming/naming_prefix.py index 2d72039..6db4437 100644 --- a/examples/docs/features/naming/naming_prefix.py +++ b/examples/docs/features/naming/naming_prefix.py @@ -17,7 +17,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(prefix="MYAPP_"), schema=Config) +config = dature.load(dature.EnvSource(prefix="MYAPP_"), schema=Config) assert config.host == "localhost" assert config.port == 9090 diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py index 868fd94..cbe9d3b 100644 --- a/examples/docs/features/naming/naming_prefix_nested.py +++ b/examples/docs/features/naming/naming_prefix_nested.py @@ -15,7 +15,7 @@ class Database: db = dature.load( - dature.Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), + 
dature.Yaml12Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), schema=Database, ) diff --git a/examples/docs/features/naming/naming_split_symbols.py b/examples/docs/features/naming/naming_split_symbols.py index e3e5c42..472a6cd 100644 --- a/examples/docs/features/naming/naming_split_symbols.py +++ b/examples/docs/features/naming/naming_split_symbols.py @@ -20,7 +20,7 @@ class Config: db: Database -config = dature.load(dature.Source(prefix="NS_", split_symbols="__"), schema=Config) +config = dature.load(dature.EnvSource(prefix="NS_", split_symbols="__"), schema=Config) assert config.db.host == "localhost" assert config.db.port == 5432 diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py index b52925e..09d51c8 100644 --- a/examples/docs/features/validation/validation_annotated.py +++ b/examples/docs/features/validation/validation_annotated.py @@ -21,6 +21,6 @@ class ServiceConfig: dature.load( - dature.Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), + dature.Json5Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"), schema=ServiceConfig, ) diff --git a/examples/docs/features/validation/validation_annotated_combined.py b/examples/docs/features/validation/validation_annotated_combined.py new file mode 100644 index 0000000..48ff297 --- /dev/null +++ b/examples/docs/features/validation/validation_annotated_combined.py @@ -0,0 +1,15 @@ +"""Multiple Annotated validators can be combined on a single field.""" + +from dataclasses import dataclass +from typing import Annotated + +from dature.validators.number import Ge, Le +from dature.validators.sequence import MaxItems, MinItems, UniqueItems + + +@dataclass +class Config: + # --8<-- [start:combined] + port: Annotated[int, Ge(1), Le(65535)] + tags: Annotated[list[str], MinItems(1), MaxItems(10), UniqueItems()] + # --8<-- [end:combined] diff --git 
a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py index a334408..ad3d737 100644 --- a/examples/docs/features/validation/validation_custom.py +++ b/examples/docs/features/validation/validation_custom.py @@ -35,6 +35,6 @@ class ServiceConfig: dature.load( - dature.Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), + dature.Json5Source(file=SOURCES_DIR / "validation_custom_invalid.json5"), schema=ServiceConfig, ) diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py index 9d98c42..2da9fe7 100644 --- a/examples/docs/features/validation/validation_metadata.py +++ b/examples/docs/features/validation/validation_metadata.py @@ -18,7 +18,7 @@ class Config: dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "validation_metadata_invalid.yaml", validators={ dature.F[Config].host: MinLength(1), diff --git a/examples/docs/features/validation/validation_metadata_nested.py b/examples/docs/features/validation/validation_metadata_nested.py new file mode 100644 index 0000000..2137989 --- /dev/null +++ b/examples/docs/features/validation/validation_metadata_nested.py @@ -0,0 +1,26 @@ +"""Metadata validators for nested dataclass fields.""" + +from dataclasses import dataclass + +import dature +from dature.validators.number import Gt +from dature.validators.string import MinLength + + +@dataclass +class Database: + host: str + port: int + + +@dataclass +class Config: + database: Database + + +# --8<-- [start:nested] +validators = { + dature.F[Config].database.host: MinLength(1), + dature.F[Config].database.port: Gt(0), +} +# --8<-- [end:nested] diff --git a/examples/docs/features/validation/validation_metadata_syntax.py b/examples/docs/features/validation/validation_metadata_syntax.py new file mode 100644 index 0000000..7c2402b --- /dev/null +++ b/examples/docs/features/validation/validation_metadata_syntax.py @@ -0,0 
+1,21 @@ +"""Metadata validators syntax — single validator vs tuple for multiple.""" + +from dataclasses import dataclass + +import dature +from dature.validators.number import Gt, Lt +from dature.validators.string import MinLength + + +@dataclass +class Config: + host: str + port: int + + +# --8<-- [start:syntax] +validators = { + dature.F[Config].port: (Gt(0), Lt(65536)), # tuple for multiple + dature.F[Config].host: MinLength(1), # single, no tuple needed +} +# --8<-- [end:syntax] diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py index 9673cad..29ac938 100644 --- a/examples/docs/features/validation/validation_post_init.py +++ b/examples/docs/features/validation/validation_post_init.py @@ -24,4 +24,4 @@ def address(self) -> str: return f"{self.host}:{self.port}" -dature.load(dature.Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config) +dature.load(dature.Yaml12Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config) diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py index 23a3ca5..4b447ab 100644 --- a/examples/docs/features/validation/validation_root.py +++ b/examples/docs/features/validation/validation_root.py @@ -23,7 +23,7 @@ def check_debug_not_on_production(obj: Config) -> bool: dature.load( - dature.Source( + dature.Yaml12Source( file=SOURCES_DIR / "validation_root_invalid.yaml", root_validators=( RootValidator( diff --git a/examples/docs/index/intro_decorator.py b/examples/docs/index/intro_decorator.py index 18ad788..f889723 100644 --- a/examples/docs/index/intro_decorator.py +++ b/examples/docs/index/intro_decorator.py @@ -10,7 +10,7 @@ os.environ["APP_DEBUG"] = "true" -@dature.load(dature.Source(prefix="APP_")) +@dature.load(dature.EnvSource(prefix="APP_")) @dataclass class AppConfig: host: str diff --git a/examples/docs/index/intro_function.py 
b/examples/docs/index/intro_function.py index a044e21..2e91319 100644 --- a/examples/docs/index/intro_function.py +++ b/examples/docs/index/intro_function.py @@ -17,7 +17,7 @@ class AppConfig: debug: bool = False -config = dature.load(dature.Source(prefix="APP_"), schema=AppConfig) +config = dature.load(dature.EnvSource(prefix="APP_"), schema=AppConfig) assert config.host == "0.0.0.0" assert config.port == 8080 diff --git a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py index 2725da8..e20ac53 100644 --- a/examples/docs/introduction/format_docker.py +++ b/examples/docs/introduction/format_docker.py @@ -16,7 +16,7 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "intro_app_docker_secrets"), + dature.DockerSecretsSource(dir_=SOURCES_DIR / "intro_app_docker_secrets"), schema=Config, ) diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py index 5d63228..916b0ce 100644 --- a/examples/docs/introduction/format_env.py +++ b/examples/docs/introduction/format_env.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.env"), schema=Config) +config = dature.load(dature.EnvFileSource(file=SOURCES_DIR / "intro_app.env"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py index eb92f89..ab45524 100644 --- a/examples/docs/introduction/format_ini.py +++ b/examples/docs/introduction/format_ini.py @@ -16,7 +16,7 @@ class Config: config = dature.load( - dature.Source(file=SOURCES_DIR / "intro_app.ini", prefix="app"), + dature.IniSource(file=SOURCES_DIR / "intro_app.ini", prefix="app"), schema=Config, ) diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py index 068bab2..6b1e6a3 100644 --- a/examples/docs/introduction/format_json.py +++ 
b/examples/docs/introduction/format_json.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json"), schema=Config) +config = dature.load(dature.JsonSource(file=SOURCES_DIR / "intro_app.json"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_json5.py b/examples/docs/introduction/format_json5.py index b9a7c7c..1a87c84 100644 --- a/examples/docs/introduction/format_json5.py +++ b/examples/docs/introduction/format_json5.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.json5"), schema=Config) +config = dature.load(dature.Json5Source(file=SOURCES_DIR / "intro_app.json5"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py index 8c16303..869493e 100644 --- a/examples/docs/introduction/format_toml.py +++ b/examples/docs/introduction/format_toml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SOURCES_DIR / "intro_app.toml"), schema=Config) +config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "intro_app.toml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py index fc2c9f0..fd99146 100644 --- a/examples/docs/introduction/format_yaml.py +++ b/examples/docs/introduction/format_yaml.py @@ -15,7 +15,7 @@ class Config: debug: bool = False -config = dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) +config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config) assert config.host == "localhost" assert config.port == 8080 diff --git a/examples/docs/introduction/intro_decorator_file.py 
b/examples/docs/introduction/intro_decorator_file.py index 6ba75b1..8bef9f1 100644 --- a/examples/docs/introduction/intro_decorator_file.py +++ b/examples/docs/introduction/intro_decorator_file.py @@ -8,7 +8,7 @@ SHARED_DIR = Path(__file__).parents[1] / "shared" -@dature.load(dature.Source(file=SHARED_DIR / "common_app.yaml")) +@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml")) @dataclass class Config: host: str diff --git a/examples/docs/introduction/intro_decorator_override.py b/examples/docs/introduction/intro_decorator_override.py new file mode 100644 index 0000000..55c2f33 --- /dev/null +++ b/examples/docs/introduction/intro_decorator_override.py @@ -0,0 +1,24 @@ +"""Decorator mode — explicit __init__ arguments take priority over loaded values.""" + +from dataclasses import dataclass +from pathlib import Path + +import dature + +SHARED_DIR = Path(__file__).parents[1] / "shared" + + +@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml")) +@dataclass +class Config: + host: str + port: int + debug: bool = False + + +# --8<-- [start:override] +config = Config(port=9090) # host from source, port overridden +# --8<-- [end:override] + +assert config.host == "localhost" +assert config.port == 9090 diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py index 80f0ae3..4741e6c 100644 --- a/examples/docs/introduction/intro_file_like.py +++ b/examples/docs/introduction/intro_file_like.py @@ -4,7 +4,6 @@ from io import BytesIO, StringIO import dature -from dature.sources_loader.json_ import JsonLoader @dataclass @@ -16,14 +15,14 @@ class Config: # From StringIO text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}') -config = dature.load(dature.Source(file=text_stream, loader=JsonLoader), schema=Config) +config = dature.load(dature.JsonSource(file=text_stream), schema=Config) assert config.host == "localhost" assert config.port == 8080 # From BytesIO binary_stream = 
BytesIO(b'{"host": "0.0.0.0", "port": 3000}') -config = dature.load(dature.Source(file=binary_stream, loader=JsonLoader), schema=Config) +config = dature.load(dature.JsonSource(file=binary_stream), schema=Config) assert config.host == "0.0.0.0" assert config.port == 3000 diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index 3626da0..e06f5f1 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -1,28 +1,24 @@ -"""dature.load() as a function — auto-detect format by file extension.""" +"""dature.load() as a function — load from every supported format.""" from pathlib import Path from all_types_dataclass import AllPythonTypesCompact # type: ignore[import-not-found] import dature -from dature.sources_loader.docker_secrets import DockerSecretsLoader -from dature.sources_loader.toml_ import Toml10Loader -from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader SOURCES_DIR = Path(__file__).parent / "sources" FORMATS = { - "json": dature.Source(file=SOURCES_DIR / "all_types.json"), - "json5": dature.Source(file=SOURCES_DIR / "all_types.json5"), - "toml10": dature.Source(file=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader), - "toml11": dature.Source(file=SOURCES_DIR / "all_types_toml11.toml"), - "ini": dature.Source(file=SOURCES_DIR / "all_types.ini", prefix="all_types"), - "yaml11": dature.Source(file=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader), - "yaml12": dature.Source(file=SOURCES_DIR / "all_types_yaml12.yaml", loader=Yaml12Loader), - "env": dature.Source(file=SOURCES_DIR / "all_types.env"), - "docker_secrets": dature.Source( - file=SOURCES_DIR / "all_types_docker_secrets", - loader=DockerSecretsLoader, + "json": dature.JsonSource(file=SOURCES_DIR / "all_types.json"), + "json5": dature.Json5Source(file=SOURCES_DIR / "all_types.json5"), + "toml10": dature.Toml10Source(file=SOURCES_DIR / "all_types_toml10.toml"), + "toml11": dature.Toml11Source(file=SOURCES_DIR / 
"all_types_toml11.toml"), + "ini": dature.IniSource(file=SOURCES_DIR / "all_types.ini", prefix="all_types"), + "yaml11": dature.Yaml11Source(file=SOURCES_DIR / "all_types_yaml11.yaml"), + "yaml12": dature.Yaml12Source(file=SOURCES_DIR / "all_types_yaml12.yaml"), + "env": dature.EnvFileSource(file=SOURCES_DIR / "all_types.env"), + "docker_secrets": dature.DockerSecretsSource( + dir_=SOURCES_DIR / "all_types_docker_secrets", ), } @@ -30,3 +26,4 @@ config = dature.load(meta, schema=AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value == 42 + assert config.integer_value == 42 diff --git a/pyproject.toml b/pyproject.toml index 3d48522..c6d7536 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,7 +83,7 @@ ignore = [ "S105", # possible hardcoded password "FBT001", # boolean argument in function definition ] -"src/dature/sources_loader/loaders/*.py" = [ +"src/dature/loaders/*.py" = [ "FBT001", # loader functions accept bool as scalar value, not as a flag ] "docs/generate_*.py" = [ diff --git a/src/dature/__init__.py b/src/dature/__init__.py index 6fe3f9d..a7334ec 100644 --- a/src/dature/__init__.py +++ b/src/dature/__init__.py @@ -3,11 +3,29 @@ from dature.field_path import F from dature.load_report import get_load_report from dature.main import load -from dature.metadata import Source +from dature.sources.base import FileSource, Source +from dature.sources.docker_secrets import DockerSecretsSource +from dature.sources.env_ import EnvFileSource, EnvSource +from dature.sources.ini_ import IniSource +from dature.sources.json5_ import Json5Source +from dature.sources.json_ import JsonSource +from dature.sources.toml_ import Toml10Source, Toml11Source +from dature.sources.yaml_ import Yaml11Source, Yaml12Source __all__ = [ + "DockerSecretsSource", + "EnvFileSource", + "EnvSource", "F", + "FileSource", + "IniSource", + "Json5Source", + "JsonSource", "Source", + "Toml10Source", + "Toml11Source", + "Yaml11Source", + "Yaml12Source", 
"__version__", "configure", "get_load_report", diff --git a/src/dature/_descriptors.py b/src/dature/_descriptors.py new file mode 100644 index 0000000..f58b211 --- /dev/null +++ b/src/dature/_descriptors.py @@ -0,0 +1,13 @@ +from collections.abc import Callable +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dature.path_finders.base import PathFinder + + +class classproperty: # noqa: N801 + def __init__(self, func: Callable[..., "type[PathFinder]"]) -> None: + self.fget = func + + def __get__(self, obj: object | None, owner: type) -> "type[PathFinder]": + return self.fget(owner) diff --git a/src/dature/config.py b/src/dature/config.py index ffd6b79..30a1afd 100644 --- a/src/dature/config.py +++ b/src/dature/config.py @@ -67,9 +67,9 @@ class DatureConfig: def _load_config() -> DatureConfig: from dature.main import load # noqa: PLC0415 - from dature.metadata import Source # noqa: PLC0415 + from dature.sources.env_ import EnvSource # noqa: PLC0415 - return load(Source(prefix="DATURE_"), schema=DatureConfig) + return load(EnvSource(prefix="DATURE_"), schema=DatureConfig) class MaskingOptions(TypedDict, total=False): diff --git a/src/dature/errors/exceptions.py b/src/dature/errors/exceptions.py index 40e009b..90696ee 100644 --- a/src/dature/errors/exceptions.py +++ b/src/dature/errors/exceptions.py @@ -21,7 +21,7 @@ def __repr__(self) -> str: @dataclass(frozen=True, slots=True) class SourceLocation: - display_label: str + location_label: str file_path: Path | None line_range: LineRange | None line_content: list[str] | None @@ -88,7 +88,7 @@ def _format_location( suffix = f" ({loc.annotation})" if loc.annotation is not None else "" if loc.env_var_name is not None and loc.file_path is None: - main = f" {connector} {loc.display_label} '{loc.env_var_name}'" + main = f" {connector} {loc.location_label} '{loc.env_var_name}'" if loc.env_var_value is not None: main += f" = '{loc.env_var_value}'" return [main + suffix] @@ -118,7 +118,7 @@ def _format_location( def 
_format_fileline(loc: SourceLocation, *, connector: str, suffix: str = "") -> list[str]: - filemain = f" {connector} {loc.display_label} '{loc.file_path}'" + filemain = f" {connector} {loc.location_label} '{loc.file_path}'" if loc.line_range is not None: filemain += f", {loc.line_range!r}" return [filemain + suffix] diff --git a/src/dature/errors/formatter.py b/src/dature/errors/formatter.py index 0c84339..3a564ad 100644 --- a/src/dature/errors/formatter.py +++ b/src/dature/errors/formatter.py @@ -23,7 +23,7 @@ FieldLoadError, MissingEnvVarError, ) -from dature.errors.location import ErrorContext, read_filecontent, resolve_source_location +from dature.errors.location import ErrorContext, read_file_content, resolve_source_location from dature.masking.masking import is_random_string, mask_value if TYPE_CHECKING: @@ -127,17 +127,17 @@ def handle_load_errors[T]( try: return func() except EnvVarExpandError as exc: - filecontent = read_filecontent(ctx.file_path) + file_content = read_file_content(ctx.file_path) enriched_env: list[MissingEnvVarError] = [] for e in exc.exceptions: if not isinstance(e, MissingEnvVarError): continue - locations = resolve_source_location(e.field_path, ctx, filecontent) + locations = resolve_source_location(e.field_path, ctx, file_content) e.location = locations[0] if locations else None enriched_env.append(e) raise EnvVarExpandError(enriched_env, dataclass_name=ctx.dataclass_name) from exc except (AggregateLoadError, LoadError) as exc: - filecontent = read_filecontent(ctx.file_path) + file_content = read_file_content(ctx.file_path) heuristic_paths: set[str] = set() field_errors: list[FieldLoadError] = [] _walk_exception( @@ -153,7 +153,7 @@ def handle_load_errors[T]( location_ctx = replace(ctx, secret_paths=ctx.secret_paths | heuristic_paths) enriched: list[FieldLoadError] = [] for fe in field_errors: - locations = resolve_source_location(fe.field_path, location_ctx, filecontent) + locations = resolve_source_location(fe.field_path, 
location_ctx, file_content) enriched.append( FieldLoadError( field_path=fe.field_path, @@ -186,9 +186,9 @@ def enrich_skipped_errors( updated.append(exc) continue - source_reprs = ", ".join(repr(s.metadata) for s in sources) + source_reprs = ", ".join(repr(s.source) for s in sources) locations = [ - loc for s in sources for loc in resolve_source_location(exc.field_path, s.error_ctx, s.filecontent) + loc for s in sources for loc in resolve_source_location(exc.field_path, s.error_ctx, s.file_content) ] updated.append( FieldLoadError( diff --git a/src/dature/errors/location.py b/src/dature/errors/location.py index ef095c0..281c5ee 100644 --- a/src/dature/errors/location.py +++ b/src/dature/errors/location.py @@ -5,25 +5,26 @@ from dature.errors.exceptions import LineRange, SourceLocation from dature.masking.masking import mask_env_line +from dature.path_finders.base import PathFinder from dature.types import NestedConflict, NestedConflicts if TYPE_CHECKING: - from dature.protocols import LoaderProtocol + from dature.sources.base import Source @dataclass(frozen=True) class ErrorContext: dataclass_name: str - loader_class: "type[LoaderProtocol]" + source_class: "type[Source]" file_path: Path | None prefix: str | None - split_symbols: str + split_symbols: str | None = None secret_paths: frozenset[str] = frozenset() mask_secrets: bool = False nested_conflicts: NestedConflicts | None = None -def read_filecontent(file_path: Path | None) -> str | None: +def read_file_content(file_path: Path | None) -> str | None: if file_path is None: return None @@ -46,13 +47,13 @@ def _ranges_overlap(a: LineRange, b: LineRange) -> bool: def _secret_overlaps_lines( *, - filecontent: str, + file_content: str, line_range: LineRange, secret_paths: frozenset[str], prefix: str | None, - path_finder_class: type, + path_finder_class: type[PathFinder], ) -> bool: - finder = path_finder_class(filecontent) + finder = path_finder_class(file_content) for secret_path in secret_paths: search_path = 
_build_search_path(secret_path.split("."), prefix) secret_range = finder.find_line_range(search_path) @@ -74,7 +75,7 @@ def _resolve_conflict( def _apply_masking( locations: list[SourceLocation], ctx: ErrorContext, - filecontent: str | None, + file_content: str | None, *, is_secret: bool, ) -> list[SourceLocation]: @@ -85,21 +86,21 @@ def _apply_masking( not should_mask and ctx.secret_paths and location.line_range is not None - and ctx.loader_class.path_finder_class is not None - and filecontent is not None + and ctx.source_class.path_finder_class is not None + and file_content is not None ): should_mask = _secret_overlaps_lines( - filecontent=filecontent, + file_content=file_content, line_range=location.line_range, secret_paths=ctx.secret_paths, prefix=ctx.prefix, - path_finder_class=ctx.loader_class.path_finder_class, + path_finder_class=ctx.source_class.path_finder_class, ) if should_mask and location.line_content is not None: masked_lines = [mask_env_line(line) for line in location.line_content] result.append( SourceLocation( - display_label=location.display_label, + location_label=location.location_label, file_path=location.file_path, line_range=location.line_range, line_content=masked_lines, @@ -114,18 +115,18 @@ def _apply_masking( def resolve_source_location( field_path: list[str], ctx: ErrorContext, - filecontent: str | None, + file_content: str | None, ) -> list[SourceLocation]: is_secret = ".".join(field_path) in ctx.secret_paths conflict = _resolve_conflict(field_path, ctx) - locations = ctx.loader_class.resolve_location( - field_path, - ctx.file_path, - filecontent, - ctx.prefix, - ctx.split_symbols, - conflict, + locations = ctx.source_class.resolve_location( + field_path=field_path, + file_path=ctx.file_path, + file_content=file_content, + prefix=ctx.prefix, + nested_conflict=conflict, + split_symbols=ctx.split_symbols, ) - return _apply_masking(locations, ctx, filecontent, is_secret=is_secret) + return _apply_masking(locations, ctx, file_content, 
is_secret=is_secret) diff --git a/src/dature/sources_loader/loaders/__init__.py b/src/dature/loaders/__init__.py similarity index 92% rename from src/dature/sources_loader/loaders/__init__.py rename to src/dature/loaders/__init__.py index de9cfeb..a0b94bc 100644 --- a/src/dature/sources_loader/loaders/__init__.py +++ b/src/dature/loaders/__init__.py @@ -1,4 +1,4 @@ -from dature.sources_loader.loaders.base import ( +from dature.loaders.base import ( base64url_bytes_from_string, base64url_str_from_string, byte_size_from_string, @@ -9,7 +9,7 @@ timedelta_from_string, url_from_string, ) -from dature.sources_loader.loaders.common import ( +from dature.loaders.common import ( bool_loader, bytearray_from_json_string, bytearray_from_string, diff --git a/src/dature/sources_loader/loaders/base.py b/src/dature/loaders/base.py similarity index 100% rename from src/dature/sources_loader/loaders/base.py rename to src/dature/loaders/base.py diff --git a/src/dature/sources_loader/loaders/common.py b/src/dature/loaders/common.py similarity index 100% rename from src/dature/sources_loader/loaders/common.py rename to src/dature/loaders/common.py diff --git a/src/dature/sources_loader/loaders/json5_.py b/src/dature/loaders/json5_.py similarity index 100% rename from src/dature/sources_loader/loaders/json5_.py rename to src/dature/loaders/json5_.py diff --git a/src/dature/sources_loader/loaders/toml_.py b/src/dature/loaders/toml_.py similarity index 100% rename from src/dature/sources_loader/loaders/toml_.py rename to src/dature/loaders/toml_.py diff --git a/src/dature/sources_loader/loaders/yaml_.py b/src/dature/loaders/yaml_.py similarity index 100% rename from src/dature/sources_loader/loaders/yaml_.py rename to src/dature/loaders/yaml_.py diff --git a/src/dature/loading/common.py b/src/dature/loading/common.py new file mode 100644 index 0000000..5ea072b --- /dev/null +++ b/src/dature/loading/common.py @@ -0,0 +1,9 @@ +from dature.config import config + + +def 
resolve_mask_secrets(*, source_level: bool | None = None, load_level: bool | None = None) -> bool: + if source_level is not None: + return source_level + if load_level is not None: + return load_level + return config.masking.mask_secrets diff --git a/src/dature/loading/context.py b/src/dature/loading/context.py index cea2c68..d7ff494 100644 --- a/src/dature/loading/context.py +++ b/src/dature/loading/context.py @@ -3,7 +3,6 @@ from collections.abc import Callable from dataclasses import Field, asdict, fields, is_dataclass from enum import Flag -from pathlib import Path from typing import Any, Protocol, cast, get_type_hints, runtime_checkable from adaptix import Retort @@ -11,12 +10,12 @@ from dature.errors.formatter import handle_load_errors from dature.errors.location import ErrorContext from dature.field_path import FieldPath -from dature.loading.resolver import resolve_loader_class from dature.merging.predicate import extract_field_path -from dature.metadata import Source -from dature.protocols import DataclassInstance, LoaderProtocol +from dature.protocols import DataclassInstance from dature.skip_field_provider import FilterResult, filter_invalid_fields -from dature.types import FILE_LIKE_TYPES, JSONValue, NestedConflicts +from dature.sources.base import FlatKeySource, Source +from dature.sources.retort import create_probe_retort +from dature.types import JSONValue, NestedConflicts logger = logging.getLogger("dature") @@ -49,19 +48,15 @@ def build_error_ctx( mask_secrets: bool = False, nested_conflicts: NestedConflicts | None = None, ) -> ErrorContext: - loader_class = resolve_loader_class(metadata.loader, metadata.file) - if isinstance(metadata.file, FILE_LIKE_TYPES): - error_file_path = None - elif metadata.file is not None: - error_file_path = Path(metadata.file) - else: - error_file_path = None + error_file_path = metadata.file_path_for_errors() + + split_symbols = metadata.split_symbols if isinstance(metadata, FlatKeySource) else None return ErrorContext( 
dataclass_name=dataclass_name, - loader_class=loader_class, + source_class=type(metadata), file_path=error_file_path, prefix=metadata.prefix, - split_symbols=metadata.split_symbols, + split_symbols=split_symbols, secret_paths=secret_paths, mask_secrets=mask_secrets, nested_conflicts=nested_conflicts, @@ -76,7 +71,7 @@ def get_allowed_fields( if skip_value is True: return None if isinstance(skip_value, tuple): - return {extract_field_path(fp, schema) for fp in skip_value} + return {extract_field_path(field_path, schema) for field_path in skip_value} return None @@ -84,7 +79,7 @@ def apply_skip_invalid( *, raw: JSONValue, skip_if_invalid: bool | tuple[FieldPath, ...] | None, - loader_instance: LoaderProtocol, + source: Source, schema: type[DataclassInstance], log_prefix: str, probe_retort: Retort | None = None, @@ -95,7 +90,7 @@ def apply_skip_invalid( allowed_fields = get_allowed_fields(skip_value=skip_if_invalid, schema=schema) if probe_retort is None: - probe_retort = loader_instance.create_probe_retort() + probe_retort = create_probe_retort(source) result = filter_invalid_fields(raw, probe_retort, schema, allowed_fields) for path in result.skipped_paths: @@ -126,13 +121,6 @@ def merge_fields( return complete_kwargs -def ensure_retort(loader_instance: LoaderProtocol, cls: type[DataclassInstance]) -> None: - """Creates a replacement response to __init__ so that Adaptix sees the original signature.""" - if cls not in loader_instance.retorts: - loader_instance.retorts[cls] = loader_instance.create_retort() - loader_instance.retorts[cls].get_loader(cls) - - @runtime_checkable class PatchContext(Protocol): loading: bool diff --git a/src/dature/loading/merge_config.py b/src/dature/loading/merge_config.py new file mode 100644 index 0000000..547f953 --- /dev/null +++ b/src/dature/loading/merge_config.py @@ -0,0 +1,31 @@ +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from dature.merging.strategy import MergeStrategyEnum +from dature.sources.base 
import Source + +if TYPE_CHECKING: + from dature.types import ( + ExpandEnvVarsMode, + FieldGroupTuple, + FieldMergeMap, + NestedResolve, + NestedResolveStrategy, + TypeLoaderMap, + ) + + +@dataclass(slots=True, kw_only=True) +class MergeConfig: + sources: tuple[Source, ...] + strategy: MergeStrategyEnum = MergeStrategyEnum.LAST_WINS + field_merges: "FieldMergeMap | None" = None + field_groups: "tuple[FieldGroupTuple, ...]" = () + skip_broken_sources: bool = False + skip_invalid_fields: bool = False + expand_env_vars: "ExpandEnvVarsMode" = "default" + secret_field_names: tuple[str, ...] | None = None + mask_secrets: bool | None = None + type_loaders: "TypeLoaderMap | None" = None + nested_resolve_strategy: "NestedResolveStrategy | None" = None + nested_resolve: "NestedResolve | None" = None diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py index b46f186..d1b0aa1 100644 --- a/src/dature/loading/multi.py +++ b/src/dature/loading/multi.py @@ -4,7 +4,6 @@ from dataclasses import fields, is_dataclass from typing import Any -from dature.config import config from dature.errors import DatureConfigError from dature.errors.formatter import enrich_skipped_errors, handle_load_errors from dature.load_report import ( @@ -15,40 +14,39 @@ compute_field_origins, get_load_report, ) +from dature.loading.common import resolve_mask_secrets from dature.loading.context import ( build_error_ctx, coerce_flag_fields, - ensure_retort, make_validating_post_init, merge_fields, ) -from dature.loading.resolver import resolve_loader -from dature.loading.source_loading import load_sources, resolve_expand_env_vars +from dature.loading.merge_config import MergeConfig +from dature.loading.source_loading import ResolvedSourceParams, load_sources, resolve_source_params from dature.masking.detection import build_secret_paths from dature.masking.masking import mask_field_origins, mask_json_value, mask_source_entries, mask_value from dature.merging.deep_merge import deep_merge, 
deep_merge_last_wins, raise_on_conflict from dature.merging.field_group import FieldGroupContext, validate_field_groups from dature.merging.predicate import ResolvedFieldGroup, build_field_group_paths, build_field_merge_map from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum -from dature.metadata import Source, _MergeConfig -from dature.protocols import DataclassInstance, LoaderProtocol -from dature.types import FieldMergeCallable, JSONValue, TypeLoaderMap +from dature.protocols import DataclassInstance +from dature.sources.base import Source +from dature.sources.retort import ( + create_validating_retort, + ensure_retort, + transform_to_dataclass, +) +from dature.types import FieldMergeCallable, JSONValue logger = logging.getLogger("dature") -def _resolve_merge_mask_secrets(merge_meta: _MergeConfig) -> bool: - if merge_meta.mask_secrets is not None: - return merge_meta.mask_secrets - return config.masking.mask_secrets - - -def _collect_extra_secret_patterns(merge_meta: _MergeConfig) -> tuple[str, ...]: +def _collect_extra_secret_patterns(merge_meta: MergeConfig) -> tuple[str, ...]: merge_names = merge_meta.secret_field_names or () source_names: list[str] = [] - for source_meta in merge_meta.sources: - if source_meta.secret_field_names is not None: - source_names.extend(source_meta.secret_field_names) + for source_item in merge_meta.sources: + if source_item.secret_field_names is not None: + source_names.extend(source_item.secret_field_names) return merge_names + tuple(source_names) @@ -258,20 +256,18 @@ def _merge_raw_dicts( class _MergedData[T: DataclassInstance]: result: T merged_raw: JSONValue - last_loader: LoaderProtocol - last_source_meta: Source + last_source: Source + last_resolved: ResolvedSourceParams def _load_and_merge[T: DataclassInstance]( # noqa: C901 *, - merge_meta: _MergeConfig, + merge_meta: MergeConfig, schema: type[T], - loaders: tuple[LoaderProtocol, ...] 
| None = None, debug: bool = False, - type_loaders: TypeLoaderMap | None = None, ) -> _MergedData[T]: secret_paths: frozenset[str] = frozenset() - if _resolve_merge_mask_secrets(merge_meta): + if resolve_mask_secrets(load_level=merge_meta.mask_secrets): extra_patterns = _collect_extra_secret_patterns(merge_meta) secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) @@ -279,10 +275,8 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 merge_meta=merge_meta, dataclass_name=schema.__name__, schema=schema, - loaders=loaders, secret_paths=secret_paths, - mask_secrets=_resolve_merge_mask_secrets(merge_meta), - type_loaders=type_loaders, + mask_secrets=resolve_mask_secrets(load_level=merge_meta.mask_secrets), ) merge_maps = build_field_merge_map(merge_meta.field_merges, schema) @@ -354,11 +348,17 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 secret_paths=secret_paths, ) + last_resolved = loaded.last_resolved last_error_ctx = loaded.source_ctxs[-1].error_ctx merged = coerce_flag_fields(merged, schema) try: result = handle_load_errors( - func=lambda: loaded.last_loader.transform_to_dataclass(merged, schema), + func=lambda: transform_to_dataclass( + loaded.last_source, + merged, + schema, + resolved_type_loaders=last_resolved.type_loaders, + ), ctx=last_error_ctx, ) except DatureConfigError as exc: @@ -371,34 +371,36 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901 if report is not None: attach_load_report(result, report) - last_source_idx = loaded.source_entries[-1].index return _MergedData( result=result, merged_raw=merged, - last_loader=loaded.last_loader, - last_source_meta=merge_meta.sources[last_source_idx], + last_source=loaded.last_source, + last_resolved=loaded.last_resolved, ) def merge_load_as_function[T: DataclassInstance]( - merge_meta: _MergeConfig, + merge_meta: MergeConfig, schema: type[T], *, debug: bool, - type_loaders: TypeLoaderMap | None = None, ) -> T: data = _load_and_merge( merge_meta=merge_meta, 
schema=schema, debug=debug, - type_loaders=type_loaders, ) - validating_retort = data.last_loader.create_validating_retort(schema) + last_resolved = data.last_resolved + validating_retort = create_validating_retort( + data.last_source, + schema, + resolved_type_loaders=last_resolved.type_loaders, + ) validation_loader = validating_retort.get_loader(schema) - last_meta = data.last_source_meta - mask_secrets = _resolve_merge_mask_secrets(merge_meta) + last_meta = data.last_source + mask_secrets = resolve_mask_secrets(load_level=merge_meta.mask_secrets) secret_paths: frozenset[str] = frozenset() if mask_secrets: extra_patterns = _collect_extra_secret_patterns(merge_meta) @@ -428,19 +430,17 @@ class _MergePatchContext: def __init__( self, *, - merge_meta: _MergeConfig, + merge_meta: MergeConfig, cls: type[DataclassInstance], cache: bool, debug: bool, - type_loaders: TypeLoaderMap | None = None, ) -> None: - self.loaders = self._prepare_loaders(merge_meta=merge_meta, cls=cls, type_loaders=type_loaders) + self._prepare_sources(merge_meta=merge_meta, cls=cls) self.merge_meta = merge_meta self.cls = cls self.cache = cache self.debug = debug - self.type_loaders = type_loaders self.cached_data: DataclassInstance | None = None self.field_list = fields(cls) self.original_init = cls.__init__ @@ -448,11 +448,23 @@ def __init__( self.loading = False self.validating = False - last_loader = self.loaders[-1] - validating_retort = last_loader.create_validating_retort(cls) + last_source = merge_meta.sources[-1] + last_resolved = resolve_source_params( + last_source, + load_expand_env_vars=merge_meta.expand_env_vars, + load_type_loaders=merge_meta.type_loaders, + load_nested_resolve_strategy=merge_meta.nested_resolve_strategy, + load_nested_resolve=merge_meta.nested_resolve, + ) + ensure_retort(last_source, cls, resolved_type_loaders=last_resolved.type_loaders) + validating_retort = create_validating_retort( + last_source, + cls, + resolved_type_loaders=last_resolved.type_loaders, + ) 
self.validation_loader: Callable[[JSONValue], DataclassInstance] = validating_retort.get_loader(cls) - mask_secrets = _resolve_merge_mask_secrets(merge_meta) + mask_secrets = resolve_mask_secrets(load_level=merge_meta.mask_secrets) self.secret_paths: frozenset[str] = frozenset() if mask_secrets: extra_patterns = _collect_extra_secret_patterns(merge_meta) @@ -466,37 +478,21 @@ def __init__( mask_secrets=mask_secrets, ) - @staticmethod - def _prepare_loaders( + def _prepare_sources( + self, *, - merge_meta: _MergeConfig, + merge_meta: MergeConfig, cls: type[DataclassInstance], - type_loaders: TypeLoaderMap | None = None, - ) -> tuple[LoaderProtocol, ...]: - loaders: list[LoaderProtocol] = [] - for source_meta in merge_meta.sources: - resolved_expand = resolve_expand_env_vars(source_meta, merge_meta) - source_type_loaders = {**(type_loaders or {}), **(source_meta.type_loaders or {})} - resolved_strategy = ( - source_meta.nested_resolve_strategy - or merge_meta.nested_resolve_strategy - or config.loading.nested_resolve_strategy - ) - resolve_kwargs: dict[str, Any] = { - "expand_env_vars": resolved_expand, - "type_loaders": source_type_loaders, - "nested_resolve_strategy": resolved_strategy, - } - resolved_resolve = source_meta.nested_resolve or merge_meta.nested_resolve - if resolved_resolve is not None: - resolve_kwargs["nested_resolve"] = resolved_resolve - loader_instance = resolve_loader( - source_meta, - **resolve_kwargs, + ) -> None: + for source_item in merge_meta.sources: + resolved = resolve_source_params( + source_item, + load_expand_env_vars=merge_meta.expand_env_vars, + load_type_loaders=merge_meta.type_loaders, + load_nested_resolve_strategy=merge_meta.nested_resolve_strategy, + load_nested_resolve=merge_meta.nested_resolve, ) - ensure_retort(loader_instance, cls) - loaders.append(loader_instance) - return tuple(loaders) + ensure_retort(source_item, cls, resolved_type_loaders=resolved.type_loaders) def _make_merge_new_init(ctx: _MergePatchContext) -> 
Callable[..., None]: @@ -513,15 +509,13 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq merged_data = _load_and_merge( merge_meta=ctx.merge_meta, schema=ctx.cls, - loaders=ctx.loaders, debug=ctx.debug, - type_loaders=ctx.type_loaders, ) finally: ctx.loading = False loaded_data = merged_data.result ctx.error_ctx = build_error_ctx( - merged_data.last_source_meta, + merged_data.last_source, ctx.cls.__name__, secret_paths=ctx.secret_paths, mask_secrets=ctx.error_ctx.mask_secrets, @@ -544,11 +538,10 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq def merge_make_decorator( - merge_meta: _MergeConfig, + merge_meta: MergeConfig, *, cache: bool, debug: bool, - type_loaders: TypeLoaderMap | None = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: if not is_dataclass(cls): @@ -560,7 +553,6 @@ def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: cls=cls, cache=cache, debug=debug, - type_loaders=type_loaders, ) cls.__init__ = _make_merge_new_init(ctx) # type: ignore[method-assign] cls.__post_init__ = make_validating_post_init(ctx) # type: ignore[attr-defined] diff --git a/src/dature/loading/resolver.py b/src/dature/loading/resolver.py deleted file mode 100644 index 9871785..0000000 --- a/src/dature/loading/resolver.py +++ /dev/null @@ -1,136 +0,0 @@ -from pathlib import Path -from typing import TYPE_CHECKING, Any - -from dature.sources_loader.docker_secrets import DockerSecretsLoader -from dature.sources_loader.env_ import EnvFileLoader, EnvLoader -from dature.sources_loader.ini_ import IniLoader -from dature.sources_loader.json_ import JsonLoader -from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, NestedResolve, NestedResolveStrategy - -if TYPE_CHECKING: - from dature.metadata import Source - from dature.protocols import LoaderProtocol - from dature.types import FileLike, FilePath, 
TypeLoaderMap - -SUPPORTED_EXTENSIONS = (".cfg", ".env", ".ini", ".json", ".json5", ".toml", ".yaml", ".yml") - -_EXTRA_BY_EXTENSION: dict[str, str] = { - ".toml": "toml", - ".yaml": "yaml", - ".yml": "yaml", - ".json5": "json5", -} - - -def _resolve_by_extension(extension: str) -> "type[LoaderProtocol]": - try: - return _resolve_by_extension_inner(extension) - except ImportError: - extra = _EXTRA_BY_EXTENSION.get(extension) - if extra is None: - raise - msg = f"To use '{extension}' files, install the '{extra}' extra: pip install dature[{extra}]" - raise ImportError(msg) from None - - -def _resolve_by_extension_inner(extension: str) -> "type[LoaderProtocol]": - match extension: - case ".json": - return JsonLoader - case ".toml": - from dature.sources_loader.toml_ import Toml11Loader # noqa: PLC0415 - - return Toml11Loader - case ".ini" | ".cfg": - return IniLoader - case ".env": - return EnvFileLoader - case ".yaml" | ".yml": - from dature.sources_loader.yaml_ import Yaml12Loader # noqa: PLC0415 - - return Yaml12Loader - case ".json5": - from dature.sources_loader.json5_ import Json5Loader # noqa: PLC0415 - - return Json5Loader - case _: - supported = ", ".join(SUPPORTED_EXTENSIONS) - msg = ( - f"Cannot determine loader type for extension '{extension}'. " - f"Please specify loader explicitly or use a supported extension: {supported}" - ) - raise ValueError(msg) - - -def resolve_loader_class( - loader: "type[LoaderProtocol] | None", - file: "FileLike | FilePath | None", -) -> "type[LoaderProtocol]": - if loader is not None: - if file is not None and not isinstance(file, FILE_LIKE_TYPES) and loader is EnvLoader: - msg = ( - "EnvLoader reads from environment variables and does not use files. " - "Remove file or use a file-based loader instead (e.g. EnvFileLoader)." - ) - raise ValueError(msg) - if isinstance(file, FILE_LIKE_TYPES) and loader in (EnvLoader, DockerSecretsLoader): - msg = ( - f"{loader.__name__} does not support file-like objects. 
" - "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects." - ) - raise ValueError(msg) - return loader - - if isinstance(file, FILE_LIKE_TYPES): - msg = ( - "Cannot determine loader type for a file-like object. " - "Please specify loader explicitly (e.g. loader=JsonLoader)." - ) - raise TypeError(msg) - - if file is None: - return EnvLoader - - # file-like objects are handled above; here file is str | Path - file_path = Path(file) - - if file_path.is_dir(): - return DockerSecretsLoader - - if file_path.name.startswith(".env"): - return EnvFileLoader - - return _resolve_by_extension(file_path.suffix.lower()) - - -def resolve_loader( - metadata: "Source", - *, - expand_env_vars: ExpandEnvVarsMode | None = None, - type_loaders: "TypeLoaderMap | None" = None, - nested_resolve_strategy: NestedResolveStrategy = "flat", - nested_resolve: NestedResolve | None = None, -) -> "LoaderProtocol": - loader_class = resolve_loader_class(metadata.loader, metadata.file) - - resolved_expand = metadata.expand_env_vars or expand_env_vars or "default" - - kwargs: dict[str, Any] = { - "prefix": metadata.prefix, - "name_style": metadata.name_style, - "field_mapping": metadata.field_mapping, - "root_validators": metadata.root_validators, - "validators": metadata.validators, - "expand_env_vars": resolved_expand, - "type_loaders": type_loaders, - } - - if issubclass(loader_class, (EnvLoader, DockerSecretsLoader)): - kwargs["split_symbols"] = metadata.split_symbols - resolved_strategy = metadata.nested_resolve_strategy or nested_resolve_strategy - kwargs["nested_resolve_strategy"] = resolved_strategy - resolved_resolve = metadata.nested_resolve or nested_resolve - if resolved_resolve is not None: - kwargs["nested_resolve"] = resolved_resolve - - return loader_class(**kwargs) diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py index f0a59cf..d1eab16 100644 --- a/src/dature/loading/single.py +++ b/src/dature/loading/single.py @@ -3,39 +3,47 @@ from 
dataclasses import asdict, fields, is_dataclass from typing import TYPE_CHECKING, Any -from dature.config import config from dature.errors import DatureConfigError from dature.errors.formatter import enrich_skipped_errors, handle_load_errors -from dature.errors.location import read_filecontent +from dature.errors.location import read_file_content from dature.load_report import FieldOrigin, LoadReport, SourceEntry, attach_load_report +from dature.loading.common import resolve_mask_secrets from dature.loading.context import ( apply_skip_invalid, build_error_ctx, coerce_flag_fields, - ensure_retort, make_validating_post_init, merge_fields, ) -from dature.loading.resolver import resolve_loader_class -from dature.loading.source_loading import SkippedFieldSource +from dature.loading.source_loading import ( + ResolvedSourceParams, + SkippedFieldSource, + load_source_raw, + resolve_source_params, +) from dature.masking.detection import build_secret_paths from dature.masking.masking import mask_json_value -from dature.metadata import Source -from dature.protocols import DataclassInstance, LoaderProtocol -from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue +from dature.protocols import DataclassInstance +from dature.sources.base import Source +from dature.sources.retort import ( + create_probe_retort, + create_validating_retort, + ensure_retort, + transform_to_dataclass, +) +from dature.types import JSONValue if TYPE_CHECKING: from adaptix import Retort -logger = logging.getLogger("dature") - + from dature.types import ( + ExpandEnvVarsMode, + NestedResolve, + NestedResolveStrategy, + TypeLoaderMap, + ) -def _resolve_single_mask_secrets(metadata: Source, *, load_level: bool | None = None) -> bool: - if metadata.mask_secrets is not None: - return metadata.mask_secrets - if load_level is not None: - return load_level - return config.masking.mask_secrets +logger = logging.getLogger("dature") def _log_single_source_load( @@ -107,22 +115,24 @@ class _PatchContext: 
def __init__( # noqa: PLR0913 self, *, - loader_instance: LoaderProtocol, - file_path: FileOrStream, + source: Source, cls: type[DataclassInstance], - metadata: Source, cache: bool, debug: bool, secret_field_names: tuple[str, ...] | None = None, mask_secrets: bool | None = None, + resolved: ResolvedSourceParams, ) -> None: - ensure_retort(loader_instance, cls) - validating_retort = loader_instance.create_validating_retort(cls) + self.resolved = resolved + ensure_retort(source, cls, resolved_type_loaders=self.resolved.type_loaders) + validating_retort = create_validating_retort( + source, + cls, + resolved_type_loaders=self.resolved.type_loaders, + ) - self.loader_instance = loader_instance - self.file_path = file_path + self.source = source self.cls = cls - self.metadata = metadata self.cache = cache self.debug = debug self.cached_data: DataclassInstance | None = None @@ -133,17 +143,16 @@ def __init__( # noqa: PLR0913 self.validating = False self.loading = False - loader_class = resolve_loader_class(metadata.loader, metadata.file) - self.loader_type = loader_class.display_name + self.loader_type = source.format_name - resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) + resolved_mask_secrets = resolve_mask_secrets(source_level=source.mask_secrets, load_level=mask_secrets) self.secret_paths: frozenset[str] = frozenset() if resolved_mask_secrets: - extra_patterns = (metadata.secret_field_names or ()) + (secret_field_names or ()) + extra_patterns = (source.secret_field_names or ()) + (secret_field_names or ()) self.secret_paths = build_secret_paths(cls, extra_patterns=extra_patterns) self.error_ctx = build_error_ctx( - metadata, + source, cls.__name__, secret_paths=self.secret_paths, mask_secrets=resolved_mask_secrets, @@ -151,21 +160,21 @@ def __init__( # noqa: PLR0913 # probe_retort is created early so adaptix sees the original signature self.probe_retort: Retort | None = None - if metadata.skip_if_invalid: - self.probe_retort = 
loader_instance.create_probe_retort() + if source.skip_if_invalid: + self.probe_retort = create_probe_retort(source, resolved_type_loaders=self.resolved.type_loaders) self.probe_retort.get_loader(cls) def _load_single_source(ctx: _PatchContext) -> DataclassInstance: load_result = handle_load_errors( - func=lambda: ctx.loader_instance.load_raw(ctx.file_path), + func=lambda: load_source_raw(ctx.source, ctx.resolved), ctx=ctx.error_ctx, ) raw_data = load_result.data if load_result.nested_conflicts: ctx.error_ctx = build_error_ctx( - ctx.metadata, + ctx.source, ctx.cls.__name__, secret_paths=ctx.secret_paths, mask_secrets=ctx.error_ctx.mask_secrets, @@ -174,8 +183,8 @@ def _load_single_source(ctx: _PatchContext) -> DataclassInstance: filter_result = apply_skip_invalid( raw=raw_data, - skip_if_invalid=ctx.metadata.skip_if_invalid, - loader_instance=ctx.loader_instance, + skip_if_invalid=ctx.source.skip_if_invalid, + source=ctx.source, schema=ctx.cls, log_prefix=f"[{ctx.cls.__name__}]", probe_retort=ctx.probe_retort, @@ -184,14 +193,14 @@ def _load_single_source(ctx: _PatchContext) -> DataclassInstance: raw_data = coerce_flag_fields(raw_data, ctx.cls) skipped_fields: dict[str, list[SkippedFieldSource]] = {} - filecontent = read_filecontent(ctx.error_ctx.file_path) + file_content = read_file_content(ctx.error_ctx.file_path) for path in filter_result.skipped_paths: skipped_fields.setdefault(path, []).append( - SkippedFieldSource(metadata=ctx.metadata, error_ctx=ctx.error_ctx, filecontent=filecontent), + SkippedFieldSource(source=ctx.source, error_ctx=ctx.error_ctx, file_content=file_content), ) - def _transform(rd: JSONValue = raw_data) -> DataclassInstance: - return ctx.loader_instance.transform_to_dataclass(rd, ctx.cls) + def _transform(data: JSONValue = raw_data) -> DataclassInstance: + return transform_to_dataclass(ctx.source, data, ctx.cls, resolved_type_loaders=ctx.resolved.type_loaders) try: loaded_data = handle_load_errors( @@ -224,7 +233,7 @@ def new_init(self: 
DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq _log_single_source_load( dataclass_name=ctx.cls.__name__, loader_type=ctx.loader_type, - file_path="" if isinstance(ctx.file_path, FILE_LIKE_TYPES) else str(ctx.file_path), + file_path=ctx.source.file_display() or "", data=asdict(loaded_data), secret_paths=ctx.secret_paths, ) @@ -240,9 +249,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq report = _build_single_source_report( dataclass_name=ctx.cls.__name__, loader_type=ctx.loader_type, - file_path=str(ctx.file_path) - if not isinstance(ctx.metadata.file, (*FILE_LIKE_TYPES, type(None))) - else None, + file_path=str(path) if (path := ctx.source.file_path_for_errors()) else None, raw_data=result_dict, secret_paths=ctx.secret_paths, ) @@ -254,40 +261,48 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq return new_init -def load_as_function( # noqa: C901, PLR0912, PLR0913 +def load_as_function( # noqa: C901, PLR0913 *, - loader_instance: LoaderProtocol, - file_path: FileOrStream, + source: Source, schema: type[DataclassInstance], - metadata: Source, debug: bool, secret_field_names: tuple[str, ...] 
| None = None, mask_secrets: bool | None = None, + expand_env_vars: "ExpandEnvVarsMode | None" = None, + type_loaders: "TypeLoaderMap | None" = None, + nested_resolve_strategy: "NestedResolveStrategy | None" = None, + nested_resolve: "NestedResolve | None" = None, ) -> DataclassInstance: - loader_class = resolve_loader_class(metadata.loader, metadata.file) - display_name = loader_class.display_name + resolved = resolve_source_params( + source, + load_expand_env_vars=expand_env_vars, + load_type_loaders=type_loaders, + load_nested_resolve_strategy=nested_resolve_strategy, + load_nested_resolve=nested_resolve, + ) + format_name = source.format_name secret_paths: frozenset[str] = frozenset() - resolved_mask_secrets = _resolve_single_mask_secrets(metadata, load_level=mask_secrets) + resolved_mask_secrets = resolve_mask_secrets(source_level=source.mask_secrets, load_level=mask_secrets) if resolved_mask_secrets: - extra_patterns = (metadata.secret_field_names or ()) + (secret_field_names or ()) + extra_patterns = (source.secret_field_names or ()) + (secret_field_names or ()) secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns) error_ctx = build_error_ctx( - metadata, + source, schema.__name__, secret_paths=secret_paths, mask_secrets=resolved_mask_secrets, ) load_result = handle_load_errors( - func=lambda: loader_instance.load_raw(file_path), + func=lambda: load_source_raw(source, resolved), ctx=error_ctx, ) raw_data = load_result.data if load_result.nested_conflicts: error_ctx = build_error_ctx( - metadata, + source, schema.__name__, secret_paths=secret_paths, mask_secrets=resolved_mask_secrets, @@ -296,31 +311,27 @@ def load_as_function( # noqa: C901, PLR0912, PLR0913 filter_result = apply_skip_invalid( raw=raw_data, - skip_if_invalid=metadata.skip_if_invalid, - loader_instance=loader_instance, + skip_if_invalid=source.skip_if_invalid, + source=source, schema=schema, log_prefix=f"[{schema.__name__}]", ) raw_data = filter_result.cleaned_dict 
skipped_fields: dict[str, list[SkippedFieldSource]] = {} - filecontent = read_filecontent(error_ctx.file_path) + file_content = read_file_content(error_ctx.file_path) for path in filter_result.skipped_paths: skipped_fields.setdefault(path, []).append( - SkippedFieldSource(metadata=metadata, error_ctx=error_ctx, filecontent=filecontent), + SkippedFieldSource(source=source, error_ctx=error_ctx, file_content=file_content), ) report: LoadReport | None = None if debug: - if isinstance(metadata.file, FILE_LIKE_TYPES): - report_file_path = None - elif metadata.file is not None: - report_file_path = str(metadata.file) - else: - report_file_path = None + source_path = source.file_path_for_errors() + report_file_path = str(source_path) if source_path is not None else None report = _build_single_source_report( dataclass_name=schema.__name__, - loader_type=display_name, + loader_type=format_name, file_path=report_file_path, raw_data=raw_data, secret_paths=secret_paths, @@ -328,13 +339,17 @@ def load_as_function( # noqa: C901, PLR0912, PLR0913 _log_single_source_load( dataclass_name=schema.__name__, - loader_type=display_name, - file_path="" if isinstance(file_path, FILE_LIKE_TYPES) else str(file_path), + loader_type=format_name, + file_path=source.file_display() or "", data=raw_data if isinstance(raw_data, dict) else {}, secret_paths=secret_paths, ) - validating_retort = loader_instance.create_validating_retort(schema) + validating_retort = create_validating_retort( + source, + schema, + resolved_type_loaders=resolved.type_loaders, + ) validation_loader = validating_retort.get_loader(schema) raw_data = coerce_flag_fields(raw_data, schema) @@ -352,7 +367,12 @@ def load_as_function( # noqa: C901, PLR0912, PLR0913 try: result = handle_load_errors( - func=lambda: loader_instance.transform_to_dataclass(raw_data, schema), + func=lambda: transform_to_dataclass( + source, + raw_data, + schema, + resolved_type_loaders=resolved.type_loaders, + ), ctx=error_ctx, ) except 
DatureConfigError as exc: @@ -370,28 +390,37 @@ def load_as_function( # noqa: C901, PLR0912, PLR0913 def make_decorator( # noqa: PLR0913 *, - loader_instance: LoaderProtocol, - file_path: FileOrStream, - metadata: Source, + source: Source, cache: bool, debug: bool, secret_field_names: tuple[str, ...] | None = None, mask_secrets: bool | None = None, + expand_env_vars: "ExpandEnvVarsMode | None" = None, + type_loaders: "TypeLoaderMap | None" = None, + nested_resolve_strategy: "NestedResolveStrategy | None" = None, + nested_resolve: "NestedResolve | None" = None, ) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: + resolved = resolve_source_params( + source, + load_expand_env_vars=expand_env_vars, + load_type_loaders=type_loaders, + load_nested_resolve_strategy=nested_resolve_strategy, + load_nested_resolve=nested_resolve, + ) + def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]: if not is_dataclass(cls): msg = f"{cls.__name__} must be a dataclass" raise TypeError(msg) ctx = _PatchContext( - loader_instance=loader_instance, - file_path=file_path, + source=source, cls=cls, - metadata=metadata, cache=cache, debug=debug, secret_field_names=secret_field_names, mask_secrets=mask_secrets, + resolved=resolved, ) cls.__init__ = _make_new_init(ctx) # type: ignore[method-assign] cls.__post_init__ = make_validating_post_init(ctx) # type: ignore[attr-defined] diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index 168c4a0..ec8fef0 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -1,73 +1,125 @@ import logging from dataclasses import dataclass -from pathlib import Path +from functools import partial from dature.config import config from dature.errors import DatureConfigError, SourceLoadError, SourceLocation from dature.errors.formatter import handle_load_errors -from dature.errors.location import ErrorContext, read_filecontent +from dature.errors.location 
import ErrorContext, read_file_content from dature.field_path import FieldPath from dature.load_report import SourceEntry from dature.loading.context import apply_skip_invalid, build_error_ctx -from dature.loading.resolver import resolve_loader, resolve_loader_class +from dature.loading.merge_config import MergeConfig from dature.masking.masking import mask_json_value from dature.merging.strategy import MergeStrategyEnum -from dature.metadata import Source, _MergeConfig -from dature.protocols import DataclassInstance, LoaderProtocol +from dature.protocols import DataclassInstance from dature.skip_field_provider import FilterResult -from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue, LoadRawResult, TypeLoaderMap +from dature.sources.base import FlatKeySource, Source +from dature.types import ( + ExpandEnvVarsMode, + JSONValue, + LoadRawResult, + NestedResolve, + NestedResolveStrategy, + TypeLoaderMap, +) logger = logging.getLogger("dature") -def resolve_loader_for_source( +def load_source_raw(source: Source, resolved: "ResolvedSourceParams") -> LoadRawResult: + if isinstance(source, FlatKeySource): + return source.load_raw( + resolved_expand=resolved.expand_env_vars, + resolved_nested_strategy=resolved.nested_resolve_strategy, + resolved_nested_resolve=resolved.nested_resolve, + ) + return source.load_raw(resolved_expand=resolved.expand_env_vars) + + +@dataclass(frozen=True, slots=True) +class ResolvedSourceParams: + expand_env_vars: ExpandEnvVarsMode + type_loaders: TypeLoaderMap | None + nested_resolve_strategy: NestedResolveStrategy + nested_resolve: NestedResolve | None + + +def resolve_source_params( + source: Source, *, - loaders: tuple[LoaderProtocol, ...] 
| None, - index: int, - source_meta: Source, - expand_env_vars: ExpandEnvVarsMode | None = None, - type_loaders: TypeLoaderMap | None = None, -) -> LoaderProtocol: - if loaders is not None: - return loaders[index] - return resolve_loader(source_meta, expand_env_vars=expand_env_vars, type_loaders=type_loaders) - - -def should_skip_broken(source_meta: Source, merge_meta: _MergeConfig) -> bool: - if source_meta.skip_if_broken is not None: - if source_meta.file is None: + load_expand_env_vars: ExpandEnvVarsMode | None = None, + load_type_loaders: TypeLoaderMap | None = None, + load_nested_resolve_strategy: NestedResolveStrategy | None = None, + load_nested_resolve: NestedResolve | None = None, +) -> ResolvedSourceParams: + resolved_expand: ExpandEnvVarsMode = "default" + if source.expand_env_vars is not None: + resolved_expand = source.expand_env_vars + elif load_expand_env_vars is not None: + resolved_expand = load_expand_env_vars + + source_loaders = source.type_loaders or {} + load_loaders = load_type_loaders or {} + config_loaders = config.type_loaders or {} + merged_loaders = {**config_loaders, **load_loaders, **source_loaders} + resolved_type_loaders = merged_loaders or None + + resolved_nested_strategy: NestedResolveStrategy = config.loading.nested_resolve_strategy + if isinstance(source, FlatKeySource) and source.nested_resolve_strategy != "flat": + resolved_nested_strategy = source.nested_resolve_strategy + elif load_nested_resolve_strategy is not None: + resolved_nested_strategy = load_nested_resolve_strategy + + resolved_nested_resolve: NestedResolve | None = None + if isinstance(source, FlatKeySource) and source.nested_resolve is not None: + resolved_nested_resolve = source.nested_resolve + elif load_nested_resolve is not None: + resolved_nested_resolve = load_nested_resolve + + return ResolvedSourceParams( + expand_env_vars=resolved_expand, + type_loaders=resolved_type_loaders, + nested_resolve_strategy=resolved_nested_strategy, + 
nested_resolve=resolved_nested_resolve, + ) + + +def should_skip_broken(source: Source, merge_meta: MergeConfig) -> bool: + if source.skip_if_broken is not None: + if source.file_display() is None: logger.warning( "skip_if_broken has no effect on environment variable sources — they cannot be broken", ) - return source_meta.skip_if_broken + return source.skip_if_broken return merge_meta.skip_broken_sources -def resolve_expand_env_vars(source_meta: Source, merge_meta: _MergeConfig) -> ExpandEnvVarsMode: - if source_meta.expand_env_vars is not None: - return source_meta.expand_env_vars +def resolve_expand_env_vars(source: Source, merge_meta: MergeConfig) -> ExpandEnvVarsMode: + if source.expand_env_vars is not None: + return source.expand_env_vars return merge_meta.expand_env_vars def resolve_skip_invalid( - source_meta: Source, - merge_meta: _MergeConfig, + source: Source, + merge_meta: MergeConfig, ) -> bool | tuple[FieldPath, ...]: - if source_meta.skip_if_invalid is not None: - return source_meta.skip_if_invalid + if source.skip_if_invalid is not None: + return source.skip_if_invalid return merge_meta.skip_invalid_fields -def resolve_mask_secrets(source_meta: Source, merge_meta: _MergeConfig) -> bool: - if source_meta.mask_secrets is not None: - return source_meta.mask_secrets +def resolve_mask_secrets(source: Source, merge_meta: MergeConfig) -> bool: + if source.mask_secrets is not None: + return source.mask_secrets if merge_meta.mask_secrets is not None: return merge_meta.mask_secrets return config.masking.mask_secrets -def resolve_secret_field_names(source_meta: Source, merge_meta: _MergeConfig) -> tuple[str, ...]: - source_names = source_meta.secret_field_names or () +def resolve_secret_field_names(source: Source, merge_meta: MergeConfig) -> tuple[str, ...]: + source_names = source.secret_field_names or () merge_names = merge_meta.secret_field_names or () return source_names + merge_names @@ -75,20 +127,19 @@ def resolve_secret_field_names(source_meta: Source, 
merge_meta: _MergeConfig) -> def apply_merge_skip_invalid( *, raw: JSONValue, - source_meta: Source, - merge_meta: _MergeConfig, - loader_instance: LoaderProtocol, + source: Source, + merge_meta: MergeConfig, schema: type[DataclassInstance], source_index: int, ) -> FilterResult: - skip_value = resolve_skip_invalid(source_meta, merge_meta) + skip_value = resolve_skip_invalid(source, merge_meta) if not skip_value: return FilterResult(cleaned_dict=raw, skipped_paths=[]) return apply_skip_invalid( raw=raw, skip_if_invalid=skip_value, - loader_instance=loader_instance, + source=source, schema=schema, log_prefix=f"[{schema.__name__}] Source {source_index}:", ) @@ -97,14 +148,14 @@ def apply_merge_skip_invalid( @dataclass(frozen=True, slots=True) class SourceContext: error_ctx: ErrorContext - filecontent: str | None + file_content: str | None @dataclass(frozen=True, slots=True) class SkippedFieldSource: - metadata: Source + source: Source error_ctx: ErrorContext - filecontent: str | None + file_content: str | None @dataclass(frozen=True, slots=True) @@ -112,73 +163,55 @@ class LoadedSources: raw_dicts: list[JSONValue] source_ctxs: list[SourceContext] source_entries: list[SourceEntry] - last_loader: LoaderProtocol + last_source: Source + last_resolved: ResolvedSourceParams skipped_fields: dict[str, list[SkippedFieldSource]] -def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915 +def load_sources( # noqa: C901, PLR0912, PLR0915 *, - merge_meta: _MergeConfig, + merge_meta: MergeConfig, dataclass_name: str, schema: type[DataclassInstance], - loaders: tuple[LoaderProtocol, ...] 
| None = None, secret_paths: frozenset[str] = frozenset(), mask_secrets: bool = False, - type_loaders: TypeLoaderMap | None = None, ) -> LoadedSources: raw_dicts: list[JSONValue] = [] source_ctxs: list[SourceContext] = [] source_entries: list[SourceEntry] = [] - last_loader: LoaderProtocol | None = None + last_source: Source | None = None + last_resolved: ResolvedSourceParams | None = None skipped_fields: dict[str, list[SkippedFieldSource]] = {} - for i, source_meta in enumerate(merge_meta.sources): - resolved_expand = resolve_expand_env_vars(source_meta, merge_meta) - source_type_loaders = {**(type_loaders or {}), **(source_meta.type_loaders or {})} - loader_instance = resolve_loader_for_source( - loaders=loaders, - index=i, - source_meta=source_meta, - expand_env_vars=resolved_expand, - type_loaders=source_type_loaders, + for i, source_item in enumerate(merge_meta.sources): + resolved = resolve_source_params( + source_item, + load_expand_env_vars=merge_meta.expand_env_vars, + load_type_loaders=merge_meta.type_loaders, + load_nested_resolve_strategy=merge_meta.nested_resolve_strategy, + load_nested_resolve=merge_meta.nested_resolve, ) - fileor_path: FileOrStream - if isinstance(source_meta.file, FILE_LIKE_TYPES): - fileor_path = source_meta.file - elif source_meta.file is not None: - fileor_path = Path(source_meta.file) - else: - fileor_path = Path() - error_ctx = build_error_ctx(source_meta, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets) - - def _load_raw( - li: LoaderProtocol = loader_instance, - fp: FileOrStream = fileor_path, - ) -> LoadRawResult: - return li.load_raw(fp) + error_ctx = build_error_ctx(source_item, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets) try: load_result = handle_load_errors( - func=_load_raw, + func=partial(load_source_raw, source_item, resolved), ctx=error_ctx, ) except (DatureConfigError, FileNotFoundError): - if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not 
should_skip_broken(source_meta, merge_meta): + if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_item, merge_meta): raise logger.warning( "[%s] Source %d skipped (broken): file=%s", dataclass_name, i, - source_meta.file - if isinstance(source_meta.file, (str, Path)) - else ("" if source_meta.file is not None else ""), + source_item.file_display() or "", ) continue except Exception as exc: - if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta): - loader_class = resolve_loader_class(source_meta.loader, source_meta.file) + if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_item, merge_meta): location = SourceLocation( - display_label=loader_class.display_label, + location_label=type(source_item).location_label, file_path=error_ctx.file_path, line_range=None, line_content=None, @@ -193,52 +226,46 @@ def _load_raw( "[%s] Source %d skipped (broken): file=%s", dataclass_name, i, - source_meta.file - if isinstance(source_meta.file, (str, Path)) - else ("" if source_meta.file is not None else ""), + source_item.file_display() or "", ) continue raw = load_result.data if load_result.nested_conflicts: error_ctx = build_error_ctx( - source_meta, + source_item, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets, nested_conflicts=load_result.nested_conflicts, ) - filecontent = read_filecontent(error_ctx.file_path) + file_content = read_file_content(error_ctx.file_path) filter_result = apply_merge_skip_invalid( raw=raw, - source_meta=source_meta, + source=source_item, merge_meta=merge_meta, - loader_instance=loader_instance, schema=schema, source_index=i, ) for path in filter_result.skipped_paths: skipped_fields.setdefault(path, []).append( - SkippedFieldSource(metadata=source_meta, error_ctx=error_ctx, filecontent=filecontent), + SkippedFieldSource(source=source_item, error_ctx=error_ctx, file_content=file_content), ) raw = 
filter_result.cleaned_dict raw_dicts.append(raw) - loader_class = resolve_loader_class(source_meta.loader, source_meta.file) - display_name = loader_class.display_name + format_name = type(source_item).format_name logger.debug( "[%s] Source %d loaded: loader=%s, file=%s, keys=%s", dataclass_name, i, - display_name, - source_meta.file - if isinstance(source_meta.file, (str, Path)) - else ("" if source_meta.file is not None else ""), + format_name, + source_item.file_display() or "", sorted(raw.keys()) if isinstance(raw, dict) else "", ) if secret_paths: @@ -255,19 +282,20 @@ def _load_raw( source_entries.append( SourceEntry( index=i, - file_path=str(source_meta.file) if isinstance(source_meta.file, (str, Path)) else None, - loader_type=display_name, + file_path=str(src_path) if (src_path := source_item.file_path_for_errors()) is not None else None, + loader_type=format_name, raw_data=raw, ), ) - source_ctxs.append(SourceContext(error_ctx=error_ctx, filecontent=filecontent)) - last_loader = loader_instance + source_ctxs.append(SourceContext(error_ctx=error_ctx, file_content=file_content)) + last_source = source_item + last_resolved = resolved if merge_meta.strategy == MergeStrategyEnum.FIRST_FOUND: break - if last_loader is None: + if last_source is None or last_resolved is None: if merge_meta.sources: msg = f"All {len(merge_meta.sources)} source(s) failed to load" else: @@ -279,6 +307,7 @@ def _load_raw( raw_dicts=raw_dicts, source_ctxs=source_ctxs, source_entries=source_entries, - last_loader=last_loader, + last_source=last_source, + last_resolved=last_resolved, skipped_fields=skipped_fields, ) diff --git a/src/dature/main.py b/src/dature/main.py index be60134..536a0bd 100644 --- a/src/dature/main.py +++ b/src/dature/main.py @@ -1,21 +1,18 @@ import logging from collections.abc import Callable -from pathlib import Path from typing import Any, overload from dature.config import config +from dature.loading.merge_config import MergeConfig from dature.loading.multi 
import merge_load_as_function, merge_make_decorator -from dature.loading.resolver import resolve_loader from dature.loading.single import load_as_function, make_decorator from dature.merging.strategy import MergeStrategyEnum -from dature.metadata import Source, _MergeConfig from dature.protocols import DataclassInstance +from dature.sources.base import Source from dature.types import ( - FILE_LIKE_TYPES, ExpandEnvVarsMode, FieldGroupTuple, FieldMergeMap, - FileOrStream, MergeStrategyName, NestedResolve, NestedResolveStrategy, @@ -118,44 +115,31 @@ def load( # noqa: PLR0913 nested_resolve=nested_resolve, ) - metadata = sources[0] - - source_type_loaders = {**(config.type_loaders or {}), **(type_loaders or {}), **(metadata.type_loaders or {})} - loader_instance = resolve_loader( - metadata, - expand_env_vars=expand_env_vars, - type_loaders=source_type_loaders or None, - nested_resolve_strategy=nested_resolve_strategy or config.loading.nested_resolve_strategy, - nested_resolve=nested_resolve, - ) - - fileor_path: FileOrStream - if isinstance(metadata.file, FILE_LIKE_TYPES): - fileor_path = metadata.file - elif metadata.file is not None: - fileor_path = Path(metadata.file) - else: - fileor_path = Path() + source = sources[0] if schema is not None: return load_as_function( - loader_instance=loader_instance, - file_path=fileor_path, + source=source, schema=schema, - metadata=metadata, debug=debug, secret_field_names=secret_field_names, mask_secrets=mask_secrets, + expand_env_vars=expand_env_vars, + type_loaders=type_loaders, + nested_resolve_strategy=nested_resolve_strategy, + nested_resolve=nested_resolve, ) return make_decorator( - loader_instance=loader_instance, - file_path=fileor_path, - metadata=metadata, + source=source, cache=cache, debug=debug, secret_field_names=secret_field_names, mask_secrets=mask_secrets, + expand_env_vars=expand_env_vars, + type_loaders=type_loaders, + nested_resolve_strategy=nested_resolve_strategy, + nested_resolve=nested_resolve, ) @@ 
-188,7 +172,7 @@ def _load_multi( # noqa: PLR0913 nested_resolve_strategy: NestedResolveStrategy | None, nested_resolve: NestedResolve | None, ) -> DataclassInstance | Callable[[type[DataclassInstance]], type[DataclassInstance]]: - merge_meta = _MergeConfig( + merge_meta = MergeConfig( sources=sources, strategy=MergeStrategyEnum(strategy), field_merges=field_merges, @@ -202,7 +186,6 @@ def _load_multi( # noqa: PLR0913 nested_resolve_strategy=nested_resolve_strategy, nested_resolve=nested_resolve, ) - merge_type_loaders = {**(config.type_loaders or {}), **(merge_meta.type_loaders or {})} if schema is not None: - return merge_load_as_function(merge_meta, schema, debug=debug, type_loaders=merge_type_loaders or None) - return merge_make_decorator(merge_meta, cache=cache, debug=debug, type_loaders=merge_type_loaders or None) + return merge_load_as_function(merge_meta, schema, debug=debug) + return merge_make_decorator(merge_meta, cache=cache, debug=debug) diff --git a/src/dature/masking/detection.py b/src/dature/masking/detection.py index 73d720b..04b7e4f 100644 --- a/src/dature/masking/detection.py +++ b/src/dature/masking/detection.py @@ -5,6 +5,7 @@ from dature.config import config from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr +from dature.type_utils import find_nested_dataclasses from dature.types import TypeAnnotation _secret_paths_cache: dict[tuple[type, tuple[str, ...]], frozenset[str]] = {} @@ -59,7 +60,7 @@ def _walk_dataclass_fields( if _is_secret_type(field_type) or _matches_secret_pattern(field_name, all_patterns): result.add(full_path) - nested_types = _find_nested_dataclasses(field_type) + nested_types = find_nested_dataclasses(field_type) for nested_dc in nested_types: _walk_dataclass_fields( nested_dc, @@ -69,26 +70,6 @@ def _walk_dataclass_fields( ) -def _find_nested_dataclasses(field_type: TypeAnnotation) -> list[type]: - result: list[type] = [] - queue: list[TypeAnnotation] = [field_type] - - 
while queue: - current = queue.pop() - - if is_dataclass(current) and isinstance(current, type): - result.append(current) - continue - - origin = get_origin(current) - if origin is Annotated: - queue.append(get_args(current)[0]) - elif origin is not None: - queue.extend(get_args(current)) - - return result - - def build_secret_paths( dataclass_type: type, *, diff --git a/src/dature/merging/deep_merge.py b/src/dature/merging/deep_merge.py index 8278c28..107dac9 100644 --- a/src/dature/merging/deep_merge.py +++ b/src/dature/merging/deep_merge.py @@ -204,7 +204,7 @@ def raise_on_conflict( locations: list[SourceLocation] = [] for source_idx, _ in sources: source_ctx = source_ctxs[source_idx] - locs = resolve_source_location(field_path, source_ctx.error_ctx, source_ctx.filecontent) + locs = resolve_source_location(field_path, source_ctx.error_ctx, source_ctx.file_content) locations.extend(locs) conflict_errors.append( MergeConflictFieldError( diff --git a/src/dature/metadata.py b/src/dature/metadata.py deleted file mode 100644 index 2e3ff45..0000000 --- a/src/dature/metadata.py +++ /dev/null @@ -1,79 +0,0 @@ -from pathlib import Path -from typing import TYPE_CHECKING - -from dature.expansion.env_expand import expand_file_path -from dature.loading.resolver import resolve_loader_class -from dature.types import FILE_LIKE_TYPES - -if TYPE_CHECKING: - from dature.field_path import FieldPath - from dature.protocols import LoaderProtocol, ValidatorProtocol - from dature.types import ( - DotSeparatedPath, - ExpandEnvVarsMode, - FieldGroupTuple, - FieldMapping, - FieldMergeMap, - FieldValidators, - FileLike, - FilePath, - NameStyle, - NestedResolve, - NestedResolveStrategy, - TypeLoaderMap, - ) - -from dataclasses import dataclass - -from dature.merging.strategy import MergeStrategyEnum - - -# --8<-- [start:load-metadata] -@dataclass(slots=True, kw_only=True) -class Source: - file: "FileLike | FilePath | None" = None - loader: "type[LoaderProtocol] | None" = None - prefix: 
"DotSeparatedPath | None" = None - split_symbols: str = "__" - name_style: "NameStyle | None" = None - field_mapping: "FieldMapping | None" = None - root_validators: "tuple[ValidatorProtocol, ...] | None" = None - validators: "FieldValidators | None" = None - expand_env_vars: "ExpandEnvVarsMode | None" = None - skip_if_broken: bool | None = None - skip_if_invalid: "bool | tuple[FieldPath, ...] | None" = None - secret_field_names: tuple[str, ...] | None = None - mask_secrets: bool | None = None - type_loaders: "TypeLoaderMap | None" = None - nested_resolve_strategy: "NestedResolveStrategy | None" = None - nested_resolve: "NestedResolve | None" = None - # --8<-- [end:load-metadata] - - def __post_init__(self) -> None: - if isinstance(self.file, (str, Path)): - self.file = expand_file_path(str(self.file), mode="strict") - - def __repr__(self) -> str: - loader_class = resolve_loader_class(self.loader, self.file) - display = loader_class.display_name - if isinstance(self.file, FILE_LIKE_TYPES): - return f"{display} ''" - if self.file is not None: - return f"{display} '{self.file}'" - return display - - -@dataclass(slots=True, kw_only=True) -class _MergeConfig: - sources: tuple[Source, ...] - strategy: MergeStrategyEnum = MergeStrategyEnum.LAST_WINS - field_merges: "FieldMergeMap | None" = None - field_groups: "tuple[FieldGroupTuple, ...]" = () - skip_broken_sources: bool = False - skip_invalid_fields: bool = False - expand_env_vars: "ExpandEnvVarsMode" = "default" - secret_field_names: tuple[str, ...] 
| None = None - mask_secrets: bool | None = None - type_loaders: "TypeLoaderMap | None" = None - nested_resolve_strategy: "NestedResolveStrategy | None" = None - nested_resolve: "NestedResolve | None" = None diff --git a/src/dature/protocols.py b/src/dature/protocols.py index 536f9ea..dd7b8d2 100644 --- a/src/dature/protocols.py +++ b/src/dature/protocols.py @@ -1,15 +1,6 @@ from collections.abc import Callable from dataclasses import Field -from pathlib import Path -from typing import Any, ClassVar, Protocol, TypeVar - -from adaptix import Retort - -from dature.errors import SourceLocation -from dature.path_finders.base import PathFinder -from dature.types import FileOrStream, JSONValue, LoadRawResult, NestedConflict - -_T = TypeVar("_T") +from typing import Any, ClassVar, Protocol class DataclassInstance(Protocol): @@ -20,31 +11,3 @@ class ValidatorProtocol(Protocol): def get_validator_func(self) -> Callable[..., bool]: ... def get_error_message(self) -> str: ... - - -class LoaderProtocol(Protocol): - display_name: ClassVar[str] - display_label: ClassVar[str] - path_finder_class: type[PathFinder] | None - retorts: dict[type, Retort] - - def load_raw(self, path: FileOrStream) -> LoadRawResult: ... - - def transform_to_dataclass(self, data: JSONValue, schema: type[_T]) -> _T: ... - - def create_retort(self) -> Retort: ... - - def create_probe_retort(self) -> Retort: ... - - def create_validating_retort(self, schema: type[_T]) -> Retort: ... - - @classmethod - def resolve_location( - cls, - field_path: list[str], - file_path: Path | None, - filecontent: str | None, - prefix: str | None, - split_symbols: str, - nested_conflict: NestedConflict | None, - ) -> list[SourceLocation]: ... 
diff --git a/src/dature/sources_loader/__init__.py b/src/dature/sources/__init__.py similarity index 100% rename from src/dature/sources_loader/__init__.py rename to src/dature/sources/__init__.py diff --git a/src/dature/sources/base.py b/src/dature/sources/base.py new file mode 100644 index 0000000..e9717ae --- /dev/null +++ b/src/dature/sources/base.py @@ -0,0 +1,473 @@ +import abc +import json +import logging +from dataclasses import dataclass, field +from datetime import date, datetime, time +from pathlib import Path +from typing import TYPE_CHECKING, ClassVar, cast + +from adaptix import Retort, loader +from adaptix.provider import Provider + +from dature.errors import LineRange, SourceLocation +from dature.expansion.env_expand import expand_env_vars, expand_file_path +from dature.field_path import FieldPath +from dature.loaders import ( + bool_loader, + bytearray_from_json_string, + date_from_string, + datetime_from_string, + float_from_string, + none_from_empty_string, + optional_from_empty_string, + str_from_scalar, + time_from_string, +) +from dature.path_finders.base import PathFinder +from dature.types import ( + FILE_LIKE_TYPES, + DotSeparatedPath, + ExpandEnvVarsMode, + FileOrStream, + JSONValue, + LoadRawResult, + NestedConflict, + NestedConflicts, + NestedResolve, + NestedResolveStrategy, +) + +if TYPE_CHECKING: + from dature.protocols import ValidatorProtocol + from dature.types import ( + FieldMapping, + FieldValidators, + FileLike, + FilePath, + NameStyle, + TypeLoaderMap, + ) + +logger = logging.getLogger("dature") + + +def _string_value_loaders() -> list[Provider]: + return [ + loader(str, str_from_scalar), + loader(float, float_from_string), + loader(date, date_from_string), + loader(datetime, datetime_from_string), + loader(time, time_from_string), + loader(bytearray, bytearray_from_json_string), + loader(type(None), none_from_empty_string), + loader(str | None, optional_from_empty_string), + loader(bool, bool_loader), + ] + + +# --8<-- 
[start:load-metadata] +@dataclass(kw_only=True, repr=False) +class Source(abc.ABC): + prefix: "DotSeparatedPath | None" = None + name_style: "NameStyle | None" = None + field_mapping: "FieldMapping | None" = None + root_validators: "tuple[ValidatorProtocol, ...] | None" = None + validators: "FieldValidators | None" = None + expand_env_vars: "ExpandEnvVarsMode | None" = None + skip_if_broken: bool | None = None + skip_if_invalid: "bool | tuple[FieldPath, ...] | None" = None + secret_field_names: tuple[str, ...] | None = None + mask_secrets: bool | None = None + type_loaders: "TypeLoaderMap | None" = None + # --8<-- [end:load-metadata] + + format_name: ClassVar[str] + location_label: ClassVar[str] + path_finder_class: ClassVar[type[PathFinder] | None] = None + + retorts: dict[type, Retort] = field(default_factory=dict, init=False, repr=False) + + def __repr__(self) -> str: + return self.format_name + + def file_display(self) -> str | None: + return None + + def file_path_for_errors(self) -> Path | None: + return None + + def additional_loaders(self) -> list[Provider]: + return [] + + @staticmethod + def _infer_type(value: str) -> JSONValue: + if value == "": + return value + + try: + return cast("JSONValue", json.loads(value)) + except (json.JSONDecodeError, ValueError): + return value + + @classmethod + def _parse_string_values(cls, data: JSONValue, *, infer_scalars: bool = False) -> JSONValue: + if not isinstance(data, dict): + return data + + result: dict[str, JSONValue] = {} + for key, value in data.items(): + if isinstance(value, dict): + result[key] = cls._parse_string_values(value, infer_scalars=True) + elif isinstance(value, str) and (infer_scalars or value.startswith(("[", "{"))): + result[key] = cls._infer_type(value) + else: + result[key] = value + return result + + @abc.abstractmethod + def _load(self) -> JSONValue: ... 
+ + def _apply_prefix(self, data: JSONValue) -> JSONValue: + if not self.prefix: + return data + + for key in self.prefix.split("."): + if not isinstance(data, dict): + return {} + if key not in data: + return {} + data = data[key] + + return data + + def _pre_processing( + self, + data: JSONValue, + *, + resolved_expand: ExpandEnvVarsMode, + ) -> JSONValue: + prefixed = self._apply_prefix(data) + return expand_env_vars(prefixed, mode=resolved_expand) + + def load_raw(self, *, resolved_expand: ExpandEnvVarsMode = "default") -> LoadRawResult: + data = self._load() + processed = self._pre_processing(data, resolved_expand=resolved_expand) + logger.debug( + "[%s] load_raw: source=%s, raw_keys=%s, after_preprocessing_keys=%s", + type(self).__name__, + self.file_display() or "", + sorted(data.keys()) if isinstance(data, dict) else "", + sorted(processed.keys()) if isinstance(processed, dict) else "", + ) + return LoadRawResult(data=processed) + + @staticmethod + def _empty_location(location_label: str, file_path: Path | None) -> SourceLocation: + return SourceLocation( + location_label=location_label, + file_path=file_path, + line_range=None, + line_content=None, + env_var_name=None, + ) + + @staticmethod + def _build_search_path(field_path: list[str], prefix: str | None) -> list[str]: + if not prefix: + return field_path + prefix_parts = prefix.split(".") + return prefix_parts + field_path + + @staticmethod + def _find_parent_line_range(finder: PathFinder, search_path: list[str]) -> LineRange | None: + path = search_path[:-1] + while path: + line_range = finder.find_line_range(path) + if line_range is not None: + return line_range + path = path[:-1] + return None + + @staticmethod + def _strip_common_indent(raw_lines: list[str]) -> list[str]: + indents = [len(line) - len(line.lstrip()) for line in raw_lines if line.strip()] + if not indents: + return raw_lines + min_indent = min(indents) + return [line[min_indent:] for line in raw_lines] + + @classmethod + def 
resolve_location( + cls, + *, + field_path: list[str], + file_path: Path | None, + file_content: str | None, + prefix: str | None, + nested_conflict: NestedConflict | None, # noqa: ARG003 + split_symbols: str | None = None, # noqa: ARG003 + ) -> list[SourceLocation]: + if file_content is None or not field_path: + return [cls._empty_location(cls.location_label, file_path)] + + if cls.path_finder_class is None: + return [cls._empty_location(cls.location_label, file_path)] + + search_path = cls._build_search_path(field_path, prefix) + finder = cls.path_finder_class(file_content) + line_range = finder.find_line_range(search_path) + if line_range is None: + line_range = cls._find_parent_line_range(finder, search_path) + if line_range is None: + return [cls._empty_location(cls.location_label, file_path)] + + lines = file_content.splitlines() + content_lines: list[str] | None = None + if 0 < line_range.start <= len(lines): + end = min(line_range.end, len(lines)) + raw = lines[line_range.start - 1 : end] + content_lines = cls._strip_common_indent(raw) + + return [ + SourceLocation( + location_label=cls.location_label, + file_path=file_path, + line_range=line_range, + line_content=content_lines, + env_var_name=None, + ), + ] + + +# --8<-- [start:file-source] +@dataclass(kw_only=True, repr=False) +class FileFieldMixin: + file: "FileLike | FilePath | None" = None + # --8<-- [end:file-source] + + def _init_file_field(self) -> None: + if isinstance(self.file, (str, Path)): + self.file = expand_file_path(str(self.file), mode="strict") + + @staticmethod + def resolve_file_field(file: "FileLike | FilePath | None") -> FileOrStream: + if isinstance(file, FILE_LIKE_TYPES): + return file + if file is not None: + return Path(file) + return Path() + + @staticmethod + def file_field_display(file: "FileLike | FilePath | None") -> str | None: + if isinstance(file, FILE_LIKE_TYPES): + return "" + if file is not None: + return str(file) + return None + + @staticmethod + def 
file_field_path_for_errors(file: "FileLike | FilePath | None") -> Path | None: + if isinstance(file, FILE_LIKE_TYPES): + return None + if file is not None: + return Path(file) + return None + + def file_display(self) -> str | None: + return self.file_field_display(self.file) + + def file_path_for_errors(self) -> Path | None: + return self.file_field_path_for_errors(self.file) + + +@dataclass(kw_only=True, repr=False) +class FileSource(FileFieldMixin, Source, abc.ABC): + location_label: ClassVar[str] = "FILE" + + def __post_init__(self) -> None: + self._init_file_field() + + def __repr__(self) -> str: + display = self.format_name + file_path_display = self.file_display() + if file_path_display is not None: + return f"{display} '{file_path_display}'" + return display + + def _load(self) -> JSONValue: + path = self.resolve_file_field(self.file) + return self._load_file(path) + + @abc.abstractmethod + def _load_file(self, path: FileOrStream) -> JSONValue: ... + + +# --8<-- [start:flat-key-source] +@dataclass(kw_only=True, repr=False) +class FlatKeySource(Source, abc.ABC): + split_symbols: str = "__" + nested_resolve_strategy: NestedResolveStrategy = "flat" + nested_resolve: NestedResolve | None = None + # --8<-- [end:flat-key-source] + + @staticmethod + def _set_nested(target: dict[str, JSONValue], keys: list[str], value: str) -> None: + for key in keys[:-1]: + target = cast("dict[str, JSONValue]", target.setdefault(key, {})) + target[keys[-1]] = value + + def _resolve_field_strategy( + self, + field_name: str, + *, + resolved_nested_strategy: NestedResolveStrategy = "flat", + resolved_nested_resolve: NestedResolve | None = None, + ) -> NestedResolveStrategy: + effective_nested_resolve = ( + resolved_nested_resolve if resolved_nested_resolve is not None else self.nested_resolve + ) + if effective_nested_resolve is not None: + for strategy, field_paths in effective_nested_resolve.items(): + paths = field_paths if isinstance(field_paths, tuple) else (field_paths,) + for 
field_path in paths: + if self._field_path_matches(field_path, field_name): + return strategy + return resolved_nested_strategy + + @staticmethod + def _field_path_matches(field_path: FieldPath, field_name: str) -> bool: + if not field_path.parts: + return True + return field_path.parts[0] == field_name + + def additional_loaders(self) -> list[Provider]: + return _string_value_loaders() + + @staticmethod + def _resolve_var_name( + field_path: list[str], + prefix: str | None, + split_symbols: str, + conflict: NestedConflict | None, + ) -> str: + def _build_name(parts: list[str]) -> str: + var_name = split_symbols.join(part.upper() for part in parts) + if prefix is not None: + return prefix + var_name + return var_name + + json_var = _build_name(field_path[:1]) + if conflict is not None and conflict.used_var == json_var: + return json_var + return _build_name(field_path) + + def _build_var_name(self, key: str) -> str: + if self.prefix: + return self.prefix + key.upper() + return key.upper() + + def _build_nested_var_name(self, top_field: str, nested: dict[str, JSONValue]) -> str: + for sub_key in nested: + full_key = f"{top_field}{self.split_symbols}{sub_key}" + return self._build_var_name(full_key) + return self._build_var_name(top_field) + + def _pre_process_row( + self, + key: str, + value: str, + result: dict[str, JSONValue], + conflicts: NestedConflicts, + *, + resolved_nested_strategy: NestedResolveStrategy = "flat", + resolved_nested_resolve: NestedResolve | None = None, + ) -> None: + parts = key.split(self.split_symbols) + self._process_key_value( + parts=parts, + value=value, + result=result, + conflicts=conflicts, + resolved_nested_strategy=resolved_nested_strategy, + resolved_nested_resolve=resolved_nested_resolve, + ) + + def load_raw( + self, + *, + resolved_expand: ExpandEnvVarsMode = "default", + resolved_nested_strategy: NestedResolveStrategy | None = None, + resolved_nested_resolve: NestedResolve | None = None, + ) -> LoadRawResult: + data = 
self._load() + data_dict = cast("dict[str, str]", data) + result: dict[str, JSONValue] = {} + conflicts: NestedConflicts = {} + + effective_nested_strategy: NestedResolveStrategy = self.nested_resolve_strategy + if self.nested_resolve_strategy == "flat" and resolved_nested_strategy is not None: + effective_nested_strategy = resolved_nested_strategy + + effective_nested_resolve = self.nested_resolve if self.nested_resolve is not None else resolved_nested_resolve + + for key, value in data_dict.items(): + self._pre_process_row( + key=key, + value=value, + result=result, + conflicts=conflicts, + resolved_nested_strategy=effective_nested_strategy, + resolved_nested_resolve=effective_nested_resolve, + ) + + expanded = expand_env_vars(result, mode=resolved_expand) + processed = self._parse_string_values(expanded) + return LoadRawResult(data=processed, nested_conflicts=conflicts) + + def _process_key_value( + self, + *, + parts: list[str], + value: str, + result: dict[str, JSONValue], + conflicts: NestedConflicts, + resolved_nested_strategy: NestedResolveStrategy = "flat", + resolved_nested_resolve: NestedResolve | None = None, + ) -> None: + if len(parts) > 1: + top_field = parts[0] + strategy = self._resolve_field_strategy( + top_field, + resolved_nested_strategy=resolved_nested_strategy, + resolved_nested_resolve=resolved_nested_resolve, + ) + existing = result.get(top_field) + if isinstance(existing, str): + flat_var = self._build_var_name(self.split_symbols.join(parts)) + json_var = self._build_var_name(top_field) + if strategy == "flat": + result.pop(top_field) + self._set_nested(result, parts, value) + conflicts[top_field] = NestedConflict(flat_var, json_var, existing) + elif strategy == "json": + conflicts[top_field] = NestedConflict(json_var, flat_var, existing) + else: + self._set_nested(result, parts, value) + else: + top_field = parts[0] + strategy = self._resolve_field_strategy( + top_field, + resolved_nested_strategy=resolved_nested_strategy, + 
resolved_nested_resolve=resolved_nested_resolve, + ) + existing = result.get(top_field) + if isinstance(existing, dict): + json_var = self._build_var_name(top_field) + flat_var = self._build_nested_var_name(top_field, existing) + if strategy == "json": + result[top_field] = value + conflicts[top_field] = NestedConflict(json_var, flat_var, value) + elif strategy == "flat": + conflicts[top_field] = NestedConflict(flat_var, json_var, value) + else: + result[top_field] = value diff --git a/src/dature/sources/docker_secrets.py b/src/dature/sources/docker_secrets.py new file mode 100644 index 0000000..4ffe701 --- /dev/null +++ b/src/dature/sources/docker_secrets.py @@ -0,0 +1,85 @@ +from dataclasses import dataclass +from pathlib import Path +from typing import TYPE_CHECKING, ClassVar + +from dature.errors import SourceLocation +from dature.expansion.env_expand import expand_file_path +from dature.sources.base import FlatKeySource +from dature.types import JSONValue, NestedConflict + +if TYPE_CHECKING: + from dature.types import FilePath + + +@dataclass(kw_only=True, repr=False) +class DockerSecretsSource(FlatKeySource): + dir_: "FilePath" + format_name = "docker_secrets" + location_label: ClassVar[str] = "SECRET FILE" + + def __post_init__(self) -> None: + if isinstance(self.dir_, (str, Path)): + self.dir_ = expand_file_path(str(self.dir_), mode="strict") + + def __repr__(self) -> str: + return f"{self.format_name} '{self.dir_}'" + + def file_display(self) -> str | None: + return str(self.dir_) + + def file_path_for_errors(self) -> Path | None: + return Path(self.dir_) + + @classmethod + def resolve_location( + cls, + *, + field_path: list[str], + file_path: Path | None, + file_content: str | None, # noqa: ARG003 + prefix: str | None, + nested_conflict: NestedConflict | None, + split_symbols: str | None = None, + ) -> list[SourceLocation]: + resolved_symbols = split_symbols or "__" + if nested_conflict is not None: + json_var = cls._resolve_var_name(field_path[:1], 
prefix, resolved_symbols, None) + if nested_conflict.used_var == json_var: + secret_name = field_path[0] + else: + secret_name = resolved_symbols.join(field_path) + else: + secret_name = resolved_symbols.join(field_path) + if prefix is not None: + secret_name = prefix + secret_name + secret_file = file_path / secret_name if file_path is not None else None + return [ + SourceLocation( + location_label=cls.location_label, + file_path=secret_file, + line_range=None, + line_content=None, + env_var_name=None, + ), + ] + + def _load(self) -> JSONValue: + path = Path(self.dir_) + + result: dict[str, JSONValue] = {} + for entry in sorted(path.iterdir()): + if not entry.is_file(): + continue + + key = entry.name.lower() + value = entry.read_text().strip() + + if self.prefix and not key.startswith(self.prefix.lower()): + continue + + if self.prefix: + key = key[len(self.prefix) :] + + result[key] = value + + return result diff --git a/src/dature/sources_loader/env_.py b/src/dature/sources/env_.py similarity index 59% rename from src/dature/sources_loader/env_.py rename to src/dature/sources/env_.py index e3750a5..3835f46 100644 --- a/src/dature/sources_loader/env_.py +++ b/src/dature/sources/env_.py @@ -1,40 +1,52 @@ import io import os from collections.abc import Iterable +from dataclasses import dataclass from pathlib import Path from typing import ClassVar, cast from dature.errors import LineRange, SourceLocation -from dature.sources_loader.flat_key import FlatKeyLoader -from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue, NestedConflict, NestedConflicts - - -class EnvLoader(FlatKeyLoader): - display_name = "env" - display_label: ClassVar[str] = "ENV" - - def _load(self, _: FileOrStream) -> JSONValue: +from dature.sources.base import FileFieldMixin, FlatKeySource +from dature.types import ( + BINARY_IO_TYPES, + TEXT_IO_TYPES, + JSONValue, + NestedConflict, + NestedConflicts, + NestedResolve, + NestedResolveStrategy, +) + + 
+@dataclass(kw_only=True, repr=False) +class EnvSource(FlatKeySource): + format_name = "env" + location_label: ClassVar[str] = "ENV" + + def _load(self) -> JSONValue: return cast("JSONValue", os.environ) @classmethod def resolve_location( cls, + *, field_path: list[str], file_path: Path | None, # noqa: ARG003 - filecontent: str | None, # noqa: ARG003 + file_content: str | None, # noqa: ARG003 prefix: str | None, - split_symbols: str, nested_conflict: NestedConflict | None, + split_symbols: str | None = None, ) -> list[SourceLocation]: - var_name = cls._resolve_var_name(field_path, prefix, split_symbols, nested_conflict) + resolved_symbols = split_symbols or "__" + var_name = cls._resolve_var_name(field_path, prefix, resolved_symbols, nested_conflict) env_var_value: str | None = None if nested_conflict is not None: - json_var = cls._resolve_var_name(field_path[:1], prefix, split_symbols, None) + json_var = cls._resolve_var_name(field_path[:1], prefix, resolved_symbols, None) if nested_conflict.used_var == json_var: env_var_value = nested_conflict.json_raw_value return [ SourceLocation( - display_label=cls.display_label, + location_label=cls.location_label, file_path=None, line_range=None, line_content=None, @@ -49,39 +61,62 @@ def _pre_process_row( value: str, result: dict[str, JSONValue], conflicts: NestedConflicts, + *, + resolved_nested_strategy: NestedResolveStrategy = "flat", + resolved_nested_resolve: NestedResolve | None = None, ) -> None: - if self._prefix and not key.startswith(self._prefix): + if self.prefix and not key.startswith(self.prefix): return - processed_key = key[len(self._prefix) :] if self._prefix else key + processed_key = key[len(self.prefix) :] if self.prefix else key processed_key = processed_key.lower() - parts = processed_key.split(self._split_symbols) - self._process_key_value(parts=parts, value=value, result=result, conflicts=conflicts) + parts = processed_key.split(self.split_symbols) + self._process_key_value( + parts=parts, + 
value=value, + result=result, + conflicts=conflicts, + resolved_nested_strategy=resolved_nested_strategy, + resolved_nested_resolve=resolved_nested_resolve, + ) + + +@dataclass(kw_only=True, repr=False) +class EnvFileSource(FileFieldMixin, EnvSource): + format_name = "envfile" + location_label: ClassVar[str] = "ENV FILE" + def __post_init__(self) -> None: + self._init_file_field() -class EnvFileLoader(EnvLoader): - display_name = "envfile" - display_label: ClassVar[str] = "ENV FILE" + def __repr__(self) -> str: + display = self.format_name + file_path_display = self.file_display() + if file_path_display is not None: + return f"{display} '{file_path_display}'" + return display @classmethod def resolve_location( cls, + *, field_path: list[str], file_path: Path | None, - filecontent: str | None, + file_content: str | None, prefix: str | None, - split_symbols: str, nested_conflict: NestedConflict | None, + split_symbols: str | None = None, ) -> list[SourceLocation]: - var_name = cls._resolve_var_name(field_path, prefix, split_symbols, nested_conflict) + resolved_symbols = split_symbols or "__" + var_name = cls._resolve_var_name(field_path, prefix, resolved_symbols, nested_conflict) line_range: LineRange | None = None line_content: list[str] | None = None - if filecontent is not None: - line_range, line_content = _find_env_line(filecontent, var_name) + if file_content is not None: + line_range, line_content = _find_env_line(file_content, var_name) return [ SourceLocation( - display_label=cls.display_label, + location_label=cls.location_label, file_path=file_path, line_range=line_range, line_content=line_content, @@ -89,8 +124,9 @@ def resolve_location( ), ] - def _load(self, path: FileOrStream) -> JSONValue: + def _load(self) -> JSONValue: """Parse .env file into a flat key=value dict (before nesting/expand/parse).""" + path = self.resolve_file_field(self.file) raw_pairs: dict[str, JSONValue] = {} if isinstance(path, TEXT_IO_TYPES): diff --git 
a/src/dature/sources/ini_.py b/src/dature/sources/ini_.py new file mode 100644 index 0000000..d011033 --- /dev/null +++ b/src/dature/sources/ini_.py @@ -0,0 +1,61 @@ +import configparser +import io +from dataclasses import dataclass +from typing import cast + +from adaptix.provider import Provider + +from dature.expansion.env_expand import expand_env_vars +from dature.path_finders.ini_ import TablePathFinder +from dature.sources.base import FileSource, _string_value_loaders +from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue + + +@dataclass(kw_only=True, repr=False) +class IniSource(FileSource): + format_name = "ini" + path_finder_class = TablePathFinder + + def additional_loaders(self) -> list[Provider]: + return _string_value_loaders() + + def _pre_processing( + self, + data: JSONValue, + *, + resolved_expand: ExpandEnvVarsMode, + ) -> JSONValue: + prefixed = self._apply_prefix(data) + expanded = expand_env_vars(prefixed, mode=resolved_expand) + return self._parse_string_values(expanded) + + def _load_file(self, path: FileOrStream) -> JSONValue: + config = configparser.ConfigParser(interpolation=None) + if isinstance(path, TEXT_IO_TYPES): + config.read_file(path) + elif isinstance(path, BINARY_IO_TYPES): + config.read_file(io.TextIOWrapper(cast("io.BufferedReader", path))) + else: + with path.open() as f: + config.read_file(f) + if self.prefix and self.prefix in config: + result: dict[str, JSONValue] = dict(config[self.prefix]) + child_prefix = self.prefix + "." 
+ for section in config.sections(): + if section.startswith(child_prefix): + nested_key = section[len(child_prefix) :] + result[nested_key] = dict(config[section]) + return {self.prefix: result} + + all_sections: dict[str, JSONValue] = {} + if config.defaults(): + all_sections["DEFAULT"] = dict(config.defaults()) + for section in config.sections(): + parts = section.split(".") + target = all_sections + for part in parts[:-1]: + if part not in target: + target[part] = {} + target = cast("dict[str, JSONValue]", target[part]) + target[parts[-1]] = dict(config[section]) + return all_sections diff --git a/src/dature/sources_loader/json5_.py b/src/dature/sources/json5_.py similarity index 56% rename from src/dature/sources_loader/json5_.py rename to src/dature/sources/json5_.py index c3df659..18d7787 100644 --- a/src/dature/sources_loader/json5_.py +++ b/src/dature/sources/json5_.py @@ -1,29 +1,38 @@ import io +from dataclasses import dataclass from datetime import date, datetime, time -from typing import TextIO, cast +from typing import TYPE_CHECKING, TextIO, cast -import json5 from adaptix import loader from adaptix.provider import Provider -from dature.path_finders.json5_ import Json5PathFinder -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( +from dature._descriptors import classproperty +from dature.sources.base import FileSource + +if TYPE_CHECKING: + from dature.path_finders.base import PathFinder +from dature.loaders import ( bytearray_from_string, date_from_string, datetime_from_string, float_from_string, time_from_string, ) -from dature.sources_loader.loaders.json5_ import str_from_json_identifier +from dature.loaders.json5_ import str_from_json_identifier from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue -class Json5Loader(BaseLoader): - display_name = "json5" - path_finder_class = Json5PathFinder +@dataclass(kw_only=True, repr=False) +class Json5Source(FileSource): + format_name = 
"json5" + + @classproperty + def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805 + from dature.path_finders.json5_ import Json5PathFinder # noqa: PLC0415 - def _additional_loaders(self) -> list[Provider]: + return Json5PathFinder + + def additional_loaders(self) -> list[Provider]: return [ loader(str, str_from_json_identifier), loader(float, float_from_string), @@ -33,7 +42,9 @@ def _additional_loaders(self) -> list[Provider]: loader(bytearray, bytearray_from_string), ] - def _load(self, path: FileOrStream) -> JSONValue: + def _load_file(self, path: FileOrStream) -> JSONValue: + import json5 # noqa: PLC0415 + if isinstance(path, TEXT_IO_TYPES): return cast("JSONValue", json5.load(cast("TextIO", path))) if isinstance(path, BINARY_IO_TYPES): diff --git a/src/dature/sources_loader/json_.py b/src/dature/sources/json_.py similarity index 74% rename from src/dature/sources_loader/json_.py rename to src/dature/sources/json_.py index 9deb905..23e38b0 100644 --- a/src/dature/sources_loader/json_.py +++ b/src/dature/sources/json_.py @@ -1,27 +1,29 @@ import json +from dataclasses import dataclass from datetime import date, datetime, time from typing import cast from adaptix import loader from adaptix.provider import Provider -from dature.path_finders.json_ import JsonPathFinder -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( +from dature.loaders import ( bytearray_from_string, date_from_string, datetime_from_string, float_from_string, time_from_string, ) +from dature.path_finders.json_ import JsonPathFinder +from dature.sources.base import FileSource from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue -class JsonLoader(BaseLoader): - display_name = "json" +@dataclass(kw_only=True, repr=False) +class JsonSource(FileSource): + format_name = "json" path_finder_class = JsonPathFinder - def _additional_loaders(self) -> list[Provider]: + def additional_loaders(self) -> list[Provider]: return [ loader(float, 
float_from_string), loader(date, date_from_string), @@ -30,7 +32,7 @@ def _additional_loaders(self) -> list[Provider]: loader(bytearray, bytearray_from_string), ] - def _load(self, path: FileOrStream) -> JSONValue: + def _load_file(self, path: FileOrStream) -> JSONValue: if isinstance(path, FILE_LIKE_TYPES): return cast("JSONValue", json.load(path)) with path.open() as file: diff --git a/src/dature/sources/retort.py b/src/dature/sources/retort.py new file mode 100644 index 0000000..5257374 --- /dev/null +++ b/src/dature/sources/retort.py @@ -0,0 +1,221 @@ +from dataclasses import fields +from datetime import timedelta +from typing import TYPE_CHECKING, cast, get_type_hints + +from adaptix import NameStyle as AdaptixNameStyle +from adaptix import Retort, loader, name_mapping +from adaptix.provider import Provider + +from dature.expansion.alias_provider import AliasProvider, resolve_nested_owner +from dature.field_path import FieldPath +from dature.fields.byte_size import ByteSize +from dature.fields.payment_card import PaymentCardNumber +from dature.fields.secret_str import SecretStr +from dature.loaders.base import ( + base64url_bytes_from_string, + base64url_str_from_string, + byte_size_from_string, + bytes_from_string, + complex_from_string, + payment_card_number_from_string, + secret_str_from_string, + timedelta_from_string, + url_from_string, +) +from dature.loaders.common import float_passthrough, int_from_string +from dature.skip_field_provider import ModelToDictProvider, SkipFieldProvider +from dature.type_utils import find_nested_dataclasses +from dature.types import ( + URL, + Base64UrlBytes, + Base64UrlStr, +) +from dature.validators.base import ( + create_metadata_validator_providers, + create_root_validator_providers, + create_validator_providers, + extract_validators_from_type, +) + +if TYPE_CHECKING: + from dature.protocols import DataclassInstance + from dature.sources.base import Source + from dature.types import ( + FieldMapping, + JSONValue, + 
NameStyle, + TypeLoaderMap, + ) + + +def get_adaptix_name_style(name_style: "NameStyle | None") -> AdaptixNameStyle | None: + if name_style is None: + return None + + name_style_map = { + "lower_snake": AdaptixNameStyle.LOWER_SNAKE, + "upper_snake": AdaptixNameStyle.UPPER_SNAKE, + "lower_camel": AdaptixNameStyle.CAMEL, + "upper_camel": AdaptixNameStyle.PASCAL, + "lower_kebab": AdaptixNameStyle.LOWER_KEBAB, + "upper_kebab": AdaptixNameStyle.UPPER_KEBAB, + } + return name_style_map.get(name_style) + + +def get_name_mapping_providers( + name_style: "NameStyle | None", + field_mapping: "FieldMapping | None", +) -> list[Provider]: + providers: list[Provider] = [] + + adaptix_name_style = get_adaptix_name_style(name_style) + if adaptix_name_style is not None: + providers.append(name_mapping(name_style=adaptix_name_style)) + + if field_mapping: + owner_fields: dict[type[DataclassInstance] | str, dict[str, str]] = {} + for field_path_key in field_mapping: + if not isinstance(field_path_key, FieldPath): + continue + owner: type[DataclassInstance] | str = field_path_key.owner + if len(field_path_key.parts) > 1 and not isinstance(field_path_key.owner, str): + owner = resolve_nested_owner(field_path_key.owner, field_path_key.parts[:-1]) + field_name = field_path_key.parts[-1] + if owner not in owner_fields: + owner_fields[owner] = {} + owner_fields[owner][field_name] = field_name + + for owner, identity_map in owner_fields.items(): + if isinstance(owner, str): + providers.append(name_mapping(map=identity_map)) + else: + providers.append(name_mapping(owner, map=identity_map)) + + providers.append(AliasProvider(field_mapping)) + + return providers + + +def get_validator_providers[T](schema: type[T]) -> list[Provider]: + providers: list[Provider] = [] + type_hints = get_type_hints(schema, include_extras=True) + + for f in fields(cast("type[DataclassInstance]", schema)): + if f.name not in type_hints: + continue + + field_type = type_hints[f.name] + validators_list = 
extract_validators_from_type(field_type) + + if validators_list: + field_providers = create_validator_providers(schema, f.name, validators_list) + providers.extend(field_providers) + + for nested_dc in find_nested_dataclasses(field_type): + nested_providers = get_validator_providers(nested_dc) + providers.extend(nested_providers) + + return providers + + +def build_base_recipe( + source: "Source", + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> list[Provider]: + user_loaders: list[Provider] = [ + loader(type_, func) for type_, func in (resolved_type_loaders or source.type_loaders or {}).items() + ] + default_loaders: list[Provider] = [ + loader(int, int_from_string), + loader(float, float_passthrough), + loader(bytes, bytes_from_string), + loader(complex, complex_from_string), + loader(timedelta, timedelta_from_string), + loader(URL, url_from_string), + loader(Base64UrlBytes, base64url_bytes_from_string), + loader(Base64UrlStr, base64url_str_from_string), + loader(SecretStr, secret_str_from_string), + loader(PaymentCardNumber, payment_card_number_from_string), + loader(ByteSize, byte_size_from_string), + ] + return [ + *user_loaders, + *source.additional_loaders(), + *default_loaders, + *get_name_mapping_providers(source.name_style, source.field_mapping), + ] + + +def create_retort( + source: "Source", + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> Retort: + return Retort( + strict_coercion=True, + recipe=build_base_recipe(source, resolved_type_loaders=resolved_type_loaders), + ) + + +def create_probe_retort( + source: "Source", + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> Retort: + return Retort( + strict_coercion=True, + recipe=[ + SkipFieldProvider(), + ModelToDictProvider(), + *build_base_recipe(source, resolved_type_loaders=resolved_type_loaders), + ], + ) + + +def create_validating_retort[T]( + source: "Source", + schema: type[T], + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> 
Retort: + root_validator_providers = create_root_validator_providers( + schema, + source.root_validators or (), + ) + metadata_validator_providers = create_metadata_validator_providers( + source.validators or {}, + ) + return Retort( + strict_coercion=True, + recipe=[ + *get_validator_providers(schema), + *metadata_validator_providers, + *root_validator_providers, + *build_base_recipe(source, resolved_type_loaders=resolved_type_loaders), + ], + ) + + +def transform_to_dataclass[T]( + source: "Source", + data: "JSONValue", + schema: type[T], + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> T: + if schema not in source.retorts: + source.retorts[schema] = create_retort(source, resolved_type_loaders=resolved_type_loaders) + return source.retorts[schema].load(data, schema) + + +def ensure_retort( + source: "Source", + cls: "type[DataclassInstance]", + *, + resolved_type_loaders: "TypeLoaderMap | None" = None, +) -> None: + if cls not in source.retorts: + source.retorts[cls] = create_retort(source, resolved_type_loaders=resolved_type_loaders) + source.retorts[cls].get_loader(cls) diff --git a/src/dature/sources/toml_.py b/src/dature/sources/toml_.py new file mode 100644 index 0000000..609724f --- /dev/null +++ b/src/dature/sources/toml_.py @@ -0,0 +1,80 @@ +import abc +from dataclasses import dataclass +from datetime import date, datetime, time +from typing import TYPE_CHECKING, Any, cast + +from adaptix import loader +from adaptix.provider import Provider + +from dature._descriptors import classproperty +from dature.sources.base import FileSource + +if TYPE_CHECKING: + from toml_rs._lib import TomlVersion + + from dature.path_finders.base import PathFinder +from dature.loaders import ( + bytearray_from_string, + date_passthrough, + datetime_passthrough, + none_from_empty_string, + optional_from_empty_string, +) +from dature.loaders.toml_ import time_passthrough +from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue + + 
+@dataclass(kw_only=True, repr=False) +class _BaseTomlSource(FileSource, abc.ABC): + @abc.abstractmethod + def _toml_version(self) -> "TomlVersion": ... + + def _load_file(self, path: FileOrStream) -> JSONValue: + import toml_rs # noqa: PLC0415 + + if isinstance(path, FILE_LIKE_TYPES): + content = path.read() + if isinstance(content, bytes): + content = content.decode() + return cast("JSONValue", toml_rs.loads(content, toml_version=self._toml_version())) + with path.open() as file: + return cast("JSONValue", toml_rs.loads(file.read(), toml_version=self._toml_version())) + + def additional_loaders(self) -> list[Provider]: + return [ + loader(date, date_passthrough), + loader(datetime, datetime_passthrough), + loader(time, time_passthrough), + loader(bytearray, bytearray_from_string), + loader(type(None), none_from_empty_string), + loader(str | None, optional_from_empty_string), + loader(Any, optional_from_empty_string), + ] + + +@dataclass(kw_only=True, repr=False) +class Toml10Source(_BaseTomlSource): + format_name = "toml1.0" + + @classproperty + def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805 + from dature.path_finders.toml_ import Toml10PathFinder # noqa: PLC0415 + + return Toml10PathFinder + + def _toml_version(self) -> "TomlVersion": + return "1.0.0" + + +@dataclass(kw_only=True, repr=False) +class Toml11Source(_BaseTomlSource): + format_name = "toml1.1" + + @classproperty + def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805 + from dature.path_finders.toml_ import Toml11PathFinder # noqa: PLC0415 + + return Toml11PathFinder + + def _toml_version(self) -> "TomlVersion": + return "1.1.0" diff --git a/src/dature/sources/yaml_.py b/src/dature/sources/yaml_.py new file mode 100644 index 0000000..5e27e32 --- /dev/null +++ b/src/dature/sources/yaml_.py @@ -0,0 +1,87 @@ +import abc +from dataclasses import dataclass +from datetime import date, datetime, time +from typing import TYPE_CHECKING, cast + +from adaptix import loader +from 
adaptix.provider import Provider + +from dature._descriptors import classproperty +from dature.loaders import ( + bytearray_from_string, + date_passthrough, + datetime_passthrough, + time_from_string, +) +from dature.loaders.yaml_ import time_from_int +from dature.sources.base import FileSource +from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue + +if TYPE_CHECKING: + from ruamel.yaml.docinfo import Version + + from dature.path_finders.base import PathFinder + + +@dataclass(kw_only=True, repr=False) +class _BaseYamlSource(FileSource, abc.ABC): + @abc.abstractmethod + def _yaml_version(self) -> "Version": ... + + def _load_file(self, path: FileOrStream) -> JSONValue: + from ruamel.yaml import YAML # noqa: PLC0415 + + yaml = YAML(typ="safe") + yaml.version = self._yaml_version() + if isinstance(path, FILE_LIKE_TYPES): + return cast("JSONValue", yaml.load(path)) + with path.open() as file: + return cast("JSONValue", yaml.load(file)) + + +@dataclass(kw_only=True, repr=False) +class Yaml11Source(_BaseYamlSource): + format_name = "yaml1.1" + + @classproperty + def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805 + from dature.path_finders.yaml_ import Yaml11PathFinder # noqa: PLC0415 + + return Yaml11PathFinder + + def _yaml_version(self) -> "Version": + from ruamel.yaml.docinfo import Version # noqa: PLC0415 + + return Version(1, 1) + + def additional_loaders(self) -> list[Provider]: + return [ + loader(date, date_passthrough), + loader(datetime, datetime_passthrough), + loader(time, time_from_int), + loader(bytearray, bytearray_from_string), + ] + + +@dataclass(kw_only=True, repr=False) +class Yaml12Source(_BaseYamlSource): + format_name = "yaml1.2" + + @classproperty + def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805 + from dature.path_finders.yaml_ import Yaml12PathFinder # noqa: PLC0415 + + return Yaml12PathFinder + + def _yaml_version(self) -> "Version": + from ruamel.yaml.docinfo import Version # noqa: PLC0415 + + return 
Version(1, 2) + + def additional_loaders(self) -> list[Provider]: + return [ + loader(date, date_passthrough), + loader(datetime, datetime_passthrough), + loader(time, time_from_string), + loader(bytearray, bytearray_from_string), + ] diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py deleted file mode 100644 index d3f425b..0000000 --- a/src/dature/sources_loader/base.py +++ /dev/null @@ -1,368 +0,0 @@ -import abc -import json -import logging -from dataclasses import fields, is_dataclass -from datetime import timedelta -from pathlib import Path -from typing import TYPE_CHECKING, Annotated, ClassVar, TypeVar, cast, get_args, get_origin, get_type_hints - -from adaptix import NameStyle as AdaptixNameStyle -from adaptix import Retort, loader, name_mapping -from adaptix.provider import Provider - -from dature.errors import LineRange, SourceLocation -from dature.expansion.alias_provider import AliasProvider, resolve_nested_owner -from dature.expansion.env_expand import expand_env_vars -from dature.field_path import FieldPath -from dature.fields.byte_size import ByteSize -from dature.fields.payment_card import PaymentCardNumber -from dature.fields.secret_str import SecretStr -from dature.path_finders.base import PathFinder -from dature.protocols import DataclassInstance, LoaderProtocol, ValidatorProtocol -from dature.skip_field_provider import ModelToDictProvider, SkipFieldProvider -from dature.sources_loader.loaders.base import ( - base64url_bytes_from_string, - base64url_str_from_string, - byte_size_from_string, - bytes_from_string, - complex_from_string, - payment_card_number_from_string, - secret_str_from_string, - timedelta_from_string, - url_from_string, -) -from dature.sources_loader.loaders.common import float_passthrough, int_from_string -from dature.types import ( - URL, - Base64UrlBytes, - Base64UrlStr, - DotSeparatedPath, - ExpandEnvVarsMode, - FieldMapping, - FieldValidators, - FileOrStream, - JSONValue, - LoadRawResult, - 
NameStyle, - NestedConflict, - TypeAnnotation, -) -from dature.validators.base import ( - create_metadata_validator_providers, - create_root_validator_providers, - create_validator_providers, - extract_validators_from_type, -) - -if TYPE_CHECKING: - from dature.types import TypeLoaderMap - -T = TypeVar("T") - -logger = logging.getLogger("dature") - - -class BaseLoader(LoaderProtocol, abc.ABC): - display_name: ClassVar[str] - display_label: ClassVar[str] = "FILE" - path_finder_class: type[PathFinder] | None = None - - def __init__( # noqa: PLR0913 - self, - *, - prefix: DotSeparatedPath | None = None, - name_style: NameStyle | None = None, - field_mapping: FieldMapping | None = None, - root_validators: tuple[ValidatorProtocol, ...] | None = None, - validators: FieldValidators | None = None, - expand_env_vars: ExpandEnvVarsMode = "default", - type_loaders: "TypeLoaderMap | None" = None, - ) -> None: - self._prefix = prefix - self._name_style = name_style - self._field_mapping = field_mapping - self._root_validators = root_validators or () - self._validators = validators or {} - self._expand_env_vars_mode = expand_env_vars - self._type_loaders = type_loaders - self.retorts: dict[type, Retort] = {} - - def _additional_loaders(self) -> list[Provider]: - return [] - - def _get_adaptix_name_style(self) -> AdaptixNameStyle | None: - if self._name_style is None: - return None - - name_style_map = { - "lower_snake": AdaptixNameStyle.LOWER_SNAKE, - "upper_snake": AdaptixNameStyle.UPPER_SNAKE, - "lower_camel": AdaptixNameStyle.CAMEL, - "upper_camel": AdaptixNameStyle.PASCAL, - "lower_kebab": AdaptixNameStyle.LOWER_KEBAB, - "upper_kebab": AdaptixNameStyle.UPPER_KEBAB, - } - return name_style_map.get(self._name_style) - - def _get_name_mapping_provider(self) -> list[Provider]: - providers: list[Provider] = [] - - adaptix_name_style = self._get_adaptix_name_style() - if adaptix_name_style is not None: - providers.append(name_mapping(name_style=adaptix_name_style)) - - if 
self._field_mapping: - owner_fields: dict[type[DataclassInstance] | str, dict[str, str]] = {} - for field_path_key in self._field_mapping: - if not isinstance(field_path_key, FieldPath): - continue - owner: type[DataclassInstance] | str = field_path_key.owner - if len(field_path_key.parts) > 1 and not isinstance(field_path_key.owner, str): - owner = resolve_nested_owner(field_path_key.owner, field_path_key.parts[:-1]) - field_name = field_path_key.parts[-1] - if owner not in owner_fields: - owner_fields[owner] = {} - owner_fields[owner][field_name] = field_name - - for owner, identity_map in owner_fields.items(): - if isinstance(owner, str): - providers.append(name_mapping(map=identity_map)) - else: - providers.append(name_mapping(owner, map=identity_map)) - - providers.append(AliasProvider(self._field_mapping)) - - return providers - - def _get_validator_providers(self, schema: type[T]) -> list[Provider]: - providers: list[Provider] = [] - type_hints = get_type_hints(schema, include_extras=True) - - for field in fields(cast("type[DataclassInstance]", schema)): - if field.name not in type_hints: - continue - - field_type = type_hints[field.name] - validators = extract_validators_from_type(field_type) - - if validators: - field_providers = create_validator_providers(schema, field.name, validators) - providers.extend(field_providers) - - for nested_dc in self._find_nested_dataclasses(field_type): - nested_providers = self._get_validator_providers(nested_dc) - providers.extend(nested_providers) - - return providers - - @staticmethod - def _find_nested_dataclasses( - field_type: TypeAnnotation, - ) -> list[type[DataclassInstance]]: - result: list[type[DataclassInstance]] = [] - queue: list[TypeAnnotation] = [field_type] - - while queue: - current = queue.pop() - - if is_dataclass(current): - result.append(current) - continue - - origin = get_origin(current) - if origin is Annotated: - queue.append(get_args(current)[0]) - elif origin is not None: - 
queue.extend(get_args(current)) - - return result - - @staticmethod - def _infer_type(value: str) -> JSONValue: - if value == "": - return value - - try: - return cast("JSONValue", json.loads(value)) - except (json.JSONDecodeError, ValueError): - return value - - @classmethod - def _parse_string_values(cls, data: JSONValue, *, infer_scalars: bool = False) -> JSONValue: - if not isinstance(data, dict): - return data - - result: dict[str, JSONValue] = {} - for key, value in data.items(): - if isinstance(value, dict): - result[key] = cls._parse_string_values(value, infer_scalars=True) - elif isinstance(value, str) and (infer_scalars or value.startswith(("[", "{"))): - result[key] = cls._infer_type(value) - else: - result[key] = value - return result - - def _base_recipe(self) -> list[Provider]: - user_loaders: list[Provider] = [loader(type_, func) for type_, func in (self._type_loaders or {}).items()] - default_loaders: list[Provider] = [ - loader(int, int_from_string), - loader(float, float_passthrough), - loader(bytes, bytes_from_string), - loader(complex, complex_from_string), - loader(timedelta, timedelta_from_string), - loader(URL, url_from_string), - loader(Base64UrlBytes, base64url_bytes_from_string), - loader(Base64UrlStr, base64url_str_from_string), - loader(SecretStr, secret_str_from_string), - loader(PaymentCardNumber, payment_card_number_from_string), - loader(ByteSize, byte_size_from_string), - ] - return [ - *user_loaders, - *self._additional_loaders(), - *default_loaders, - *self._get_name_mapping_provider(), - ] - - def create_retort(self) -> Retort: - return Retort( - strict_coercion=True, - recipe=self._base_recipe(), - ) - - def create_probe_retort(self) -> Retort: - return Retort( - strict_coercion=True, - recipe=[SkipFieldProvider(), ModelToDictProvider(), *self._base_recipe()], - ) - - def create_validating_retort(self, schema: type[T]) -> Retort: - root_validator_providers = create_root_validator_providers( - schema, - self._root_validators, - ) 
- metadata_validator_providers = create_metadata_validator_providers( - self._validators, - ) - return Retort( - strict_coercion=True, - recipe=[ - *self._get_validator_providers(schema), - *metadata_validator_providers, - *root_validator_providers, - *self._base_recipe(), - ], - ) - - @abc.abstractmethod - def _load(self, path: FileOrStream) -> JSONValue: ... - - def _apply_prefix(self, data: JSONValue) -> JSONValue: - if not self._prefix: - return data - - for key in self._prefix.split("."): - if not isinstance(data, dict): - return {} - if key not in data: - return {} - data = data[key] - - return data - - def _pre_processing(self, data: JSONValue) -> JSONValue: - prefixed = self._apply_prefix(data) - return expand_env_vars(prefixed, mode=self._expand_env_vars_mode) - - def transform_to_dataclass(self, data: JSONValue, schema: type[T]) -> T: - if schema not in self.retorts: - self.retorts[schema] = self.create_retort() - return self.retorts[schema].load(data, schema) - - def load_raw(self, path: FileOrStream) -> LoadRawResult: - data = self._load(path) - processed = self._pre_processing(data) - logger.debug( - "[%s] load_raw: path=%s, raw_keys=%s, after_preprocessing_keys=%s", - type(self).__name__, - path, - sorted(data.keys()) if isinstance(data, dict) else "", - sorted(processed.keys()) if isinstance(processed, dict) else "", - ) - return LoadRawResult(data=processed) - - @classmethod - def resolve_location( - cls, - field_path: list[str], - file_path: Path | None, - filecontent: str | None, - prefix: str | None, - split_symbols: str, # noqa: ARG003 - nested_conflict: NestedConflict | None, # noqa: ARG003 - ) -> list[SourceLocation]: - if filecontent is None or not field_path: - return [_empty_filelocation(cls.display_label, file_path)] - - if cls.path_finder_class is None: - return [_empty_filelocation(cls.display_label, file_path)] - - search_path = _build_search_path(field_path, prefix) - finder = cls.path_finder_class(filecontent) - line_range = 
finder.find_line_range(search_path) - if line_range is None: - line_range = _find_parent_line_range(finder, search_path) - if line_range is None: - return [_empty_filelocation(cls.display_label, file_path)] - - lines = filecontent.splitlines() - content_lines: list[str] | None = None - if 0 < line_range.start <= len(lines): - end = min(line_range.end, len(lines)) - raw = lines[line_range.start - 1 : end] - content_lines = _strip_common_indent(raw) - - return [ - SourceLocation( - display_label=cls.display_label, - file_path=file_path, - line_range=line_range, - line_content=content_lines, - env_var_name=None, - ), - ] - - -def _find_parent_line_range(finder: PathFinder, search_path: list[str]) -> LineRange | None: - path = search_path[:-1] - while path: - line_range = finder.find_line_range(path) - if line_range is not None: - return line_range - path = path[:-1] - return None - - -def _build_search_path(field_path: list[str], prefix: str | None) -> list[str]: - if not prefix: - return field_path - prefix_parts = prefix.split(".") - return prefix_parts + field_path - - -def _strip_common_indent(raw_lines: list[str]) -> list[str]: - indents = [len(line) - len(line.lstrip()) for line in raw_lines if line.strip()] - if not indents: - return raw_lines - min_indent = min(indents) - return [line[min_indent:] for line in raw_lines] - - -def _empty_filelocation(display_label: str, file_path: Path | None) -> SourceLocation: - return SourceLocation( - display_label=display_label, - file_path=file_path, - line_range=None, - line_content=None, - env_var_name=None, - ) diff --git a/src/dature/sources_loader/docker_secrets.py b/src/dature/sources_loader/docker_secrets.py deleted file mode 100644 index a602402..0000000 --- a/src/dature/sources_loader/docker_secrets.py +++ /dev/null @@ -1,65 +0,0 @@ -from pathlib import Path -from typing import ClassVar - -from dature.errors import SourceLocation -from dature.sources_loader.flat_key import FlatKeyLoader -from dature.types import 
FileOrStream, JSONValue, NestedConflict - - -class DockerSecretsLoader(FlatKeyLoader): - display_name = "docker_secrets" - display_label: ClassVar[str] = "SECRET FILE" - - @classmethod - def resolve_location( - cls, - field_path: list[str], - file_path: Path | None, - filecontent: str | None, # noqa: ARG003 - prefix: str | None, - split_symbols: str, - nested_conflict: NestedConflict | None, - ) -> list[SourceLocation]: - if nested_conflict is not None: - json_var = cls._resolve_var_name(field_path[:1], prefix, split_symbols, None) - if nested_conflict.used_var == json_var: - secret_name = field_path[0] - else: - secret_name = split_symbols.join(field_path) - else: - secret_name = split_symbols.join(field_path) - if prefix is not None: - secret_name = prefix + secret_name - secret_file = file_path / secret_name if file_path is not None else None - return [ - SourceLocation( - display_label=cls.display_label, - file_path=secret_file, - line_range=None, - line_content=None, - env_var_name=None, - ), - ] - - def _load(self, path: FileOrStream) -> JSONValue: - if not isinstance(path, Path): - msg = "DockerSecretsLoader does not support file-like objects" - raise TypeError(msg) - - result: dict[str, JSONValue] = {} - for entry in sorted(path.iterdir()): - if not entry.is_file(): - continue - - key = entry.name.lower() - value = entry.read_text().strip() - - if self._prefix and not key.startswith(self._prefix.lower()): - continue - - if self._prefix: - key = key[len(self._prefix) :] - - result[key] = value - - return result diff --git a/src/dature/sources_loader/flat_key.py b/src/dature/sources_loader/flat_key.py deleted file mode 100644 index 40c77e8..0000000 --- a/src/dature/sources_loader/flat_key.py +++ /dev/null @@ -1,191 +0,0 @@ -import abc -from datetime import date, datetime, time -from typing import TYPE_CHECKING, cast - -from adaptix import loader -from adaptix.provider import Provider - -from dature.expansion.env_expand import expand_env_vars -from 
dature.protocols import ValidatorProtocol -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( - bool_loader, - bytearray_from_json_string, - date_from_string, - datetime_from_string, - float_from_string, - none_from_empty_string, - optional_from_empty_string, - str_from_scalar, - time_from_string, -) -from dature.types import ( - DotSeparatedPath, - ExpandEnvVarsMode, - FieldMapping, - FieldValidators, - FileOrStream, - JSONValue, - LoadRawResult, - NameStyle, - NestedConflict, - NestedConflicts, - NestedResolve, - NestedResolveStrategy, -) - -if TYPE_CHECKING: - from dature.field_path import FieldPath - from dature.types import TypeLoaderMap - - -def set_nested(d: dict[str, JSONValue], keys: list[str], value: str) -> None: - for key in keys[:-1]: - d = cast("dict[str, JSONValue]", d.setdefault(key, {})) - d[keys[-1]] = value - - -class FlatKeyLoader(BaseLoader, abc.ABC): - def __init__( # noqa: PLR0913 - self, - *, - prefix: DotSeparatedPath | None = None, - split_symbols: str = "__", - name_style: NameStyle | None = None, - field_mapping: FieldMapping | None = None, - root_validators: tuple[ValidatorProtocol, ...] 
| None = None, - validators: FieldValidators | None = None, - expand_env_vars: ExpandEnvVarsMode = "default", - type_loaders: "TypeLoaderMap | None" = None, - nested_resolve_strategy: NestedResolveStrategy = "flat", - nested_resolve: NestedResolve | None = None, - ) -> None: - self._split_symbols = split_symbols - self._nested_resolve_strategy = nested_resolve_strategy - self._nested_resolve = nested_resolve - super().__init__( - prefix=prefix, - name_style=name_style, - field_mapping=field_mapping, - root_validators=root_validators, - validators=validators, - expand_env_vars=expand_env_vars, - type_loaders=type_loaders, - ) - - def _resolve_field_strategy(self, field_name: str) -> NestedResolveStrategy: - if self._nested_resolve is not None: - for strategy, field_paths in self._nested_resolve.items(): - for fp in field_paths: - if self._field_path_matches(fp, field_name): - return strategy - return self._nested_resolve_strategy - - @staticmethod - def _field_path_matches(fp: "FieldPath", field_name: str) -> bool: - if not fp.parts: - return True - return fp.parts[0] == field_name - - def _additional_loaders(self) -> list[Provider]: - return [ - loader(str, str_from_scalar), - loader(float, float_from_string), - loader(date, date_from_string), - loader(datetime, datetime_from_string), - loader(time, time_from_string), - loader(bytearray, bytearray_from_json_string), - loader(type(None), none_from_empty_string), - loader(str | None, optional_from_empty_string), - loader(bool, bool_loader), - ] - - @staticmethod - def _resolve_var_name( - field_path: list[str], - prefix: str | None, - split_symbols: str, - conflict: NestedConflict | None, - ) -> str: - def _build_name(parts: list[str]) -> str: - var = split_symbols.join(p.upper() for p in parts) - if prefix is not None: - return prefix + var - return var - - json_var = _build_name(field_path[:1]) - if conflict is not None and conflict.used_var == json_var: - return json_var - return _build_name(field_path) - - def 
_build_var_name(self, key: str) -> str: - if self._prefix: - return self._prefix + key.upper() - return key.upper() - - def _build_nested_var_name(self, top_field: str, nested: dict[str, JSONValue]) -> str: - for sub_key in nested: - full_key = f"{top_field}{self._split_symbols}{sub_key}" - return self._build_var_name(full_key) - return self._build_var_name(top_field) - - def _pre_process_row( - self, - key: str, - value: str, - result: dict[str, JSONValue], - conflicts: NestedConflicts, - ) -> None: - parts = key.split(self._split_symbols) - self._process_key_value(parts=parts, value=value, result=result, conflicts=conflicts) - - def load_raw(self, path: FileOrStream) -> LoadRawResult: - data = self._load(path) - data_dict = cast("dict[str, str]", data) - result: dict[str, JSONValue] = {} - conflicts: NestedConflicts = {} - - for key, value in data_dict.items(): - self._pre_process_row(key=key, value=value, result=result, conflicts=conflicts) - - expanded = expand_env_vars(result, mode=self._expand_env_vars_mode) - processed = self._parse_string_values(expanded) - return LoadRawResult(data=processed, nested_conflicts=conflicts) - - def _process_key_value( - self, - *, - parts: list[str], - value: str, - result: dict[str, JSONValue], - conflicts: NestedConflicts, - ) -> None: - if len(parts) > 1: - top_field = parts[0] - strategy = self._resolve_field_strategy(top_field) - existing = result.get(top_field) - if isinstance(existing, str): - flat_var = self._build_var_name(self._split_symbols.join(parts)) - json_var = self._build_var_name(top_field) - if strategy == "flat": - result.pop(top_field) - set_nested(result, parts, value) - conflicts[top_field] = NestedConflict(flat_var, json_var, existing) - elif strategy == "json": - conflicts[top_field] = NestedConflict(json_var, flat_var, existing) - else: - set_nested(result, parts, value) - else: - top_field = parts[0] - strategy = self._resolve_field_strategy(top_field) - existing = result.get(top_field) - if 
isinstance(existing, dict): - json_var = self._build_var_name(top_field) - flat_var = self._build_nested_var_name(top_field, existing) - if strategy == "json": - result[top_field] = value - conflicts[top_field] = NestedConflict(json_var, flat_var, value) - elif strategy == "flat": - conflicts[top_field] = NestedConflict(flat_var, json_var, value) - else: - result[top_field] = value diff --git a/src/dature/sources_loader/ini_.py b/src/dature/sources_loader/ini_.py deleted file mode 100644 index 24fa85c..0000000 --- a/src/dature/sources_loader/ini_.py +++ /dev/null @@ -1,77 +0,0 @@ -import configparser -import io -from datetime import date, datetime, time -from typing import cast - -from adaptix import loader -from adaptix.provider import Provider - -from dature.expansion.env_expand import expand_env_vars -from dature.path_finders.ini_ import TablePathFinder -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( - bool_loader, - bytearray_from_json_string, - date_from_string, - datetime_from_string, - float_from_string, - none_from_empty_string, - optional_from_empty_string, - str_from_scalar, - time_from_string, -) -from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue - - -class IniLoader(BaseLoader): - display_name = "ini" - path_finder_class = TablePathFinder - - def _additional_loaders(self) -> list[Provider]: - return [ - loader(str, str_from_scalar), - loader(float, float_from_string), - loader(date, date_from_string), - loader(datetime, datetime_from_string), - loader(time, time_from_string), - loader(bytearray, bytearray_from_json_string), - loader(type(None), none_from_empty_string), - loader(str | None, optional_from_empty_string), - loader(bool, bool_loader), - ] - - def _pre_processing(self, data: JSONValue) -> JSONValue: - prefixed = self._apply_prefix(data) - expanded = expand_env_vars(prefixed, mode=self._expand_env_vars_mode) - return self._parse_string_values(expanded) - - def 
_load(self, path: FileOrStream) -> JSONValue: - config = configparser.ConfigParser(interpolation=None) - if isinstance(path, TEXT_IO_TYPES): - config.read_file(path) - elif isinstance(path, BINARY_IO_TYPES): - config.read_file(io.TextIOWrapper(cast("io.BufferedReader", path))) - else: - with path.open() as f: - config.read_file(f) - if self._prefix and self._prefix in config: - result: dict[str, JSONValue] = dict(config[self._prefix]) - child_prefix = self._prefix + "." - for section in config.sections(): - if section.startswith(child_prefix): - nested_key = section[len(child_prefix) :] - result[nested_key] = dict(config[section]) - return {self._prefix: result} - - all_sections: dict[str, JSONValue] = {} - if config.defaults(): - all_sections["DEFAULT"] = dict(config.defaults()) - for section in config.sections(): - parts = section.split(".") - target = all_sections - for part in parts[:-1]: - if part not in target: - target[part] = {} - target = cast("dict[str, JSONValue]", target[part]) - target[parts[-1]] = dict(config[section]) - return all_sections diff --git a/src/dature/sources_loader/toml_.py b/src/dature/sources_loader/toml_.py deleted file mode 100644 index 06f6647..0000000 --- a/src/dature/sources_loader/toml_.py +++ /dev/null @@ -1,61 +0,0 @@ -import abc -from datetime import date, datetime, time -from typing import Any, cast - -import toml_rs -from adaptix import loader -from adaptix.provider import Provider -from toml_rs._lib import TomlVersion - -from dature.path_finders.toml_ import Toml10PathFinder, Toml11PathFinder -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( - bytearray_from_string, - date_passthrough, - datetime_passthrough, - none_from_empty_string, - optional_from_empty_string, -) -from dature.sources_loader.loaders.toml_ import time_passthrough -from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue - - -class BaseTomlLoader(BaseLoader, abc.ABC): - @abc.abstractmethod - def 
_toml_version(self) -> TomlVersion: ... - - def _load(self, path: FileOrStream) -> JSONValue: - if isinstance(path, FILE_LIKE_TYPES): - content = path.read() - if isinstance(content, bytes): - content = content.decode() - return cast("JSONValue", toml_rs.loads(content, toml_version=self._toml_version())) - with path.open() as file: - return cast("JSONValue", toml_rs.loads(file.read(), toml_version=self._toml_version())) - - def _additional_loaders(self) -> list[Provider]: - return [ - loader(date, date_passthrough), - loader(datetime, datetime_passthrough), - loader(time, time_passthrough), - loader(bytearray, bytearray_from_string), - loader(type(None), none_from_empty_string), - loader(str | None, optional_from_empty_string), - loader(Any, optional_from_empty_string), - ] - - -class Toml10Loader(BaseTomlLoader): - display_name = "toml1.0" - path_finder_class = Toml10PathFinder - - def _toml_version(self) -> TomlVersion: - return "1.0.0" - - -class Toml11Loader(BaseTomlLoader): - display_name = "toml1.1" - path_finder_class = Toml11PathFinder - - def _toml_version(self) -> TomlVersion: - return "1.1.0" diff --git a/src/dature/sources_loader/yaml_.py b/src/dature/sources_loader/yaml_.py deleted file mode 100644 index 0f969db..0000000 --- a/src/dature/sources_loader/yaml_.py +++ /dev/null @@ -1,64 +0,0 @@ -import abc -from datetime import date, datetime, time -from typing import cast - -from adaptix import loader -from adaptix.provider import Provider -from ruamel.yaml import YAML -from ruamel.yaml.docinfo import Version - -from dature.path_finders.yaml_ import Yaml11PathFinder, Yaml12PathFinder -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import ( - bytearray_from_string, - date_passthrough, - datetime_passthrough, - time_from_string, -) -from dature.sources_loader.loaders.yaml_ import time_from_int -from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue - - -class BaseYamlLoader(BaseLoader, abc.ABC): - 
@abc.abstractmethod - def _yaml_version(self) -> Version: ... - - def _load(self, path: FileOrStream) -> JSONValue: - yaml = YAML(typ="safe") - yaml.version = self._yaml_version() - if isinstance(path, FILE_LIKE_TYPES): - return cast("JSONValue", yaml.load(path)) - with path.open() as file: - return cast("JSONValue", yaml.load(file)) - - -class Yaml11Loader(BaseYamlLoader): - display_name = "yaml1.1" - path_finder_class = Yaml11PathFinder - - def _yaml_version(self) -> Version: - return Version(1, 1) - - def _additional_loaders(self) -> list[Provider]: - return [ - loader(date, date_passthrough), - loader(datetime, datetime_passthrough), - loader(time, time_from_int), - loader(bytearray, bytearray_from_string), - ] - - -class Yaml12Loader(BaseYamlLoader): - display_name = "yaml1.2" - path_finder_class = Yaml12PathFinder - - def _yaml_version(self) -> Version: - return Version(1, 2) - - def _additional_loaders(self) -> list[Provider]: - return [ - loader(date, date_passthrough), - loader(datetime, datetime_passthrough), - loader(time, time_from_string), - loader(bytearray, bytearray_from_string), - ] diff --git a/src/dature/type_utils.py b/src/dature/type_utils.py new file mode 100644 index 0000000..e52a255 --- /dev/null +++ b/src/dature/type_utils.py @@ -0,0 +1,24 @@ +from dataclasses import is_dataclass +from typing import Annotated, get_args, get_origin + +from dature.types import TypeAnnotation + + +def find_nested_dataclasses(field_type: TypeAnnotation) -> list[type]: + result: list[type] = [] + queue: list[TypeAnnotation] = [field_type] + + while queue: + current = queue.pop() + + if is_dataclass(current): + result.append(current) + continue + + origin = get_origin(current) + if origin is Annotated: + queue.append(get_args(current)[0]) + elif origin is not None: + queue.extend(get_args(current)) + + return result diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py index 241c89f..e0cd3f0 100644 --- a/tests/errors/test_exceptions.py 
+++ b/tests/errors/test_exceptions.py @@ -3,7 +3,7 @@ import pytest -from dature import Source, load +from dature import EnvSource, JsonSource, Toml11Source, Yaml12Source, load from dature.errors import DatureConfigError, FieldLoadError, LineRange, SourceLocation @@ -16,7 +16,7 @@ def test_single_error_message(self): input_value="30", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.toml"), line_range=LineRange(start=2, end=2), line_content=['timeout = "30"'], @@ -42,7 +42,7 @@ def test_multiple_errors_message(self): input_value="abc", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=2, end=2), line_content=['"timeout": "abc"'], @@ -56,7 +56,7 @@ def test_multiple_errors_message(self): input_value=None, locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=None, line_content=None, @@ -83,7 +83,7 @@ def test_env_error_message(self): input_value="abc", locations=[ SourceLocation( - display_label="ENV", + location_label="ENV", file_path=None, line_range=None, line_content=None, @@ -109,7 +109,7 @@ def test_caret_points_to_value_not_key_when_same_string(self) -> None: input_value="name", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.toml"), line_range=LineRange(start=1, end=1), line_content=['name = "name"'], @@ -134,7 +134,7 @@ def test_caret_points_to_value_in_json_duplicate_string(self) -> None: input_value="host", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=2, end=2), line_content=['"host": "host"'], @@ -157,7 +157,7 @@ def test_json_type_error_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"timeout": "abc", "name": "test"}') - metadata = Source(file=json_file) + metadata = 
JsonSource(file=json_file) @load(metadata) @dataclass @@ -192,7 +192,7 @@ class Config: name: str port: int - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -214,7 +214,7 @@ class Config: timeout: int name: str - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -249,7 +249,7 @@ class DB: class Config: db: DB - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -271,7 +271,7 @@ def test_env_type_error(self, monkeypatch: pytest.MonkeyPatch): monkeypatch.setenv("APP_TIMEOUT", "abc") monkeypatch.setenv("APP_NAME", "test") - metadata = Source(prefix="APP_") + metadata = EnvSource(prefix="APP_") @load(metadata) @dataclass @@ -298,7 +298,7 @@ class Config: name: str timeout: int - metadata = Source(file=toml_file) + metadata = Toml11Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -326,7 +326,7 @@ class Config: name: str timeout: int - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -378,7 +378,7 @@ def test_filesource_truncation( input_value="30", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.toml"), line_range=LineRange(start=2, end=2), line_content=[line_content], @@ -425,7 +425,7 @@ def test_envfilesource_truncation( input_value="abc", locations=[ SourceLocation( - display_label="ENV FILE", + location_label="ENV FILE", file_path=Path(".env"), line_range=LineRange(start=2, end=2), line_content=[line_content], @@ -452,7 +452,7 @@ def test_multiline_content_each_line_truncated(self) -> None: input_value=None, locations=[ 
SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=2, end=4), line_content=[line_long, line_short, line_long], @@ -480,7 +480,7 @@ def test_four_lines_shows_two_and_ellipsis(self) -> None: input_value=None, locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=2, end=5), line_content=["line1", "line2", "line3", "line4"], @@ -507,7 +507,7 @@ def test_five_lines_shows_two_and_ellipsis(self) -> None: input_value=None, locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=2, end=6), line_content=["line1", "line2", "line3", "line4", "line5"], @@ -538,7 +538,7 @@ def test_value_fully_past_truncation_skips_caret(self) -> None: input_value=0, locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.json"), line_range=LineRange(start=1, end=1), line_content=[line], @@ -564,7 +564,7 @@ def test_value_partially_truncated_shows_partial_caret(self) -> None: input_value="abcdefghij", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.toml"), line_range=LineRange(start=1, end=1), line_content=[line], @@ -591,7 +591,7 @@ def test_value_within_visible_area_shows_full_caret(self) -> None: input_value="30", locations=[ SourceLocation( - display_label="FILE", + location_label="FILE", file_path=Path("config.toml"), line_range=LineRange(start=2, end=2), line_content=[line], @@ -619,7 +619,7 @@ def test_json_multiline_dict(self, tmp_path: Path): class Config: db: int - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -643,7 +643,7 @@ class Config: db: int name: str - metadata = Source(file=yaml_file) + metadata = Yaml12Source(file=yaml_file) with 
pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -666,7 +666,7 @@ def test_toml_multiline_array(self, tmp_path: Path): class Config: tags: int - metadata = Source(file=toml_file) + metadata = Toml11Source(file=toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -689,7 +689,7 @@ def test_json_multiline_array(self, tmp_path: Path): class Config: tags: int - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -714,7 +714,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file=array_of_tables_toml_file) + metadata = Toml11Source(file=array_of_tables_toml_file) result = load(metadata, schema=Config) assert result == Config( @@ -735,7 +735,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file=array_of_tables_error_first_toml_file) + metadata = Toml11Source(file=array_of_tables_error_first_toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -760,7 +760,7 @@ class Product: class Config: product: list[Product] - metadata = Source(file=array_of_tables_error_last_toml_file) + metadata = Toml11Source(file=array_of_tables_error_last_toml_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py index cd8be37..8860b4d 100644 --- a/tests/errors/test_fixtures.py +++ b/tests/errors/test_fixtures.py @@ -4,9 +4,8 @@ import pytest -from dature import Source, load +from dature import EnvFileSource, IniSource, Json5Source, JsonSource, Toml11Source, Yaml11Source, Yaml12Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems from 
dature.validators.string import MaxLength, MinLength, RegexPattern @@ -54,14 +53,13 @@ class ValidationErrorConfig: FIXTURES_DIR = Path(__file__).parent.parent / "fixtures" ALL_SOURCES = [ - ("errors.json", {}), - ("errors.json5", {}), - ("errors.yaml", {}), - ("errors.yaml", {"loader": Yaml11Loader}), - ("errors.yaml", {"loader": Yaml12Loader}), - ("errors.toml", {}), - ("errors.ini", {"prefix": "config"}), - ("errors.env", {}), + ("errors.json", JsonSource, {}), + ("errors.json5", Json5Source, {}), + ("errors.yaml", Yaml11Source, {}), + ("errors.yaml", Yaml12Source, {}), + ("errors.toml", Toml11Source, {}), + ("errors.ini", IniSource, {"prefix": "config"}), + ("errors.env", EnvFileSource, {}), ] EXPECTED_LOAD_ERRORS = [ @@ -93,12 +91,13 @@ def _assert_field_errors( assert exc.message == message -@pytest.mark.parametrize(("fixture_file", "metadata_kwargs"), ALL_SOURCES) +@pytest.mark.parametrize(("fixture_file", "source_class", "source_kwargs"), ALL_SOURCES) def test_load_error_types( fixture_file: str, - metadata_kwargs: dict[str, str], + source_class: type, + source_kwargs: dict[str, str], ) -> None: - metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) + metadata = source_class(file=str(FIXTURES_DIR / fixture_file), **source_kwargs) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=LoadErrorConfig) @@ -108,12 +107,13 @@ def test_load_error_types( _assert_field_errors(err.exceptions, EXPECTED_LOAD_ERRORS) -@pytest.mark.parametrize(("fixture_file", "metadata_kwargs"), ALL_SOURCES) +@pytest.mark.parametrize(("fixture_file", "source_class", "source_kwargs"), ALL_SOURCES) def test_validation_error_types( fixture_file: str, - metadata_kwargs: dict[str, str], + source_class: type, + source_kwargs: dict[str, str], ) -> None: - metadata = Source(file=str(FIXTURES_DIR / fixture_file), **metadata_kwargs) + metadata = source_class(file=str(FIXTURES_DIR / fixture_file), **source_kwargs) with pytest.raises(DatureConfigError) as 
exc_info: load(metadata, schema=ValidationErrorConfig) diff --git a/tests/errors/test_location.py b/tests/errors/test_location.py index 72b23e0..2295f35 100644 --- a/tests/errors/test_location.py +++ b/tests/errors/test_location.py @@ -1,60 +1,58 @@ from pathlib import Path +from dature import EnvFileSource, EnvSource, JsonSource, Toml11Source from dature.errors import LineRange from dature.errors.location import ErrorContext, resolve_source_location -from dature.sources_loader.env_ import EnvFileLoader, EnvLoader -from dature.sources_loader.json_ import JsonLoader -from dature.sources_loader.toml_ import Toml11Loader class TestResolveSourceLocation: def test_env_source(self): ctx = ErrorContext( dataclass_name="Config", - loader_class=EnvLoader, + source_class=EnvSource, file_path=None, prefix="APP_", split_symbols="__", ) - locs = resolve_source_location(["database", "port"], ctx, filecontent=None) + locs = resolve_source_location(["database", "port"], ctx, file_content=None) assert len(locs) == 1 - assert locs[0].display_label == "ENV" + assert locs[0].location_label == "ENV" assert locs[0].env_var_name == "APP_DATABASE__PORT" assert locs[0].file_path is None def test_env_source_no_prefix(self): ctx = ErrorContext( dataclass_name="Config", - loader_class=EnvLoader, + source_class=EnvSource, file_path=None, prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, filecontent=None) + locs = resolve_source_location(["timeout"], ctx, file_content=None) assert locs[0].env_var_name == "TIMEOUT" def test_env_source_custom_split_symbols(self): ctx = ErrorContext( dataclass_name="Config", - loader_class=EnvLoader, + source_class=EnvSource, file_path=None, prefix="APP_", split_symbols="_", ) - locs = resolve_source_location(["database", "port"], ctx, filecontent=None) + locs = resolve_source_location(["database", "port"], ctx, file_content=None) assert locs[0].env_var_name == "APP_DATABASE_PORT" def test_json_source_with_line(self): content = 
'{\n "timeout": "30",\n "name": "test"\n}' ctx = ErrorContext( dataclass_name="Config", - loader_class=JsonLoader, + source_class=JsonSource, file_path=Path("config.json"), prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, filecontent=content) - assert locs[0].display_label == "FILE" + locs = resolve_source_location(["timeout"], ctx, file_content=content) + assert locs[0].location_label == "FILE" assert locs[0].line_range == LineRange(start=2, end=2) assert locs[0].line_content == ['"timeout": "30",'] @@ -62,13 +60,13 @@ def test_toml_source_with_line(self): content = 'timeout = "30"\nname = "test"' ctx = ErrorContext( dataclass_name="Config", - loader_class=Toml11Loader, + source_class=Toml11Source, file_path=Path("config.toml"), prefix=None, split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, filecontent=content) - assert locs[0].display_label == "FILE" + locs = resolve_source_location(["timeout"], ctx, file_content=content) + assert locs[0].location_label == "FILE" assert locs[0].line_range == LineRange(start=1, end=1) assert locs[0].line_content == ['timeout = "30"'] @@ -76,13 +74,13 @@ def test_envfilesource(self): content = "# comment\nAPP_TIMEOUT=30\nAPP_NAME=test" ctx = ErrorContext( dataclass_name="Config", - loader_class=EnvFileLoader, + source_class=EnvFileSource, file_path=Path(".env"), prefix="APP_", split_symbols="__", ) - locs = resolve_source_location(["timeout"], ctx, filecontent=content) - assert locs[0].display_label == "ENV FILE" + locs = resolve_source_location(["timeout"], ctx, file_content=content) + assert locs[0].location_label == "ENV FILE" assert locs[0].env_var_name == "APP_TIMEOUT" assert locs[0].line_range == LineRange(start=2, end=2) assert locs[0].line_content == ["APP_TIMEOUT=30"] @@ -91,37 +89,37 @@ def test_filesource_does_not_mask_non_secret_field(self): content = '{\n "password": "secret123",\n "timeout": "30"\n}' ctx = ErrorContext( dataclass_name="Config", - 
loader_class=JsonLoader, + source_class=JsonSource, file_path=Path("config.json"), prefix=None, split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["timeout"], ctx, filecontent=content) + locs = resolve_source_location(["timeout"], ctx, file_content=content) assert locs[0].line_content == ['"timeout": "30"'] def test_filesource_masks_secret_field(self): content = '{\n "password": "secret123",\n "timeout": "30"\n}' ctx = ErrorContext( dataclass_name="Config", - loader_class=JsonLoader, + source_class=JsonSource, file_path=Path("config.json"), prefix=None, split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["password"], ctx, filecontent=content) + locs = resolve_source_location(["password"], ctx, file_content=content) assert locs[0].line_content == ['"password": "",'] def test_filesource_masks_line_when_secret_on_same_line(self): content = '{"password": "secret123", "timeout": "30"}' ctx = ErrorContext( dataclass_name="Config", - loader_class=JsonLoader, + source_class=JsonSource, file_path=Path("config.json"), prefix=None, split_symbols="__", secret_paths=frozenset({"password"}), ) - locs = resolve_source_location(["timeout"], ctx, filecontent=content) + locs = resolve_source_location(["timeout"], ctx, file_content=content) assert locs[0].line_content == ['{"password": "", "timeout": "30"}'] diff --git a/tests/expansion/test_expand_file_path.py b/tests/expansion/test_expand_file_path.py index 7d4371b..8823ed5 100644 --- a/tests/expansion/test_expand_file_path.py +++ b/tests/expansion/test_expand_file_path.py @@ -3,9 +3,9 @@ import pytest +from dature import EnvSource, Toml11Source from dature.errors import EnvVarExpandError from dature.expansion.env_expand import expand_file_path -from dature.metadata import Source SEP = os.sep @@ -142,17 +142,17 @@ def test_fileexpanded( for key, value in env_vars.items(): monkeypatch.setenv(key, value) - source = Source(file=file) + source = 
Toml11Source(file=file) assert source.file == expected def test_none_fileunchanged(self) -> None: - source = Source() + source = EnvSource() - assert source.file is None + assert not hasattr(source, "file") def test_missing_var_raises(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.delenv("DATURE_MISSING", raising=False) with pytest.raises(EnvVarExpandError): - Source(file="$DATURE_MISSING/config.toml") + Toml11Source(file="$DATURE_MISSING/config.toml") diff --git a/tests/sources_loader/loaders/__init__.py b/tests/loaders/__init__.py similarity index 100% rename from tests/sources_loader/loaders/__init__.py rename to tests/loaders/__init__.py diff --git a/tests/sources_loader/loaders/test_base.py b/tests/loaders/test_base.py similarity index 82% rename from tests/sources_loader/loaders/test_base.py rename to tests/loaders/test_base.py index afdff07..0e7405e 100644 --- a/tests/sources_loader/loaders/test_base.py +++ b/tests/loaders/test_base.py @@ -8,7 +8,7 @@ from dature.fields.byte_size import ByteSize from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr -from dature.sources_loader.loaders.base import ( +from dature.loaders.base import ( base64url_bytes_from_string, base64url_str_from_string, byte_size_from_string, @@ -35,12 +35,19 @@ def test_bytes_from_string(input_value, expected): ("input_value", "expected"), [ ("1+2j", 1 + 2j), + ("1 + 2j", 1 + 2j), ], + ids=["compact", "with-spaces"], ) def test_complex_from_string(input_value, expected): assert complex_from_string(input_value) == expected +def test_complex_from_string_invalid(): + with pytest.raises(ValueError, match="complex\\(\\) arg is a malformed string"): + complex_from_string("not-a-complex") + + @pytest.mark.parametrize( ("input_value", "expected"), [ @@ -49,6 +56,7 @@ def test_complex_from_string(input_value, expected): ("0:00:01", timedelta(seconds=1)), ("0:45:00", timedelta(minutes=45)), ("2:03:04.500000", timedelta(hours=2, minutes=3, 
seconds=4, microseconds=500000)), + ("0:00:01.5", timedelta(seconds=1, microseconds=500000)), ("1 day, 2:30:00", timedelta(days=1, hours=2, minutes=30)), ("2 days, 0:00:00", timedelta(days=2)), ("1 day, 2:03:04.500000", timedelta(days=1, hours=2, minutes=3, seconds=4, microseconds=500000)), @@ -83,9 +91,14 @@ def test_timedelta_from_string(input_value: str, expected: timedelta): assert timedelta_from_string(input_value) == expected -def test_timedelta_from_string_invalid(): +@pytest.mark.parametrize( + "input_value", + ["not a timedelta", ""], + ids=["invalid-text", "empty-string"], +) +def test_timedelta_from_string_invalid(input_value: str): with pytest.raises(ValueError, match="Invalid timedelta format"): - timedelta_from_string("not a timedelta") + timedelta_from_string(input_value) @pytest.mark.parametrize( @@ -146,18 +159,24 @@ def test_secret_str_from_string(input_value: str, expected: SecretStr): @pytest.mark.parametrize( - ("input_value", "expected_brand"), + ("card_number", "expected_brand"), [ - ("4111111111111111", "Visa"), - ("5500000000000004", "Mastercard"), + ("4000000000000002", "Visa"), + ("5100000000000008", "Mastercard"), ], + ids=["visa", "mastercard"], ) -def test_payment_card_number_from_string(input_value: str, expected_brand: str): - result = payment_card_number_from_string(input_value) +def test_payment_card_number_from_string(card_number, expected_brand: str): + result = payment_card_number_from_string(card_number) assert isinstance(result, PaymentCardNumber) assert result.brand == expected_brand +def test_payment_card_number_from_string_invalid(): + with pytest.raises(ValueError, match="Card number must be 12-19 digits"): + payment_card_number_from_string("1234") + + @pytest.mark.parametrize( ("input_value", "expected"), [ @@ -167,3 +186,8 @@ def test_payment_card_number_from_string(input_value: str, expected_brand: str): ) def test_byte_size_from_string(input_value: str | int, expected: ByteSize): assert 
byte_size_from_string(input_value) == expected + + +def test_byte_size_from_string_invalid(): + with pytest.raises(ValueError, match="Invalid byte size format"): + byte_size_from_string("not-a-size") diff --git a/tests/sources_loader/loaders/test_common.py b/tests/loaders/test_common.py similarity index 68% rename from tests/sources_loader/loaders/test_common.py rename to tests/loaders/test_common.py index f300faf..7876dc4 100644 --- a/tests/sources_loader/loaders/test_common.py +++ b/tests/loaders/test_common.py @@ -1,11 +1,12 @@ """Tests for common loader functions (used across multiple formats).""" +import math from datetime import date, datetime, time import pytest from adaptix.load_error import TypeLoadError -from dature.sources_loader.loaders.common import ( +from dature.loaders.common import ( bool_loader, bytearray_from_json_string, bytearray_from_string, @@ -13,10 +14,12 @@ date_passthrough, datetime_from_string, datetime_passthrough, + float_from_string, float_passthrough, int_from_string, none_from_empty_string, optional_from_empty_string, + str_from_scalar, time_from_string, ) @@ -54,6 +57,16 @@ def test_time_from_string(input_value, expected): assert time_from_string(input_value) == expected +@pytest.mark.parametrize( + "input_value", + ["10", "10:30:45:99", "abc"], + ids=["one-part", "four-parts", "non-numeric"], +) +def test_time_from_string_invalid(input_value): + with pytest.raises(ValueError, match="Invalid time format"): + time_from_string(input_value) + + @pytest.mark.parametrize( ("input_value", "expected"), [ @@ -97,6 +110,11 @@ def test_none_from_empty_string(input_value, expected): assert none_from_empty_string(input_value) is expected +def test_none_from_empty_string_non_empty_raises(): + with pytest.raises(TypeLoadError): + none_from_empty_string("not empty") + + @pytest.mark.parametrize( ("input_value", "expected"), [ @@ -108,6 +126,22 @@ def test_optional_from_empty_string(input_value, expected): assert 
optional_from_empty_string(input_value) == expected +# === str_from_scalar === + + +@pytest.mark.parametrize( + ("input_value", "expected"), + [ + ("hello", "hello"), + (3.14, "3.14"), + (True, "True"), + ], + ids=["string", "float", "bool"], +) +def test_str_from_scalar(input_value, expected): + assert str_from_scalar(input_value) == expected + + # === Bool converter === @@ -133,6 +167,11 @@ def test_bool_loader(input_value, expected): assert bool_loader(input_value) is expected +def test_bool_loader_invalid_string(): + with pytest.raises(TypeLoadError): + bool_loader("maybe") + + # === Int converter === @@ -158,6 +197,11 @@ def test_int_from_string_rejects_invalid(input_value): int_from_string(input_value) +def test_int_from_string_invalid_string(): + with pytest.raises(ValueError, match="invalid literal for int"): + int_from_string("not-a-number") + + # === Float passthrough === @@ -183,6 +227,35 @@ def test_float_passthrough_rejects_invalid(input_value): float_passthrough(input_value) +# === Float from string === + + +@pytest.mark.parametrize( + ("input_value", "expected"), + [ + ("3.14", 3.14), + ("inf", float("inf")), + ("+inf", float("inf")), + ("-inf", float("-inf")), + (3.14, 3.14), + (42, 42.0), + ], + ids=["string", "inf", "plus-inf", "minus-inf", "float-passthrough", "int-to-float"], +) +def test_float_from_string(input_value, expected): + assert float_from_string(input_value) == expected + + +def test_float_from_string_nan(): + result = float_from_string("nan") + assert math.isnan(result) + + +def test_float_from_string_invalid(): + with pytest.raises(ValueError, match="could not convert string to float"): + float_from_string("not-a-number") + + # === JSON string converters === @@ -191,7 +264,10 @@ def test_float_passthrough_rejects_invalid(input_value): [ ("hello", bytearray(b"hello")), ("", bytearray()), + ("[72, 101, 108]", bytearray([72, 101, 108])), + ('{"key": "val"}', bytearray(b'{"key": "val"}')), ], + ids=["plain-string", "empty", 
"json-array", "non-bracket-string"], ) def test_bytearray_from_json_string(input_value, expected): assert bytearray_from_json_string(input_value) == expected diff --git a/tests/sources_loader/loaders/test_json5_.py b/tests/loaders/test_json5_.py similarity index 84% rename from tests/sources_loader/loaders/test_json5_.py rename to tests/loaders/test_json5_.py index 0430f0b..8a095b6 100644 --- a/tests/sources_loader/loaders/test_json5_.py +++ b/tests/loaders/test_json5_.py @@ -3,7 +3,7 @@ import pytest from json5 import JsonIdentifier -from dature.sources_loader.loaders.json5_ import str_from_json_identifier +from dature.loaders.json5_ import str_from_json_identifier @pytest.mark.parametrize( diff --git a/tests/sources_loader/loaders/test_toml_.py b/tests/loaders/test_toml_.py similarity index 84% rename from tests/sources_loader/loaders/test_toml_.py rename to tests/loaders/test_toml_.py index 4948c85..cb15714 100644 --- a/tests/sources_loader/loaders/test_toml_.py +++ b/tests/loaders/test_toml_.py @@ -4,7 +4,7 @@ import pytest -from dature.sources_loader.loaders.toml_ import time_passthrough +from dature.loaders.toml_ import time_passthrough @pytest.mark.parametrize( diff --git a/tests/sources_loader/loaders/test_yaml_.py b/tests/loaders/test_yaml_.py similarity index 84% rename from tests/sources_loader/loaders/test_yaml_.py rename to tests/loaders/test_yaml_.py index 39c363f..16f7fff 100644 --- a/tests/sources_loader/loaders/test_yaml_.py +++ b/tests/loaders/test_yaml_.py @@ -4,7 +4,7 @@ import pytest -from dature.sources_loader.loaders.yaml_ import time_from_int +from dature.loaders.yaml_ import time_from_int @pytest.mark.parametrize( diff --git a/tests/loading/test_context.py b/tests/loading/test_context.py index 2f6d6b7..772bf93 100644 --- a/tests/loading/test_context.py +++ b/tests/loading/test_context.py @@ -2,9 +2,24 @@ from dataclasses import dataclass, fields from enum import Flag +from pathlib import Path from typing import Any +from unittest.mock 
import MagicMock -from dature.loading.context import coerce_flag_fields, merge_fields +import pytest + +from dature.field_path import FieldPath +from dature.loading.context import ( + apply_skip_invalid, + build_error_ctx, + coerce_flag_fields, + get_allowed_fields, + make_validating_post_init, + merge_fields, +) +from dature.sources.env_ import EnvSource +from dature.sources.json_ import JsonSource +from dature.sources.retort import ensure_retort class TestMergeFields: @@ -141,3 +156,133 @@ def test_flag_object_coerced_to_int(self): result = coerce_flag_fields(data, self.FlagConfig) assert result == {"name": "test", "perms": 3} + + +class TestBuildErrorCtx: + def test_file_source_no_split_symbols(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text("{}") + source = JsonSource(file=json_file, prefix="app") + + ctx = build_error_ctx(source, "MyConfig") + + assert ctx.dataclass_name == "MyConfig" + assert ctx.source_class is JsonSource + assert ctx.prefix == "app" + assert ctx.split_symbols is None + + def test_flat_key_source_has_split_symbols(self): + source = EnvSource(prefix="APP", split_symbols="__") + + ctx = build_error_ctx(source, "MyConfig") + + assert ctx.split_symbols == "__" + + +class TestGetAllowedFields: + def test_bool_returns_none(self): + assert get_allowed_fields(skip_value=True) is None + assert get_allowed_fields(skip_value=False) is None + + def test_tuple_of_field_paths(self): + @dataclass + class Cfg: + name: str + port: int + + fp = FieldPath(owner=Cfg, parts=("name",)) + + result = get_allowed_fields(skip_value=(fp,), schema=Cfg) + + assert result == {"name"} + + +class TestApplySkipInvalid: + @pytest.mark.parametrize("skip_if_invalid", [False, None], ids=["false", "none"]) + def test_falsy_returns_raw_unchanged(self, tmp_path: Path, skip_if_invalid): + json_file = tmp_path / "config.json" + json_file.write_text("{}") + + @dataclass + class Cfg: + name: str + + source = JsonSource(file=json_file) + raw = 
{"name": "hello"} + + result = apply_skip_invalid( + raw=raw, + skip_if_invalid=skip_if_invalid, + source=source, + schema=Cfg, + log_prefix="[test]", + ) + + assert result.cleaned_dict == raw + assert result.skipped_paths == [] + + +class TestEnsureRetort: + def test_creates_and_caches_retort(self, tmp_path: Path): + json_file = tmp_path / "config.json" + json_file.write_text("{}") + + @dataclass + class Cfg: + name: str + + source = JsonSource(file=json_file) + assert Cfg not in source.retorts + + ensure_retort(source, Cfg) + assert Cfg in source.retorts + + first = source.retorts[Cfg] + ensure_retort(source, Cfg) + assert source.retorts[Cfg] is first + + +class TestMakeValidatingPostInit: + @dataclass + class Cfg: + name: str + + def test_loading_flag_skips_validation(self): + ctx = MagicMock() + ctx.loading = True + ctx.validating = False + ctx.original_post_init = None + + post_init = make_validating_post_init(ctx) + instance = MagicMock() + post_init(instance) + + ctx.validation_loader.assert_not_called() + + def test_validating_flag_skips_reentrant(self): + ctx = MagicMock() + ctx.loading = False + ctx.validating = True + ctx.original_post_init = None + + post_init = make_validating_post_init(ctx) + instance = MagicMock() + post_init(instance) + + ctx.validation_loader.assert_not_called() + + def test_calls_original_post_init(self): + original = MagicMock() + ctx = MagicMock() + ctx.loading = False + ctx.validating = False + ctx.original_post_init = original + ctx.cls = self.Cfg + ctx.validation_loader = MagicMock() + ctx.error_ctx = MagicMock() + + post_init = make_validating_post_init(ctx) + instance = self.Cfg(name="test") + post_init(instance) + + original.assert_called_once_with(instance) diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py index 38f5612..a56684e 100644 --- a/tests/loading/test_field_merges.py +++ b/tests/loading/test_field_merges.py @@ -8,7 +8,7 @@ import pytest -from dature import Source, load +from 
dature import JsonSource, load from dature.errors import MergeConflictError from dature.field_path import F from dature.types import FieldMergeStrategyName @@ -28,8 +28,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, strategy="last_wins", field_merges={F[Config].host: "first_wins"}, @@ -51,8 +51,8 @@ class Config: port: int result = load( - Source(file=first), - Source(file=second), + JsonSource(file=first), + JsonSource(file=second), schema=Config, strategy="first_wins", field_merges={F[Config].port: "last_wins"}, @@ -74,8 +74,8 @@ class Config: name: str result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].tags: "append"}, ) @@ -95,8 +95,8 @@ class Config: tags: list[str] result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].tags: "append_unique"}, ) @@ -115,8 +115,8 @@ class Config: tags: list[str] result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].tags: "prepend"}, ) @@ -135,8 +135,8 @@ class Config: tags: list[str] result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].tags: "prepend_unique"}, ) @@ -160,8 +160,8 @@ class Config: database: Database result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].database.host: "first_wins"}, ) @@ -182,8 +182,8 @@ class Config: with pytest.raises(TypeError, match="APPEND strategy requires both values to be lists"): load( - Source(file=defaults), - 
Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].value: "append"}, ) @@ -202,8 +202,8 @@ class Config: tags: list[str] result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, strategy="last_wins", field_merges={ @@ -229,8 +229,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={}, ) @@ -248,8 +248,8 @@ def test_decorator_with_field_merges(self, tmp_path: Path): overrides.write_text('{"host": "override-host", "port": 9090, "tags": ["b"]}') @load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), field_merges={ F["Config"].host: "first_wins", F["Config"].tags: "append", @@ -281,8 +281,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "last_wins"}, @@ -304,8 +304,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "first_wins"}, @@ -328,8 +328,8 @@ class Config: with pytest.raises(MergeConflictError): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={F[Config].host: "last_wins"}, @@ -352,8 +352,8 @@ class Config: name: str result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={F[Config].database.host: "last_wins"}, @@ -375,8 +375,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + 
JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={ @@ -433,8 +433,8 @@ class Config: with pytest.raises(TypeError, match=match): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -482,8 +482,8 @@ class Config: with pytest.raises(TypeError, match=match): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -521,8 +521,8 @@ class Config: with pytest.raises(TypeError, match=match): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -551,8 +551,8 @@ class Config: value: list[int] result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -584,8 +584,8 @@ class Config: with pytest.raises(TypeError, match=match): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -615,8 +615,8 @@ class Config: with pytest.raises(TypeError, match=match): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].value: strategy}, ) @@ -634,8 +634,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].host: "first_wins"}, ) @@ -658,9 +658,9 @@ class Config: tags: list[str] result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].tags: "append"}, ) @@ -682,9 +682,9 @@ class Config: priority: int result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + 
JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].priority: max}, ) @@ -706,9 +706,9 @@ class Config: priority: int result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].priority: min}, ) @@ -734,8 +734,8 @@ class Config: inner: Inner result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={ F[Config].user_name: "first_wins", @@ -763,8 +763,8 @@ class Config: inner: Inner result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={ F[Config].user_name: "last_wins", @@ -789,8 +789,8 @@ class Config: score: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={F[Config].score: sum}, ) @@ -812,9 +812,9 @@ class Config: score: int result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].score: sum}, ) @@ -836,9 +836,9 @@ class Config: weight: float result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].weight: lambda vals: sum(vals) / len(vals)}, ) @@ -860,9 +860,9 @@ class Config: priority: int result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].priority: max}, ) @@ -888,9 +888,9 @@ class Config: database: Database result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].database.port: max}, ) 
@@ -906,7 +906,7 @@ class Config: score: int result = load( - Source(file=a), + JsonSource(file=a), schema=Config, field_merges={F[Config].score: sum}, ) @@ -927,7 +927,7 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( - Source(file=a), + JsonSource(file=a), schema=Config, field_merges={F[Config].score: sum}, ) @@ -948,8 +948,8 @@ class Config: name: str result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", field_merges={F[Config].score: sum}, @@ -972,8 +972,8 @@ class Config: tags: list[str] result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, field_merges={ F[Config].host: "first_wins", @@ -1002,9 +1002,9 @@ class Config: name: str result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_merges={F[Config].score: sum}, ) diff --git a/tests/loading/test_loading_common.py b/tests/loading/test_loading_common.py new file mode 100644 index 0000000..d7a8c8a --- /dev/null +++ b/tests/loading/test_loading_common.py @@ -0,0 +1,38 @@ +import pytest + +from dature.config import DatureConfig, MaskingConfig +from dature.loading.common import resolve_mask_secrets + + +@pytest.mark.parametrize( + ("source_level", "load_level", "config_default", "expected"), + [ + (True, None, False, True), + (False, None, True, False), + (True, False, False, True), + (None, True, False, True), + (None, False, True, False), + (None, None, True, True), + (None, None, False, False), + ], + ids=[ + "source_true_wins", + "source_false_wins", + "source_beats_load", + "load_true_wins", + "load_false_wins", + "config_true_default", + "config_false_default", + ], +) +def test_resolve_mask_secrets( + monkeypatch: pytest.MonkeyPatch, + source_level: bool | None, + load_level: bool | None, + config_default: bool, + expected: bool, +) -> 
None: + fake_config = DatureConfig(masking=MaskingConfig(mask_secrets=config_default)) + monkeypatch.setattr("dature.loading.common.config", fake_config) + result = resolve_mask_secrets(source_level=source_level, load_level=load_level) + assert result == expected diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py index 86c0267..06d6bd1 100644 --- a/tests/loading/test_multi.py +++ b/tests/loading/test_multi.py @@ -8,7 +8,7 @@ import pytest -from dature import Source, load +from dature import EnvFileSource, EnvSource, JsonSource, Yaml12Source, load from dature.errors import DatureConfigError, MergeConflictError from dature.validators.number import Ge @@ -27,8 +27,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, ) @@ -48,8 +48,8 @@ class Config: port: int result = load( - Source(file=first), - Source(file=second), + JsonSource(file=first), + JsonSource(file=second), schema=Config, strategy="first_wins", ) @@ -70,8 +70,8 @@ class Config: port: int result = load( - Source(file=filea), - Source(file=fileb), + JsonSource(file=filea), + JsonSource(file=fileb), schema=Config, ) @@ -95,8 +95,8 @@ class Config: database: Database result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, ) @@ -120,9 +120,9 @@ class Config: debug: bool result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, ) @@ -143,8 +143,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, ) @@ -164,8 +164,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(prefix="APP_"), + JsonSource(file=defaults), + EnvSource(prefix="APP_"), schema=Config, ) @@ -186,8 
+186,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=defaults), - Source(prefix="APP_"), + JsonSource(file=defaults), + EnvSource(prefix="APP_"), schema=Config, ) @@ -210,8 +210,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, ) @@ -229,7 +229,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.name == "test" assert result.port == 8080 @@ -241,7 +241,7 @@ def test_backward_compat_none_metadata(self, monkeypatch): class Config: my_var: str - result = load(Source(), schema=Config) + result = load(EnvSource(), schema=Config) assert result.my_var == "from_env" @@ -255,8 +255,8 @@ def test_decorator_with_merge(self, tmp_path: Path): overrides.write_text('{"port": 9090}') @load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), ) @dataclass class Config: @@ -271,7 +271,7 @@ def test_decorator_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - @load(Source(file=defaults)) + @load(JsonSource(file=defaults)) @dataclass class Config: host: str @@ -288,7 +288,7 @@ def test_decorator_no_cache(self, tmp_path: Path): defaults = tmp_path / "defaults.json" defaults.write_text('{"host": "original", "port": 3000}') - @load(Source(file=defaults), cache=False) + @load(JsonSource(file=defaults), cache=False) @dataclass class Config: host: str @@ -309,8 +309,8 @@ def test_decorator_with_tuple(self, tmp_path: Path): overrides.write_text('{"port": 8080}') @load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), ) @dataclass class Config: @@ -325,7 +325,7 @@ def test_decorator_init_override(self, tmp_path: Path): defaults = 
tmp_path / "defaults.json" defaults.write_text('{"host": "localhost", "port": 3000}') - @load(Source(file=defaults)) + @load(JsonSource(file=defaults)) @dataclass class Config: host: str @@ -338,7 +338,7 @@ class Config: def test_decorator_not_dataclass(self): with pytest.raises(TypeError, match="must be a dataclass"): - @load(Source()) + @load(EnvSource()) class NotDataclass: pass @@ -350,8 +350,8 @@ def test_decorator_first_wins(self, tmp_path: Path): second.write_text('{"host": "second-host", "port": 2000}') @load( - Source(file=first), - Source(file=second), + JsonSource(file=first), + JsonSource(file=second), strategy="first_wins", ) @dataclass @@ -379,8 +379,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -408,8 +408,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -430,8 +430,8 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -457,8 +457,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -486,8 +486,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -515,8 +515,8 @@ class Config: with pytest.raises(MergeConflictError) as exc_info: load( - Source(file=a), - Source(prefix="APP_"), + JsonSource(file=a), + EnvSource(prefix="APP_"), schema=Config, strategy="raise_on_conflict", ) @@ -544,8 +544,8 @@ class Config: with pytest.raises(MergeConflictError) as 
exc_info: load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, strategy="raise_on_conflict", ) @@ -585,8 +585,8 @@ class Config: port: int result = load( - Source(file=yaml_file), - Source(file=env_file), + Yaml12Source(file=yaml_file), + EnvFileSource(file=env_file), schema=Config, ) @@ -614,8 +614,8 @@ class Config: perms: _Permission result = load( - Source(file=json_file), - Source(file=env_file), + JsonSource(file=json_file), + EnvFileSource(file=env_file), schema=Config, ) @@ -633,8 +633,8 @@ class Config: perms: _Permission result = load( - Source(file=json_file), - Source(prefix="APP_"), + JsonSource(file=json_file), + EnvSource(prefix="APP_"), schema=Config, ) @@ -653,8 +653,8 @@ class Config: perms: _Permission result = load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, ) @@ -673,8 +673,8 @@ class Config: perms: _Permission @load( - Source(file=json_file), - Source(file=env_file), + JsonSource(file=json_file), + EnvFileSource(file=env_file), ) @dataclass class MergedConfig: @@ -699,8 +699,8 @@ class Config: port: int result = load( - Source(file=first), - Source(file=second), + Yaml12Source(file=first), + Yaml12Source(file=second), schema=Config, strategy="first_found", ) @@ -719,8 +719,8 @@ class Config: port: int result = load( - Source(file=missing), - Source(file=fallback), + Yaml12Source(file=missing), + Yaml12Source(file=fallback), schema=Config, strategy="first_found", ) @@ -741,8 +741,8 @@ class Config: port: int result = load( - Source(file=broken), - Source(file=fallback), + Yaml12Source(file=broken), + Yaml12Source(file=fallback), schema=Config, strategy="first_found", ) @@ -761,8 +761,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=missing1), - Source(file=missing2), + Yaml12Source(file=missing1), + Yaml12Source(file=missing2), schema=Config, strategy="first_found", ) @@ -786,8 +786,8 @@ class Config: with 
pytest.raises(DatureConfigError) as exc_info: load( - Source(file=partial), - Source(file=full), + Yaml12Source(file=partial), + Yaml12Source(file=full), schema=Config, strategy="first_found", ) @@ -811,8 +811,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=bad_type), - Source(file=fallback), + Yaml12Source(file=bad_type), + Yaml12Source(file=fallback), schema=Config, strategy="first_found", ) @@ -841,8 +841,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=first), - Source(file=second), + Yaml12Source(file=first), + Yaml12Source(file=second), schema=Config, strategy="first_found", ) @@ -865,8 +865,8 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa second.write_text("host: second-host\nport: 5000\n") @load( - Source(file=first), - Source(file=second), + Yaml12Source(file=first), + Yaml12Source(file=second), strategy="first_found", cache=False, ) diff --git a/tests/loading/test_resolver.py b/tests/loading/test_resolver.py deleted file mode 100644 index 5cc77b6..0000000 --- a/tests/loading/test_resolver.py +++ /dev/null @@ -1,188 +0,0 @@ -from collections.abc import Buffer -from dataclasses import dataclass -from io import BytesIO, RawIOBase, StringIO -from pathlib import Path - -import pytest - -from dature.field_path import F -from dature.loading.resolver import resolve_loader, resolve_loader_class -from dature.metadata import Source -from dature.sources_loader.docker_secrets import DockerSecretsLoader -from dature.sources_loader.env_ import EnvFileLoader, EnvLoader -from dature.sources_loader.ini_ import IniLoader -from dature.sources_loader.json5_ import Json5Loader -from dature.sources_loader.json_ import JsonLoader -from dature.sources_loader.toml_ import Toml11Loader -from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader - - -class _DummyRawIO(RawIOBase): - def readinto(self, b: Buffer) -> int: # noqa: ARG002 - return 0 - - -class 
TestResolveLoaderClass: - def test_explicit_loader(self) -> None: - assert resolve_loader_class(loader=Yaml11Loader, file="config.json") is Yaml11Loader - - def test_no_filereturns_env(self) -> None: - assert resolve_loader_class(loader=None, file=None) is EnvLoader - - @pytest.mark.parametrize( - ("extension", "expected"), - [ - (".env", EnvFileLoader), - (".yaml", Yaml12Loader), - (".yml", Yaml12Loader), - (".json", JsonLoader), - (".json5", Json5Loader), - (".toml", Toml11Loader), - (".ini", IniLoader), - (".cfg", IniLoader), - ], - ) - def test_extension_mapping(self, extension: str, expected: type) -> None: - assert resolve_loader_class(loader=None, file=f"config{extension}") is expected - - @pytest.mark.parametrize( - "filename", - [".env.local", ".env.development", ".env.production"], - ) - def test_dotenv_patterns(self, filename: str) -> None: - assert resolve_loader_class(loader=None, file=filename) is EnvFileLoader - - def test_unknown_extension_raises(self) -> None: - with pytest.raises(ValueError, match="Cannot determine loader type"): - resolve_loader_class(loader=None, file="config.xyz") - - def test_uppercase_extension(self) -> None: - assert resolve_loader_class(loader=None, file="config.JSON") is JsonLoader - - def test_env_loader_with_fileraises(self) -> None: - with pytest.raises(ValueError, match="EnvLoader reads from environment variables") as exc_info: - resolve_loader_class(loader=EnvLoader, file="config.json") - - assert str(exc_info.value) == ( - "EnvLoader reads from environment variables and does not use files. " - "Remove file or use a file-based loader instead (e.g. EnvFileLoader)." 
- ) - - def test_env_fileloader_with_fileallowed(self) -> None: - assert resolve_loader_class(loader=EnvFileLoader, file=".env.local") is EnvFileLoader - - def test_directory_returns_docker_secrets(self, tmp_path) -> None: - assert resolve_loader_class(loader=None, file=tmp_path) is DockerSecretsLoader - - -class TestMissingOptionalDependency: - @pytest.mark.parametrize( - ("extension", "extra", "blocked_module"), - [ - (".toml", "toml", "toml_rs"), - (".yaml", "yaml", "ruamel"), - (".yml", "yaml", "ruamel"), - (".json5", "json5", "json5"), - ], - ) - def test_missing_extra_raises_helpful_error( - self, - extension, - extra, - blocked_module, - block_import, - ) -> None: - with block_import(blocked_module): - with pytest.raises(ImportError) as exc_info: - resolve_loader_class(loader=None, file=f"config{extension}") - - assert str(exc_info.value) == ( - f"To use '{extension}' files, install the '{extra}' extra: pip install dature[{extra}]" - ) - - -class TestResolveLoader: - def test_returns_correct_loader_type(self) -> None: - metadata = Source(file="config.json") - - loader = resolve_loader(metadata) - - assert isinstance(loader, JsonLoader) - - def test_passes_prefix(self) -> None: - metadata = Source(prefix="APP_") - - loader = resolve_loader(metadata) - - assert loader._prefix == "APP_" - - def test_passes_name_style(self) -> None: - metadata = Source(file="config.json", name_style="lower_snake") - - loader = resolve_loader(metadata) - - assert loader._name_style == "lower_snake" - - def test_passes_field_mapping(self) -> None: - @dataclass - class Config: - key: str - - mapping = {F[Config].key: "value"} - metadata = Source(file="config.json", field_mapping=mapping) - - loader = resolve_loader(metadata) - - assert loader._field_mapping == mapping - - def test_default_metadata_returns_env_loader(self) -> None: - metadata = Source() - - loader = resolve_loader(metadata) - - assert isinstance(loader, EnvLoader) - - def test_env_with_file_path(self, tmp_path: 
Path) -> None: - env_file = tmp_path / ".env" - env_file.write_text("KEY=VALUE") - metadata = Source(file=env_file) - - loader = resolve_loader(metadata) - - assert isinstance(loader, EnvFileLoader) - - -class TestFilelikeResolverValidation: - @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_filelike_without_loader_raises(self, stream) -> None: - with pytest.raises(TypeError) as exc_info: - resolve_loader_class(loader=None, file=stream) - - assert str(exc_info.value) == ( - "Cannot determine loader type for a file-like object. " - "Please specify loader explicitly (e.g. loader=JsonLoader)." - ) - - @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_filelike_with_env_loader_raises(self, stream) -> None: - with pytest.raises(ValueError, match="EnvLoader does not support file-like objects") as exc_info: - resolve_loader_class(loader=EnvLoader, file=stream) - - assert str(exc_info.value) == ( - "EnvLoader does not support file-like objects. " - "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects." - ) - - @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_filelike_with_docker_secrets_loader_raises(self, stream) -> None: - with pytest.raises(ValueError, match="DockerSecretsLoader does not support file-like objects") as exc_info: - resolve_loader_class(loader=DockerSecretsLoader, file=stream) - - assert str(exc_info.value) == ( - "DockerSecretsLoader does not support file-like objects. " - "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects." 
- ) - - @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()]) - def test_filelike_with_explicit_loader_allowed(self, stream) -> None: - assert resolve_loader_class(loader=JsonLoader, file=stream) is JsonLoader diff --git a/tests/loading/test_single.py b/tests/loading/test_single.py index 5dc4b3c..1c0f4ac 100644 --- a/tests/loading/test_single.py +++ b/tests/loading/test_single.py @@ -7,22 +7,18 @@ import pytest +from dature import EnvFileSource, JsonSource from dature.loading.single import load_as_function, make_decorator -from dature.metadata import Source -from dature.sources_loader.env_ import EnvFileLoader -from dature.sources_loader.json_ import JsonLoader class TestMakeDecorator: def test_not_dataclass_raises(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -36,7 +32,7 @@ class NotADataclass: def test_patches_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -44,9 +40,7 @@ class Config: original_init = Config.__init__ decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -57,16 +51,14 @@ class Config: def test_patches_post_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: name: str decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -77,7 +69,7 @@ class 
Config: def test_loads_on_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "from_file", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -85,9 +77,7 @@ class Config: port: int decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -100,7 +90,7 @@ class Config: def test_init_args_override_loaded(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "from_file", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -108,9 +98,7 @@ class Config: port: int decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -123,16 +111,14 @@ class Config: def test_returns_same_class(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: name: str decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -143,7 +129,7 @@ class Config: def test_preserves_original_post_init(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) post_init_called = [] @@ -155,9 +141,7 @@ def __post_init__(self): post_init_called.append(True) decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -171,7 +155,7 @@ class TestCache: def test_cache_returns_same_data(self, tmp_path: Path): json_file = tmp_path / "config.json" 
json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -179,9 +163,7 @@ class Config: port: int decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -198,7 +180,7 @@ class Config: def test_no_cache_rereads_file(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -206,9 +188,7 @@ class Config: port: int decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=False, debug=False, ) @@ -225,7 +205,7 @@ class Config: def test_cache_allows_override(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -233,9 +213,7 @@ class Config: port: int decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -254,7 +232,7 @@ class TestLoadAsFunction: def test_returns_loaded_dataclass(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "port": 3000}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -262,10 +240,8 @@ class Config: port: int result = load_as_function( - loader_instance=JsonLoader(), - file_path=json_file, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) @@ -275,17 +251,15 @@ class Config: def test_with_prefix(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"app": {"name": "nested"}}') - metadata = 
Source(file=json_file) + metadata = JsonSource(file=json_file, prefix="app") @dataclass class Config: name: str result = load_as_function( - loader_instance=JsonLoader(prefix="app"), - file_path=json_file, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) @@ -302,7 +276,7 @@ class TestCoerceFlagFieldsFunctionMode: def test_flag_from_env_file(self, tmp_path: Path): env_file = tmp_path / "config.env" env_file.write_text("NAME=test\nPERMS=3\n") - metadata = Source(file=env_file, loader=EnvFileLoader) + metadata = EnvFileSource(file=env_file) @dataclass class Config: @@ -310,10 +284,8 @@ class Config: perms: _Permission result = load_as_function( - loader_instance=EnvFileLoader(), - file_path=env_file, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) @@ -322,7 +294,7 @@ class Config: def test_flag_from_json_as_int(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "perms": 3}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -330,10 +302,8 @@ class Config: perms: _Permission result = load_as_function( - loader_instance=JsonLoader(), - file_path=json_file, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) @@ -344,7 +314,7 @@ class TestCoerceFlagFieldsDecoratorMode: def test_flag_from_env_file(self, tmp_path: Path): env_file = tmp_path / "config.env" env_file.write_text("NAME=test\nPERMS=5\n") - metadata = Source(file=env_file, loader=EnvFileLoader) + metadata = EnvFileSource(file=env_file) @dataclass class Config: @@ -352,9 +322,7 @@ class Config: perms: _Permission decorator = make_decorator( - loader_instance=EnvFileLoader(), - file_path=env_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -366,7 +334,7 @@ class Config: def test_flag_from_json_as_int(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"name": "test", "perms": 7}') - metadata = 
Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: @@ -374,9 +342,7 @@ class Config: perms: _Permission decorator = make_decorator( - loader_instance=JsonLoader(), - file_path=json_file, - metadata=metadata, + source=metadata, cache=True, debug=False, ) @@ -395,7 +361,7 @@ class TestFilelikeLoadAsFunction: ], ) def test_json_from_filelike(self, stream) -> None: - metadata = Source(file=stream, loader=JsonLoader) + metadata = JsonSource(file=stream) @dataclass class Config: @@ -403,10 +369,8 @@ class Config: port: int result = load_as_function( - loader_instance=JsonLoader(), - file_path=stream, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) @@ -416,17 +380,15 @@ class Config: def test_path_object_directly(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "direct_path"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @dataclass class Config: name: str result = load_as_function( - loader_instance=JsonLoader(), - file_path=json_file, + source=metadata, schema=Config, - metadata=metadata, debug=False, ) diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py index 6de1fa0..d39a756 100644 --- a/tests/loading/test_skip_invalid_fields.py +++ b/tests/loading/test_skip_invalid_fields.py @@ -6,7 +6,7 @@ import pytest -from dature import F, Source, load +from dature import F, JsonSource, load from dature.errors import DatureConfigError @@ -24,8 +24,8 @@ class Config: port: int result = load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -46,8 +46,8 @@ class Config: port: int = 9090 result = load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -69,8 +69,8 @@ class Config: with 
pytest.raises(DatureConfigError) as exc_info: load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -103,8 +103,8 @@ class Config: db: Database result = load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -125,8 +125,8 @@ class Config: port: int result = load( - Source(file=source1, skip_if_invalid=True), - Source(file=source2), + JsonSource(file=source1, skip_if_invalid=True), + JsonSource(file=source2), schema=Config, ) @@ -146,8 +146,8 @@ class Config: port: int result = load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -166,7 +166,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=source1), + JsonSource(file=source1), schema=Config, ) @@ -193,8 +193,8 @@ class Config: port: int result = load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, strategy="raise_on_conflict", skip_invalid_fields=True, @@ -217,11 +217,11 @@ class Config: timeout: int = 30 result = load( - Source( + JsonSource( file=source1, skip_if_invalid=(F[Config].port, F[Config].timeout), ), - Source(file=source2), + JsonSource(file=source2), schema=Config, ) @@ -240,7 +240,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source( + JsonSource( file=source1, skip_if_invalid=(F[Config].port,), ), @@ -276,8 +276,8 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( - Source(file=source1), - Source(file=source2), + JsonSource(file=source1), + JsonSource(file=source2), schema=Config, skip_invalid_fields=True, ) @@ -300,7 +300,7 @@ class Config: port: int = 8080 result = load( - Source(file=json_file, skip_if_invalid=True), + 
JsonSource(file=json_file, skip_if_invalid=True), schema=Config, ) @@ -318,7 +318,7 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=json_file, skip_if_invalid=True), + JsonSource(file=json_file, skip_if_invalid=True), schema=Config, ) @@ -335,7 +335,7 @@ def test_single_source_decorator_skip(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": "abc"}') - @load(Source(file=json_file, skip_if_invalid=True)) + @load(JsonSource(file=json_file, skip_if_invalid=True)) @dataclass class Config: host: str @@ -356,7 +356,7 @@ class Config: timeout: int = 30 result = load( - Source( + JsonSource( file=json_file, skip_if_invalid=(F[Config].port,), ), @@ -378,7 +378,7 @@ class Config: with caplog.at_level(logging.WARNING, logger="dature"): load( - Source(file=json_file, skip_if_invalid=True), + JsonSource(file=json_file, skip_if_invalid=True), schema=Config, ) @@ -401,7 +401,7 @@ class Config: inner: Inner = None # type: ignore[assignment] result = load( - Source( + JsonSource( file=source, skip_if_invalid=(F[Config].port,), ), @@ -428,11 +428,11 @@ class Config: inner: Inner result = load( - Source( + JsonSource( file=source1, skip_if_invalid=(F[Config].inner.port,), ), - Source(file=source2), + JsonSource(file=source2), schema=Config, ) @@ -456,11 +456,11 @@ class Config: inner: Inner result = load( - Source( + JsonSource( file=source1, skip_if_invalid=(F[Config].port, F[Config].inner.port), ), - Source(file=source2), + JsonSource(file=source2), schema=Config, ) diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index e69abad..df653b3 100644 --- a/tests/loading/test_source_loading.py +++ b/tests/loading/test_source_loading.py @@ -6,8 +6,18 @@ import pytest -from dature import Source, load +from dature import EnvFileSource, IniSource, JsonSource, Toml11Source, Yaml12Source, load from dature.errors import DatureConfigError, EnvVarExpandError 
+from dature.loading.merge_config import MergeConfig +from dature.loading.source_loading import ( + apply_merge_skip_invalid, + resolve_expand_env_vars, + resolve_mask_secrets, + resolve_secret_field_names, + resolve_skip_invalid, + should_skip_broken, +) +from dature.sources.env_ import EnvSource class TestSkipBrokenSources: @@ -23,8 +33,8 @@ class Config: port: int result = load( - Source(file=valid), - Source(file=missing), + JsonSource(file=valid), + JsonSource(file=missing), schema=Config, skip_broken_sources=True, ) @@ -45,8 +55,8 @@ class Config: port: int result = load( - Source(file=valid), - Source(file=broken), + JsonSource(file=valid), + JsonSource(file=broken), schema=Config, skip_broken_sources=True, ) @@ -67,8 +77,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=broken_a), - Source(file=broken_b), + JsonSource(file=broken_a), + JsonSource(file=broken_b), schema=Config, skip_broken_sources=True, ) @@ -89,8 +99,8 @@ class Config: with pytest.raises(DatureConfigError): load( - Source(file=valid), - Source(file=broken), + JsonSource(file=valid), + JsonSource(file=broken), schema=Config, ) @@ -110,9 +120,9 @@ class Config: port: int result = load( - Source(file=a), - Source(file=broken), - Source(file=c), + JsonSource(file=a), + JsonSource(file=broken), + JsonSource(file=c), schema=Config, skip_broken_sources=True, ) @@ -133,8 +143,8 @@ class Config: port: int result = load( - Source(file=valid), - Source(file=broken, skip_if_broken=True), + JsonSource(file=valid), + JsonSource(file=broken, skip_if_broken=True), schema=Config, skip_broken_sources=False, ) @@ -156,8 +166,8 @@ class Config: with pytest.raises(DatureConfigError): load( - Source(file=valid), - Source(file=broken, skip_if_broken=False), + JsonSource(file=valid), + JsonSource(file=broken, skip_if_broken=False), schema=Config, skip_broken_sources=True, ) @@ -175,8 +185,8 @@ class Config: port: int result = load( - Source(file=valid), - Source(file=broken, 
skip_if_broken=None), + JsonSource(file=valid), + JsonSource(file=broken, skip_if_broken=None), schema=Config, skip_broken_sources=True, ) @@ -200,8 +210,8 @@ class Config: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=missing), - Source(file=broken), + JsonSource(file=missing), + JsonSource(file=broken), schema=Config, skip_broken_sources=True, ) @@ -222,7 +232,7 @@ class Config: port: int result = load( - Source(file=json_file), + JsonSource(file=json_file), schema=Config, ) @@ -239,7 +249,7 @@ class Config: port: int result = load( - Source(file=json_file), + JsonSource(file=json_file), schema=Config, expand_env_vars="disabled", ) @@ -258,7 +268,7 @@ class Config: with pytest.raises(EnvVarExpandError): load( - Source(file=json_file), + JsonSource(file=json_file), schema=Config, expand_env_vars="strict", ) @@ -274,7 +284,7 @@ class Config: port: int result = load( - Source(file=json_file, expand_env_vars="disabled"), + JsonSource(file=json_file, expand_env_vars="disabled"), schema=Config, expand_env_vars="default", ) @@ -292,7 +302,7 @@ class Config: port: int result = load( - Source(file=json_file, expand_env_vars=None), + JsonSource(file=json_file, expand_env_vars=None), schema=Config, expand_env_vars="disabled", ) @@ -310,7 +320,7 @@ class Config: port: int result = load( - Source(file=json_file), + JsonSource(file=json_file), schema=Config, expand_env_vars="empty", ) @@ -329,31 +339,43 @@ class StrictConfig: class TestEnvVarExpandErrorFormat: @pytest.mark.parametrize( - ("ext", "prefix", "source_label", "line", "line_content"), + ("source_cls", "source_kwargs", "source_label", "line", "line_content"), [ - ("yaml", None, "FILE", 1, 'host: "$MISSING_HOST"'), - ("json", None, "FILE", 1, '{"host": "$MISSING_HOST", "port": 8080}'), - ("toml", None, "FILE", 1, 'host = "$MISSING_HOST"'), - ("ini", "section", "FILE", 2, "host = $MISSING_HOST"), - ("env", None, "ENV FILE", 1, "HOST=$MISSING_HOST"), + (Yaml12Source, {"file": FIXTURES_DIR / 
"env_expand_strict.yaml"}, "FILE", 1, 'host: "$MISSING_HOST"'), + ( + JsonSource, + {"file": FIXTURES_DIR / "env_expand_strict.json"}, + "FILE", + 1, + '{"host": "$MISSING_HOST", "port": 8080}', + ), + (Toml11Source, {"file": FIXTURES_DIR / "env_expand_strict.toml"}, "FILE", 1, 'host = "$MISSING_HOST"'), + ( + IniSource, + {"file": FIXTURES_DIR / "env_expand_strict.ini", "prefix": "section"}, + "FILE", + 2, + "host = $MISSING_HOST", + ), + (EnvFileSource, {"file": FIXTURES_DIR / "env_expand_strict.env"}, "ENV FILE", 1, "HOST=$MISSING_HOST"), ], ids=["yaml", "json", "toml", "ini", "env"], ) def test_error_format( self, monkeypatch: pytest.MonkeyPatch, - ext: str, - prefix: str | None, + source_cls: type, + source_kwargs: dict[str, object], source_label: str, line: int, line_content: str, ) -> None: monkeypatch.delenv("MISSING_HOST", raising=False) - file = FIXTURES_DIR / f"env_expand_strict.{ext}" + file = source_kwargs["file"] with pytest.raises(EnvVarExpandError) as exc_info: load( - Source(file=file, prefix=prefix, expand_env_vars="strict"), + source_cls(**source_kwargs, expand_env_vars="strict"), schema=StrictConfig, ) @@ -364,3 +386,170 @@ def test_error_format( ├── {line_content} └── {source_label} '{file}', line {line} """) + + +class TestShouldSkipBroken: + @pytest.mark.parametrize( + ("skip_if_broken", "skip_broken_sources", "expected"), + [ + (True, False, True), + (False, True, False), + (None, True, True), + ], + ids=["source-true", "source-false", "source-none-uses-merge"], + ) + def test_resolve( + self, + tmp_path: Path, + skip_if_broken: bool | None, + skip_broken_sources: bool, + expected: bool, + ): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + kwargs = {} if skip_if_broken is None else {"skip_if_broken": skip_if_broken} + source = JsonSource(file=json_file, **kwargs) + merge = MergeConfig(sources=(source,), skip_broken_sources=skip_broken_sources) + + assert should_skip_broken(source, merge) is expected + + def 
test_env_source_warns(self, caplog: pytest.LogCaptureFixture): + source = EnvSource(skip_if_broken=True) + merge = MergeConfig(sources=(source,)) + + should_skip_broken(source, merge) + + assert "skip_if_broken has no effect on environment variable sources" in caplog.text + + +class TestResolveExpandEnvVars: + @pytest.mark.parametrize( + ("source_expand", "merge_expand", "expected"), + [ + ("disabled", "strict", "disabled"), + (None, "strict", "strict"), + ], + ids=["source-overrides", "source-none-inherits"], + ) + def test_resolve( + self, + tmp_path: Path, + source_expand: str | None, + merge_expand: str, + expected: str, + ): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + kwargs = {} if source_expand is None else {"expand_env_vars": source_expand} + source = JsonSource(file=json_file, **kwargs) + merge = MergeConfig(sources=(source,), expand_env_vars=merge_expand) + + assert resolve_expand_env_vars(source, merge) == expected + + +class TestResolveSkipInvalid: + @pytest.mark.parametrize( + ("source_skip", "merge_skip", "expected"), + [ + (True, False, True), + (None, True, True), + ], + ids=["source-overrides", "source-none-inherits"], + ) + def test_resolve( + self, + tmp_path: Path, + source_skip: bool | None, + merge_skip: bool, + expected: bool, + ): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + kwargs = {} if source_skip is None else {"skip_if_invalid": source_skip} + source = JsonSource(file=json_file, **kwargs) + merge = MergeConfig(sources=(source,), skip_invalid_fields=merge_skip) + + assert resolve_skip_invalid(source, merge) is expected + + +class TestResolveMaskSecrets: + @pytest.mark.usefixtures("_reset_config") + def test_source_overrides_all(self, tmp_path: Path): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + source = JsonSource(file=json_file, mask_secrets=False) + merge = MergeConfig(sources=(source,), mask_secrets=True) + + assert resolve_mask_secrets(source, merge) is False + + 
@pytest.mark.usefixtures("_reset_config") + def test_merge_overrides_config(self, tmp_path: Path): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + source = JsonSource(file=json_file) + merge = MergeConfig(sources=(source,), mask_secrets=False) + + assert resolve_mask_secrets(source, merge) is False + + @pytest.mark.usefixtures("_reset_config") + def test_falls_back_to_config(self, tmp_path: Path): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + source = JsonSource(file=json_file) + merge = MergeConfig(sources=(source,)) + + result = resolve_mask_secrets(source, merge) + + assert isinstance(result, bool) + + +class TestResolveSecretFieldNames: + @pytest.mark.parametrize( + ("source_names", "merge_names", "expected"), + [ + (("api_key",), ("token",), ("api_key", "token")), + (None, ("token",), ("token",)), + (None, None, ()), + ], + ids=["combines-both", "source-none", "both-none"], + ) + def test_resolve( + self, + tmp_path: Path, + source_names: tuple[str, ...] | None, + merge_names: tuple[str, ...] 
| None, + expected: tuple[str, ...], + ): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + kwargs = {} if source_names is None else {"secret_field_names": source_names} + source = JsonSource(file=json_file, **kwargs) + merge_kwargs = {} if merge_names is None else {"secret_field_names": merge_names} + merge = MergeConfig(sources=(source,), **merge_kwargs) + + assert resolve_secret_field_names(source, merge) == expected + + +class TestApplyMergeSkipInvalid: + def test_skip_false_returns_raw(self, tmp_path: Path): + json_file = tmp_path / "c.json" + json_file.write_text("{}") + + @dataclass + class Cfg: + name: str + + source = JsonSource(file=json_file) + merge = MergeConfig(sources=(source,), skip_invalid_fields=False) + raw = {"name": "hello"} + + result = apply_merge_skip_invalid( + raw=raw, + source=source, + merge_meta=merge, + schema=Cfg, + source_index=0, + ) + + assert result.cleaned_dict == raw + assert result.skipped_paths == [] diff --git a/tests/masking/test_detection.py b/tests/masking/test_detection.py index 7c391ce..7f2b3c1 100644 --- a/tests/masking/test_detection.py +++ b/tests/masking/test_detection.py @@ -1,9 +1,15 @@ from dataclasses import dataclass from typing import Annotated +import pytest + from dature.fields.payment_card import PaymentCardNumber from dature.fields.secret_str import SecretStr -from dature.masking.detection import build_secret_paths +from dature.masking.detection import ( + _is_secret_type, + _matches_secret_pattern, + build_secret_paths, +) class TestBuildSecretPaths: @@ -97,3 +103,50 @@ class Cfg: paths1 = build_secret_paths(Cfg) paths2 = build_secret_paths(Cfg) assert paths1 is paths2 + + def test_non_dataclass_returns_empty(self): + result = build_secret_paths(str) + + assert result == frozenset() + + def test_cache_differs_by_extra_patterns(self): + @dataclass + class Cfg2: + my_field: str + + paths_without = build_secret_paths(Cfg2) + paths_with = build_secret_paths(Cfg2, extra_patterns=("my_field",)) + 
+ assert paths_without == frozenset() + assert paths_with == frozenset({"my_field"}) + + +class TestIsSecretType: + @pytest.mark.parametrize( + ("field_type", "expected"), + [ + (str, False), + (SecretStr, True), + (PaymentCardNumber, True), + (SecretStr | None, True), + (Annotated[SecretStr, "meta"], True), + (Annotated[SecretStr | None, "meta"], True), + ], + ids=["plain-str", "secret-str", "payment-card", "optional", "annotated", "annotated-optional"], + ) + def test_detection(self, field_type: type, expected: bool): + assert _is_secret_type(field_type) is expected + + +class TestMatchesSecretPattern: + @pytest.mark.parametrize( + ("name", "patterns", "expected"), + [ + ("DB_PASSWORD", ("password",), True), + ("my_api_key_v2", ("api_key",), True), + ("hostname", ("password", "secret"), False), + ], + ids=["case-insensitive", "substring", "no-match"], + ) + def test_matching(self, name: str, patterns: tuple[str, ...], expected: bool): + assert _matches_secret_pattern(name, patterns) is expected diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py index 601e8df..b52f9a8 100644 --- a/tests/masking/test_masking.py +++ b/tests/masking/test_masking.py @@ -5,7 +5,7 @@ import pytest -from dature import Source, configure, get_load_report, load +from dature import JsonSource, configure, get_load_report, load from dature.errors import DatureConfigError from dature.fields.secret_str import SecretStr from dature.load_report import FieldOrigin, SourceEntry @@ -215,7 +215,7 @@ class Cfg: password: str host: str - result = load(Source(file=json_file), schema=Cfg, debug=True) + result = load(JsonSource(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -240,8 +240,8 @@ class Cfg: host: str result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Cfg, debug=True, ) @@ -266,7 +266,7 @@ class Cfg: api_key: SecretStr host: str - result 
= load(Source(file=json_file), schema=Cfg, debug=True) + result = load(JsonSource(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -287,7 +287,7 @@ class Cfg: host: str with caplog.at_level("DEBUG", logger="dature"): - load(Source(file=json_file), schema=Cfg, debug=True) + load(JsonSource(file=json_file), schema=Cfg, debug=True) assert _SECRET_VALUE not in caplog.text @@ -305,8 +305,8 @@ class Cfg: with caplog.at_level("DEBUG", logger="dature"): load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Cfg, debug=True, ) @@ -323,7 +323,7 @@ class Cfg: port: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), schema=Cfg) + load(JsonSource(file=json_file), schema=Cfg) assert _SECRET_VALUE not in str(exc_info.value) @@ -331,7 +331,7 @@ def test_merge_decorator_error_message_masks_secrets(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"password": "allowed", "host": "prod"}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Cfg: password: Literal["allowed"] @@ -354,7 +354,7 @@ class Cfg: host: str with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), schema=Cfg) + load(JsonSource(file=json_file, mask_secrets=True), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -376,7 +376,7 @@ class Cfg: host: str with patch("dature.masking.masking._heuristic_detector", None), pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, mask_secrets=True), schema=Cfg) + load(JsonSource(file=json_file, mask_secrets=True), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" assert str(exc_info.value.exceptions[0]) == ( @@ -409,7 +409,7 @@ class Cfg: host: str configure(masking={"mask_secrets": mask_secrets}) - result = 
load(Source(file=json_file), schema=Cfg, debug=True) + result = load(JsonSource(file=json_file), schema=Cfg, debug=True) report = get_load_report(result) assert report is not None @@ -442,7 +442,7 @@ class Cfg: configure(masking={"mask_secrets": mask_secrets}) with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file), schema=Cfg) + load(JsonSource(file=json_file), schema=Cfg) assert str(exc_info.value) == "Cfg loading errors (1)" content = f'{{"password": "{expected_password}", "port": "not_a_number"}}' @@ -466,7 +466,7 @@ class Cfg: password: str host: str - result = load(Source(file=json_file), schema=Cfg, debug=True, mask_secrets=True) + result = load(JsonSource(file=json_file), schema=Cfg, debug=True, mask_secrets=True) report = get_load_report(result) assert report is not None @@ -482,7 +482,7 @@ class Cfg: host: str result = load( - Source(file=json_file), + JsonSource(file=json_file), schema=Cfg, debug=True, mask_secrets=True, @@ -503,7 +503,7 @@ class Cfg: host: str result = load( - Source(file=json_file, mask_secrets=False), + JsonSource(file=json_file, mask_secrets=False), schema=Cfg, debug=True, mask_secrets=True, @@ -526,7 +526,7 @@ class Cfg: host: str result = load( - Source(file=json_file, secret_field_names=("label",)), + JsonSource(file=json_file, secret_field_names=("label",)), schema=Cfg, debug=True, mask_secrets=True, diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py index 7f8660e..547c0d4 100644 --- a/tests/merging/test_field_group.py +++ b/tests/merging/test_field_group.py @@ -5,7 +5,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import FieldGroupError from dature.field_path import F @@ -24,8 +24,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, strategy="last_wins", field_groups=((F[Config].host, 
F[Config].port),), @@ -47,8 +47,8 @@ class Config: port: int result = load( - Source(file=first), - Source(file=second), + JsonSource(file=first), + JsonSource(file=second), schema=Config, strategy="first_wins", field_groups=((F[Config].host, F[Config].port),), @@ -72,8 +72,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -95,8 +95,8 @@ class Config: debug: bool result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -114,8 +114,8 @@ def test_partial_change_raises(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote"}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Config: @@ -144,8 +144,8 @@ def test_partial_change_field_present_but_equal(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "port": 3000}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Config: @@ -181,8 +181,8 @@ class Config: with pytest.raises(FieldGroupError): load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, strategy="first_wins", field_groups=((F[Config].host, F[Config].port),), @@ -202,8 +202,8 @@ class Config: with pytest.raises(FieldGroupError): load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, strategy="raise_on_conflict", 
field_groups=((F[Config].host, F[Config].port),), @@ -218,8 +218,8 @@ def test_auto_expand_nested_dataclass(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote"}}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Database: @@ -262,8 +262,8 @@ class Config: database: Database result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].database,),), ) @@ -283,9 +283,9 @@ def test_three_sources_violation_on_second(self, tmp_path: Path): c = tmp_path / "c.json" c.write_text('{"host": "c-host", "port": 3000}') - a_meta = Source(file=a) - b_meta = Source(file=b) - c_meta = Source(file=c) + a_meta = JsonSource(file=a) + b_meta = JsonSource(file=b) + c_meta = JsonSource(file=c) @dataclass class Config: @@ -324,9 +324,9 @@ class Config: port: int result = load( - Source(file=a), - Source(file=b), - Source(file=c), + JsonSource(file=a), + JsonSource(file=b), + JsonSource(file=c), schema=Config, field_groups=((F[Config].host, F[Config].port),), ) @@ -343,8 +343,8 @@ def test_one_ok_one_violated(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "port": 9090, "user": "root"}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Config: @@ -387,8 +387,8 @@ class Config: tags: list[str] result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_merges={F[Config].tags: "append"}, field_groups=((F[Config].host, F[Config].port),), @@ -408,8 +408,8 @@ def test_decorator_with_field_groups(self, tmp_path: 
Path): overrides.write_text('{"host": "remote", "port": 9090}') @load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), field_groups=((F["Config"].host, F["Config"].port),), ) @dataclass @@ -429,8 +429,8 @@ def test_decorator_partial_change_raises(self, tmp_path: Path): overrides.write_text('{"host": "remote"}') @load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), field_groups=((F["Config"].host, F["Config"].port),), ) @dataclass @@ -450,8 +450,8 @@ def test_error_message_format(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "debug": true}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Config: @@ -481,8 +481,8 @@ def test_multiple_violations_message(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"host": "remote", "user": "root"}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Config: @@ -538,8 +538,8 @@ class Config: timeout: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -570,8 +570,8 @@ class Config: timeout: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].database, F[Config].timeout),), ) @@ -589,8 +589,8 @@ def test_flat_changed_nested_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"timeout": 60}') - defaults_meta = Source(file=defaults) - 
overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Database: @@ -628,8 +628,8 @@ def test_nested_partial_flat_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote"}}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Database: @@ -665,8 +665,8 @@ def test_nested_all_changed_flat_not(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"database": {"host": "remote", "port": 3306}}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) defaults_repr = repr(defaults_meta) overrides_repr = repr(overrides_meta) @@ -719,8 +719,8 @@ class Config: inner: Inner result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, field_groups=((F[Config].user_name, F[Config].inner.user_name),), ) @@ -737,8 +737,8 @@ def test_only_root_changed_raises(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"user_name": "root-new"}') - defaults_meta = Source(file=defaults) - overrides_meta = Source(file=overrides) + defaults_meta = JsonSource(file=defaults) + overrides_meta = JsonSource(file=overrides) @dataclass class Inner: diff --git a/tests/sources/__init__.py b/tests/sources/__init__.py new file mode 100644 index 0000000..d949306 --- /dev/null +++ b/tests/sources/__init__.py @@ -0,0 +1 @@ +"""Tests for sources.""" diff --git a/tests/sources_loader/checker.py b/tests/sources/checker.py similarity index 100% rename from tests/sources_loader/checker.py rename to tests/sources/checker.py diff --git 
a/tests/sources_loader/test_base.py b/tests/sources/test_base.py similarity index 56% rename from tests/sources_loader/test_base.py rename to tests/sources/test_base.py index 52cef52..1a78e64 100644 --- a/tests/sources_loader/test_base.py +++ b/tests/sources/test_base.py @@ -1,43 +1,41 @@ from dataclasses import dataclass +from io import BytesIO, StringIO from pathlib import Path import pytest -from dature import Source, load +from dature import JsonSource, Source, load from dature.errors import EnvVarExpandError from dature.field_path import F -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.json_ import JsonLoader -from dature.types import ExpandEnvVarsMode, FileOrStream, JSONValue +from dature.sources.base import FileFieldMixin, _string_value_loaders +from dature.sources.retort import transform_to_dataclass +from dature.types import JSONValue -class MockLoader(BaseLoader): - """Mock loader for testing base class functionality.""" +@dataclass(kw_only=True) +class MockSource(Source): + """Mock source for testing base class functionality.""" - display_name = "mock" + format_name = "mock" + location_label = "MOCK" + test_data: JSONValue = None - def __init__( - self, - *, - prefix: str | None = None, - test_data: JSONValue = None, - expand_env_vars: ExpandEnvVarsMode = "default", - ): - super().__init__(prefix=prefix, expand_env_vars=expand_env_vars) - self._test_data = test_data or {} + def __post_init__(self) -> None: + if self.test_data is None: + self.test_data = {} - def _load(self, path: FileOrStream) -> JSONValue: # noqa: ARG002 + def _load(self) -> JSONValue: """Return test data.""" - return self._test_data + return self.test_data -class TestBaseLoader: - """Tests for BaseLoader base class.""" +class TestBaseSource: + """Tests for Source base class.""" def test_apply_prefix_simple(self): """Test applying simple prefix.""" data = {"app": {"name": "Test", "port": 8080}, "other": "value"} - loader = MockLoader(prefix="app", 
test_data=data) + loader = MockSource(prefix="app", test_data=data) result = loader._apply_prefix(data) @@ -46,7 +44,7 @@ def test_apply_prefix_simple(self): def test_apply_prefix_nested(self): """Test applying nested prefix with dots.""" data = {"app": {"database": {"host": "localhost", "port": 5432}}} - loader = MockLoader(prefix="app.database", test_data=data) + loader = MockSource(prefix="app.database", test_data=data) result = loader._apply_prefix(data) @@ -55,7 +53,7 @@ def test_apply_prefix_nested(self): def test_apply_prefix_none(self): """Test that None prefix returns original data.""" data = {"key": "value"} - loader = MockLoader(test_data=data) + loader = MockSource(test_data=data) result = loader._apply_prefix(data) @@ -64,7 +62,7 @@ def test_apply_prefix_none(self): def test_apply_prefix_empty_string(self): """Test that empty string prefix returns original data.""" data = {"key": "value"} - loader = MockLoader(prefix="", test_data=data) + loader = MockSource(prefix="", test_data=data) result = loader._apply_prefix(data) @@ -73,7 +71,7 @@ def test_apply_prefix_empty_string(self): def test_apply_prefix_nonexistent(self): """Test applying nonexistent prefix returns empty dict.""" data = {"app": {"name": "Test"}} - loader = MockLoader(prefix="nonexistent", test_data=data) + loader = MockSource(prefix="nonexistent", test_data=data) result = loader._apply_prefix(data) @@ -82,7 +80,7 @@ def test_apply_prefix_nonexistent(self): def test_apply_prefix_deep_nesting(self): """Test applying deeply nested prefix.""" data = {"a": {"b": {"c": {"d": {"value": "deep"}}}}} - loader = MockLoader(prefix="a.b.c.d", test_data=data) + loader = MockSource(prefix="a.b.c.d", test_data=data) result = loader._apply_prefix(data) @@ -91,7 +89,7 @@ def test_apply_prefix_deep_nesting(self): def test_apply_prefix_invalid_path(self): """Test applying prefix with invalid path.""" data = {"app": "not_a_dict"} - loader = MockLoader(prefix="app.nested", test_data=data) + loader = 
MockSource(prefix="app.nested", test_data=data) result = loader._apply_prefix(data) @@ -107,9 +105,9 @@ class Config: expected_data = Config(name="TestApp", port=8080) data = {"name": "TestApp", "port": 8080} - loader = MockLoader(test_data=data) + loader = MockSource(test_data=data) - result = loader.transform_to_dataclass(data, schema=Config) + result = transform_to_dataclass(loader, data, schema=Config) assert result == expected_data @@ -127,9 +125,9 @@ class Config: expected_data = Config(database=DatabaseConfig(host="localhost", port=5432)) data = {"database": {"host": "localhost", "port": 5432}} - loader = MockLoader(test_data=data) + loader = MockSource(test_data=data) - result = loader.transform_to_dataclass(data, schema=Config) + result = transform_to_dataclass(loader, data, schema=Config) assert result == expected_data @@ -145,17 +143,17 @@ class Config: expected_data = Config(name="TestApp", port=8080, debug=True, default="value") data = {"app": {"name": "TestApp", "port": 8080, "debug": True}} - loader = MockLoader(prefix="app", test_data=data) + loader = MockSource(prefix="app", test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, schema=Config) + load_result = loader.load_raw() + result = transform_to_dataclass(loader, load_result.data, schema=Config) assert result == expected_data def test_apply_prefix_with_list(self): """Test that apply_prefix returns data as-is when prefix points to non-dict.""" data = {"items": [1, 2, 3]} - loader = MockLoader(prefix="items", test_data=data) + loader = MockSource(prefix="items", test_data=data) result = loader._apply_prefix(data) @@ -174,7 +172,7 @@ class Config: json_file.write_text('{"userName": "John", "userAge": 25, "isActive": true}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="lower_camel"), + JsonSource(file=json_file, name_style="lower_camel"), schema=Config, ) @@ -192,7 +190,7 @@ class Config: 
json_file.write_text('{"user_name": "Alice", "user_age": 30}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="lower_snake"), + JsonSource(file=json_file, name_style="lower_snake"), schema=Config, ) @@ -209,7 +207,7 @@ class Config: json_file.write_text('{"UserName": "Bob", "TotalCount": 100}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="upper_camel"), + JsonSource(file=json_file, name_style="upper_camel"), schema=Config, ) @@ -226,7 +224,7 @@ class Config: json_file.write_text('{"user-name": "Charlie", "api-key": "secret123"}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="lower_kebab"), + JsonSource(file=json_file, name_style="lower_kebab"), schema=Config, ) @@ -243,7 +241,7 @@ class Config: json_file.write_text('{"USER-NAME": "Dave", "API-KEY": "secret456"}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="upper_kebab"), + JsonSource(file=json_file, name_style="upper_kebab"), schema=Config, ) @@ -260,7 +258,7 @@ class Config: json_file.write_text('{"USER_NAME": "Eve", "MAX_RETRIES": 3}') result = load( - Source(file=json_file, loader=JsonLoader, name_style="upper_snake"), + JsonSource(file=json_file, name_style="upper_snake"), schema=Config, ) @@ -286,7 +284,7 @@ class Config: } result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -307,7 +305,7 @@ class Config: field_mapping = {F[Config].name: "userName"} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -328,12 +326,7 @@ class Config: field_mapping = {F[Config].special_field: "customKey"} result = load( - Source( - file=json_file, - loader=JsonLoader, - name_style="lower_camel", - field_mapping=field_mapping, - ), + JsonSource(file=json_file, name_style="lower_camel", 
field_mapping=field_mapping), schema=Config, ) @@ -365,7 +358,7 @@ class User: } result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=User, ) @@ -384,7 +377,7 @@ class Config: field_mapping = {F[Config].name: ("fullName", "userName")} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -401,7 +394,7 @@ class Config: field_mapping = {F[Config].name: ("fullName", "userName")} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -422,7 +415,7 @@ class User: field_mapping = {F[User].address.city: "cityName"} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=User, ) @@ -439,7 +432,7 @@ class Config: field_mapping = {F["Config"].name: "fullName"} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -456,7 +449,7 @@ class Config: field_mapping = {F[Config].name: "fullName"} result = load( - Source(file=json_file, loader=JsonLoader, field_mapping=field_mapping), + JsonSource(file=json_file, field_mapping=field_mapping), schema=Config, ) @@ -484,12 +477,7 @@ class Config: } result = load( - Source( - file=json_file, - loader=JsonLoader, - name_style="lower_camel", - field_mapping=field_mapping, - ), + JsonSource(file=json_file, name_style="lower_camel", field_mapping=field_mapping), schema=Config, ) @@ -519,12 +507,7 @@ class Config: } result = load( - Source( - file=json_file, - loader=JsonLoader, - name_style="lower_camel", - field_mapping=field_mapping, - ), + JsonSource(file=json_file, name_style="lower_camel", 
field_mapping=field_mapping), schema=Config, ) @@ -537,57 +520,300 @@ class TestExpandEnvVars: def test_default_expands_existing(self, monkeypatch): monkeypatch.setenv("DATURE_TEST_HOST", "localhost") data = {"host": "$DATURE_TEST_HOST", "port": 8080} - loader = MockLoader(test_data=data) + loader = MockSource(test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dict) + load_result = loader.load_raw() + result = transform_to_dataclass(loader, load_result.data, dict) assert result == {"host": "localhost", "port": 8080} def test_default_keeps_missing(self, monkeypatch): monkeypatch.delenv("DATURE_MISSING", raising=False) data = {"host": "$DATURE_MISSING", "port": 8080} - loader = MockLoader(test_data=data) + loader = MockSource(test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dict) + load_result = loader.load_raw() + result = transform_to_dataclass(loader, load_result.data, dict) assert result == {"host": "$DATURE_MISSING", "port": 8080} def test_disabled(self, monkeypatch): monkeypatch.setenv("DATURE_TEST_HOST", "localhost") data = {"host": "$DATURE_TEST_HOST", "port": 8080} - loader = MockLoader(test_data=data, expand_env_vars="disabled") + loader = MockSource(test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dict) + load_result = loader.load_raw(resolved_expand="disabled") + result = transform_to_dataclass(loader, load_result.data, dict) assert result == {"host": "$DATURE_TEST_HOST", "port": 8080} def test_empty_replaces_missing_with_empty_string(self, monkeypatch): monkeypatch.delenv("DATURE_MISSING", raising=False) data = {"host": "$DATURE_MISSING", "port": 8080} - loader = MockLoader(test_data=data, expand_env_vars="empty") + loader = MockSource(test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dict) + 
load_result = loader.load_raw(resolved_expand="empty") + result = transform_to_dataclass(loader, load_result.data, dict) assert result == {"host": "", "port": 8080} def test_strict_raises_on_missing(self, monkeypatch): monkeypatch.delenv("DATURE_MISSING", raising=False) data = {"host": "$DATURE_MISSING", "port": 8080} - loader = MockLoader(test_data=data, expand_env_vars="strict") + loader = MockSource(test_data=data) with pytest.raises(EnvVarExpandError): - loader.load_raw(Path()) + loader.load_raw(resolved_expand="strict") def test_strict_expands_existing(self, monkeypatch): monkeypatch.setenv("DATURE_TEST_HOST", "localhost") data = {"host": "$DATURE_TEST_HOST", "port": 8080} - loader = MockLoader(test_data=data, expand_env_vars="strict") + loader = MockSource(test_data=data) - load_result = loader.load_raw(Path()) - result = loader.transform_to_dataclass(load_result.data, dict) + load_result = loader.load_raw(resolved_expand="strict") + result = transform_to_dataclass(loader, load_result.data, dict) assert result == {"host": "localhost", "port": 8080} + + +class TestFileFieldMixin: + def test_init_file_field_str(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file="/data/test.json") + src._init_file_field() + + assert src.file == "/data/test.json" + assert isinstance(src.file, str) + + def test_init_file_field_path(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file=Path("/data/test.json")) + src._init_file_field() + + assert src.file == "/data/test.json" + assert isinstance(src.file, str) + + def test_init_file_field_none(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file=None) + src._init_file_field() + + assert src.file is None + + def test_init_file_field_stream(self): + @dataclass + class Src(FileFieldMixin): + pass + + stream = StringIO("data") + src = Src(file=stream) + src._init_file_field() + + assert src.file is stream + + @pytest.mark.parametrize( + ("file_input", "expected_type"), + 
[ + ("config.json", Path), + (Path("config.json"), Path), + (None, Path), + ], + ) + def test_resolve_file_field_path_types(self, file_input, expected_type): + result = FileFieldMixin.resolve_file_field(file_input) + + assert isinstance(result, expected_type) + + def test_resolve_file_field_stream(self): + stream = StringIO("data") + + result = FileFieldMixin.resolve_file_field(stream) + + assert result is stream + + def test_resolve_file_field_binary_stream(self): + stream = BytesIO(b"data") + + result = FileFieldMixin.resolve_file_field(stream) + + assert result is stream + + @pytest.mark.parametrize( + ("file_input", "expected"), + [ + ("config.json", "config.json"), + (Path("config.json"), "config.json"), + (None, None), + ], + ) + def test_file_field_display(self, file_input, expected): + result = FileFieldMixin.file_field_display(file_input) + + assert result == expected + + def test_file_field_display_stream(self): + result = FileFieldMixin.file_field_display(StringIO("data")) + + assert result == "" + + def test_file_field_display_binary_stream(self): + result = FileFieldMixin.file_field_display(BytesIO(b"data")) + + assert result == "" + + @pytest.mark.parametrize( + ("file_input", "expected"), + [ + ("config.json", Path("config.json")), + (Path("config.json"), Path("config.json")), + (None, None), + ], + ) + def test_file_field_path_for_errors(self, file_input, expected): + result = FileFieldMixin.file_field_path_for_errors(file_input) + + assert result == expected + + def test_file_field_path_for_errors_stream(self): + result = FileFieldMixin.file_field_path_for_errors(StringIO("data")) + + assert result is None + + def test_file_field_path_for_errors_binary_stream(self): + result = FileFieldMixin.file_field_path_for_errors(BytesIO(b"data")) + + assert result is None + + def test_file_display_delegates(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file="config.json") + + assert src.file_display() == "config.json" + + def 
test_file_display_none(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file=None) + + assert src.file_display() is None + + def test_file_path_for_errors_delegates(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file=Path("config.json")) + + assert src.file_path_for_errors() == Path("config.json") + + def test_file_path_for_errors_none(self): + @dataclass + class Src(FileFieldMixin): + pass + + src = Src(file=None) + + assert src.file_path_for_errors() is None + + +class TestStringValueLoaders: + def test_returns_nine_providers(self): + loaders = _string_value_loaders() + + assert len(loaders) == 9 + + +class TestResolveLocation: + def test_file_content_none_returns_empty(self): + locations = MockSource.resolve_location( + field_path=["name"], + file_path=Path("config.json"), + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is None + assert locations[0].line_content is None + + def test_empty_field_path_returns_empty(self): + locations = MockSource.resolve_location( + field_path=[], + file_path=Path("config.json"), + file_content='{"name": "test"}', + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is None + + def test_path_finder_none_returns_empty(self): + locations = MockSource.resolve_location( + field_path=["name"], + file_path=Path("config.json"), + file_content='{"name": "test"}', + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is None + + def test_json_source_finds_line_range(self, tmp_path): + content = '{\n "name": "test",\n "port": 8080\n}' + + locations = JsonSource.resolve_location( + field_path=["name"], + file_path=tmp_path / "config.json", + file_content=content, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is not None + assert locations[0].line_content 
is not None + + def test_json_source_with_prefix(self, tmp_path): + content = '{\n "app": {\n "name": "test"\n }\n}' + + locations = JsonSource.resolve_location( + field_path=["name"], + file_path=tmp_path / "config.json", + file_content=content, + prefix="app", + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is not None + + def test_json_source_field_not_found_returns_empty(self, tmp_path): + content = '{\n "name": "test"\n}' + + locations = JsonSource.resolve_location( + field_path=["nonexistent"], + file_path=tmp_path / "config.json", + file_content=content, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is None diff --git a/tests/sources/test_docker_secrets.py b/tests/sources/test_docker_secrets.py new file mode 100644 index 0000000..60bf140 --- /dev/null +++ b/tests/sources/test_docker_secrets.py @@ -0,0 +1,141 @@ +from dataclasses import dataclass +from pathlib import Path + +from dature import DockerSecretsSource, load +from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact +from tests.sources.checker import assert_all_types_equal + + +class TestDockerSecretsSource: + def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path): + result = load( + DockerSecretsSource(dir_=all_types_docker_secrets_dir), + schema=AllPythonTypesCompact, + ) + + assert_all_types_equal(result, EXPECTED_ALL_TYPES) + + def test_custom_split_symbols(self, tmp_path: Path): + (tmp_path / "db.host").write_text("localhost") + (tmp_path / "db.port").write_text("5432") + + loader = DockerSecretsSource(dir_=tmp_path, split_symbols=".") + result = loader.load_raw() + + assert result.data == {"db": {"host": "localhost", "port": 5432}} + + def test_prefix_filtering(self, tmp_path: Path): + (tmp_path / "APP_name").write_text("myapp") + (tmp_path / "APP_port").write_text("8080") + (tmp_path / "OTHER_key").write_text("ignored") + + loader = 
DockerSecretsSource(dir_=tmp_path, prefix="APP_") + data = loader._load() + + assert data == {"name": "myapp", "port": "8080"} + + def test_skip_subdirectories(self, tmp_path: Path): + (tmp_path / "name").write_text("myapp") + subdir = tmp_path / "subdir" + subdir.mkdir() + (subdir / "nested_file").write_text("should_be_ignored") + + loader = DockerSecretsSource(dir_=tmp_path) + data = loader._load() + + assert data == {"name": "myapp"} + + def test_empty_directory(self, tmp_path: Path): + loader = DockerSecretsSource(dir_=tmp_path) + data = loader._load() + + assert data == {} + + def test_strip_file_content(self, tmp_path: Path): + (tmp_path / "secret").write_text(" password123\n") + + loader = DockerSecretsSource(dir_=tmp_path) + data = loader._load() + + assert data == {"secret": "password123"} + + def test_env_var_substitution(self, tmp_path: Path, monkeypatch): + monkeypatch.setenv("BASE_URL", "https://api.example.com") + + (tmp_path / "api_url").write_text("$BASE_URL/v1") + (tmp_path / "base").write_text("$BASE_URL") + + @dataclass + class Config: + api_url: str + base: str + + result = load( + DockerSecretsSource(dir_=tmp_path), + schema=Config, + ) + + assert result.api_url == "https://api.example.com/v1" + assert result.base == "https://api.example.com" + assert result.base == "https://api.example.com" + assert result.base == "https://api.example.com" + assert result.base == "https://api.example.com" + assert result.base == "https://api.example.com" + + +class TestDockerSecretsDisplayProperties: + def test_format_name_and_label(self): + assert DockerSecretsSource.format_name == "docker_secrets" + assert DockerSecretsSource.location_label == "SECRET FILE" + + +class TestDockerSecretsResolveLocation: + def test_resolve_builds_secret_path(self, tmp_path: Path): + locations = DockerSecretsSource.resolve_location( + field_path=["db_password"], + file_path=tmp_path, + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + 
assert locations[0].file_path == tmp_path / "db_password" + assert locations[0].line_range is None + assert locations[0].location_label == "SECRET FILE" + + def test_resolve_with_prefix(self, tmp_path: Path): + locations = DockerSecretsSource.resolve_location( + field_path=["password"], + file_path=tmp_path, + file_content=None, + prefix="APP_", + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].file_path == tmp_path / "APP_password" + + def test_resolve_nested_path(self, tmp_path: Path): + locations = DockerSecretsSource.resolve_location( + field_path=["database", "host"], + file_path=tmp_path, + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].file_path == tmp_path / "database__host" + + def test_resolve_file_path_none(self): + locations = DockerSecretsSource.resolve_location( + field_path=["secret"], + file_path=None, + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].file_path is None diff --git a/tests/sources_loader/test_env_.py b/tests/sources/test_env_.py similarity index 68% rename from tests/sources_loader/test_env_.py rename to tests/sources/test_env_.py index d984b57..a70e979 100644 --- a/tests/sources_loader/test_env_.py +++ b/tests/sources/test_env_.py @@ -1,23 +1,23 @@ -"""Tests for env_ module (EnvLoader and EnvFileLoader).""" +"""Tests for env_ module (EnvSource and EnvFileSource).""" from dataclasses import dataclass +from io import StringIO from pathlib import Path import pytest -from dature import Source, load -from dature.sources_loader.env_ import EnvFileLoader, EnvLoader +from dature import EnvFileSource, EnvSource, load from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestEnvFileLoader: - """Tests for EnvFileLoader 
class.""" +class TestEnvFileSource: + """Tests for EnvFileSource class.""" def test_prefix_filtering(self, prefixed_env_file: Path): """Test prefix filtering with nested structures.""" - loader = EnvFileLoader(prefix="APP_") - result = loader.load_raw(prefixed_env_file) + loader = EnvFileSource(file=prefixed_env_file, prefix="APP_") + result = loader.load_raw() assert result.data == { "name": "PrefixedApp", @@ -30,8 +30,8 @@ def test_prefix_filtering(self, prefixed_env_file: Path): def test_custom_split_symbols(self, custom_separator_env_file: Path): """Test custom separator for nested keys.""" - loader = EnvFileLoader(prefix="APP_", split_symbols=".") - result = loader.load_raw(custom_separator_env_file) + loader = EnvFileSource(file=custom_separator_env_file, prefix="APP_", split_symbols=".") + result = loader.load_raw() assert result.data == { "name": "CustomApp", @@ -44,7 +44,7 @@ def test_custom_split_symbols(self, custom_separator_env_file: Path): def test_comprehensive_type_conversion(self, all_types_env_file: Path): """Test loading ENV with full type coercion to dataclass.""" - result = load(Source(file=all_types_env_file, loader=EnvFileLoader), schema=AllPythonTypesCompact) + result = load(EnvFileSource(file=all_types_env_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -53,8 +53,8 @@ def test_empty_file(self, tmp_path: Path): env_file = tmp_path / ".env" env_file.write_text("") - loader = EnvFileLoader() - data = loader._load(env_file) + loader = EnvFileSource(file=env_file) + data = loader._load() assert data == {} @@ -69,7 +69,7 @@ class Config: api_url: str base: str - result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) + result = load(EnvFileSource(file=env_file), schema=Config) assert result.api_url == "https://api.example.com/v1" assert result.base == "https://api.example.com" @@ -85,7 +85,7 @@ def test_env_fileenv_var_partial_substitution(self, tmp_path: Path, monkeypatch) class 
Config: url: str - result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) + result = load(EnvFileSource(file=env_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -99,7 +99,7 @@ def test_env_filedollar_sign_mid_string_existing_var(self, tmp_path: Path, monke class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) + result = load(EnvFileSource(file=env_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -125,8 +125,8 @@ def test_quote_stripping(self, tmp_path: Path, raw_value: str, expected: str): env_file = tmp_path / ".env" env_file.write_text(f"value={raw_value}") - loader = EnvFileLoader() - data = loader._load(env_file) + loader = EnvFileSource(file=env_file) + data = loader._load() assert data == {"value": expected} @@ -140,13 +140,13 @@ def test_env_filedollar_sign_mid_string_missing_var(self, tmp_path: Path, monkey class Config: value: str - result = load(Source(file=env_file, loader=EnvFileLoader), schema=Config) + result = load(EnvFileSource(file=env_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" -class TestEnvLoader: - """Tests for EnvLoader class.""" +class TestEnvSource: + """Tests for EnvSource class.""" def test_comprehensive_type_conversion(self, monkeypatch): """Test loading from os.environ with full type coercion to dataclass.""" @@ -256,7 +256,7 @@ def test_comprehensive_type_conversion(self, monkeypatch): for key, value in env_vars.items(): monkeypatch.setenv(key, value) - result = load(Source(loader=EnvLoader, prefix="APP_"), schema=AllPythonTypesCompact) + result = load(EnvSource(prefix="APP_"), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -273,7 +273,7 @@ class TestConfig: expected_data = TestConfig(var="included", key="also_included") - data = load(Source(loader=EnvLoader, prefix="APP_"), schema=TestConfig) + data = load(EnvSource(prefix="APP_"), schema=TestConfig) assert 
data == expected_data @@ -294,10 +294,119 @@ class TestConfig: expected_data = TestConfig(db=TestData(host="localhost", port="5432")) data = load( - Source(loader=EnvLoader, prefix="APP_", split_symbols="."), + EnvSource(prefix="APP_", split_symbols="."), schema=TestConfig, ) assert data == expected_data assert data == expected_data assert data == expected_data + + +class TestEnvSourceDisplayProperties: + def test_format_name_and_label(self): + assert EnvSource.format_name == "env" + assert EnvSource.location_label == "ENV" + + +class TestEnvFileSourceDisplayProperties: + def test_format_name_and_label(self): + assert EnvFileSource.format_name == "envfile" + assert EnvFileSource.location_label == "ENV FILE" + + +class TestEnvSourceResolveLocation: + def test_resolve_returns_env_var_name(self): + locations = EnvSource.resolve_location( + field_path=["host"], + file_path=None, + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].env_var_name == "HOST" + assert locations[0].file_path is None + assert locations[0].location_label == "ENV" + + def test_resolve_with_prefix(self): + locations = EnvSource.resolve_location( + field_path=["host"], + file_path=None, + file_content=None, + prefix="APP_", + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].env_var_name == "APP_HOST" + + def test_resolve_nested_path(self): + locations = EnvSource.resolve_location( + field_path=["database", "host"], + file_path=None, + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].env_var_name == "DATABASE__HOST" + + def test_resolve_with_custom_split_symbols(self): + locations = EnvSource.resolve_location( + field_path=["database", "host"], + file_path=None, + file_content=None, + prefix=None, + nested_conflict=None, + split_symbols=".", + ) + + assert len(locations) == 1 + assert locations[0].env_var_name == "DATABASE.HOST" + + +class 
TestEnvFileSourceResolveLocation: + def test_resolve_finds_line_in_content(self): + content = "HOST=localhost\nPORT=8080" + + locations = EnvFileSource.resolve_location( + field_path=["port"], + file_path=Path(".env"), + file_content=content, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].env_var_name == "PORT" + assert locations[0].line_range is not None + assert locations[0].line_range.start == 2 + + def test_resolve_no_content(self): + locations = EnvFileSource.resolve_location( + field_path=["host"], + file_path=Path(".env"), + file_content=None, + prefix=None, + nested_conflict=None, + ) + + assert len(locations) == 1 + assert locations[0].line_range is None + assert locations[0].location_label == "ENV FILE" + + +class TestEnvFileSourceStream: + def test_load_from_string_stream(self): + @dataclass + class Config: + name: str + port: int + + result = load(EnvFileSource(file=StringIO("NAME=test\nPORT=8080")), schema=Config) + + assert result.name == "test" + assert result.port == 8080 diff --git a/tests/sources_loader/test_ini_.py b/tests/sources/test_ini_.py similarity index 73% rename from tests/sources_loader/test_ini_.py rename to tests/sources/test_ini_.py index 5a2ac73..d9788f0 100644 --- a/tests/sources_loader/test_ini_.py +++ b/tests/sources/test_ini_.py @@ -1,24 +1,24 @@ -"""Tests for ini_ module (IniLoader).""" +"""Tests for ini_ module (IniSource).""" import configparser from dataclasses import dataclass +from io import StringIO from pathlib import Path import pytest -from dature import Source, load -from dature.sources_loader.ini_ import IniLoader +from dature import IniSource, load from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestIniLoader: - """Tests for IniLoader class.""" +class TestIniSource: + """Tests for IniSource class.""" def 
test_comprehensive_type_conversion(self, all_types_ini_file: Path): """Test loading INI with full type coercion to dataclass.""" result = load( - Source(file=all_types_ini_file, loader=IniLoader, prefix="all_types"), + IniSource(file=all_types_ini_file, prefix="all_types"), schema=AllPythonTypesCompact, ) @@ -26,8 +26,8 @@ def test_comprehensive_type_conversion(self, all_types_ini_file: Path): def test_ini_sections(self, ini_sections_file: Path): """Test INI sections and DEFAULT inheritance.""" - loader = IniLoader() - data = loader._load(ini_sections_file) + loader = IniSource(file=ini_sections_file) + data = loader._load() assert data == { "DEFAULT": { @@ -59,7 +59,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_ini_file, loader=IniLoader, prefix="app"), + IniSource(file=prefixed_ini_file, prefix="app"), schema=PrefixedConfig, ) @@ -70,10 +70,10 @@ def test_ini_requires_sections(self, tmp_path: Path): ini_file = tmp_path / "nosection.ini" ini_file.write_text("key = value") - loader = IniLoader() + loader = IniSource(file=ini_file) with pytest.raises(configparser.MissingSectionHeaderError): - loader._load(ini_file) + loader._load() def test_ini_env_var_substitution(self, tmp_path: Path, monkeypatch): monkeypatch.setenv("DB_HOST", "db.example.com") @@ -88,7 +88,7 @@ class DbConfig: port: int result = load( - Source(file=ini_file, loader=IniLoader, prefix="database"), + IniSource(file=ini_file, prefix="database"), schema=DbConfig, ) @@ -107,7 +107,7 @@ class Config: url: str result = load( - Source(file=ini_file, loader=IniLoader, prefix="section"), + IniSource(file=ini_file, prefix="section"), schema=Config, ) @@ -124,7 +124,7 @@ class Config: value: str result = load( - Source(file=ini_file, loader=IniLoader, prefix="section"), + IniSource(file=ini_file, prefix="section"), schema=Config, ) @@ -141,9 +141,31 @@ class Config: value: str result = load( - Source(file=ini_file, loader=IniLoader, prefix="section"), + IniSource(file=ini_file, 
prefix="section"), schema=Config, ) assert result.value == "prefix$nonexistent/suffix" assert result.value == "prefix$nonexistent/suffix" + + +class TestIniSourceDisplayProperties: + def test_format_name_and_label(self): + assert IniSource.format_name == "ini" + assert IniSource.location_label == "FILE" + + +class TestIniSourceStream: + def test_load_from_string_stream(self): + @dataclass + class Config: + key: str + value: str + + result = load( + IniSource(file=StringIO("[section]\nkey = hello\nvalue = world"), prefix="section"), + schema=Config, + ) + + assert result.key == "hello" + assert result.value == "world" diff --git a/tests/sources_loader/test_json5_.py b/tests/sources/test_json5_.py similarity index 80% rename from tests/sources_loader/test_json5_.py rename to tests/sources/test_json5_.py index 067fe84..664cfdf 100644 --- a/tests/sources_loader/test_json5_.py +++ b/tests/sources/test_json5_.py @@ -1,23 +1,28 @@ -"""Tests for json5_ module (Json5Loader).""" +"""Tests for json5_ module (Json5Source).""" from dataclasses import dataclass from pathlib import Path import pytest -from dature import Source, load +from dature import Json5Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.json5_ import Json5Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestJson5Loader: - """Tests for Json5Loader class.""" +class TestJson5SourceDisplayProperties: + def test_format_name_and_label(self): + assert Json5Source.format_name == "json5" + assert Json5Source.location_label == "FILE" + + +class TestJson5Source: + """Tests for Json5Source class.""" def test_comprehensive_type_conversion(self, all_types_json5_file: Path): """Test loading JSON5 with full type coercion to dataclass.""" - result = load(Source(file=all_types_json5_file, loader=Json5Loader), 
schema=AllPythonTypesCompact) + result = load(Json5Source(file=all_types_json5_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +42,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_json5_file, loader=Json5Loader, prefix="app"), + Json5Source(file=prefixed_json5_file, prefix="app"), schema=PrefixedConfig, ) @@ -48,8 +53,8 @@ def test_json5_empty_object(self, tmp_path: Path): json5_file = tmp_path / "empty.json5" json5_file.write_text("{}") - loader = Json5Loader() - data = loader._load(json5_file) + loader = Json5Source(file=json5_file) + data = loader._load() assert data == {} @@ -65,7 +70,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json5_file, loader=Json5Loader), schema=DbConfig) + result = load(Json5Source(file=json5_file), schema=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +86,7 @@ def test_json5_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) + result = load(Json5Source(file=json5_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +100,7 @@ def test_json5_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeyp class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) + result = load(Json5Source(file=json5_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +114,7 @@ def test_json5_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=json5_file, loader=Json5Loader), schema=Config) + result = load(Json5Source(file=json5_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +127,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, loader=Json5Loader), 
schema=Config) + load(Json5Source(file=json5_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +150,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json5_file, loader=Json5Loader), schema=Config) + load(Json5Source(file=json5_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_json_.py b/tests/sources/test_json_.py similarity index 75% rename from tests/sources_loader/test_json_.py rename to tests/sources/test_json_.py index bbd6294..c1671a6 100644 --- a/tests/sources_loader/test_json_.py +++ b/tests/sources/test_json_.py @@ -1,23 +1,23 @@ -"""Tests for json_ module (JsonLoader).""" +"""Tests for json_ module (JsonSource).""" from dataclasses import dataclass +from io import StringIO from pathlib import Path import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.json_ import JsonLoader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestJsonLoader: - """Tests for JsonLoader class.""" +class TestJsonSource: + """Tests for JsonSource class.""" def test_comprehensive_type_conversion(self, all_types_json_file: Path): """Test loading JSON with full type coercion to dataclass.""" - result = load(Source(file=all_types_json_file, loader=JsonLoader), schema=AllPythonTypesCompact) + result = load(JsonSource(file=all_types_json_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +37,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_json_file, loader=JsonLoader, prefix="app"), + JsonSource(file=prefixed_json_file, prefix="app"), schema=PrefixedConfig, ) @@ -48,8 +48,8 @@ def 
test_json_empty_object(self, tmp_path: Path): json_file = tmp_path / "empty.json" json_file.write_text("{}") - loader = JsonLoader() - data = loader._load(json_file) + loader = JsonSource(file=json_file) + data = loader._load() assert data == {} @@ -65,7 +65,7 @@ class DbConfig: host: str port: int - result = load(Source(file=json_file, loader=JsonLoader), schema=DbConfig) + result = load(JsonSource(file=json_file), schema=DbConfig) assert result.host == "db.example.com" assert result.port == 5432 @@ -81,7 +81,7 @@ def test_json_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=json_file, loader=JsonLoader), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +95,7 @@ def test_json_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=json_file, loader=JsonLoader), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +109,7 @@ def test_json_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=json_file, loader=JsonLoader), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +122,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), schema=Config) + load(JsonSource(file=json_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +145,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=json_file, loader=JsonLoader), schema=Config) + load(JsonSource(file=json_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -158,3 +158,22 @@ class Config: f" │ ^\n" f" └── 
FILE '{json_file}', line 1" ) + + +class TestJsonSourceDisplayProperties: + def test_format_name_and_label(self): + assert JsonSource.format_name == "json" + assert JsonSource.location_label == "FILE" + + +class TestJsonSourceStream: + def test_load_from_string_stream(self): + @dataclass + class Config: + name: str + port: int + + result = load(JsonSource(file=StringIO('{"name": "test", "port": 8080}')), schema=Config) + + assert result.name == "test" + assert result.port == 8080 diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources/test_nested_resolve.py similarity index 87% rename from tests/sources_loader/test_nested_resolve.py rename to tests/sources/test_nested_resolve.py index 519fd5c..07da684 100644 --- a/tests/sources_loader/test_nested_resolve.py +++ b/tests/sources/test_nested_resolve.py @@ -7,10 +7,8 @@ import pytest -from dature import F, Source, load +from dature import DockerSecretsSource, EnvFileSource, EnvSource, F, Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.docker_secrets import DockerSecretsLoader -from dature.sources_loader.env_ import EnvFileLoader, EnvLoader @dataclass @@ -83,10 +81,10 @@ def set_data(data: dict[str, str]) -> None: def make_metadata(**kwargs: Any) -> Source: if loader_type == "env": - return Source(loader=EnvLoader, prefix="MYAPP__", **kwargs) + return EnvSource(prefix="MYAPP__", **kwargs) if loader_type == "envfile": - return Source(file=tmp_path / ".env", loader=EnvFileLoader, prefix="MYAPP__", **kwargs) - return Source(file=tmp_path, loader=DockerSecretsLoader, **kwargs) + return EnvFileSource(file=tmp_path / ".env", prefix="MYAPP__", **kwargs) + return DockerSecretsSource(dir_=tmp_path, **kwargs) return FlatLoaderSetup(set_data=set_data, make_metadata=make_metadata) @@ -180,7 +178,7 @@ def test_partial_missing_field( with pytest.raises(DatureConfigError) as exc_info: load( - Source(loader=EnvLoader, prefix="MYAPP__", **_strategy_kwargs(strategy, 
local=local)), + EnvSource(prefix="MYAPP__", **_strategy_kwargs(strategy, local=local)), schema=NestedConfig, ) @@ -202,9 +200,8 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source( + EnvFileSource( file=env_file, - loader=EnvFileLoader, prefix="MYAPP__", **_strategy_kwargs("flat", local=local), ), @@ -225,9 +222,8 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source( + EnvFileSource( file=env_file, - loader=EnvFileLoader, prefix="MYAPP__", **_strategy_kwargs("json", local=local), ), @@ -256,7 +252,7 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)), + DockerSecretsSource(dir_=tmp_path, **_strategy_kwargs("flat", local=local)), schema=NestedConfig, ) @@ -274,7 +270,7 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)), + DockerSecretsSource(dir_=tmp_path, **_strategy_kwargs("json", local=local)), schema=NestedConfig, ) @@ -295,7 +291,7 @@ def test_json_invalid_flat_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat monkeypatch.setenv("MYAPP__VAR__BAR", "20") result = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -308,7 +304,7 @@ def test_json_invalid_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvSource(prefix="MYAPP__", 
nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -334,7 +330,7 @@ def test_flat_invalid_json_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat monkeypatch.setenv("MYAPP__VAR__BAR", "not_a_number") result = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -347,7 +343,7 @@ def test_flat_invalid_flat_strategy_errors(self, monkeypatch: pytest.MonkeyPatch with pytest.raises(DatureConfigError) as exc_info: load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -376,7 +372,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: ) result = load( - Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -390,7 +386,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -421,7 +417,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: ) result = load( - Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -435,7 +431,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + EnvFileSource(file=env_file, 
prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -469,7 +465,7 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None: (tmp_path / "var__bar").write_text("20") result = load( - Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), + DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -482,7 +478,7 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -508,7 +504,7 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None: (tmp_path / "var__bar").write_text("not_a_number") result = load( - Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -521,7 +517,7 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"), + DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -553,7 +549,7 @@ def test_multiline_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch) - with pytest.raises(DatureConfigError) as exc_info: load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -582,7 +578,7 @@ def test_multiline_flat_strategy_ignores_json(self, monkeypatch: pytest.MonkeyPa monkeypatch.setenv("MYAPP__VAR__BAR", "20") result = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + 
EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -616,8 +612,7 @@ def test_different_strategies( monkeypatch.setenv("MYAPP__VAR2__BAR", "flat2") result = load( - Source( - loader=EnvLoader, + EnvSource( prefix="MYAPP__", nested_resolve={ var1_strategy: (F[TwoNestedConfig].var1,), @@ -653,8 +648,7 @@ def test_local_overrides_global( monkeypatch.setenv("MYAPP__VAR__BAR", "from_flat") result = load( - Source( - loader=EnvLoader, + EnvSource( prefix="MYAPP__", nested_resolve_strategy=global_strategy, nested_resolve={local_strategy: (F[NestedConfig].var,)}, @@ -674,12 +668,7 @@ def test_flat_strategy_single_underscore(self, monkeypatch: pytest.MonkeyPatch) monkeypatch.setenv("APP_VAR_BAR", "from_flat") result = load( - Source( - loader=EnvLoader, - prefix="APP_", - split_symbols="_", - nested_resolve_strategy="flat", - ), + EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="flat"), schema=NestedConfig, ) @@ -692,12 +681,7 @@ def test_json_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP with pytest.raises(DatureConfigError) as exc_info: load( - Source( - loader=EnvLoader, - prefix="APP_", - split_symbols="_", - nested_resolve_strategy="json", - ), + EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -716,12 +700,7 @@ def test_flat_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP with pytest.raises(DatureConfigError) as exc_info: load( - Source( - loader=EnvLoader, - prefix="APP_", - split_symbols="_", - nested_resolve_strategy="flat", - ), + EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -780,7 +759,7 @@ def test_deep_env(self, monkeypatch: pytest.MonkeyPatch, strategy: str, expected monkeypatch.setenv("MYAPP__VAR__SUB__KEY", "from_flat") result = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy=strategy), + EnvSource(prefix="MYAPP__", 
nested_resolve_strategy=strategy), schema=DeepConfig, ) @@ -793,12 +772,7 @@ def test_flat_strategy_deep_envfile(self, tmp_path: Path) -> None: ) result = load( - Source( - file=env_file, - loader=EnvFileLoader, - prefix="MYAPP__", - nested_resolve_strategy="flat", - ), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"), schema=DeepConfig, ) @@ -809,7 +783,7 @@ def test_json_strategy_deep_docker_secrets(self, tmp_path: Path) -> None: (tmp_path / "var__sub__key").write_text("from_flat") result = load( - Source(file=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"), + DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"), schema=DeepConfig, ) @@ -826,12 +800,7 @@ def test_flat_strategy_error(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source( - file=tmp_path, - loader=DockerSecretsLoader, - prefix="myapp__", - nested_resolve_strategy="flat", - ), + DockerSecretsSource(dir_=tmp_path, prefix="myapp__", nested_resolve_strategy="flat"), schema=NestedIntConfig, ) @@ -851,12 +820,7 @@ def test_json_strategy_error(self, tmp_path: Path) -> None: with pytest.raises(DatureConfigError) as exc_info: load( - Source( - file=tmp_path, - loader=DockerSecretsLoader, - prefix="myapp__", - nested_resolve_strategy="json", - ), + DockerSecretsSource(dir_=tmp_path, prefix="myapp__", nested_resolve_strategy="json"), schema=NestedIntConfig, ) @@ -879,11 +843,11 @@ def test_flat_first_then_json(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("MYAPP__VAR", '{"foo": "from_json", "bar": "from_json"}') result_flat = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedConfig, ) result_json = load( - Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"), + EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedConfig, ) @@ 
-897,21 +861,11 @@ def test_envfilereversed_order(self, tmp_path: Path) -> None: ) result_flat = load( - Source( - file=env_file, - loader=EnvFileLoader, - prefix="MYAPP__", - nested_resolve_strategy="flat", - ), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"), schema=NestedConfig, ) result_json = load( - Source( - file=env_file, - loader=EnvFileLoader, - prefix="MYAPP__", - nested_resolve_strategy="json", - ), + EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"), schema=NestedConfig, ) @@ -938,13 +892,9 @@ def test_empty_dict_uses_global( monkeypatch.setenv("MYAPP__VAR__BAR", "from_flat") result = load( - Source( - loader=EnvLoader, - prefix="MYAPP__", - nested_resolve_strategy=strategy, - nested_resolve={}, - ), + EnvSource(prefix="MYAPP__", nested_resolve_strategy=strategy, nested_resolve={}), schema=NestedConfig, ) assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) + assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source)) diff --git a/tests/sources/test_retort.py b/tests/sources/test_retort.py new file mode 100644 index 0000000..6b63104 --- /dev/null +++ b/tests/sources/test_retort.py @@ -0,0 +1,280 @@ +from dataclasses import dataclass + +import pytest +from adaptix import NameStyle as AdaptixNameStyle +from adaptix import Retort + +from dature.field_path import F +from dature.sources.base import Source +from dature.sources.retort import ( + build_base_recipe, + create_probe_retort, + create_retort, + create_validating_retort, + ensure_retort, + get_adaptix_name_style, + get_name_mapping_providers, + get_validator_providers, + transform_to_dataclass, +) +from dature.types import JSONValue + + +@dataclass(kw_only=True) +class MockSource(Source): + format_name = "mock" + location_label = "MOCK" + test_data: JSONValue = None + + def __post_init__(self) -> None: + if self.test_data is None: + self.test_data = {} + + def _load(self) -> 
JSONValue: + return self.test_data + + +class TestGetAdaptixNameStyle: + @pytest.mark.parametrize( + ("name_style", "expected"), + [ + ("lower_snake", AdaptixNameStyle.LOWER_SNAKE), + ("upper_snake", AdaptixNameStyle.UPPER_SNAKE), + ("lower_camel", AdaptixNameStyle.CAMEL), + ("upper_camel", AdaptixNameStyle.PASCAL), + ("lower_kebab", AdaptixNameStyle.LOWER_KEBAB), + ("upper_kebab", AdaptixNameStyle.UPPER_KEBAB), + ], + ) + def test_maps_style(self, name_style, expected): + result = get_adaptix_name_style(name_style) + + assert result == expected + + def test_none_returns_none(self): + result = get_adaptix_name_style(None) + + assert result is None + + +class TestGetNameMappingProviders: + def test_none_none_returns_empty(self): + result = get_name_mapping_providers(None, None) + + assert result == [] + + def test_name_style_only(self): + result = get_name_mapping_providers("lower_camel", None) + + assert len(result) == 1 + + def test_field_mapping_with_field_path(self): + @dataclass + class Config: + name: str + + field_mapping = {F[Config].name: "fullName"} + result = get_name_mapping_providers(None, field_mapping) + + assert len(result) >= 1 + + def test_field_mapping_with_string_owner(self): + field_mapping = {F["Config"].name: "fullName"} + result = get_name_mapping_providers(None, field_mapping) + + assert len(result) >= 1 + + def test_combined_name_style_and_field_mapping(self): + @dataclass + class Config: + user_name: str + + field_mapping = {F[Config].user_name: "full_name"} + result = get_name_mapping_providers("lower_camel", field_mapping) + + assert len(result) >= 2 + + def test_nested_field_path(self): + @dataclass + class Inner: + city: str + + @dataclass + class Outer: + inner: Inner + + field_mapping = {F[Outer].inner.city: "cityName"} + result = get_name_mapping_providers(None, field_mapping) + + assert len(result) >= 1 + + +class TestGetValidatorProviders: + def test_no_validators_returns_empty(self): + @dataclass + class Config: + name: str + 
port: int + + result = get_validator_providers(Config) + + assert result == [] + + +class TestBuildBaseRecipe: + def test_default_source(self): + source = MockSource() + result = build_base_recipe(source) + + assert len(result) > 0 + + def test_with_resolved_type_loaders(self): + source = MockSource() + custom_loaders = {str: lambda x: str(x).upper()} + + result_default = build_base_recipe(source) + result_custom = build_base_recipe(source, resolved_type_loaders=custom_loaders) + + assert len(result_custom) == len(result_default) + 1 + + def test_with_source_type_loaders(self): + source = MockSource(type_loaders={str: lambda x: str(x).upper()}) + + result_with = build_base_recipe(source) + result_without = build_base_recipe(MockSource()) + + assert len(result_with) == len(result_without) + 1 + + def test_resolved_type_loaders_override_source(self): + source = MockSource(type_loaders={str: lambda _: "source"}) + resolved = {int: lambda x: x + 1} + + result = build_base_recipe(source, resolved_type_loaders=resolved) + + result_with_source_loaders = build_base_recipe( + MockSource(type_loaders={str: lambda _: "source"}), + ) + result_with_resolved = build_base_recipe( + MockSource(), + resolved_type_loaders=resolved, + ) + + assert len(result) == len(result_with_resolved) + assert len(result) != len(result_with_source_loaders) or len(resolved) == len(source.type_loaders or {}) + + +class TestCreateRetort: + def test_returns_retort(self): + source = MockSource() + + result = create_retort(source) + + assert isinstance(result, Retort) + + +class TestCreateProbeRetort: + def test_returns_retort(self): + source = MockSource() + + result = create_probe_retort(source) + + assert isinstance(result, Retort) + + +class TestCreateValidatingRetort: + def test_returns_retort(self): + @dataclass + class Config: + name: str + + source = MockSource() + + result = create_validating_retort(source, Config) + + assert isinstance(result, Retort) + + def test_with_root_validators(self): + 
@dataclass + class Config: + name: str + + @dataclass(frozen=True, slots=True) + class AlwaysTrue: + def get_validator_func(self): + return lambda _: True + + def get_error_message(self): + return "always true" + + source = MockSource(root_validators=(AlwaysTrue(),)) + + result = create_validating_retort(source, Config) + + assert isinstance(result, Retort) + + +class TestTransformToDataclass: + def test_basic_transform(self): + @dataclass + class Config: + name: str + port: int + + source = MockSource() + data = {"name": "TestApp", "port": 8080} + + result = transform_to_dataclass(source, data, Config) + + assert result == Config(name="TestApp", port=8080) + + def test_caches_retort(self): + @dataclass + class Config: + name: str + + source = MockSource() + assert Config not in source.retorts + + transform_to_dataclass(source, {"name": "a"}, Config) + + assert Config in source.retorts + + def test_reuses_cached_retort(self): + @dataclass + class Config: + name: str + + source = MockSource() + transform_to_dataclass(source, {"name": "a"}, Config) + cached = source.retorts[Config] + + transform_to_dataclass(source, {"name": "b"}, Config) + + assert source.retorts[Config] is cached + + +class TestEnsureRetort: + def test_creates_retort(self): + @dataclass + class Config: + name: str + + source = MockSource() + assert Config not in source.retorts + + ensure_retort(source, Config) + + assert Config in source.retorts + + def test_does_not_overwrite_existing(self): + @dataclass + class Config: + name: str + + source = MockSource() + ensure_retort(source, Config) + existing = source.retorts[Config] + + ensure_retort(source, Config) + + assert source.retorts[Config] is existing diff --git a/tests/sources_loader/test_toml10_.py b/tests/sources/test_toml10_.py similarity index 80% rename from tests/sources_loader/test_toml10_.py rename to tests/sources/test_toml10_.py index 4c03c71..cf18ce1 100644 --- a/tests/sources_loader/test_toml10_.py +++ b/tests/sources/test_toml10_.py 
@@ -1,23 +1,28 @@ -"""Tests for toml_ module (Toml10Loader).""" +"""Tests for toml_ module (Toml10Source).""" from dataclasses import dataclass from pathlib import Path import pytest -from dature import Source, load +from dature import Toml10Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.toml_ import Toml10Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestToml10Loader: - """Tests for Toml10Loader class.""" +class TestToml10SourceDisplayProperties: + def test_format_name_and_label(self): + assert Toml10Source.format_name == "toml1.0" + assert Toml10Source.location_label == "FILE" + + +class TestToml10Source: + """Tests for Toml10Source class.""" def test_comprehensive_type_conversion(self, all_types_toml10_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml10_file, loader=Toml10Loader), schema=AllPythonTypesCompact) + result = load(Toml10Source(file=all_types_toml10_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +42,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_toml_file, loader=Toml10Loader, prefix="app"), + Toml10Source(file=prefixed_toml_file, prefix="app"), schema=PrefixedConfig, ) @@ -48,8 +53,8 @@ def test_toml_empty_file(self, tmp_path: Path): toml_file = tmp_path / "empty.toml" toml_file.write_text("") - loader = Toml10Loader() - data = loader._load(toml_file) + loader = Toml10Source(file=toml_file) + data = loader._load() assert data == {} @@ -65,7 +70,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + result = load(Toml10Source(file=toml_file), schema=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +86,7 @@ 
def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + result = load(Toml10Source(file=toml_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -95,7 +100,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + result = load(Toml10Source(file=toml_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +114,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + result = load(Toml10Source(file=toml_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +127,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + load(Toml10Source(file=toml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +150,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml10Loader), schema=Config) + load(Toml10Source(file=toml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources/test_toml11_.py similarity index 80% rename from tests/sources_loader/test_toml11_.py rename to tests/sources/test_toml11_.py index f20e134..d9cf1c3 100644 --- a/tests/sources_loader/test_toml11_.py +++ b/tests/sources/test_toml11_.py @@ -1,23 +1,28 @@ -"""Tests for toml_ module (Toml11Loader).""" +"""Tests for toml_ module (Toml11Source).""" from dataclasses import dataclass from pathlib import Path import pytest -from dature import Source, load +from dature import Toml11Source, 
load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.toml_ import Toml11Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestToml11Loader: - """Tests for Toml11Loader class.""" +class TestToml11SourceDisplayProperties: + def test_format_name_and_label(self): + assert Toml11Source.format_name == "toml1.1" + assert Toml11Source.location_label == "FILE" + + +class TestToml11Source: + """Tests for Toml11Source class.""" def test_comprehensive_type_conversion(self, all_types_toml11_file: Path): """Test loading TOML with full type coercion to dataclass.""" - result = load(Source(file=all_types_toml11_file, loader=Toml11Loader), schema=AllPythonTypesCompact) + result = load(Toml11Source(file=all_types_toml11_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +42,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_toml_file, loader=Toml11Loader, prefix="app"), + Toml11Source(file=prefixed_toml_file, prefix="app"), schema=PrefixedConfig, ) @@ -48,8 +53,8 @@ def test_toml_empty_file(self, tmp_path: Path): toml_file = tmp_path / "empty.toml" toml_file.write_text("") - loader = Toml11Loader() - data = loader._load(toml_file) + loader = Toml11Source(file=toml_file) + data = loader._load() assert data == {} @@ -65,7 +70,7 @@ class Config: name: str port: int - result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + result = load(Toml11Source(file=toml_file), schema=Config) assert result.name == "MyApp" assert result.port == 9090 @@ -81,7 +86,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + result = load(Toml11Source(file=toml_file), schema=Config) assert 
result.url == "http://localhost:8080/api" @@ -95,7 +100,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + result = load(Toml11Source(file=toml_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -109,7 +114,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + result = load(Toml11Source(file=toml_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -122,7 +127,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + load(Toml11Source(file=toml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -145,7 +150,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=toml_file, loader=Toml11Loader), schema=Config) + load(Toml11Source(file=toml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources/test_yaml11_.py similarity index 81% rename from tests/sources_loader/test_yaml11_.py rename to tests/sources/test_yaml11_.py index ddd2be3..ec913a1 100644 --- a/tests/sources_loader/test_yaml11_.py +++ b/tests/sources/test_yaml11_.py @@ -1,23 +1,28 @@ -"""Tests for yaml_ module (Yaml11Loader).""" +"""Tests for yaml_ module (Yaml11Source).""" from dataclasses import dataclass from pathlib import Path import pytest -from dature import Source, load +from dature import Yaml11Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.yaml_ import Yaml11Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import 
assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestYaml11Loader: - """Tests for Yaml11Loader class.""" +class TestYaml11SourceDisplayProperties: + def test_format_name_and_label(self): + assert Yaml11Source.format_name == "yaml1.1" + assert Yaml11Source.location_label == "FILE" + + +class TestYaml11Source: + """Tests for Yaml11Source class.""" def test_comprehensive_type_conversion(self, all_types_yaml11_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml11_file, loader=Yaml11Loader), schema=AllPythonTypesCompact) + result = load(Yaml11Source(file=all_types_yaml11_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +42,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"), + Yaml11Source(file=prefixed_yaml_file, prefix="app"), schema=PrefixedConfig, ) @@ -62,7 +67,7 @@ class EnvConfig: services: Services result = load( - Source(file=yaml_config_with_env_vars_file, loader=Yaml11Loader), + Yaml11Source(file=yaml_config_with_env_vars_file), schema=EnvConfig, ) @@ -82,7 +87,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) + result = load(Yaml11Source(file=yaml_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +101,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) + result = load(Yaml11Source(file=yaml_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +115,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) 
+ result = load(Yaml11Source(file=yaml_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -119,8 +124,8 @@ def test_yaml_empty_file(self, tmp_path: Path): yaml_file = tmp_path / "empty.yaml" yaml_file.write_text("") - loader = Yaml11Loader() - data = loader._load(yaml_file) + loader = Yaml11Source(file=yaml_file) + data = loader._load() assert data is None @@ -133,7 +138,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) + load(Yaml11Source(file=yaml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +161,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml11Loader), schema=Config) + load(Yaml11Source(file=yaml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources/test_yaml12_.py similarity index 81% rename from tests/sources_loader/test_yaml12_.py rename to tests/sources/test_yaml12_.py index c1796e3..c55d1e8 100644 --- a/tests/sources_loader/test_yaml12_.py +++ b/tests/sources/test_yaml12_.py @@ -1,23 +1,28 @@ -"""Tests for yaml_ module (Yaml12Loader).""" +"""Tests for yaml_ module (Yaml12Source).""" from dataclasses import dataclass from pathlib import Path import pytest -from dature import Source, load +from dature import Yaml12Source, load from dature.errors import DatureConfigError, FieldLoadError -from dature.sources_loader.yaml_ import Yaml12Loader from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal +from tests.sources.checker import assert_all_types_equal -class TestYaml12Loader: - """Tests for Yaml12Loader class.""" +class TestYaml12SourceDisplayProperties: + def test_format_name_and_label(self): + assert Yaml12Source.format_name == "yaml1.2" + assert 
Yaml12Source.location_label == "FILE" + + +class TestYaml12Source: + """Tests for Yaml12Source class.""" def test_comprehensive_type_conversion(self, all_types_yaml12_file: Path): """Test loading YAML with full type coercion to dataclass.""" - result = load(Source(file=all_types_yaml12_file, loader=Yaml12Loader), schema=AllPythonTypesCompact) + result = load(Yaml12Source(file=all_types_yaml12_file), schema=AllPythonTypesCompact) assert_all_types_equal(result, EXPECTED_ALL_TYPES) @@ -37,7 +42,7 @@ class PrefixedConfig: ) result = load( - Source(file=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"), + Yaml12Source(file=prefixed_yaml_file, prefix="app"), schema=PrefixedConfig, ) @@ -62,7 +67,7 @@ class EnvConfig: services: Services result = load( - Source(file=yaml_config_with_env_vars_file, loader=Yaml12Loader), + Yaml12Source(file=yaml_config_with_env_vars_file), schema=EnvConfig, ) @@ -82,7 +87,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch): class Config: url: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) + result = load(Yaml12Source(file=yaml_file), schema=Config) assert result.url == "http://localhost:8080/api" @@ -96,7 +101,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) + result = load(Yaml12Source(file=yaml_file), schema=Config) assert result.value == "prefixreplaced/suffix" @@ -110,7 +115,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat class Config: value: str - result = load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) + result = load(Yaml12Source(file=yaml_file), schema=Config) assert result.value == "prefix$nonexistent/suffix" @@ -119,8 +124,8 @@ def test_yaml_empty_file(self, tmp_path: Path): yaml_file = tmp_path / "empty.yaml" yaml_file.write_text("") - loader = Yaml12Loader() - data = 
loader._load(yaml_file) + loader = Yaml12Source(file=yaml_file) + data = loader._load() assert data is None @@ -133,7 +138,7 @@ class Config: count: int with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) + load(Yaml12Source(file=yaml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 @@ -156,7 +161,7 @@ class Config: flag: bool with pytest.raises(DatureConfigError) as exc_info: - load(Source(file=yaml_file, loader=Yaml12Loader), schema=Config) + load(Yaml12Source(file=yaml_file), schema=Config) err = exc_info.value assert len(err.exceptions) == 1 diff --git a/tests/sources_loader/__init__.py b/tests/sources_loader/__init__.py deleted file mode 100644 index f03c2b7..0000000 --- a/tests/sources_loader/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for sources_loader.""" diff --git a/tests/sources_loader/test_docker_secrets.py b/tests/sources_loader/test_docker_secrets.py deleted file mode 100644 index af2bdf9..0000000 --- a/tests/sources_loader/test_docker_secrets.py +++ /dev/null @@ -1,82 +0,0 @@ -from dataclasses import dataclass -from pathlib import Path - -from dature import Source, load -from dature.sources_loader.docker_secrets import DockerSecretsLoader -from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact -from tests.sources_loader.checker import assert_all_types_equal - - -class TestDockerSecretsLoader: - def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path): - result = load( - Source(file=all_types_docker_secrets_dir, loader=DockerSecretsLoader), - schema=AllPythonTypesCompact, - ) - - assert_all_types_equal(result, EXPECTED_ALL_TYPES) - - def test_custom_split_symbols(self, tmp_path: Path): - (tmp_path / "db.host").write_text("localhost") - (tmp_path / "db.port").write_text("5432") - - loader = DockerSecretsLoader(split_symbols=".") - result = loader.load_raw(tmp_path) - - assert result.data == {"db": 
{"host": "localhost", "port": 5432}} - - def test_prefix_filtering(self, tmp_path: Path): - (tmp_path / "APP_name").write_text("myapp") - (tmp_path / "APP_port").write_text("8080") - (tmp_path / "OTHER_key").write_text("ignored") - - loader = DockerSecretsLoader(prefix="APP_") - data = loader._load(tmp_path) - - assert data == {"name": "myapp", "port": "8080"} - - def test_skip_subdirectories(self, tmp_path: Path): - (tmp_path / "name").write_text("myapp") - subdir = tmp_path / "subdir" - subdir.mkdir() - (subdir / "nested_file").write_text("should_be_ignored") - - loader = DockerSecretsLoader() - data = loader._load(tmp_path) - - assert data == {"name": "myapp"} - - def test_empty_directory(self, tmp_path: Path): - loader = DockerSecretsLoader() - data = loader._load(tmp_path) - - assert data == {} - - def test_strip_filecontent(self, tmp_path: Path): - (tmp_path / "secret").write_text(" password123\n") - - loader = DockerSecretsLoader() - data = loader._load(tmp_path) - - assert data == {"secret": "password123"} - - def test_env_var_substitution(self, tmp_path: Path, monkeypatch): - monkeypatch.setenv("BASE_URL", "https://api.example.com") - - (tmp_path / "api_url").write_text("$BASE_URL/v1") - (tmp_path / "base").write_text("$BASE_URL") - - @dataclass - class Config: - api_url: str - base: str - - result = load( - Source(file=tmp_path, loader=DockerSecretsLoader), - schema=Config, - ) - - assert result.api_url == "https://api.example.com/v1" - assert result.base == "https://api.example.com" - assert result.base == "https://api.example.com" - assert result.base == "https://api.example.com" diff --git a/tests/test_custom_loader.py b/tests/test_custom_loader.py index 9be7c63..644141f 100644 --- a/tests/test_custom_loader.py +++ b/tests/test_custom_loader.py @@ -1,4 +1,4 @@ -"""Tests for custom loaders — subclassing BaseLoader.""" +"""Tests for custom sources — subclassing Source.""" import xml.etree.ElementTree as ET from dataclasses import dataclass @@ -7,24 +7,25 
@@ from adaptix import Provider, loader -from dature import Source, load -from dature.sources_loader.base import BaseLoader -from dature.sources_loader.loaders import bool_loader, float_from_string +from dature import FileSource, load +from dature.loaders import bool_loader, float_from_string from dature.types import FileOrStream, JSONValue -class XmlLoader(BaseLoader): - display_name: ClassVar[str] = "xml" +@dataclass(kw_only=True) +class XmlSource(FileSource): + format_name: ClassVar[str] = "xml" + path_finder_class = None - def _load(self, path: FileOrStream) -> JSONValue: + def _load_file(self, path: FileOrStream) -> JSONValue: if not isinstance(path, Path): - msg = "XmlLoader only supports file paths" + msg = "XmlSource only supports file paths" raise TypeError(msg) tree = ET.parse(path) # noqa: S314 root = tree.getroot() return {child.tag: child.text or "" for child in root} - def _additional_loaders(self) -> list[Provider]: + def additional_loaders(self) -> list[Provider]: return [ loader(bool, bool_loader), loader(float, float_from_string), @@ -46,7 +47,7 @@ def test_xml_loader(self, tmp_path: Path) -> None: ) result = load( - Source(file=xml_file, loader=XmlLoader), + XmlSource(file=xml_file), schema=XmlConfig, ) diff --git a/tests/test_load_report.py b/tests/test_load_report.py index c86730d..c1a98c4 100644 --- a/tests/test_load_report.py +++ b/tests/test_load_report.py @@ -8,7 +8,7 @@ import pytest -from dature import Source, get_load_report, load +from dature import JsonSource, get_load_report, load from dature.errors import DatureConfigError from dature.load_report import FieldOrigin, LoadReport, SourceEntry from dature.validators.number import Ge @@ -28,8 +28,8 @@ class Config: port: int result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, debug=True, ) @@ -86,8 +86,8 @@ class Config: port: int result = load( - Source(file=first), - Source(file=second), + 
JsonSource(file=first), + JsonSource(file=second), schema=Config, strategy="first_wins", debug=True, @@ -149,8 +149,8 @@ class Config: database: Database result = load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, debug=True, ) @@ -187,7 +187,7 @@ class Config: name: str port: int - result = load(Source(file=json_file), schema=Config, debug=True) + result = load(JsonSource(file=json_file), schema=Config, debug=True) report = get_load_report(result) @@ -231,7 +231,7 @@ def test_merge_decorator(self, tmp_path: Path): overrides = tmp_path / "overrides.json" overrides.write_text('{"port": 9090}') - @load(Source(file=defaults), Source(file=overrides), debug=True) + @load(JsonSource(file=defaults), JsonSource(file=overrides), debug=True) @dataclass class Config: host: str @@ -247,7 +247,7 @@ def test_single_source_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 3000}') - @load(Source(file=json_file), debug=True) + @load(JsonSource(file=json_file), debug=True) @dataclass class Config: host: str @@ -279,7 +279,7 @@ class Config: host: str port: int - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") @@ -305,19 +305,19 @@ class Config: with caplog.at_level(logging.DEBUG, logger="dature"): load( - Source(file=defaults), - Source(file=overrides), + JsonSource(file=defaults), + JsonSource(file=overrides), schema=Config, ) messages = [r.message for r in caplog.records if r.name == "dature"] expected = [ - f"[JsonLoader] load_raw: path={defaults}," + f"[JsonSource] load_raw: source={defaults}," " raw_keys=['host', 'port'], after_preprocessing_keys=['host', 'port']", f"[Config] Source 0 loaded: loader=json, file={defaults}, keys=['host', 'port']", "[Config] Source 0 raw data: {'host': 
'localhost', 'port': 3000}", - f"[JsonLoader] load_raw: path={overrides}, raw_keys=['port'], after_preprocessing_keys=['port']", + f"[JsonSource] load_raw: source={overrides}, raw_keys=['port'], after_preprocessing_keys=['port']", f"[Config] Source 1 loaded: loader=json, file={overrides}, keys=['port']", "[Config] Source 1 raw data: {'port': 8080}", "[Config] Merge step 0 (strategy=last_wins): added=['host', 'port'], overwritten=[]", @@ -340,12 +340,12 @@ class Config: port: int with caplog.at_level(logging.DEBUG, logger="dature"): - load(Source(file=json_file), schema=Config) + load(JsonSource(file=json_file), schema=Config) messages = [r.message for r in caplog.records if r.name == "dature"] expected = [ - f"[JsonLoader] load_raw: path={json_file}," + f"[JsonSource] load_raw: source={json_file}," " raw_keys=['host', 'port'], after_preprocessing_keys=['host', 'port']", f"[Config] Single-source load: loader=json, file={json_file}", "[Config] Loaded data: {'host': 'localhost', 'port': 3000}", @@ -368,8 +368,8 @@ class Config: with pytest.raises(DatureConfigError): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, debug=True, ) @@ -408,8 +408,8 @@ class Config: with pytest.raises(DatureConfigError): load( - Source(file=a), - Source(file=b), + JsonSource(file=a), + JsonSource(file=b), schema=Config, debug=True, ) @@ -445,7 +445,7 @@ class Config: port: int with pytest.raises(DatureConfigError): - load(Source(file=json_file), schema=Config, debug=True) + load(JsonSource(file=json_file), schema=Config, debug=True) expected = LoadReport( dataclass_name="Config", @@ -475,7 +475,7 @@ class Config: port: Annotated[int, Ge(0)] with pytest.raises(DatureConfigError): - load(Source(file=json_file), schema=Config, debug=True) + load(JsonSource(file=json_file), schema=Config, debug=True) expected = LoadReport( dataclass_name="Config", diff --git a/tests/test_main.py b/tests/test_main.py index 69fbedc..f94f932 100644 --- 
a/tests/test_main.py +++ b/tests/test_main.py @@ -5,17 +5,23 @@ import pytest -from dature import Source, load -from dature.sources_loader.env_ import EnvFileLoader -from dature.sources_loader.ini_ import IniLoader -from dature.sources_loader.json5_ import Json5Loader -from dature.sources_loader.json_ import JsonLoader -from dature.sources_loader.toml_ import Toml10Loader, Toml11Loader -from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader +from dature import ( + EnvFileSource, + EnvSource, + IniSource, + Json5Source, + JsonSource, + Source, + Toml10Source, + Toml11Source, + Yaml11Source, + Yaml12Source, + load, +) -def _all_fileloaders() -> list[type]: - return [EnvFileLoader, Yaml11Loader, Yaml12Loader, JsonLoader, Json5Loader, Toml10Loader, Toml11Loader, IniLoader] +def _all_file_sources() -> list[type[Source]]: + return [EnvFileSource, Yaml11Source, Yaml12Source, JsonSource, Json5Source, Toml10Source, Toml11Source, IniSource] class TestLoadAsDecorator: @@ -23,7 +29,7 @@ def test_loads_from_file(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "FromFile", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @load(metadata) @dataclass @@ -39,7 +45,7 @@ def test_loads_from_env(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("APP_NAME", "EnvApp") monkeypatch.setenv("APP_PORT", "3000") - metadata = Source(prefix="APP_") + metadata = EnvSource(prefix="APP_") @load(metadata) @dataclass @@ -54,7 +60,7 @@ class Config: def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("MY_VAR", "test_value") - @load(Source()) + @load(EnvSource()) @dataclass class Config: my_var: str @@ -66,7 +72,7 @@ def test_explicit_loader_overrides_extension(self, tmp_path: Path) -> None: txt_file = tmp_path / "config.txt" txt_file.write_text('{"app_name": "OverrideApp"}') - metadata = Source(file=txt_file, loader=JsonLoader) + metadata = 
JsonSource(file=txt_file) @load(metadata) @dataclass @@ -80,7 +86,7 @@ def test_priority(self, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("LOADED_VAR", "loaded") monkeypatch.setenv("OVERRIDDEN_VAR", "loaded") - @load(Source()) + @load(EnvSource()) @dataclass class Config: overridden_var: str @@ -97,7 +103,7 @@ def test_invalid_decorator_order(self) -> None: with pytest.raises(TypeError, match="Config must be a dataclass"): @dataclass - @load(Source()) + @load(EnvSource()) class Config: pass @@ -107,7 +113,7 @@ def test_cache_enabled_by_default(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @load(metadata) @dataclass @@ -126,7 +132,7 @@ def test_cache_disabled(self, tmp_path: Path) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"name": "original", "port": 8080}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) @load(metadata, cache=False) @dataclass @@ -152,7 +158,7 @@ class Config: name: str port: int - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.name == "FromFile" @@ -167,7 +173,7 @@ class Config: name: str debug: bool - metadata = Source(prefix="APP_") + metadata = EnvSource(prefix="APP_") result = load(metadata, schema=Config) assert result.name == "EnvFunc" @@ -180,33 +186,33 @@ def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None: class Config: my_var: str - result = load(Source(), schema=Config) + result = load(EnvSource(), schema=Config) assert result.my_var == "from_env" class TestFileNotFoundWithLoad: @pytest.mark.parametrize( - "loader_class", - _all_fileloaders(), + "source_class", + _all_file_sources(), ) - def test_load_function_single_source_filenot_found(self, loader_class: type) -> None: + def 
test_load_function_single_source_filenot_found(self, source_class: type[Source]) -> None: @dataclass class Config: name: str - metadata = Source(file="/non/existent/file.json", loader=loader_class) + metadata = source_class(file="/non/existent/file.json") with pytest.raises(FileNotFoundError): load(metadata, schema=Config) @pytest.mark.parametrize( - "loader_class", - _all_fileloaders(), + "source_class", + _all_file_sources(), ) - def test_load_decorator_single_source_filenot_found(self, loader_class: type) -> None: - metadata = Source(file="/non/existent/config.json", loader=loader_class) + def test_load_decorator_single_source_filenot_found(self, source_class: type[Source]) -> None: + metadata = source_class(file="/non/existent/config.json") @load(metadata) @dataclass diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py index 8492ffb..1801ba0 100644 --- a/tests/test_type_loaders.py +++ b/tests/test_type_loaders.py @@ -6,7 +6,7 @@ import pytest -from dature import Source, configure, load +from dature import Yaml12Source, configure, load from dature.config import _ConfigProxy @@ -47,7 +47,7 @@ def yaml_with_rgb(tmp_path: Path) -> Path: class TestTypeLoadersInSource: def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None: result = load( - Source( + Yaml12Source( file=yaml_with_rgb, type_loaders={Rgb: rgb_from_string}, ), @@ -66,7 +66,7 @@ def int_times_two(value: str) -> int: p.write_text("name: app\ncolor: '10,20,30'\n") result = load( - Source( + Yaml12Source( file=p, type_loaders={Rgb: rgb_from_string}, ), @@ -81,7 +81,7 @@ def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None: configure( type_loaders={Rgb: rgb_from_string}, ) - result = load(Source(file=yaml_with_rgb), schema=ConfigWithRgb) + result = load(Yaml12Source(file=yaml_with_rgb), schema=ConfigWithRgb) assert result.color == Rgb(r=255, g=128, b=0) @@ -93,8 +93,8 @@ def test_merge_metadata_type_loaders(self, tmp_path: Path) -> None: 
override.write_text("name: override\n") result = load( - Source(file=base), - Source(file=override), + Yaml12Source(file=base), + Yaml12Source(file=override), schema=ConfigWithRgb, type_loaders={Rgb: rgb_from_string}, ) @@ -121,7 +121,7 @@ def tag_upper(value: str) -> str: p.write_text("color: '10,20,30'\ntag: hello\n") result = load( - Source( + Yaml12Source( file=p, type_loaders={str: tag_upper}, ), diff --git a/tests/test_type_utils.py b/tests/test_type_utils.py new file mode 100644 index 0000000..f926eb5 --- /dev/null +++ b/tests/test_type_utils.py @@ -0,0 +1,58 @@ +from dataclasses import dataclass +from typing import Annotated + +from dature.type_utils import find_nested_dataclasses + + +class TestFindNestedDataclasses: + def test_plain_dataclass(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(Inner) + assert result == [Inner] + + def test_list_of_dataclasses(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(list[Inner]) + assert result == [Inner] + + def test_plain_type_no_dataclass(self): + result = find_nested_dataclasses(str) + assert result == [] + + def test_optional_dataclass(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(Inner | None) + assert result == [Inner] + + def test_annotated_dataclass(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(Annotated[Inner, "some_meta"]) + assert result == [Inner] + + def test_dict_value_dataclass(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(dict[str, Inner]) + assert result == [Inner] + + def test_nested_generic(self): + @dataclass + class Inner: + name: str + + result = find_nested_dataclasses(list[Inner | None]) + assert result == [Inner] diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py index 473e876..d5a21c7 100644 --- a/tests/validators/test_complex.py +++ b/tests/validators/test_complex.py @@ -4,7 +4,7 
@@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError from dature.validators.number import Ge, Le from dature.validators.sequence import MinItems, UniqueItems @@ -22,7 +22,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", "coding"]}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.name == "Alice" @@ -40,7 +40,7 @@ class Config: content = '{"name": "AB", "age": 200, "tags": []}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -86,7 +86,7 @@ class User: '{"name": "Alice", "age": 30, "address": {"city": "NYC", "zip_code": "12345"}}', ) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=User) assert result.name == "Alice" @@ -110,7 +110,7 @@ class User: content = '{"name": "Al", "age": 15, "address": {"city": "N", "zip_code": "ABCDE"}}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=User) @@ -154,7 +154,7 @@ class Config: content = '{"age": 15}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -178,7 +178,7 @@ class Config: '{"groups": {"admins": [{"name": "Alice"}]}}', ) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.groups == {"admins": [{"name": "Alice"}]} @@ -192,7 +192,7 @@ class Config: content = '{"groups": {}}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = 
JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -222,7 +222,7 @@ class Config: '{"teams": {"backend": [{"name": "Alice", "role": "admin"}]}}', ) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.teams["backend"][0].name == "Alice" @@ -242,7 +242,7 @@ class Config: content = '{"teams": {"backend": [{"name": "A", "role": "ab"}]}}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py index 28808d1..cc5c45e 100644 --- a/tests/validators/test_custom_validator.py +++ b/tests/validators/test_custom_validator.py @@ -5,7 +5,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError @@ -48,7 +48,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"count": 10}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.count == 10 @@ -62,7 +62,7 @@ class Config: content = '{"count": 7}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -86,7 +86,7 @@ class Config: content = '{"count": 7}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -111,7 +111,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"url": "https://example.com"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, 
schema=Config) assert result.url == "https://example.com" @@ -125,7 +125,7 @@ class Config: content = '{"url": "http://example.com"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -146,7 +146,7 @@ def test_success(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(10)] @@ -159,7 +159,7 @@ def test_failure(self, tmp_path: Path): content = '{"port": 8081}' json_file.write_text(content) - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(10)] @@ -182,7 +182,7 @@ def test_direct_instantiation_validates(self, tmp_path: Path): content = '{"port": 8080}' json_file.write_text(content) - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: Annotated[int, Divisible(10)] @@ -208,7 +208,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"count": 15, "url": "https://example.com"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.count == 15 @@ -224,7 +224,7 @@ class Config: content = '{"count": 7, "url": "http://example.com"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py index c729168..8d033e6 100644 --- a/tests/validators/test_metadata_validators.py +++ b/tests/validators/test_metadata_validators.py @@ -4,7 +4,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from 
dature.errors import DatureConfigError from dature.field_path import F from dature.validators.number import Ge, Gt, Lt @@ -21,7 +21,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MinLength(3), @@ -39,7 +39,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].port: (Gt(0), Lt(65536)), @@ -58,7 +58,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice", "port": 8080}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MinLength(3), @@ -81,7 +81,7 @@ class Config: content = '{"name": "Al"}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MinLength(3), @@ -110,7 +110,7 @@ class Config: content = '{"port": -1}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].port: (Gt(0), Lt(65536)), @@ -145,7 +145,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"database": {"host": "localhost", "port": 5432}}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].database.host: MinLength(1), @@ -171,7 +171,7 @@ class Config: content = '{"database": {"host": "", "port": 5432}}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].database.host: MinLength(1), @@ -202,7 +202,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice", "port": 8080}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MaxLength(50), @@ -223,7 +223,7 @@ class Config: content = '{"name": "Al"}' json_file.write_text(content) - 
metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MaxLength(50), @@ -252,7 +252,7 @@ class Config: content = '{"name": "This is a very long name that exceeds the limit"}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MaxLength(10), @@ -280,7 +280,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MaxLength(10), @@ -299,7 +299,7 @@ class Config: content = '{"name": "AB"}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].name: MaxLength(50), @@ -327,7 +327,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080}') - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].port: Lt(65536), @@ -346,7 +346,7 @@ class Config: content = '{"port": 80}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].port: Lt(65536), @@ -375,7 +375,7 @@ class Config: content = '{"port": 70000}' json_file.write_text(content) - metadata = Source( + metadata = JsonSource( file=json_file, validators={ F[Config].port: Lt(65536), @@ -405,7 +405,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.name == "Alice" @@ -426,7 +426,7 @@ def validate_config(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "user": "admin"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=(RootValidator(validate_config),), validators={ @@ -449,7 +449,7 @@ class Config: name: str age: int - metadata = Source( + metadata = JsonSource( 
file=json_file, validators={ F[Config].name: MinLength(2), diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py index c2681ac..8fe42d9 100644 --- a/tests/validators/test_number.py +++ b/tests/validators/test_number.py @@ -4,7 +4,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError from dature.validators.number import Ge, Gt, Le, Lt @@ -18,7 +18,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 25}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.age == 25 @@ -32,7 +32,7 @@ class Config: content = '{"age": 18}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 18}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.age == 18 @@ -71,7 +71,7 @@ class Config: content = '{"age": 17}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 99}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.age == 99 @@ -110,7 +110,7 @@ class Config: content = '{"age": 100}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" 
json_file.write_text('{"age": 100}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.age == 100 @@ -149,7 +149,7 @@ class Config: content = '{"age": 101}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -174,7 +174,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"age": 30}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.age == 30 @@ -188,7 +188,7 @@ class Config: content = '{"age": 70}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/validators/test_post_init_and_property.py b/tests/validators/test_post_init_and_property.py index 827d35e..40d27d8 100644 --- a/tests/validators/test_post_init_and_property.py +++ b/tests/validators/test_post_init_and_property.py @@ -3,7 +3,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load class TestPostInitValidationFunctionMode: @@ -21,7 +21,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.port == 8080 assert result.host == "localhost" @@ -41,7 +41,7 @@ def __post_init__(self) -> None: json_file.write_text('{"port": 99999, "host": "localhost"}') with pytest.raises(ValueError, match="Invalid port: 99999"): - load(Source(file=json_file), schema=Config) + load(JsonSource(file=json_file), schema=Config) def test_post_init_cross_field_validation(self, tmp_path: Path): @dataclass @@ -58,7 +58,7 @@ def 
__post_init__(self) -> None: json_file.write_text('{"min_value": 100, "max_value": 10}') with pytest.raises(ValueError, match=r"min_value \(100\) must be less than max_value \(10\)"): - load(Source(file=json_file), schema=Config) + load(JsonSource(file=json_file), schema=Config) def test_post_init_cross_field_success(self, tmp_path: Path): @dataclass @@ -74,7 +74,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 1, "max_value": 100}') - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.min_value == 1 assert result.max_value == 100 @@ -85,7 +85,7 @@ def test_post_init_success(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: int @@ -105,7 +105,7 @@ def test_post_init_failure_from_file(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 99999, "host": "localhost"}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: int @@ -123,7 +123,7 @@ def test_post_init_failure_from_override(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"port": 8080, "host": "localhost"}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: port: int @@ -141,7 +141,7 @@ def test_post_init_cross_field(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 50, "max_value": 10}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: min_value: int @@ -170,7 +170,7 @@ def __post_init__(self) -> None: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), schema=Config) + result 
= load(JsonSource(file=json_file), schema=Config) assert result.base_url == "http://localhost:8080" @@ -178,7 +178,7 @@ def test_computed_field_via_post_init_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "example.com", "port": 443}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: host: str @@ -207,7 +207,7 @@ def address(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 8080}') - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.address == "localhost:8080" @@ -215,7 +215,7 @@ def test_property_computed_value_decorator(self, tmp_path: Path): json_file = tmp_path / "config.json" json_file.write_text('{"host": "localhost", "port": 3000}') - @load(Source(file=json_file)) + @load(JsonSource(file=json_file)) @dataclass class Config: host: str @@ -241,6 +241,6 @@ def email(self) -> str: json_file = tmp_path / "config.json" json_file.write_text('{"_email": " Admin@Example.COM "}') - result = load(Source(file=json_file), schema=Config) + result = load(JsonSource(file=json_file), schema=Config) assert result.email == "admin@example.com" diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py index 24f89dc..42a4f09 100644 --- a/tests/validators/test_root_validator.py +++ b/tests/validators/test_root_validator.py @@ -3,7 +3,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError from dature.validators.root import RootValidator @@ -23,7 +23,7 @@ def validate_config(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"port": 80, "user": "root"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=(RootValidator(validate_config),), ) @@ -46,7 +46,7 @@ def validate_config(obj: Config) 
-> bool: json_file = tmp_path / "config.json" json_file.write_text('{"port": 80, "user": "admin"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=(RootValidator(validate_config),), ) @@ -75,7 +75,7 @@ def validate_step(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 10, "max_value": 100, "step": 5}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=( RootValidator(validate_min_max), @@ -104,7 +104,7 @@ def validate_step(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"min_value": 100, "max_value": 10, "step": -5}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=( RootValidator(validate_min_max), @@ -132,7 +132,7 @@ def validate_config(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"port": 80, "host": "localhost"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=(RootValidator(validate_config),), ) @@ -154,7 +154,7 @@ def validate_credentials(obj) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"username": "admin", "password": "short"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=(RootValidator(validate_credentials),), ) @@ -187,7 +187,7 @@ def validate_config(obj: Config) -> bool: json_file = tmp_path / "config.json" json_file.write_text('{"port": 80, "user": "admin"}') - metadata = Source( + metadata = JsonSource( file=json_file, root_validators=( RootValidator( diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py index dcc10e9..cd40ce9 100644 --- a/tests/validators/test_sequence.py +++ b/tests/validators/test_sequence.py @@ -4,7 +4,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError from dature.validators.sequence import MaxItems, MinItems, UniqueItems @@ -18,7 
+18,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.tags == ["python", "typing"] @@ -32,7 +32,7 @@ class Config: content = '{"tags": ["python"]}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing"]}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.tags == ["python", "typing"] @@ -71,7 +71,7 @@ class Config: content = '{"tags": ["python", "typing", "validation"]}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing", "validation"]}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.tags == ["python", "typing", "validation"] @@ -110,7 +110,7 @@ class Config: content = '{"tags": ["python", "typing", "python"]}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"tags": ["python", "typing", "validation"]}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.tags == ["python", "typing", "validation"] @@ -149,7 +149,7 @@ 
class Config: content = '{"tags": ["python", "typing", "validation", "testing", "coding", "extra"]}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py index 5f24a46..eddd5ea 100644 --- a/tests/validators/test_string.py +++ b/tests/validators/test_string.py @@ -4,7 +4,7 @@ import pytest -from dature import Source, load +from dature import JsonSource, load from dature.errors import DatureConfigError from dature.validators.string import MaxLength, MinLength, RegexPattern @@ -18,7 +18,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.name == "Alice" @@ -32,7 +32,7 @@ class Config: content = '{"name": "Bob"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -57,7 +57,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"name": "Alice"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.name == "Alice" @@ -71,7 +71,7 @@ class Config: content = '{"name": "Alexander"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -96,7 +96,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"email": "test@example.com"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.email == "test@example.com" @@ -110,7 +110,7 
@@ class Config: content = '{"email": "invalid-email"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) @@ -135,7 +135,7 @@ class Config: json_file = tmp_path / "config.json" json_file.write_text('{"username": "john_doe"}') - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) result = load(metadata, schema=Config) assert result.username == "john_doe" @@ -149,7 +149,7 @@ class Config: content = '{"username": "this_is_a_very_long_username_that_exceeds_limit"}' json_file.write_text(content) - metadata = Source(file=json_file) + metadata = JsonSource(file=json_file) with pytest.raises(DatureConfigError) as exc_info: load(metadata, schema=Config) diff --git a/tests/validators/test_validators_base.py b/tests/validators/test_validators_base.py new file mode 100644 index 0000000..1f37085 --- /dev/null +++ b/tests/validators/test_validators_base.py @@ -0,0 +1,142 @@ +"""Tests for validators/base.py — extract and create validator providers.""" + +from dataclasses import dataclass +from typing import Annotated + +import pytest + +from dature.field_path import FieldPath +from dature.validators.base import ( + create_metadata_validator_providers, + create_root_validator_providers, + create_validator_providers, + extract_validators_from_type, +) +from dature.validators.number import Ge, Gt +from dature.validators.root import RootValidator +from dature.validators.string import MinLength + + +class TestExtractValidatorsFromType: + def test_plain_type_returns_empty(self): + result = extract_validators_from_type(str) + + assert result == [] + + def test_annotated_without_validators_returns_empty(self): + result = extract_validators_from_type(Annotated[int, "some metadata"]) + + assert result == [] + + def test_annotated_with_validator(self): + result = extract_validators_from_type(Annotated[str, MinLength(3)]) + + assert 
len(result) == 1 + assert isinstance(result[0], MinLength) + + def test_annotated_with_multiple_validators(self): + result = extract_validators_from_type(Annotated[int, Gt(0), Ge(0)]) + + assert len(result) == 2 + + def test_annotated_mixed_metadata_and_validators(self): + result = extract_validators_from_type(Annotated[str, "description", MinLength(1)]) + + assert len(result) == 1 + assert isinstance(result[0], MinLength) + + +class TestCreateValidatorProviders: + def test_creates_providers_from_validators(self): + @dataclass + class Cfg: + name: str + + validators = [MinLength(3)] + result = create_validator_providers(Cfg, "name", validators) + + assert len(result) == 1 + + +class TestCreateMetadataValidatorProviders: + def test_single_field_validator(self): + @dataclass + class Cfg: + name: str + + fp = FieldPath(owner=Cfg, parts=("name",)) + result = create_metadata_validator_providers({fp: MinLength(3)}) + + assert len(result) == 1 + + def test_tuple_validators(self): + @dataclass + class Cfg: + value: int + + fp = FieldPath(owner=Cfg, parts=("value",)) + result = create_metadata_validator_providers({fp: (Gt(0), Ge(0))}) + + assert len(result) == 2 + + def test_empty_field_path_raises(self): + @dataclass + class Cfg: + name: str + + fp = FieldPath(owner=Cfg, parts=()) + + with pytest.raises(ValueError, match="FieldPath must contain at least one field name"): + create_metadata_validator_providers({fp: MinLength(3)}) + + @pytest.mark.parametrize( + "parts", + [("name",), ("inner", "name")], + ids=["single", "nested"], + ) + def test_string_owner_raises(self, parts: tuple[str, ...]): + fp = FieldPath(owner="Cfg", parts=parts) + + with pytest.raises(TypeError, match="string owner"): + create_metadata_validator_providers({fp: MinLength(3)}) + + def test_non_fieldpath_key_raises(self): + with pytest.raises(TypeError, match="validators key must be a FieldPath"): + create_metadata_validator_providers({"name": MinLength(3)}) + + def test_nested_field_path(self): + 
@dataclass + class Inner: + value: str + + @dataclass + class Outer: + inner: Inner + + fp = FieldPath(owner=Outer, parts=("inner", "value")) + result = create_metadata_validator_providers({fp: MinLength(1)}) + + assert len(result) == 1 + + +class TestCreateRootValidatorProviders: + def test_creates_providers(self): + @dataclass + class Cfg: + name: str + + rv = RootValidator(func=lambda _self: True) + result = create_root_validator_providers(Cfg, (rv,)) + + assert len(result) == 1 + + def test_multiple_root_validators(self): + @dataclass + class Cfg: + name: str + + rv1 = RootValidator(func=lambda _self: True) + rv2 = RootValidator(func=lambda _self: True, error_message="custom") + result = create_root_validator_providers(Cfg, (rv1, rv2)) + + assert len(result) == 2 From f624b9b7afcf3778099ea4fd3de02303f1ba3ba1 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 15:55:27 +0300 Subject: [PATCH 29/36] fix subprocess --- tests/test_examples.py | 23 ++++++++++++++++------- 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index fa1280e..ec80722 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -1,5 +1,6 @@ import os import pathlib +import signal import subprocess import sys @@ -9,18 +10,26 @@ example_scripts = sorted(examples_dir.rglob("*.py")) -def _run_example(script_path: pathlib.Path) -> subprocess.CompletedProcess[str]: +def _run_example( + script_path: pathlib.Path, + *, + retries: int = 3, +) -> subprocess.CompletedProcess[str]: env = os.environ.copy() project_root = pathlib.Path(__file__).parent.parent / "src" env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") - return subprocess.run( # noqa: PLW1510, S603 - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - ) + for _ in range(retries): + result = subprocess.run( # noqa: PLW1510, S603 + [sys.executable, str(script_path)], + capture_output=True, + text=True, + 
env=env, + ) + if result.returncode != -signal.SIGSEGV: + return result + return result def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: From cbdb5d037f33cdd52a5b634693664a0ffb86740f Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 15:59:18 +0300 Subject: [PATCH 30/36] fix --- examples/load_all_formats.py | 1 - src/dature/type_utils.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py index e06f5f1..eb6518b 100644 --- a/examples/load_all_formats.py +++ b/examples/load_all_formats.py @@ -26,4 +26,3 @@ config = dature.load(meta, schema=AllPythonTypesCompact) assert config.string_value == "hello world" assert config.integer_value == 42 - assert config.integer_value == 42 diff --git a/src/dature/type_utils.py b/src/dature/type_utils.py index e52a255..1c48fdd 100644 --- a/src/dature/type_utils.py +++ b/src/dature/type_utils.py @@ -11,7 +11,7 @@ def find_nested_dataclasses(field_type: TypeAnnotation) -> list[type]: while queue: current = queue.pop() - if is_dataclass(current): + if is_dataclass(current) and isinstance(current, type): result.append(current) continue From 2e8195c129e2fb78adc192830b01e251237cb1e6 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 16:17:06 +0300 Subject: [PATCH 31/36] fix subprocess --- tests/test_examples.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index ec80722..de3728a 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -1,6 +1,5 @@ import os import pathlib -import signal import subprocess import sys @@ -10,26 +9,21 @@ example_scripts = sorted(examples_dir.rglob("*.py")) -def _run_example( - script_path: pathlib.Path, - *, - retries: int = 3, -) -> subprocess.CompletedProcess[str]: +def _run_example(script_path: pathlib.Path) -> subprocess.CompletedProcess[str]: env = os.environ.copy() 
project_root = pathlib.Path(__file__).parent.parent / "src" env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") - for _ in range(retries): - result = subprocess.run( # noqa: PLW1510, S603 - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - ) - if result.returncode != -signal.SIGSEGV: - return result - return result + # process_group=0 forces posix_spawn instead of fork on macOS, + # avoiding segfaults in subprocess._execute_child (CPython + macOS CI) + return subprocess.run( # noqa: PLW1510, S603 + [sys.executable, str(script_path)], + capture_output=True, + text=True, + env=env, + process_group=0, + ) def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: From 6a7b49cb585401ccbb304dff5b57742329c8f027 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 16:25:12 +0300 Subject: [PATCH 32/36] fix subprocess --- tests/test_examples.py | 32 +++++++++++++++++++++++--------- 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index de3728a..5e5378a 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -2,6 +2,7 @@ import pathlib import subprocess import sys +import tempfile import pytest @@ -15,15 +16,28 @@ def _run_example(script_path: pathlib.Path) -> subprocess.CompletedProcess[str]: project_root = pathlib.Path(__file__).parent.parent / "src" env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") - # process_group=0 forces posix_spawn instead of fork on macOS, - # avoiding segfaults in subprocess._execute_child (CPython + macOS CI) - return subprocess.run( # noqa: PLW1510, S603 - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - process_group=0, - ) + # Redirect to temp files instead of pipes (capture_output=True). + # Pipes force CPython to use fork() instead of posix_spawn(), + # which causes segfaults on macOS CI (Python 3.12-3.14). 
+ with ( + tempfile.TemporaryFile(mode="w+") as stdout_f, + tempfile.TemporaryFile(mode="w+") as stderr_f, + ): + result = subprocess.run( # noqa: PLW1510, S603 + [sys.executable, str(script_path)], + stdout=stdout_f, + stderr=stderr_f, + text=True, + env=env, + ) + stdout_f.seek(0) + stderr_f.seek(0) + return subprocess.CompletedProcess( + args=result.args, + returncode=result.returncode, + stdout=stdout_f.read(), + stderr=stderr_f.read(), + ) def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: From 3898c07a0e7bfc78e6b4bdce990431eedfc76212 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 16:38:14 +0300 Subject: [PATCH 33/36] fix --- tests/test_examples.py | 78 ++++++++++++++++++++++++++++-------------- 1 file changed, 53 insertions(+), 25 deletions(-) diff --git a/tests/test_examples.py b/tests/test_examples.py index 5e5378a..0a0d4bf 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -2,42 +2,70 @@ import pathlib import subprocess import sys -import tempfile +from dataclasses import dataclass import pytest examples_dir = pathlib.Path(__file__).parent.parent / "examples" example_scripts = sorted(examples_dir.rglob("*.py")) +_IS_POSIX = hasattr(os, "posix_spawn") -def _run_example(script_path: pathlib.Path) -> subprocess.CompletedProcess[str]: - env = os.environ.copy() +@dataclass +class ScriptResult: + returncode: int + stderr: str + + +def _run_via_posix_spawn(script_path: pathlib.Path, env: dict[str, str]) -> ScriptResult: + """Use posix_spawn to avoid fork() segfaults on macOS CI.""" + devnull = os.open(os.devnull, os.O_WRONLY) + stderr_r, stderr_w = os.pipe() + + file_actions = [ + (os.POSIX_SPAWN_CLOSE, 0), + (os.POSIX_SPAWN_DUP2, devnull, 1), + (os.POSIX_SPAWN_DUP2, stderr_w, 2), + ] + + pid = os.posix_spawn( + sys.executable, + [sys.executable, str(script_path)], + env, + file_actions=file_actions, + ) + + os.close(devnull) + os.close(stderr_w) + + with os.fdopen(stderr_r) as stderr_f: + stderr = 
stderr_f.read() + + _, wait_status = os.waitpid(pid, 0) + returncode = os.waitstatus_to_exitcode(wait_status) + + return ScriptResult(returncode=returncode, stderr=stderr) + + +def _run_via_subprocess(script_path: pathlib.Path, env: dict[str, str]) -> ScriptResult: + result = subprocess.run( # noqa: PLW1510, S603 + [sys.executable, str(script_path)], + capture_output=True, + text=True, + env=env, + ) + return ScriptResult(returncode=result.returncode, stderr=result.stderr) + + +def _run_example(script_path: pathlib.Path) -> ScriptResult: project_root = pathlib.Path(__file__).parent.parent / "src" + env = os.environ.copy() env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") - # Redirect to temp files instead of pipes (capture_output=True). - # Pipes force CPython to use fork() instead of posix_spawn(), - # which causes segfaults on macOS CI (Python 3.12-3.14). - with ( - tempfile.TemporaryFile(mode="w+") as stdout_f, - tempfile.TemporaryFile(mode="w+") as stderr_f, - ): - result = subprocess.run( # noqa: PLW1510, S603 - [sys.executable, str(script_path)], - stdout=stdout_f, - stderr=stderr_f, - text=True, - env=env, - ) - stdout_f.seek(0) - stderr_f.seek(0) - return subprocess.CompletedProcess( - args=result.args, - returncode=result.returncode, - stdout=stdout_f.read(), - stderr=stderr_f.read(), - ) + if _IS_POSIX: + return _run_via_posix_spawn(script_path, env) + return _run_via_subprocess(script_path, env) def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: From 3ec5798a58dc14a6a30f6c32677bf3fc5536832b Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 16:56:17 +0300 Subject: [PATCH 34/36] fix windows --- .../advanced_field_groups_expansion_error.stderr | 4 ++-- .../advanced_field_groups_multiple_error.stderr | 8 ++++---- .../advanced_field_groups_nested_error.stderr | 4 ++-- .../why-not-dynaconf/dynaconf_root_validators.stderr | 2 +- .../why-not-dynaconf/dynaconf_validators.stderr | 2 +- 
.../docs/comparison/why-not-hydra/hydra_validators.stderr | 2 +- examples/docs/features/masking/masking_by_name.stderr | 2 +- examples/docs/features/masking/masking_heuristic.stderr | 2 +- examples/docs/features/masking/masking_merge_mode.stderr | 2 +- examples/docs/features/masking/masking_no_mask.stderr | 2 +- examples/docs/features/masking/masking_per_source.stderr | 2 +- examples/docs/features/masking/masking_secret_str.stderr | 2 +- .../docs/features/validation/validation_annotated.stderr | 8 ++++---- .../docs/features/validation/validation_custom.stderr | 2 +- .../docs/features/validation/validation_metadata.stderr | 4 ++-- examples/docs/features/validation/validation_root.stderr | 2 +- tests/sources/test_base.py | 2 +- tests/test_examples.py | 4 ++-- 18 files changed, 28 insertions(+), 28 deletions(-) diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr index b690322..100f974 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr +++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr @@ -1,6 +1,6 @@ | dature.errors.exceptions.FieldGroupError: Config field group errors (1) +-+---------------- 1 ---------------- | dature.errors.exceptions.FieldGroupViolationError: Field group (database.host, database.port, port) partially overridden in source 1 - | changed: database.host (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_expansion_error_overrides.yaml'), port (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_expansion_error_overrides.yaml') - | unchanged: database.port (from source yaml1.2 '{SOURCES_DIR}/field_groups_nested_defaults.yaml') + | changed: database.host (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_expansion_error_overrides.yaml'), port (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_expansion_error_overrides.yaml') + | 
unchanged: database.port (from source yaml1.2 '{SOURCES_DIR}field_groups_nested_defaults.yaml') +------------------------------------ diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr index 1e3a6a3..965fc96 100644 --- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr +++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr @@ -1,10 +1,10 @@ | dature.errors.exceptions.FieldGroupError: Config field group errors (2) +-+---------------- 1 ---------------- | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1 - | changed: host (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_multiple_error_overrides.yaml') - | unchanged: port (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + | changed: host (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_multiple_error_overrides.yaml') + | unchanged: port (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml') +---------------- 2 ---------------- | dature.errors.exceptions.FieldGroupViolationError: Field group (user, password) partially overridden in source 1 - | changed: user (from source yaml1.2 '{SOURCES_DIR}/advanced_field_groups_multiple_error_overrides.yaml') - | unchanged: password (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + | changed: user (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_multiple_error_overrides.yaml') + | unchanged: password (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml') +------------------------------------ diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr index e427c26..52fd438 100644 --- 
a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr +++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr @@ -1,6 +1,6 @@ | dature.errors.exceptions.FieldGroupError: Config field group errors (1) +-+---------------- 1 ---------------- | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1 - | changed: host (from source yaml1.2 '{SOURCES_DIR}/field_groups_partial_overrides.yaml') - | unchanged: port (from source yaml1.2 '{SHARED_DIR}/common_field_groups_defaults.yaml') + | changed: host (from source yaml1.2 '{SOURCES_DIR}field_groups_partial_overrides.yaml') + | unchanged: port (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml') +------------------------------------ diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr index c6583ad..9035282 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr @@ -1,5 +1,5 @@ | dature.errors.exceptions.DatureConfigError: Config loading errors (1) +-+---------------- 1 ---------------- | dature.errors.exceptions.FieldLoadError: [] debug mode should not use port 80 - | └── FILE '{SOURCES_DIR}/dynaconf_root_validators_invalid.toml' + | └── FILE '{SOURCES_DIR}dynaconf_root_validators_invalid.toml' +------------------------------------ diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr index 5acfd40..a119963 100644 --- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr +++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0 | ├── port = -1 | │ ^^ - | └── 
FILE '{SOURCES_DIR}/dynaconf_validators_invalid.toml', line 2 + | └── FILE '{SOURCES_DIR}dynaconf_validators_invalid.toml', line 2 +------------------------------------ diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.stderr b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr index 3f028a2..84ae71e 100644 --- a/examples/docs/comparison/why-not-hydra/hydra_validators.stderr +++ b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0 | ├── port: -1 | │ ^^ - | └── FILE '{SOURCES_DIR}/hydra_validators_invalid.yaml', line 2 + | └── FILE '{SOURCES_DIR}hydra_validators_invalid.yaml', line 2 +------------------------------------ diff --git a/examples/docs/features/masking/masking_by_name.stderr b/examples/docs/features/masking/masking_by_name.stderr index 50473e9..e6d1d6b 100644 --- a/examples/docs/features/masking/masking_by_name.stderr +++ b/examples/docs/features/masking/masking_by_name.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [password] Invalid variant: '' | ├── password: "" | │ ^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/masking_by_name.yaml', line 1 + | └── FILE '{SOURCES_DIR}masking_by_name.yaml', line 1 +------------------------------------ diff --git a/examples/docs/features/masking/masking_heuristic.stderr b/examples/docs/features/masking/masking_heuristic.stderr index 67151ba..3059a5c 100644 --- a/examples/docs/features/masking/masking_heuristic.stderr +++ b/examples/docs/features/masking/masking_heuristic.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [connection_id] Invalid variant: '' | ├── connection_id: "" | │ ^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/masking_heuristic.yaml', line 1 + | └── FILE '{SOURCES_DIR}masking_heuristic.yaml', line 1 +------------------------------------ diff --git a/examples/docs/features/masking/masking_merge_mode.stderr 
b/examples/docs/features/masking/masking_merge_mode.stderr index ce627aa..e5c416c 100644 --- a/examples/docs/features/masking/masking_merge_mode.stderr +++ b/examples/docs/features/masking/masking_merge_mode.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters | ├── api_key: "" | │ ^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/masking_merge_mode_secrets.yaml', line 1 + | └── FILE '{SOURCES_DIR}masking_merge_mode_secrets.yaml', line 1 +------------------------------------ diff --git a/examples/docs/features/masking/masking_no_mask.stderr b/examples/docs/features/masking/masking_no_mask.stderr index bd35ea3..27375d2 100644 --- a/examples/docs/features/masking/masking_no_mask.stderr +++ b/examples/docs/features/masking/masking_no_mask.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters | ├── api_key: "short" | │ ^^^^^ - | └── FILE '{SOURCES_DIR}/masking_per_source.yaml', line 1 + | └── FILE '{SOURCES_DIR}masking_per_source.yaml', line 1 +------------------------------------ diff --git a/examples/docs/features/masking/masking_per_source.stderr b/examples/docs/features/masking/masking_per_source.stderr index c4a1368..c787795 100644 --- a/examples/docs/features/masking/masking_per_source.stderr +++ b/examples/docs/features/masking/masking_per_source.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters | ├── api_key: "" | │ ^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/masking_per_source.yaml', line 1 + | └── FILE '{SOURCES_DIR}masking_per_source.yaml', line 1 +------------------------------------ diff --git a/examples/docs/features/masking/masking_secret_str.stderr b/examples/docs/features/masking/masking_secret_str.stderr index ec31dac..b1fc7ed 100644 --- a/examples/docs/features/masking/masking_secret_str.stderr +++ b/examples/docs/features/masking/masking_secret_str.stderr @@ -3,5 +3,5 @@ | 
dature.errors.exceptions.FieldLoadError: [card_number] Card number must contain only digits | ├── card_number: "" | │ ^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/masking_secret_str.yaml', line 2 + | └── FILE '{SOURCES_DIR}masking_secret_str.yaml', line 2 +------------------------------------ diff --git a/examples/docs/features/validation/validation_annotated.stderr b/examples/docs/features/validation/validation_annotated.stderr index b9babba..a0a941e 100644 --- a/examples/docs/features/validation/validation_annotated.stderr +++ b/examples/docs/features/validation/validation_annotated.stderr @@ -3,20 +3,20 @@ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1 | ├── port: 0, | │ ^ - | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 3 + | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 3 +---------------- 2 ---------------- | dature.errors.exceptions.FieldLoadError: [name] Value must have at least 3 characters | ├── name: "ab", | │ ^^ - | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 4 + | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 4 +---------------- 3 ---------------- | dature.errors.exceptions.FieldLoadError: [tags] Value must contain unique items | ├── tags: ["web", "web"], | │ ^^^^^^^^^^^^^^ - | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 5 + | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 5 +---------------- 4 ---------------- | dature.errors.exceptions.FieldLoadError: [workers] Value must be greater than or equal to 1 | ├── workers: 0, | │ ^ - | └── FILE '{SOURCES_DIR}/validation_annotated_invalid.json5', line 6 + | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 6 +------------------------------------ diff --git a/examples/docs/features/validation/validation_custom.stderr b/examples/docs/features/validation/validation_custom.stderr index f9fbfad..89acce4 100644 --- 
a/examples/docs/features/validation/validation_custom.stderr +++ b/examples/docs/features/validation/validation_custom.stderr @@ -3,5 +3,5 @@ | dature.errors.exceptions.FieldLoadError: [workers] Value must be divisible by 2 | ├── workers: 3, | │ ^ - | └── FILE '{SOURCES_DIR}/validation_custom_invalid.json5', line 5 + | └── FILE '{SOURCES_DIR}validation_custom_invalid.json5', line 5 +------------------------------------ diff --git a/examples/docs/features/validation/validation_metadata.stderr b/examples/docs/features/validation/validation_metadata.stderr index db21bb5..995c048 100644 --- a/examples/docs/features/validation/validation_metadata.stderr +++ b/examples/docs/features/validation/validation_metadata.stderr @@ -3,10 +3,10 @@ | dature.errors.exceptions.FieldLoadError: [host] Value must have at least 1 characters | ├── host: "" | │ ^^ - | └── FILE '{SOURCES_DIR}/validation_metadata_invalid.yaml', line 1 + | └── FILE '{SOURCES_DIR}validation_metadata_invalid.yaml', line 1 +---------------- 2 ---------------- | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1 | ├── port: 0 | │ ^ - | └── FILE '{SOURCES_DIR}/validation_metadata_invalid.yaml', line 2 + | └── FILE '{SOURCES_DIR}validation_metadata_invalid.yaml', line 2 +------------------------------------ diff --git a/examples/docs/features/validation/validation_root.stderr b/examples/docs/features/validation/validation_root.stderr index cceaa7f..37cc95e 100644 --- a/examples/docs/features/validation/validation_root.stderr +++ b/examples/docs/features/validation/validation_root.stderr @@ -1,5 +1,5 @@ | dature.errors.exceptions.DatureConfigError: Config loading errors (1) +-+---------------- 1 ---------------- | dature.errors.exceptions.FieldLoadError: [] debug=True is not allowed on non-localhost hosts - | └── FILE '{SOURCES_DIR}/validation_root_invalid.yaml' + | └── FILE '{SOURCES_DIR}validation_root_invalid.yaml' +------------------------------------ diff --git 
a/tests/sources/test_base.py b/tests/sources/test_base.py index 1a78e64..83293a7 100644 --- a/tests/sources/test_base.py +++ b/tests/sources/test_base.py @@ -596,7 +596,7 @@ class Src(FileFieldMixin): src = Src(file=Path("/data/test.json")) src._init_file_field() - assert src.file == "/data/test.json" + assert src.file == str(Path("/data/test.json")) assert isinstance(src.file, str) def test_init_file_field_none(self): diff --git a/tests/test_examples.py b/tests/test_examples.py index 0a0d4bf..ff4a006 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -69,8 +69,8 @@ def _run_example(script_path: pathlib.Path) -> ScriptResult: def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str: - sources_dir = str(script_path.parent / "sources") - shared_dir = str(script_path.parents[2] / "shared") + sources_dir = str(script_path.parent / "sources") + os.sep + shared_dir = str(script_path.parents[2] / "shared") + os.sep return template.replace("{SOURCES_DIR}", sources_dir).replace("{SHARED_DIR}", shared_dir) From 5a6099deccca56518bb6aa5a7a95b47788dc5b26 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 17:02:49 +0300 Subject: [PATCH 35/36] fix windows --- tests/test_examples.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_examples.py b/tests/test_examples.py index ff4a006..c03d911 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -62,6 +62,7 @@ def _run_example(script_path: pathlib.Path) -> ScriptResult: project_root = pathlib.Path(__file__).parent.parent / "src" env = os.environ.copy() env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") + env["PYTHONIOENCODING"] = "utf-8" if _IS_POSIX: return _run_via_posix_spawn(script_path, env) From cc86c4285ccc18542edeabf90cffae1a146949e6 Mon Sep 17 00:00:00 2001 From: niccolum Date: Tue, 7 Apr 2026 17:27:19 +0300 Subject: [PATCH 36/36] fix devin comments --- changes/+remove-source-mutations.refactor | 2 +- 
src/dature/loading/source_loading.py | 2 +- src/dature/sources/base.py | 22 ++-- src/dature/sources/retort.py | 24 ++-- tests/loading/test_context.py | 11 +- tests/loading/test_source_loading.py | 31 +++++ tests/sources/test_retort.py | 140 ++++++++++++++++++++-- 7 files changed, 198 insertions(+), 34 deletions(-) diff --git a/changes/+remove-source-mutations.refactor b/changes/+remove-source-mutations.refactor index 79b8816..6549f3b 100644 --- a/changes/+remove-source-mutations.refactor +++ b/changes/+remove-source-mutations.refactor @@ -1 +1 @@ -Source objects are no longer mutated during ``load()``. All parameter resolution happens via ``resolve_source_params()`` in ``source_loading.py``. \ No newline at end of file +Source user-facing attributes are no longer mutated during ``load()``. All parameter resolution happens via ``resolve_source_params()`` in ``source_loading.py``. The ``retorts`` cache is still populated lazily during loading. \ No newline at end of file diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py index ec8fef0..f144f20 100644 --- a/src/dature/loading/source_loading.py +++ b/src/dature/loading/source_loading.py @@ -66,7 +66,7 @@ def resolve_source_params( resolved_type_loaders = merged_loaders or None resolved_nested_strategy: NestedResolveStrategy = config.loading.nested_resolve_strategy - if isinstance(source, FlatKeySource) and source.nested_resolve_strategy != "flat": + if isinstance(source, FlatKeySource) and source.nested_resolve_strategy is not None: resolved_nested_strategy = source.nested_resolve_strategy elif load_nested_resolve_strategy is not None: resolved_nested_strategy = load_nested_resolve_strategy diff --git a/src/dature/sources/base.py b/src/dature/sources/base.py index e9717ae..96df606 100644 --- a/src/dature/sources/base.py +++ b/src/dature/sources/base.py @@ -4,7 +4,7 @@ from dataclasses import dataclass, field from datetime import date, datetime, time from pathlib import Path -from 
typing import TYPE_CHECKING, ClassVar, cast +from typing import TYPE_CHECKING, Any, ClassVar, cast from adaptix import Retort, loader from adaptix.provider import Provider @@ -85,7 +85,11 @@ class Source(abc.ABC): location_label: ClassVar[str] path_finder_class: ClassVar[type[PathFinder] | None] = None - retorts: dict[type, Retort] = field(default_factory=dict, init=False, repr=False) + retorts: dict[tuple[type, frozenset[tuple[type, Any]]], Retort] = field( + default_factory=dict, + init=False, + repr=False, + ) def __repr__(self) -> str: return self.format_name @@ -306,7 +310,7 @@ def _load_file(self, path: FileOrStream) -> JSONValue: ... @dataclass(kw_only=True, repr=False) class FlatKeySource(Source, abc.ABC): split_symbols: str = "__" - nested_resolve_strategy: NestedResolveStrategy = "flat" + nested_resolve_strategy: "NestedResolveStrategy | None" = None nested_resolve: NestedResolve | None = None # --8<-- [end:flat-key-source] @@ -396,7 +400,7 @@ def load_raw( self, *, resolved_expand: ExpandEnvVarsMode = "default", - resolved_nested_strategy: NestedResolveStrategy | None = None, + resolved_nested_strategy: NestedResolveStrategy = "flat", resolved_nested_resolve: NestedResolve | None = None, ) -> LoadRawResult: data = self._load() @@ -404,20 +408,14 @@ def load_raw( result: dict[str, JSONValue] = {} conflicts: NestedConflicts = {} - effective_nested_strategy: NestedResolveStrategy = self.nested_resolve_strategy - if self.nested_resolve_strategy == "flat" and resolved_nested_strategy is not None: - effective_nested_strategy = resolved_nested_strategy - - effective_nested_resolve = self.nested_resolve if self.nested_resolve is not None else resolved_nested_resolve - for key, value in data_dict.items(): self._pre_process_row( key=key, value=value, result=result, conflicts=conflicts, - resolved_nested_strategy=effective_nested_strategy, - resolved_nested_resolve=effective_nested_resolve, + resolved_nested_strategy=resolved_nested_strategy, + 
resolved_nested_resolve=resolved_nested_resolve, ) expanded = expand_env_vars(result, mode=resolved_expand) diff --git a/src/dature/sources/retort.py b/src/dature/sources/retort.py index 5257374..2552eaf 100644 --- a/src/dature/sources/retort.py +++ b/src/dature/sources/retort.py @@ -1,6 +1,6 @@ from dataclasses import fields from datetime import timedelta -from typing import TYPE_CHECKING, cast, get_type_hints +from typing import TYPE_CHECKING, Any, cast, get_type_hints from adaptix import NameStyle as AdaptixNameStyle from adaptix import Retort, loader, name_mapping @@ -198,6 +198,14 @@ def create_validating_retort[T]( ) +def _retort_cache_key( + schema: type, + resolved_type_loaders: "TypeLoaderMap | None", +) -> tuple[type, frozenset[tuple[type, Any]]]: + loaders_key = frozenset(resolved_type_loaders.items()) if resolved_type_loaders is not None else frozenset() + return (schema, loaders_key) + + def transform_to_dataclass[T]( source: "Source", data: "JSONValue", @@ -205,9 +213,10 @@ def transform_to_dataclass[T]( *, resolved_type_loaders: "TypeLoaderMap | None" = None, ) -> T: - if schema not in source.retorts: - source.retorts[schema] = create_retort(source, resolved_type_loaders=resolved_type_loaders) - return source.retorts[schema].load(data, schema) + key = _retort_cache_key(schema, resolved_type_loaders) + if key not in source.retorts: + source.retorts[key] = create_retort(source, resolved_type_loaders=resolved_type_loaders) + return source.retorts[key].load(data, schema) def ensure_retort( @@ -216,6 +225,7 @@ def ensure_retort( *, resolved_type_loaders: "TypeLoaderMap | None" = None, ) -> None: - if cls not in source.retorts: - source.retorts[cls] = create_retort(source, resolved_type_loaders=resolved_type_loaders) - source.retorts[cls].get_loader(cls) + key = _retort_cache_key(cls, resolved_type_loaders) + if key not in source.retorts: + source.retorts[key] = create_retort(source, resolved_type_loaders=resolved_type_loaders) + 
source.retorts[key].get_loader(cls) diff --git a/tests/loading/test_context.py b/tests/loading/test_context.py index 772bf93..0ba8804 100644 --- a/tests/loading/test_context.py +++ b/tests/loading/test_context.py @@ -19,7 +19,7 @@ ) from dature.sources.env_ import EnvSource from dature.sources.json_ import JsonSource -from dature.sources.retort import ensure_retort +from dature.sources.retort import _retort_cache_key, ensure_retort class TestMergeFields: @@ -232,14 +232,15 @@ class Cfg: name: str source = JsonSource(file=json_file) - assert Cfg not in source.retorts + key = _retort_cache_key(Cfg, None) + assert key not in source.retorts ensure_retort(source, Cfg) - assert Cfg in source.retorts + assert key in source.retorts - first = source.retorts[Cfg] + first = source.retorts[key] ensure_retort(source, Cfg) - assert source.retorts[Cfg] is first + assert source.retorts[key] is first class TestMakeValidatingPostInit: diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py index df653b3..adb05b6 100644 --- a/tests/loading/test_source_loading.py +++ b/tests/loading/test_source_loading.py @@ -15,6 +15,7 @@ resolve_mask_secrets, resolve_secret_field_names, resolve_skip_invalid, + resolve_source_params, should_skip_broken, ) from dature.sources.env_ import EnvSource @@ -553,3 +554,33 @@ class Cfg: assert result.cleaned_dict == raw assert result.skipped_paths == [] + + +class TestResolveSourceParamsNestedStrategy: + @pytest.mark.parametrize( + ("source_strategy", "load_strategy", "expected"), + [ + (None, "json", "json"), + ("flat", "json", "flat"), + ("json", "flat", "json"), + (None, None, "flat"), + ], + ids=[ + "source-none-uses-load-level", + "source-explicit-flat-overrides-load-level", + "source-explicit-json-overrides-load-level", + "source-none-no-load-level-uses-config-default", + ], + ) + def test_resolve( + self, + source_strategy: str | None, + load_strategy: str | None, + expected: str, + ): + kwargs = {} if source_strategy 
is None else {"nested_resolve_strategy": source_strategy} + source = EnvSource(**kwargs) + + resolved = resolve_source_params(source, load_nested_resolve_strategy=load_strategy) + + assert resolved.nested_resolve_strategy == expected diff --git a/tests/sources/test_retort.py b/tests/sources/test_retort.py index 6b63104..5ac326c 100644 --- a/tests/sources/test_retort.py +++ b/tests/sources/test_retort.py @@ -7,6 +7,7 @@ from dature.field_path import F from dature.sources.base import Source from dature.sources.retort import ( + _retort_cache_key, build_base_recipe, create_probe_retort, create_retort, @@ -213,6 +214,56 @@ def get_error_message(self): assert isinstance(result, Retort) +class TestRetortCacheKey: + def test_none_loaders_produces_empty_frozenset(self): + @dataclass + class Config: + name: str + + key = _retort_cache_key(Config, None) + + assert key == (Config, frozenset()) + + def test_same_loaders_produce_equal_keys(self): + @dataclass + class Config: + name: str + + loaders = {str: lambda x: x} + + key1 = _retort_cache_key(Config, loaders) + key2 = _retort_cache_key(Config, loaders) + + assert key1 == key2 + + def test_different_loaders_produce_different_keys(self): + @dataclass + class Config: + name: str + + loaders_a = {str: lambda x: x} + loaders_b = {int: lambda x: x} + + key_a = _retort_cache_key(Config, loaders_a) + key_b = _retort_cache_key(Config, loaders_b) + + assert key_a != key_b + + def test_different_schemas_produce_different_keys(self): + @dataclass + class ConfigA: + name: str + + @dataclass + class ConfigB: + name: str + + key_a = _retort_cache_key(ConfigA, None) + key_b = _retort_cache_key(ConfigB, None) + + assert key_a != key_b + + class TestTransformToDataclass: def test_basic_transform(self): @dataclass @@ -233,11 +284,12 @@ class Config: name: str source = MockSource() - assert Config not in source.retorts + key = _retort_cache_key(Config, None) + assert key not in source.retorts transform_to_dataclass(source, {"name": "a"}, 
Config) - assert Config in source.retorts + assert key in source.retorts def test_reuses_cached_retort(self): @dataclass @@ -246,11 +298,63 @@ class Config: source = MockSource() transform_to_dataclass(source, {"name": "a"}, Config) - cached = source.retorts[Config] + key = _retort_cache_key(Config, None) + cached = source.retorts[key] transform_to_dataclass(source, {"name": "b"}, Config) - assert source.retorts[Config] is cached + assert source.retorts[key] is cached + + def test_different_type_loaders_create_separate_cache_entries(self): + @dataclass + class Config: + name: str + + source = MockSource() + loaders_a = {str: lambda x: str(x).upper()} + loaders_b = {str: lambda x: str(x).lower()} + + transform_to_dataclass(source, {"name": "hello"}, Config, resolved_type_loaders=loaders_a) + transform_to_dataclass(source, {"name": "hello"}, Config, resolved_type_loaders=loaders_b) + + key_a = _retort_cache_key(Config, loaders_a) + key_b = _retort_cache_key(Config, loaders_b) + assert key_a in source.retorts + assert key_b in source.retorts + assert source.retorts[key_a] is not source.retorts[key_b] + + def test_type_loaders_vs_none_create_separate_cache_entries(self): + @dataclass + class Config: + name: str + + source = MockSource() + custom_loaders = {str: lambda x: str(x).upper()} + + transform_to_dataclass(source, {"name": "a"}, Config) + transform_to_dataclass(source, {"name": "a"}, Config, resolved_type_loaders=custom_loaders) + + key_none = _retort_cache_key(Config, None) + key_custom = _retort_cache_key(Config, custom_loaders) + assert key_none in source.retorts + assert key_custom in source.retorts + assert source.retorts[key_none] is not source.retorts[key_custom] + + def test_same_type_loaders_reuse_cached_retort(self): + @dataclass + class Config: + name: str + + source = MockSource() + custom_loaders = {str: lambda x: str(x).upper()} + + transform_to_dataclass(source, {"name": "a"}, Config, resolved_type_loaders=custom_loaders) + key = 
_retort_cache_key(Config, custom_loaders) + cached = source.retorts[key] + + transform_to_dataclass(source, {"name": "b"}, Config, resolved_type_loaders=custom_loaders) + + assert source.retorts[key] is cached class TestEnsureRetort: @@ -260,11 +364,12 @@ class Config: name: str source = MockSource() - assert Config not in source.retorts + key = _retort_cache_key(Config, None) + assert key not in source.retorts ensure_retort(source, Config) - assert Config in source.retorts + assert key in source.retorts def test_does_not_overwrite_existing(self): @dataclass @@ -273,8 +378,27 @@ class Config: source = MockSource() ensure_retort(source, Config) - existing = source.retorts[Config] + key = _retort_cache_key(Config, None) + existing = source.retorts[key] ensure_retort(source, Config) - assert source.retorts[Config] is existing + assert source.retorts[key] is existing + + def test_different_type_loaders_create_separate_cache_entries(self): + @dataclass + class Config: + name: str + + source = MockSource() + loaders_a = {str: lambda x: str(x).upper()} + loaders_b = {str: lambda x: str(x).lower()} + + ensure_retort(source, Config, resolved_type_loaders=loaders_a) + ensure_retort(source, Config, resolved_type_loaders=loaders_b) + + key_a = _retort_cache_key(Config, loaders_a) + key_b = _retort_cache_key(Config, loaders_b) + assert key_a in source.retorts + assert key_b in source.retorts + assert source.retorts[key_a] is not source.retorts[key_b]