diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 1eabe43..452aa35 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -3,8 +3,7 @@ updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
- interval: "weekly"
- day: "saturday"
+ interval: "monthly"
time: "09:00"
timezone: "Europe/Moscow"
open-pull-requests-limit: 10
@@ -16,8 +15,7 @@ updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
- interval: "weekly"
- day: "saturday"
+ interval: "monthly"
time: "09:00"
timezone: "Europe/Moscow"
labels:
diff --git a/README.md b/README.md
index 30b602f..6e69dd8 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,8 @@
-# dature
+
----
+

-
+---
[](https://pypi.org/project/dature/)
[](https://pypi.org/project/dature/)
@@ -44,7 +44,8 @@ pip install dature[secure] # Secret detection heuristics
```python
from dataclasses import dataclass
-from dature import Source, load
+
+import dature
@dataclass
class Config:
@@ -52,20 +53,20 @@ class Config:
port: int
debug: bool = False
-config = load(Source(file_="config.yaml"), Config)
+config = dature.load(dature.Yaml12Source(file="config.yaml"), schema=Config)
```
## Key Features
- **Multiple formats** — YAML, JSON, JSON5, TOML, INI, ENV, environment variables, Docker secrets
-- **Merging** — combine multiple sources with configurable strategies (`LAST_WINS`, `FIRST_WINS`, `RAISE_ON_CONFLICT`)
+- **Merging** — combine multiple sources with configurable strategies (`"last_wins"`, `"first_wins"`, `"first_found"`, `"raise_on_conflict"`)
- **Validation** — `Annotated` field validators, root validators, `__post_init__` support
- **Naming** — automatic field name mapping (`snake_case` ↔ `camelCase` ↔ `UPPER_SNAKE` etc.)
- **Secret masking** — automatic masking in error messages and logs by field type, name, or heuristic
- **ENV expansion** — `$VAR`, `${VAR:-default}` substitution in all file formats
- **Special types** — `SecretStr`, `ByteSize`, `PaymentCardNumber`, `URL`, `Base64UrlStr`
- **Debug report** — `debug=True` shows which source provided each field value
-- **Decorator mode** — `@load(meta)` auto-loads config on dataclass instantiation with caching
+- **Decorator mode** — `@dature.load(meta)` auto-loads config on dataclass instantiation with caching
See the **[documentation](https://dature.readthedocs.io/)** for detailed guides and API reference.
diff --git a/changes/+common-loading.refactor b/changes/+common-loading.refactor
new file mode 100644
index 0000000..84253d2
--- /dev/null
+++ b/changes/+common-loading.refactor
@@ -0,0 +1 @@
+Extracted shared ``resolve_mask_secrets`` logic from ``single.py`` and ``multi.py`` into ``loading/common.py``.
\ No newline at end of file
diff --git a/changes/+concrete-sources-exported.feature b/changes/+concrete-sources-exported.feature
new file mode 100644
index 0000000..cfae81b
--- /dev/null
+++ b/changes/+concrete-sources-exported.feature
@@ -0,0 +1 @@
+All concrete source classes (``EnvSource``, ``JsonSource``, ``Yaml11Source``, ``Yaml12Source``, ``Toml10Source``, ``Toml11Source``, ``IniSource``, ``Json5Source``, ``EnvFileSource``, ``DockerSecretsSource``, ``FileSource``) are now exported from ``dature`` directly.
diff --git a/changes/+docs-tuning.doc b/changes/+docs-tuning.doc
new file mode 100644
index 0000000..11f3c95
--- /dev/null
+++ b/changes/+docs-tuning.doc
@@ -0,0 +1 @@
+Improved documentation for Caching, Merge Rules, Configure, Custom Types, and Field Groups sections.
diff --git a/changes/+file-path-expansion.feature b/changes/+file-path-expansion.feature
index 0fa9a0c..1ee561c 100644
--- a/changes/+file-path-expansion.feature
+++ b/changes/+file-path-expansion.feature
@@ -1 +1 @@
-Environment variables in `Source(file_=...)` are now expanded automatically in strict mode. Both directory paths (`$CONFIG_DIR/config.toml`) and file names (`config.$APP_ENV.toml`) are supported.
+Environment variables in `Source(file=...)` are now expanded automatically in strict mode. Both directory paths (`$CONFIG_DIR/config.toml`) and file names (`config.$APP_ENV.toml`) are supported.
diff --git a/changes/+fix-merge-conflict-filecontent.bugfix b/changes/+fix-merge-conflict-filecontent.bugfix
new file mode 100644
index 0000000..2b12276
--- /dev/null
+++ b/changes/+fix-merge-conflict-filecontent.bugfix
@@ -0,0 +1 @@
+Fixed attribute name typo (``filecontent`` → ``file_content``) in ``raise_on_conflict`` merge strategy that caused ``AttributeError`` when conflicting fields were detected.
diff --git a/changes/+import-style.refactor b/changes/+import-style.refactor
new file mode 100644
index 0000000..2270769
--- /dev/null
+++ b/changes/+import-style.refactor
@@ -0,0 +1 @@
+Recommended import style changed from `from dature import load, Source` to `import dature` with access via `dature.load()`, `dature.Source()`.
diff --git a/changes/+internal-enum-hints.refactor b/changes/+internal-enum-hints.refactor
new file mode 100644
index 0000000..5f09956
--- /dev/null
+++ b/changes/+internal-enum-hints.refactor
@@ -0,0 +1 @@
+Internal type hints now use `MergeStrategyEnum`/`FieldMergeStrategyEnum` instead of `MergeStrategyName`/`FieldMergeStrategyName` Literal aliases. Public API type hints remain unchanged.
diff --git a/changes/+merge-config-public.refactor b/changes/+merge-config-public.refactor
new file mode 100644
index 0000000..62747cb
--- /dev/null
+++ b/changes/+merge-config-public.refactor
@@ -0,0 +1 @@
+Renamed ``_MergeConfig`` to ``MergeConfig``.
\ No newline at end of file
diff --git a/changes/+normalize-metadata-naming.refactor b/changes/+normalize-metadata-naming.refactor
new file mode 100644
index 0000000..caaa03e
--- /dev/null
+++ b/changes/+normalize-metadata-naming.refactor
@@ -0,0 +1 @@
+Renamed ``metadata``/``source_meta`` parameters to ``source`` throughout the loading module.
\ No newline at end of file
diff --git a/changes/+remove-loader-protocol.removal b/changes/+remove-loader-protocol.removal
new file mode 100644
index 0000000..f298562
--- /dev/null
+++ b/changes/+remove-loader-protocol.removal
@@ -0,0 +1 @@
+Removed ``LoaderProtocol`` from ``dature.protocols``. Source classes now handle loading internally.
diff --git a/changes/+remove-merge-class.refactor b/changes/+remove-merge-class.refactor
new file mode 100644
index 0000000..03b850d
--- /dev/null
+++ b/changes/+remove-merge-class.refactor
@@ -0,0 +1 @@
+`Merge` class has been removed. Use `load()` with multiple `Source` arguments instead.
diff --git a/changes/+remove-source-mutations.refactor b/changes/+remove-source-mutations.refactor
new file mode 100644
index 0000000..6549f3b
--- /dev/null
+++ b/changes/+remove-source-mutations.refactor
@@ -0,0 +1 @@
+Source user-facing attributes are no longer mutated during ``load()``. All parameter resolution happens via ``resolve_source_params()`` in ``source_loading.py``. The ``retorts`` cache is still populated lazily during loading.
\ No newline at end of file
diff --git a/changes/+rename-display-name.refactor b/changes/+rename-display-name.refactor
new file mode 100644
index 0000000..14b6ee8
--- /dev/null
+++ b/changes/+rename-display-name.refactor
@@ -0,0 +1 @@
+Renamed ``display_name`` to ``format_name`` and ``display_label`` to ``location_label`` across all source classes and error types.
\ No newline at end of file
diff --git a/changes/+rename-file-param.refactor b/changes/+rename-file-param.refactor
new file mode 100644
index 0000000..b143f9c
--- /dev/null
+++ b/changes/+rename-file-param.refactor
@@ -0,0 +1 @@
+`Source(file_=...)` has been renamed to `Source(file=...)`.
diff --git a/changes/+rename-sources-loader-package.refactor b/changes/+rename-sources-loader-package.refactor
new file mode 100644
index 0000000..5b0e656
--- /dev/null
+++ b/changes/+rename-sources-loader-package.refactor
@@ -0,0 +1 @@
+Renamed internal package ``sources_loader`` to ``sources`` (source classes) and ``loaders`` (type conversion). All public imports from ``dature`` are unchanged.
diff --git a/changes/+retort-factory.refactor b/changes/+retort-factory.refactor
new file mode 100644
index 0000000..73db551
--- /dev/null
+++ b/changes/+retort-factory.refactor
@@ -0,0 +1 @@
+Extracted retort factory methods from ``Source`` into free functions in ``sources/retort.py``. ``transform_to_dataclass`` is now a free function.
\ No newline at end of file
diff --git a/changes/+simplify-configure.refactor b/changes/+simplify-configure.refactor
new file mode 100644
index 0000000..72620f9
--- /dev/null
+++ b/changes/+simplify-configure.refactor
@@ -0,0 +1 @@
+`configure()` now accepts dicts instead of dataclass instances: `masking={"mask": "***"}`, `error_display={"max_visible_lines": 5}`, `loading={"debug": True}`, `type_loaders={MyType: my_loader}`.
diff --git a/changes/+simplify-enums.removal b/changes/+simplify-enums.removal
new file mode 100644
index 0000000..8adfc48
--- /dev/null
+++ b/changes/+simplify-enums.removal
@@ -0,0 +1 @@
+Removed `MergeStrategy` and `FieldMergeStrategy` enums from public API. Use string literals instead: `"last_wins"`, `"first_wins"`, `"first_found"`, `"raise_on_conflict"` for merge strategies; `"first_wins"`, `"last_wins"`, `"append"`, `"append_unique"`, `"prepend"`, `"prepend_unique"` for field merge strategies.
diff --git a/changes/+simplify-field-group.removal b/changes/+simplify-field-group.removal
new file mode 100644
index 0000000..7c216fc
--- /dev/null
+++ b/changes/+simplify-field-group.removal
@@ -0,0 +1 @@
+Removed `FieldGroup` dataclass from public API. Pass `field_groups` as `tuple[tuple[F[Config].field, ...], ...]` instead.
diff --git a/changes/+simplify-merge-rule.removal b/changes/+simplify-merge-rule.removal
new file mode 100644
index 0000000..6039352
--- /dev/null
+++ b/changes/+simplify-merge-rule.removal
@@ -0,0 +1 @@
+Removed `MergeRule` dataclass from public API. Pass `field_merges` as `dict` mapping `F[Config].field` to a strategy string or callable instead.
diff --git a/changes/+simplify-type-loader.removal b/changes/+simplify-type-loader.removal
new file mode 100644
index 0000000..8e36483
--- /dev/null
+++ b/changes/+simplify-type-loader.removal
@@ -0,0 +1 @@
+Removed `TypeLoader` dataclass from public API. Pass `type_loaders` as `dict[type, Callable]` instead.
diff --git a/changes/+simplify-validator-args.refactor b/changes/+simplify-validator-args.refactor
new file mode 100644
index 0000000..cb72ee0
--- /dev/null
+++ b/changes/+simplify-validator-args.refactor
@@ -0,0 +1 @@
+Built-in validators (`Ge`, `Le`, `Gt`, `Lt`, `MinLength`, `MaxLength`, `RegexPattern`, `MinItems`, `MaxItems`, `UniqueItems`) now accept `value` as a positional argument: `Ge(1)` instead of `Ge(value=1)`. `RootValidator` now accepts `func` as a positional argument: `RootValidator(check)` instead of `RootValidator(func=check)`. `error_message` remains keyword-only in all validators.
diff --git a/changes/+type-utils.refactor b/changes/+type-utils.refactor
new file mode 100644
index 0000000..73d7cf6
--- /dev/null
+++ b/changes/+type-utils.refactor
@@ -0,0 +1 @@
+Deduplicated ``_find_nested_dataclasses`` into shared ``type_utils.find_nested_dataclasses``.
\ No newline at end of file
diff --git a/changes/+unit-tests.misc b/changes/+unit-tests.misc
new file mode 100644
index 0000000..0bf969d
--- /dev/null
+++ b/changes/+unit-tests.misc
@@ -0,0 +1 @@
+Added unit tests for ``loading/context``, ``loading/source_loading``, ``masking/detection``, ``validators/base``, ``loaders/common``, ``loaders/base``.
diff --git a/docs/advanced/caching.md b/docs/advanced/caching.md
index 0344a0f..5ddaa64 100644
--- a/docs/advanced/caching.md
+++ b/docs/advanced/caching.md
@@ -2,8 +2,16 @@
In decorator mode, caching is enabled by default:
-```python
---8<-- "examples/docs/advanced/caching/advanced_caching.py"
-```
+=== "cache=True"
+
+ ```python
+ --8<-- "examples/docs/advanced/caching/advanced_caching_enabled.py"
+ ```
+
+=== "cache=False"
+
+ ```python
+ --8<-- "examples/docs/advanced/caching/advanced_caching_disabled.py"
+ ```
Caching can also be configured globally via `configure()`.
diff --git a/docs/advanced/configure.md b/docs/advanced/configure.md
index 1aebf26..c508bc1 100644
--- a/docs/advanced/configure.md
+++ b/docs/advanced/configure.md
@@ -42,7 +42,7 @@ Customize defaults for the entire application — programmatically or via enviro
### type_loaders
-Register global custom type loaders that apply to all `load()` calls. See [Custom Types & Loaders](custom_types.md#per-source-vs-global).
+Register global custom type loaders that apply to all `dature.load()` calls. See [Custom Types & Loaders](custom_types.md#per-source-vs-global).
## Environment Variables
diff --git a/docs/advanced/custom_types.md b/docs/advanced/custom_types.md
index 68442f3..7abda02 100644
--- a/docs/advanced/custom_types.md
+++ b/docs/advanced/custom_types.md
@@ -4,19 +4,23 @@
Use `type_loaders` to teach dature how to parse custom types from strings.
-Each `TypeLoader` maps a type to a conversion function:
+Pass `type_loaders` as a `dict[type, Callable]` mapping types to conversion functions:
-```python
---8<-- "examples/docs/advanced/custom_types/custom_type.py"
-```
+=== "Python"
-```yaml title="custom_type_common.yaml"
---8<-- "examples/docs/advanced/custom_types/sources/custom_type_common.yaml"
-```
+ ```python
+ --8<-- "examples/docs/advanced/custom_types/custom_type.py"
+ ```
+
+=== "custom_type_common.yaml"
+
+ ```yaml
+ --8<-- "examples/docs/advanced/custom_types/sources/custom_type_common.yaml"
+ ```
### Per-source vs Global
-`type_loaders` can be set per-source in `Source`, per-merge in `Merge`, or globally via `configure()`:
+`type_loaders` can be set per-source in `Source`, in `dature.load()` for merge mode, or globally via `configure()`:
=== "Per-source (Source)"
@@ -24,7 +28,7 @@ Each `TypeLoader` maps a type to a conversion function:
--8<-- "examples/docs/advanced/custom_types/custom_type.py"
```
-=== "Per-merge (Merge)"
+=== "Per-merge (load)"
```python
--8<-- "examples/docs/advanced/custom_types/custom_type_merge.py"
@@ -36,25 +40,46 @@ Each `TypeLoader` maps a type to a conversion function:
--8<-- "examples/docs/advanced/custom_types/advanced_configure_type_loaders.py"
```
-When both per-source and global `type_loaders` are set, they merge — per-source loaders take priority (placed first in the recipe).
+When both per-source and global `type_loaders` are set, they merge — per-source loaders take priority.
+
+## Custom Source Classes
+
+For formats that dature doesn't support out of the box, you can create your own source by subclassing one of the base classes from `dature.sources.base`:
+
+### Choosing a base class
-### TypeLoader Reference
+| Base class | Use when | You implement | You get for free |
+|------------|----------|---------------|------------------|
+| [`Source`](../api-reference.md#source) | Non-file data (API, database, custom protocol) | `format_name`, `_load() -> JSONValue` | Prefix filtering, env var expansion, type coercion, validation, merge support |
+| [`FileSource`](../api-reference.md#filesourcesource) | File-based format (XML, CSV, HCL, …) | `format_name`, `_load_file(path: FileOrStream) -> JSONValue` | Everything from `Source` + `file` parameter, stream support, `file_display()`, `file_path_for_errors()`, `__repr__` |
+| [`FlatKeySource`](../api-reference.md#flatkeysourcesource) | Flat key=value data (custom env store, Consul KV, …) | `format_name`, `_load() -> JSONValue` (flat `dict[str, str]`) | Everything from `Source` + `split_symbols` nesting, `nested_resolve`, automatic string→type parsing (`int`, `bool`, `date`, …) |
+
+All base classes are in `dature.sources.base`:
```python
---8<-- "src/dature/metadata.py:type-loader"
+--8<-- "examples/docs/advanced/custom_types/custom_source_import.py"
```
-| Parameter | Description |
-|-----------|-------------|
-| `type_` | The target type to register a loader for |
-| `func` | A callable that converts the raw value to the target type |
+### Minimal interface
+
+Every custom source needs:
+
+1. **`format_name`** — class-level string shown in `__repr__` and error messages (e.g. `"xml"`, `"consul"`)
+2. **A load method** — `_load()` for `Source`/`FlatKeySource`, or `_load_file(path)` for `FileSource`. Must return `JSONValue` (a nested dict).
-## Custom Loaders
+### Optional overrides
-For formats that dature doesn't support out of the box, subclass `BaseLoader` and implement two things:
+| Method | Default | Override when |
+|--------|---------|---------------|
+| `additional_loaders()` | `[]` (FileSource) or string-value loaders (FlatKeySource) | Your format stores all values as strings and needs extra type parsers (e.g. `bool`, `float`). |
+| `file_display()` | `None` | Your source has a meaningful display path (shown in logs and errors). |
+| `file_path_for_errors()` | `None` | Your source points to a file on disk (used in error messages). |
+| `resolve_location(...)` | Empty `SourceLocation` | You want errors to show line numbers or variable names from your source. |
+| `location_label` | inherited | Change the label in error messages (e.g. `"FILE"`, `"ENV"`, `"API"`). |
-1. `display_name` — a class-level string shown in error messages
-2. `_load(path)` — returns `JSONValue` (a nested dict) from the source
+### Example: FileSource subclass
+
+The most common case — reading a file format:
```python
--8<-- "examples/docs/advanced/custom_types/custom_loader.py"
@@ -64,4 +89,18 @@ For formats that dature doesn't support out of the box, subclass `BaseLoader` an
--8<-- "examples/docs/advanced/custom_types/sources/custom_loader.xml"
```
-Pass your custom loader via the `loader` parameter in `Source`. All built-in features (type coercion, validation, prefix extraction, ENV expansion) work automatically.
+`FileSource` handles the `file` parameter, path expansion, and stream detection. Your `_load_file()` receives a `Path` or file-like object and returns a dict.
+
+### Example: Source subclass (non-file)
+
+For sources that don't read files — e.g. an API, a database, or an in-memory dict:
+
+```python
+--8<-- "examples/docs/advanced/custom_types/custom_dict_source.py"
+```
+
+### Tips
+
+- All built-in features (type coercion, validation, prefix extraction, ENV expansion, merge support) work automatically with any custom source.
+- Override `additional_loaders()` to return `_string_value_loaders()` from `dature.sources.base` if your format stores everything as strings (like INI or ENV).
+- Pass your custom source to `dature.load()` the same way as any built-in source.
diff --git a/docs/advanced/env-expansion.md b/docs/advanced/env-expansion.md
index cb0d03d..76bb93c 100644
--- a/docs/advanced/env-expansion.md
+++ b/docs/advanced/env-expansion.md
@@ -49,7 +49,7 @@ Set the mode on `Source`:
--8<-- "examples/docs/advanced/env_expansion/sources/advanced_env_expansion_strict.yaml"
```
-For merge mode, set on `Merge` as default for all sources:
+For merge mode, pass `expand_env_vars` to `dature.load()` as default for all sources:
=== "Python"
@@ -131,7 +131,7 @@ The `${VAR:-default}` fallback syntax works in all modes.
## File Path Expansion
-Environment variables in `Source(file_=...)` are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `Source` creation time.
+Environment variables in the `file=...` parameter of `Source` subclasses are expanded automatically in `"strict"` mode — if a variable is missing, `EnvVarExpandError` is raised immediately at `Source` creation time.
This works for both directory paths and file names:
diff --git a/docs/advanced/field-groups.md b/docs/advanced/field-groups.md
index 477059e..8b456f6 100644
--- a/docs/advanced/field-groups.md
+++ b/docs/advanced/field-groups.md
@@ -1,6 +1,26 @@
# Field Groups
-Ensure related fields are always overridden together. If a source partially overrides a group, `FieldGroupError` is raised:
+Ensure related fields are always overridden together:
+
+=== "Python"
+
+ ```python
+ --8<-- "examples/docs/advanced/field_groups/field_groups_basic.py"
+ ```
+
+=== "common_field_groups_defaults.yaml"
+
+ ```yaml
+ --8<-- "examples/docs/shared/common_field_groups_defaults.yaml"
+ ```
+
+=== "common_field_groups_overrides.yaml"
+
+ ```yaml
+ --8<-- "examples/docs/shared/common_field_groups_overrides.yaml"
+ ```
+
+If `overrides.yaml` changes `host` and `port` together, the group constraint is satisfied. If a source partially overrides a group, `FieldGroupError` is raised:
=== "Python"
@@ -20,6 +40,12 @@ Ensure related fields are always overridden together. If a source partially over
--8<-- "examples/docs/advanced/field_groups/sources/field_groups_partial_overrides.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr"
+ ```
+
## Nested Dataclass Expansion
Passing a dataclass field expands it into all its leaf fields:
@@ -42,6 +68,12 @@ Passing a dataclass field expands it into all its leaf fields:
--8<-- "examples/docs/advanced/field_groups/sources/advanced_field_groups_expansion_error_overrides.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr"
+ ```
+
## Multiple Groups
If a source partially overrides multiple groups, all violations are reported:
@@ -64,4 +96,10 @@ If a source partially overrides multiple groups, all violations are reported:
--8<-- "examples/docs/advanced/field_groups/sources/advanced_field_groups_multiple_error_overrides.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr"
+ ```
+
Field groups work with all merge strategies and can be combined with `field_merges`.
diff --git a/docs/advanced/merge-rules.md b/docs/advanced/merge-rules.md
index ba55516..ea7e319 100644
--- a/docs/advanced/merge-rules.md
+++ b/docs/advanced/merge-rules.md
@@ -23,16 +23,16 @@ graph TD
Override the global strategy for individual fields using `field_merges`.
-All available `FieldMergeStrategy` values:
+Available field merge strategies:
| Strategy | Behavior |
|----------|----------|
-| `FIRST_WINS` | Keep the value from the first source |
-| `LAST_WINS` | Keep the value from the last source |
-| `APPEND` | Concatenate lists: `base + override` |
-| `APPEND_UNIQUE` | Concatenate lists, removing duplicates |
-| `PREPEND` | Concatenate lists: `override + base` |
-| `PREPEND_UNIQUE` | Concatenate lists in reverse order, removing duplicates |
+| `"first_wins"` | Keep the value from the first source |
+| `"last_wins"` | Keep the value from the last source |
+| `"append"` | Concatenate lists: `base + override` |
+| `"append_unique"` | Concatenate lists, removing duplicates |
+| `"prepend"` | Concatenate lists: `override + base` |
+| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates |
Given two sources with overlapping `tags`:
@@ -50,47 +50,47 @@ Given two sources with overlapping `tags`:
Each strategy produces a different result:
-=== "FIRST_WINS"
+=== "first_wins"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_first_wins.py"
```
-=== "LAST_WINS"
+=== "last_wins"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_last_wins.py"
```
-=== "APPEND"
+=== "append"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_append.py"
```
-=== "APPEND_UNIQUE"
+=== "append_unique"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_append_unique.py"
```
-=== "PREPEND"
+=== "prepend"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_prepend.py"
```
-=== "PREPEND_UNIQUE"
+=== "prepend_unique"
```python
--8<-- "examples/docs/advanced/merge_rules/merging_field_prepend_unique.py"
```
-Nested fields are supported: `F[Config].database.host`.
+Nested fields are supported: `dature.F[Config].database.host`.
-Per-field strategies work with `RAISE_ON_CONFLICT` — fields with an explicit strategy are excluded from conflict detection.
+Per-field strategies work with `"raise_on_conflict"` — fields with an explicit strategy are excluded from conflict detection.
-## With RAISE_ON_CONFLICT
+## With raise_on_conflict
Fields with an explicit strategy are excluded from conflict detection:
@@ -136,40 +136,6 @@ You can also pass a callable as the strategy:
The callable receives a `list[JSONValue]` (one value per source) and returns the merged value.
-## Field Groups
-
-Ensure that related fields are always overridden together. If a source changes some fields in a group but not others, `FieldGroupError` is raised:
-
-=== "Python"
-
- ```python
- --8<-- "examples/docs/advanced/merge_rules/merging_field_groups.py"
- ```
-
-=== "common_field_groups_defaults.yaml"
-
- ```yaml
- --8<-- "examples/docs/shared/common_field_groups_defaults.yaml"
- ```
-
-=== "common_field_groups_overrides.yaml"
-
- ```yaml
- --8<-- "examples/docs/shared/common_field_groups_overrides.yaml"
- ```
-
-If `overrides.yaml` changes `host` and `port` together, the group constraint is satisfied. If it changed only `host` but not `port`, loading would fail:
-
-```
-Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source yaml 'overrides.yaml')
- unchanged: port (from source yaml 'defaults.yaml')
-```
-
-For nested dataclass expansion and multiple groups, see [Field Groups](field-groups.md).
-
## Skipping Broken Sources
Skip sources that fail to load (missing file, invalid syntax):
diff --git a/docs/advanced/nested-resolve.md b/docs/advanced/nested-resolve.md
index cba3e05..04b2d15 100644
--- a/docs/advanced/nested-resolve.md
+++ b/docs/advanced/nested-resolve.md
@@ -1,6 +1,6 @@
# Nested Resolve
-Flat-key loaders (ENV, `.env` file, Docker secrets) store nested dataclasses as either a single JSON string or as separate flat keys:
+Flat-key sources (ENV, `.env` file, Docker secrets) store nested dataclasses as either a single JSON string or as separate flat keys:
```
# JSON form
@@ -65,9 +65,9 @@ When both `nested_resolve_strategy` and `nested_resolve` are set, per-field take
--8<-- "examples/docs/advanced/nested_resolve/nested_resolve_override.py"
```
-## All Flat-Key Loaders
+## All Flat-Key Sources
-The mechanism works identically across all flat-key loaders:
+The mechanism works identically across all flat-key sources:
=== "ENV"
diff --git a/docs/api-reference.md b/docs/api-reference.md
index c8f9ea4..b64be03 100644
--- a/docs/api-reference.md
+++ b/docs/api-reference.md
@@ -2,7 +2,7 @@
## Core
-### `load()`
+### `dature.load()`
```python
--8<-- "src/dature/main.py:load"
@@ -10,95 +10,151 @@
Main entry point. Two calling patterns:
-**Function mode** — pass `dataclass_`, get an instance back:
+**Function mode** — pass `schema`, get an instance back:
```python
-config = load(Source(file_="config.yaml"), Config)
+--8<-- "examples/docs/api_reference/api_reference_function_mode.py"
```
-**Decorator mode** — omit `dataclass_`, get a decorator:
+**Decorator mode** — omit `schema`, get a decorator:
```python
-@load(Source(file_="config.yaml"))
-@dataclass
-class Config:
- host: str
+--8<-- "examples/docs/api_reference/api_reference_decorator_mode.py"
```
**Parameters:**
-| Parameter | Type | Description |
-|-----------|------|-------------|
-| `metadata` | `Source \| Merge \| tuple[Source, ...] \| None` | Source descriptor. Tuple is shorthand for `Merge(...)` with `LAST_WINS`. `None` → `Source()` (env vars). |
-| `dataclass_` | `type[T] \| None` | Target dataclass. If provided → function mode. If `None` → decorator mode. |
-| `cache` | `bool \| None` | Enable caching in decorator mode. Default from `configure()`. |
-| `debug` | `bool \| None` | Collect `LoadReport`. Default from `configure()`. |
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `*sources` | `Source` | — | One or more source descriptors (e.g. `JsonSource(file=...)`, `EnvSource()`). Multiple sources → merge mode. |
+| `schema` | `type[T] \| None` | `None` | Target dataclass. If provided → function mode. If `None` → decorator mode. |
+| `cache` | `bool \| None` | `None` | Enable caching in decorator mode. Default from `configure()`. Ignored in function mode. |
+| `debug` | `bool \| None` | `None` | Collect `LoadReport` on the result instance. Default from `configure()`. Retrieve with `get_load_report()`. |
+| `strategy` | `MergeStrategyName` | `"last_wins"` | Merge strategy. Only used with multiple sources. See [Merge Strategies](#merge-strategies). |
+| `field_merges` | `FieldMergeMap \| None` | `None` | Per-field merge strategy overrides. Maps `F[Config].field` to a strategy string or callable. See [Field Merge Strategies](#field-merge-strategies). |
+| `field_groups` | `tuple[FieldGroupTuple, ...]` | `()` | Groups of fields that must change together. Each group is a tuple of `F[Config].field` references. |
+| `skip_broken_sources` | `bool` | `False` | Skip sources that fail to load instead of raising. |
+| `skip_invalid_fields` | `bool` | `False` | Skip fields that fail validation instead of raising. |
+| `expand_env_vars` | `ExpandEnvVarsMode \| None` | `None` | Env var expansion mode applied to all sources. Source-level setting takes priority. |
+| `secret_field_names` | `tuple[str, ...] \| None` | `None` | Extra secret field name patterns for masking. |
+| `mask_secrets` | `bool \| None` | `None` | Enable/disable secret masking globally. |
+| `type_loaders` | `TypeLoaderMap \| None` | `None` | Custom type loaders mapping types to conversion functions. Merged with source-level and global loaders. |
+| `nested_resolve_strategy` | `NestedResolveStrategy \| None` | `None` | Default priority for JSON vs flat keys in `FlatKeySource`. See [Nested Resolve](advanced/nested-resolve.md). |
+| `nested_resolve` | `NestedResolve \| None` | `None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). |
+
+**Returns:**
+
+- **Function mode** (`schema` provided): an instance of `schema` populated from the sources.
+- **Decorator mode** (`schema=None`): a decorator that adds `load()` logic to the decorated dataclass.
+
+**Raises:**
+
+- `TypeError` — no sources passed, or a positional argument is not a `Source` instance.
+- `DatureConfigError` — aggregated field loading errors.
+- `MergeConflictError` — conflicting values with `strategy="raise_on_conflict"`.
+- `FieldGroupError` — field group constraint violation.
+- `EnvVarExpandError` — missing env vars with `expand_env_vars="strict"`.
---
### `Source`
```python
---8<-- "src/dature/metadata.py:load-metadata"
+--8<-- "src/dature/sources/base.py:load-metadata"
```
-See [Introduction — Source Reference](introduction.md#source-reference) for parameter descriptions.
+Abstract base class for all sources. See [Introduction — Source Reference](introduction.md#source-reference) for parameter descriptions.
----
+**Parameters:**
-### `Merge`
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `prefix` | `DotSeparatedPath \| None` | `None` | Filter ENV keys (`"APP_"`) or extract nested object (`"app.database"`). |
+| `name_style` | `NameStyle \| None` | `None` | Naming convention mapping: `"lower_snake"`, `"upper_snake"`, `"lower_camel"`, `"upper_camel"`, `"lower_kebab"`, `"upper_kebab"`. |
+| `field_mapping` | `FieldMapping \| None` | `None` | Explicit field renaming with `F` objects. |
+| `root_validators` | `tuple[ValidatorProtocol, ...] \| None` | `None` | Post-load validation of the entire object. |
+| `validators` | `FieldValidators \| None` | `None` | Per-field validators via `Annotated` metadata or explicit mapping. |
+| `expand_env_vars` | `ExpandEnvVarsMode \| None` | `None` | ENV variable expansion: `"disabled"`, `"default"`, `"empty"`, `"strict"`. |
+| `skip_if_broken` | `bool \| None` | `None` | Skip this source if it fails to load. |
+| `skip_if_invalid` | `bool \| tuple[FieldPath, ...] \| None` | `None` | Skip invalid fields from this source. `True` for all, or a tuple of specific fields. |
+| `secret_field_names` | `tuple[str, ...] \| None` | `None` | Extra secret name patterns for masking. |
+| `mask_secrets` | `bool \| None` | `None` | Enable/disable secret masking for this source. |
+| `type_loaders` | `TypeLoaderMap \| None` | `None` | Custom type converters `{type: callable}` for this source. |
+
+**Public methods:**
+
+| Method | Return type | Description |
+|--------|-------------|-------------|
+| `load_raw()` | `LoadRawResult` | Load raw data, apply prefix filtering and env var expansion. Returns `LoadRawResult(data, nested_conflicts)`. |
+| `transform_to_dataclass(data, schema)` | `T` | Convert a `JSONValue` dict into a dataclass instance using adaptix. Caches the retort per schema type. |
+| `create_retort()` | `Retort` | Build an adaptix `Retort` with base loaders, name mapping, and type loaders. |
+| `create_validating_retort(schema)` | `Retort` | Like `create_retort()`, plus field and root validators extracted from `schema`. |
+| `create_probe_retort()` | `Retort` | Retort that skips missing fields — used internally for partial loading in merge mode. |
+| `file_display()` | `str \| None` | Human-readable file identifier for logging. Returns `None` by default. |
+| `file_path_for_errors()` | `Path \| None` | File path used in error messages. Returns `None` by default. |
+| `resolve_location(...)` | `list[SourceLocation]` | Locate a field in the source content for error reporting. Class method. Returns `SourceLocation` with line range, env var name, etc. |
+
+### `FileSource(Source)`
+
+Base class for file-based sources (`JsonSource`, `Yaml11Source`, `Toml10Source`, `IniSource`, etc.).
```python
---8<-- "src/dature/metadata.py:merge-metadata"
+--8<-- "src/dature/sources/base.py:file-source"
```
-| Parameter | Description |
-|-----------|-------------|
-| `sources` | Ordered tuple of `Source` to merge |
-| `strategy` | Global merge strategy |
-| `field_merges` | Per-field strategy overrides |
-| `field_groups` | Groups of fields that must change together |
-| `skip_broken_sources` | Global default for broken source handling |
-| `skip_invalid_fields` | Global default for invalid field handling |
-| `expand_env_vars` | Default env expansion mode for all sources |
-| `secret_field_names` | Extra secret patterns for all sources |
-| `mask_secrets` | Enable/disable masking globally |
-| `nested_resolve_strategy` | Default priority for JSON vs flat keys across all sources. See [Nested Resolve](advanced/nested-resolve.md) |
-| `nested_resolve` | Default per-field strategy overrides for all sources. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy) |
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `file` | `FileLike \| FilePath \| None` | `None` | Path to the config file (`str`, `Path`), or an open file-like object (`StringIO`, `BytesIO`, any `TextIOBase`/`BufferedIOBase`/`RawIOBase`). If `None`, the path defaults to the current directory. |
----
-
-### `MergeStrategy`
+**Overridden methods:**
-```python
---8<-- "src/dature/metadata.py:merge-strategy"
-```
+| Method | Behavior |
+|--------|----------|
+| `file_display()` | Returns the path as string, `""` for file-like objects, or `None` when `file=None`. |
+| `file_path_for_errors()` | Returns `Path` for string/Path inputs, `None` for streams or `None`. |
+| `__repr__()` | Returns `"format_name 'file_path'"` or just `"format_name"`. |
----
+### `FlatKeySource(Source)`
-### `FieldMergeStrategy`
+Base class for flat key=value sources (`EnvSource`, `EnvFileSource`, `DockerSecretsSource`).
```python
---8<-- "src/dature/metadata.py:field-merge-strategy"
+--8<-- "src/dature/sources/base.py:flat-key-source"
```
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `split_symbols` | `str` | `"__"` | Separator for nested key splitting. `APP__DB__HOST` → `{"db": {"host": ...}}` |
+| `nested_resolve_strategy` | `NestedResolveStrategy` | `"flat"` | Default priority when both flat and JSON keys exist: `"flat"` or `"json"`. See [Nested Resolve](advanced/nested-resolve.md). |
+| `nested_resolve` | `NestedResolve \| None` | `None` | Per-field nested resolve strategy overrides. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy). |
+
+**Behavior:** All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str | None`. Nested JSON in values (`[...]`, `{...}`) is inferred. `load_raw()` returns `LoadRawResult` with `nested_conflicts` populated when both flat and JSON keys exist for the same field.
+
---
-### `MergeRule`
+### Merge Strategies
-```python
---8<-- "src/dature/metadata.py:merge-rule"
-```
+Strategies for resolving field values across multiple sources. Set via `strategy` parameter of `load()`.
----
+| Strategy | Behavior |
+|----------|----------|
+| `"last_wins"` | Last source overrides (default). |
+| `"first_wins"` | First source wins. |
+| `"first_found"` | Uses the first source that loads successfully. |
+| `"raise_on_conflict"` | Raises `MergeConflictError` on conflicting values. |
-### `FieldGroup`
+### Field Merge Strategies
-```python
---8<-- "src/dature/metadata.py:field-group"
-```
+Per-field overrides via `field_merges` parameter. Maps `F[Config].field` to a strategy name or a `Callable[[list[JSONValue]], JSONValue]`.
-Usage: `FieldGroup(F[Config].host, F[Config].port)`
+| Strategy | Behavior |
+|----------|----------|
+| `"first_wins"` | Keep the value from the first source. |
+| `"last_wins"` | Keep the value from the last source. |
+| `"append"` | Concatenate lists: `base + override`. |
+| `"append_unique"` | Concatenate lists, removing duplicates. |
+| `"prepend"` | Concatenate lists: `override + base`. |
+| `"prepend_unique"` | Concatenate lists in reverse order, removing duplicates. |
---
@@ -106,14 +162,28 @@ Usage: `FieldGroup(F[Config].host, F[Config].port)`
### `F`
-Factory for building field paths with validation:
+Factory for building type-safe field paths. Used for `field_mapping`, `field_merges`, `field_groups`, `validators`, `skip_if_invalid`, and `nested_resolve`.
```python
-F[Config].host # FieldPath with eager validation
-F[Config].database.host # nested path
-F["Config"].host # string-based, no validation (for decorator mode)
+--8<-- "examples/docs/api_reference/api_reference_field_path.py"
```
+### `FieldPath`
+
+Immutable dataclass (`frozen=True, slots=True`) created via `F[Config].field_name`.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `owner` | `type \| str` | The dataclass type (or its string name) this path belongs to. |
+| `parts` | `tuple[str, ...]` | Sequence of field names forming the path. |
+
+**Methods:**
+
+| Method | Return type | Description |
+|--------|-------------|-------------|
+| `__getattr__(name)` | `FieldPath` | Chain to nested fields. Validates that the field exists on the owner dataclass. Returns a new `FieldPath` with extended parts. |
+| `as_path()` | `str` | Dot-separated string representation (e.g. `"database.host"`). Raises `ValueError` if parts is empty. |
+
---
## Report
@@ -124,7 +194,11 @@ F["Config"].host # string-based, no validation (for decorator mode)
--8<-- "src/dature/load_report.py:get-load-report"
```
-Returns the `LoadReport` attached to a loaded instance (or type on error). Returns `None` and emits a warning if `debug=True` was not passed.
+Retrieves the `LoadReport` attached to a loaded instance. Returns `None` and emits a warning if `debug=True` was not passed to `load()`.
+
+| Parameter | Type | Description |
+|-----------|------|-------------|
+| `instance` | `Any` | The loaded dataclass instance (or the type in decorator mode on error). |
### `LoadReport`, `SourceEntry`, `FieldOrigin`
@@ -132,6 +206,41 @@ Returns the `LoadReport` attached to a loaded instance (or type on error). Retur
--8<-- "src/dature/load_report.py:report-structure"
```
+#### `SourceEntry`
+
+Frozen dataclass describing one source in the load pipeline.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `index` | `int` | Source position (0-based) in the `load()` call. |
+| `file_path` | `str \| None` | File path string, or `None` for non-file sources. |
+| `loader_type` | `str` | Source class name (e.g. `"JsonSource"`, `"EnvSource"`). |
+| `raw_data` | `JSONValue` | Raw data loaded from this source before merging. |
+
+#### `FieldOrigin`
+
+Frozen dataclass describing which source provided a specific field value.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `key` | `str` | Dot-separated field path (e.g. `"database.host"`). |
+| `value` | `JSONValue` | The value that was used. |
+| `source_index` | `int` | Index of the winning source. |
+| `source_file` | `str \| None` | File path of the winning source. |
+| `source_loader_type` | `str` | Class name of the winning source. |
+
+#### `LoadReport`
+
+Frozen dataclass with full load diagnostics.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `dataclass_name` | `str` | Name of the target dataclass. |
+| `strategy` | `MergeStrategyEnum \| None` | Merge strategy used, or `None` for single source. |
+| `sources` | `tuple[SourceEntry, ...]` | All sources in order. |
+| `field_origins` | `tuple[FieldOrigin, ...]` | Per-field origin info, sorted by key. |
+| `merged_data` | `JSONValue` | Final merged data dict before dataclass conversion. |
+
---
## Configuration
@@ -142,7 +251,16 @@ Returns the `LoadReport` attached to a loaded instance (or type on error). Retur
--8<-- "src/dature/config.py:configure"
```
-Set global configuration. `None` parameters keep their current values.
+Set global configuration. Pass dicts to override specific options: `masking={"mask": "***"}`, `loading={"debug": True}`. `None` parameters keep their current values. Empty dict `{}` resets the group to defaults.
+
+Global config is also loaded from `DATURE_*` environment variables on first access.
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `masking` | `MaskingOptions \| None` | `None` | Secret masking options. |
+| `error_display` | `ErrorDisplayOptions \| None` | `None` | Error formatting options. |
+| `loading` | `LoadingOptions \| None` | `None` | Loading behavior options. |
+| `type_loaders` | `TypeLoaderMap \| None` | `None` | Global custom type loaders `{type: callable}`. Merged with source-level loaders (source takes priority). |
### `MaskingConfig`
@@ -150,48 +268,82 @@ Set global configuration. `None` parameters keep their current values.
--8<-- "src/dature/config.py:masking-config"
```
+Frozen dataclass controlling secret masking behavior.
+
+| Field | Type | Default | Description |
+|-------|------|---------|-------------|
+| `mask` | `str` | `""` | Replacement string for masked values. Must be non-empty. |
+| `visible_prefix` | `int` | `0` | Number of leading characters to keep visible. |
+| `visible_suffix` | `int` | `0` | Number of trailing characters to keep visible. |
+| `min_heuristic_length` | `int` | `8` | Minimum string length for heuristic-based detection. |
+| `heuristic_threshold` | `float` | `0.5` | Entropy threshold for heuristic secret detection. |
+| `secret_field_names` | `tuple[str, ...]` | `("password", "passwd", ...)` | Field name patterns that trigger masking. |
+| `mask_secrets` | `bool` | `True` | Global on/off switch for masking. |
+
### `ErrorDisplayConfig`
```python
--8<-- "src/dature/config.py:error-display-config"
```
+Frozen dataclass controlling error message formatting.
+
+| Field | Type | Default | Description |
+|-------|------|---------|-------------|
+| `max_visible_lines` | `int` | `3` | Maximum lines of source content shown in errors. |
+| `max_line_length` | `int` | `80` | Maximum characters per line before truncation. |
+
### `LoadingConfig`
```python
--8<-- "src/dature/config.py:loading-config"
```
+Frozen dataclass controlling load behavior defaults.
+
+| Field | Type | Default | Description |
+|-------|------|---------|-------------|
+| `cache` | `bool` | `True` | Default caching in decorator mode. |
+| `debug` | `bool` | `False` | Default debug mode (collect `LoadReport`). |
+| `nested_resolve_strategy` | `NestedResolveStrategy` | `"flat"` | Default nested resolve strategy for `FlatKeySource`. |
+
---
## Validators
-All validators are frozen dataclasses implementing `get_validator_func()` and `get_error_message()`.
+All validators are frozen dataclasses (`frozen=True, slots=True`) with two methods:
+
+| Method | Return type | Description |
+|--------|-------------|-------------|
+| `get_validator_func()` | `Callable` | Returns a function that takes the field value and returns `bool`. |
+| `get_error_message()` | `str` | Returns the formatted error message. |
+
+All validators accept an optional `error_message` parameter to override the default message. Use `{value}` / `{pattern}` placeholders in custom messages.
### Number Validators (`dature.validators.number`)
-| Class | Parameter | Description |
-|-------|-----------|-------------|
-| `Gt` | `value: int \| float` | Greater than |
-| `Ge` | `value: int \| float` | Greater than or equal |
-| `Lt` | `value: int \| float` | Less than |
-| `Le` | `value: int \| float` | Less than or equal |
+| Class | Parameter | Default message | Description |
+|-------|-----------|-----------------|-------------|
+| `Gt` | `value: int \| float` | `"Value must be greater than {value}"` | Strictly greater than. |
+| `Ge` | `value: int \| float` | `"Value must be greater than or equal to {value}"` | Greater than or equal. |
+| `Lt` | `value: int \| float` | `"Value must be less than {value}"` | Strictly less than. |
+| `Le` | `value: int \| float` | `"Value must be less than or equal to {value}"` | Less than or equal. |
### String Validators (`dature.validators.string`)
-| Class | Parameter | Description |
-|-------|-----------|-------------|
-| `MinLength` | `value: int` | Minimum length |
-| `MaxLength` | `value: int` | Maximum length |
-| `RegexPattern` | `pattern: str` | Regex match |
+| Class | Parameter | Default message | Description |
+|-------|-----------|-----------------|-------------|
+| `MinLength` | `value: int` | `"Value must have at least {value} characters"` | Minimum string length. |
+| `MaxLength` | `value: int` | `"Value must have at most {value} characters"` | Maximum string length. |
+| `RegexPattern` | `pattern: str` | `"Value must match pattern '{pattern}'"` | Full regex match (`re.match`). |
### Sequence Validators (`dature.validators.sequence`)
-| Class | Parameter | Description |
-|-------|-----------|-------------|
-| `MinItems` | `value: int` | Minimum items |
-| `MaxItems` | `value: int` | Maximum items |
-| `UniqueItems` | — | All items unique |
+| Class | Parameter | Default message | Description |
+|-------|-----------|-----------------|-------------|
+| `MinItems` | `value: int` | `"Value must have at least {value} items"` | Minimum number of items. |
+| `MaxItems` | `value: int` | `"Value must have at most {value} items"` | Maximum number of items. |
+| `UniqueItems` | — | `"Value must contain unique items"` | All items must be unique. |
### Root Validator (`dature.validators.root`)
@@ -199,51 +351,313 @@ All validators are frozen dataclasses implementing `get_validator_func()` and `g
--8<-- "src/dature/validators/root.py:root-validator"
```
+| Field | Type | Default | Description |
+|-------|------|---------|-------------|
+| `func` | `Callable[..., bool]` | — | Validation function. Receives the loaded dataclass instance, returns `True` if valid. |
+| `error_message` | `str` | `"Root validation failed"` | Error message on failure. |
+
+**Methods:** `get_validator_func()` → returns `func`. `get_error_message()` → returns `error_message`.
+
---
## Special Types
+### `SecretStr`
+
+Module: `dature.fields.secret_str`. A string wrapper that hides its value in `str()` and `repr()`.
+
+| Method / Property | Return type | Description |
+|-------------------|-------------|-------------|
+| `SecretStr(secret_value)` | — | Constructor. Takes the raw secret string. |
+| `get_secret_value()` | `str` | Returns the actual secret value. |
+| `__str__()` | `str` | Returns `"**********"`. |
+| `__repr__()` | `str` | Returns `"SecretStr('**********')"`. |
+| `__len__()` | `int` | Length of the underlying secret. |
+| `__eq__()`, `__hash__()` | — | Equality and hashing based on the secret value. |
+
+### `ByteSize`
+
+Module: `dature.fields.byte_size`. Parses human-readable byte sizes (`"1.5 GB"`, `"512 KiB"`) into an integer byte count.
+
+**Accepted formats:** `<number> <unit>` where unit is one of: `B`, `KB`, `MB`, `GB`, `TB`, `PB` (decimal) or `KiB`, `MiB`, `GiB`, `TiB`, `PiB` (binary). Case-insensitive. Whitespace between number and unit is allowed.
+
+| Method / Property | Return type | Description |
+|-------------------|-------------|-------------|
+| `ByteSize(value)` | — | Constructor. Accepts `int` (raw bytes) or `str` (e.g. `"1.5 GB"`). |
+| `human_readable(*, decimal=False)` | `str` | Format as human-readable string. `decimal=True` for KB/MB/GB, `False` for KiB/MiB/GiB. |
+| `__int__()` | `int` | Raw byte count. |
+| `__str__()` | `str` | Same as `human_readable()`. |
+| `__repr__()` | `str` | Returns `"ByteSize(<bytes>)"`. |
+| `__eq__()`, `__hash__()` | — | Equality and hashing based on byte count. |
+| `__lt__()`, `__le__()`, `__gt__()`, `__ge__()` | `bool` | Comparison operators based on byte count. |
+
+### `PaymentCardNumber`
+
+Module: `dature.fields.payment_card`. Luhn-validated payment card number with brand detection.
+
+Constructor strips spaces and dashes, validates digit-only 12–19 chars, and runs Luhn check. Raises `ValueError` on invalid input.
+
+| Method / Property | Return type | Description |
+|-------------------|-------------|-------------|
+| `PaymentCardNumber(card_number)` | — | Constructor. Accepts string with digits, spaces, dashes. |
+| `get_raw_number()` | `str` | Returns the cleaned digit-only number. |
+| `masked` | `str` | Property. Returns `"************1234"` (last 4 digits visible). |
+| `brand` | `str` | Property. Detected brand: `"Visa"`, `"Mastercard"`, `"American Express"`, `"Discover"`, `"JCB"`, `"Diners Club"`, `"UnionPay"`, `"Maestro"`, `"Mir"`, `"Troy"`, `"RuPay"`, `"Verve"`, or `"Unknown"`. |
+| `__str__()` | `str` | Same as `masked`. |
+| `__repr__()` | `str` | Returns `"PaymentCardNumber('<masked>')"`. |
+| `__eq__()`, `__hash__()` | — | Equality and hashing based on the raw number. |
+
+### Other Type Aliases
+
| Type | Module | Description |
|------|--------|-------------|
-| `SecretStr` | `dature.fields.secret_str` | Masked string with `get_secret_value()` |
-| `ByteSize` | `dature.fields.byte_size` | Human-readable byte sizes |
-| `PaymentCardNumber` | `dature.fields.payment_card` | Luhn-validated card with brand detection |
-| `URL` | `dature.types` | Alias for `urllib.parse.ParseResult` |
-| `Base64UrlStr` | `dature.types` | Base64-decoded string |
-| `Base64UrlBytes` | `dature.types` | Base64-decoded bytes |
+| `URL` | `dature.types` | Alias for `urllib.parse.ParseResult`. Parsed from URL strings. |
+| `Base64UrlStr` | `dature.types` | `NewType` over `str`. Decoded from base64url-encoded strings. |
+| `Base64UrlBytes` | `dature.types` | `NewType` over `bytes`. Decoded from base64url-encoded strings. |
---
-## Loaders
-
-| Loader | Module | Format |
-|--------|--------|--------|
-| `JsonLoader` | `dature.sources_loader.json_` | JSON |
-| `Json5Loader` | `dature.sources_loader.json5_` | JSON5 |
-| `Yaml11Loader` | `dature.sources_loader.yaml_` | YAML 1.1 |
-| `Yaml12Loader` | `dature.sources_loader.yaml_` | YAML 1.2 |
-| `Toml10Loader` | `dature.sources_loader.toml_` | TOML 1.0 |
-| `Toml11Loader` | `dature.sources_loader.toml_` | TOML 1.1 |
-| `IniLoader` | `dature.sources_loader.ini_` | INI |
-| `EnvLoader` | `dature.sources_loader.env_` | Environment variables |
-| `EnvFileLoader` | `dature.sources_loader.env_` | .env files |
-| `DockerSecretsLoader` | `dature.sources_loader.docker_secrets` | Docker secrets directory |
+## Source Classes
+
+### File-based sources (inherit `FileSource`)
+
+All file-based sources accept the `file` parameter from [`FileSource`](#filesourcesource) plus all common parameters from [`Source`](#source).
+
+`file` accepts `str`, `Path`, or file-like objects (`StringIO`, `BytesIO`, any `TextIOBase`/`BufferedIOBase`/`RawIOBase`). When `file=None`, the path defaults to the current directory.
+
+`file_display()` returns the path as string, `""` for file-like objects, or `None` when `file=None`.
+
+#### `JsonSource(FileSource)`
+
+| | |
+|---|---|
+| **Format** | JSON |
+| **Module** | `dature.sources.json_` |
+| **Dependencies** | stdlib `json` |
+| **Error label** | `FILE` |
+| **String parsing** | `float`, `date`, `datetime`, `time`, `bytearray` from strings |
+
+#### `Json5Source(FileSource)`
+
+| | |
+|---|---|
+| **Format** | JSON5 (comments, trailing commas, unquoted keys) |
+| **Module** | `dature.sources.json5_` |
+| **Dependencies** | `json5` |
+| **Error label** | `FILE` |
+| **String parsing** | `str` (from JSON5 identifiers), `float`, `date`, `datetime`, `time`, `bytearray` from strings |
+
+#### `Yaml11Source(FileSource)`
+
+| | |
+|---|---|
+| **Format** | YAML 1.1 |
+| **Module** | `dature.sources.yaml_` |
+| **Dependencies** | `ruamel.yaml` |
+| **Error label** | `FILE` |
+| **Native types** | `date`, `datetime` parsed natively by YAML. `time` from int, `bytearray` from strings |
+
+#### `Yaml12Source(FileSource)`
+
+| | |
+|---|---|
+| **Format** | YAML 1.2 |
+| **Module** | `dature.sources.yaml_` |
+| **Dependencies** | `ruamel.yaml` |
+| **Error label** | `FILE` |
+| **Native types** | `date`, `datetime` parsed natively by YAML. `time`, `bytearray` from strings |
+
+#### `Toml10Source(FileSource)`
+
+| | |
+|---|---|
+| **Format** | TOML 1.0 |
+| **Module** | `dature.sources.toml_` |
+| **Dependencies** | `toml_rs` |
+| **Error label** | `FILE` |
+| **Native types** | `date`, `datetime`, `time` parsed natively by TOML. `bytearray`, `None`, `str \| None` from strings |
+
+#### `Toml11Source(FileSource)`
+
+| | |
+|---|---|
+| **Format** | TOML 1.1 |
+| **Module** | `dature.sources.toml_` |
+| **Dependencies** | `toml_rs` |
+| **Error label** | `FILE` |
+| **Native types** | `date`, `datetime`, `time` parsed natively by TOML. `bytearray`, `None`, `str \| None` from strings |
+
+#### `IniSource(FileSource)`
+
+| | |
+|---|---|
+| **Format** | INI (stdlib `configparser`) |
+| **Module** | `dature.sources.ini_` |
+| **Dependencies** | stdlib `configparser` |
+| **Error label** | `FILE` |
+| **String parsing** | All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str \| None`. Nested JSON in values (`[...]`, `{...}`) is inferred. |
+
+Section headers become top-level dict keys. Dotted sections (`database.pool`) create nested dicts. `prefix` selects a single section.
+
+### Flat key-value sources (inherit `FlatKeySource`)
+
+All flat key-value sources accept `split_symbols`, `nested_resolve_strategy` and `nested_resolve` from [`FlatKeySource`](#flatkeysourcesource) plus all common parameters from [`Source`](#source).
+
+All values are strings. Automatic parsing of `str`, `float`, `date`, `datetime`, `time`, `bytearray`, `bool`, `None`, `str | None`. Nested JSON in values (`[...]`, `{...}`) is inferred.
+
+Nesting is built from `split_symbols` (default `"__"`): `APP__DB__HOST=x` → `{"db": {"host": "x"}}`.
+
+#### `EnvSource(FlatKeySource)`
+
+| | |
+|---|---|
+| **Format** | Environment variables (`os.environ`) |
+| **Module** | `dature.sources.env_` |
+| **Dependencies** | — |
+| **Error label** | `ENV` |
+
+Keys are lowercased after stripping `prefix`. `resolve_location()` returns `env_var_name` instead of file/line info.
+
+#### `EnvFileSource(FlatKeySource)`
+
+| | |
+|---|---|
+| **Format** | `.env` files (`KEY=value`, `#` comments, quoted values) |
+| **Module** | `dature.sources.env_` |
+| **Dependencies** | — |
+| **Error label** | `ENV FILE` |
+
+Inherits from both `FileFieldMixin` and `EnvSource`, so accepts the `file` parameter. `resolve_location()` returns line range within the `.env` file.
+
+#### `DockerSecretsSource(FlatKeySource)`
+
+| | |
+|---|---|
+| **Format** | Docker secrets directory (one file per secret) |
+| **Module** | `dature.sources.docker_secrets` |
+| **Dependencies** | — |
+| **Error label** | `SECRET FILE` |
+
+| Parameter | Type | Default | Description |
+|-----------|------|---------|-------------|
+| `dir_` | `FilePath` | — | Path to the Docker secrets directory (e.g. `/run/secrets`). Required. |
+
+Each file in `dir_` becomes a key (filename, lowercased) with the file content (stripped) as value. Subdirectories are skipped. `resolve_location()` returns the path `dir_/secret_name` as `file_path`.
---
## Exceptions
-| Exception | Description |
-|-----------|-------------|
-| `DatureError` | Base exception |
-| `DatureConfigError` | Aggregated config loading errors |
-| `MergeConflictError` | Merge conflict between sources |
-| `FieldGroupError` | Field group constraint violation |
-| `EnvVarExpandError` | Missing environment variables in strict mode |
-| `FieldLoadError` | Single field loading error |
-| `SourceLoadError` | Source loading failure |
+All exceptions are in `dature.errors`.
+
+### `DatureError`
+
+Base exception for all dature errors.
+
+### `DatureConfigError(ExceptionGroup[DatureError])`
+
+Aggregated config loading errors. Contains one or more `FieldLoadError` sub-exceptions.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `dataclass_name` | `str` | Name of the target dataclass. |
+| `exceptions` | `tuple[DatureError, ...]` | Individual errors (inherited from `ExceptionGroup`). |
+
+`str()` returns `"<dataclass_name> loading errors (<count>)"`.
+
+### `FieldLoadError(DatureError)`
+
+Single field loading error with source location.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `field_path` | `list[str]` | Path to the field (e.g. `["database", "host"]`). |
+| `message` | `str` | Human-readable error description. |
+| `input_value` | `JSONValue` | The raw value that failed to load. |
+| `locations` | `list[SourceLocation]` | Source locations for error reporting (file path, line range, env var name). |
+
+`str()` returns a formatted multi-line message with source context and caret pointing at the value.
+
+### `SourceLoadError(DatureError)`
+
+Source-level loading failure (e.g. file not found, parse error).
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `message` | `str` | Error description. |
+| `location` | `SourceLocation \| None` | Source location, if available. |
+
+### `MergeConflictError(DatureConfigError)`
+
+Raised with `strategy="raise_on_conflict"` when sources provide different values. Contains `MergeConflictFieldError` sub-exceptions.
+
+### `MergeConflictFieldError(DatureError)`
+
+Per-field merge conflict.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `field_path` | `list[str]` | Path to the conflicting field. |
+| `message` | `str` | Conflict description. |
+| `locations` | `list[SourceLocation]` | Conflicting source locations. |
+
+### `FieldGroupError(DatureConfigError)`
+
+Field group constraint violation. Contains `FieldGroupViolationError` sub-exceptions.
+
+### `FieldGroupViolationError(DatureError)`
+
+Single field group violation.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `group_fields` | `tuple[str, ...]` | All fields in the group. |
+| `changed_fields` | `tuple[str, ...]` | Fields that were overridden. |
+| `unchanged_fields` | `tuple[str, ...]` | Fields that were not overridden. |
+| `changed_sources` | `tuple[str, ...]` | Source names for changed fields. |
+| `unchanged_sources` | `tuple[str, ...]` | Source names for unchanged fields. |
+| `source_index` | `int` | Index of the source that caused the violation. |
+
+### `EnvVarExpandError(DatureConfigError)`
+
+Missing environment variables in `expand_env_vars="strict"` mode. Contains `MissingEnvVarError` sub-exceptions.
+
+### `MissingEnvVarError(DatureError)`
+
+Single missing env var.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `var_name` | `str` | Name of the missing variable. |
+| `position` | `int` | Character position in the source string. |
+| `source_text` | `str` | The original string containing `$VAR`. |
+| `field_path` | `list[str]` | Field path, if known. |
+| `location` | `SourceLocation \| None` | Source location, if available. |
+
+### `SourceLocation`
+
+Frozen dataclass used in error messages to point at the source of a value.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `location_label` | `str` | Source type label: `"FILE"`, `"ENV"`, `"ENV FILE"`, `"SECRET FILE"`. |
+| `file_path` | `Path \| None` | File path, or `None` for env vars. |
+| `line_range` | `LineRange \| None` | Start/end line numbers in the file. |
+| `line_content` | `list[str] \| None` | Relevant source lines for context. |
+| `env_var_name` | `str \| None` | Environment variable name, for ENV sources. |
+| `annotation` | `str \| None` | Extra annotation (e.g. merge conflict info). |
+| `env_var_value` | `str \| None` | Raw env var value for conflict reporting. |
+
+### `LineRange`
+
+Frozen dataclass for file line ranges.
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `start` | `int` | Start line (1-based). |
+| `end` | `int` | End line (1-based, inclusive). |
-All exceptions are in `dature.errors.exceptions`.
+`repr()` returns `"line 5"` or `"line 5-8"`.
---
@@ -256,7 +670,16 @@ All exceptions are in `dature.errors.exceptions`.
| `FileOrStream` | `Path \| FileLike` | `dature.types` |
| `NameStyle` | `Literal["lower_snake", "upper_snake", "lower_camel", "upper_camel", "lower_kebab", "upper_kebab"]` | `dature.types` |
| `ExpandEnvVarsMode` | `Literal["disabled", "default", "empty", "strict"]` | `dature.types` |
-| `FieldMapping` | `dict[FieldPath, str \| tuple[str, ...]]` | `dature.types` |
-| `FieldValidators` | `dict[FieldPath, ValidatorProtocol \| tuple[ValidatorProtocol, ...]]` | `dature.types` |
+| `FieldRef` | `FieldPath \| str \| int \| float \| bool \| list \| dict \| tuple \| set \| bytes \| None` | `dature.types` |
+| `FieldMapping` | `dict[FieldRef, str \| tuple[str, ...]]` | `dature.types` |
+| `FieldValidators` | `dict[FieldRef, ValidatorProtocol \| tuple[ValidatorProtocol, ...]]` | `dature.types` |
+| `FieldMergeMap` | `dict[FieldRef, FieldMergeStrategyName \| Callable[..., Any]]` | `dature.types` |
| `FieldMergeCallable` | `Callable[[list[JSONValue]], JSONValue]` | `dature.types` |
+| `FieldMergeStrategyName` | `Literal["first_wins", "last_wins", "append", "append_unique", "prepend", "prepend_unique"]` | `dature.types` |
+| `FieldGroupTuple` | `tuple[FieldRef, ...]` | `dature.types` |
+| `TypeLoaderMap` | `dict[type, Callable[..., Any]]` | `dature.types` |
+| `MergeStrategyName` | `Literal["last_wins", "first_wins", "first_found", "raise_on_conflict"]` | `dature.types` |
+| `NestedResolveStrategy` | `Literal["flat", "json"]` | `dature.types` |
+| `NestedResolve` | `dict[NestedResolveStrategy, tuple[FieldPath \| Any, ...]]` | `dature.types` |
| `JSONValue` | `dict[str, JSONValue] \| list[JSONValue] \| str \| int \| float \| bool \| None` | `dature.types` |
+| `LoadRawResult` | `dataclass(data: JSONValue, nested_conflicts: NestedConflicts)` | `dature.types` |
diff --git a/docs/comparison/why-not-dynaconf.md b/docs/comparison/why-not-dynaconf.md
index f051857..0a2b4fe 100644
--- a/docs/comparison/why-not-dynaconf.md
+++ b/docs/comparison/why-not-dynaconf.md
@@ -15,7 +15,7 @@ The trade-off is **how** it covers it: Dynaconf is powerful and battle-tested, b
| **Validation** | Separate `Validator` objects | Both: `Annotated` inline validators + separate root/custom validators |
| **Formats** | YAML, TOML, JSON, INI, `.env`, Python files | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets |
| **Remote sources** | Vault, Redis + community plugins | Not yet (planned) |
-| **Merging** | Layered override + `dynaconf_merge` | 4 strategies + per-field rules (`APPEND`, `PREPEND`, field groups, etc.) |
+| **Merging** | Layered override + `dynaconf_merge` | 4 strategies + per-field rules (`"append"`, `"prepend"`, field groups, etc.) |
| **Dynamic variables** | `@format`, `@jinja` templates with lazy evaluation | `${VAR:-default}` env expansion in all formats + file paths |
| **CLI** | `dynaconf list`, `inspect`, `write`, `validate`, etc. | No CLI |
| **Per-environment files** | Built-in (`[development]`, `[production]` sections) | Manual via multiple `Source` objects |
@@ -76,13 +76,21 @@ This gives flexibility — validators can be defined in a different module, reus
dature supports **both approaches**. Inline validators live with the type:
```python
---8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py:validators"
+--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py"
+```
+
+```title="Error"
+--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr"
```
And separate validators when you need cross-field checks or decoupled validation logic:
```python
---8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py:root-validators"
+--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py"
+```
+
+```title="Error"
+--8<-- "examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr"
```
You choose the style that fits — or mix them.
diff --git a/docs/comparison/why-not-hydra.md b/docs/comparison/why-not-hydra.md
index b5bf0f8..f613ce7 100644
--- a/docs/comparison/why-not-hydra.md
+++ b/docs/comparison/why-not-hydra.md
@@ -13,7 +13,7 @@ The trade-off is scope: Hydra is a **framework** that takes over your entry poin
| **Formats** | YAML only | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets |
| **Env variables** | `oc.env` resolver; no `.env` support | First-class: env vars, `.env` files, `${VAR:-default}` expansion in all formats + file paths |
| **CLI overrides** | Built-in: `python app.py db.port=3306` + tab completion | No CLI |
-| **Composition** | Config groups, defaults list, package overrides | Multi-source `Merge` with explicit strategies |
+| **Composition** | Config groups, defaults list, package overrides | Multi-source merge with explicit strategies |
| **Parameter sweeps** | Built-in multirun + sweeper plugins (Ax, Optuna, etc.) | No — not a use case |
| **Object instantiation** | `instantiate()` — creates objects from config with DI | No — config loading only |
| **Variable interpolation** | OmegaConf `${path.to.key}` + custom resolvers | `${VAR:-default}` env expansion in all formats + file paths |
@@ -22,7 +22,7 @@ The trade-off is scope: Hydra is a **framework** that takes over your entry poin
| **Error messages** | OmegaConf exceptions | Human-readable: source file, line number, context snippet |
| **Secret masking** | No | Auto-masks secrets in errors and logs |
| **Debug / audit** | Output dir with saved config + logs | `debug=True` — which source provided each value |
-| **Plugin system** | Sweepers, launchers, config sources, search path | Custom loaders via `BaseLoader` subclass |
+| **Plugin system** | Sweepers, launchers, config sources, search path | Custom loaders via `Source` subclass |
| **Dependencies** | `hydra-core` + `omegaconf` + `antlr4-runtime` | `adaptix` (pure Python) |
| **Config result** | `OmegaConf.DictConfig` (dict-like wrapper) | Your actual `@dataclass` instance |
| **Maintenance** | Last release: Dec 2022. [Acknowledged as unmaintained](https://github.com/facebookresearch/xformers/issues/848) by other Meta teams | Active development |
@@ -36,7 +36,7 @@ Hydra reads YAML exclusively. You can reference env vars via OmegaConf's `${oc.e
- **JSON / JSON5** — common in web services and JavaScript-adjacent tooling. Not supported.
- **INI** — legacy format still common in enterprise. Not supported.
-dature handles all of these out of the box, with auto-detection from file extension:
+dature handles all of these out of the box:
```python
--8<-- "examples/docs/comparison/why-not-hydra/hydra_merge.py:merge"
@@ -89,7 +89,11 @@ class Config:
dature uses `Annotated` validators:
```python
---8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.py:validators"
+--8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.py"
+```
+
+```title="Error"
+--8<-- "examples/docs/comparison/why-not-hydra/hydra_validators.stderr"
```
Plus root validators for cross-field checks, custom validators, and standard `__post_init__`.
diff --git a/docs/comparison/why-not-pydantic-settings.md b/docs/comparison/why-not-pydantic-settings.md
index 3d7230e..c6a8f50 100644
--- a/docs/comparison/why-not-pydantic-settings.md
+++ b/docs/comparison/why-not-pydantic-settings.md
@@ -9,14 +9,14 @@ The trade-off is coupling: your config must be a Pydantic model, custom types ne
| | pydantic-settings | dature |
|---|---|---|
| **Base class** | `BaseSettings` (Pydantic model) | stdlib `@dataclass` |
-| **Formats** | `.env`, env vars, JSON, YAML, TOML + custom sources | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets — auto-detected |
-| **Merging** | Fixed priority order (init > env > dotenv > secrets > defaults) | 4 strategies + per-field rules (`APPEND`, `PREPEND`, field groups, etc.) |
+| **Formats** | `.env`, env vars, JSON, YAML, TOML + custom sources | YAML (1.1/1.2), JSON, JSON5, TOML (1.0/1.1), INI, `.env`, env vars, Docker secrets |
+| **Merging** | Fixed priority order (init > env > dotenv > secrets > defaults) | 4 strategies + per-field rules (`"append"`, `"prepend"`, field groups, etc.) |
| **Skip broken sources** | No | Yes — `skip_if_broken`, `skip_if_invalid` |
| **Field groups** | No | Yes — enforce related fields are overridden together |
| **Naming conventions** | `alias` / `alias_generator` (`to_camel`, `to_pascal`, `to_snake`) | Built-in `name_style` (6 conventions) + explicit `field_mapping` with multiple aliases |
| **CLI** | Built-in `CliSettingsSource` with subcommands, async support | No CLI |
| **Secrets** | `SecretStr`, `secrets_dir`, nested secrets directories | `SecretStr`, auto-masking in errors/logs (by type, name pattern, or heuristic) |
-| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Source(file_="$DIR/config.toml")`) |
+| **ENV expansion** | No | `${VAR:-default}` syntax in all file formats + file paths (`Toml11Source(file="$DIR/config.toml")`) |
| **Error messages** | Pydantic `ValidationError` | Human-readable: source file, line number, context snippet |
| **Debug / audit** | No | `debug=True` — which source provided each value |
| **Validation** | Pydantic `field_validator`, `model_validator` (pre/post), constraints | `Annotated` validators, root validators, custom validators, `__post_init__` |
@@ -73,15 +73,15 @@ class Settings(BaseSettings):
)
```
-dature **auto-detects** the format from the file extension — no boilerplate:
+dature uses explicit Source subclasses — no boilerplate:
```python
---8<-- "examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py:auto-detect"
+--8<-- "examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py:formats"
```
dature also supports INI, JSON5, and YAML 1.1/1.2 + TOML 1.0/1.1 version variants — formats that pydantic-settings doesn't cover.
-Need a custom format? Subclass `BaseLoader` — one method to implement, not an entire `SettingsSource`.
+Need a custom format? Subclass `Source` — one method to implement, not an entire `SettingsSource`.
## Error Messages You Can Actually Read
diff --git a/docs/features/masking.md b/docs/features/masking.md
index 1c63138..d2c34d7 100644
--- a/docs/features/masking.md
+++ b/docs/features/masking.md
@@ -52,6 +52,10 @@ dature uses three methods to identify secrets:
--8<-- "examples/docs/features/masking/sources/masking_secret_str.yaml"
```
+ ```title="Error"
+ --8<-- "examples/docs/features/masking/masking_secret_str.stderr"
+ ```
+
=== "By name"
Fields whose names contain known patterns are automatically masked in error messages:
@@ -64,6 +68,10 @@ dature uses three methods to identify secrets:
--8<-- "examples/docs/features/masking/sources/masking_by_name.yaml"
```
+ ```title="Error"
+ --8<-- "examples/docs/features/masking/masking_by_name.stderr"
+ ```
+
=== "Heuristic"
With `dature[secure]`, values that look like random tokens are masked in error messages even if the field name is not a known secret pattern:
@@ -76,6 +84,10 @@ dature uses three methods to identify secrets:
--8<-- "examples/docs/features/masking/sources/masking_heuristic.yaml"
```
+ ```title="Error"
+ --8<-- "examples/docs/features/masking/masking_heuristic.stderr"
+ ```
+
## Mask Format
By default, the entire value is replaced with ``:
@@ -102,22 +114,36 @@ Control masking via `Source`:
=== "secret_field_names"
```python
- --8<-- "examples/docs/features/masking/masking_per_source.py:per-source"
+ --8<-- "examples/docs/features/masking/masking_per_source.py"
+ ```
+
+ ```title="Error"
+ --8<-- "examples/docs/features/masking/masking_per_source.stderr"
```
=== "mask_secrets=False"
```python
- --8<-- "examples/docs/features/masking/masking_no_mask.py:no-mask"
+ --8<-- "examples/docs/features/masking/masking_no_mask.py"
+ ```
+
+ ```title="Error"
+ --8<-- "examples/docs/features/masking/masking_no_mask.stderr"
```
+### Per-load
+
+`mask_secrets` and `secret_field_names` can be passed directly to `dature.load()`, and they apply in both single-source and multi-source modes. A source's own `Source.mask_secrets` overrides `load(mask_secrets=...)` when it is not `None`; `Source.secret_field_names` and `load(secret_field_names=...)` are combined.
+
### In merge mode
```python
---8<-- "examples/docs/features/masking/masking_merge_mode.py:merge-mode"
+--8<-- "examples/docs/features/masking/masking_merge_mode.py"
```
-`Source.mask_secrets` overrides `Merge.mask_secrets` when not `None`. `secret_field_names` from both are combined.
+```title="Error"
+--8<-- "examples/docs/features/masking/masking_merge_mode.stderr"
+```
### Global
diff --git a/docs/features/merging.md b/docs/features/merging.md
index 23897b2..9410b95 100644
--- a/docs/features/merging.md
+++ b/docs/features/merging.md
@@ -4,7 +4,7 @@ Load configuration from multiple sources and merge them into one dataclass.
## Basic Merging
-Use `Merge` to combine sources:
+Pass multiple `Source` objects to `dature.load()`:
=== "Python"
@@ -24,9 +24,9 @@ Use `Merge` to combine sources:
--8<-- "examples/docs/shared/common_overrides.yaml"
```
-## Tuple Shorthand
+## Multiple Sources
-Pass a tuple of `Source` directly — uses `LAST_WINS` by default:
+Multiple sources use `"last_wins"` by default:
=== "Python"
@@ -64,14 +64,14 @@ Works as a decorator too:
| Strategy | Behavior |
|----------|----------|
-| `LAST_WINS` | Last source overrides (default) |
-| `FIRST_WINS` | First source wins |
-| `FIRST_FOUND` | Uses the first source that loads successfully, skips broken sources automatically |
-| `RAISE_ON_CONFLICT` | Raises `MergeConflictError` if the same key appears in multiple sources with different values |
+| `"last_wins"` | Last source overrides (default) |
+| `"first_wins"` | First source wins |
+| `"first_found"` | Uses the first source that loads successfully, skips broken sources automatically |
+| `"raise_on_conflict"` | Raises `MergeConflictError` if the same key appears in multiple sources with different values |
Nested dicts are merged recursively. Lists and scalars are replaced entirely according to the strategy.
-=== "LAST_WINS"
+=== "last_wins"
Last source overrides earlier ones. This is the default strategy.
@@ -91,7 +91,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc
--8<-- "examples/docs/shared/common_overrides.yaml"
```
-=== "FIRST_WINS"
+=== "first_wins"
First source wins on conflict. Later sources only fill in missing keys.
@@ -111,7 +111,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc
--8<-- "examples/docs/shared/common_overrides.yaml"
```
-=== "FIRST_FOUND"
+=== "first_found"
Uses the first source that loads successfully and ignores the rest. Broken sources (missing file, parse error) are skipped automatically — no `skip_if_broken` needed. Type errors (wrong type, missing field) are **not** skipped.
@@ -125,7 +125,7 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc
--8<-- "examples/docs/shared/common_defaults.yaml"
```
-=== "RAISE_ON_CONFLICT"
+=== "raise_on_conflict"
Raises `MergeConflictError` if the same key appears in multiple sources with different values. Works best when sources have disjoint keys.
@@ -145,20 +145,17 @@ Nested dicts are merged recursively. Lists and scalars are replaced entirely acc
--8<-- "examples/docs/shared/common_raise_on_conflict_b.yaml"
```
-For per-field strategy overrides, see [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies). To enforce that related fields are always overridden together, see [Field Groups](../advanced/merge-rules.md#field-groups).
+For per-field strategy overrides, see [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies). To enforce that related fields are always overridden together, see [Field Groups](../advanced/field-groups.md).
-## Merge Reference
+## Merge Parameters
-```python
---8<-- "src/dature/metadata.py:merge-metadata"
-```
+All merge-related parameters are passed directly to `dature.load()` as keyword arguments:
| Parameter | Description |
|-----------|-------------|
-| `sources` | Tuple of `Source` descriptors — one per source to merge |
-| `strategy` | Global merge strategy. Default: `LAST_WINS`. See [Merge Strategies](#merge-strategies) |
+| `strategy` | Global merge strategy. Default: `"last_wins"`. See [Merge Strategies](#merge-strategies) |
| `field_merges` | Per-field merge strategy overrides. See [Per-Field Merge Strategies](../advanced/merge-rules.md#per-field-merge-strategies) |
-| `field_groups` | Enforce related fields are overridden together. See [Field Groups](../advanced/merge-rules.md#field-groups) |
+| `field_groups` | Enforce related fields are overridden together. See [Field Groups](../advanced/field-groups.md) |
| `skip_broken_sources` | Skip sources that fail to load. See [Skipping Broken Sources](../advanced/merge-rules.md#skipping-broken-sources) |
| `skip_invalid_fields` | Drop fields with invalid values. See [Skipping Invalid Fields](../advanced/merge-rules.md#skipping-invalid-fields) |
| `expand_env_vars` | ENV variable expansion mode. See [ENV Expansion](../advanced/env-expansion.md) |
diff --git a/docs/features/naming.md b/docs/features/naming.md
index 6c75d1b..e3e6864 100644
--- a/docs/features/naming.md
+++ b/docs/features/naming.md
@@ -48,7 +48,7 @@ Explicit field renaming using `F` objects. Takes priority over `name_style`:
A field can have multiple aliases — the first matching key in the source wins:
```python
-field_mapping={F[Config].name: ("fullName", "userName")}
+--8<-- "examples/docs/features/naming/naming_field_mapping_aliases.py:aliases"
```
### Nested Fields
@@ -72,7 +72,7 @@ Nested fields are supported via `F[Owner].field` syntax on inner dataclasses:
In decorator mode where the class is not yet defined, use a string:
```python
-F["Config"].name # autocomplete doesn't work here
+--8<-- "examples/docs/features/naming/naming_field_mapping_decorator.py:decorator"
```
## prefix
diff --git a/docs/features/validation.md b/docs/features/validation.md
index 9e23ce2..0862141 100644
--- a/docs/features/validation.md
+++ b/docs/features/validation.md
@@ -18,38 +18,43 @@ Declare validators using `typing.Annotated`:
--8<-- "examples/docs/features/validation/sources/validation_annotated_invalid.json5"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/features/validation/validation_annotated.stderr"
+ ```
+
### Available Validators
**Numbers** (`dature.validators.number`):
| Validator | Description |
|-----------|-------------|
-| `Gt(value=N)` | Greater than N |
-| `Ge(value=N)` | Greater than or equal to N |
-| `Lt(value=N)` | Less than N |
-| `Le(value=N)` | Less than or equal to N |
+| `Gt(N)` | Greater than N |
+| `Ge(N)` | Greater than or equal to N |
+| `Lt(N)` | Less than N |
+| `Le(N)` | Less than or equal to N |
**Strings** (`dature.validators.string`):
| Validator | Description |
|-----------|-------------|
-| `MinLength(value=N)` | Minimum string length |
-| `MaxLength(value=N)` | Maximum string length |
-| `RegexPattern(pattern=r"...")` | Match regex pattern |
+| `MinLength(N)` | Minimum string length |
+| `MaxLength(N)` | Maximum string length |
+| `RegexPattern(r"...")` | Match regex pattern |
**Sequences** (`dature.validators.sequence`):
| Validator | Description |
|-----------|-------------|
-| `MinItems(value=N)` | Minimum number of items |
-| `MaxItems(value=N)` | Maximum number of items |
+| `MinItems(N)` | Minimum number of items |
+| `MaxItems(N)` | Maximum number of items |
| `UniqueItems()` | All items must be unique |
Multiple validators can be combined:
```python
-port: Annotated[int, Ge(value=1), Le(value=65535)]
-tags: Annotated[list[str], MinItems(value=1), MaxItems(value=10), UniqueItems()]
+--8<-- "examples/docs/features/validation/validation_annotated_combined.py:combined"
```
## Root Validators
@@ -68,6 +73,12 @@ Validate the entire object after loading:
--8<-- "examples/docs/features/validation/sources/validation_root_invalid.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/features/validation/validation_root.stderr"
+ ```
+
Root validators receive the fully constructed dataclass instance and return `True` if valid.
## Metadata Validators
@@ -86,22 +97,22 @@ Field validators can be specified in `Source` using the `validators` parameter.
--8<-- "examples/docs/features/validation/sources/validation_metadata_invalid.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/features/validation/validation_metadata.stderr"
+ ```
+
A single validator can be passed directly. Multiple validators require a tuple:
```python
-validators={
- F[Config].port: (Gt(value=0), Lt(value=65536)), # tuple for multiple
- F[Config].host: MinLength(value=1), # single, no tuple needed
-}
+--8<-- "examples/docs/features/validation/validation_metadata_syntax.py:syntax"
```
Nested fields are supported:
```python
-validators={
- F[Config].database.host: MinLength(value=1),
- F[Config].database.port: Gt(value=0),
-}
+--8<-- "examples/docs/features/validation/validation_metadata_nested.py:nested"
```
## Custom Validators
@@ -120,6 +131,12 @@ Create your own validators by implementing `get_validator_func()` and `get_error
--8<-- "examples/docs/features/validation/sources/validation_custom_invalid.json5"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/features/validation/validation_custom.stderr"
+ ```
+
Custom validators can be combined with built-in ones in `Annotated`.
## `__post_init__` and `@property`
@@ -138,6 +155,12 @@ Standard dataclass `__post_init__` and `@property` work as expected — dature p
--8<-- "examples/docs/features/validation/sources/validation_post_init_invalid.yaml"
```
+=== "Error"
+
+ ```
+ --8<-- "examples/docs/features/validation/validation_post_init.stderr"
+ ```
+
Both approaches work in function mode and decorator mode.
## Error Format
diff --git a/docs/index.md b/docs/index.md
index 1885abb..b2e6793 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -99,24 +99,24 @@ Load config from YAML, JSON, TOML, INI, ENV files, environment variables and Doc
## Supported Formats
-| Format | Extension | Loader | Extra dependency |
-|--------|-----------|--------|------------------|
-| YAML 1.1 | `.yaml`, `.yml` | `Yaml11Loader` | `ruamel.yaml` |
-| YAML 1.2 | `.yaml`, `.yml` | `Yaml12Loader` | `ruamel.yaml` |
-| JSON | `.json` | `JsonLoader` | — |
-| JSON5 | `.json5` | `Json5Loader` | `json-five` |
-| TOML 1.0 | `.toml` | `Toml10Loader` | `toml-rs` |
-| TOML 1.1 | `.toml` | `Toml11Loader` | `toml-rs` |
-| INI | `.ini`, `.cfg` | `IniLoader` | — |
-| ENV file | `.env` | `EnvFileLoader` | — |
-| Environment variables | — | `EnvLoader` | — |
-| Docker secrets | directory | `DockerSecretsLoader` | — |
-
-The format is auto-detected from the file extension. When `file_` is not specified, environment variables are used. When `file_` points to a directory, `DockerSecretsLoader` is used. `file_` also accepts `Path` objects and file-like objects (`BytesIO`, `StringIO`) — for file-like objects, the `loader` parameter is required.
+| Format | Source Class | Extra dependency |
+|--------|--------------|------------------|
+| YAML 1.1 | `Yaml11Source` | `ruamel.yaml` |
+| YAML 1.2 | `Yaml12Source` | `ruamel.yaml` |
+| JSON | `JsonSource` | — |
+| JSON5 | `Json5Source` | `json-five` |
+| TOML 1.0 | `Toml10Source` | `toml-rs` |
+| TOML 1.1 | `Toml11Source` | `toml-rs` |
+| INI | `IniSource` | — |
+| ENV file | `EnvFileSource` | — |
+| Environment variables | `EnvSource` | — |
+| Docker secrets | `DockerSecretsSource` | — |
+
+Use the specific Source subclass for your format. File-based sources (`FileSource` subclasses) accept `file` as `str`, `Path`, or file-like object (`BytesIO`, `StringIO`). `EnvSource` reads from environment variables (no `file` parameter). `DockerSecretsSource` accepts `dir` pointing to a secrets directory.
## mypy Plugin
-When using `@load()` as a decorator, mypy will report `call-arg` errors because the original dataclass `__init__` still requires all fields. dature ships with a mypy plugin that makes all fields optional in decorated classes:
+When using `@dature.load()` as a decorator, mypy will report `call-arg` errors because the original dataclass `__init__` still requires all fields. dature ships with a mypy plugin that makes all fields optional in decorated classes:
```toml
[tool.mypy]
diff --git a/docs/introduction.md b/docs/introduction.md
index 77cd98f..0ca4c96 100644
--- a/docs/introduction.md
+++ b/docs/introduction.md
@@ -25,12 +25,12 @@ dature offers two ways to load configuration: **function mode** and **decorator
Explicit arguments to `__init__` take priority over loaded values:
```python
- config = Config(port=9090) # host from source, port overridden
+ --8<-- "examples/docs/introduction/intro_decorator_override.py:override"
```
## All Formats
-dature auto-detects the format from the file extension. Here's the same config loaded from every supported format:
+Use the specific Source subclass for your format. Here's the same config loaded from every supported format:
=== "YAML"
@@ -105,39 +105,17 @@ dature auto-detects the format from the file extension. Here's the same config l
--8<-- "examples/docs/introduction/format_docker.py"
```
-### Auto-Detection
-
-| Extension | Loader |
-|-----------|--------|
-| `.yaml`, `.yml` | `Yaml12Loader` (default) |
-| `.json` | `JsonLoader` |
-| `.json5` | `Json5Loader` |
-| `.toml` | `Toml11Loader` (default) |
-| `.ini`, `.cfg` | `IniLoader` |
-| `.env` | `EnvFileLoader` |
-| directory | `DockerSecretsLoader` |
-| not specified | `EnvLoader` (environment variables) |
-
-Override auto-detection with the `loader` parameter:
-
-```python
-from dature.sources_loader.yaml_ import Yaml11Loader
-
-Source(file_="config.yaml", loader=Yaml11Loader)
-```
+See the full list of Source classes and their extra dependencies on the [main page](index.md#supported-formats).
## Source Reference
```python
---8<-- "src/dature/metadata.py:load-metadata"
+--8<-- "src/dature/sources/base.py:load-metadata"
```
| Parameter | Description |
|-----------|-------------|
-| `file_` | Path to config file (`str`, `Path`), file-like object (`BytesIO`, `StringIO`), or directory. `None` → environment variables. File-like objects require explicit `loader` |
-| `loader` | Explicit loader class. `None` → auto-detect from extension |
| `prefix` | Filter ENV keys (`"APP_"`) or extract nested object (`"app.database"`) |
-| `split_symbols` | Delimiter for flat→nested conversion. Default: `"__"` |
| `name_style` | Naming convention mapping. See [Naming](features/naming.md) |
| `field_mapping` | Explicit field renaming with `F` objects. See [Naming](features/naming.md) |
| `root_validators` | Post-load validation of the entire object. See [Validation](features/validation.md) |
@@ -148,19 +126,31 @@ Source(file_="config.yaml", loader=Yaml11Loader)
| `secret_field_names` | Extra secret name patterns for masking. See [Masking](features/masking.md) |
| `mask_secrets` | Enable/disable secret masking for this source. See [Masking](features/masking.md) |
| `type_loaders` | Custom type converters for this source. See [Custom Types & Loaders](advanced/custom_types.md#custom-types) |
+
+**FileSource** subclasses (`JsonSource`, `Yaml*Source`, `Toml*Source`, `IniSource`, `Json5Source`) also have:
+
+| Parameter | Description |
+|-----------|-------------|
+| `file` | Path to config file (`str`, `Path`) or file-like object (`BytesIO`, `StringIO`). `None` is treated as an empty path |
+
+**FlatKeySource** subclasses (`EnvSource`, `EnvFileSource`, `DockerSecretsSource`) also have:
+
+| Parameter | Description |
+|-----------|-------------|
+| `split_symbols` | Delimiter for flat→nested conversion. Default: `"__"` |
| `nested_resolve_strategy` | Priority when both JSON and flat keys exist for a nested field: `"flat"` (default) or `"json"`. See [Nested Resolve](advanced/nested-resolve.md) |
| `nested_resolve` | Per-field strategy overrides using `F` objects. Takes priority over `nested_resolve_strategy`. See [Nested Resolve](advanced/nested-resolve.md#per-field-strategy) |
### File-Like Objects
-`file_` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass). The `loader` parameter is required since there is no file extension to auto-detect from:
+`file` accepts file-like objects (`StringIO`, `BytesIO`, and any `TextIOBase`/`BufferedIOBase`/`RawIOBase` subclass):
```python
--8<-- "examples/docs/introduction/intro_file_like.py"
```
!!! note
- `EnvLoader` and `DockerSecretsLoader` do not support file-like objects — they read from environment variables and directories respectively.
+ `EnvSource` and `DockerSecretsSource` do not support file-like objects — they read from environment variables and directories respectively.
## Type Coercion
diff --git a/docs/javascript/readthedocs.js b/docs/javascript/readthedocs.js
index a71af0a..580721f 100644
--- a/docs/javascript/readthedocs.js
+++ b/docs/javascript/readthedocs.js
@@ -54,12 +54,17 @@ function renderVersionItem(version) {
// Cached HTML fragments, built once from RTD data
let versioningHtml = "";
let olderItemsHtml = "";
+// Set when MutationObserver detects missing selector but data isn't ready yet
+let pendingInject = false;
function injectVersionSelector() {
if (versioningHtml === "") {
+ pendingInject = true;
return;
}
+ pendingInject = false;
+
const topic = document.querySelector(".md-header__topic");
if (topic === null) {
return;
@@ -120,19 +125,19 @@ document.addEventListener("readthedocs-addons-data-ready", function (event) {
injectVersionSelector();
});
-// Re-inject after Material instant navigation replaces the DOM
+// Re-inject after Material instant navigation replaces the DOM.
+// Debounce via setTimeout so we inject only after Material finishes its
+// batch of DOM mutations, not in between them.
document.addEventListener("DOMContentLoaded", function () {
if (typeof document.body.dataset.mdColorScheme === "undefined") {
return;
}
- let injecting = false;
+ let timer = 0;
new MutationObserver(function () {
- if (injecting) return;
const topic = document.querySelector(".md-header__topic");
if (topic !== null && topic.querySelector(".md-version") === null) {
- injecting = true;
- injectVersionSelector();
- injecting = false;
+ clearTimeout(timer);
+ timer = setTimeout(injectVersionSelector, 50);
}
}).observe(document.querySelector(".md-header") || document.body, {
childList: true,
diff --git a/examples/docs/advanced/caching/advanced_caching.py b/examples/docs/advanced/caching/advanced_caching.py
deleted file mode 100644
index 7d84db5..0000000
--- a/examples/docs/advanced/caching/advanced_caching.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Caching — decorator mode with cache enabled/disabled."""
-
-import os
-from dataclasses import dataclass
-
-from dature import Source, load
-
-os.environ["CACHE_HOST"] = "localhost"
-os.environ["CACHE_PORT"] = "6379"
-
-
-@load(Source(prefix="CACHE_"), cache=True)
-@dataclass
-class CachedConfig:
- host: str
- port: int
-
-
-config1 = CachedConfig()
-os.environ["CACHE_PORT"] = "9999"
-config2 = CachedConfig()
-assert config1.port == 6379
-assert config2.port == 6379
-
-os.environ["NOCACHE_HOST"] = "localhost"
-os.environ["NOCACHE_PORT"] = "6379"
-
-
-@load(Source(prefix="NOCACHE_"), cache=False)
-@dataclass
-class UncachedConfig:
- host: str
- port: int
-
-
-config3 = UncachedConfig()
-os.environ["NOCACHE_PORT"] = "9999"
-config4 = UncachedConfig()
-assert config3.port == 6379
-assert config4.port == 9999
diff --git a/examples/docs/advanced/caching/advanced_caching_disabled.py b/examples/docs/advanced/caching/advanced_caching_disabled.py
new file mode 100644
index 0000000..8b54b98
--- /dev/null
+++ b/examples/docs/advanced/caching/advanced_caching_disabled.py
@@ -0,0 +1,23 @@
+"""Caching — decorator mode with cache disabled."""
+
+import os
+from dataclasses import dataclass
+
+import dature
+
+os.environ["NOCACHE_HOST"] = "localhost"
+os.environ["NOCACHE_PORT"] = "6379"
+
+
+@dature.load(dature.EnvSource(prefix="NOCACHE_"), cache=False)
+@dataclass
+class UncachedConfig:
+ host: str
+ port: int
+
+
+config3 = UncachedConfig()
+os.environ["NOCACHE_PORT"] = "9999"
+config4 = UncachedConfig()
+assert config3.port == 6379
+assert config4.port == 9999
diff --git a/examples/docs/advanced/caching/advanced_caching_enabled.py b/examples/docs/advanced/caching/advanced_caching_enabled.py
new file mode 100644
index 0000000..bec4cc6
--- /dev/null
+++ b/examples/docs/advanced/caching/advanced_caching_enabled.py
@@ -0,0 +1,23 @@
+"""Caching — decorator mode with cache enabled."""
+
+import os
+from dataclasses import dataclass
+
+import dature
+
+os.environ["CACHE_HOST"] = "localhost"
+os.environ["CACHE_PORT"] = "6379"
+
+
+@dature.load(dature.EnvSource(prefix="CACHE_"), cache=True)
+@dataclass
+class CachedConfig:
+ host: str
+ port: int
+
+
+config1 = CachedConfig()
+os.environ["CACHE_PORT"] = "9999"
+config2 = CachedConfig()
+assert config1.port == 6379
+assert config2.port == 6379
diff --git a/examples/docs/advanced/configure/advanced_configure.py b/examples/docs/advanced/configure/advanced_configure.py
index 9ba32ad..1db8677 100644
--- a/examples/docs/advanced/configure/advanced_configure.py
+++ b/examples/docs/advanced/configure/advanced_configure.py
@@ -1,10 +1,9 @@
-"""Global configure() — customize masking, error display, loading defaults."""
+"""Global dature.configure() — customize masking, error display, loading defaults."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, configure, get_load_report, load
-from dature.config import LoadingConfig
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -17,20 +16,20 @@ class Config:
# 1. Default config — debug is off, no report
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is None
-# 2. Enable debug globally via configure()
-configure(loading=LoadingConfig(debug=True))
+# 2. Enable debug globally via dature.configure()
+dature.configure(loading={"debug": True})
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is not None
# 3. Reset to defaults — debug is off again
-configure(loading=LoadingConfig())
+dature.configure(loading={})
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is None
diff --git a/examples/docs/advanced/configure/advanced_configure_env.py b/examples/docs/advanced/configure/advanced_configure_env.py
index 1864519..31af68d 100644
--- a/examples/docs/advanced/configure/advanced_configure_env.py
+++ b/examples/docs/advanced/configure/advanced_configure_env.py
@@ -1,11 +1,10 @@
-"""Global configure() via environment variables — DATURE_ prefix."""
+"""Global dature.configure() via environment variables — DATURE_ prefix."""
import os
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, configure, get_load_report, load
-from dature.config import LoadingConfig
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -21,20 +20,20 @@ class Config:
# 1. DATURE_LOADING__DEBUG=true — debug is on, report attached
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is not None
-# 2. Override env with configure() — debug is off
-configure(loading=LoadingConfig(debug=False))
+# 2. Override env with dature.configure() — debug is off
+dature.configure(loading={"debug": False})
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is None
# 3. Reset to env defaults — debug is on again
-configure(loading=LoadingConfig(debug=True))
+dature.configure(loading={"debug": True})
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
-report = get_load_report(config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+report = dature.get_load_report(config)
assert report is not None
diff --git a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py
index 8863a55..f7d56a3 100644
--- a/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py
+++ b/examples/docs/advanced/custom_types/advanced_configure_type_loaders.py
@@ -1,9 +1,9 @@
-"""Global type_loaders via configure() — register custom type parsers for all load() calls."""
+"""Global type_loaders via dature.configure() — register custom type parsers for all load() calls."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, TypeLoader, configure, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -27,7 +27,7 @@ class AppConfig:
# Register Rgb parser globally — no need to pass type_loaders to every load() call
-configure(type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),))
+dature.configure(type_loaders={Rgb: rgb_from_string})
-config = load(Source(file_=SOURCES_DIR / "custom_type_common.yaml"), AppConfig)
+config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "custom_type_common.yaml"), schema=AppConfig)
assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0))
diff --git a/examples/docs/advanced/custom_types/custom_dict_source.py b/examples/docs/advanced/custom_types/custom_dict_source.py
new file mode 100644
index 0000000..ad6cd3e
--- /dev/null
+++ b/examples/docs/advanced/custom_types/custom_dict_source.py
@@ -0,0 +1,31 @@
+"""Custom source — subclass Source to load from a plain dict."""
+
+from dataclasses import dataclass
+from typing import Any, cast
+
+import dature
+from dature.sources.base import Source
+from dature.types import JSONValue
+
+
+@dataclass(kw_only=True, repr=False)
+class DictSource(Source):
+ format_name = "dict"
+ data: dict[str, Any]
+
+ def _load(self) -> JSONValue:
+ return cast("JSONValue", self.data)
+
+
+@dataclass
+class Config:
+ host: str
+ port: int
+
+
+config = dature.load(
+ DictSource(data={"host": "localhost", "port": 8080}),
+ schema=Config,
+)
+
+assert config == Config(host="localhost", port=8080)
diff --git a/examples/docs/advanced/custom_types/custom_loader.py b/examples/docs/advanced/custom_types/custom_loader.py
index c1f635f..6be884c 100644
--- a/examples/docs/advanced/custom_types/custom_loader.py
+++ b/examples/docs/advanced/custom_types/custom_loader.py
@@ -1,32 +1,32 @@
-"""Custom loader — subclass BaseLoader to read XML files."""
+"""Custom source — subclass Source to read XML files."""
import xml.etree.ElementTree as ET
from dataclasses import dataclass
from pathlib import Path
-from typing import ClassVar
from adaptix import Provider, loader
-from dature import Source, load
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import bool_loader, float_from_string
+import dature
+from dature.loaders import bool_loader, float_from_string
+from dature.sources.base import FileSource
from dature.types import FileOrStream, JSONValue
SOURCES_DIR = Path(__file__).parent / "sources"
-class XmlLoader(BaseLoader):
- display_name: ClassVar[str] = "xml"
+@dataclass(kw_only=True, repr=False)
+class XmlSource(FileSource):
+ format_name = "xml"
- def _load(self, path: FileOrStream) -> JSONValue:
+ def _load_file(self, path: FileOrStream) -> JSONValue:
if not isinstance(path, Path):
- msg = "XmlLoader only supports file paths"
+ msg = "XmlSource only supports file paths"
raise TypeError(msg)
tree = ET.parse(path) # noqa: S314
root = tree.getroot()
return {child.tag: child.text or "" for child in root}
- def _additional_loaders(self) -> list[Provider]:
+ def additional_loaders(self) -> list[Provider]:
return [
loader(bool, bool_loader),
loader(float, float_from_string),
@@ -40,12 +40,11 @@ class Config:
debug: bool
-config = load(
- Source(
- file_=SOURCES_DIR / "custom_loader.xml",
- loader=XmlLoader,
+config = dature.load(
+ XmlSource(
+ file=SOURCES_DIR / "custom_loader.xml",
),
- Config,
+ schema=Config,
)
assert config == Config(host="localhost", port=9090, debug=True)
diff --git a/examples/docs/advanced/custom_types/custom_source_import.py b/examples/docs/advanced/custom_types/custom_source_import.py
new file mode 100644
index 0000000..6ea548f
--- /dev/null
+++ b/examples/docs/advanced/custom_types/custom_source_import.py
@@ -0,0 +1,3 @@
+from dature.sources.base import FileSource, FlatKeySource, Source
+
+__all__ = ["FileSource", "FlatKeySource", "Source"]
diff --git a/examples/docs/advanced/custom_types/custom_type.py b/examples/docs/advanced/custom_types/custom_type.py
index 4d5f429..a1f88a6 100644
--- a/examples/docs/advanced/custom_types/custom_type.py
+++ b/examples/docs/advanced/custom_types/custom_type.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, TypeLoader, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -26,12 +26,12 @@ class AppConfig:
color: Rgb
-config = load(
- Source(
- file_=SOURCES_DIR / "custom_type_common.yaml",
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
+config = dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "custom_type_common.yaml",
+ type_loaders={Rgb: rgb_from_string},
),
- AppConfig,
+ schema=AppConfig,
)
assert config == AppConfig(name="my-app", color=Rgb(r=255, g=128, b=0))
diff --git a/examples/docs/advanced/custom_types/custom_type_merge.py b/examples/docs/advanced/custom_types/custom_type_merge.py
index 569126a..ea5fee0 100644
--- a/examples/docs/advanced/custom_types/custom_type_merge.py
+++ b/examples/docs/advanced/custom_types/custom_type_merge.py
@@ -1,9 +1,9 @@
-"""Per-merge type_loaders — set type_loaders on Merge for multi-source loads."""
+"""Per-merge type_loaders — set type_loaders on load() for multi-source loads."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, TypeLoader, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -26,13 +26,11 @@ class AppConfig:
color: Rgb
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "custom_type_common.yaml"),
- Source(file_=SOURCES_DIR / "custom_type_merge_override.yaml"),
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
- ),
- AppConfig,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "custom_type_common.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "custom_type_merge_override.yaml"),
+ schema=AppConfig,
+ type_loaders={Rgb: rgb_from_string},
)
assert config == AppConfig(name="my-app", color=Rgb(r=100, g=200, b=50))
diff --git a/examples/docs/advanced/debug/advanced_debug_error.py b/examples/docs/advanced/debug/advanced_debug_error.py
index 87fdc39..ce18d6c 100644
--- a/examples/docs/advanced/debug/advanced_debug_error.py
+++ b/examples/docs/advanced/debug/advanced_debug_error.py
@@ -1,10 +1,10 @@
-"""Report on error — get_load_report() from the dataclass type after a failed load."""
+"""Report on error — dature.get_load_report() from the dataclass type after a failed load."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, get_load_report, load
-from dature.errors.exceptions import DatureConfigError
+import dature
+from dature.errors import DatureConfigError
SOURCES_DIR = Path(__file__).parent / "sources"
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -18,16 +18,14 @@ class Config:
try:
- config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- Source(file_=SOURCES_DIR / "advanced_debug_error_defaults.yaml"),
- ),
- Config,
+ config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_debug_error_defaults.yaml"),
+ schema=Config,
debug=True,
)
except DatureConfigError:
- report = get_load_report(Config)
+ report = dature.get_load_report(Config)
assert report is not None
assert report.dataclass_name == "Config"
diff --git a/examples/docs/advanced/debug/advanced_debug_logging.py b/examples/docs/advanced/debug/advanced_debug_logging.py
index 3baf018..4a03917 100644
--- a/examples/docs/advanced/debug/advanced_debug_logging.py
+++ b/examples/docs/advanced/debug/advanced_debug_logging.py
@@ -5,7 +5,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
log_stream = io.StringIO()
handler = logging.StreamHandler(log_stream)
@@ -23,12 +23,10 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
)
log_lines = [line for line in log_stream.getvalue().splitlines() if "[Config]" in line]
diff --git a/examples/docs/advanced/debug/advanced_debug_report.py b/examples/docs/advanced/debug/advanced_debug_report.py
index de2b425..fde3641 100644
--- a/examples/docs/advanced/debug/advanced_debug_report.py
+++ b/examples/docs/advanced/debug/advanced_debug_report.py
@@ -1,9 +1,9 @@
-"""Debug report — get_load_report() to inspect which source provided each field."""
+"""Debug report — dature.get_load_report() to inspect which source provided each field."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, get_load_report, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,16 +15,14 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
debug=True,
)
-report = get_load_report(config)
+report = dature.get_load_report(config)
assert report is not None
origins = report.field_origins
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion.py b/examples/docs/advanced/env_expansion/advanced_env_expansion.py
index ef05feb..65d028f 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -23,9 +23,9 @@ class Config:
escape_percent: str
-config = load(
- Source(file_=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion.yaml", expand_env_vars="default"),
+ schema=Config,
)
assert config.simple == "https://api.example.com"
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py
index 7dec1d7..414a321 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_combined.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -18,9 +18,9 @@ class Config:
port: int
-config = load(
- Source(file_="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file="$DATURE_SOURCES_DIR/config.$DATURE_APP_ENV.yaml"),
+ schema=Config,
)
assert config.host == "prod.example.com"
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py
index 2c6960b..17a2206 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_dir.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -17,9 +17,9 @@ class Config:
port: int
-config = load(
- Source(file_="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file="$DATURE_SOURCES_DIR/advanced_env_expansion_file_path.yaml"),
+ schema=Config,
)
assert config.host == "localhost"
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py
index 794d638..5518ce9 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_file_path_name.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -17,9 +17,9 @@ class Config:
port: int
-config = load(
- Source(file_=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=str(SOURCES_DIR / "config.$DATURE_APP_ENV.yaml")),
+ schema=Config,
)
assert config.host == "prod.example.com"
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py
index add09de..bd2dfa8 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_merge.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -21,14 +21,12 @@ class Config:
disabled_unset_url: str
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default"
- Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"),
- Source(file_=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"),
- expand_env_vars="default", # global default for all sources
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_default.yaml"), # uses global "default"
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_empty.yaml", expand_env_vars="empty"),
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_merge_disabled.yaml", expand_env_vars="disabled"),
+ schema=Config,
+ expand_env_vars="default", # global default for all sources
)
assert config.default_set_url == "https://api.example.com/api"
diff --git a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py
index 78f4e02..918ae87 100644
--- a/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py
+++ b/examples/docs/advanced/env_expansion/advanced_env_expansion_strict.py
@@ -4,7 +4,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -17,9 +17,9 @@ class Config:
fallback_url: str
-config = load(
- Source(file_=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_env_expansion_strict.yaml", expand_env_vars="strict"),
+ schema=Config,
)
assert config.resolved_url == "https://api.example.com/api/v1"
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py
index 07f50d8..62ac98a 100644
--- a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.py
@@ -2,10 +2,8 @@
from dataclasses import dataclass
from pathlib import Path
-from textwrap import dedent
-from dature import F, FieldGroup, Merge, Source, load
-from dature.errors.exceptions import FieldGroupError
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -23,24 +21,11 @@ class Config:
database: Database
-# FieldGroup(F[Config].database, F[Config].port)
+# (dature.F[Config].database, dature.F[Config].port)
# expands to (database.host, database.port, port)
-try:
- load(
- Merge(
- Source(file_=SOURCES_DIR / "field_groups_nested_defaults.yaml"),
- Source(file_=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"),
- field_groups=(FieldGroup(F[Config].database, F[Config].port),),
- ),
- Config,
- )
-except FieldGroupError as exc:
- defaults_path = str(SOURCES_DIR / "field_groups_nested_defaults.yaml")
- overrides_path = str(SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml")
- assert str(exc) == dedent(f"""\
- Config field group errors (1)
-
- Field group (database.host, database.port, port) partially overridden in source 1
- changed: database.host (from source yaml1.2 '{overrides_path}'), port (from source yaml1.2 '{overrides_path}')
- unchanged: database.port (from source yaml1.2 '{defaults_path}')
-""")
+dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "field_groups_nested_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_field_groups_expansion_error_overrides.yaml"),
+ schema=Config,
+ field_groups=((dature.F[Config].database, dature.F[Config].port),),
+)
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr
new file mode 100644
index 0000000..100f974
--- /dev/null
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_expansion_error.stderr
@@ -0,0 +1,6 @@
+ | dature.errors.exceptions.FieldGroupError: Config field group errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldGroupViolationError: Field group (database.host, database.port, port) partially overridden in source 1
+ | changed: database.host (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_expansion_error_overrides.yaml'), port (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_expansion_error_overrides.yaml')
+ | unchanged: database.port (from source yaml1.2 '{SOURCES_DIR}field_groups_nested_defaults.yaml')
+ +------------------------------------
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py
index e73a05c..699b0ec 100644
--- a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.py
@@ -2,10 +2,8 @@
from dataclasses import dataclass
from pathlib import Path
-from textwrap import dedent
-from dature import F, FieldGroup, Merge, Source, load
-from dature.errors.exceptions import FieldGroupError
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -20,29 +18,12 @@ class Config:
password: str
-try:
- load(
- Merge(
- Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"),
- Source(file_=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"),
- field_groups=(
- FieldGroup(F[Config].host, F[Config].port),
- FieldGroup(F[Config].user, F[Config].password),
- ),
- ),
- Config,
- )
-except FieldGroupError as exc:
- defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml")
- overrides_path = str(SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml")
- assert str(exc) == dedent(f"""\
- Config field group errors (2)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source yaml1.2 '{overrides_path}')
- unchanged: port (from source yaml1.2 '{defaults_path}')
-
- Field group (user, password) partially overridden in source 1
- changed: user (from source yaml1.2 '{overrides_path}')
- unchanged: password (from source yaml1.2 '{defaults_path}')
-""")
+dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "advanced_field_groups_multiple_error_overrides.yaml"),
+ schema=Config,
+ field_groups=(
+ (dature.F[Config].host, dature.F[Config].port),
+ (dature.F[Config].user, dature.F[Config].password),
+ ),
+)
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr
new file mode 100644
index 0000000..965fc96
--- /dev/null
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_multiple_error.stderr
@@ -0,0 +1,10 @@
+ | dature.errors.exceptions.FieldGroupError: Config field group errors (2)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1
+ | changed: host (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_multiple_error_overrides.yaml')
+ | unchanged: port (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml')
+ +---------------- 2 ----------------
+ | dature.errors.exceptions.FieldGroupViolationError: Field group (user, password) partially overridden in source 1
+ | changed: user (from source yaml1.2 '{SOURCES_DIR}advanced_field_groups_multiple_error_overrides.yaml')
+ | unchanged: password (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml')
+ +------------------------------------
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py
index 7990e4f..e77bf78 100644
--- a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.py
@@ -2,10 +2,8 @@
from dataclasses import dataclass
from pathlib import Path
-from textwrap import dedent
-from dature import F, FieldGroup, Merge, Source, load
-from dature.errors.exceptions import FieldGroupError
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -20,25 +18,12 @@ class Config:
password: str
-try:
- load(
- Merge(
- Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"),
- Source(file_=SOURCES_DIR / "field_groups_partial_overrides.yaml"),
- field_groups=(
- FieldGroup(F[Config].host, F[Config].port),
- FieldGroup(F[Config].user, F[Config].password),
- ),
- ),
- Config,
- )
-except FieldGroupError as exc:
- defaults_path = str(SHARED_DIR / "common_field_groups_defaults.yaml")
- overrides_path = str(SOURCES_DIR / "field_groups_partial_overrides.yaml")
- assert str(exc) == dedent(f"""\
- Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source yaml1.2 '{overrides_path}')
- unchanged: port (from source yaml1.2 '{defaults_path}')
-""")
+dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "field_groups_partial_overrides.yaml"),
+ schema=Config,
+ field_groups=(
+ (dature.F[Config].host, dature.F[Config].port),
+ (dature.F[Config].user, dature.F[Config].password),
+ ),
+)
diff --git a/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr
new file mode 100644
index 0000000..52fd438
--- /dev/null
+++ b/examples/docs/advanced/field_groups/advanced_field_groups_nested_error.stderr
@@ -0,0 +1,6 @@
+ | dature.errors.exceptions.FieldGroupError: Config field group errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldGroupViolationError: Field group (host, port) partially overridden in source 1
+ | changed: host (from source yaml1.2 '{SOURCES_DIR}field_groups_partial_overrides.yaml')
+ | unchanged: port (from source yaml1.2 '{SHARED_DIR}common_field_groups_defaults.yaml')
+ +------------------------------------
diff --git a/examples/docs/advanced/merge_rules/merging_field_groups.py b/examples/docs/advanced/field_groups/field_groups_basic.py
similarity index 57%
rename from examples/docs/advanced/merge_rules/merging_field_groups.py
rename to examples/docs/advanced/field_groups/field_groups_basic.py
index 23c3c86..af2f0f6 100644
--- a/examples/docs/advanced/merge_rules/merging_field_groups.py
+++ b/examples/docs/advanced/field_groups/field_groups_basic.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldGroup, Merge, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -17,13 +17,11 @@ class Config:
password: str
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_field_groups_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_field_groups_overrides.yaml"),
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_field_groups_overrides.yaml"),
+ schema=Config,
+ field_groups=((dature.F[Config].host, dature.F[Config].port),),
)
assert config.host == "production.example.com"
diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py
index 36828e5..5143a28 100644
--- a/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py
+++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_callable.py
@@ -4,7 +4,7 @@
from pathlib import Path
from typing import Any
-from dature import F, Merge, MergeRule, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -20,14 +20,12 @@ def merge_tags(values: list[Any]) -> list[str]:
return sorted({v for lst in values for v in lst})
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.LAST_WINS,
- field_merges=(MergeRule(F[Config].tags, merge_tags),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="last_wins",
+ field_merges={dature.F[Config].tags: merge_tags},
)
assert config.host == "production.example.com"
diff --git a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py
index 2b9b577..4aafc20 100644
--- a/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py
+++ b/examples/docs/advanced/merge_rules/advanced_merge_rules_conflict.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,18 +15,16 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(
- MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),
- MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),
- MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),
- ),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={
+ dature.F[Config].host: "last_wins",
+ dature.F[Config].port: "last_wins",
+ dature.F[Config].tags: "append_unique",
+ },
)
assert config.host == "production.example.com"
diff --git a/examples/docs/advanced/merge_rules/merging_field_append.py b/examples/docs/advanced/merge_rules/merging_field_append.py
index 338e190..bee8872 100644
--- a/examples/docs/advanced/merge_rules/merging_field_append.py
+++ b/examples/docs/advanced/merge_rules/merging_field_append.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "append"},
)
assert config.tags == ["web", "default", "web", "api"]
diff --git a/examples/docs/advanced/merge_rules/merging_field_append_unique.py b/examples/docs/advanced/merge_rules/merging_field_append_unique.py
index 23f5ccd..8ea9f38 100644
--- a/examples/docs/advanced/merge_rules/merging_field_append_unique.py
+++ b/examples/docs/advanced/merge_rules/merging_field_append_unique.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "append_unique"},
)
assert config.tags == ["web", "default", "api"]
diff --git a/examples/docs/advanced/merge_rules/merging_field_first_wins.py b/examples/docs/advanced/merge_rules/merging_field_first_wins.py
index d5d4879..ebd5466 100644
--- a/examples/docs/advanced/merge_rules/merging_field_first_wins.py
+++ b/examples/docs/advanced/merge_rules/merging_field_first_wins.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.FIRST_WINS),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "first_wins"},
)
assert config.tags == ["web", "default"]
diff --git a/examples/docs/advanced/merge_rules/merging_field_last_wins.py b/examples/docs/advanced/merge_rules/merging_field_last_wins.py
index e4302f0..1a0544c 100644
--- a/examples/docs/advanced/merge_rules/merging_field_last_wins.py
+++ b/examples/docs/advanced/merge_rules/merging_field_last_wins.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.LAST_WINS),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "last_wins"},
)
assert config.tags == ["web", "api"]
diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend.py b/examples/docs/advanced/merge_rules/merging_field_prepend.py
index 65c36c9..6d448a3 100644
--- a/examples/docs/advanced/merge_rules/merging_field_prepend.py
+++ b/examples/docs/advanced/merge_rules/merging_field_prepend.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "prepend"},
)
assert config.tags == ["web", "api", "web", "default"]
diff --git a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py
index ee5d59a..f9a98c2 100644
--- a/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py
+++ b/examples/docs/advanced/merge_rules/merging_field_prepend_unique.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, FieldMergeStrategy, Merge, MergeRule, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,13 +13,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_field_base.yaml"),
- Source(file_=SOURCES_DIR / "merging_field_override.yaml"),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_base.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_field_override.yaml"),
+ schema=Config,
+ field_merges={dature.F[Config].tags: "prepend_unique"},
)
assert config.tags == ["web", "api", "default"]
diff --git a/examples/docs/advanced/merge_rules/merging_first_found.py b/examples/docs/advanced/merge_rules/merging_first_found.py
index 0f4848e..d8571c9 100644
--- a/examples/docs/advanced/merge_rules/merging_first_found.py
+++ b/examples/docs/advanced/merge_rules/merging_first_found.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -14,13 +14,11 @@ class Config:
port: int
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_first_found_primary.yaml"),
- Source(file_=SOURCES_DIR / "merging_first_found_fallback.yaml"),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_first_found_primary.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_first_found_fallback.yaml"),
+ schema=Config,
+ strategy="first_found",
)
assert config.host == "production-host"
diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken.py b/examples/docs/advanced/merge_rules/merging_skip_broken.py
index 24c8a48..0b0b7c7 100644
--- a/examples/docs/advanced/merge_rules/merging_skip_broken.py
+++ b/examples/docs/advanced/merge_rules/merging_skip_broken.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -16,12 +16,10 @@ class Config:
debug: bool = False
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "nonexistent.yaml", skip_if_broken=True),
+ schema=Config,
)
assert config.host == "localhost"
diff --git a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py
index cc19595..72a399d 100644
--- a/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py
+++ b/examples/docs/advanced/merge_rules/merging_skip_broken_per_source.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -16,20 +16,18 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"), # uses global
- Source(
- file_=SOURCES_DIR / "optional.yaml",
- skip_if_broken=True,
- ), # always skip if broken
- Source(
- file_=SHARED_DIR / "common_overrides.yaml",
- skip_if_broken=False,
- ), # never skip, even if global is True
- skip_broken_sources=True, # global default
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"), # uses global
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "optional.yaml",
+ skip_if_broken=True,
+ ), # always skip if broken
+ dature.Yaml12Source(
+ file=SHARED_DIR / "common_overrides.yaml",
+ skip_if_broken=False,
+ ), # never skip, even if global is True
+ schema=Config,
+ skip_broken_sources=True, # global default
)
assert config.host == "production.example.com"
diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid.py b/examples/docs/advanced/merge_rules/merging_skip_invalid.py
index b4e4752..16ec80f 100644
--- a/examples/docs/advanced/merge_rules/merging_skip_invalid.py
+++ b/examples/docs/advanced/merge_rules/merging_skip_invalid.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -14,9 +14,9 @@ class Config:
port: int = 3000
-config = load(
- Source(file_=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_skip_invalid_defaults.yaml", skip_if_invalid=True),
+ schema=Config,
)
assert config.host == "localhost"
diff --git a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py
index a0e39a2..d78b72e 100644
--- a/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py
+++ b/examples/docs/advanced/merge_rules/merging_skip_invalid_per_field.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,15 +15,13 @@ class Config:
timeout: int
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"),
- Source(
- file_=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml",
- skip_if_invalid=(F[Config].port, F[Config].timeout),
- ),
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "merging_skip_invalid_per_field_defaults.yaml"),
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "merging_skip_invalid_per_field_overrides.yaml",
+ skip_if_invalid=(dature.F[Config].port, dature.F[Config].timeout),
),
- Config,
+ schema=Config,
)
assert config.host == "production.example.com"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py
index 0bae70a..fb6fe84 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_docker_secrets.py
@@ -4,8 +4,7 @@
from pathlib import Path
from tempfile import TemporaryDirectory
-from dature import Source, load
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
+import dature
@dataclass
@@ -25,13 +24,12 @@ class Config:
(secrets_path / "database__host").write_text("flat-host")
(secrets_path / "database__port").write_text("3306")
- config = load(
- Source(
- file_=secrets_path,
- loader=DockerSecretsLoader,
+ config = dature.load(
+ dature.DockerSecretsSource(
+ dir_=secrets_path,
nested_resolve_strategy="json",
),
- Config,
+ schema=Config,
)
assert config.database.host == "json-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py
index 6109622..9f9386a 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_envfile.py
@@ -3,8 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
-from dature.sources_loader.env_ import EnvFileLoader
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -20,14 +19,13 @@ class Config:
database: Database
-config = load(
- Source(
- file_=SOURCES_DIR / "nested_resolve.env",
- loader=EnvFileLoader,
+config = dature.load(
+ dature.EnvFileSource(
+ file=SOURCES_DIR / "nested_resolve.env",
prefix="APP__",
nested_resolve_strategy="json",
),
- Config,
+ schema=Config,
)
assert config.database.host == "json-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py
index 322fae9..7f37668 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_flat.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
os.environ["APP__DATABASE__HOST"] = "flat-host"
@@ -22,9 +21,9 @@ class Config:
database: Database
-config = load(
- Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"),
- Config,
+config = dature.load(
+ dature.EnvSource(prefix="APP__", nested_resolve_strategy="flat"),
+ schema=Config,
)
assert config.database.host == "flat-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py
index f0ff36b..0f26fc1 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_global_json.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
os.environ["APP__DATABASE__HOST"] = "flat-host"
@@ -22,9 +21,9 @@ class Config:
database: Database
-config = load(
- Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="json"),
- Config,
+config = dature.load(
+ dature.EnvSource(prefix="APP__", nested_resolve_strategy="json"),
+ schema=Config,
)
assert config.database.host == "json-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py
index 702b176..144c7a0 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_no_conflict.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
# Only JSON form, no flat keys
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
@@ -26,9 +25,9 @@ class Config:
# Even with strategy="flat", JSON is parsed because there are no flat keys
-config = load(
- Source(loader=EnvLoader, prefix="APP__", nested_resolve_strategy="flat"),
- Config,
+config = dature.load(
+ dature.EnvSource(prefix="APP__", nested_resolve_strategy="flat"),
+ schema=Config,
)
assert config.database.host == "json-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_override.py b/examples/docs/advanced/nested_resolve/nested_resolve_override.py
index fa37a51..b63c675 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_override.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_override.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import F, Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
os.environ["APP__DATABASE__HOST"] = "flat-host"
@@ -33,14 +32,13 @@ class Config:
# Global: "flat" for everything, but database overridden to "json"
-config = load(
- Source(
- loader=EnvLoader,
+config = dature.load(
+ dature.EnvSource(
prefix="APP__",
nested_resolve_strategy="flat",
- nested_resolve={"json": (F[Config].database,)},
+ nested_resolve={"json": (dature.F[Config].database,)},
),
- Config,
+ schema=Config,
)
assert config.database.host == "json-host" # per-field override wins
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py
index a2f80b7..ce0d384 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_per_field.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import F, Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
os.environ["APP__DATABASE__HOST"] = "flat-host"
@@ -33,16 +32,15 @@ class Config:
# database uses JSON, cache uses flat keys
-config = load(
- Source(
- loader=EnvLoader,
+config = dature.load(
+ dature.EnvSource(
prefix="APP__",
nested_resolve={
- "json": (F[Config].database,),
- "flat": (F[Config].cache,),
+ "json": (dature.F[Config].database,),
+ "flat": (dature.F[Config].cache,),
},
),
- Config,
+ schema=Config,
)
assert config.database.host == "json-host"
diff --git a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py
index d959e8c..d7b668e 100644
--- a/examples/docs/advanced/nested_resolve/nested_resolve_problem.py
+++ b/examples/docs/advanced/nested_resolve/nested_resolve_problem.py
@@ -3,8 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
-from dature.sources_loader.env_ import EnvLoader
+import dature
os.environ["APP__DATABASE"] = '{"host": "json-host", "port": "5432"}'
os.environ["APP__DATABASE__HOST"] = "flat-host"
@@ -23,7 +22,7 @@ class Config:
# Without nested_resolve_strategy, flat keys win by default
-config = load(Source(loader=EnvLoader, prefix="APP__"), Config)
+config = dature.load(dature.EnvSource(prefix="APP__"), schema=Config)
assert config.database.host == "flat-host"
assert config.database.port == 3306
diff --git a/examples/docs/api_reference/api_reference_decorator_mode.py b/examples/docs/api_reference/api_reference_decorator_mode.py
new file mode 100644
index 0000000..5bb6acb
--- /dev/null
+++ b/examples/docs/api_reference/api_reference_decorator_mode.py
@@ -0,0 +1,23 @@
+"""Decorator mode — omit schema, get a decorator."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import dature
+
+SHARED_DIR = Path(__file__).parents[1] / "shared"
+
+
+@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"))
+@dataclass
+class Config:
+ host: str
+ port: int
+ debug: bool = False
+
+
+config = Config()
+
+assert config.host == "localhost"
+assert config.port == 8080
+assert config.debug is False
diff --git a/examples/docs/api_reference/api_reference_field_path.py b/examples/docs/api_reference/api_reference_field_path.py
new file mode 100644
index 0000000..9a7c64c
--- /dev/null
+++ b/examples/docs/api_reference/api_reference_field_path.py
@@ -0,0 +1,22 @@
+"""F[] factory for building field paths with validation."""
+
+from dataclasses import dataclass
+
+import dature
+
+
+@dataclass
+class Database:
+ host: str
+ port: int
+
+
+@dataclass
+class Config:
+ database: Database
+ host: str
+
+
+path_eager = dature.F[Config].host
+path_nested = dature.F[Config].database.host
+path_string = dature.F["Config"].host
diff --git a/examples/docs/api_reference/api_reference_function_mode.py b/examples/docs/api_reference/api_reference_function_mode.py
new file mode 100644
index 0000000..c86999f
--- /dev/null
+++ b/examples/docs/api_reference/api_reference_function_mode.py
@@ -0,0 +1,22 @@
+"""Function mode — pass schema, get an instance back."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import dature
+
+SHARED_DIR = Path(__file__).parents[1] / "shared"
+
+
+@dataclass
+class Config:
+ host: str
+ port: int
+ debug: bool = False
+
+
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
+
+assert config.host == "localhost"
+assert config.port == 8080
+assert config.debug is False
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py
index 7390a64..b275722 100644
--- a/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_basic.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -16,7 +16,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "dynaconf_basic.toml"), Config)
+config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "dynaconf_basic.toml"), schema=Config)
# config.hostt → AttributeError immediately
# config.port is always int — guaranteed
# --8<-- [end:basic]
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py
index 3afa371..dc61315 100644
--- a/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_merge.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,13 +15,11 @@ class Config:
# --8<-- [start:merge]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "dynaconf_merge_defaults.yaml"),
- Source(file_=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True),
- strategy=MergeStrategy.LAST_WINS,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "dynaconf_merge_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "dynaconf_merge_local.yaml", skip_if_broken=True),
+ schema=Config,
+ strategy="last_wins",
)
# --8<-- [end:merge]
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py
index dca7fa3..02ebda1 100644
--- a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Gt, Lt
from dature.validators.root import RootValidator
@@ -15,37 +14,23 @@
@dataclass
class Config:
host: str
- port: Annotated[int, Gt(value=0), Lt(value=65536)]
+ port: Annotated[int, Gt(0), Lt(65536)]
debug: bool = False
-# --8<-- [start:root-validators]
def check_debug_port(config: Config) -> bool:
return not (config.debug and config.port == 80)
-try:
- load(
- Source(
- file_=SOURCES_DIR / "dynaconf_root_validators_invalid.toml",
- root_validators=(
- RootValidator(
- func=check_debug_port,
- error_message="debug mode should not use port 80",
- ),
+dature.load(
+ dature.Toml11Source(
+ file=SOURCES_DIR / "dynaconf_root_validators_invalid.toml",
+ root_validators=(
+ RootValidator(
+ check_debug_port,
+ error_message="debug mode should not use port 80",
),
),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "dynaconf_root_validators_invalid.toml")
- assert str(exc) == "Config loading errors (1)"
- # fmt: off
- assert str(exc.exceptions[0]) == (
- " [] debug mode should not use port 80\n"
- f" └── FILE '{source}'"
- )
- # fmt: on
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:root-validators]
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr
new file mode 100644
index 0000000..9035282
--- /dev/null
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_root_validators.stderr
@@ -0,0 +1,5 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [] debug mode should not use port 80
+ | └── FILE '{SOURCES_DIR}dynaconf_root_validators_invalid.toml'
+ +------------------------------------
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py
index ded4216..f119529 100644
--- a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.py
@@ -4,34 +4,17 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Gt, Lt
SOURCES_DIR = Path(__file__).parent / "sources"
-# --8<-- [start:validators]
@dataclass
class Config:
host: str
- port: Annotated[int, Gt(value=0), Lt(value=65536)]
+ port: Annotated[int, Gt(0), Lt(65536)]
debug: bool = False
-try:
- load(Source(file_=SOURCES_DIR / "dynaconf_validators_invalid.toml"), Config)
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "dynaconf_validators_invalid.toml")
- assert str(exc) == "Config loading errors (1)"
- # fmt: off
- assert str(exc.exceptions[0]) == (
- " [port] Value must be greater than 0\n"
- " ├── port = -1\n"
- " │ ^^\n"
- f" └── FILE '{source}', line 2"
- )
- # fmt: on
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:validators]
+dature.load(dature.Toml11Source(file=SOURCES_DIR / "dynaconf_validators_invalid.toml"), schema=Config)
diff --git a/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr
new file mode 100644
index 0000000..a119963
--- /dev/null
+++ b/examples/docs/comparison/why-not-dynaconf/dynaconf_validators.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0
+ | ├── port = -1
+ | │ ^^
+ | └── FILE '{SOURCES_DIR}dynaconf_validators_invalid.toml', line 2
+ +------------------------------------
diff --git a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py
index 62e4082..3a634a8 100644
--- a/examples/docs/comparison/why-not-hydra/hydra_dataclass.py
+++ b/examples/docs/comparison/why-not-hydra/hydra_dataclass.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,7 +15,7 @@ class Config:
# --8<-- [start:dataclass]
-config = load(Source(file_=SOURCES_DIR / "hydra_defaults.yaml"), Config)
+config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "hydra_defaults.yaml"), schema=Config)
assert isinstance(config, Config)
# Full IDE support, type safety, __post_init__ works
# --8<-- [end:dataclass]
diff --git a/examples/docs/comparison/why-not-hydra/hydra_merge.py b/examples/docs/comparison/why-not-hydra/hydra_merge.py
index 5a13c26..ed004ea 100644
--- a/examples/docs/comparison/why-not-hydra/hydra_merge.py
+++ b/examples/docs/comparison/why-not-hydra/hydra_merge.py
@@ -1,9 +1,9 @@
-"""dature vs Hydra — multi-format merge with auto-detection."""
+"""dature vs Hydra — multi-format merge."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,13 +15,11 @@ class Config:
# --8<-- [start:merge]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "hydra_defaults.yaml"),
- Source(file_=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True),
- Source(prefix="APP_"),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "hydra_defaults.yaml"),
+ dature.Toml11Source(file=SOURCES_DIR / "hydra_config.toml", skip_if_broken=True),
+ dature.EnvSource(prefix="APP_"),
+ schema=Config,
)
# --8<-- [end:merge]
diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.py b/examples/docs/comparison/why-not-hydra/hydra_validators.py
index 7b5b24c..6166b9f 100644
--- a/examples/docs/comparison/why-not-hydra/hydra_validators.py
+++ b/examples/docs/comparison/why-not-hydra/hydra_validators.py
@@ -4,33 +4,16 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Gt, Lt
SOURCES_DIR = Path(__file__).parent / "sources"
-# --8<-- [start:validators]
@dataclass
class Config:
host: str
- port: Annotated[int, Gt(value=0), Lt(value=65536)] = 8080
+ port: Annotated[int, Gt(0), Lt(65536)] = 8080
-try:
- load(Source(file_=SOURCES_DIR / "hydra_validators_invalid.yaml"), Config)
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "hydra_validators_invalid.yaml")
- assert str(exc) == "Config loading errors (1)"
- # fmt: off
- assert str(exc.exceptions[0]) == (
- " [port] Value must be greater than 0\n"
- " ├── port: -1\n"
- " │ ^^\n"
- f" └── FILE '{source}', line 2"
- )
- # fmt: on
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:validators]
+dature.load(dature.Yaml12Source(file=SOURCES_DIR / "hydra_validators_invalid.yaml"), schema=Config)
diff --git a/examples/docs/comparison/why-not-hydra/hydra_validators.stderr b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr
new file mode 100644
index 0000000..84ae71e
--- /dev/null
+++ b/examples/docs/comparison/why-not-hydra/hydra_validators.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than 0
+ | ├── port: -1
+ | │ ^^
+ | └── FILE '{SOURCES_DIR}hydra_validators_invalid.yaml', line 2
+ +------------------------------------
diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py
deleted file mode 100644
index 5775f9d..0000000
--- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_auto_detect.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""dature vs pydantic-settings — auto-detection of file format."""
-
-from dataclasses import dataclass
-from pathlib import Path
-
-from dature import Source, load
-
-SOURCES_DIR = Path(__file__).parent / "sources"
-
-
-@dataclass
-class Config:
- host: str
- port: int
-
-
-# --8<-- [start:auto-detect]
-# Just change the file — dature picks the right loader
-yaml_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.yaml"), Config)
-toml_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.toml"), Config)
-json5_config = load(Source(file_=SOURCES_DIR / "pydantic_settings_auto_detect.json5"), Config)
-# --8<-- [end:auto-detect]
-
-assert yaml_config.host == "localhost"
-assert toml_config.host == "localhost"
-assert json5_config.host == "localhost"
diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py
index 12a7624..ebc2ccf 100644
--- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py
+++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_basic.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -16,7 +16,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "pydantic_settings_basic.yaml"), Config)
+config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_basic.yaml"), schema=Config)
# config.hostt → AttributeError immediately
# config.port is always int — guaranteed
# --8<-- [end:basic]
diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py
new file mode 100644
index 0000000..9412f49
--- /dev/null
+++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_formats.py
@@ -0,0 +1,25 @@
+"""dature vs pydantic-settings — multiple file formats."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import dature
+
+SOURCES_DIR = Path(__file__).parent / "sources"
+
+
+@dataclass
+class Config:
+ host: str
+ port: int
+
+
+# --8<-- [start:formats]
+yaml_config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_formats.yaml"), schema=Config)
+toml_config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "pydantic_settings_formats.toml"), schema=Config)
+json5_config = dature.load(dature.Json5Source(file=SOURCES_DIR / "pydantic_settings_formats.json5"), schema=Config)
+# --8<-- [end:formats]
+
+assert yaml_config.host == "localhost"
+assert toml_config.host == "localhost"
+assert json5_config.host == "localhost"
diff --git a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py
index bed427e..23392de 100644
--- a/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py
+++ b/examples/docs/comparison/why-not-pydantic-settings/pydantic_settings_merge.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,13 +15,11 @@ class Config:
# --8<-- [start:merge]
-config = load(
- Merge(
- Source(file_=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"),
- Source(file_=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True),
- Source(prefix="APP_"),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_merge_defaults.yaml"),
+ dature.Yaml12Source(file=SOURCES_DIR / "pydantic_settings_merge_local.yaml", skip_if_broken=True),
+ dature.EnvSource(prefix="APP_"),
+ schema=Config,
)
# --8<-- [end:merge]
diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.json5 b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.json5
similarity index 100%
rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.json5
rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.json5
diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.toml b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.toml
similarity index 100%
rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.toml
rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.toml
diff --git a/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.yaml b/examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.yaml
similarity index 100%
rename from examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_auto_detect.yaml
rename to examples/docs/comparison/why-not-pydantic-settings/sources/pydantic_settings_formats.yaml
diff --git a/examples/docs/features/masking/masking_by_name.py b/examples/docs/features/masking/masking_by_name.py
index c1ddff5..e78e57b 100644
--- a/examples/docs/features/masking/masking_by_name.py
+++ b/examples/docs/features/masking/masking_by_name.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Literal
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -16,17 +15,4 @@ class Config:
host: str
-try:
- load(Source(file_=SOURCES_DIR / "masking_by_name.yaml"), Config)
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_by_name.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert len(exc.exceptions) == 1
- assert str(exc.exceptions[0]) == (
- f" [password] Invalid variant: ''\n"
- f' ├── password: ""\n'
- f" │ ^^^^^^^^^^\n"
- f" └── FILE '{source}', line 1"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
+dature.load(dature.Yaml12Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config)
diff --git a/examples/docs/features/masking/masking_by_name.stderr b/examples/docs/features/masking/masking_by_name.stderr
new file mode 100644
index 0000000..e6d1d6b
--- /dev/null
+++ b/examples/docs/features/masking/masking_by_name.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [password] Invalid variant: ''
+ | ├── password: ""
+ | │ ^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}masking_by_name.yaml', line 1
+ +------------------------------------
diff --git a/examples/docs/features/masking/masking_classic_style.py b/examples/docs/features/masking/masking_classic_style.py
index 1db8c52..e1b267d 100644
--- a/examples/docs/features/masking/masking_classic_style.py
+++ b/examples/docs/features/masking/masking_classic_style.py
@@ -1,16 +1,15 @@
-"""Classic ab*****cd masking style via configure()."""
+"""Classic ab*****cd masking style via dature.configure()."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, configure, load
-from dature.config import MaskingConfig
+import dature
from dature.masking.masking import mask_value
SOURCES_DIR = Path(__file__).parent / "sources"
# --8<-- [start:classic-style]
-configure(masking=MaskingConfig(mask="*****", visible_prefix=2, visible_suffix=2))
+dature.configure(masking={"mask": "*****", "visible_prefix": 2, "visible_suffix": 2})
# "my_secret_password" → "my*****rd"
# "ab" → "ab" (too short — shown as-is)
# --8<-- [end:classic-style]
@@ -22,8 +21,8 @@ class Config:
host: str
-config = load(Source(file_=SOURCES_DIR / "masking_by_name.yaml"), Config)
+config = dature.load(dature.Yaml12Source(file=SOURCES_DIR / "masking_by_name.yaml"), schema=Config)
assert mask_value("my_secret_password") == "my*****rd"
assert mask_value("ab") == "ab"
-configure(masking=MaskingConfig())
+dature.configure(masking={})
diff --git a/examples/docs/features/masking/masking_heuristic.py b/examples/docs/features/masking/masking_heuristic.py
index d11f5a2..26ee856 100644
--- a/examples/docs/features/masking/masking_heuristic.py
+++ b/examples/docs/features/masking/masking_heuristic.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Literal
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -16,20 +15,7 @@ class Config:
host: str
-try:
- load(
- Source(file_=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_heuristic.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert len(exc.exceptions) == 1
- assert str(exc.exceptions[0]) == (
- " [connection_id] Invalid variant: ''\n"
- ' ├── connection_id: ""\n'
- " │ ^^^^^^^^^^\n"
- f" └── FILE '{source}', line 1"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
+dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "masking_heuristic.yaml", mask_secrets=True),
+ schema=Config,
+)
diff --git a/examples/docs/features/masking/masking_heuristic.stderr b/examples/docs/features/masking/masking_heuristic.stderr
new file mode 100644
index 0000000..3059a5c
--- /dev/null
+++ b/examples/docs/features/masking/masking_heuristic.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [connection_id] Invalid variant: ''
+ | ├── connection_id: ""
+ | │ ^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}masking_heuristic.yaml', line 1
+ +------------------------------------
diff --git a/examples/docs/features/masking/masking_merge_mode.py b/examples/docs/features/masking/masking_merge_mode.py
index c2a5007..0355297 100644
--- a/examples/docs/features/masking/masking_merge_mode.py
+++ b/examples/docs/features/masking/masking_merge_mode.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Annotated
-from dature import Merge, Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.string import MinLength
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,30 +14,14 @@
class Config:
host: str
port: int
- api_key: Annotated[str, MinLength(value=20)] = ""
+ api_key: Annotated[str, MinLength(20)] = ""
-# --8<-- [start:merge-mode]
-try:
- load(
- Merge(
- Source(file_=SOURCES_DIR / "masking_merge_mode_defaults.yaml"),
- Source(
- file_=SOURCES_DIR / "masking_merge_mode_secrets.yaml",
- secret_field_names=("api_key",),
- ),
- ),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_merge_mode_secrets.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert str(exc.exceptions[0]) == (
- " [api_key] Value must have at least 20 characters\n"
- ' ├── api_key: ""\n'
- " │ ^^^^^^^^^^\n"
- f" └── FILE '{source}', line 1"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:merge-mode]
+dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "masking_merge_mode_defaults.yaml"),
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "masking_merge_mode_secrets.yaml",
+ secret_field_names=("api_key",),
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/features/masking/masking_merge_mode.stderr b/examples/docs/features/masking/masking_merge_mode.stderr
new file mode 100644
index 0000000..e5c416c
--- /dev/null
+++ b/examples/docs/features/masking/masking_merge_mode.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters
+ | ├── api_key: ""
+ | │ ^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}masking_merge_mode_secrets.yaml', line 1
+ +------------------------------------
diff --git a/examples/docs/features/masking/masking_no_mask.py b/examples/docs/features/masking/masking_no_mask.py
index 8c9761e..ad63725 100644
--- a/examples/docs/features/masking/masking_no_mask.py
+++ b/examples/docs/features/masking/masking_no_mask.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.string import MinLength
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,28 +12,14 @@
@dataclass
class Config:
- api_key: Annotated[str, MinLength(value=20)]
+ api_key: Annotated[str, MinLength(20)]
host: str
-# --8<-- [start:no-mask]
-try:
- load(
- Source(
- file_=SOURCES_DIR / "masking_per_source.yaml",
- mask_secrets=False,
- ),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_per_source.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert str(exc.exceptions[0]) == (
- " [api_key] Value must have at least 20 characters\n"
- ' ├── api_key: "short"\n'
- " │ ^^^^^\n"
- f" └── FILE '{source}', line 1"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:no-mask]
+dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "masking_per_source.yaml",
+ mask_secrets=False,
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/features/masking/masking_no_mask.stderr b/examples/docs/features/masking/masking_no_mask.stderr
new file mode 100644
index 0000000..27375d2
--- /dev/null
+++ b/examples/docs/features/masking/masking_no_mask.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters
+ | ├── api_key: "short"
+ | │ ^^^^^
+ | └── FILE '{SOURCES_DIR}masking_per_source.yaml', line 1
+ +------------------------------------
diff --git a/examples/docs/features/masking/masking_per_source.py b/examples/docs/features/masking/masking_per_source.py
index a5a6d72..0c969a4 100644
--- a/examples/docs/features/masking/masking_per_source.py
+++ b/examples/docs/features/masking/masking_per_source.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.string import MinLength
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -13,28 +12,14 @@
@dataclass
class Config:
- api_key: Annotated[str, MinLength(value=20)]
+ api_key: Annotated[str, MinLength(20)]
host: str
-# --8<-- [start:per-source]
-try:
- load(
- Source(
- file_=SOURCES_DIR / "masking_per_source.yaml",
- secret_field_names=("api_key",),
- ),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_per_source.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert str(exc.exceptions[0]) == (
- " [api_key] Value must have at least 20 characters\n"
- ' ├── api_key: ""\n'
- " │ ^^^^^^^^^^\n"
- f" └── FILE '{source}', line 1"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
-# --8<-- [end:per-source]
+dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "masking_per_source.yaml",
+ secret_field_names=("api_key",),
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/features/masking/masking_per_source.stderr b/examples/docs/features/masking/masking_per_source.stderr
new file mode 100644
index 0000000..c787795
--- /dev/null
+++ b/examples/docs/features/masking/masking_per_source.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [api_key] Value must have at least 20 characters
+ | ├── api_key: ""
+ | │ ^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}masking_per_source.yaml', line 1
+ +------------------------------------
diff --git a/examples/docs/features/masking/masking_secret_str.py b/examples/docs/features/masking/masking_secret_str.py
index 4734a7f..fed1b28 100644
--- a/examples/docs/features/masking/masking_secret_str.py
+++ b/examples/docs/features/masking/masking_secret_str.py
@@ -3,8 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.fields.payment_card import PaymentCardNumber
from dature.fields.secret_str import SecretStr
@@ -18,20 +17,7 @@ class Config:
host: str
-try:
- config = load(
- Source(file_=SOURCES_DIR / "masking_secret_str.yaml"),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "masking_secret_str.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert len(exc.exceptions) == 1
- assert str(exc.exceptions[0]) == (
- " [card_number] Card number must contain only digits\n"
- f' ├── card_number: ""\n'
- " │ ^^^^^^^^^^\n"
- f" └── FILE '{source}', line 2"
- )
-else:
- raise AssertionError("Expected DatureConfigError")
+dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "masking_secret_str.yaml"),
+ schema=Config,
+)
diff --git a/examples/docs/features/masking/masking_secret_str.stderr b/examples/docs/features/masking/masking_secret_str.stderr
new file mode 100644
index 0000000..b1fc7ed
--- /dev/null
+++ b/examples/docs/features/masking/masking_secret_str.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [card_number] Card number must contain only digits
+ | ├── card_number: ""
+ | │ ^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}masking_secret_str.yaml', line 2
+ +------------------------------------
diff --git a/examples/docs/features/merging/merging_basic.py b/examples/docs/features/merging/merging_basic.py
index 9d5c175..536f512 100644
--- a/examples/docs/features/merging/merging_basic.py
+++ b/examples/docs/features/merging/merging_basic.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,13 +15,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.LAST_WINS,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="last_wins",
)
assert config.host == "production.example.com"
diff --git a/examples/docs/features/merging/merging_strategies.py b/examples/docs/features/merging/merging_strategies.py
index 05a4d4b..00b3af1 100644
--- a/examples/docs/features/merging/merging_strategies.py
+++ b/examples/docs/features/merging/merging_strategies.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,22 +15,18 @@ class Config:
tags: list[str]
-last_wins = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.LAST_WINS,
- ),
- Config,
+last_wins = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="last_wins",
)
-first_wins = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.FIRST_WINS,
- ),
- Config,
+first_wins = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="first_wins",
)
assert last_wins.host == "production.example.com"
diff --git a/examples/docs/features/merging/merging_strategy_first_found.py b/examples/docs/features/merging/merging_strategy_first_found.py
index d17859f..f78905a 100644
--- a/examples/docs/features/merging/merging_strategy_first_found.py
+++ b/examples/docs/features/merging/merging_strategy_first_found.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,14 +15,12 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "nonexistent.yaml"),
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "nonexistent.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="first_found",
)
# nonexistent.yaml is skipped, common_defaults.yaml is used entirely
diff --git a/examples/docs/features/merging/merging_strategy_first_wins.py b/examples/docs/features/merging/merging_strategy_first_wins.py
index 0322dca..6331d63 100644
--- a/examples/docs/features/merging/merging_strategy_first_wins.py
+++ b/examples/docs/features/merging/merging_strategy_first_wins.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,13 +15,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.FIRST_WINS,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="first_wins",
)
assert config.host == "localhost"
diff --git a/examples/docs/features/merging/merging_strategy_last_wins.py b/examples/docs/features/merging/merging_strategy_last_wins.py
index 862f0eb..7e458d4 100644
--- a/examples/docs/features/merging/merging_strategy_last_wins.py
+++ b/examples/docs/features/merging/merging_strategy_last_wins.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,13 +15,11 @@ class Config:
tags: list[str]
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- strategy=MergeStrategy.LAST_WINS,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
+ strategy="last_wins",
)
assert config.host == "production.example.com"
diff --git a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py
index 0e9513a..aa22dac 100644
--- a/examples/docs/features/merging/merging_strategy_raise_on_conflict.py
+++ b/examples/docs/features/merging/merging_strategy_raise_on_conflict.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Merge, MergeStrategy, Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,13 +15,11 @@ class Config:
debug: bool
-config = load(
- Merge(
- Source(file_=SHARED_DIR / "common_raise_on_conflict_a.yaml"),
- Source(file_=SHARED_DIR / "common_raise_on_conflict_b.yaml"),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_raise_on_conflict_a.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_raise_on_conflict_b.yaml"),
+ schema=Config,
+ strategy="raise_on_conflict",
)
# Disjoint keys — no conflict
diff --git a/examples/docs/features/merging/merging_tuple_shorthand.py b/examples/docs/features/merging/merging_tuple_shorthand.py
index 9c176a4..b82b153 100644
--- a/examples/docs/features/merging/merging_tuple_shorthand.py
+++ b/examples/docs/features/merging/merging_tuple_shorthand.py
@@ -1,9 +1,9 @@
-"""Tuple shorthand — implicit LAST_WINS merge."""
+"""Multiple sources — implicit LAST_WINS merge."""
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
@@ -15,12 +15,10 @@ class Config:
tags: list[str]
-config = load(
- (
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(file_=SHARED_DIR / "common_overrides.yaml"),
- ),
- Config,
+config = dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.Yaml12Source(file=SHARED_DIR / "common_overrides.yaml"),
+ schema=Config,
)
assert config.host == "production.example.com"
diff --git a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py
index c25a9c8..e4b73cf 100644
--- a/examples/docs/features/merging/merging_tuple_shorthand_decorator.py
+++ b/examples/docs/features/merging/merging_tuple_shorthand_decorator.py
@@ -1,21 +1,19 @@
-"""Tuple shorthand as a decorator — implicit LAST_WINS merge."""
+"""Multiple sources as a decorator — implicit LAST_WINS merge."""
import os
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SHARED_DIR = Path(__file__).parents[2] / "shared"
os.environ["APP_HOST"] = "env_localhost"
-@load(
- (
- Source(file_=SHARED_DIR / "common_defaults.yaml"),
- Source(prefix="APP_"),
- ),
+@dature.load(
+ dature.Yaml12Source(file=SHARED_DIR / "common_defaults.yaml"),
+ dature.EnvSource(prefix="APP_"),
)
@dataclass
class Config:
diff --git a/examples/docs/features/naming/naming_field_mapping.py b/examples/docs/features/naming/naming_field_mapping.py
index c229d94..aa046ef 100644
--- a/examples/docs/features/naming/naming_field_mapping.py
+++ b/examples/docs/features/naming/naming_field_mapping.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,16 +15,16 @@ class DbConfig:
pool_size: int
-config = load(
- Source(
- file_=SOURCES_DIR / "naming_field_mapping.yaml",
+config = dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "naming_field_mapping.yaml",
field_mapping={
- F[DbConfig].database_url: "db_url",
- F[DbConfig].secret_key: "key",
- F[DbConfig].pool_size: "pool",
+ dature.F[DbConfig].database_url: "db_url",
+ dature.F[DbConfig].secret_key: "key",
+ dature.F[DbConfig].pool_size: "pool",
},
),
- DbConfig,
+ schema=DbConfig,
)
assert config.database_url == "postgresql://localhost:5432/mydb"
diff --git a/examples/docs/features/naming/naming_field_mapping_aliases.py b/examples/docs/features/naming/naming_field_mapping_aliases.py
new file mode 100644
index 0000000..9b6aaaa
--- /dev/null
+++ b/examples/docs/features/naming/naming_field_mapping_aliases.py
@@ -0,0 +1,15 @@
+"""field_mapping — multiple aliases for a single field."""
+
+from dataclasses import dataclass
+
+import dature
+
+
+@dataclass
+class Config:
+ name: str
+
+
+# --8<-- [start:aliases]
+field_mapping = {dature.F[Config].name: ("fullName", "userName")}
+# --8<-- [end:aliases]
diff --git a/examples/docs/features/naming/naming_field_mapping_decorator.py b/examples/docs/features/naming/naming_field_mapping_decorator.py
new file mode 100644
index 0000000..a4df74d
--- /dev/null
+++ b/examples/docs/features/naming/naming_field_mapping_decorator.py
@@ -0,0 +1,9 @@
+"""Decorator mode — use string instead of class reference in F[]."""
+
+from dature import F
+
+# --8<-- [start:decorator]
+field_ref = F["Config"].name # autocomplete doesn't work here
+# --8<-- [end:decorator]
+
+assert field_ref is not None
diff --git a/examples/docs/features/naming/naming_name_style.py b/examples/docs/features/naming/naming_name_style.py
index de3b575..b8e0878 100644
--- a/examples/docs/features/naming/naming_name_style.py
+++ b/examples/docs/features/naming/naming_name_style.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -16,9 +16,9 @@ class ApiConfig:
base_url: str
-config = load(
- Source(file_=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"),
- ApiConfig,
+config = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "naming_name_style.yaml", name_style="lower_camel"),
+ schema=ApiConfig,
)
assert config.user_name == "admin"
diff --git a/examples/docs/features/naming/naming_nested_fields.py b/examples/docs/features/naming/naming_nested_fields.py
index 05d5bd5..9845e5b 100644
--- a/examples/docs/features/naming/naming_nested_fields.py
+++ b/examples/docs/features/naming/naming_nested_fields.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -20,17 +20,17 @@ class User:
address: Address
-config = load(
- Source(
- file_=SOURCES_DIR / "naming_nested_fields.yaml",
+config = dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "naming_nested_fields.yaml",
field_mapping={
- F[User].name: "fullName",
- F[User].address: "location",
- F[Address].city: "cityName",
- F[Address].street: "streetName",
+ dature.F[User].name: "fullName",
+ dature.F[User].address: "location",
+ dature.F[Address].city: "cityName",
+ dature.F[Address].street: "streetName",
},
),
- User,
+ schema=User,
)
assert config.name == "Alice"
diff --git a/examples/docs/features/naming/naming_prefix.py b/examples/docs/features/naming/naming_prefix.py
index fac6ad6..6db4437 100644
--- a/examples/docs/features/naming/naming_prefix.py
+++ b/examples/docs/features/naming/naming_prefix.py
@@ -3,7 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
+import dature
os.environ["MYAPP_HOST"] = "localhost"
os.environ["MYAPP_PORT"] = "9090"
@@ -17,7 +17,7 @@ class Config:
debug: bool = False
-config = load(Source(prefix="MYAPP_"), Config)
+config = dature.load(dature.EnvSource(prefix="MYAPP_"), schema=Config)
assert config.host == "localhost"
assert config.port == 9090
diff --git a/examples/docs/features/naming/naming_prefix_nested.py b/examples/docs/features/naming/naming_prefix_nested.py
index 7003763..cbe9d3b 100644
--- a/examples/docs/features/naming/naming_prefix_nested.py
+++ b/examples/docs/features/naming/naming_prefix_nested.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -14,7 +14,10 @@ class Database:
port: int
-db = load(Source(file_=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"), Database)
+db = dature.load(
+ dature.Yaml12Source(file=SOURCES_DIR / "naming_prefix_nested.yaml", prefix="app.database"),
+ schema=Database,
+)
assert db.host == "localhost"
assert db.port == 5432
diff --git a/examples/docs/features/naming/naming_split_symbols.py b/examples/docs/features/naming/naming_split_symbols.py
index 285b83c..472a6cd 100644
--- a/examples/docs/features/naming/naming_split_symbols.py
+++ b/examples/docs/features/naming/naming_split_symbols.py
@@ -3,7 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
+import dature
os.environ["NS_DB__HOST"] = "localhost"
os.environ["NS_DB__PORT"] = "5432"
@@ -20,7 +20,7 @@ class Config:
db: Database
-config = load(Source(prefix="NS_", split_symbols="__"), Config)
+config = dature.load(dature.EnvSource(prefix="NS_", split_symbols="__"), schema=Config)
assert config.db.host == "localhost"
assert config.db.port == 5432
diff --git a/examples/docs/features/validation/validation_annotated.py b/examples/docs/features/validation/validation_annotated.py
index 83b66ef..09d51c8 100644
--- a/examples/docs/features/validation/validation_annotated.py
+++ b/examples/docs/features/validation/validation_annotated.py
@@ -4,8 +4,7 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Ge, Le
from dature.validators.sequence import MinItems, UniqueItems
from dature.validators.string import MaxLength, MinLength
@@ -15,42 +14,13 @@
@dataclass
class ServiceConfig:
- port: Annotated[int, Ge(value=1), Le(value=65535)]
- name: Annotated[str, MinLength(value=3), MaxLength(value=50)]
- tags: Annotated[list[str], MinItems(value=1), UniqueItems()]
- workers: Annotated[int, Ge(value=1)]
+ port: Annotated[int, Ge(1), Le(65535)]
+ name: Annotated[str, MinLength(3), MaxLength(50)]
+ tags: Annotated[list[str], MinItems(1), UniqueItems()]
+ workers: Annotated[int, Ge(1)]
-try:
- load(
- Source(file_=SOURCES_DIR / "validation_annotated_invalid.json5"),
- ServiceConfig,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "validation_annotated_invalid.json5")
- assert str(exc) == "ServiceConfig loading errors (4)"
- assert len(exc.exceptions) == 4
- assert str(exc.exceptions[0]) == (
- f" [port] Value must be greater than or equal to 1\n"
- f" ├── port: 0,\n"
- f" │ ^\n"
- f" └── FILE '{source}', line 3"
- )
- assert str(exc.exceptions[1]) == (
- f" [name] Value must have at least 3 characters\n"
- f' ├── name: "ab",\n'
- f" │ ^^\n"
- f" └── FILE '{source}', line 4"
- )
- assert str(exc.exceptions[2]) == (
- f" [tags] Value must contain unique items\n"
- f' ├── tags: ["web", "web"],\n'
- f" │ ^^^^^^^^^^^^^^\n"
- f" └── FILE '{source}', line 5"
- )
- assert str(exc.exceptions[3]) == (
- f" [workers] Value must be greater than or equal to 1\n"
- f" ├── workers: 0,\n"
- f" │ ^\n"
- f" └── FILE '{source}', line 6"
- )
+dature.load(
+ dature.Json5Source(file=SOURCES_DIR / "validation_annotated_invalid.json5"),
+ schema=ServiceConfig,
+)
diff --git a/examples/docs/features/validation/validation_annotated.stderr b/examples/docs/features/validation/validation_annotated.stderr
new file mode 100644
index 0000000..a0a941e
--- /dev/null
+++ b/examples/docs/features/validation/validation_annotated.stderr
@@ -0,0 +1,22 @@
+ | dature.errors.exceptions.DatureConfigError: ServiceConfig loading errors (4)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1
+ | ├── port: 0,
+ | │ ^
+ | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 3
+ +---------------- 2 ----------------
+ | dature.errors.exceptions.FieldLoadError: [name] Value must have at least 3 characters
+ | ├── name: "ab",
+ | │ ^^
+ | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 4
+ +---------------- 3 ----------------
+ | dature.errors.exceptions.FieldLoadError: [tags] Value must contain unique items
+ | ├── tags: ["web", "web"],
+ | │ ^^^^^^^^^^^^^^
+ | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 5
+ +---------------- 4 ----------------
+ | dature.errors.exceptions.FieldLoadError: [workers] Value must be greater than or equal to 1
+ | ├── workers: 0,
+ | │ ^
+ | └── FILE '{SOURCES_DIR}validation_annotated_invalid.json5', line 6
+ +------------------------------------
diff --git a/examples/docs/features/validation/validation_annotated_combined.py b/examples/docs/features/validation/validation_annotated_combined.py
new file mode 100644
index 0000000..48ff297
--- /dev/null
+++ b/examples/docs/features/validation/validation_annotated_combined.py
@@ -0,0 +1,15 @@
+"""Multiple Annotated validators can be combined on a single field."""
+
+from dataclasses import dataclass
+from typing import Annotated
+
+from dature.validators.number import Ge, Le
+from dature.validators.sequence import MaxItems, MinItems, UniqueItems
+
+
+@dataclass
+class Config:
+ # --8<-- [start:combined]
+ port: Annotated[int, Ge(1), Le(65535)]
+ tags: Annotated[list[str], MinItems(1), MaxItems(10), UniqueItems()]
+ # --8<-- [end:combined]
diff --git a/examples/docs/features/validation/validation_custom.py b/examples/docs/features/validation/validation_custom.py
index 5e284cf..ad3d737 100644
--- a/examples/docs/features/validation/validation_custom.py
+++ b/examples/docs/features/validation/validation_custom.py
@@ -5,14 +5,13 @@
from pathlib import Path
from typing import Annotated
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Ge
SOURCES_DIR = Path(__file__).parent / "sources"
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Divisible:
value: int
error_message: str = "Value must be divisible by {value}"
@@ -32,21 +31,10 @@ class ServiceConfig:
port: int
name: str
tags: list[str]
- workers: Annotated[int, Ge(value=1), Divisible(value=2)]
-
-
-try:
- load(
- Source(file_=SOURCES_DIR / "validation_custom_invalid.json5"),
- ServiceConfig,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "validation_custom_invalid.json5")
- assert str(exc) == "ServiceConfig loading errors (1)"
- assert len(exc.exceptions) == 1
- assert str(exc.exceptions[0]) == (
- f" [workers] Value must be divisible by 2\n"
- f" ├── workers: 3,\n"
- f" │ ^\n"
- f" └── FILE '{source}', line 5"
- )
+ workers: Annotated[int, Ge(1), Divisible(2)]
+
+
+dature.load(
+ dature.Json5Source(file=SOURCES_DIR / "validation_custom_invalid.json5"),
+ schema=ServiceConfig,
+)
diff --git a/examples/docs/features/validation/validation_custom.stderr b/examples/docs/features/validation/validation_custom.stderr
new file mode 100644
index 0000000..89acce4
--- /dev/null
+++ b/examples/docs/features/validation/validation_custom.stderr
@@ -0,0 +1,7 @@
+ | dature.errors.exceptions.DatureConfigError: ServiceConfig loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [workers] Value must be divisible by 2
+ | ├── workers: 3,
+ | │ ^
+ | └── FILE '{SOURCES_DIR}validation_custom_invalid.json5', line 5
+ +------------------------------------
diff --git a/examples/docs/features/validation/validation_metadata.py b/examples/docs/features/validation/validation_metadata.py
index 333d35b..2da9fe7 100644
--- a/examples/docs/features/validation/validation_metadata.py
+++ b/examples/docs/features/validation/validation_metadata.py
@@ -3,8 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import F, Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.number import Ge, Lt
from dature.validators.string import MinLength
@@ -18,30 +17,13 @@ class Config:
debug: bool = False
-try:
- load(
- Source(
- file_=SOURCES_DIR / "validation_metadata_invalid.yaml",
- validators={
- F[Config].host: MinLength(value=1),
- F[Config].port: (Ge(value=1), Lt(value=65536)),
- },
- ),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "validation_metadata_invalid.yaml")
- assert str(exc) == "Config loading errors (2)"
- assert len(exc.exceptions) == 2
- assert str(exc.exceptions[0]) == (
- f" [host] Value must have at least 1 characters\n"
- f' ├── host: ""\n'
- f" │ ^^\n"
- f" └── FILE '{source}', line 1"
- ) # fmt: skip
- assert str(exc.exceptions[1]) == (
- f" [port] Value must be greater than or equal to 1\n"
- f" ├── port: 0\n"
- f" │ ^\n"
- f" └── FILE '{source}', line 2"
- )
+dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "validation_metadata_invalid.yaml",
+ validators={
+ dature.F[Config].host: MinLength(1),
+ dature.F[Config].port: (Ge(1), Lt(65536)),
+ },
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/features/validation/validation_metadata.stderr b/examples/docs/features/validation/validation_metadata.stderr
new file mode 100644
index 0000000..995c048
--- /dev/null
+++ b/examples/docs/features/validation/validation_metadata.stderr
@@ -0,0 +1,12 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (2)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [host] Value must have at least 1 characters
+ | ├── host: ""
+ | │ ^^
+ | └── FILE '{SOURCES_DIR}validation_metadata_invalid.yaml', line 1
+ +---------------- 2 ----------------
+ | dature.errors.exceptions.FieldLoadError: [port] Value must be greater than or equal to 1
+ | ├── port: 0
+ | │ ^
+ | └── FILE '{SOURCES_DIR}validation_metadata_invalid.yaml', line 2
+ +------------------------------------
diff --git a/examples/docs/features/validation/validation_metadata_nested.py b/examples/docs/features/validation/validation_metadata_nested.py
new file mode 100644
index 0000000..2137989
--- /dev/null
+++ b/examples/docs/features/validation/validation_metadata_nested.py
@@ -0,0 +1,26 @@
+"""Metadata validators for nested dataclass fields."""
+
+from dataclasses import dataclass
+
+import dature
+from dature.validators.number import Gt
+from dature.validators.string import MinLength
+
+
+@dataclass
+class Database:
+ host: str
+ port: int
+
+
+@dataclass
+class Config:
+ database: Database
+
+
+# --8<-- [start:nested]
+validators = {
+ dature.F[Config].database.host: MinLength(1),
+ dature.F[Config].database.port: Gt(0),
+}
+# --8<-- [end:nested]
diff --git a/examples/docs/features/validation/validation_metadata_syntax.py b/examples/docs/features/validation/validation_metadata_syntax.py
new file mode 100644
index 0000000..7c2402b
--- /dev/null
+++ b/examples/docs/features/validation/validation_metadata_syntax.py
@@ -0,0 +1,21 @@
+"""Metadata validators syntax — single validator vs tuple for multiple."""
+
+from dataclasses import dataclass
+
+import dature
+from dature.validators.number import Gt, Lt
+from dature.validators.string import MinLength
+
+
+@dataclass
+class Config:
+ host: str
+ port: int
+
+
+# --8<-- [start:syntax]
+validators = {
+ dature.F[Config].port: (Gt(0), Lt(65536)), # tuple for multiple
+ dature.F[Config].host: MinLength(1), # single, no tuple needed
+}
+# --8<-- [end:syntax]
diff --git a/examples/docs/features/validation/validation_post_init.py b/examples/docs/features/validation/validation_post_init.py
index 78c36cf..29ac938 100644
--- a/examples/docs/features/validation/validation_post_init.py
+++ b/examples/docs/features/validation/validation_post_init.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -24,7 +24,4 @@ def address(self) -> str:
return f"{self.host}:{self.port}"
-try:
- load(Source(file_=SOURCES_DIR / "validation_post_init_invalid.yaml"), Config)
-except ValueError as exc:
- assert str(exc) == "port must be between 1 and 65535, got 99999"
+dature.load(dature.Yaml12Source(file=SOURCES_DIR / "validation_post_init_invalid.yaml"), schema=Config)
diff --git a/examples/docs/features/validation/validation_post_init.stderr b/examples/docs/features/validation/validation_post_init.stderr
new file mode 100644
index 0000000..5bfa833
--- /dev/null
+++ b/examples/docs/features/validation/validation_post_init.stderr
@@ -0,0 +1 @@
+ValueError: port must be between 1 and 65535, got 99999
diff --git a/examples/docs/features/validation/validation_root.py b/examples/docs/features/validation/validation_root.py
index bf6fc6c..4b447ab 100644
--- a/examples/docs/features/validation/validation_root.py
+++ b/examples/docs/features/validation/validation_root.py
@@ -3,8 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+import dature
from dature.validators.root import RootValidator
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -23,23 +22,15 @@ def check_debug_not_on_production(obj: Config) -> bool:
return True
-try:
- load(
- Source(
- file_=SOURCES_DIR / "validation_root_invalid.yaml",
- root_validators=(
- RootValidator(
- func=check_debug_not_on_production,
- error_message="debug=True is not allowed on non-localhost hosts",
- ),
+dature.load(
+ dature.Yaml12Source(
+ file=SOURCES_DIR / "validation_root_invalid.yaml",
+ root_validators=(
+ RootValidator(
+ func=check_debug_not_on_production,
+ error_message="debug=True is not allowed on non-localhost hosts",
),
),
- Config,
- )
-except DatureConfigError as exc:
- source = str(SOURCES_DIR / "validation_root_invalid.yaml")
- assert str(exc) == "Config loading errors (1)"
- assert len(exc.exceptions) == 1
- assert str(exc.exceptions[0]) == (
- f" [] debug=True is not allowed on non-localhost hosts\n └── FILE '{source}'"
- )
+ ),
+ schema=Config,
+)
diff --git a/examples/docs/features/validation/validation_root.stderr b/examples/docs/features/validation/validation_root.stderr
new file mode 100644
index 0000000..37cc95e
--- /dev/null
+++ b/examples/docs/features/validation/validation_root.stderr
@@ -0,0 +1,5 @@
+ | dature.errors.exceptions.DatureConfigError: Config loading errors (1)
+ +-+---------------- 1 ----------------
+ | dature.errors.exceptions.FieldLoadError: [] debug=True is not allowed on non-localhost hosts
+ | └── FILE '{SOURCES_DIR}validation_root_invalid.yaml'
+ +------------------------------------
diff --git a/examples/docs/index/intro_decorator.py b/examples/docs/index/intro_decorator.py
index 322be9c..f889723 100644
--- a/examples/docs/index/intro_decorator.py
+++ b/examples/docs/index/intro_decorator.py
@@ -3,14 +3,14 @@
import os
from dataclasses import dataclass
-from dature import Source, load
+import dature
os.environ["APP_HOST"] = "0.0.0.0"
os.environ["APP_PORT"] = "8080"
os.environ["APP_DEBUG"] = "true"
-@load(Source(prefix="APP_"))
+@dature.load(dature.EnvSource(prefix="APP_"))
@dataclass
class AppConfig:
host: str
diff --git a/examples/docs/index/intro_function.py b/examples/docs/index/intro_function.py
index e1f11c5..2e91319 100644
--- a/examples/docs/index/intro_function.py
+++ b/examples/docs/index/intro_function.py
@@ -3,7 +3,7 @@
import os
from dataclasses import dataclass
-from dature import Source, load
+import dature
os.environ["APP_HOST"] = "0.0.0.0"
os.environ["APP_PORT"] = "8080"
@@ -17,7 +17,7 @@ class AppConfig:
debug: bool = False
-config = load(Source(prefix="APP_"), AppConfig)
+config = dature.load(dature.EnvSource(prefix="APP_"), schema=AppConfig)
assert config.host == "0.0.0.0"
assert config.port == 8080
diff --git a/examples/docs/introduction/format_docker.py b/examples/docs/introduction/format_docker.py
index 055b1ad..e20ac53 100644
--- a/examples/docs/introduction/format_docker.py
+++ b/examples/docs/introduction/format_docker.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,9 +15,9 @@ class Config:
debug: bool = False
-config = load(
- Source(file_=SOURCES_DIR / "intro_app_docker_secrets"),
- Config,
+config = dature.load(
+ dature.DockerSecretsSource(dir_=SOURCES_DIR / "intro_app_docker_secrets"),
+ schema=Config,
)
assert config.host == "localhost"
diff --git a/examples/docs/introduction/format_env.py b/examples/docs/introduction/format_env.py
index 0a212da..916b0ce 100644
--- a/examples/docs/introduction/format_env.py
+++ b/examples/docs/introduction/format_env.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,7 +15,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "intro_app.env"), Config)
+config = dature.load(dature.EnvFileSource(file=SOURCES_DIR / "intro_app.env"), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
diff --git a/examples/docs/introduction/format_ini.py b/examples/docs/introduction/format_ini.py
index 33486ac..ab45524 100644
--- a/examples/docs/introduction/format_ini.py
+++ b/examples/docs/introduction/format_ini.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,9 +15,9 @@ class Config:
debug: bool = False
-config = load(
- Source(file_=SOURCES_DIR / "intro_app.ini", prefix="app"),
- Config,
+config = dature.load(
+ dature.IniSource(file=SOURCES_DIR / "intro_app.ini", prefix="app"),
+ schema=Config,
)
assert config.host == "localhost"
diff --git a/examples/docs/introduction/format_json.py b/examples/docs/introduction/format_json.py
index 80dfb7c..6b1e6a3 100644
--- a/examples/docs/introduction/format_json.py
+++ b/examples/docs/introduction/format_json.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,7 +15,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "intro_app.json"), Config)
+config = dature.load(dature.JsonSource(file=SOURCES_DIR / "intro_app.json"), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
diff --git a/examples/docs/introduction/format_json5.py b/examples/docs/introduction/format_json5.py
index 97edaad..1a87c84 100644
--- a/examples/docs/introduction/format_json5.py
+++ b/examples/docs/introduction/format_json5.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,7 +15,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "intro_app.json5"), Config)
+config = dature.load(dature.Json5Source(file=SOURCES_DIR / "intro_app.json5"), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
diff --git a/examples/docs/introduction/format_toml.py b/examples/docs/introduction/format_toml.py
index ae72c85..869493e 100644
--- a/examples/docs/introduction/format_toml.py
+++ b/examples/docs/introduction/format_toml.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
@@ -15,7 +15,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SOURCES_DIR / "intro_app.toml"), Config)
+config = dature.load(dature.Toml11Source(file=SOURCES_DIR / "intro_app.toml"), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
diff --git a/examples/docs/introduction/format_yaml.py b/examples/docs/introduction/format_yaml.py
index babd3e0..fd99146 100644
--- a/examples/docs/introduction/format_yaml.py
+++ b/examples/docs/introduction/format_yaml.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SHARED_DIR = Path(__file__).parents[1] / "shared"
@@ -15,7 +15,7 @@ class Config:
debug: bool = False
-config = load(Source(file_=SHARED_DIR / "common_app.yaml"), Config)
+config = dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
diff --git a/examples/docs/introduction/intro_decorator_file.py b/examples/docs/introduction/intro_decorator_file.py
index 8e807a7..8bef9f1 100644
--- a/examples/docs/introduction/intro_decorator_file.py
+++ b/examples/docs/introduction/intro_decorator_file.py
@@ -3,12 +3,12 @@
from dataclasses import dataclass
from pathlib import Path
-from dature import Source, load
+import dature
SHARED_DIR = Path(__file__).parents[1] / "shared"
-@load(Source(file_=SHARED_DIR / "common_app.yaml"))
+@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"))
@dataclass
class Config:
host: str
diff --git a/examples/docs/introduction/intro_decorator_override.py b/examples/docs/introduction/intro_decorator_override.py
new file mode 100644
index 0000000..55c2f33
--- /dev/null
+++ b/examples/docs/introduction/intro_decorator_override.py
@@ -0,0 +1,24 @@
+"""Decorator mode — explicit __init__ arguments take priority over loaded values."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import dature
+
+SHARED_DIR = Path(__file__).parents[1] / "shared"
+
+
+@dature.load(dature.Yaml12Source(file=SHARED_DIR / "common_app.yaml"))
+@dataclass
+class Config:
+ host: str
+ port: int
+ debug: bool = False
+
+
+# --8<-- [start:override]
+config = Config(port=9090) # host from source, port overridden
+# --8<-- [end:override]
+
+assert config.host == "localhost"
+assert config.port == 9090
diff --git a/examples/docs/introduction/intro_file_like.py b/examples/docs/introduction/intro_file_like.py
index ddaccf2..4741e6c 100644
--- a/examples/docs/introduction/intro_file_like.py
+++ b/examples/docs/introduction/intro_file_like.py
@@ -3,8 +3,7 @@
from dataclasses import dataclass
from io import BytesIO, StringIO
-from dature import Source, load
-from dature.sources_loader.json_ import JsonLoader
+import dature
@dataclass
@@ -16,14 +15,14 @@ class Config:
# From StringIO
text_stream = StringIO('{"host": "localhost", "port": 8080, "debug": true}')
-config = load(Source(file_=text_stream, loader=JsonLoader), Config)
+config = dature.load(dature.JsonSource(file=text_stream), schema=Config)
assert config.host == "localhost"
assert config.port == 8080
# From BytesIO
binary_stream = BytesIO(b'{"host": "0.0.0.0", "port": 3000}')
-config = load(Source(file_=binary_stream, loader=JsonLoader), Config)
+config = dature.load(dature.JsonSource(file=binary_stream), schema=Config)
assert config.host == "0.0.0.0"
assert config.port == 3000
diff --git a/examples/load_all_formats.py b/examples/load_all_formats.py
index 005936d..eb6518b 100644
--- a/examples/load_all_formats.py
+++ b/examples/load_all_formats.py
@@ -1,32 +1,28 @@
-"""dature.load() as a function — auto-detect format by file extension."""
+"""dature.load() as a function — load from every supported format."""
from pathlib import Path
from all_types_dataclass import AllPythonTypesCompact # type: ignore[import-not-found]
-from dature import Source, load
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
-from dature.sources_loader.toml_ import Toml10Loader
-from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader
+import dature
SOURCES_DIR = Path(__file__).parent / "sources"
FORMATS = {
- "json": Source(file_=SOURCES_DIR / "all_types.json"),
- "json5": Source(file_=SOURCES_DIR / "all_types.json5"),
- "toml10": Source(file_=SOURCES_DIR / "all_types_toml10.toml", loader=Toml10Loader),
- "toml11": Source(file_=SOURCES_DIR / "all_types_toml11.toml"),
- "ini": Source(file_=SOURCES_DIR / "all_types.ini", prefix="all_types"),
- "yaml11": Source(file_=SOURCES_DIR / "all_types_yaml11.yaml", loader=Yaml11Loader),
- "yaml12": Source(file_=SOURCES_DIR / "all_types_yaml12.yaml", loader=Yaml12Loader),
- "env": Source(file_=SOURCES_DIR / "all_types.env"),
- "docker_secrets": Source(
- file_=SOURCES_DIR / "all_types_docker_secrets",
- loader=DockerSecretsLoader,
+ "json": dature.JsonSource(file=SOURCES_DIR / "all_types.json"),
+ "json5": dature.Json5Source(file=SOURCES_DIR / "all_types.json5"),
+ "toml10": dature.Toml10Source(file=SOURCES_DIR / "all_types_toml10.toml"),
+ "toml11": dature.Toml11Source(file=SOURCES_DIR / "all_types_toml11.toml"),
+ "ini": dature.IniSource(file=SOURCES_DIR / "all_types.ini", prefix="all_types"),
+ "yaml11": dature.Yaml11Source(file=SOURCES_DIR / "all_types_yaml11.yaml"),
+ "yaml12": dature.Yaml12Source(file=SOURCES_DIR / "all_types_yaml12.yaml"),
+ "env": dature.EnvFileSource(file=SOURCES_DIR / "all_types.env"),
+ "docker_secrets": dature.DockerSecretsSource(
+ dir_=SOURCES_DIR / "all_types_docker_secrets",
),
}
for meta in FORMATS.values():
- config = load(meta, AllPythonTypesCompact)
+ config = dature.load(meta, schema=AllPythonTypesCompact)
assert config.string_value == "hello world"
assert config.integer_value == 42
diff --git a/pyproject.toml b/pyproject.toml
index 419fa92..c6d7536 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -83,7 +83,7 @@ ignore = [
"S105", # possible hardcoded password
"FBT001", # boolean argument in function definition
]
-"src/dature/sources_loader/loaders/*.py" = [
+"src/dature/loaders/*.py" = [
"FBT001", # loader functions accept bool as scalar value, not as a flag
]
"docs/generate_*.py" = [
@@ -126,7 +126,7 @@ warn_unused_ignores = true
warn_no_return = true
strict_equality = true
explicit_package_bases = true
-mypy_path = "src"
+mypy_path = "src:."
plugins = ["dature.mypy_plugin"]
[[tool.mypy.overrides]]
@@ -181,6 +181,11 @@ directory = "doc"
name = "Docs"
showcontent = true
+[[tool.towncrier.type]]
+directory = "refactor"
+name = "Refactoring"
+showcontent = true
+
[[tool.towncrier.type]]
directory = "removal"
name = "Removals"
diff --git a/src/dature/__init__.py b/src/dature/__init__.py
index 874a077..a7334ec 100644
--- a/src/dature/__init__.py
+++ b/src/dature/__init__.py
@@ -3,17 +3,29 @@
from dature.field_path import F
from dature.load_report import get_load_report
from dature.main import load
-from dature.metadata import FieldGroup, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, TypeLoader
+from dature.sources.base import FileSource, Source
+from dature.sources.docker_secrets import DockerSecretsSource
+from dature.sources.env_ import EnvFileSource, EnvSource
+from dature.sources.ini_ import IniSource
+from dature.sources.json5_ import Json5Source
+from dature.sources.json_ import JsonSource
+from dature.sources.toml_ import Toml10Source, Toml11Source
+from dature.sources.yaml_ import Yaml11Source, Yaml12Source
__all__ = [
+ "DockerSecretsSource",
+ "EnvFileSource",
+ "EnvSource",
"F",
- "FieldGroup",
- "FieldMergeStrategy",
- "Merge",
- "MergeRule",
- "MergeStrategy",
+ "FileSource",
+ "IniSource",
+ "Json5Source",
+ "JsonSource",
"Source",
- "TypeLoader",
+ "Toml10Source",
+ "Toml11Source",
+ "Yaml11Source",
+ "Yaml12Source",
"__version__",
"configure",
"get_load_report",
diff --git a/src/dature/_descriptors.py b/src/dature/_descriptors.py
new file mode 100644
index 0000000..f58b211
--- /dev/null
+++ b/src/dature/_descriptors.py
@@ -0,0 +1,13 @@
+from collections.abc import Callable
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from dature.path_finders.base import PathFinder
+
+
+class classproperty: # noqa: N801
+ def __init__(self, func: Callable[..., "type[PathFinder]"]) -> None:
+ self.fget = func
+
+ def __get__(self, obj: object | None, owner: type) -> "type[PathFinder]":
+ return self.fget(owner)
diff --git a/src/dature/config.py b/src/dature/config.py
index e107f71..30a1afd 100644
--- a/src/dature/config.py
+++ b/src/dature/config.py
@@ -1,21 +1,22 @@
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, Annotated
+from collections.abc import Mapping
+from dataclasses import asdict, dataclass
+from typing import TYPE_CHECKING, Annotated, Any, ClassVar, TypedDict, cast
-from dature.types import NestedResolveStrategy
+from dature.types import NestedResolveStrategy, TypeLoaderMap
from dature.validators.number import Ge
from dature.validators.string import MinLength
if TYPE_CHECKING:
- from dature.metadata import TypeLoader
+ from dature.protocols import DataclassInstance
# --8<-- [start:masking-config]
@dataclass(frozen=True, slots=True)
class MaskingConfig:
- mask: Annotated[str, MinLength(value=1)] = ""
- visible_prefix: Annotated[int, Ge(value=0)] = 0
- visible_suffix: Annotated[int, Ge(value=0)] = 0
- min_heuristic_length: Annotated[int, Ge(value=1)] = 8
+ mask: Annotated[str, MinLength(1)] = ""
+ visible_prefix: Annotated[int, Ge(0)] = 0
+ visible_suffix: Annotated[int, Ge(0)] = 0
+ min_heuristic_length: Annotated[int, Ge(1)] = 8
heuristic_threshold: float = 0.5
secret_field_names: tuple[str, ...] = (
"password",
@@ -39,8 +40,8 @@ class MaskingConfig:
# --8<-- [start:error-display-config]
@dataclass(frozen=True, slots=True)
class ErrorDisplayConfig:
- max_visible_lines: Annotated[int, Ge(value=1)] = 3
- max_line_length: Annotated[int, Ge(value=1)] = 80
+ max_visible_lines: Annotated[int, Ge(1)] = 3
+ max_line_length: Annotated[int, Ge(1)] = 80
# --8<-- [end:error-display-config]
@@ -66,15 +67,36 @@ class DatureConfig:
def _load_config() -> DatureConfig:
from dature.main import load # noqa: PLC0415
- from dature.metadata import Source # noqa: PLC0415
+ from dature.sources.env_ import EnvSource # noqa: PLC0415
- return load(Source(prefix="DATURE_"), DatureConfig)
+ return load(EnvSource(prefix="DATURE_"), schema=DatureConfig)
+
+
+class MaskingOptions(TypedDict, total=False):
+ mask: str
+ visible_prefix: int
+ visible_suffix: int
+ min_heuristic_length: int
+ heuristic_threshold: float
+ secret_field_names: tuple[str, ...]
+ mask_secrets: bool
+
+
+class ErrorDisplayOptions(TypedDict, total=False):
+ max_visible_lines: int
+ max_line_length: int
+
+
+class LoadingOptions(TypedDict, total=False):
+ cache: bool
+ debug: bool
+ nested_resolve_strategy: NestedResolveStrategy
class _ConfigProxy:
_instance: DatureConfig | None = None
_loading: bool = False
- _type_loaders: "tuple[TypeLoader, ...]" = ()
+ _type_loaders: ClassVar[TypeLoaderMap] = {}
@staticmethod
def ensure_loaded() -> DatureConfig:
@@ -94,7 +116,7 @@ def set_instance(value: DatureConfig | None) -> None:
_ConfigProxy._instance = value
@staticmethod
- def set_type_loaders(value: "tuple[TypeLoader, ...]") -> None:
+ def set_type_loaders(value: TypeLoaderMap) -> None:
_ConfigProxy._type_loaders = value
@property
@@ -110,34 +132,41 @@ def loading(self) -> LoadingConfig:
return self.ensure_loaded().loading
@property
- def type_loaders(self) -> "tuple[TypeLoader, ...]":
+ def type_loaders(self) -> TypeLoaderMap:
return _ConfigProxy._type_loaders
config: _ConfigProxy = _ConfigProxy()
+def _merge_group[D: DataclassInstance](current: D, options: Mapping[str, Any] | None, cls: type[D]) -> D:
+ if options is None:
+ return current
+ if not options:
+ return cls()
+ return cls(**cast("dict[str, Any]", asdict(current) | dict(options)))
+
+
# --8<-- [start:configure]
def configure(
*,
- masking: MaskingConfig | None = None,
- error_display: ErrorDisplayConfig | None = None,
- loading: LoadingConfig | None = None,
- type_loaders: "tuple[TypeLoader, ...] | None" = None,
+ masking: MaskingOptions | None = None,
+ error_display: ErrorDisplayOptions | None = None,
+ loading: LoadingOptions | None = None,
+ type_loaders: TypeLoaderMap | None = None,
) -> None:
# --8<-- [end:configure]
current = config.ensure_loaded()
- if masking is None:
- masking = current.masking
- if error_display is None:
- error_display = current.error_display
- if loading is None:
- loading = current.loading
+
+ merged_masking = _merge_group(current.masking, masking, MaskingConfig)
+ merged_error = _merge_group(current.error_display, error_display, ErrorDisplayConfig)
+ merged_loading = _merge_group(current.loading, loading, LoadingConfig)
+
config.set_instance(
DatureConfig(
- masking=masking,
- error_display=error_display,
- loading=loading,
+ masking=merged_masking,
+ error_display=merged_error,
+ loading=merged_loading,
),
)
if type_loaders is not None:
diff --git a/src/dature/errors/__init__.py b/src/dature/errors/__init__.py
index e69de29..efb7cb7 100644
--- a/src/dature/errors/__init__.py
+++ b/src/dature/errors/__init__.py
@@ -0,0 +1,29 @@
+from dature.errors.exceptions import (
+ DatureConfigError,
+ DatureError,
+ EnvVarExpandError,
+ FieldGroupError,
+ FieldGroupViolationError,
+ FieldLoadError,
+ LineRange,
+ MergeConflictError,
+ MergeConflictFieldError,
+ MissingEnvVarError,
+ SourceLoadError,
+ SourceLocation,
+)
+
+__all__ = [
+ "DatureConfigError",
+ "DatureError",
+ "EnvVarExpandError",
+ "FieldGroupError",
+ "FieldGroupViolationError",
+ "FieldLoadError",
+ "LineRange",
+ "MergeConflictError",
+ "MergeConflictFieldError",
+ "MissingEnvVarError",
+ "SourceLoadError",
+ "SourceLocation",
+]
diff --git a/src/dature/errors/exceptions.py b/src/dature/errors/exceptions.py
index 347adc6..90696ee 100644
--- a/src/dature/errors/exceptions.py
+++ b/src/dature/errors/exceptions.py
@@ -21,7 +21,7 @@ def __repr__(self) -> str:
@dataclass(frozen=True, slots=True)
class SourceLocation:
- display_label: str
+ location_label: str
file_path: Path | None
line_range: LineRange | None
line_content: list[str] | None
@@ -88,7 +88,7 @@ def _format_location(
suffix = f" ({loc.annotation})" if loc.annotation is not None else ""
if loc.env_var_name is not None and loc.file_path is None:
- main = f" {connector} {loc.display_label} '{loc.env_var_name}'"
+ main = f" {connector} {loc.location_label} '{loc.env_var_name}'"
if loc.env_var_value is not None:
main += f" = '{loc.env_var_value}'"
return [main + suffix]
@@ -105,23 +105,23 @@ def _format_location(
return [
*_format_content_lines(loc.line_content, prefix=" ├── "),
f" │ {' ' * found.pos}{'^' * caret_len}",
- *_format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix),
+ *_format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix),
]
if loc.line_content is not None:
return [
*_format_content_lines(loc.line_content, prefix=" ├── "),
- *_format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix),
+ *_format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix),
]
- return _format_file_line(loc, connector="└──" if is_last else "├──", suffix=suffix)
+ return _format_fileline(loc, connector="└──" if is_last else "├──", suffix=suffix)
-def _format_file_line(loc: SourceLocation, *, connector: str, suffix: str = "") -> list[str]:
- file_main = f" {connector} {loc.display_label} '{loc.file_path}'"
+def _format_fileline(loc: SourceLocation, *, connector: str, suffix: str = "") -> list[str]:
+ filemain = f" {connector} {loc.location_label} '{loc.file_path}'"
if loc.line_range is not None:
- file_main += f", {loc.line_range!r}"
- return [file_main + suffix]
+ filemain += f", {loc.line_range!r}"
+ return [filemain + suffix]
def _format_path(field_path: list[str]) -> str:
@@ -344,8 +344,4 @@ def __new__(
return super().__new__(cls, dataclass_name, errors)
def __str__(self) -> str:
- lines = [f"{self.dataclass_name} field group errors ({len(self.exceptions)})", ""]
- for exc in self.exceptions:
- lines.append(str(exc))
- lines.append("")
- return "\n".join(lines)
+ return f"{self.dataclass_name} field group errors ({len(self.exceptions)})"
diff --git a/src/dature/errors/formatter.py b/src/dature/errors/formatter.py
index f202242..3a564ad 100644
--- a/src/dature/errors/formatter.py
+++ b/src/dature/errors/formatter.py
@@ -186,7 +186,7 @@ def enrich_skipped_errors(
updated.append(exc)
continue
- source_reprs = ", ".join(repr(s.metadata) for s in sources)
+ source_reprs = ", ".join(repr(s.source) for s in sources)
locations = [
loc for s in sources for loc in resolve_source_location(exc.field_path, s.error_ctx, s.file_content)
]
diff --git a/src/dature/errors/location.py b/src/dature/errors/location.py
index 840b2cb..281c5ee 100644
--- a/src/dature/errors/location.py
+++ b/src/dature/errors/location.py
@@ -5,19 +5,20 @@
from dature.errors.exceptions import LineRange, SourceLocation
from dature.masking.masking import mask_env_line
+from dature.path_finders.base import PathFinder
from dature.types import NestedConflict, NestedConflicts
if TYPE_CHECKING:
- from dature.protocols import LoaderProtocol
+ from dature.sources.base import Source
@dataclass(frozen=True)
class ErrorContext:
dataclass_name: str
- loader_class: "type[LoaderProtocol]"
+ source_class: "type[Source]"
file_path: Path | None
prefix: str | None
- split_symbols: str
+ split_symbols: str | None = None
secret_paths: frozenset[str] = frozenset()
mask_secrets: bool = False
nested_conflicts: NestedConflicts | None = None
@@ -50,7 +51,7 @@ def _secret_overlaps_lines(
line_range: LineRange,
secret_paths: frozenset[str],
prefix: str | None,
- path_finder_class: type,
+ path_finder_class: type[PathFinder],
) -> bool:
finder = path_finder_class(file_content)
for secret_path in secret_paths:
@@ -85,7 +86,7 @@ def _apply_masking(
not should_mask
and ctx.secret_paths
and location.line_range is not None
- and ctx.loader_class.path_finder_class is not None
+ and ctx.source_class.path_finder_class is not None
and file_content is not None
):
should_mask = _secret_overlaps_lines(
@@ -93,13 +94,13 @@ def _apply_masking(
line_range=location.line_range,
secret_paths=ctx.secret_paths,
prefix=ctx.prefix,
- path_finder_class=ctx.loader_class.path_finder_class,
+ path_finder_class=ctx.source_class.path_finder_class,
)
if should_mask and location.line_content is not None:
masked_lines = [mask_env_line(line) for line in location.line_content]
result.append(
SourceLocation(
- display_label=location.display_label,
+ location_label=location.location_label,
file_path=location.file_path,
line_range=location.line_range,
line_content=masked_lines,
@@ -119,13 +120,13 @@ def resolve_source_location(
is_secret = ".".join(field_path) in ctx.secret_paths
conflict = _resolve_conflict(field_path, ctx)
- locations = ctx.loader_class.resolve_location(
- field_path,
- ctx.file_path,
- file_content,
- ctx.prefix,
- ctx.split_symbols,
- conflict,
+ locations = ctx.source_class.resolve_location(
+ field_path=field_path,
+ file_path=ctx.file_path,
+ file_content=file_content,
+ prefix=ctx.prefix,
+ nested_conflict=conflict,
+ split_symbols=ctx.split_symbols,
)
return _apply_masking(locations, ctx, file_content, is_secret=is_secret)
diff --git a/src/dature/expansion/env_expand.py b/src/dature/expansion/env_expand.py
index bb20e50..b7e2dbe 100644
--- a/src/dature/expansion/env_expand.py
+++ b/src/dature/expansion/env_expand.py
@@ -1,7 +1,7 @@
import os
import re
-from dature.errors.exceptions import EnvVarExpandError, MissingEnvVarError
+from dature.errors import EnvVarExpandError, MissingEnvVarError
from dature.types import ExpandEnvVarsMode, FilePath, JSONValue
# $VAR, ${VAR}, ${VAR:-default}, %VAR%, $$, %%
diff --git a/src/dature/field_path.py b/src/dature/field_path.py
index acc9d66..ad96e18 100644
--- a/src/dature/field_path.py
+++ b/src/dature/field_path.py
@@ -57,20 +57,20 @@ def as_path(self) -> str:
# --8<-- [end:field-path]
-def _validate_field_path_parts(field_path: FieldPath, dataclass_: type) -> None:
+def _validate_field_path_parts(field_path: FieldPath, schema: type) -> None:
for i, part in enumerate(field_path.parts):
- _validate_field(dataclass_, field_path.parts[:i], part)
+ _validate_field(schema, field_path.parts[:i], part)
-def validate_field_path_owner(field_path: FieldPath, dataclass_: type[DataclassInstance]) -> None:
+def validate_field_path_owner(field_path: FieldPath, schema: type[DataclassInstance]) -> None:
if isinstance(field_path.owner, str):
- if field_path.owner != dataclass_.__name__:
- msg = f"FieldPath owner '{field_path.owner}' does not match target dataclass '{dataclass_.__name__}'"
+ if field_path.owner != schema.__name__:
+ msg = f"FieldPath owner '{field_path.owner}' does not match target dataclass '{schema.__name__}'"
raise TypeError(msg)
- _validate_field_path_parts(field_path, dataclass_)
+ _validate_field_path_parts(field_path, schema)
return
- if field_path.owner is not dataclass_:
- msg = f"FieldPath owner '{field_path.owner.__name__}' does not match target dataclass '{dataclass_.__name__}'"
+ if field_path.owner is not schema:
+ msg = f"FieldPath owner '{field_path.owner.__name__}' does not match target dataclass '{schema.__name__}'"
raise TypeError(msg)
diff --git a/src/dature/load_report.py b/src/dature/load_report.py
index 62ec1f4..f94f37d 100644
--- a/src/dature/load_report.py
+++ b/src/dature/load_report.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass
from typing import Any
-from dature.metadata import MergeStrategy
+from dature.merging.strategy import MergeStrategyEnum
from dature.types import JSONValue
logger = logging.getLogger("dature")
@@ -32,7 +32,7 @@ class FieldOrigin:
@dataclass(frozen=True, slots=True, kw_only=True)
class LoadReport:
dataclass_name: str
- strategy: MergeStrategy | None
+ strategy: MergeStrategyEnum | None
sources: tuple[SourceEntry, ...]
field_origins: tuple[FieldOrigin, ...]
merged_data: JSONValue
@@ -43,7 +43,7 @@ def compute_field_origins(
*,
raw_dicts: list[JSONValue],
source_entries: tuple[SourceEntry, ...],
- strategy: MergeStrategy,
+ strategy: MergeStrategyEnum,
) -> tuple[FieldOrigin, ...]:
first_source: dict[str, int] = {}
last_source: dict[str, int] = {}
@@ -60,7 +60,7 @@ def compute_field_origins(
origins: list[FieldOrigin] = []
for key in sorted(last_source):
- if strategy in (MergeStrategy.FIRST_WINS, MergeStrategy.FIRST_FOUND):
+ if strategy in (MergeStrategyEnum.FIRST_WINS, MergeStrategyEnum.FIRST_FOUND):
winner_idx = first_source[key]
else:
winner_idx = last_source[key]
diff --git a/src/dature/sources_loader/loaders/__init__.py b/src/dature/loaders/__init__.py
similarity index 92%
rename from src/dature/sources_loader/loaders/__init__.py
rename to src/dature/loaders/__init__.py
index de9cfeb..a0b94bc 100644
--- a/src/dature/sources_loader/loaders/__init__.py
+++ b/src/dature/loaders/__init__.py
@@ -1,4 +1,4 @@
-from dature.sources_loader.loaders.base import (
+from dature.loaders.base import (
base64url_bytes_from_string,
base64url_str_from_string,
byte_size_from_string,
@@ -9,7 +9,7 @@
timedelta_from_string,
url_from_string,
)
-from dature.sources_loader.loaders.common import (
+from dature.loaders.common import (
bool_loader,
bytearray_from_json_string,
bytearray_from_string,
diff --git a/src/dature/sources_loader/loaders/base.py b/src/dature/loaders/base.py
similarity index 100%
rename from src/dature/sources_loader/loaders/base.py
rename to src/dature/loaders/base.py
diff --git a/src/dature/sources_loader/loaders/common.py b/src/dature/loaders/common.py
similarity index 100%
rename from src/dature/sources_loader/loaders/common.py
rename to src/dature/loaders/common.py
diff --git a/src/dature/sources_loader/loaders/json5_.py b/src/dature/loaders/json5_.py
similarity index 100%
rename from src/dature/sources_loader/loaders/json5_.py
rename to src/dature/loaders/json5_.py
diff --git a/src/dature/sources_loader/loaders/toml_.py b/src/dature/loaders/toml_.py
similarity index 100%
rename from src/dature/sources_loader/loaders/toml_.py
rename to src/dature/loaders/toml_.py
diff --git a/src/dature/sources_loader/loaders/yaml_.py b/src/dature/loaders/yaml_.py
similarity index 100%
rename from src/dature/sources_loader/loaders/yaml_.py
rename to src/dature/loaders/yaml_.py
diff --git a/src/dature/loading/common.py b/src/dature/loading/common.py
new file mode 100644
index 0000000..5ea072b
--- /dev/null
+++ b/src/dature/loading/common.py
@@ -0,0 +1,9 @@
+from dature.config import config
+
+
+def resolve_mask_secrets(*, source_level: bool | None = None, load_level: bool | None = None) -> bool:
+ if source_level is not None:
+ return source_level
+ if load_level is not None:
+ return load_level
+ return config.masking.mask_secrets
diff --git a/src/dature/loading/context.py b/src/dature/loading/context.py
index 9013f73..d7ff494 100644
--- a/src/dature/loading/context.py
+++ b/src/dature/loading/context.py
@@ -3,7 +3,6 @@
from collections.abc import Callable
from dataclasses import Field, asdict, fields, is_dataclass
from enum import Flag
-from pathlib import Path
from typing import Any, Protocol, cast, get_type_hints, runtime_checkable
from adaptix import Retort
@@ -11,23 +10,23 @@
from dature.errors.formatter import handle_load_errors
from dature.errors.location import ErrorContext
from dature.field_path import FieldPath
-from dature.loading.resolver import resolve_loader_class
from dature.merging.predicate import extract_field_path
-from dature.metadata import Source
-from dature.protocols import DataclassInstance, LoaderProtocol
+from dature.protocols import DataclassInstance
from dature.skip_field_provider import FilterResult, filter_invalid_fields
-from dature.types import FILE_LIKE_TYPES, JSONValue, NestedConflicts
+from dature.sources.base import FlatKeySource, Source
+from dature.sources.retort import create_probe_retort
+from dature.types import JSONValue, NestedConflicts
logger = logging.getLogger("dature")
-def coerce_flag_fields[T](data: JSONValue, dataclass_: type[T]) -> JSONValue:
- if not isinstance(data, dict) or not is_dataclass(dataclass_):
+def coerce_flag_fields[T](data: JSONValue, schema: type[T]) -> JSONValue:
+ if not isinstance(data, dict) or not is_dataclass(schema):
return data
- type_hints = get_type_hints(dataclass_)
+ type_hints = get_type_hints(schema)
coerced = dict(data)
- for field in fields(cast("type[DataclassInstance]", dataclass_)):
+ for field in fields(cast("type[DataclassInstance]", schema)):
hint = type_hints.get(field.name)
if hint is None:
continue
@@ -49,19 +48,15 @@ def build_error_ctx(
mask_secrets: bool = False,
nested_conflicts: NestedConflicts | None = None,
) -> ErrorContext:
- loader_class = resolve_loader_class(metadata.loader, metadata.file_)
- if isinstance(metadata.file_, FILE_LIKE_TYPES):
- error_file_path = None
- elif metadata.file_ is not None:
- error_file_path = Path(metadata.file_)
- else:
- error_file_path = None
+ error_file_path = metadata.file_path_for_errors()
+
+ split_symbols = metadata.split_symbols if isinstance(metadata, FlatKeySource) else None
return ErrorContext(
dataclass_name=dataclass_name,
- loader_class=loader_class,
+ source_class=type(metadata),
file_path=error_file_path,
prefix=metadata.prefix,
- split_symbols=metadata.split_symbols,
+ split_symbols=split_symbols,
secret_paths=secret_paths,
mask_secrets=mask_secrets,
nested_conflicts=nested_conflicts,
@@ -71,12 +66,12 @@ def build_error_ctx(
def get_allowed_fields(
*,
skip_value: bool | tuple[FieldPath, ...],
- dataclass_: type[DataclassInstance] | None = None,
+ schema: type[DataclassInstance] | None = None,
) -> set[str] | None:
if skip_value is True:
return None
if isinstance(skip_value, tuple):
- return {extract_field_path(fp, dataclass_) for fp in skip_value}
+ return {extract_field_path(field_path, schema) for field_path in skip_value}
return None
@@ -84,20 +79,20 @@ def apply_skip_invalid(
*,
raw: JSONValue,
skip_if_invalid: bool | tuple[FieldPath, ...] | None,
- loader_instance: LoaderProtocol,
- dataclass_: type[DataclassInstance],
+ source: Source,
+ schema: type[DataclassInstance],
log_prefix: str,
probe_retort: Retort | None = None,
) -> FilterResult:
if not skip_if_invalid:
return FilterResult(cleaned_dict=raw, skipped_paths=[])
- allowed_fields = get_allowed_fields(skip_value=skip_if_invalid, dataclass_=dataclass_)
+ allowed_fields = get_allowed_fields(skip_value=skip_if_invalid, schema=schema)
if probe_retort is None:
- probe_retort = loader_instance.create_probe_retort()
+ probe_retort = create_probe_retort(source)
- result = filter_invalid_fields(raw, probe_retort, dataclass_, allowed_fields)
+ result = filter_invalid_fields(raw, probe_retort, schema, allowed_fields)
for path in result.skipped_paths:
logger.warning(
"%s Skipped invalid field '%s'",
@@ -126,13 +121,6 @@ def merge_fields(
return complete_kwargs
-def ensure_retort(loader_instance: LoaderProtocol, cls: type[DataclassInstance]) -> None:
- """Creates a replacement response to __init__ so that Adaptix sees the original signature."""
- if cls not in loader_instance.retorts:
- loader_instance.retorts[cls] = loader_instance.create_retort()
- loader_instance.retorts[cls].get_loader(cls)
-
-
@runtime_checkable
class PatchContext(Protocol):
loading: bool
diff --git a/src/dature/loading/merge_config.py b/src/dature/loading/merge_config.py
new file mode 100644
index 0000000..547f953
--- /dev/null
+++ b/src/dature/loading/merge_config.py
@@ -0,0 +1,31 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING
+
+from dature.merging.strategy import MergeStrategyEnum
+from dature.sources.base import Source
+
+if TYPE_CHECKING:
+ from dature.types import (
+ ExpandEnvVarsMode,
+ FieldGroupTuple,
+ FieldMergeMap,
+ NestedResolve,
+ NestedResolveStrategy,
+ TypeLoaderMap,
+ )
+
+
+@dataclass(slots=True, kw_only=True)
+class MergeConfig:
+ sources: tuple[Source, ...]
+ strategy: MergeStrategyEnum = MergeStrategyEnum.LAST_WINS
+ field_merges: "FieldMergeMap | None" = None
+ field_groups: "tuple[FieldGroupTuple, ...]" = ()
+ skip_broken_sources: bool = False
+ skip_invalid_fields: bool = False
+ expand_env_vars: "ExpandEnvVarsMode" = "default"
+ secret_field_names: tuple[str, ...] | None = None
+ mask_secrets: bool | None = None
+ type_loaders: "TypeLoaderMap | None" = None
+ nested_resolve_strategy: "NestedResolveStrategy | None" = None
+ nested_resolve: "NestedResolve | None" = None
diff --git a/src/dature/loading/multi.py b/src/dature/loading/multi.py
index 6172baf..d1b0aa1 100644
--- a/src/dature/loading/multi.py
+++ b/src/dature/loading/multi.py
@@ -4,8 +4,7 @@
from dataclasses import fields, is_dataclass
from typing import Any
-from dature.config import config
-from dature.errors.exceptions import DatureConfigError
+from dature.errors import DatureConfigError
from dature.errors.formatter import enrich_skipped_errors, handle_load_errors
from dature.load_report import (
FieldOrigin,
@@ -15,39 +14,39 @@
compute_field_origins,
get_load_report,
)
+from dature.loading.common import resolve_mask_secrets
from dature.loading.context import (
build_error_ctx,
coerce_flag_fields,
- ensure_retort,
make_validating_post_init,
merge_fields,
)
-from dature.loading.resolver import resolve_loader
-from dature.loading.source_loading import load_sources, resolve_expand_env_vars
+from dature.loading.merge_config import MergeConfig
+from dature.loading.source_loading import ResolvedSourceParams, load_sources, resolve_source_params
from dature.masking.detection import build_secret_paths
from dature.masking.masking import mask_field_origins, mask_json_value, mask_source_entries, mask_value
from dature.merging.deep_merge import deep_merge, deep_merge_last_wins, raise_on_conflict
from dature.merging.field_group import FieldGroupContext, validate_field_groups
from dature.merging.predicate import ResolvedFieldGroup, build_field_group_paths, build_field_merge_map
-from dature.metadata import FieldMergeStrategy, Merge, MergeStrategy, Source, TypeLoader
-from dature.protocols import DataclassInstance, LoaderProtocol
+from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum
+from dature.protocols import DataclassInstance
+from dature.sources.base import Source
+from dature.sources.retort import (
+ create_validating_retort,
+ ensure_retort,
+ transform_to_dataclass,
+)
from dature.types import FieldMergeCallable, JSONValue
logger = logging.getLogger("dature")
-def _resolve_merge_mask_secrets(merge_meta: Merge) -> bool:
- if merge_meta.mask_secrets is not None:
- return merge_meta.mask_secrets
- return config.masking.mask_secrets
-
-
-def _collect_extra_secret_patterns(merge_meta: Merge) -> tuple[str, ...]:
+def _collect_extra_secret_patterns(merge_meta: MergeConfig) -> tuple[str, ...]:
merge_names = merge_meta.secret_field_names or ()
source_names: list[str] = []
- for source_meta in merge_meta.sources:
- if source_meta.secret_field_names is not None:
- source_names.extend(source_meta.secret_field_names)
+ for source_item in merge_meta.sources:
+ if source_item.secret_field_names is not None:
+ source_names.extend(source_item.secret_field_names)
return merge_names + tuple(source_names)
@@ -55,7 +54,7 @@ def _log_merge_step( # noqa: PLR0913
*,
dataclass_name: str,
step_idx: int,
- strategy: MergeStrategy,
+ strategy: MergeStrategyEnum,
before: JSONValue,
source_data: JSONValue,
after: JSONValue,
@@ -68,7 +67,7 @@ def _log_merge_step( # noqa: PLR0913
"[%s] Merge step %d (strategy=%s): added=%s, overwritten=%s",
dataclass_name,
step_idx,
- strategy.value,
+ strategy,
sorted(added_keys),
sorted(overwritten_keys),
)
@@ -115,7 +114,7 @@ def _log_field_origins(
def _build_merge_report(
*,
dataclass_name: str,
- strategy: MergeStrategy,
+ strategy: MergeStrategyEnum,
source_entries: tuple[SourceEntry, ...],
field_origins: tuple[FieldOrigin, ...],
merged_data: JSONValue,
@@ -217,9 +216,9 @@ def _set_nested_value(
def _merge_raw_dicts(
*,
raw_dicts: list[JSONValue],
- strategy: MergeStrategy,
+ strategy: MergeStrategyEnum,
dataclass_name: str,
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
callable_merge_map: dict[str, FieldMergeCallable] | None = None,
secret_paths: frozenset[str] = frozenset(),
) -> JSONValue:
@@ -227,7 +226,7 @@ def _merge_raw_dicts(
for step_idx, raw in enumerate(raw_dicts):
before = merged
- if strategy == MergeStrategy.RAISE_ON_CONFLICT:
+ if strategy == MergeStrategyEnum.RAISE_ON_CONFLICT:
merged = deep_merge_last_wins(merged, raw, field_merge_map=field_merge_map)
else:
merged = deep_merge(merged, raw, strategy=strategy, field_merge_map=field_merge_map)
@@ -257,53 +256,49 @@ def _merge_raw_dicts(
class _MergedData[T: DataclassInstance]:
result: T
merged_raw: JSONValue
- last_loader: LoaderProtocol
- last_source_meta: Source
+ last_source: Source
+ last_resolved: ResolvedSourceParams
def _load_and_merge[T: DataclassInstance]( # noqa: C901
*,
- merge_meta: Merge,
- dataclass_: type[T],
- loaders: tuple[LoaderProtocol, ...] | None = None,
+ merge_meta: MergeConfig,
+ schema: type[T],
debug: bool = False,
- type_loaders: tuple[TypeLoader, ...] = (),
) -> _MergedData[T]:
secret_paths: frozenset[str] = frozenset()
- if _resolve_merge_mask_secrets(merge_meta):
+ if resolve_mask_secrets(load_level=merge_meta.mask_secrets):
extra_patterns = _collect_extra_secret_patterns(merge_meta)
- secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns)
+ secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns)
loaded = load_sources(
merge_meta=merge_meta,
- dataclass_name=dataclass_.__name__,
- dataclass_=dataclass_,
- loaders=loaders,
+ dataclass_name=schema.__name__,
+ schema=schema,
secret_paths=secret_paths,
- mask_secrets=_resolve_merge_mask_secrets(merge_meta),
- type_loaders=type_loaders,
+ mask_secrets=resolve_mask_secrets(load_level=merge_meta.mask_secrets),
)
- merge_maps = build_field_merge_map(merge_meta.field_merges, dataclass_)
+ merge_maps = build_field_merge_map(merge_meta.field_merges, schema)
field_group_paths: tuple[ResolvedFieldGroup, ...] = ()
if merge_meta.field_groups:
- field_group_paths = build_field_group_paths(merge_meta.field_groups, dataclass_)
+ field_group_paths = build_field_group_paths(merge_meta.field_groups, schema)
if field_group_paths:
source_reprs = tuple(repr(merge_meta.sources[entry.index]) for entry in loaded.source_entries)
_validate_all_field_groups(
raw_dicts=loaded.raw_dicts,
field_group_paths=field_group_paths,
- dataclass_name=dataclass_.__name__,
+ dataclass_name=schema.__name__,
source_reprs=source_reprs,
)
- if merge_meta.strategy == MergeStrategy.RAISE_ON_CONFLICT:
+ if merge_meta.strategy == MergeStrategyEnum.RAISE_ON_CONFLICT:
raise_on_conflict(
loaded.raw_dicts,
loaded.source_ctxs,
- dataclass_.__name__,
+ schema.__name__,
field_merge_map=merge_maps.enum_map or None,
callable_merge_paths=merge_maps.callable_paths or None,
)
@@ -311,7 +306,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
merged = _merge_raw_dicts(
raw_dicts=loaded.raw_dicts,
strategy=merge_meta.strategy,
- dataclass_name=dataclass_.__name__,
+ dataclass_name=schema.__name__,
field_merge_map=merge_maps.enum_map or None,
callable_merge_map=merge_maps.callable_map or None,
secret_paths=secret_paths,
@@ -323,8 +318,8 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
masked_merged = merged
logger.debug(
"[%s] Merged result (strategy=%s, %d sources): %s",
- dataclass_.__name__,
- merge_meta.strategy.value,
+ schema.__name__,
+ merge_meta.strategy,
len(loaded.raw_dicts),
masked_merged,
)
@@ -337,7 +332,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
)
_log_field_origins(
- dataclass_name=dataclass_.__name__,
+ dataclass_name=schema.__name__,
field_origins=field_origins,
secret_paths=secret_paths,
)
@@ -345,7 +340,7 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
report: LoadReport | None = None
if debug:
report = _build_merge_report(
- dataclass_name=dataclass_.__name__,
+ dataclass_name=schema.__name__,
strategy=merge_meta.strategy,
source_entries=frozen_entries,
field_origins=field_origins,
@@ -353,16 +348,22 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
secret_paths=secret_paths,
)
+ last_resolved = loaded.last_resolved
last_error_ctx = loaded.source_ctxs[-1].error_ctx
- merged = coerce_flag_fields(merged, dataclass_)
+ merged = coerce_flag_fields(merged, schema)
try:
result = handle_load_errors(
- func=lambda: loaded.last_loader.transform_to_dataclass(merged, dataclass_),
+ func=lambda: transform_to_dataclass(
+ loaded.last_source,
+ merged,
+ schema,
+ resolved_type_loaders=last_resolved.type_loaders,
+ ),
ctx=last_error_ctx,
)
except DatureConfigError as exc:
if report is not None:
- attach_load_report(dataclass_, report)
+ attach_load_report(schema, report)
if loaded.skipped_fields:
raise enrich_skipped_errors(exc, loaded.skipped_fields) from exc
raise
@@ -370,41 +371,43 @@ def _load_and_merge[T: DataclassInstance]( # noqa: C901
if report is not None:
attach_load_report(result, report)
- last_source_idx = loaded.source_entries[-1].index
return _MergedData(
result=result,
merged_raw=merged,
- last_loader=loaded.last_loader,
- last_source_meta=merge_meta.sources[last_source_idx],
+ last_source=loaded.last_source,
+ last_resolved=loaded.last_resolved,
)
def merge_load_as_function[T: DataclassInstance](
- merge_meta: Merge,
- dataclass_: type[T],
+ merge_meta: MergeConfig,
+ schema: type[T],
*,
debug: bool,
- type_loaders: tuple[TypeLoader, ...] = (),
) -> T:
data = _load_and_merge(
merge_meta=merge_meta,
- dataclass_=dataclass_,
+ schema=schema,
debug=debug,
- type_loaders=type_loaders,
)
- validating_retort = data.last_loader.create_validating_retort(dataclass_)
- validation_loader = validating_retort.get_loader(dataclass_)
+ last_resolved = data.last_resolved
+ validating_retort = create_validating_retort(
+ data.last_source,
+ schema,
+ resolved_type_loaders=last_resolved.type_loaders,
+ )
+ validation_loader = validating_retort.get_loader(schema)
- last_meta = data.last_source_meta
- mask_secrets = _resolve_merge_mask_secrets(merge_meta)
+ last_meta = data.last_source
+ mask_secrets = resolve_mask_secrets(load_level=merge_meta.mask_secrets)
secret_paths: frozenset[str] = frozenset()
if mask_secrets:
extra_patterns = _collect_extra_secret_patterns(merge_meta)
- secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns)
+ secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns)
last_error_ctx = build_error_ctx(
last_meta,
- dataclass_.__name__,
+ schema.__name__,
secret_paths=secret_paths,
mask_secrets=mask_secrets,
)
@@ -417,7 +420,7 @@ def merge_load_as_function[T: DataclassInstance](
if debug:
report = get_load_report(data.result)
if report is not None:
- attach_load_report(dataclass_, report)
+ attach_load_report(schema, report)
raise
return data.result
@@ -427,19 +430,17 @@ class _MergePatchContext:
def __init__(
self,
*,
- merge_meta: Merge,
+ merge_meta: MergeConfig,
cls: type[DataclassInstance],
cache: bool,
debug: bool,
- type_loaders: tuple[TypeLoader, ...] = (),
) -> None:
- self.loaders = self._prepare_loaders(merge_meta=merge_meta, cls=cls, type_loaders=type_loaders)
+ self._prepare_sources(merge_meta=merge_meta, cls=cls)
self.merge_meta = merge_meta
self.cls = cls
self.cache = cache
self.debug = debug
- self.type_loaders = type_loaders
self.cached_data: DataclassInstance | None = None
self.field_list = fields(cls)
self.original_init = cls.__init__
@@ -447,11 +448,23 @@ def __init__(
self.loading = False
self.validating = False
- last_loader = self.loaders[-1]
- validating_retort = last_loader.create_validating_retort(cls)
+ last_source = merge_meta.sources[-1]
+ last_resolved = resolve_source_params(
+ last_source,
+ load_expand_env_vars=merge_meta.expand_env_vars,
+ load_type_loaders=merge_meta.type_loaders,
+ load_nested_resolve_strategy=merge_meta.nested_resolve_strategy,
+ load_nested_resolve=merge_meta.nested_resolve,
+ )
+ ensure_retort(last_source, cls, resolved_type_loaders=last_resolved.type_loaders)
+ validating_retort = create_validating_retort(
+ last_source,
+ cls,
+ resolved_type_loaders=last_resolved.type_loaders,
+ )
self.validation_loader: Callable[[JSONValue], DataclassInstance] = validating_retort.get_loader(cls)
- mask_secrets = _resolve_merge_mask_secrets(merge_meta)
+ mask_secrets = resolve_mask_secrets(load_level=merge_meta.mask_secrets)
self.secret_paths: frozenset[str] = frozenset()
if mask_secrets:
extra_patterns = _collect_extra_secret_patterns(merge_meta)
@@ -465,37 +478,21 @@ def __init__(
mask_secrets=mask_secrets,
)
- @staticmethod
- def _prepare_loaders(
+ def _prepare_sources(
+ self,
*,
- merge_meta: Merge,
+ merge_meta: MergeConfig,
cls: type[DataclassInstance],
- type_loaders: tuple[TypeLoader, ...] = (),
- ) -> tuple[LoaderProtocol, ...]:
- loaders: list[LoaderProtocol] = []
- for source_meta in merge_meta.sources:
- resolved_expand = resolve_expand_env_vars(source_meta, merge_meta)
- source_type_loaders = (source_meta.type_loaders or ()) + type_loaders
- resolved_strategy = (
- source_meta.nested_resolve_strategy
- or merge_meta.nested_resolve_strategy
- or config.loading.nested_resolve_strategy
- )
- resolve_kwargs: dict[str, Any] = {
- "expand_env_vars": resolved_expand,
- "type_loaders": source_type_loaders,
- "nested_resolve_strategy": resolved_strategy,
- }
- resolved_resolve = source_meta.nested_resolve or merge_meta.nested_resolve
- if resolved_resolve is not None:
- resolve_kwargs["nested_resolve"] = resolved_resolve
- loader_instance = resolve_loader(
- source_meta,
- **resolve_kwargs,
+ ) -> None:
+ for source_item in merge_meta.sources:
+ resolved = resolve_source_params(
+ source_item,
+ load_expand_env_vars=merge_meta.expand_env_vars,
+ load_type_loaders=merge_meta.type_loaders,
+ load_nested_resolve_strategy=merge_meta.nested_resolve_strategy,
+ load_nested_resolve=merge_meta.nested_resolve,
)
- ensure_retort(loader_instance, cls)
- loaders.append(loader_instance)
- return tuple(loaders)
+ ensure_retort(source_item, cls, resolved_type_loaders=resolved.type_loaders)
def _make_merge_new_init(ctx: _MergePatchContext) -> Callable[..., None]:
@@ -511,16 +508,14 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq
try:
merged_data = _load_and_merge(
merge_meta=ctx.merge_meta,
- dataclass_=ctx.cls,
- loaders=ctx.loaders,
+ schema=ctx.cls,
debug=ctx.debug,
- type_loaders=ctx.type_loaders,
)
finally:
ctx.loading = False
loaded_data = merged_data.result
ctx.error_ctx = build_error_ctx(
- merged_data.last_source_meta,
+ merged_data.last_source,
ctx.cls.__name__,
secret_paths=ctx.secret_paths,
mask_secrets=ctx.error_ctx.mask_secrets,
@@ -543,11 +538,10 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq
def merge_make_decorator(
- merge_meta: Merge,
+ merge_meta: MergeConfig,
*,
cache: bool,
debug: bool,
- type_loaders: tuple[TypeLoader, ...] = (),
) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]:
def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]:
if not is_dataclass(cls):
@@ -559,7 +553,6 @@ def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]:
cls=cls,
cache=cache,
debug=debug,
- type_loaders=type_loaders,
)
cls.__init__ = _make_merge_new_init(ctx) # type: ignore[method-assign]
cls.__post_init__ = make_validating_post_init(ctx) # type: ignore[attr-defined]
diff --git a/src/dature/loading/resolver.py b/src/dature/loading/resolver.py
deleted file mode 100644
index 79715ad..0000000
--- a/src/dature/loading/resolver.py
+++ /dev/null
@@ -1,136 +0,0 @@
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
-from dature.sources_loader.env_ import EnvFileLoader, EnvLoader
-from dature.sources_loader.ini_ import IniLoader
-from dature.sources_loader.json_ import JsonLoader
-from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, NestedResolve, NestedResolveStrategy
-
-if TYPE_CHECKING:
- from dature.metadata import Source, TypeLoader
- from dature.protocols import LoaderProtocol
- from dature.types import FileLike, FilePath
-
-SUPPORTED_EXTENSIONS = (".cfg", ".env", ".ini", ".json", ".json5", ".toml", ".yaml", ".yml")
-
-_EXTRA_BY_EXTENSION: dict[str, str] = {
- ".toml": "toml",
- ".yaml": "yaml",
- ".yml": "yaml",
- ".json5": "json5",
-}
-
-
-def _resolve_by_extension(extension: str) -> "type[LoaderProtocol]":
- try:
- return _resolve_by_extension_inner(extension)
- except ImportError:
- extra = _EXTRA_BY_EXTENSION.get(extension)
- if extra is None:
- raise
- msg = f"To use '{extension}' files, install the '{extra}' extra: pip install dature[{extra}]"
- raise ImportError(msg) from None
-
-
-def _resolve_by_extension_inner(extension: str) -> "type[LoaderProtocol]":
- match extension:
- case ".json":
- return JsonLoader
- case ".toml":
- from dature.sources_loader.toml_ import Toml11Loader # noqa: PLC0415
-
- return Toml11Loader
- case ".ini" | ".cfg":
- return IniLoader
- case ".env":
- return EnvFileLoader
- case ".yaml" | ".yml":
- from dature.sources_loader.yaml_ import Yaml12Loader # noqa: PLC0415
-
- return Yaml12Loader
- case ".json5":
- from dature.sources_loader.json5_ import Json5Loader # noqa: PLC0415
-
- return Json5Loader
- case _:
- supported = ", ".join(SUPPORTED_EXTENSIONS)
- msg = (
- f"Cannot determine loader type for extension '{extension}'. "
- f"Please specify loader explicitly or use a supported extension: {supported}"
- )
- raise ValueError(msg)
-
-
-def resolve_loader_class(
- loader: "type[LoaderProtocol] | None",
- file_: "FileLike | FilePath | None",
-) -> "type[LoaderProtocol]":
- if loader is not None:
- if file_ is not None and not isinstance(file_, FILE_LIKE_TYPES) and loader is EnvLoader:
- msg = (
- "EnvLoader reads from environment variables and does not use files. "
- "Remove file_ or use a file-based loader instead (e.g. EnvFileLoader)."
- )
- raise ValueError(msg)
- if isinstance(file_, FILE_LIKE_TYPES) and loader in (EnvLoader, DockerSecretsLoader):
- msg = (
- f"{loader.__name__} does not support file-like objects. "
- "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects."
- )
- raise ValueError(msg)
- return loader
-
- if isinstance(file_, FILE_LIKE_TYPES):
- msg = (
- "Cannot determine loader type for a file-like object. "
- "Please specify loader explicitly (e.g. loader=JsonLoader)."
- )
- raise TypeError(msg)
-
- if file_ is None:
- return EnvLoader
-
- # file-like objects are handled above; here file_ is str | Path
- file_path = Path(file_)
-
- if file_path.is_dir():
- return DockerSecretsLoader
-
- if file_path.name.startswith(".env"):
- return EnvFileLoader
-
- return _resolve_by_extension(file_path.suffix.lower())
-
-
-def resolve_loader(
- metadata: "Source",
- *,
- expand_env_vars: ExpandEnvVarsMode | None = None,
- type_loaders: "tuple[TypeLoader, ...]" = (),
- nested_resolve_strategy: NestedResolveStrategy = "flat",
- nested_resolve: NestedResolve | None = None,
-) -> "LoaderProtocol":
- loader_class = resolve_loader_class(metadata.loader, metadata.file_)
-
- resolved_expand = expand_env_vars or metadata.expand_env_vars or "default"
-
- kwargs: dict[str, Any] = {
- "prefix": metadata.prefix,
- "name_style": metadata.name_style,
- "field_mapping": metadata.field_mapping,
- "root_validators": metadata.root_validators,
- "validators": metadata.validators,
- "expand_env_vars": resolved_expand,
- "type_loaders": type_loaders,
- }
-
- if issubclass(loader_class, (EnvLoader, DockerSecretsLoader)):
- kwargs["split_symbols"] = metadata.split_symbols
- resolved_strategy = metadata.nested_resolve_strategy or nested_resolve_strategy
- kwargs["nested_resolve_strategy"] = resolved_strategy
- resolved_resolve = metadata.nested_resolve or nested_resolve
- if resolved_resolve is not None:
- kwargs["nested_resolve"] = resolved_resolve
-
- return loader_class(**kwargs)
diff --git a/src/dature/loading/single.py b/src/dature/loading/single.py
index 6ec5578..d1eab16 100644
--- a/src/dature/loading/single.py
+++ b/src/dature/loading/single.py
@@ -3,37 +3,47 @@
from dataclasses import asdict, fields, is_dataclass
from typing import TYPE_CHECKING, Any
-from dature.config import config
-from dature.errors.exceptions import DatureConfigError
+from dature.errors import DatureConfigError
from dature.errors.formatter import enrich_skipped_errors, handle_load_errors
from dature.errors.location import read_file_content
from dature.load_report import FieldOrigin, LoadReport, SourceEntry, attach_load_report
+from dature.loading.common import resolve_mask_secrets
from dature.loading.context import (
apply_skip_invalid,
build_error_ctx,
coerce_flag_fields,
- ensure_retort,
make_validating_post_init,
merge_fields,
)
-from dature.loading.resolver import resolve_loader_class
-from dature.loading.source_loading import SkippedFieldSource
+from dature.loading.source_loading import (
+ ResolvedSourceParams,
+ SkippedFieldSource,
+ load_source_raw,
+ resolve_source_params,
+)
from dature.masking.detection import build_secret_paths
from dature.masking.masking import mask_json_value
-from dature.metadata import Source
-from dature.protocols import DataclassInstance, LoaderProtocol
-from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
+from dature.protocols import DataclassInstance
+from dature.sources.base import Source
+from dature.sources.retort import (
+ create_probe_retort,
+ create_validating_retort,
+ ensure_retort,
+ transform_to_dataclass,
+)
+from dature.types import JSONValue
if TYPE_CHECKING:
from adaptix import Retort
-logger = logging.getLogger("dature")
-
+ from dature.types import (
+ ExpandEnvVarsMode,
+ NestedResolve,
+ NestedResolveStrategy,
+ TypeLoaderMap,
+ )
-def _resolve_single_mask_secrets(metadata: Source) -> bool:
- if metadata.mask_secrets is not None:
- return metadata.mask_secrets
- return config.masking.mask_secrets
+logger = logging.getLogger("dature")
def _log_single_source_load(
@@ -102,23 +112,27 @@ def _build_single_source_report(
class _PatchContext:
- def __init__(
+ def __init__( # noqa: PLR0913
self,
*,
- loader_instance: LoaderProtocol,
- file_path: FileOrStream,
+ source: Source,
cls: type[DataclassInstance],
- metadata: Source,
cache: bool,
debug: bool,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ resolved: ResolvedSourceParams,
) -> None:
- ensure_retort(loader_instance, cls)
- validating_retort = loader_instance.create_validating_retort(cls)
+ self.resolved = resolved
+ ensure_retort(source, cls, resolved_type_loaders=self.resolved.type_loaders)
+ validating_retort = create_validating_retort(
+ source,
+ cls,
+ resolved_type_loaders=self.resolved.type_loaders,
+ )
- self.loader_instance = loader_instance
- self.file_path = file_path
+ self.source = source
self.cls = cls
- self.metadata = metadata
self.cache = cache
self.debug = debug
self.cached_data: DataclassInstance | None = None
@@ -129,39 +143,38 @@ def __init__(
self.validating = False
self.loading = False
- loader_class = resolve_loader_class(metadata.loader, metadata.file_)
- self.loader_type = loader_class.display_name
+ self.loader_type = source.format_name
- mask_secrets = _resolve_single_mask_secrets(metadata)
+ resolved_mask_secrets = resolve_mask_secrets(source_level=source.mask_secrets, load_level=mask_secrets)
self.secret_paths: frozenset[str] = frozenset()
- if mask_secrets:
- extra_patterns = metadata.secret_field_names or ()
+ if resolved_mask_secrets:
+ extra_patterns = (source.secret_field_names or ()) + (secret_field_names or ())
self.secret_paths = build_secret_paths(cls, extra_patterns=extra_patterns)
self.error_ctx = build_error_ctx(
- metadata,
+ source,
cls.__name__,
secret_paths=self.secret_paths,
- mask_secrets=mask_secrets,
+ mask_secrets=resolved_mask_secrets,
)
# probe_retort is created early so adaptix sees the original signature
self.probe_retort: Retort | None = None
- if metadata.skip_if_invalid:
- self.probe_retort = loader_instance.create_probe_retort()
+ if source.skip_if_invalid:
+ self.probe_retort = create_probe_retort(source, resolved_type_loaders=self.resolved.type_loaders)
self.probe_retort.get_loader(cls)
def _load_single_source(ctx: _PatchContext) -> DataclassInstance:
load_result = handle_load_errors(
- func=lambda: ctx.loader_instance.load_raw(ctx.file_path),
+ func=lambda: load_source_raw(ctx.source, ctx.resolved),
ctx=ctx.error_ctx,
)
raw_data = load_result.data
if load_result.nested_conflicts:
ctx.error_ctx = build_error_ctx(
- ctx.metadata,
+ ctx.source,
ctx.cls.__name__,
secret_paths=ctx.secret_paths,
mask_secrets=ctx.error_ctx.mask_secrets,
@@ -170,9 +183,9 @@ def _load_single_source(ctx: _PatchContext) -> DataclassInstance:
filter_result = apply_skip_invalid(
raw=raw_data,
- skip_if_invalid=ctx.metadata.skip_if_invalid,
- loader_instance=ctx.loader_instance,
- dataclass_=ctx.cls,
+ skip_if_invalid=ctx.source.skip_if_invalid,
+ source=ctx.source,
+ schema=ctx.cls,
log_prefix=f"[{ctx.cls.__name__}]",
probe_retort=ctx.probe_retort,
)
@@ -183,11 +196,11 @@ def _load_single_source(ctx: _PatchContext) -> DataclassInstance:
file_content = read_file_content(ctx.error_ctx.file_path)
for path in filter_result.skipped_paths:
skipped_fields.setdefault(path, []).append(
- SkippedFieldSource(metadata=ctx.metadata, error_ctx=ctx.error_ctx, file_content=file_content),
+ SkippedFieldSource(source=ctx.source, error_ctx=ctx.error_ctx, file_content=file_content),
)
- def _transform(rd: JSONValue = raw_data) -> DataclassInstance:
- return ctx.loader_instance.transform_to_dataclass(rd, ctx.cls)
+ def _transform(data: JSONValue = raw_data) -> DataclassInstance:
+ return transform_to_dataclass(ctx.source, data, ctx.cls, resolved_type_loaders=ctx.resolved.type_loaders)
try:
loaded_data = handle_load_errors(
@@ -220,7 +233,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq
_log_single_source_load(
dataclass_name=ctx.cls.__name__,
loader_type=ctx.loader_type,
- file_path="" if isinstance(ctx.file_path, FILE_LIKE_TYPES) else str(ctx.file_path),
+ file_path=ctx.source.file_display() or "",
data=asdict(loaded_data),
secret_paths=ctx.secret_paths,
)
@@ -236,9 +249,7 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq
report = _build_single_source_report(
dataclass_name=ctx.cls.__name__,
loader_type=ctx.loader_type,
- file_path=str(ctx.file_path)
- if not isinstance(ctx.metadata.file_, (*FILE_LIKE_TYPES, type(None)))
- else None,
+ file_path=str(path) if (path := ctx.source.file_path_for_errors()) else None,
raw_data=result_dict,
secret_paths=ctx.secret_paths,
)
@@ -250,45 +261,60 @@ def new_init(self: DataclassInstance, *args: Any, **kwargs: Any) -> None: # noq
return new_init
-def load_as_function( # noqa: C901, PLR0912
+def load_as_function( # noqa: C901, PLR0913
*,
- loader_instance: LoaderProtocol,
- file_path: FileOrStream,
- dataclass_: type[DataclassInstance],
- metadata: Source,
+ source: Source,
+ schema: type[DataclassInstance],
debug: bool,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ expand_env_vars: "ExpandEnvVarsMode | None" = None,
+ type_loaders: "TypeLoaderMap | None" = None,
+ nested_resolve_strategy: "NestedResolveStrategy | None" = None,
+ nested_resolve: "NestedResolve | None" = None,
) -> DataclassInstance:
- loader_class = resolve_loader_class(metadata.loader, metadata.file_)
- display_name = loader_class.display_name
+ resolved = resolve_source_params(
+ source,
+ load_expand_env_vars=expand_env_vars,
+ load_type_loaders=type_loaders,
+ load_nested_resolve_strategy=nested_resolve_strategy,
+ load_nested_resolve=nested_resolve,
+ )
+ format_name = source.format_name
secret_paths: frozenset[str] = frozenset()
- mask_secrets = _resolve_single_mask_secrets(metadata)
- if mask_secrets:
- extra_patterns = metadata.secret_field_names or ()
- secret_paths = build_secret_paths(dataclass_, extra_patterns=extra_patterns)
- error_ctx = build_error_ctx(metadata, dataclass_.__name__, secret_paths=secret_paths, mask_secrets=mask_secrets)
+ resolved_mask_secrets = resolve_mask_secrets(source_level=source.mask_secrets, load_level=mask_secrets)
+ if resolved_mask_secrets:
+ extra_patterns = (source.secret_field_names or ()) + (secret_field_names or ())
+ secret_paths = build_secret_paths(schema, extra_patterns=extra_patterns)
+ error_ctx = build_error_ctx(
+ source,
+ schema.__name__,
+ secret_paths=secret_paths,
+ mask_secrets=resolved_mask_secrets,
+ )
load_result = handle_load_errors(
- func=lambda: loader_instance.load_raw(file_path),
+ func=lambda: load_source_raw(source, resolved),
ctx=error_ctx,
)
raw_data = load_result.data
if load_result.nested_conflicts:
error_ctx = build_error_ctx(
- metadata,
- dataclass_.__name__,
+ source,
+ schema.__name__,
secret_paths=secret_paths,
- mask_secrets=mask_secrets,
+ mask_secrets=resolved_mask_secrets,
nested_conflicts=load_result.nested_conflicts,
)
filter_result = apply_skip_invalid(
raw=raw_data,
- skip_if_invalid=metadata.skip_if_invalid,
- loader_instance=loader_instance,
- dataclass_=dataclass_,
- log_prefix=f"[{dataclass_.__name__}]",
+ skip_if_invalid=source.skip_if_invalid,
+ source=source,
+ schema=schema,
+ log_prefix=f"[{schema.__name__}]",
)
raw_data = filter_result.cleaned_dict
@@ -296,36 +322,36 @@ def load_as_function( # noqa: C901, PLR0912
file_content = read_file_content(error_ctx.file_path)
for path in filter_result.skipped_paths:
skipped_fields.setdefault(path, []).append(
- SkippedFieldSource(metadata=metadata, error_ctx=error_ctx, file_content=file_content),
+ SkippedFieldSource(source=source, error_ctx=error_ctx, file_content=file_content),
)
report: LoadReport | None = None
if debug:
- if isinstance(metadata.file_, FILE_LIKE_TYPES):
- report_file_path = None
- elif metadata.file_ is not None:
- report_file_path = str(metadata.file_)
- else:
- report_file_path = None
+ source_path = source.file_path_for_errors()
+ report_file_path = str(source_path) if source_path is not None else None
report = _build_single_source_report(
- dataclass_name=dataclass_.__name__,
- loader_type=display_name,
+ dataclass_name=schema.__name__,
+ loader_type=format_name,
file_path=report_file_path,
raw_data=raw_data,
secret_paths=secret_paths,
)
_log_single_source_load(
- dataclass_name=dataclass_.__name__,
- loader_type=display_name,
- file_path="" if isinstance(file_path, FILE_LIKE_TYPES) else str(file_path),
+ dataclass_name=schema.__name__,
+ loader_type=format_name,
+ file_path=source.file_display() or "",
data=raw_data if isinstance(raw_data, dict) else {},
secret_paths=secret_paths,
)
- validating_retort = loader_instance.create_validating_retort(dataclass_)
- validation_loader = validating_retort.get_loader(dataclass_)
- raw_data = coerce_flag_fields(raw_data, dataclass_)
+ validating_retort = create_validating_retort(
+ source,
+ schema,
+ resolved_type_loaders=resolved.type_loaders,
+ )
+ validation_loader = validating_retort.get_loader(schema)
+ raw_data = coerce_flag_fields(raw_data, schema)
try:
handle_load_errors(
@@ -334,19 +360,24 @@ def load_as_function( # noqa: C901, PLR0912
)
except DatureConfigError as exc:
if report is not None:
- attach_load_report(dataclass_, report)
+ attach_load_report(schema, report)
if skipped_fields:
raise enrich_skipped_errors(exc, skipped_fields) from exc
raise
try:
result = handle_load_errors(
- func=lambda: loader_instance.transform_to_dataclass(raw_data, dataclass_),
+ func=lambda: transform_to_dataclass(
+ source,
+ raw_data,
+ schema,
+ resolved_type_loaders=resolved.type_loaders,
+ ),
ctx=error_ctx,
)
except DatureConfigError as exc:
if report is not None:
- attach_load_report(dataclass_, report)
+ attach_load_report(schema, report)
if skipped_fields:
raise enrich_skipped_errors(exc, skipped_fields) from exc
raise
@@ -357,26 +388,39 @@ def load_as_function( # noqa: C901, PLR0912
return result
-def make_decorator(
+def make_decorator( # noqa: PLR0913
*,
- loader_instance: LoaderProtocol,
- file_path: FileOrStream,
- metadata: Source,
+ source: Source,
cache: bool,
debug: bool,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ expand_env_vars: "ExpandEnvVarsMode | None" = None,
+ type_loaders: "TypeLoaderMap | None" = None,
+ nested_resolve_strategy: "NestedResolveStrategy | None" = None,
+ nested_resolve: "NestedResolve | None" = None,
) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]:
+ resolved = resolve_source_params(
+ source,
+ load_expand_env_vars=expand_env_vars,
+ load_type_loaders=type_loaders,
+ load_nested_resolve_strategy=nested_resolve_strategy,
+ load_nested_resolve=nested_resolve,
+ )
+
def decorator(cls: type[DataclassInstance]) -> type[DataclassInstance]:
if not is_dataclass(cls):
msg = f"{cls.__name__} must be a dataclass"
raise TypeError(msg)
ctx = _PatchContext(
- loader_instance=loader_instance,
- file_path=file_path,
+ source=source,
cls=cls,
- metadata=metadata,
cache=cache,
debug=debug,
+ secret_field_names=secret_field_names,
+ mask_secrets=mask_secrets,
+ resolved=resolved,
)
cls.__init__ = _make_new_init(ctx) # type: ignore[method-assign]
cls.__post_init__ = make_validating_post_init(ctx) # type: ignore[attr-defined]
diff --git a/src/dature/loading/source_loading.py b/src/dature/loading/source_loading.py
index f7f23d6..f144f20 100644
--- a/src/dature/loading/source_loading.py
+++ b/src/dature/loading/source_loading.py
@@ -1,72 +1,125 @@
import logging
from dataclasses import dataclass
-from pathlib import Path
+from functools import partial
from dature.config import config
-from dature.errors.exceptions import DatureConfigError, SourceLoadError, SourceLocation
+from dature.errors import DatureConfigError, SourceLoadError, SourceLocation
from dature.errors.formatter import handle_load_errors
from dature.errors.location import ErrorContext, read_file_content
from dature.field_path import FieldPath
from dature.load_report import SourceEntry
from dature.loading.context import apply_skip_invalid, build_error_ctx
-from dature.loading.resolver import resolve_loader, resolve_loader_class
+from dature.loading.merge_config import MergeConfig
from dature.masking.masking import mask_json_value
-from dature.metadata import Merge, MergeStrategy, Source, TypeLoader
-from dature.protocols import DataclassInstance, LoaderProtocol
+from dature.merging.strategy import MergeStrategyEnum
+from dature.protocols import DataclassInstance
from dature.skip_field_provider import FilterResult
-from dature.types import FILE_LIKE_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue, LoadRawResult
+from dature.sources.base import FlatKeySource, Source
+from dature.types import (
+ ExpandEnvVarsMode,
+ JSONValue,
+ LoadRawResult,
+ NestedResolve,
+ NestedResolveStrategy,
+ TypeLoaderMap,
+)
logger = logging.getLogger("dature")
-def resolve_loader_for_source(
+def load_source_raw(source: Source, resolved: "ResolvedSourceParams") -> LoadRawResult:
+ if isinstance(source, FlatKeySource):
+ return source.load_raw(
+ resolved_expand=resolved.expand_env_vars,
+ resolved_nested_strategy=resolved.nested_resolve_strategy,
+ resolved_nested_resolve=resolved.nested_resolve,
+ )
+ return source.load_raw(resolved_expand=resolved.expand_env_vars)
+
+
+@dataclass(frozen=True, slots=True)
+class ResolvedSourceParams:
+ expand_env_vars: ExpandEnvVarsMode
+ type_loaders: TypeLoaderMap | None
+ nested_resolve_strategy: NestedResolveStrategy
+ nested_resolve: NestedResolve | None
+
+
+def resolve_source_params(
+ source: Source,
*,
- loaders: tuple[LoaderProtocol, ...] | None,
- index: int,
- source_meta: Source,
- expand_env_vars: ExpandEnvVarsMode | None = None,
- type_loaders: "tuple[TypeLoader, ...]" = (),
-) -> LoaderProtocol:
- if loaders is not None:
- return loaders[index]
- return resolve_loader(source_meta, expand_env_vars=expand_env_vars, type_loaders=type_loaders)
-
-
-def should_skip_broken(source_meta: Source, merge_meta: Merge) -> bool:
- if source_meta.skip_if_broken is not None:
- if source_meta.file_ is None:
+ load_expand_env_vars: ExpandEnvVarsMode | None = None,
+ load_type_loaders: TypeLoaderMap | None = None,
+ load_nested_resolve_strategy: NestedResolveStrategy | None = None,
+ load_nested_resolve: NestedResolve | None = None,
+) -> ResolvedSourceParams:
+ resolved_expand: ExpandEnvVarsMode = "default"
+ if source.expand_env_vars is not None:
+ resolved_expand = source.expand_env_vars
+ elif load_expand_env_vars is not None:
+ resolved_expand = load_expand_env_vars
+
+ source_loaders = source.type_loaders or {}
+ load_loaders = load_type_loaders or {}
+ config_loaders = config.type_loaders or {}
+ merged_loaders = {**config_loaders, **load_loaders, **source_loaders}
+ resolved_type_loaders = merged_loaders or None
+
+ resolved_nested_strategy: NestedResolveStrategy = config.loading.nested_resolve_strategy
+ if isinstance(source, FlatKeySource) and source.nested_resolve_strategy is not None:
+ resolved_nested_strategy = source.nested_resolve_strategy
+ elif load_nested_resolve_strategy is not None:
+ resolved_nested_strategy = load_nested_resolve_strategy
+
+ resolved_nested_resolve: NestedResolve | None = None
+ if isinstance(source, FlatKeySource) and source.nested_resolve is not None:
+ resolved_nested_resolve = source.nested_resolve
+ elif load_nested_resolve is not None:
+ resolved_nested_resolve = load_nested_resolve
+
+ return ResolvedSourceParams(
+ expand_env_vars=resolved_expand,
+ type_loaders=resolved_type_loaders,
+ nested_resolve_strategy=resolved_nested_strategy,
+ nested_resolve=resolved_nested_resolve,
+ )
+
+
+def should_skip_broken(source: Source, merge_meta: MergeConfig) -> bool:
+ if source.skip_if_broken is not None:
+ if source.file_display() is None:
logger.warning(
"skip_if_broken has no effect on environment variable sources — they cannot be broken",
)
- return source_meta.skip_if_broken
+ return source.skip_if_broken
return merge_meta.skip_broken_sources
-def resolve_expand_env_vars(source_meta: Source, merge_meta: Merge) -> ExpandEnvVarsMode:
- if source_meta.expand_env_vars is not None:
- return source_meta.expand_env_vars
+def resolve_expand_env_vars(source: Source, merge_meta: MergeConfig) -> ExpandEnvVarsMode:
+ if source.expand_env_vars is not None:
+ return source.expand_env_vars
return merge_meta.expand_env_vars
def resolve_skip_invalid(
- source_meta: Source,
- merge_meta: Merge,
+ source: Source,
+ merge_meta: MergeConfig,
) -> bool | tuple[FieldPath, ...]:
- if source_meta.skip_if_invalid is not None:
- return source_meta.skip_if_invalid
+ if source.skip_if_invalid is not None:
+ return source.skip_if_invalid
return merge_meta.skip_invalid_fields
-def resolve_mask_secrets(source_meta: Source, merge_meta: Merge) -> bool:
- if source_meta.mask_secrets is not None:
- return source_meta.mask_secrets
+def resolve_mask_secrets(source: Source, merge_meta: MergeConfig) -> bool:
+ if source.mask_secrets is not None:
+ return source.mask_secrets
if merge_meta.mask_secrets is not None:
return merge_meta.mask_secrets
return config.masking.mask_secrets
-def resolve_secret_field_names(source_meta: Source, merge_meta: Merge) -> tuple[str, ...]:
- source_names = source_meta.secret_field_names or ()
+def resolve_secret_field_names(source: Source, merge_meta: MergeConfig) -> tuple[str, ...]:
+ source_names = source.secret_field_names or ()
merge_names = merge_meta.secret_field_names or ()
return source_names + merge_names
@@ -74,22 +127,21 @@ def resolve_secret_field_names(source_meta: Source, merge_meta: Merge) -> tuple[
def apply_merge_skip_invalid(
*,
raw: JSONValue,
- source_meta: Source,
- merge_meta: Merge,
- loader_instance: LoaderProtocol,
- dataclass_: type[DataclassInstance],
+ source: Source,
+ merge_meta: MergeConfig,
+ schema: type[DataclassInstance],
source_index: int,
) -> FilterResult:
- skip_value = resolve_skip_invalid(source_meta, merge_meta)
+ skip_value = resolve_skip_invalid(source, merge_meta)
if not skip_value:
return FilterResult(cleaned_dict=raw, skipped_paths=[])
return apply_skip_invalid(
raw=raw,
skip_if_invalid=skip_value,
- loader_instance=loader_instance,
- dataclass_=dataclass_,
- log_prefix=f"[{dataclass_.__name__}] Source {source_index}:",
+ source=source,
+ schema=schema,
+ log_prefix=f"[{schema.__name__}] Source {source_index}:",
)
@@ -101,7 +153,7 @@ class SourceContext:
@dataclass(frozen=True, slots=True)
class SkippedFieldSource:
- metadata: Source
+ source: Source
error_ctx: ErrorContext
file_content: str | None
@@ -111,73 +163,55 @@ class LoadedSources:
raw_dicts: list[JSONValue]
source_ctxs: list[SourceContext]
source_entries: list[SourceEntry]
- last_loader: LoaderProtocol
+ last_source: Source
+ last_resolved: ResolvedSourceParams
skipped_fields: dict[str, list[SkippedFieldSource]]
-def load_sources( # noqa: C901, PLR0912, PLR0913, PLR0915
+def load_sources( # noqa: C901, PLR0912, PLR0915
*,
- merge_meta: Merge,
+ merge_meta: MergeConfig,
dataclass_name: str,
- dataclass_: type[DataclassInstance],
- loaders: tuple[LoaderProtocol, ...] | None = None,
+ schema: type[DataclassInstance],
secret_paths: frozenset[str] = frozenset(),
mask_secrets: bool = False,
- type_loaders: "tuple[TypeLoader, ...]" = (),
) -> LoadedSources:
raw_dicts: list[JSONValue] = []
source_ctxs: list[SourceContext] = []
source_entries: list[SourceEntry] = []
- last_loader: LoaderProtocol | None = None
+ last_source: Source | None = None
+ last_resolved: ResolvedSourceParams | None = None
skipped_fields: dict[str, list[SkippedFieldSource]] = {}
- for i, source_meta in enumerate(merge_meta.sources):
- resolved_expand = resolve_expand_env_vars(source_meta, merge_meta)
- source_type_loaders = (source_meta.type_loaders or ()) + type_loaders
- loader_instance = resolve_loader_for_source(
- loaders=loaders,
- index=i,
- source_meta=source_meta,
- expand_env_vars=resolved_expand,
- type_loaders=source_type_loaders,
+ for i, source_item in enumerate(merge_meta.sources):
+ resolved = resolve_source_params(
+ source_item,
+ load_expand_env_vars=merge_meta.expand_env_vars,
+ load_type_loaders=merge_meta.type_loaders,
+ load_nested_resolve_strategy=merge_meta.nested_resolve_strategy,
+ load_nested_resolve=merge_meta.nested_resolve,
)
- file_or_path: FileOrStream
- if isinstance(source_meta.file_, FILE_LIKE_TYPES):
- file_or_path = source_meta.file_
- elif source_meta.file_ is not None:
- file_or_path = Path(source_meta.file_)
- else:
- file_or_path = Path()
- error_ctx = build_error_ctx(source_meta, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets)
-
- def _load_raw(
- li: LoaderProtocol = loader_instance,
- fp: FileOrStream = file_or_path,
- ) -> LoadRawResult:
- return li.load_raw(fp)
+ error_ctx = build_error_ctx(source_item, dataclass_name, secret_paths=secret_paths, mask_secrets=mask_secrets)
try:
load_result = handle_load_errors(
- func=_load_raw,
+ func=partial(load_source_raw, source_item, resolved),
ctx=error_ctx,
)
except (DatureConfigError, FileNotFoundError):
- if merge_meta.strategy != MergeStrategy.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta):
+ if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_item, merge_meta):
raise
logger.warning(
"[%s] Source %d skipped (broken): file=%s",
dataclass_name,
i,
- source_meta.file_
- if isinstance(source_meta.file_, (str, Path))
- else ("" if source_meta.file_ is not None else ""),
+ source_item.file_display() or "",
)
continue
except Exception as exc:
- if merge_meta.strategy != MergeStrategy.FIRST_FOUND and not should_skip_broken(source_meta, merge_meta):
- loader_class = resolve_loader_class(source_meta.loader, source_meta.file_)
+ if merge_meta.strategy != MergeStrategyEnum.FIRST_FOUND and not should_skip_broken(source_item, merge_meta):
location = SourceLocation(
- display_label=loader_class.display_label,
+ location_label=type(source_item).location_label,
file_path=error_ctx.file_path,
line_range=None,
line_content=None,
@@ -192,16 +226,14 @@ def _load_raw(
"[%s] Source %d skipped (broken): file=%s",
dataclass_name,
i,
- source_meta.file_
- if isinstance(source_meta.file_, (str, Path))
- else ("" if source_meta.file_ is not None else ""),
+ source_item.file_display() or "",
)
continue
raw = load_result.data
if load_result.nested_conflicts:
error_ctx = build_error_ctx(
- source_meta,
+ source_item,
dataclass_name,
secret_paths=secret_paths,
mask_secrets=mask_secrets,
@@ -212,32 +244,28 @@ def _load_raw(
filter_result = apply_merge_skip_invalid(
raw=raw,
- source_meta=source_meta,
+ source=source_item,
merge_meta=merge_meta,
- loader_instance=loader_instance,
- dataclass_=dataclass_,
+ schema=schema,
source_index=i,
)
for path in filter_result.skipped_paths:
skipped_fields.setdefault(path, []).append(
- SkippedFieldSource(metadata=source_meta, error_ctx=error_ctx, file_content=file_content),
+ SkippedFieldSource(source=source_item, error_ctx=error_ctx, file_content=file_content),
)
raw = filter_result.cleaned_dict
raw_dicts.append(raw)
- loader_class = resolve_loader_class(source_meta.loader, source_meta.file_)
- display_name = loader_class.display_name
+ format_name = type(source_item).format_name
logger.debug(
"[%s] Source %d loaded: loader=%s, file=%s, keys=%s",
dataclass_name,
i,
- display_name,
- source_meta.file_
- if isinstance(source_meta.file_, (str, Path))
- else ("" if source_meta.file_ is not None else ""),
+ format_name,
+ source_item.file_display() or "",
sorted(raw.keys()) if isinstance(raw, dict) else "",
)
if secret_paths:
@@ -254,23 +282,24 @@ def _load_raw(
source_entries.append(
SourceEntry(
index=i,
- file_path=str(source_meta.file_) if isinstance(source_meta.file_, (str, Path)) else None,
- loader_type=display_name,
+ file_path=str(src_path) if (src_path := source_item.file_path_for_errors()) is not None else None,
+ loader_type=format_name,
raw_data=raw,
),
)
source_ctxs.append(SourceContext(error_ctx=error_ctx, file_content=file_content))
- last_loader = loader_instance
+ last_source = source_item
+ last_resolved = resolved
- if merge_meta.strategy == MergeStrategy.FIRST_FOUND:
+ if merge_meta.strategy == MergeStrategyEnum.FIRST_FOUND:
break
- if last_loader is None:
+ if last_source is None or last_resolved is None:
if merge_meta.sources:
msg = f"All {len(merge_meta.sources)} source(s) failed to load"
else:
- msg = "Merge.sources must not be empty"
+ msg = "load() requires at least one Source for merge"
source_error = SourceLoadError(message=msg)
raise DatureConfigError(dataclass_name, [source_error])
@@ -278,6 +307,7 @@ def _load_raw(
raw_dicts=raw_dicts,
source_ctxs=source_ctxs,
source_entries=source_entries,
- last_loader=last_loader,
+ last_source=last_source,
+ last_resolved=last_resolved,
skipped_fields=skipped_fields,
)
diff --git a/src/dature/main.py b/src/dature/main.py
index 06e5b07..536a0bd 100644
--- a/src/dature/main.py
+++ b/src/dature/main.py
@@ -1,45 +1,83 @@
+import logging
from collections.abc import Callable
-from pathlib import Path
from typing import Any, overload
from dature.config import config
+from dature.loading.merge_config import MergeConfig
from dature.loading.multi import merge_load_as_function, merge_make_decorator
-from dature.loading.resolver import resolve_loader
from dature.loading.single import load_as_function, make_decorator
-from dature.metadata import Merge, Source
+from dature.merging.strategy import MergeStrategyEnum
from dature.protocols import DataclassInstance
-from dature.types import FILE_LIKE_TYPES, FileOrStream
+from dature.sources.base import Source
+from dature.types import (
+ ExpandEnvVarsMode,
+ FieldGroupTuple,
+ FieldMergeMap,
+ MergeStrategyName,
+ NestedResolve,
+ NestedResolveStrategy,
+ TypeLoaderMap,
+)
+
+logger = logging.getLogger("dature")
@overload
def load[T](
- metadata: Source | Merge | tuple[Source, ...] | None,
- /,
- dataclass_: type[T],
- *,
+ *sources: Source,
+ schema: type[T],
debug: bool | None = None,
+ strategy: MergeStrategyName = "last_wins",
+ field_merges: FieldMergeMap | None = None,
+ field_groups: tuple[FieldGroupTuple, ...] = (),
+ skip_broken_sources: bool = False,
+ skip_invalid_fields: bool = False,
+ expand_env_vars: ExpandEnvVarsMode | None = None,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ type_loaders: TypeLoaderMap | None = None,
+ nested_resolve_strategy: NestedResolveStrategy | None = None,
+ nested_resolve: NestedResolve | None = None,
) -> T: ...
@overload
def load(
- metadata: Source | Merge | tuple[Source, ...] | None = None,
- /,
- dataclass_: None = None,
- *,
+ *sources: Source,
+ schema: None = None,
cache: bool | None = None,
debug: bool | None = None,
+ strategy: MergeStrategyName = "last_wins",
+ field_merges: FieldMergeMap | None = None,
+ field_groups: tuple[FieldGroupTuple, ...] = (),
+ skip_broken_sources: bool = False,
+ skip_invalid_fields: bool = False,
+ expand_env_vars: ExpandEnvVarsMode | None = None,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ type_loaders: TypeLoaderMap | None = None,
+ nested_resolve_strategy: NestedResolveStrategy | None = None,
+ nested_resolve: NestedResolve | None = None,
) -> Callable[[type[DataclassInstance]], type[DataclassInstance]]: ...
# --8<-- [start:load]
-def load(
- metadata: Source | Merge | tuple[Source, ...] | None = None,
- /,
- dataclass_: type[Any] | None = None,
- *,
+def load( # noqa: PLR0913
+ *sources: Source,
+ schema: type[Any] | None = None,
cache: bool | None = None,
debug: bool | None = None,
+ strategy: MergeStrategyName = "last_wins",
+ field_merges: FieldMergeMap | None = None,
+ field_groups: tuple[FieldGroupTuple, ...] = (),
+ skip_broken_sources: bool = False,
+ skip_invalid_fields: bool = False,
+ expand_env_vars: ExpandEnvVarsMode | None = None,
+ secret_field_names: tuple[str, ...] | None = None,
+ mask_secrets: bool | None = None,
+ type_loaders: TypeLoaderMap | None = None,
+ nested_resolve_strategy: NestedResolveStrategy | None = None,
+ nested_resolve: NestedResolve | None = None,
) -> Any:
# --8<-- [end:load]
if cache is None:
@@ -47,46 +85,107 @@ def load(
if debug is None:
debug = config.loading.debug
- if isinstance(metadata, tuple):
- metadata = Merge(*metadata)
+ _validate_sources(sources)
- if isinstance(metadata, Merge):
- merge_type_loaders = (metadata.type_loaders or ()) + config.type_loaders
- if dataclass_ is not None:
- return merge_load_as_function(metadata, dataclass_, debug=debug, type_loaders=merge_type_loaders)
- return merge_make_decorator(metadata, cache=cache, debug=debug, type_loaders=merge_type_loaders)
+ if len(sources) == 1 and (
+ strategy != "last_wins"
+ or field_merges is not None
+ or field_groups != ()
+ or skip_broken_sources
+ or skip_invalid_fields
+ ):
+ logger.warning("Merge-related parameters have no effect with a single source")
- if metadata is None:
- metadata = Source()
-
- type_loaders = (metadata.type_loaders or ()) + config.type_loaders
- loader_instance = resolve_loader(
- metadata,
- type_loaders=type_loaders,
- nested_resolve_strategy=config.loading.nested_resolve_strategy,
- )
+ if len(sources) > 1:
+ return _load_multi(
+ sources=sources,
+ schema=schema,
+ cache=cache,
+ debug=debug,
+ strategy=strategy,
+ field_merges=field_merges,
+ field_groups=field_groups,
+ skip_broken_sources=skip_broken_sources,
+ skip_invalid_fields=skip_invalid_fields,
+ expand_env_vars=expand_env_vars,
+ secret_field_names=secret_field_names,
+ mask_secrets=mask_secrets,
+ type_loaders=type_loaders,
+ nested_resolve_strategy=nested_resolve_strategy,
+ nested_resolve=nested_resolve,
+ )
- file_or_path: FileOrStream
- if isinstance(metadata.file_, FILE_LIKE_TYPES):
- file_or_path = metadata.file_
- elif metadata.file_ is not None:
- file_or_path = Path(metadata.file_)
- else:
- file_or_path = Path()
+ source = sources[0]
- if dataclass_ is not None:
+ if schema is not None:
return load_as_function(
- loader_instance=loader_instance,
- file_path=file_or_path,
- dataclass_=dataclass_,
- metadata=metadata,
+ source=source,
+ schema=schema,
debug=debug,
+ secret_field_names=secret_field_names,
+ mask_secrets=mask_secrets,
+ expand_env_vars=expand_env_vars,
+ type_loaders=type_loaders,
+ nested_resolve_strategy=nested_resolve_strategy,
+ nested_resolve=nested_resolve,
)
return make_decorator(
- loader_instance=loader_instance,
- file_path=file_or_path,
- metadata=metadata,
+ source=source,
cache=cache,
debug=debug,
+ secret_field_names=secret_field_names,
+ mask_secrets=mask_secrets,
+ expand_env_vars=expand_env_vars,
+ type_loaders=type_loaders,
+ nested_resolve_strategy=nested_resolve_strategy,
+ nested_resolve=nested_resolve,
+ )
+
+
+def _validate_sources(sources: tuple[Source, ...]) -> None:
+ for source in sources:
+ if not isinstance(source, Source):
+ msg = f"load() positional arguments must be Source instances, got {source!r}"
+ raise TypeError(msg)
+
+ if not sources:
+ msg = "load() requires at least one Source"
+ raise TypeError(msg)
+
+
+def _load_multi( # noqa: PLR0913
+ *,
+ sources: tuple[Source, ...],
+ schema: type[DataclassInstance] | None,
+ cache: bool,
+ debug: bool,
+ strategy: MergeStrategyName,
+ field_merges: FieldMergeMap | None,
+ field_groups: tuple[FieldGroupTuple, ...],
+ skip_broken_sources: bool,
+ skip_invalid_fields: bool,
+ expand_env_vars: ExpandEnvVarsMode | None,
+ secret_field_names: tuple[str, ...] | None,
+ mask_secrets: bool | None,
+ type_loaders: TypeLoaderMap | None,
+ nested_resolve_strategy: NestedResolveStrategy | None,
+ nested_resolve: NestedResolve | None,
+) -> DataclassInstance | Callable[[type[DataclassInstance]], type[DataclassInstance]]:
+ merge_meta = MergeConfig(
+ sources=sources,
+ strategy=MergeStrategyEnum(strategy),
+ field_merges=field_merges,
+ field_groups=field_groups,
+ skip_broken_sources=skip_broken_sources,
+ skip_invalid_fields=skip_invalid_fields,
+ expand_env_vars=expand_env_vars or "default",
+ secret_field_names=secret_field_names,
+ mask_secrets=mask_secrets,
+ type_loaders=type_loaders,
+ nested_resolve_strategy=nested_resolve_strategy,
+ nested_resolve=nested_resolve,
)
+ if schema is not None:
+ return merge_load_as_function(merge_meta, schema, debug=debug)
+ return merge_make_decorator(merge_meta, cache=cache, debug=debug)
diff --git a/src/dature/masking/detection.py b/src/dature/masking/detection.py
index 73d720b..04b7e4f 100644
--- a/src/dature/masking/detection.py
+++ b/src/dature/masking/detection.py
@@ -5,6 +5,7 @@
from dature.config import config
from dature.fields.payment_card import PaymentCardNumber
from dature.fields.secret_str import SecretStr
+from dature.type_utils import find_nested_dataclasses
from dature.types import TypeAnnotation
_secret_paths_cache: dict[tuple[type, tuple[str, ...]], frozenset[str]] = {}
@@ -59,7 +60,7 @@ def _walk_dataclass_fields(
if _is_secret_type(field_type) or _matches_secret_pattern(field_name, all_patterns):
result.add(full_path)
- nested_types = _find_nested_dataclasses(field_type)
+ nested_types = find_nested_dataclasses(field_type)
for nested_dc in nested_types:
_walk_dataclass_fields(
nested_dc,
@@ -69,26 +70,6 @@ def _walk_dataclass_fields(
)
-def _find_nested_dataclasses(field_type: TypeAnnotation) -> list[type]:
- result: list[type] = []
- queue: list[TypeAnnotation] = [field_type]
-
- while queue:
- current = queue.pop()
-
- if is_dataclass(current) and isinstance(current, type):
- result.append(current)
- continue
-
- origin = get_origin(current)
- if origin is Annotated:
- queue.append(get_args(current)[0])
- elif origin is not None:
- queue.extend(get_args(current))
-
- return result
-
-
def build_secret_paths(
dataclass_type: type,
*,
diff --git a/src/dature/merging/deep_merge.py b/src/dature/merging/deep_merge.py
index 89a6531..107dac9 100644
--- a/src/dature/merging/deep_merge.py
+++ b/src/dature/merging/deep_merge.py
@@ -1,9 +1,9 @@
from dataclasses import dataclass
-from dature.errors.exceptions import MergeConflictError, MergeConflictFieldError, SourceLocation
+from dature.errors import MergeConflictError, MergeConflictFieldError, SourceLocation
from dature.errors.location import resolve_source_location
from dature.loading.source_loading import SourceContext
-from dature.metadata import FieldMergeStrategy, MergeStrategy
+from dature.merging.strategy import FieldMergeStrategyEnum, MergeStrategyEnum
from dature.types import JSONValue
_MIN_CONFLICT_SOURCES = 2
@@ -44,21 +44,21 @@ def _ensure_both_lists(
def _apply_list_merge(
base: JSONValue,
override: JSONValue,
- strategy: FieldMergeStrategy,
+ strategy: FieldMergeStrategyEnum,
) -> list[JSONValue]:
- if strategy == FieldMergeStrategy.APPEND:
+ if strategy == FieldMergeStrategyEnum.APPEND:
pair = _ensure_both_lists(base, override, "APPEND")
return list(pair.base) + list(pair.override)
- if strategy == FieldMergeStrategy.APPEND_UNIQUE:
+ if strategy == FieldMergeStrategyEnum.APPEND_UNIQUE:
pair = _ensure_both_lists(base, override, "APPEND_UNIQUE")
return _deduplicate_list(list(pair.base) + list(pair.override))
- if strategy == FieldMergeStrategy.PREPEND:
+ if strategy == FieldMergeStrategyEnum.PREPEND:
pair = _ensure_both_lists(base, override, "PREPEND")
return list(pair.override) + list(pair.base)
- # PREPEND_UNIQUE
+ # remaining case: FieldMergeStrategyEnum.PREPEND_UNIQUE
pair = _ensure_both_lists(base, override, "PREPEND_UNIQUE")
return _deduplicate_list(list(pair.override) + list(pair.base))
@@ -66,12 +66,12 @@ def _apply_list_merge(
def apply_field_merge(
base: JSONValue,
override: JSONValue,
- strategy: FieldMergeStrategy,
+ strategy: FieldMergeStrategyEnum,
) -> JSONValue:
- if strategy == FieldMergeStrategy.FIRST_WINS:
+ if strategy == FieldMergeStrategyEnum.FIRST_WINS:
return base
- if strategy == FieldMergeStrategy.LAST_WINS:
+ if strategy == FieldMergeStrategyEnum.LAST_WINS:
return override
return _apply_list_merge(base, override, strategy)
@@ -81,7 +81,7 @@ def deep_merge_last_wins(
base: JSONValue,
override: JSONValue,
*,
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
_path: str = "",
) -> JSONValue:
if field_merge_map is not None and _path in field_merge_map:
@@ -108,7 +108,7 @@ def deep_merge_first_wins(
base: JSONValue,
override: JSONValue,
*,
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
_path: str = "",
) -> JSONValue:
if field_merge_map is not None and _path in field_merge_map:
@@ -136,7 +136,7 @@ def _collect_conflicts(
source_contexts: list[SourceContext],
path: list[str],
conflicts: list[tuple[list[str], list[tuple[int, JSONValue]]]],
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
callable_merge_paths: frozenset[str] | None = None,
) -> None:
key_sources: dict[str, list[tuple[int, JSONValue]]] = {}
@@ -183,7 +183,7 @@ def raise_on_conflict(
dicts: list[JSONValue],
source_ctxs: list[SourceContext],
dataclass_name: str,
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
callable_merge_paths: frozenset[str] | None = None,
) -> None:
conflicts: list[tuple[list[str], list[tuple[int, JSONValue]]]] = []
@@ -221,12 +221,12 @@ def deep_merge(
base: JSONValue,
override: JSONValue,
*,
- strategy: MergeStrategy,
- field_merge_map: dict[str, FieldMergeStrategy] | None = None,
+ strategy: MergeStrategyEnum,
+ field_merge_map: dict[str, FieldMergeStrategyEnum] | None = None,
) -> JSONValue:
- if strategy == MergeStrategy.LAST_WINS:
+ if strategy == MergeStrategyEnum.LAST_WINS:
return deep_merge_last_wins(base, override, field_merge_map=field_merge_map)
- if strategy in (MergeStrategy.FIRST_WINS, MergeStrategy.FIRST_FOUND):
+ if strategy in (MergeStrategyEnum.FIRST_WINS, MergeStrategyEnum.FIRST_FOUND):
return deep_merge_first_wins(base, override, field_merge_map=field_merge_map)
msg = "Use merge_sources for RAISE_ON_CONFLICT strategy"
raise ValueError(msg)
diff --git a/src/dature/merging/field_group.py b/src/dature/merging/field_group.py
index 900fe6f..61c46c6 100644
--- a/src/dature/merging/field_group.py
+++ b/src/dature/merging/field_group.py
@@ -1,7 +1,7 @@
from dataclasses import dataclass
from typing import Any
-from dature.errors.exceptions import FieldGroupError, FieldGroupViolationError
+from dature.errors import FieldGroupError, FieldGroupViolationError
from dature.merging.predicate import ResolvedFieldGroup
from dature.types import JSONValue
diff --git a/src/dature/merging/predicate.py b/src/dature/merging/predicate.py
index 82d7338..116efe5 100644
--- a/src/dature/merging/predicate.py
+++ b/src/dature/merging/predicate.py
@@ -2,12 +2,11 @@
from typing import TYPE_CHECKING, Any, get_type_hints
from dature.field_path import FieldPath, resolve_field_type, validate_field_path_owner
-from dature.metadata import FieldMergeStrategy
+from dature.merging.strategy import FieldMergeStrategyEnum
from dature.protocols import DataclassInstance
if TYPE_CHECKING:
- from dature.metadata import FieldGroup, MergeRule
- from dature.types import FieldMergeCallable
+ from dature.types import FieldGroupTuple, FieldMergeCallable, FieldMergeMap
@dataclass(frozen=True, slots=True)
@@ -17,7 +16,7 @@ class ResolvedFieldGroup:
@dataclass(frozen=True, slots=True)
class FieldMergeMaps:
- enum_map: "dict[str, FieldMergeStrategy]"
+ enum_map: dict[str, FieldMergeStrategyEnum]
callable_map: "dict[str, FieldMergeCallable]"
@property
@@ -25,27 +24,29 @@ def callable_paths(self) -> frozenset[str]:
return frozenset(self.callable_map.keys())
-def extract_field_path(predicate: Any, dataclass_: type[DataclassInstance] | None = None) -> str: # noqa: ANN401
+def extract_field_path(predicate: Any, schema: type[DataclassInstance] | None = None) -> str: # noqa: ANN401
if not isinstance(predicate, FieldPath):
msg = f"Expected FieldPath, got {type(predicate).__name__}"
raise TypeError(msg)
- if dataclass_ is not None:
- validate_field_path_owner(predicate, dataclass_)
+ if schema is not None:
+ validate_field_path_owner(predicate, schema)
return predicate.as_path()
def build_field_merge_map(
- field_merges: "tuple[MergeRule, ...]",
- dataclass_: type[DataclassInstance] | None = None,
+ field_merges: "FieldMergeMap | None",
+ schema: type[DataclassInstance] | None = None,
) -> FieldMergeMaps:
- enum_map: dict[str, FieldMergeStrategy] = {}
+ enum_map: dict[str, FieldMergeStrategyEnum] = {}
callable_map: dict[str, FieldMergeCallable] = {}
- for rule in field_merges:
- path = extract_field_path(rule.predicate, dataclass_)
- if isinstance(rule.strategy, FieldMergeStrategy):
- enum_map[path] = rule.strategy
+ if not field_merges:
+ return FieldMergeMaps(enum_map=enum_map, callable_map=callable_map)
+ for predicate, strategy in field_merges.items():
+ path = extract_field_path(predicate, schema)
+ if isinstance(strategy, str):
+ enum_map[path] = FieldMergeStrategyEnum(strategy)
else:
- callable_map[path] = rule.strategy
+ callable_map[path] = strategy
return FieldMergeMaps(enum_map=enum_map, callable_map=callable_map)
@@ -63,18 +64,18 @@ def _expand_dataclass_fields(prefix: str, dc_type: type) -> list[str]:
def build_field_group_paths(
- field_groups: "tuple[FieldGroup, ...]",
- dataclass_: type[DataclassInstance],
+ field_groups: "tuple[FieldGroupTuple, ...]",
+ schema: type[DataclassInstance],
) -> tuple[ResolvedFieldGroup, ...]:
resolved: list[ResolvedFieldGroup] = []
for group in field_groups:
paths: list[str] = []
- for field in group.fields:
- path = extract_field_path(field, dataclass_)
+ for field in group:
+ path = extract_field_path(field, schema)
if isinstance(field, FieldPath) and isinstance(field.owner, type):
resolved_type = resolve_field_type(field.owner, field.parts)
else:
- resolved_type = resolve_field_type(dataclass_, tuple(path.split(".")))
+ resolved_type = resolve_field_type(schema, tuple(path.split(".")))
if resolved_type is not None:
paths.extend(_expand_dataclass_fields(path, resolved_type))
else:
diff --git a/src/dature/merging/strategy.py b/src/dature/merging/strategy.py
new file mode 100644
index 0000000..798cea2
--- /dev/null
+++ b/src/dature/merging/strategy.py
@@ -0,0 +1,17 @@
+from enum import StrEnum
+
+
+class MergeStrategyEnum(StrEnum):
+ LAST_WINS = "last_wins"
+ FIRST_WINS = "first_wins"
+ FIRST_FOUND = "first_found"
+ RAISE_ON_CONFLICT = "raise_on_conflict"
+
+
+class FieldMergeStrategyEnum(StrEnum):
+ FIRST_WINS = "first_wins"
+ LAST_WINS = "last_wins"
+ APPEND = "append"
+ APPEND_UNIQUE = "append_unique"
+ PREPEND = "prepend"
+ PREPEND_UNIQUE = "prepend_unique"
diff --git a/src/dature/metadata.py b/src/dature/metadata.py
deleted file mode 100644
index 6876f39..0000000
--- a/src/dature/metadata.py
+++ /dev/null
@@ -1,168 +0,0 @@
-from collections.abc import Callable
-from dataclasses import dataclass
-from enum import StrEnum
-from pathlib import Path
-from typing import TYPE_CHECKING, Any
-
-from dature.expansion.env_expand import expand_file_path
-from dature.loading.resolver import resolve_loader_class
-from dature.types import FILE_LIKE_TYPES
-
-if TYPE_CHECKING:
- from dature.field_path import FieldPath
- from dature.protocols import LoaderProtocol, ValidatorProtocol
- from dature.types import (
- DotSeparatedPath,
- ExpandEnvVarsMode,
- FieldMapping,
- FieldMergeCallable,
- FieldValidators,
- FileLike,
- FilePath,
- NameStyle,
- NestedResolve,
- NestedResolveStrategy,
- )
-
-
-# --8<-- [start:type-loader]
-@dataclass(frozen=True, slots=True)
-class TypeLoader:
- type_: type
- func: Callable[..., Any]
-
-
-# --8<-- [end:type-loader]
-
-
-# --8<-- [start:merge-strategy]
-class MergeStrategy(StrEnum):
- LAST_WINS = "last_wins"
- FIRST_WINS = "first_wins"
- FIRST_FOUND = "first_found"
- RAISE_ON_CONFLICT = "raise_on_conflict"
-
-
-# --8<-- [end:merge-strategy]
-
-
-# --8<-- [start:field-merge-strategy]
-class FieldMergeStrategy(StrEnum):
- FIRST_WINS = "first_wins"
- LAST_WINS = "last_wins"
- APPEND = "append"
- APPEND_UNIQUE = "append_unique"
- PREPEND = "prepend"
- PREPEND_UNIQUE = "prepend_unique"
-
-
-# --8<-- [end:field-merge-strategy]
-
-
-# --8<-- [start:load-metadata]
-@dataclass(slots=True, kw_only=True)
-class Source:
- file_: "FileLike | FilePath | None" = None
- loader: "type[LoaderProtocol] | None" = None
- prefix: "DotSeparatedPath | None" = None
- split_symbols: str = "__"
- name_style: "NameStyle | None" = None
- field_mapping: "FieldMapping | None" = None
- root_validators: "tuple[ValidatorProtocol, ...] | None" = None
- validators: "FieldValidators | None" = None
- expand_env_vars: "ExpandEnvVarsMode | None" = None
- skip_if_broken: bool | None = None
- skip_if_invalid: "bool | tuple[FieldPath, ...] | None" = None
- secret_field_names: tuple[str, ...] | None = None
- mask_secrets: bool | None = None
- type_loaders: "tuple[TypeLoader, ...] | None" = None
- nested_resolve_strategy: "NestedResolveStrategy | None" = None
- nested_resolve: "NestedResolve | None" = None
- # --8<-- [end:load-metadata]
-
- def __post_init__(self) -> None:
- if isinstance(self.file_, (str, Path)):
- self.file_ = expand_file_path(str(self.file_), mode="strict")
-
- def __repr__(self) -> str:
- loader_class = resolve_loader_class(self.loader, self.file_)
- display = loader_class.display_name
- if isinstance(self.file_, FILE_LIKE_TYPES):
- return f"{display} ''"
- if self.file_ is not None:
- return f"{display} '{self.file_}'"
- return display
-
-
-# --8<-- [start:merge-rule]
-@dataclass(frozen=True, slots=True)
-class MergeRule:
- predicate: "FieldPath"
- strategy: "FieldMergeStrategy | FieldMergeCallable"
-
-
-# --8<-- [end:merge-rule]
-
-
-# --8<-- [start:field-group]
-@dataclass(slots=True)
-class FieldGroup:
- fields: "tuple[FieldPath, ...]"
-
- def __init__(self, *fields: "FieldPath") -> None:
- self.fields = fields
-
-
-# --8<-- [end:field-group]
-
-
-# --8<-- [start:merge-metadata]
-@dataclass(slots=True)
-class Merge:
- sources: tuple[Source, ...]
- strategy: MergeStrategy = MergeStrategy.LAST_WINS
- field_merges: tuple[MergeRule, ...] = ()
- field_groups: tuple[FieldGroup, ...] = ()
- skip_broken_sources: bool = False
- skip_invalid_fields: bool = False
- expand_env_vars: "ExpandEnvVarsMode" = "default"
- secret_field_names: tuple[str, ...] | None = None
- mask_secrets: bool | None = None
- type_loaders: "tuple[TypeLoader, ...] | None" = None
- nested_resolve_strategy: "NestedResolveStrategy | None" = None
- nested_resolve: "NestedResolve | None" = None
-
- def __init__( # noqa: PLR0913
- self,
- *sources: Source,
- strategy: MergeStrategy = MergeStrategy.LAST_WINS,
- field_merges: tuple[MergeRule, ...] = (),
- field_groups: tuple[FieldGroup, ...] = (),
- skip_broken_sources: bool = False,
- skip_invalid_fields: bool = False,
- expand_env_vars: "ExpandEnvVarsMode" = "default",
- secret_field_names: tuple[str, ...] | None = None,
- mask_secrets: bool | None = None,
- type_loaders: "tuple[TypeLoader, ...] | None" = None,
- nested_resolve_strategy: "NestedResolveStrategy | None" = None,
- nested_resolve: "NestedResolve | None" = None,
- ) -> None:
- if not sources:
- msg = "Merge() requires at least one Source"
- raise TypeError(msg)
-
- self.sources = sources
- self.strategy = strategy
- self.field_merges = field_merges
- self.field_groups = field_groups
- self.skip_broken_sources = skip_broken_sources
- self.skip_invalid_fields = skip_invalid_fields
- self.expand_env_vars = expand_env_vars
- self.secret_field_names = secret_field_names
- self.mask_secrets = mask_secrets
- self.type_loaders = type_loaders
- self.nested_resolve_strategy = nested_resolve_strategy
- self.nested_resolve = nested_resolve
-
-
-# --8<-- [end:merge-metadata]
diff --git a/src/dature/mypy_plugin.py b/src/dature/mypy_plugin.py
index 5e3f5f5..1c5f93a 100644
--- a/src/dature/mypy_plugin.py
+++ b/src/dature/mypy_plugin.py
@@ -13,7 +13,7 @@
from collections.abc import Callable
-from mypy.nodes import ARG_NAMED_OPT, ARG_STAR, ARG_STAR2
+from mypy.nodes import ARG_NAMED_OPT, ARG_STAR, ARG_STAR2, ArgKind
from mypy.options import Options
from mypy.plugin import ClassDefContext, FunctionSigContext, Plugin
from mypy.types import CallableType, FunctionLike
@@ -27,7 +27,7 @@
def _make_args_optional(sig: CallableType) -> CallableType:
- new_arg_kinds = []
+ new_arg_kinds: list[ArgKind] = []
for kind in sig.arg_kinds:
if kind in (ARG_STAR, ARG_STAR2):
new_arg_kinds.append(kind)
diff --git a/src/dature/path_finders/base.py b/src/dature/path_finders/base.py
index bd58bb7..0665b1b 100644
--- a/src/dature/path_finders/base.py
+++ b/src/dature/path_finders/base.py
@@ -1,6 +1,6 @@
import abc
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
class PathFinder(abc.ABC):
diff --git a/src/dature/path_finders/ini_.py b/src/dature/path_finders/ini_.py
index 827557a..d17c78f 100644
--- a/src/dature/path_finders/ini_.py
+++ b/src/dature/path_finders/ini_.py
@@ -1,7 +1,7 @@
import configparser
import sys
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.base import PathFinder
_MIN_INI_PATH_DEPTH = 2
diff --git a/src/dature/path_finders/json5_.py b/src/dature/path_finders/json5_.py
index 4e51aa2..f17ccb9 100644
--- a/src/dature/path_finders/json5_.py
+++ b/src/dature/path_finders/json5_.py
@@ -1,7 +1,7 @@
from json5.model import Identifier, JSONArray, JSONObject, String, Value
from json5.parser import parse_source
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.base import PathFinder
diff --git a/src/dature/path_finders/json_.py b/src/dature/path_finders/json_.py
index eb50a8c..aca276e 100644
--- a/src/dature/path_finders/json_.py
+++ b/src/dature/path_finders/json_.py
@@ -5,7 +5,7 @@
from json.scanner import py_make_scanner # type: ignore[attr-defined]
from typing import TYPE_CHECKING
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.base import PathFinder
if TYPE_CHECKING:
diff --git a/src/dature/path_finders/toml_.py b/src/dature/path_finders/toml_.py
index da4c86c..3364f25 100644
--- a/src/dature/path_finders/toml_.py
+++ b/src/dature/path_finders/toml_.py
@@ -3,7 +3,7 @@
import toml_rs
from toml_rs._lib import TomlVersion
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.base import PathFinder
if TYPE_CHECKING:
diff --git a/src/dature/path_finders/yaml_.py b/src/dature/path_finders/yaml_.py
index 104203f..b2a286a 100644
--- a/src/dature/path_finders/yaml_.py
+++ b/src/dature/path_finders/yaml_.py
@@ -5,7 +5,7 @@
from ruamel.yaml.docinfo import Version
from ruamel.yaml.scalarstring import ScalarString
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.base import PathFinder
diff --git a/src/dature/protocols.py b/src/dature/protocols.py
index d0340d2..dd7b8d2 100644
--- a/src/dature/protocols.py
+++ b/src/dature/protocols.py
@@ -1,15 +1,6 @@
from collections.abc import Callable
from dataclasses import Field
-from pathlib import Path
-from typing import Any, ClassVar, Protocol, TypeVar
-
-from adaptix import Retort
-
-from dature.errors.exceptions import SourceLocation
-from dature.path_finders.base import PathFinder
-from dature.types import FileOrStream, JSONValue, LoadRawResult, NestedConflict
-
-_T = TypeVar("_T")
+from typing import Any, ClassVar, Protocol
class DataclassInstance(Protocol):
@@ -20,31 +11,3 @@ class ValidatorProtocol(Protocol):
def get_validator_func(self) -> Callable[..., bool]: ...
def get_error_message(self) -> str: ...
-
-
-class LoaderProtocol(Protocol):
- display_name: ClassVar[str]
- display_label: ClassVar[str]
- path_finder_class: type[PathFinder] | None
- retorts: dict[type, Retort]
-
- def load_raw(self, path: FileOrStream) -> LoadRawResult: ...
-
- def transform_to_dataclass(self, data: JSONValue, dataclass_: type[_T]) -> _T: ...
-
- def create_retort(self) -> Retort: ...
-
- def create_probe_retort(self) -> Retort: ...
-
- def create_validating_retort(self, dataclass_: type[_T]) -> Retort: ...
-
- @classmethod
- def resolve_location(
- cls,
- field_path: list[str],
- file_path: Path | None,
- file_content: str | None,
- prefix: str | None,
- split_symbols: str,
- nested_conflict: NestedConflict | None,
- ) -> list[SourceLocation]: ...
diff --git a/src/dature/skip_field_provider.py b/src/dature/skip_field_provider.py
index e73840b..c4e0264 100644
--- a/src/dature/skip_field_provider.py
+++ b/src/dature/skip_field_provider.py
@@ -100,13 +100,13 @@ class FilterResult:
def filter_invalid_fields(
raw_dict: JSONValue,
probe_retort: Retort,
- dataclass_: type[DataclassInstance],
+ schema: type[DataclassInstance],
allowed_fields: set[str] | None,
) -> FilterResult:
if not isinstance(raw_dict, dict):
return FilterResult(cleaned_dict=raw_dict, skipped_paths=[])
- probed: ProbeDict = probe_retort.load(raw_dict, dataclass_)
+ probed: ProbeDict = probe_retort.load(raw_dict, schema)
all_not_loaded = _collect_not_loaded_paths(probed, "")
skipped: list[str] = []
diff --git a/src/dature/sources_loader/__init__.py b/src/dature/sources/__init__.py
similarity index 100%
rename from src/dature/sources_loader/__init__.py
rename to src/dature/sources/__init__.py
diff --git a/src/dature/sources/base.py b/src/dature/sources/base.py
new file mode 100644
index 0000000..96df606
--- /dev/null
+++ b/src/dature/sources/base.py
@@ -0,0 +1,471 @@
+import abc
+import json
+import logging
+from dataclasses import dataclass, field
+from datetime import date, datetime, time
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, cast
+
+from adaptix import Retort, loader
+from adaptix.provider import Provider
+
+from dature.errors import LineRange, SourceLocation
+from dature.expansion.env_expand import expand_env_vars, expand_file_path
+from dature.field_path import FieldPath
+from dature.loaders import (
+ bool_loader,
+ bytearray_from_json_string,
+ date_from_string,
+ datetime_from_string,
+ float_from_string,
+ none_from_empty_string,
+ optional_from_empty_string,
+ str_from_scalar,
+ time_from_string,
+)
+from dature.path_finders.base import PathFinder
+from dature.types import (
+ FILE_LIKE_TYPES,
+ DotSeparatedPath,
+ ExpandEnvVarsMode,
+ FileOrStream,
+ JSONValue,
+ LoadRawResult,
+ NestedConflict,
+ NestedConflicts,
+ NestedResolve,
+ NestedResolveStrategy,
+)
+
+if TYPE_CHECKING:
+ from dature.protocols import ValidatorProtocol
+ from dature.types import (
+ FieldMapping,
+ FieldValidators,
+ FileLike,
+ FilePath,
+ NameStyle,
+ TypeLoaderMap,
+ )
+
+logger = logging.getLogger("dature")
+
+
+def _string_value_loaders() -> list[Provider]:
+ return [
+ loader(str, str_from_scalar),
+ loader(float, float_from_string),
+ loader(date, date_from_string),
+ loader(datetime, datetime_from_string),
+ loader(time, time_from_string),
+ loader(bytearray, bytearray_from_json_string),
+ loader(type(None), none_from_empty_string),
+ loader(str | None, optional_from_empty_string),
+ loader(bool, bool_loader),
+ ]
+
+
+# --8<-- [start:load-metadata]
+@dataclass(kw_only=True, repr=False)
+class Source(abc.ABC):
+ prefix: "DotSeparatedPath | None" = None
+ name_style: "NameStyle | None" = None
+ field_mapping: "FieldMapping | None" = None
+ root_validators: "tuple[ValidatorProtocol, ...] | None" = None
+ validators: "FieldValidators | None" = None
+ expand_env_vars: "ExpandEnvVarsMode | None" = None
+ skip_if_broken: bool | None = None
+ skip_if_invalid: "bool | tuple[FieldPath, ...] | None" = None
+ secret_field_names: tuple[str, ...] | None = None
+ mask_secrets: bool | None = None
+ type_loaders: "TypeLoaderMap | None" = None
+ # --8<-- [end:load-metadata]
+
+ format_name: ClassVar[str]
+ location_label: ClassVar[str]
+ path_finder_class: ClassVar[type[PathFinder] | None] = None
+
+ retorts: dict[tuple[type, frozenset[tuple[type, Any]]], Retort] = field(
+ default_factory=dict,
+ init=False,
+ repr=False,
+ )
+
+ def __repr__(self) -> str:
+ return self.format_name
+
+ def file_display(self) -> str | None:
+ return None
+
+ def file_path_for_errors(self) -> Path | None:
+ return None
+
+ def additional_loaders(self) -> list[Provider]:
+ return []
+
+ @staticmethod
+ def _infer_type(value: str) -> JSONValue:
+ if value == "":
+ return value
+
+ try:
+ return cast("JSONValue", json.loads(value))
+ except (json.JSONDecodeError, ValueError):
+ return value
+
+ @classmethod
+ def _parse_string_values(cls, data: JSONValue, *, infer_scalars: bool = False) -> JSONValue:
+ if not isinstance(data, dict):
+ return data
+
+ result: dict[str, JSONValue] = {}
+ for key, value in data.items():
+ if isinstance(value, dict):
+ result[key] = cls._parse_string_values(value, infer_scalars=True)
+ elif isinstance(value, str) and (infer_scalars or value.startswith(("[", "{"))):
+ result[key] = cls._infer_type(value)
+ else:
+ result[key] = value
+ return result
+
+ @abc.abstractmethod
+ def _load(self) -> JSONValue: ...
+
+ def _apply_prefix(self, data: JSONValue) -> JSONValue:
+ if not self.prefix:
+ return data
+
+ for key in self.prefix.split("."):
+ if not isinstance(data, dict):
+ return {}
+ if key not in data:
+ return {}
+ data = data[key]
+
+ return data
+
+ def _pre_processing(
+ self,
+ data: JSONValue,
+ *,
+ resolved_expand: ExpandEnvVarsMode,
+ ) -> JSONValue:
+ prefixed = self._apply_prefix(data)
+ return expand_env_vars(prefixed, mode=resolved_expand)
+
+ def load_raw(self, *, resolved_expand: ExpandEnvVarsMode = "default") -> LoadRawResult:
+ data = self._load()
+ processed = self._pre_processing(data, resolved_expand=resolved_expand)
+ logger.debug(
+ "[%s] load_raw: source=%s, raw_keys=%s, after_preprocessing_keys=%s",
+ type(self).__name__,
+ self.file_display() or "",
+ sorted(data.keys()) if isinstance(data, dict) else "",
+ sorted(processed.keys()) if isinstance(processed, dict) else "",
+ )
+ return LoadRawResult(data=processed)
+
+ @staticmethod
+ def _empty_location(location_label: str, file_path: Path | None) -> SourceLocation:
+ return SourceLocation(
+ location_label=location_label,
+ file_path=file_path,
+ line_range=None,
+ line_content=None,
+ env_var_name=None,
+ )
+
+ @staticmethod
+ def _build_search_path(field_path: list[str], prefix: str | None) -> list[str]:
+ if not prefix:
+ return field_path
+ prefix_parts = prefix.split(".")
+ return prefix_parts + field_path
+
+ @staticmethod
+ def _find_parent_line_range(finder: PathFinder, search_path: list[str]) -> LineRange | None:
+ path = search_path[:-1]
+ while path:
+ line_range = finder.find_line_range(path)
+ if line_range is not None:
+ return line_range
+ path = path[:-1]
+ return None
+
+ @staticmethod
+ def _strip_common_indent(raw_lines: list[str]) -> list[str]:
+ indents = [len(line) - len(line.lstrip()) for line in raw_lines if line.strip()]
+ if not indents:
+ return raw_lines
+ min_indent = min(indents)
+ return [line[min_indent:] for line in raw_lines]
+
+ @classmethod
+ def resolve_location(
+ cls,
+ *,
+ field_path: list[str],
+ file_path: Path | None,
+ file_content: str | None,
+ prefix: str | None,
+ nested_conflict: NestedConflict | None, # noqa: ARG003
+ split_symbols: str | None = None, # noqa: ARG003
+ ) -> list[SourceLocation]:
+ if file_content is None or not field_path:
+ return [cls._empty_location(cls.location_label, file_path)]
+
+ if cls.path_finder_class is None:
+ return [cls._empty_location(cls.location_label, file_path)]
+
+ search_path = cls._build_search_path(field_path, prefix)
+ finder = cls.path_finder_class(file_content)
+ line_range = finder.find_line_range(search_path)
+ if line_range is None:
+ line_range = cls._find_parent_line_range(finder, search_path)
+ if line_range is None:
+ return [cls._empty_location(cls.location_label, file_path)]
+
+ lines = file_content.splitlines()
+ content_lines: list[str] | None = None
+ if 0 < line_range.start <= len(lines):
+ end = min(line_range.end, len(lines))
+ raw = lines[line_range.start - 1 : end]
+ content_lines = cls._strip_common_indent(raw)
+
+ return [
+ SourceLocation(
+ location_label=cls.location_label,
+ file_path=file_path,
+ line_range=line_range,
+ line_content=content_lines,
+ env_var_name=None,
+ ),
+ ]
+
+
+# --8<-- [start:file-source]
+@dataclass(kw_only=True, repr=False)
+class FileFieldMixin:
+ file: "FileLike | FilePath | None" = None
+ # --8<-- [end:file-source]
+
+ def _init_file_field(self) -> None:
+ if isinstance(self.file, (str, Path)):
+ self.file = expand_file_path(str(self.file), mode="strict")
+
+ @staticmethod
+ def resolve_file_field(file: "FileLike | FilePath | None") -> FileOrStream:
+ if isinstance(file, FILE_LIKE_TYPES):
+ return file
+ if file is not None:
+ return Path(file)
+ return Path()
+
+ @staticmethod
+ def file_field_display(file: "FileLike | FilePath | None") -> str | None:
+ if isinstance(file, FILE_LIKE_TYPES):
+ return ""
+ if file is not None:
+ return str(file)
+ return None
+
+ @staticmethod
+ def file_field_path_for_errors(file: "FileLike | FilePath | None") -> Path | None:
+ if isinstance(file, FILE_LIKE_TYPES):
+ return None
+ if file is not None:
+ return Path(file)
+ return None
+
+ def file_display(self) -> str | None:
+ return self.file_field_display(self.file)
+
+ def file_path_for_errors(self) -> Path | None:
+ return self.file_field_path_for_errors(self.file)
+
+
+@dataclass(kw_only=True, repr=False)
+class FileSource(FileFieldMixin, Source, abc.ABC):
+ location_label: ClassVar[str] = "FILE"
+
+ def __post_init__(self) -> None:
+ self._init_file_field()
+
+ def __repr__(self) -> str:
+ display = self.format_name
+ file_path_display = self.file_display()
+ if file_path_display is not None:
+ return f"{display} '{file_path_display}'"
+ return display
+
+ def _load(self) -> JSONValue:
+ path = self.resolve_file_field(self.file)
+ return self._load_file(path)
+
+ @abc.abstractmethod
+ def _load_file(self, path: FileOrStream) -> JSONValue: ...
+
+
+# --8<-- [start:flat-key-source]
+@dataclass(kw_only=True, repr=False)
+class FlatKeySource(Source, abc.ABC):
+ split_symbols: str = "__"
+ nested_resolve_strategy: "NestedResolveStrategy | None" = None
+ nested_resolve: NestedResolve | None = None
+ # --8<-- [end:flat-key-source]
+
+ @staticmethod
+ def _set_nested(target: dict[str, JSONValue], keys: list[str], value: str) -> None:
+ for key in keys[:-1]:
+ target = cast("dict[str, JSONValue]", target.setdefault(key, {}))
+ target[keys[-1]] = value
+
+ def _resolve_field_strategy(
+ self,
+ field_name: str,
+ *,
+ resolved_nested_strategy: NestedResolveStrategy = "flat",
+ resolved_nested_resolve: NestedResolve | None = None,
+ ) -> NestedResolveStrategy:
+ effective_nested_resolve = (
+ resolved_nested_resolve if resolved_nested_resolve is not None else self.nested_resolve
+ )
+ if effective_nested_resolve is not None:
+ for strategy, field_paths in effective_nested_resolve.items():
+ paths = field_paths if isinstance(field_paths, tuple) else (field_paths,)
+ for field_path in paths:
+ if self._field_path_matches(field_path, field_name):
+ return strategy
+ return resolved_nested_strategy
+
+ @staticmethod
+ def _field_path_matches(field_path: FieldPath, field_name: str) -> bool:
+ if not field_path.parts:
+ return True
+ return field_path.parts[0] == field_name
+
+ def additional_loaders(self) -> list[Provider]:
+ return _string_value_loaders()
+
+ @staticmethod
+ def _resolve_var_name(
+ field_path: list[str],
+ prefix: str | None,
+ split_symbols: str,
+ conflict: NestedConflict | None,
+ ) -> str:
+ def _build_name(parts: list[str]) -> str:
+ var_name = split_symbols.join(part.upper() for part in parts)
+ if prefix is not None:
+ return prefix + var_name
+ return var_name
+
+ json_var = _build_name(field_path[:1])
+ if conflict is not None and conflict.used_var == json_var:
+ return json_var
+ return _build_name(field_path)
+
+ def _build_var_name(self, key: str) -> str:
+ if self.prefix:
+ return self.prefix + key.upper()
+ return key.upper()
+
+ def _build_nested_var_name(self, top_field: str, nested: dict[str, JSONValue]) -> str:
+ for sub_key in nested:
+ full_key = f"{top_field}{self.split_symbols}{sub_key}"
+ return self._build_var_name(full_key)
+ return self._build_var_name(top_field)
+
+ def _pre_process_row(
+ self,
+ key: str,
+ value: str,
+ result: dict[str, JSONValue],
+ conflicts: NestedConflicts,
+ *,
+ resolved_nested_strategy: NestedResolveStrategy = "flat",
+ resolved_nested_resolve: NestedResolve | None = None,
+ ) -> None:
+ parts = key.split(self.split_symbols)
+ self._process_key_value(
+ parts=parts,
+ value=value,
+ result=result,
+ conflicts=conflicts,
+ resolved_nested_strategy=resolved_nested_strategy,
+ resolved_nested_resolve=resolved_nested_resolve,
+ )
+
+ def load_raw(
+ self,
+ *,
+ resolved_expand: ExpandEnvVarsMode = "default",
+ resolved_nested_strategy: NestedResolveStrategy = "flat",
+ resolved_nested_resolve: NestedResolve | None = None,
+ ) -> LoadRawResult:
+ data = self._load()
+ data_dict = cast("dict[str, str]", data)
+ result: dict[str, JSONValue] = {}
+ conflicts: NestedConflicts = {}
+
+ for key, value in data_dict.items():
+ self._pre_process_row(
+ key=key,
+ value=value,
+ result=result,
+ conflicts=conflicts,
+ resolved_nested_strategy=resolved_nested_strategy,
+ resolved_nested_resolve=resolved_nested_resolve,
+ )
+
+ expanded = expand_env_vars(result, mode=resolved_expand)
+ processed = self._parse_string_values(expanded)
+ return LoadRawResult(data=processed, nested_conflicts=conflicts)
+
+ def _process_key_value(
+ self,
+ *,
+ parts: list[str],
+ value: str,
+ result: dict[str, JSONValue],
+ conflicts: NestedConflicts,
+ resolved_nested_strategy: NestedResolveStrategy = "flat",
+ resolved_nested_resolve: NestedResolve | None = None,
+ ) -> None:
+ if len(parts) > 1:
+ top_field = parts[0]
+ strategy = self._resolve_field_strategy(
+ top_field,
+ resolved_nested_strategy=resolved_nested_strategy,
+ resolved_nested_resolve=resolved_nested_resolve,
+ )
+ existing = result.get(top_field)
+ if isinstance(existing, str):
+ flat_var = self._build_var_name(self.split_symbols.join(parts))
+ json_var = self._build_var_name(top_field)
+ if strategy == "flat":
+ result.pop(top_field)
+ self._set_nested(result, parts, value)
+ conflicts[top_field] = NestedConflict(flat_var, json_var, existing)
+ elif strategy == "json":
+ conflicts[top_field] = NestedConflict(json_var, flat_var, existing)
+ else:
+ self._set_nested(result, parts, value)
+ else:
+ top_field = parts[0]
+ strategy = self._resolve_field_strategy(
+ top_field,
+ resolved_nested_strategy=resolved_nested_strategy,
+ resolved_nested_resolve=resolved_nested_resolve,
+ )
+ existing = result.get(top_field)
+ if isinstance(existing, dict):
+ json_var = self._build_var_name(top_field)
+ flat_var = self._build_nested_var_name(top_field, existing)
+ if strategy == "json":
+ result[top_field] = value
+ conflicts[top_field] = NestedConflict(json_var, flat_var, value)
+ elif strategy == "flat":
+ conflicts[top_field] = NestedConflict(flat_var, json_var, value)
+ else:
+ result[top_field] = value
diff --git a/src/dature/sources/docker_secrets.py b/src/dature/sources/docker_secrets.py
new file mode 100644
index 0000000..4ffe701
--- /dev/null
+++ b/src/dature/sources/docker_secrets.py
@@ -0,0 +1,85 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import TYPE_CHECKING, ClassVar
+
+from dature.errors import SourceLocation
+from dature.expansion.env_expand import expand_file_path
+from dature.sources.base import FlatKeySource
+from dature.types import JSONValue, NestedConflict
+
+if TYPE_CHECKING:
+ from dature.types import FilePath
+
+
+@dataclass(kw_only=True, repr=False)
+class DockerSecretsSource(FlatKeySource):
+ dir_: "FilePath"
+ format_name = "docker_secrets"
+ location_label: ClassVar[str] = "SECRET FILE"
+
+ def __post_init__(self) -> None:
+ if isinstance(self.dir_, (str, Path)):
+ self.dir_ = expand_file_path(str(self.dir_), mode="strict")
+
+ def __repr__(self) -> str:
+ return f"{self.format_name} '{self.dir_}'"
+
+ def file_display(self) -> str | None:
+ return str(self.dir_)
+
+ def file_path_for_errors(self) -> Path | None:
+ return Path(self.dir_)
+
+ @classmethod
+ def resolve_location(
+ cls,
+ *,
+ field_path: list[str],
+ file_path: Path | None,
+ file_content: str | None, # noqa: ARG003
+ prefix: str | None,
+ nested_conflict: NestedConflict | None,
+ split_symbols: str | None = None,
+ ) -> list[SourceLocation]:
+ resolved_symbols = split_symbols or "__"
+ if nested_conflict is not None:
+ json_var = cls._resolve_var_name(field_path[:1], prefix, resolved_symbols, None)
+ if nested_conflict.used_var == json_var:
+ secret_name = field_path[0]
+ else:
+ secret_name = resolved_symbols.join(field_path)
+ else:
+ secret_name = resolved_symbols.join(field_path)
+ if prefix is not None:
+ secret_name = prefix + secret_name
+ secret_file = file_path / secret_name if file_path is not None else None
+ return [
+ SourceLocation(
+ location_label=cls.location_label,
+ file_path=secret_file,
+ line_range=None,
+ line_content=None,
+ env_var_name=None,
+ ),
+ ]
+
+ def _load(self) -> JSONValue:
+ path = Path(self.dir_)
+
+ result: dict[str, JSONValue] = {}
+ for entry in sorted(path.iterdir()):
+ if not entry.is_file():
+ continue
+
+ key = entry.name.lower()
+ value = entry.read_text().strip()
+
+ if self.prefix and not key.startswith(self.prefix.lower()):
+ continue
+
+ if self.prefix:
+ key = key[len(self.prefix) :]
+
+ result[key] = value
+
+ return result
diff --git a/src/dature/sources_loader/env_.py b/src/dature/sources/env_.py
similarity index 64%
rename from src/dature/sources_loader/env_.py
rename to src/dature/sources/env_.py
index 124fac2..3835f46 100644
--- a/src/dature/sources_loader/env_.py
+++ b/src/dature/sources/env_.py
@@ -1,47 +1,52 @@
import io
import os
from collections.abc import Iterable
+from dataclasses import dataclass
from pathlib import Path
from typing import ClassVar, cast
-from dature.errors.exceptions import LineRange, SourceLocation
-from dature.sources_loader.flat_key import FlatKeyLoader
+from dature.errors import LineRange, SourceLocation
+from dature.sources.base import FileFieldMixin, FlatKeySource
from dature.types import (
BINARY_IO_TYPES,
TEXT_IO_TYPES,
- FileOrStream,
JSONValue,
NestedConflict,
NestedConflicts,
+ NestedResolve,
+ NestedResolveStrategy,
)
-class EnvLoader(FlatKeyLoader):
- display_name = "env"
- display_label: ClassVar[str] = "ENV"
+@dataclass(kw_only=True, repr=False)
+class EnvSource(FlatKeySource):
+ format_name = "env"
+ location_label: ClassVar[str] = "ENV"
- def _load(self, _: FileOrStream) -> JSONValue:
+ def _load(self) -> JSONValue:
return cast("JSONValue", os.environ)
@classmethod
def resolve_location(
cls,
+ *,
field_path: list[str],
file_path: Path | None, # noqa: ARG003
file_content: str | None, # noqa: ARG003
prefix: str | None,
- split_symbols: str,
nested_conflict: NestedConflict | None,
+ split_symbols: str | None = None,
) -> list[SourceLocation]:
- var_name = cls._resolve_var_name(field_path, prefix, split_symbols, nested_conflict)
+ resolved_symbols = split_symbols or "__"
+ var_name = cls._resolve_var_name(field_path, prefix, resolved_symbols, nested_conflict)
env_var_value: str | None = None
if nested_conflict is not None:
- json_var = cls._resolve_var_name(field_path[:1], prefix, split_symbols, None)
+ json_var = cls._resolve_var_name(field_path[:1], prefix, resolved_symbols, None)
if nested_conflict.used_var == json_var:
env_var_value = nested_conflict.json_raw_value
return [
SourceLocation(
- display_label=cls.display_label,
+ location_label=cls.location_label,
file_path=None,
line_range=None,
line_content=None,
@@ -56,39 +61,62 @@ def _pre_process_row(
value: str,
result: dict[str, JSONValue],
conflicts: NestedConflicts,
+ *,
+ resolved_nested_strategy: NestedResolveStrategy = "flat",
+ resolved_nested_resolve: NestedResolve | None = None,
) -> None:
- if self._prefix and not key.startswith(self._prefix):
+ if self.prefix and not key.startswith(self.prefix):
return
- processed_key = key[len(self._prefix) :] if self._prefix else key
+ processed_key = key[len(self.prefix) :] if self.prefix else key
processed_key = processed_key.lower()
- parts = processed_key.split(self._split_symbols)
- self._process_key_value(parts=parts, value=value, result=result, conflicts=conflicts)
+ parts = processed_key.split(self.split_symbols)
+ self._process_key_value(
+ parts=parts,
+ value=value,
+ result=result,
+ conflicts=conflicts,
+ resolved_nested_strategy=resolved_nested_strategy,
+ resolved_nested_resolve=resolved_nested_resolve,
+ )
-class EnvFileLoader(EnvLoader):
- display_name = "envfile"
- display_label: ClassVar[str] = "ENV FILE"
+@dataclass(kw_only=True, repr=False)
+class EnvFileSource(FileFieldMixin, EnvSource):
+ format_name = "envfile"
+ location_label: ClassVar[str] = "ENV FILE"
+
+ def __post_init__(self) -> None:
+ self._init_file_field()
+
+ def __repr__(self) -> str:
+ display = self.format_name
+ file_path_display = self.file_display()
+ if file_path_display is not None:
+ return f"{display} '{file_path_display}'"
+ return display
@classmethod
def resolve_location(
cls,
+ *,
field_path: list[str],
file_path: Path | None,
file_content: str | None,
prefix: str | None,
- split_symbols: str,
nested_conflict: NestedConflict | None,
+ split_symbols: str | None = None,
) -> list[SourceLocation]:
- var_name = cls._resolve_var_name(field_path, prefix, split_symbols, nested_conflict)
+ resolved_symbols = split_symbols or "__"
+ var_name = cls._resolve_var_name(field_path, prefix, resolved_symbols, nested_conflict)
line_range: LineRange | None = None
line_content: list[str] | None = None
if file_content is not None:
line_range, line_content = _find_env_line(file_content, var_name)
return [
SourceLocation(
- display_label=cls.display_label,
+ location_label=cls.location_label,
file_path=file_path,
line_range=line_range,
line_content=line_content,
@@ -96,8 +124,9 @@ def resolve_location(
),
]
- def _load(self, path: FileOrStream) -> JSONValue:
+ def _load(self) -> JSONValue:
"""Parse .env file into a flat key=value dict (before nesting/expand/parse)."""
+ path = self.resolve_file_field(self.file)
raw_pairs: dict[str, JSONValue] = {}
if isinstance(path, TEXT_IO_TYPES):
diff --git a/src/dature/sources/ini_.py b/src/dature/sources/ini_.py
new file mode 100644
index 0000000..d011033
--- /dev/null
+++ b/src/dature/sources/ini_.py
@@ -0,0 +1,61 @@
+import configparser
+import io
+from dataclasses import dataclass
+from typing import cast
+
+from adaptix.provider import Provider
+
+from dature.expansion.env_expand import expand_env_vars
+from dature.path_finders.ini_ import TablePathFinder
+from dature.sources.base import FileSource, _string_value_loaders
+from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, ExpandEnvVarsMode, FileOrStream, JSONValue
+
+
+@dataclass(kw_only=True, repr=False)
+class IniSource(FileSource):
+ format_name = "ini"
+ path_finder_class = TablePathFinder
+
+ def additional_loaders(self) -> list[Provider]:
+ return _string_value_loaders()
+
+ def _pre_processing(
+ self,
+ data: JSONValue,
+ *,
+ resolved_expand: ExpandEnvVarsMode,
+ ) -> JSONValue:
+ prefixed = self._apply_prefix(data)
+ expanded = expand_env_vars(prefixed, mode=resolved_expand)
+ return self._parse_string_values(expanded)
+
+ def _load_file(self, path: FileOrStream) -> JSONValue:
+ config = configparser.ConfigParser(interpolation=None)
+ if isinstance(path, TEXT_IO_TYPES):
+ config.read_file(path)
+ elif isinstance(path, BINARY_IO_TYPES):
+ config.read_file(io.TextIOWrapper(cast("io.BufferedReader", path)))
+ else:
+ with path.open() as f:
+ config.read_file(f)
+ if self.prefix and self.prefix in config:
+ result: dict[str, JSONValue] = dict(config[self.prefix])
+ child_prefix = self.prefix + "."
+ for section in config.sections():
+ if section.startswith(child_prefix):
+ nested_key = section[len(child_prefix) :]
+ result[nested_key] = dict(config[section])
+ return {self.prefix: result}
+
+ all_sections: dict[str, JSONValue] = {}
+ if config.defaults():
+ all_sections["DEFAULT"] = dict(config.defaults())
+ for section in config.sections():
+ parts = section.split(".")
+ target = all_sections
+ for part in parts[:-1]:
+ if part not in target:
+ target[part] = {}
+ target = cast("dict[str, JSONValue]", target[part])
+ target[parts[-1]] = dict(config[section])
+ return all_sections
diff --git a/src/dature/sources_loader/json5_.py b/src/dature/sources/json5_.py
similarity index 51%
rename from src/dature/sources_loader/json5_.py
rename to src/dature/sources/json5_.py
index 4562953..18d7787 100644
--- a/src/dature/sources_loader/json5_.py
+++ b/src/dature/sources/json5_.py
@@ -1,29 +1,38 @@
import io
+from dataclasses import dataclass
from datetime import date, datetime, time
-from typing import TextIO, cast
+from typing import TYPE_CHECKING, TextIO, cast
-import json5
from adaptix import loader
from adaptix.provider import Provider
-from dature.path_finders.json5_ import Json5PathFinder
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
+from dature._descriptors import classproperty
+from dature.sources.base import FileSource
+
+if TYPE_CHECKING:
+ from dature.path_finders.base import PathFinder
+from dature.loaders import (
bytearray_from_string,
date_from_string,
datetime_from_string,
float_from_string,
time_from_string,
)
-from dature.sources_loader.loaders.json5_ import str_from_json_identifier
+from dature.loaders.json5_ import str_from_json_identifier
from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue
-class Json5Loader(BaseLoader):
- display_name = "json5"
- path_finder_class = Json5PathFinder
+@dataclass(kw_only=True, repr=False)
+class Json5Source(FileSource):
+ format_name = "json5"
+
+ @classproperty
+ def path_finder_class(cls) -> "type[PathFinder]": # noqa: N805
+ from dature.path_finders.json5_ import Json5PathFinder # noqa: PLC0415
- def _additional_loaders(self) -> list[Provider]:
+ return Json5PathFinder
+
+ def additional_loaders(self) -> list[Provider]:
return [
loader(str, str_from_json_identifier),
loader(float, float_from_string),
@@ -33,10 +42,12 @@ def _additional_loaders(self) -> list[Provider]:
loader(bytearray, bytearray_from_string),
]
- def _load(self, path: FileOrStream) -> JSONValue:
+ def _load_file(self, path: FileOrStream) -> JSONValue:
+ import json5 # noqa: PLC0415
+
if isinstance(path, TEXT_IO_TYPES):
return cast("JSONValue", json5.load(cast("TextIO", path)))
if isinstance(path, BINARY_IO_TYPES):
return cast("JSONValue", json5.load(io.TextIOWrapper(cast("io.BufferedReader", path))))
- with path.open() as file_:
- return cast("JSONValue", json5.load(file_))
+ with path.open() as file:
+ return cast("JSONValue", json5.load(file))
diff --git a/src/dature/sources_loader/json_.py b/src/dature/sources/json_.py
similarity index 66%
rename from src/dature/sources_loader/json_.py
rename to src/dature/sources/json_.py
index 718b41b..23e38b0 100644
--- a/src/dature/sources_loader/json_.py
+++ b/src/dature/sources/json_.py
@@ -1,27 +1,29 @@
import json
+from dataclasses import dataclass
from datetime import date, datetime, time
from typing import cast
from adaptix import loader
from adaptix.provider import Provider
-from dature.path_finders.json_ import JsonPathFinder
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
+from dature.loaders import (
bytearray_from_string,
date_from_string,
datetime_from_string,
float_from_string,
time_from_string,
)
+from dature.path_finders.json_ import JsonPathFinder
+from dature.sources.base import FileSource
from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
-class JsonLoader(BaseLoader):
- display_name = "json"
+@dataclass(kw_only=True, repr=False)
+class JsonSource(FileSource):
+ format_name = "json"
path_finder_class = JsonPathFinder
- def _additional_loaders(self) -> list[Provider]:
+ def additional_loaders(self) -> list[Provider]:
return [
loader(float, float_from_string),
loader(date, date_from_string),
@@ -30,8 +32,8 @@ def _additional_loaders(self) -> list[Provider]:
loader(bytearray, bytearray_from_string),
]
- def _load(self, path: FileOrStream) -> JSONValue:
+ def _load_file(self, path: FileOrStream) -> JSONValue:
if isinstance(path, FILE_LIKE_TYPES):
return cast("JSONValue", json.load(path))
- with path.open() as file_:
- return cast("JSONValue", json.load(file_))
+ with path.open() as file:
+ return cast("JSONValue", json.load(file))
diff --git a/src/dature/sources/retort.py b/src/dature/sources/retort.py
new file mode 100644
index 0000000..2552eaf
--- /dev/null
+++ b/src/dature/sources/retort.py
@@ -0,0 +1,231 @@
+from dataclasses import fields
+from datetime import timedelta
+from typing import TYPE_CHECKING, Any, cast, get_type_hints
+
+from adaptix import NameStyle as AdaptixNameStyle
+from adaptix import Retort, loader, name_mapping
+from adaptix.provider import Provider
+
+from dature.expansion.alias_provider import AliasProvider, resolve_nested_owner
+from dature.field_path import FieldPath
+from dature.fields.byte_size import ByteSize
+from dature.fields.payment_card import PaymentCardNumber
+from dature.fields.secret_str import SecretStr
+from dature.loaders.base import (
+ base64url_bytes_from_string,
+ base64url_str_from_string,
+ byte_size_from_string,
+ bytes_from_string,
+ complex_from_string,
+ payment_card_number_from_string,
+ secret_str_from_string,
+ timedelta_from_string,
+ url_from_string,
+)
+from dature.loaders.common import float_passthrough, int_from_string
+from dature.skip_field_provider import ModelToDictProvider, SkipFieldProvider
+from dature.type_utils import find_nested_dataclasses
+from dature.types import (
+ URL,
+ Base64UrlBytes,
+ Base64UrlStr,
+)
+from dature.validators.base import (
+ create_metadata_validator_providers,
+ create_root_validator_providers,
+ create_validator_providers,
+ extract_validators_from_type,
+)
+
+if TYPE_CHECKING:
+ from dature.protocols import DataclassInstance
+ from dature.sources.base import Source
+ from dature.types import (
+ FieldMapping,
+ JSONValue,
+ NameStyle,
+ TypeLoaderMap,
+ )
+
+
+def get_adaptix_name_style(name_style: "NameStyle | None") -> AdaptixNameStyle | None:
+ if name_style is None:
+ return None
+
+ name_style_map = {
+ "lower_snake": AdaptixNameStyle.LOWER_SNAKE,
+ "upper_snake": AdaptixNameStyle.UPPER_SNAKE,
+ "lower_camel": AdaptixNameStyle.CAMEL,
+ "upper_camel": AdaptixNameStyle.PASCAL,
+ "lower_kebab": AdaptixNameStyle.LOWER_KEBAB,
+ "upper_kebab": AdaptixNameStyle.UPPER_KEBAB,
+ }
+ return name_style_map.get(name_style)
+
+
+def get_name_mapping_providers(
+ name_style: "NameStyle | None",
+ field_mapping: "FieldMapping | None",
+) -> list[Provider]:
+ providers: list[Provider] = []
+
+ adaptix_name_style = get_adaptix_name_style(name_style)
+ if adaptix_name_style is not None:
+ providers.append(name_mapping(name_style=adaptix_name_style))
+
+ if field_mapping:
+ owner_fields: dict[type[DataclassInstance] | str, dict[str, str]] = {}
+ for field_path_key in field_mapping:
+ if not isinstance(field_path_key, FieldPath):
+ continue
+ owner: type[DataclassInstance] | str = field_path_key.owner
+ if len(field_path_key.parts) > 1 and not isinstance(field_path_key.owner, str):
+ owner = resolve_nested_owner(field_path_key.owner, field_path_key.parts[:-1])
+ field_name = field_path_key.parts[-1]
+ if owner not in owner_fields:
+ owner_fields[owner] = {}
+ owner_fields[owner][field_name] = field_name
+
+ for owner, identity_map in owner_fields.items():
+ if isinstance(owner, str):
+ providers.append(name_mapping(map=identity_map))
+ else:
+ providers.append(name_mapping(owner, map=identity_map))
+
+ providers.append(AliasProvider(field_mapping))
+
+ return providers
+
+
+def get_validator_providers[T](schema: type[T]) -> list[Provider]:
+ providers: list[Provider] = []
+ type_hints = get_type_hints(schema, include_extras=True)
+
+ for f in fields(cast("type[DataclassInstance]", schema)):
+ if f.name not in type_hints:
+ continue
+
+ field_type = type_hints[f.name]
+ validators_list = extract_validators_from_type(field_type)
+
+ if validators_list:
+ field_providers = create_validator_providers(schema, f.name, validators_list)
+ providers.extend(field_providers)
+
+ for nested_dc in find_nested_dataclasses(field_type):
+ nested_providers = get_validator_providers(nested_dc)
+ providers.extend(nested_providers)
+
+ return providers
+
+
+def build_base_recipe(
+ source: "Source",
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> list[Provider]:
+ user_loaders: list[Provider] = [
+ loader(type_, func) for type_, func in (resolved_type_loaders or source.type_loaders or {}).items()
+ ]
+ default_loaders: list[Provider] = [
+ loader(int, int_from_string),
+ loader(float, float_passthrough),
+ loader(bytes, bytes_from_string),
+ loader(complex, complex_from_string),
+ loader(timedelta, timedelta_from_string),
+ loader(URL, url_from_string),
+ loader(Base64UrlBytes, base64url_bytes_from_string),
+ loader(Base64UrlStr, base64url_str_from_string),
+ loader(SecretStr, secret_str_from_string),
+ loader(PaymentCardNumber, payment_card_number_from_string),
+ loader(ByteSize, byte_size_from_string),
+ ]
+ return [
+ *user_loaders,
+ *source.additional_loaders(),
+ *default_loaders,
+ *get_name_mapping_providers(source.name_style, source.field_mapping),
+ ]
+
+
+def create_retort(
+ source: "Source",
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> Retort:
+ return Retort(
+ strict_coercion=True,
+ recipe=build_base_recipe(source, resolved_type_loaders=resolved_type_loaders),
+ )
+
+
+def create_probe_retort(
+ source: "Source",
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> Retort:
+ return Retort(
+ strict_coercion=True,
+ recipe=[
+ SkipFieldProvider(),
+ ModelToDictProvider(),
+ *build_base_recipe(source, resolved_type_loaders=resolved_type_loaders),
+ ],
+ )
+
+
+def create_validating_retort[T](
+ source: "Source",
+ schema: type[T],
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> Retort:
+ root_validator_providers = create_root_validator_providers(
+ schema,
+ source.root_validators or (),
+ )
+ metadata_validator_providers = create_metadata_validator_providers(
+ source.validators or {},
+ )
+ return Retort(
+ strict_coercion=True,
+ recipe=[
+ *get_validator_providers(schema),
+ *metadata_validator_providers,
+ *root_validator_providers,
+ *build_base_recipe(source, resolved_type_loaders=resolved_type_loaders),
+ ],
+ )
+
+
+def _retort_cache_key(
+ schema: type,
+ resolved_type_loaders: "TypeLoaderMap | None",
+) -> tuple[type, frozenset[tuple[type, Any]]]:
+ loaders_key = frozenset(resolved_type_loaders.items()) if resolved_type_loaders is not None else frozenset()
+ return (schema, loaders_key)
+
+
+def transform_to_dataclass[T](
+ source: "Source",
+ data: "JSONValue",
+ schema: type[T],
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> T:
+ key = _retort_cache_key(schema, resolved_type_loaders)
+ if key not in source.retorts:
+ source.retorts[key] = create_retort(source, resolved_type_loaders=resolved_type_loaders)
+ return source.retorts[key].load(data, schema)
+
+
+def ensure_retort(
+ source: "Source",
+ cls: "type[DataclassInstance]",
+ *,
+ resolved_type_loaders: "TypeLoaderMap | None" = None,
+) -> None:
+ key = _retort_cache_key(cls, resolved_type_loaders)
+ if key not in source.retorts:
+ source.retorts[key] = create_retort(source, resolved_type_loaders=resolved_type_loaders)
+ source.retorts[key].get_loader(cls)
diff --git a/src/dature/sources/toml_.py b/src/dature/sources/toml_.py
new file mode 100644
index 0000000..609724f
--- /dev/null
+++ b/src/dature/sources/toml_.py
@@ -0,0 +1,80 @@
+import abc
+from dataclasses import dataclass
+from datetime import date, datetime, time
+from typing import TYPE_CHECKING, Any, cast
+
+from adaptix import loader
+from adaptix.provider import Provider
+
+from dature._descriptors import classproperty
+from dature.sources.base import FileSource
+
+if TYPE_CHECKING:
+ from toml_rs._lib import TomlVersion
+
+ from dature.path_finders.base import PathFinder
+from dature.loaders import (
+ bytearray_from_string,
+ date_passthrough,
+ datetime_passthrough,
+ none_from_empty_string,
+ optional_from_empty_string,
+)
+from dature.loaders.toml_ import time_passthrough
+from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
+
+
@dataclass(kw_only=True, repr=False)
class _BaseTomlSource(FileSource, abc.ABC):
    """Shared TOML parsing behaviour; subclasses pin the concrete spec version."""

    @abc.abstractmethod
    def _toml_version(self) -> "TomlVersion":
        """Return the TOML spec version string passed to toml_rs."""

    def _load_file(self, path: FileOrStream) -> JSONValue:
        # Lazy import keeps toml_rs an optional dependency.
        import toml_rs  # noqa: PLC0415

        version = self._toml_version()
        if isinstance(path, FILE_LIKE_TYPES):
            raw = path.read()
            text = raw.decode() if isinstance(raw, bytes) else raw
            return cast("JSONValue", toml_rs.loads(text, toml_version=version))
        with path.open() as handle:
            text = handle.read()
        return cast("JSONValue", toml_rs.loads(text, toml_version=version))

    def additional_loaders(self) -> list[Provider]:
        """Loaders for types TOML parses natively, plus string-based fallbacks."""
        return [
            loader(date, date_passthrough),
            loader(datetime, datetime_passthrough),
            loader(time, time_passthrough),
            loader(bytearray, bytearray_from_string),
            loader(type(None), none_from_empty_string),
            loader(str | None, optional_from_empty_string),
            loader(Any, optional_from_empty_string),
        ]
+
+
@dataclass(kw_only=True, repr=False)
class Toml10Source(_BaseTomlSource):
    """TOML source following the 1.0.0 specification."""

    format_name = "toml1.0"

    def _toml_version(self) -> "TomlVersion":
        return "1.0.0"

    @classproperty
    def path_finder_class(cls) -> "type[PathFinder]":  # noqa: N805
        # Lazy import: path finders are only needed when resolving error locations.
        from dature.path_finders.toml_ import Toml10PathFinder  # noqa: PLC0415

        return Toml10PathFinder
+
+
@dataclass(kw_only=True, repr=False)
class Toml11Source(_BaseTomlSource):
    """TOML source following the 1.1.0 specification."""

    format_name = "toml1.1"

    def _toml_version(self) -> "TomlVersion":
        return "1.1.0"

    @classproperty
    def path_finder_class(cls) -> "type[PathFinder]":  # noqa: N805
        # Lazy import: path finders are only needed when resolving error locations.
        from dature.path_finders.toml_ import Toml11PathFinder  # noqa: PLC0415

        return Toml11PathFinder
diff --git a/src/dature/sources/yaml_.py b/src/dature/sources/yaml_.py
new file mode 100644
index 0000000..5e27e32
--- /dev/null
+++ b/src/dature/sources/yaml_.py
@@ -0,0 +1,87 @@
+import abc
+from dataclasses import dataclass
+from datetime import date, datetime, time
+from typing import TYPE_CHECKING, cast
+
+from adaptix import loader
+from adaptix.provider import Provider
+
+from dature._descriptors import classproperty
+from dature.loaders import (
+ bytearray_from_string,
+ date_passthrough,
+ datetime_passthrough,
+ time_from_string,
+)
+from dature.loaders.yaml_ import time_from_int
+from dature.sources.base import FileSource
+from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
+
+if TYPE_CHECKING:
+ from ruamel.yaml.docinfo import Version
+
+ from dature.path_finders.base import PathFinder
+
+
@dataclass(kw_only=True, repr=False)
class _BaseYamlSource(FileSource, abc.ABC):
    """Shared YAML parsing behaviour; subclasses select the YAML directive version."""

    @abc.abstractmethod
    def _yaml_version(self) -> "Version":
        """Return the ruamel.yaml Version this source parses with."""

    def _load_file(self, path: FileOrStream) -> JSONValue:
        # Lazy import keeps ruamel.yaml an optional dependency.
        from ruamel.yaml import YAML  # noqa: PLC0415

        parser = YAML(typ="safe")
        parser.version = self._yaml_version()
        if isinstance(path, FILE_LIKE_TYPES):
            return cast("JSONValue", parser.load(path))
        with path.open() as handle:
            return cast("JSONValue", parser.load(handle))
+
+
@dataclass(kw_only=True, repr=False)
class Yaml11Source(_BaseYamlSource):
    """YAML source parsed under the YAML 1.1 directive."""

    format_name = "yaml1.1"

    def _yaml_version(self) -> "Version":
        from ruamel.yaml.docinfo import Version  # noqa: PLC0415

        return Version(1, 1)

    @classproperty
    def path_finder_class(cls) -> "type[PathFinder]":  # noqa: N805
        # Lazy import: path finders are only needed when resolving error locations.
        from dature.path_finders.yaml_ import Yaml11PathFinder  # noqa: PLC0415

        return Yaml11PathFinder

    def additional_loaders(self) -> list[Provider]:
        # time uses time_from_int here — presumably YAML 1.1's sexagesimal
        # scalars arrive as ints; confirm against the loader's contract.
        return [
            loader(date, date_passthrough),
            loader(datetime, datetime_passthrough),
            loader(time, time_from_int),
            loader(bytearray, bytearray_from_string),
        ]
+
+
@dataclass(kw_only=True, repr=False)
class Yaml12Source(_BaseYamlSource):
    """YAML source parsed under the YAML 1.2 directive."""

    format_name = "yaml1.2"

    def _yaml_version(self) -> "Version":
        from ruamel.yaml.docinfo import Version  # noqa: PLC0415

        return Version(1, 2)

    @classproperty
    def path_finder_class(cls) -> "type[PathFinder]":  # noqa: N805
        # Lazy import: path finders are only needed when resolving error locations.
        from dature.path_finders.yaml_ import Yaml12PathFinder  # noqa: PLC0415

        return Yaml12PathFinder

    def additional_loaders(self) -> list[Provider]:
        # Unlike the 1.1 source, times are parsed from strings (time_from_string).
        return [
            loader(date, date_passthrough),
            loader(datetime, datetime_passthrough),
            loader(time, time_from_string),
            loader(bytearray, bytearray_from_string),
        ]
diff --git a/src/dature/sources_loader/base.py b/src/dature/sources_loader/base.py
deleted file mode 100644
index c41a6e5..0000000
--- a/src/dature/sources_loader/base.py
+++ /dev/null
@@ -1,368 +0,0 @@
-import abc
-import json
-import logging
-from dataclasses import fields, is_dataclass
-from datetime import timedelta
-from pathlib import Path
-from typing import TYPE_CHECKING, Annotated, ClassVar, TypeVar, cast, get_args, get_origin, get_type_hints
-
-from adaptix import NameStyle as AdaptixNameStyle
-from adaptix import Retort, loader, name_mapping
-from adaptix.provider import Provider
-
-from dature.errors.exceptions import LineRange, SourceLocation
-from dature.expansion.alias_provider import AliasProvider, resolve_nested_owner
-from dature.expansion.env_expand import expand_env_vars
-from dature.field_path import FieldPath
-from dature.fields.byte_size import ByteSize
-from dature.fields.payment_card import PaymentCardNumber
-from dature.fields.secret_str import SecretStr
-from dature.path_finders.base import PathFinder
-from dature.protocols import DataclassInstance, LoaderProtocol, ValidatorProtocol
-from dature.skip_field_provider import ModelToDictProvider, SkipFieldProvider
-from dature.sources_loader.loaders.base import (
- base64url_bytes_from_string,
- base64url_str_from_string,
- byte_size_from_string,
- bytes_from_string,
- complex_from_string,
- payment_card_number_from_string,
- secret_str_from_string,
- timedelta_from_string,
- url_from_string,
-)
-from dature.sources_loader.loaders.common import float_passthrough, int_from_string
-from dature.types import (
- URL,
- Base64UrlBytes,
- Base64UrlStr,
- DotSeparatedPath,
- ExpandEnvVarsMode,
- FieldMapping,
- FieldValidators,
- FileOrStream,
- JSONValue,
- LoadRawResult,
- NameStyle,
- NestedConflict,
- TypeAnnotation,
-)
-from dature.validators.base import (
- create_metadata_validator_providers,
- create_root_validator_providers,
- create_validator_providers,
- extract_validators_from_type,
-)
-
-if TYPE_CHECKING:
- from dature.metadata import TypeLoader
-
-T = TypeVar("T")
-
-logger = logging.getLogger("dature")
-
-
-class BaseLoader(LoaderProtocol, abc.ABC):
- display_name: ClassVar[str]
- display_label: ClassVar[str] = "FILE"
- path_finder_class: type[PathFinder] | None = None
-
- def __init__( # noqa: PLR0913
- self,
- *,
- prefix: DotSeparatedPath | None = None,
- name_style: NameStyle | None = None,
- field_mapping: FieldMapping | None = None,
- root_validators: tuple[ValidatorProtocol, ...] | None = None,
- validators: FieldValidators | None = None,
- expand_env_vars: ExpandEnvVarsMode = "default",
- type_loaders: "tuple[TypeLoader, ...]" = (),
- ) -> None:
- self._prefix = prefix
- self._name_style = name_style
- self._field_mapping = field_mapping
- self._root_validators = root_validators or ()
- self._validators = validators or {}
- self._expand_env_vars_mode = expand_env_vars
- self._type_loaders = type_loaders
- self.retorts: dict[type, Retort] = {}
-
- def _additional_loaders(self) -> list[Provider]:
- return []
-
- def _get_adaptix_name_style(self) -> AdaptixNameStyle | None:
- if self._name_style is None:
- return None
-
- name_style_map = {
- "lower_snake": AdaptixNameStyle.LOWER_SNAKE,
- "upper_snake": AdaptixNameStyle.UPPER_SNAKE,
- "lower_camel": AdaptixNameStyle.CAMEL,
- "upper_camel": AdaptixNameStyle.PASCAL,
- "lower_kebab": AdaptixNameStyle.LOWER_KEBAB,
- "upper_kebab": AdaptixNameStyle.UPPER_KEBAB,
- }
- return name_style_map.get(self._name_style)
-
- def _get_name_mapping_provider(self) -> list[Provider]:
- providers: list[Provider] = []
-
- adaptix_name_style = self._get_adaptix_name_style()
- if adaptix_name_style is not None:
- providers.append(name_mapping(name_style=adaptix_name_style))
-
- if self._field_mapping:
- owner_fields: dict[type[DataclassInstance] | str, dict[str, str]] = {}
- for field_path_key in self._field_mapping:
- if not isinstance(field_path_key, FieldPath):
- continue
- owner: type[DataclassInstance] | str = field_path_key.owner
- if len(field_path_key.parts) > 1 and not isinstance(field_path_key.owner, str):
- owner = resolve_nested_owner(field_path_key.owner, field_path_key.parts[:-1])
- field_name = field_path_key.parts[-1]
- if owner not in owner_fields:
- owner_fields[owner] = {}
- owner_fields[owner][field_name] = field_name
-
- for owner, identity_map in owner_fields.items():
- if isinstance(owner, str):
- providers.append(name_mapping(map=identity_map))
- else:
- providers.append(name_mapping(owner, map=identity_map))
-
- providers.append(AliasProvider(self._field_mapping))
-
- return providers
-
- def _get_validator_providers(self, dataclass_: type[T]) -> list[Provider]:
- providers: list[Provider] = []
- type_hints = get_type_hints(dataclass_, include_extras=True)
-
- for field in fields(cast("type[DataclassInstance]", dataclass_)):
- if field.name not in type_hints:
- continue
-
- field_type = type_hints[field.name]
- validators = extract_validators_from_type(field_type)
-
- if validators:
- field_providers = create_validator_providers(dataclass_, field.name, validators)
- providers.extend(field_providers)
-
- for nested_dc in self._find_nested_dataclasses(field_type):
- nested_providers = self._get_validator_providers(nested_dc)
- providers.extend(nested_providers)
-
- return providers
-
- @staticmethod
- def _find_nested_dataclasses(
- field_type: TypeAnnotation,
- ) -> list[type[DataclassInstance]]:
- result: list[type[DataclassInstance]] = []
- queue: list[TypeAnnotation] = [field_type]
-
- while queue:
- current = queue.pop()
-
- if is_dataclass(current):
- result.append(current)
- continue
-
- origin = get_origin(current)
- if origin is Annotated:
- queue.append(get_args(current)[0])
- elif origin is not None:
- queue.extend(get_args(current))
-
- return result
-
- @staticmethod
- def _infer_type(value: str) -> JSONValue:
- if value == "":
- return value
-
- try:
- return cast("JSONValue", json.loads(value))
- except (json.JSONDecodeError, ValueError):
- return value
-
- @classmethod
- def _parse_string_values(cls, data: JSONValue, *, infer_scalars: bool = False) -> JSONValue:
- if not isinstance(data, dict):
- return data
-
- result: dict[str, JSONValue] = {}
- for key, value in data.items():
- if isinstance(value, dict):
- result[key] = cls._parse_string_values(value, infer_scalars=True)
- elif isinstance(value, str) and (infer_scalars or value.startswith(("[", "{"))):
- result[key] = cls._infer_type(value)
- else:
- result[key] = value
- return result
-
- def _base_recipe(self) -> list[Provider]:
- user_loaders: list[Provider] = [loader(tl.type_, tl.func) for tl in self._type_loaders]
- default_loaders: list[Provider] = [
- loader(int, int_from_string),
- loader(float, float_passthrough),
- loader(bytes, bytes_from_string),
- loader(complex, complex_from_string),
- loader(timedelta, timedelta_from_string),
- loader(URL, url_from_string),
- loader(Base64UrlBytes, base64url_bytes_from_string),
- loader(Base64UrlStr, base64url_str_from_string),
- loader(SecretStr, secret_str_from_string),
- loader(PaymentCardNumber, payment_card_number_from_string),
- loader(ByteSize, byte_size_from_string),
- ]
- return [
- *user_loaders,
- *self._additional_loaders(),
- *default_loaders,
- *self._get_name_mapping_provider(),
- ]
-
- def create_retort(self) -> Retort:
- return Retort(
- strict_coercion=True,
- recipe=self._base_recipe(),
- )
-
- def create_probe_retort(self) -> Retort:
- return Retort(
- strict_coercion=True,
- recipe=[SkipFieldProvider(), ModelToDictProvider(), *self._base_recipe()],
- )
-
- def create_validating_retort(self, dataclass_: type[T]) -> Retort:
- root_validator_providers = create_root_validator_providers(
- dataclass_,
- self._root_validators,
- )
- metadata_validator_providers = create_metadata_validator_providers(
- self._validators,
- )
- return Retort(
- strict_coercion=True,
- recipe=[
- *self._get_validator_providers(dataclass_),
- *metadata_validator_providers,
- *root_validator_providers,
- *self._base_recipe(),
- ],
- )
-
- @abc.abstractmethod
- def _load(self, path: FileOrStream) -> JSONValue: ...
-
- def _apply_prefix(self, data: JSONValue) -> JSONValue:
- if not self._prefix:
- return data
-
- for key in self._prefix.split("."):
- if not isinstance(data, dict):
- return {}
- if key not in data:
- return {}
- data = data[key]
-
- return data
-
- def _pre_processing(self, data: JSONValue) -> JSONValue:
- prefixed = self._apply_prefix(data)
- return expand_env_vars(prefixed, mode=self._expand_env_vars_mode)
-
- def transform_to_dataclass(self, data: JSONValue, dataclass_: type[T]) -> T:
- if dataclass_ not in self.retorts:
- self.retorts[dataclass_] = self.create_retort()
- return self.retorts[dataclass_].load(data, dataclass_)
-
- def load_raw(self, path: FileOrStream) -> LoadRawResult:
- data = self._load(path)
- processed = self._pre_processing(data)
- logger.debug(
- "[%s] load_raw: path=%s, raw_keys=%s, after_preprocessing_keys=%s",
- type(self).__name__,
- path,
- sorted(data.keys()) if isinstance(data, dict) else "",
- sorted(processed.keys()) if isinstance(processed, dict) else "",
- )
- return LoadRawResult(data=processed)
-
- @classmethod
- def resolve_location(
- cls,
- field_path: list[str],
- file_path: Path | None,
- file_content: str | None,
- prefix: str | None,
- split_symbols: str, # noqa: ARG003
- nested_conflict: NestedConflict | None, # noqa: ARG003
- ) -> list[SourceLocation]:
- if file_content is None or not field_path:
- return [_empty_file_location(cls.display_label, file_path)]
-
- if cls.path_finder_class is None:
- return [_empty_file_location(cls.display_label, file_path)]
-
- search_path = _build_search_path(field_path, prefix)
- finder = cls.path_finder_class(file_content)
- line_range = finder.find_line_range(search_path)
- if line_range is None:
- line_range = _find_parent_line_range(finder, search_path)
- if line_range is None:
- return [_empty_file_location(cls.display_label, file_path)]
-
- lines = file_content.splitlines()
- content_lines: list[str] | None = None
- if 0 < line_range.start <= len(lines):
- end = min(line_range.end, len(lines))
- raw = lines[line_range.start - 1 : end]
- content_lines = _strip_common_indent(raw)
-
- return [
- SourceLocation(
- display_label=cls.display_label,
- file_path=file_path,
- line_range=line_range,
- line_content=content_lines,
- env_var_name=None,
- ),
- ]
-
-
-def _find_parent_line_range(finder: PathFinder, search_path: list[str]) -> LineRange | None:
- path = search_path[:-1]
- while path:
- line_range = finder.find_line_range(path)
- if line_range is not None:
- return line_range
- path = path[:-1]
- return None
-
-
-def _build_search_path(field_path: list[str], prefix: str | None) -> list[str]:
- if not prefix:
- return field_path
- prefix_parts = prefix.split(".")
- return prefix_parts + field_path
-
-
-def _strip_common_indent(raw_lines: list[str]) -> list[str]:
- indents = [len(line) - len(line.lstrip()) for line in raw_lines if line.strip()]
- if not indents:
- return raw_lines
- min_indent = min(indents)
- return [line[min_indent:] for line in raw_lines]
-
-
-def _empty_file_location(display_label: str, file_path: Path | None) -> SourceLocation:
- return SourceLocation(
- display_label=display_label,
- file_path=file_path,
- line_range=None,
- line_content=None,
- env_var_name=None,
- )
diff --git a/src/dature/sources_loader/docker_secrets.py b/src/dature/sources_loader/docker_secrets.py
deleted file mode 100644
index 5a78452..0000000
--- a/src/dature/sources_loader/docker_secrets.py
+++ /dev/null
@@ -1,69 +0,0 @@
-from pathlib import Path
-from typing import ClassVar
-
-from dature.errors.exceptions import SourceLocation
-from dature.sources_loader.flat_key import FlatKeyLoader
-from dature.types import (
- FileOrStream,
- JSONValue,
- NestedConflict,
-)
-
-
-class DockerSecretsLoader(FlatKeyLoader):
- display_name = "docker_secrets"
- display_label: ClassVar[str] = "SECRET FILE"
-
- @classmethod
- def resolve_location(
- cls,
- field_path: list[str],
- file_path: Path | None,
- file_content: str | None, # noqa: ARG003
- prefix: str | None,
- split_symbols: str,
- nested_conflict: NestedConflict | None,
- ) -> list[SourceLocation]:
- if nested_conflict is not None:
- json_var = cls._resolve_var_name(field_path[:1], prefix, split_symbols, None)
- if nested_conflict.used_var == json_var:
- secret_name = field_path[0]
- else:
- secret_name = split_symbols.join(field_path)
- else:
- secret_name = split_symbols.join(field_path)
- if prefix is not None:
- secret_name = prefix + secret_name
- secret_file = file_path / secret_name if file_path is not None else None
- return [
- SourceLocation(
- display_label=cls.display_label,
- file_path=secret_file,
- line_range=None,
- line_content=None,
- env_var_name=None,
- ),
- ]
-
- def _load(self, path: FileOrStream) -> JSONValue:
- if not isinstance(path, Path):
- msg = "DockerSecretsLoader does not support file-like objects"
- raise TypeError(msg)
-
- result: dict[str, JSONValue] = {}
- for entry in sorted(path.iterdir()):
- if not entry.is_file():
- continue
-
- key = entry.name.lower()
- value = entry.read_text().strip()
-
- if self._prefix and not key.startswith(self._prefix.lower()):
- continue
-
- if self._prefix:
- key = key[len(self._prefix) :]
-
- result[key] = value
-
- return result
diff --git a/src/dature/sources_loader/flat_key.py b/src/dature/sources_loader/flat_key.py
deleted file mode 100644
index 2b45397..0000000
--- a/src/dature/sources_loader/flat_key.py
+++ /dev/null
@@ -1,191 +0,0 @@
-import abc
-from datetime import date, datetime, time
-from typing import TYPE_CHECKING, cast
-
-from adaptix import loader
-from adaptix.provider import Provider
-
-from dature.expansion.env_expand import expand_env_vars
-from dature.protocols import ValidatorProtocol
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
- bool_loader,
- bytearray_from_json_string,
- date_from_string,
- datetime_from_string,
- float_from_string,
- none_from_empty_string,
- optional_from_empty_string,
- str_from_scalar,
- time_from_string,
-)
-from dature.types import (
- DotSeparatedPath,
- ExpandEnvVarsMode,
- FieldMapping,
- FieldValidators,
- FileOrStream,
- JSONValue,
- LoadRawResult,
- NameStyle,
- NestedConflict,
- NestedConflicts,
- NestedResolve,
- NestedResolveStrategy,
-)
-
-if TYPE_CHECKING:
- from dature.field_path import FieldPath
- from dature.metadata import TypeLoader
-
-
-def set_nested(d: dict[str, JSONValue], keys: list[str], value: str) -> None:
- for key in keys[:-1]:
- d = cast("dict[str, JSONValue]", d.setdefault(key, {}))
- d[keys[-1]] = value
-
-
-class FlatKeyLoader(BaseLoader, abc.ABC):
- def __init__( # noqa: PLR0913
- self,
- *,
- prefix: DotSeparatedPath | None = None,
- split_symbols: str = "__",
- name_style: NameStyle | None = None,
- field_mapping: FieldMapping | None = None,
- root_validators: tuple[ValidatorProtocol, ...] | None = None,
- validators: FieldValidators | None = None,
- expand_env_vars: ExpandEnvVarsMode = "default",
- type_loaders: "tuple[TypeLoader, ...]" = (),
- nested_resolve_strategy: NestedResolveStrategy = "flat",
- nested_resolve: NestedResolve | None = None,
- ) -> None:
- self._split_symbols = split_symbols
- self._nested_resolve_strategy = nested_resolve_strategy
- self._nested_resolve = nested_resolve
- super().__init__(
- prefix=prefix,
- name_style=name_style,
- field_mapping=field_mapping,
- root_validators=root_validators,
- validators=validators,
- expand_env_vars=expand_env_vars,
- type_loaders=type_loaders,
- )
-
- def _resolve_field_strategy(self, field_name: str) -> NestedResolveStrategy:
- if self._nested_resolve is not None:
- for strategy, field_paths in self._nested_resolve.items():
- for fp in field_paths:
- if self._field_path_matches(fp, field_name):
- return strategy
- return self._nested_resolve_strategy
-
- @staticmethod
- def _field_path_matches(fp: "FieldPath", field_name: str) -> bool:
- if not fp.parts:
- return True
- return fp.parts[0] == field_name
-
- def _additional_loaders(self) -> list[Provider]:
- return [
- loader(str, str_from_scalar),
- loader(float, float_from_string),
- loader(date, date_from_string),
- loader(datetime, datetime_from_string),
- loader(time, time_from_string),
- loader(bytearray, bytearray_from_json_string),
- loader(type(None), none_from_empty_string),
- loader(str | None, optional_from_empty_string),
- loader(bool, bool_loader),
- ]
-
- @staticmethod
- def _resolve_var_name(
- field_path: list[str],
- prefix: str | None,
- split_symbols: str,
- conflict: NestedConflict | None,
- ) -> str:
- def _build_name(parts: list[str]) -> str:
- var = split_symbols.join(p.upper() for p in parts)
- if prefix is not None:
- return prefix + var
- return var
-
- json_var = _build_name(field_path[:1])
- if conflict is not None and conflict.used_var == json_var:
- return json_var
- return _build_name(field_path)
-
- def _build_var_name(self, key: str) -> str:
- if self._prefix:
- return self._prefix + key.upper()
- return key.upper()
-
- def _build_nested_var_name(self, top_field: str, nested: dict[str, JSONValue]) -> str:
- for sub_key in nested:
- full_key = f"{top_field}{self._split_symbols}{sub_key}"
- return self._build_var_name(full_key)
- return self._build_var_name(top_field)
-
- def _pre_process_row(
- self,
- key: str,
- value: str,
- result: dict[str, JSONValue],
- conflicts: NestedConflicts,
- ) -> None:
- parts = key.split(self._split_symbols)
- self._process_key_value(parts=parts, value=value, result=result, conflicts=conflicts)
-
- def load_raw(self, path: FileOrStream) -> LoadRawResult:
- data = self._load(path)
- data_dict = cast("dict[str, str]", data)
- result: dict[str, JSONValue] = {}
- conflicts: NestedConflicts = {}
-
- for key, value in data_dict.items():
- self._pre_process_row(key=key, value=value, result=result, conflicts=conflicts)
-
- expanded = expand_env_vars(result, mode=self._expand_env_vars_mode)
- processed = self._parse_string_values(expanded)
- return LoadRawResult(data=processed, nested_conflicts=conflicts)
-
- def _process_key_value(
- self,
- *,
- parts: list[str],
- value: str,
- result: dict[str, JSONValue],
- conflicts: NestedConflicts,
- ) -> None:
- if len(parts) > 1:
- top_field = parts[0]
- strategy = self._resolve_field_strategy(top_field)
- existing = result.get(top_field)
- if isinstance(existing, str):
- flat_var = self._build_var_name(self._split_symbols.join(parts))
- json_var = self._build_var_name(top_field)
- if strategy == "flat":
- result.pop(top_field)
- set_nested(result, parts, value)
- conflicts[top_field] = NestedConflict(flat_var, json_var, existing)
- elif strategy == "json":
- conflicts[top_field] = NestedConflict(json_var, flat_var, existing)
- else:
- set_nested(result, parts, value)
- else:
- top_field = parts[0]
- strategy = self._resolve_field_strategy(top_field)
- existing = result.get(top_field)
- if isinstance(existing, dict):
- json_var = self._build_var_name(top_field)
- flat_var = self._build_nested_var_name(top_field, existing)
- if strategy == "json":
- result[top_field] = value
- conflicts[top_field] = NestedConflict(json_var, flat_var, value)
- elif strategy == "flat":
- conflicts[top_field] = NestedConflict(flat_var, json_var, value)
- else:
- result[top_field] = value
diff --git a/src/dature/sources_loader/ini_.py b/src/dature/sources_loader/ini_.py
deleted file mode 100644
index 24fa85c..0000000
--- a/src/dature/sources_loader/ini_.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import configparser
-import io
-from datetime import date, datetime, time
-from typing import cast
-
-from adaptix import loader
-from adaptix.provider import Provider
-
-from dature.expansion.env_expand import expand_env_vars
-from dature.path_finders.ini_ import TablePathFinder
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
- bool_loader,
- bytearray_from_json_string,
- date_from_string,
- datetime_from_string,
- float_from_string,
- none_from_empty_string,
- optional_from_empty_string,
- str_from_scalar,
- time_from_string,
-)
-from dature.types import BINARY_IO_TYPES, TEXT_IO_TYPES, FileOrStream, JSONValue
-
-
-class IniLoader(BaseLoader):
- display_name = "ini"
- path_finder_class = TablePathFinder
-
- def _additional_loaders(self) -> list[Provider]:
- return [
- loader(str, str_from_scalar),
- loader(float, float_from_string),
- loader(date, date_from_string),
- loader(datetime, datetime_from_string),
- loader(time, time_from_string),
- loader(bytearray, bytearray_from_json_string),
- loader(type(None), none_from_empty_string),
- loader(str | None, optional_from_empty_string),
- loader(bool, bool_loader),
- ]
-
- def _pre_processing(self, data: JSONValue) -> JSONValue:
- prefixed = self._apply_prefix(data)
- expanded = expand_env_vars(prefixed, mode=self._expand_env_vars_mode)
- return self._parse_string_values(expanded)
-
- def _load(self, path: FileOrStream) -> JSONValue:
- config = configparser.ConfigParser(interpolation=None)
- if isinstance(path, TEXT_IO_TYPES):
- config.read_file(path)
- elif isinstance(path, BINARY_IO_TYPES):
- config.read_file(io.TextIOWrapper(cast("io.BufferedReader", path)))
- else:
- with path.open() as f:
- config.read_file(f)
- if self._prefix and self._prefix in config:
- result: dict[str, JSONValue] = dict(config[self._prefix])
- child_prefix = self._prefix + "."
- for section in config.sections():
- if section.startswith(child_prefix):
- nested_key = section[len(child_prefix) :]
- result[nested_key] = dict(config[section])
- return {self._prefix: result}
-
- all_sections: dict[str, JSONValue] = {}
- if config.defaults():
- all_sections["DEFAULT"] = dict(config.defaults())
- for section in config.sections():
- parts = section.split(".")
- target = all_sections
- for part in parts[:-1]:
- if part not in target:
- target[part] = {}
- target = cast("dict[str, JSONValue]", target[part])
- target[parts[-1]] = dict(config[section])
- return all_sections
diff --git a/src/dature/sources_loader/toml_.py b/src/dature/sources_loader/toml_.py
deleted file mode 100644
index bfa03a0..0000000
--- a/src/dature/sources_loader/toml_.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import abc
-from datetime import date, datetime, time
-from typing import Any, cast
-
-import toml_rs
-from adaptix import loader
-from adaptix.provider import Provider
-from toml_rs._lib import TomlVersion
-
-from dature.path_finders.toml_ import Toml10PathFinder, Toml11PathFinder
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
- bytearray_from_string,
- date_passthrough,
- datetime_passthrough,
- none_from_empty_string,
- optional_from_empty_string,
-)
-from dature.sources_loader.loaders.toml_ import time_passthrough
-from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
-
-
-class BaseTomlLoader(BaseLoader, abc.ABC):
- @abc.abstractmethod
- def _toml_version(self) -> TomlVersion: ...
-
- def _load(self, path: FileOrStream) -> JSONValue:
- if isinstance(path, FILE_LIKE_TYPES):
- content = path.read()
- if isinstance(content, bytes):
- content = content.decode()
- return cast("JSONValue", toml_rs.loads(content, toml_version=self._toml_version()))
- with path.open() as file_:
- return cast("JSONValue", toml_rs.loads(file_.read(), toml_version=self._toml_version()))
-
- def _additional_loaders(self) -> list[Provider]:
- return [
- loader(date, date_passthrough),
- loader(datetime, datetime_passthrough),
- loader(time, time_passthrough),
- loader(bytearray, bytearray_from_string),
- loader(type(None), none_from_empty_string),
- loader(str | None, optional_from_empty_string),
- loader(Any, optional_from_empty_string),
- ]
-
-
-class Toml10Loader(BaseTomlLoader):
- display_name = "toml1.0"
- path_finder_class = Toml10PathFinder
-
- def _toml_version(self) -> TomlVersion:
- return "1.0.0"
-
-
-class Toml11Loader(BaseTomlLoader):
- display_name = "toml1.1"
- path_finder_class = Toml11PathFinder
-
- def _toml_version(self) -> TomlVersion:
- return "1.1.0"
diff --git a/src/dature/sources_loader/yaml_.py b/src/dature/sources_loader/yaml_.py
deleted file mode 100644
index 95b1e5a..0000000
--- a/src/dature/sources_loader/yaml_.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import abc
-from datetime import date, datetime, time
-from typing import cast
-
-from adaptix import loader
-from adaptix.provider import Provider
-from ruamel.yaml import YAML
-from ruamel.yaml.docinfo import Version
-
-from dature.path_finders.yaml_ import Yaml11PathFinder, Yaml12PathFinder
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import (
- bytearray_from_string,
- date_passthrough,
- datetime_passthrough,
- time_from_string,
-)
-from dature.sources_loader.loaders.yaml_ import time_from_int
-from dature.types import FILE_LIKE_TYPES, FileOrStream, JSONValue
-
-
-class BaseYamlLoader(BaseLoader, abc.ABC):
- @abc.abstractmethod
- def _yaml_version(self) -> Version: ...
-
- def _load(self, path: FileOrStream) -> JSONValue:
- yaml = YAML(typ="safe")
- yaml.version = self._yaml_version()
- if isinstance(path, FILE_LIKE_TYPES):
- return cast("JSONValue", yaml.load(path))
- with path.open() as file_:
- return cast("JSONValue", yaml.load(file_))
-
-
-class Yaml11Loader(BaseYamlLoader):
- display_name = "yaml1.1"
- path_finder_class = Yaml11PathFinder
-
- def _yaml_version(self) -> Version:
- return Version(1, 1)
-
- def _additional_loaders(self) -> list[Provider]:
- return [
- loader(date, date_passthrough),
- loader(datetime, datetime_passthrough),
- loader(time, time_from_int),
- loader(bytearray, bytearray_from_string),
- ]
-
-
-class Yaml12Loader(BaseYamlLoader):
- display_name = "yaml1.2"
- path_finder_class = Yaml12PathFinder
-
- def _yaml_version(self) -> Version:
- return Version(1, 2)
-
- def _additional_loaders(self) -> list[Provider]:
- return [
- loader(date, date_passthrough),
- loader(datetime, datetime_passthrough),
- loader(time, time_from_string),
- loader(bytearray, bytearray_from_string),
- ]
diff --git a/src/dature/type_utils.py b/src/dature/type_utils.py
new file mode 100644
index 0000000..1c48fdd
--- /dev/null
+++ b/src/dature/type_utils.py
@@ -0,0 +1,24 @@
+from dataclasses import is_dataclass
+from typing import Annotated, get_args, get_origin
+
+from dature.types import TypeAnnotation
+
+
+def find_nested_dataclasses(field_type: TypeAnnotation) -> list[type]:
+ result: list[type] = []
+ queue: list[TypeAnnotation] = [field_type]
+
+ while queue:
+ current = queue.pop()
+
+ if is_dataclass(current) and isinstance(current, type):
+ result.append(current)
+ continue
+
+ origin = get_origin(current)
+ if origin is Annotated:
+ queue.append(get_args(current)[0])
+ elif origin is not None:
+ queue.extend(get_args(current))
+
+ return result
diff --git a/src/dature/types.py b/src/dature/types.py
index 8e821da..ac41bb8 100644
--- a/src/dature/types.py
+++ b/src/dature/types.py
@@ -52,10 +52,14 @@ def __hash__(self) -> int:
"upper_kebab",
]
-# Keys are FieldPath at runtime, but F[Type].field returns the field's static type (str, int, etc.)
-# due to the overload trick for IDE autocompletion, so we accept those types here too.
-type _FieldMappingKey = "FieldPath | str | int | float | bool | None"
-type FieldMapping = dict[_FieldMappingKey, str | tuple[str, ...]]
+# F[Type].field is FieldPath at runtime, but the overload trick makes mypy see the
+# field's static type (str, int, list[str], dict, etc.) for IDE autocompletion.
+# This union covers all types mypy can infer from F expressions.
+type FieldRef = (
+ "FieldPath | str | int | float | bool | list[Any] | dict[str, Any] | tuple[Any, ...] | set[Any] | bytes | None"
+)
+
+type FieldMapping = dict[FieldRef, str | tuple[str, ...]]
type URL = ParseResult
@@ -70,11 +74,16 @@ def __hash__(self) -> int:
type _NestedResolveValue = "tuple[FieldPath | Any, ...]"
type NestedResolve = dict[NestedResolveStrategy, _NestedResolveValue]
-type _ValidatorKey = "FieldPath | str | int | float | bool | None"
-type FieldValidators = dict[_ValidatorKey, "ValidatorProtocol | tuple[ValidatorProtocol, ...]"]
+type FieldValidators = dict[FieldRef, "ValidatorProtocol | tuple[ValidatorProtocol, ...]"]
type FieldMergeCallable = Callable[[list[JSONValue]], JSONValue]
+type MergeStrategyName = Literal["last_wins", "first_wins", "first_found", "raise_on_conflict"]
+type FieldMergeStrategyName = Literal["first_wins", "last_wins", "append", "append_unique", "prepend", "prepend_unique"]
+type TypeLoaderMap = dict[type, Callable[..., Any]]
+type FieldMergeMap = dict[FieldRef, "FieldMergeStrategyName | Callable[..., Any]"]
+type FieldGroupTuple = tuple[FieldRef, ...]
+
type FileLike = TextIOBase | BufferedIOBase | RawIOBase
FILE_LIKE_TYPES: Final = (TextIOBase, BufferedIOBase, RawIOBase)
TEXT_IO_TYPES: Final = TextIOBase
diff --git a/src/dature/validators/base.py b/src/dature/validators/base.py
index 9678200..7900202 100644
--- a/src/dature/validators/base.py
+++ b/src/dature/validators/base.py
@@ -23,7 +23,7 @@ def extract_validators_from_type(field_type: Any) -> list[ValidatorProtocol]: #
def create_validator_providers(
- dataclass_: type,
+ schema: type,
field_name: str,
validators: list[ValidatorProtocol],
) -> list[Provider]:
@@ -33,7 +33,7 @@ def create_validator_providers(
func = v.get_validator_func()
error = v.get_error_message()
provider = validator(
- P[dataclass_][field_name],
+ P[schema][field_name],
func,
error,
)
@@ -91,14 +91,14 @@ def create_metadata_validator_providers(
def create_root_validator_providers(
- dataclass_: type,
+ schema: type,
root_validators: tuple[ValidatorProtocol, ...],
) -> list[Provider]:
providers = []
for root_validator in root_validators:
provider = validator(
- P[dataclass_],
+ P[schema],
root_validator.get_validator_func(),
root_validator.get_error_message(),
)
diff --git a/src/dature/validators/number.py b/src/dature/validators/number.py
index 5198b47..d1ea239 100644
--- a/src/dature/validators/number.py
+++ b/src/dature/validators/number.py
@@ -1,11 +1,11 @@
from collections.abc import Callable
-from dataclasses import dataclass
+from dataclasses import dataclass, field
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Gt:
value: int | float
- error_message: str = "Value must be greater than {value}"
+ error_message: str = field(default="Value must be greater than {value}", kw_only=True)
def get_validator_func(self) -> Callable[[int | float], bool]:
def validate(val: float) -> bool:
@@ -17,10 +17,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Ge:
value: int | float
- error_message: str = "Value must be greater than or equal to {value}"
+ error_message: str = field(default="Value must be greater than or equal to {value}", kw_only=True)
def get_validator_func(self) -> Callable[[int | float], bool]:
def validate(val: float) -> bool:
@@ -32,10 +32,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Lt:
value: int | float
- error_message: str = "Value must be less than {value}"
+ error_message: str = field(default="Value must be less than {value}", kw_only=True)
def get_validator_func(self) -> Callable[[int | float], bool]:
def validate(val: float) -> bool:
@@ -47,10 +47,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Le:
value: int | float
- error_message: str = "Value must be less than or equal to {value}"
+ error_message: str = field(default="Value must be less than or equal to {value}", kw_only=True)
def get_validator_func(self) -> Callable[[int | float], bool]:
def validate(val: float) -> bool:
diff --git a/src/dature/validators/root.py b/src/dature/validators/root.py
index 6c3aac6..9b5360b 100644
--- a/src/dature/validators/root.py
+++ b/src/dature/validators/root.py
@@ -1,12 +1,12 @@
from collections.abc import Callable
-from dataclasses import dataclass
+from dataclasses import dataclass, field
# --8<-- [start:root-validator]
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class RootValidator:
func: Callable[..., bool]
- error_message: str = "Root validation failed"
+ error_message: str = field(default="Root validation failed", kw_only=True)
# --8<-- [end:root-validator]
def get_validator_func(self) -> Callable[..., bool]:
diff --git a/src/dature/validators/sequence.py b/src/dature/validators/sequence.py
index 99a0288..ac67f72 100644
--- a/src/dature/validators/sequence.py
+++ b/src/dature/validators/sequence.py
@@ -1,12 +1,12 @@
from collections.abc import Callable, Sequence
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from typing import Any
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class MinItems:
value: int
- error_message: str = "Value must have at least {value} items"
+ error_message: str = field(default="Value must have at least {value} items", kw_only=True)
def get_validator_func(self) -> Callable[[Sequence[Any]], bool]:
def validate(val: Sequence[Any]) -> bool:
@@ -18,10 +18,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class MaxItems:
value: int
- error_message: str = "Value must have at most {value} items"
+ error_message: str = field(default="Value must have at most {value} items", kw_only=True)
def get_validator_func(self) -> Callable[[Sequence[Any]], bool]:
def validate(val: Sequence[Any]) -> bool:
@@ -33,9 +33,9 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class UniqueItems:
- error_message: str = "Value must contain unique items"
+ error_message: str = field(default="Value must contain unique items", kw_only=True)
def get_validator_func(self) -> Callable[[Sequence[Any]], bool]:
def validate(val: Sequence[Any]) -> bool:
diff --git a/src/dature/validators/string.py b/src/dature/validators/string.py
index 6562aa9..482dbf7 100644
--- a/src/dature/validators/string.py
+++ b/src/dature/validators/string.py
@@ -1,12 +1,12 @@
import re
from collections.abc import Callable
-from dataclasses import dataclass
+from dataclasses import dataclass, field
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class MinLength:
value: int
- error_message: str = "Value must have at least {value} characters"
+ error_message: str = field(default="Value must have at least {value} characters", kw_only=True)
def get_validator_func(self) -> Callable[[str], bool]:
def validate(val: str) -> bool:
@@ -18,10 +18,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class MaxLength:
value: int
- error_message: str = "Value must have at most {value} characters"
+ error_message: str = field(default="Value must have at most {value} characters", kw_only=True)
def get_validator_func(self) -> Callable[[str], bool]:
def validate(val: str) -> bool:
@@ -33,10 +33,10 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class RegexPattern:
pattern: str
- error_message: str = "Value must match pattern '{pattern}'"
+ error_message: str = field(default="Value must match pattern '{pattern}'", kw_only=True)
def get_validator_func(self) -> Callable[[str], bool]:
def validate(val: str) -> bool:
diff --git a/tests/errors/test_exceptions.py b/tests/errors/test_exceptions.py
index 9ba64d7..e0cd3f0 100644
--- a/tests/errors/test_exceptions.py
+++ b/tests/errors/test_exceptions.py
@@ -3,8 +3,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError, LineRange, SourceLocation
+from dature import EnvSource, JsonSource, Toml11Source, Yaml12Source, load
+from dature.errors import DatureConfigError, FieldLoadError, LineRange, SourceLocation
class TestDatureConfigErrorFormat:
@@ -16,7 +16,7 @@ def test_single_error_message(self):
input_value="30",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.toml"),
line_range=LineRange(start=2, end=2),
line_content=['timeout = "30"'],
@@ -42,7 +42,7 @@ def test_multiple_errors_message(self):
input_value="abc",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=2, end=2),
line_content=['"timeout": "abc"'],
@@ -56,7 +56,7 @@ def test_multiple_errors_message(self):
input_value=None,
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=None,
line_content=None,
@@ -83,7 +83,7 @@ def test_env_error_message(self):
input_value="abc",
locations=[
SourceLocation(
- display_label="ENV",
+ location_label="ENV",
file_path=None,
line_range=None,
line_content=None,
@@ -109,7 +109,7 @@ def test_caret_points_to_value_not_key_when_same_string(self) -> None:
input_value="name",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.toml"),
line_range=LineRange(start=1, end=1),
line_content=['name = "name"'],
@@ -134,7 +134,7 @@ def test_caret_points_to_value_in_json_duplicate_string(self) -> None:
input_value="host",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=2, end=2),
line_content=['"host": "host"'],
@@ -157,7 +157,7 @@ def test_json_type_error_decorator(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"timeout": "abc", "name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@load(metadata)
@dataclass
@@ -192,10 +192,10 @@ class Config:
name: str
port: int
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -214,10 +214,10 @@ class Config:
timeout: int
name: str
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 2
@@ -249,10 +249,10 @@ class DB:
class Config:
db: DB
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -271,7 +271,7 @@ def test_env_type_error(self, monkeypatch: pytest.MonkeyPatch):
monkeypatch.setenv("APP_TIMEOUT", "abc")
monkeypatch.setenv("APP_NAME", "test")
- metadata = Source(prefix="APP_")
+ metadata = EnvSource(prefix="APP_")
@load(metadata)
@dataclass
@@ -298,10 +298,10 @@ class Config:
name: str
timeout: int
- metadata = Source(file_=toml_file)
+ metadata = Toml11Source(file=toml_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -326,10 +326,10 @@ class Config:
name: str
timeout: int
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
first = err.exceptions[0]
@@ -366,7 +366,7 @@ class TestLineTruncation:
),
],
)
- def test_file_source_truncation(
+ def test_file_source_truncation(
self,
line_content: str,
expected_content: str,
@@ -378,7 +378,7 @@ def test_file_source_truncation(
input_value="30",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.toml"),
line_range=LineRange(start=2, end=2),
line_content=[line_content],
@@ -413,7 +413,7 @@ def test_file_source_truncation(
),
],
)
- def test_envfile_source_truncation(
+ def test_envfile_source_truncation(
self,
line_content: str,
expected_content: str,
@@ -425,7 +425,7 @@ def test_envfile_source_truncation(
input_value="abc",
locations=[
SourceLocation(
- display_label="ENV FILE",
+ location_label="ENV FILE",
file_path=Path(".env"),
line_range=LineRange(start=2, end=2),
line_content=[line_content],
@@ -452,7 +452,7 @@ def test_multiline_content_each_line_truncated(self) -> None:
input_value=None,
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=2, end=4),
line_content=[line_long, line_short, line_long],
@@ -480,7 +480,7 @@ def test_four_lines_shows_two_and_ellipsis(self) -> None:
input_value=None,
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=2, end=5),
line_content=["line1", "line2", "line3", "line4"],
@@ -507,7 +507,7 @@ def test_five_lines_shows_two_and_ellipsis(self) -> None:
input_value=None,
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=2, end=6),
line_content=["line1", "line2", "line3", "line4", "line5"],
@@ -538,7 +538,7 @@ def test_value_fully_past_truncation_skips_caret(self) -> None:
input_value=0,
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.json"),
line_range=LineRange(start=1, end=1),
line_content=[line],
@@ -564,7 +564,7 @@ def test_value_partially_truncated_shows_partial_caret(self) -> None:
input_value="abcdefghij",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.toml"),
line_range=LineRange(start=1, end=1),
line_content=[line],
@@ -591,7 +591,7 @@ def test_value_within_visible_area_shows_full_caret(self) -> None:
input_value="30",
locations=[
SourceLocation(
- display_label="FILE",
+ location_label="FILE",
file_path=Path("config.toml"),
line_range=LineRange(start=2, end=2),
line_content=[line],
@@ -619,10 +619,10 @@ def test_json_multiline_dict(self, tmp_path: Path):
class Config:
db: int
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert str(err) == "Config loading errors (1)"
@@ -643,10 +643,10 @@ class Config:
db: int
name: str
- metadata = Source(file_=yaml_file)
+ metadata = Yaml12Source(file=yaml_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert str(err) == "Config loading errors (1)"
@@ -666,10 +666,10 @@ def test_toml_multiline_array(self, tmp_path: Path):
class Config:
tags: int
- metadata = Source(file_=toml_file)
+ metadata = Toml11Source(file=toml_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert str(err) == "Config loading errors (1)"
@@ -689,10 +689,10 @@ def test_json_multiline_array(self, tmp_path: Path):
class Config:
tags: int
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert str(err) == "Config loading errors (1)"
@@ -714,8 +714,8 @@ class Product:
class Config:
product: list[Product]
- metadata = Source(file_=array_of_tables_toml_file)
- result = load(metadata, Config)
+ metadata = Toml11Source(file=array_of_tables_toml_file)
+ result = load(metadata, schema=Config)
assert result == Config(
product=[
@@ -735,10 +735,10 @@ class Product:
class Config:
product: list[Product]
- metadata = Source(file_=array_of_tables_error_first_toml_file)
+ metadata = Toml11Source(file=array_of_tables_error_first_toml_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -760,10 +760,10 @@ class Product:
class Config:
product: list[Product]
- metadata = Source(file_=array_of_tables_error_last_toml_file)
+ metadata = Toml11Source(file=array_of_tables_error_last_toml_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/errors/test_fixtures.py b/tests/errors/test_fixtures.py
index 48f119b..8860b4d 100644
--- a/tests/errors/test_fixtures.py
+++ b/tests/errors/test_fixtures.py
@@ -4,9 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader
+from dature import EnvFileSource, IniSource, Json5Source, JsonSource, Toml11Source, Yaml11Source, Yaml12Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from dature.validators.number import Ge, Le
from dature.validators.sequence import MinItems, UniqueItems
from dature.validators.string import MaxLength, MinLength, RegexPattern
@@ -14,8 +13,8 @@
@dataclass
class Address:
- city: Annotated[str, MinLength(value=2)]
- zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")]
+ city: Annotated[str, MinLength(2)]
+ zip_code: Annotated[str, RegexPattern(r"^\d{5}$")]
@dataclass
@@ -23,10 +22,10 @@ class ErrorConfig:
port: int
host: str
status: Literal["active", "inactive"]
- name: Annotated[str, MinLength(value=3), MaxLength(value=50)]
- email: Annotated[str, RegexPattern(pattern=r"^[\w.-]+@[\w.-]+\.\w+$")]
- age: Annotated[int, Ge(value=0), Le(value=150)]
- tags: Annotated[list[str], MinItems(value=1), UniqueItems()]
+ name: Annotated[str, MinLength(3), MaxLength(50)]
+ email: Annotated[str, RegexPattern(r"^[\w.-]+@[\w.-]+\.\w+$")]
+ age: Annotated[int, Ge(0), Le(150)]
+ tags: Annotated[list[str], MinItems(1), UniqueItems()]
address: Address
@@ -44,24 +43,23 @@ class LoadErrorConfig:
@dataclass
class ValidationErrorConfig:
- name: Annotated[str, MinLength(value=3), MaxLength(value=50)]
- email: Annotated[str, RegexPattern(pattern=r"^[\w.-]+@[\w.-]+\.\w+$")]
- age: Annotated[int, Ge(value=0), Le(value=150)]
- tags: Annotated[list[str], MinItems(value=1), UniqueItems()]
+ name: Annotated[str, MinLength(3), MaxLength(50)]
+ email: Annotated[str, RegexPattern(r"^[\w.-]+@[\w.-]+\.\w+$")]
+ age: Annotated[int, Ge(0), Le(150)]
+ tags: Annotated[list[str], MinItems(1), UniqueItems()]
address: Address
FIXTURES_DIR = Path(__file__).parent.parent / "fixtures"
ALL_SOURCES = [
- ("errors.json", {}),
- ("errors.json5", {}),
- ("errors.yaml", {}),
- ("errors.yaml", {"loader": Yaml11Loader}),
- ("errors.yaml", {"loader": Yaml12Loader}),
- ("errors.toml", {}),
- ("errors.ini", {"prefix": "config"}),
- ("errors.env", {}),
+ ("errors.json", JsonSource, {}),
+ ("errors.json5", Json5Source, {}),
+ ("errors.yaml", Yaml11Source, {}),
+ ("errors.yaml", Yaml12Source, {}),
+ ("errors.toml", Toml11Source, {}),
+ ("errors.ini", IniSource, {"prefix": "config"}),
+ ("errors.env", EnvFileSource, {}),
]
EXPECTED_LOAD_ERRORS = [
@@ -93,30 +91,32 @@ def _assert_field_errors(
assert exc.message == message
-@pytest.mark.parametrize(("fixture_file", "metadata_kwargs"), ALL_SOURCES)
+@pytest.mark.parametrize(("fixture_file", "source_class", "source_kwargs"), ALL_SOURCES)
def test_load_error_types(
fixture_file: str,
- metadata_kwargs: dict[str, str],
+ source_class: type,
+ source_kwargs: dict[str, str],
) -> None:
- metadata = Source(file_=str(FIXTURES_DIR / fixture_file), **metadata_kwargs)
+ metadata = source_class(file=str(FIXTURES_DIR / fixture_file), **source_kwargs)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, LoadErrorConfig)
+ load(metadata, schema=LoadErrorConfig)
err = exc_info.value
assert str(err) == f"LoadErrorConfig loading errors ({len(EXPECTED_LOAD_ERRORS)})"
_assert_field_errors(err.exceptions, EXPECTED_LOAD_ERRORS)
-@pytest.mark.parametrize(("fixture_file", "metadata_kwargs"), ALL_SOURCES)
+@pytest.mark.parametrize(("fixture_file", "source_class", "source_kwargs"), ALL_SOURCES)
def test_validation_error_types(
fixture_file: str,
- metadata_kwargs: dict[str, str],
+ source_class: type,
+ source_kwargs: dict[str, str],
) -> None:
- metadata = Source(file_=str(FIXTURES_DIR / fixture_file), **metadata_kwargs)
+ metadata = source_class(file=str(FIXTURES_DIR / fixture_file), **source_kwargs)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, ValidationErrorConfig)
+ load(metadata, schema=ValidationErrorConfig)
err = exc_info.value
assert str(err) == f"ValidationErrorConfig loading errors ({len(EXPECTED_VALIDATION_ERRORS)})"
diff --git a/tests/errors/test_location.py b/tests/errors/test_location.py
index ae36a41..2295f35 100644
--- a/tests/errors/test_location.py
+++ b/tests/errors/test_location.py
@@ -1,31 +1,29 @@
from pathlib import Path
-from dature.errors.exceptions import LineRange
+from dature import EnvFileSource, EnvSource, JsonSource, Toml11Source
+from dature.errors import LineRange
from dature.errors.location import ErrorContext, resolve_source_location
-from dature.sources_loader.env_ import EnvFileLoader, EnvLoader
-from dature.sources_loader.json_ import JsonLoader
-from dature.sources_loader.toml_ import Toml11Loader
class TestResolveSourceLocation:
def test_env_source(self):
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=EnvLoader,
+ source_class=EnvSource,
file_path=None,
prefix="APP_",
split_symbols="__",
)
locs = resolve_source_location(["database", "port"], ctx, file_content=None)
assert len(locs) == 1
- assert locs[0].display_label == "ENV"
+ assert locs[0].location_label == "ENV"
assert locs[0].env_var_name == "APP_DATABASE__PORT"
assert locs[0].file_path is None
def test_env_source_no_prefix(self):
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=EnvLoader,
+ source_class=EnvSource,
file_path=None,
prefix=None,
split_symbols="__",
@@ -36,7 +34,7 @@ def test_env_source_no_prefix(self):
def test_env_source_custom_split_symbols(self):
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=EnvLoader,
+ source_class=EnvSource,
file_path=None,
prefix="APP_",
split_symbols="_",
@@ -48,13 +46,13 @@ def test_json_source_with_line(self):
content = '{\n "timeout": "30",\n "name": "test"\n}'
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=JsonLoader,
+ source_class=JsonSource,
file_path=Path("config.json"),
prefix=None,
split_symbols="__",
)
locs = resolve_source_location(["timeout"], ctx, file_content=content)
- assert locs[0].display_label == "FILE"
+ assert locs[0].location_label == "FILE"
assert locs[0].line_range == LineRange(start=2, end=2)
assert locs[0].line_content == ['"timeout": "30",']
@@ -62,36 +60,36 @@ def test_toml_source_with_line(self):
content = 'timeout = "30"\nname = "test"'
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=Toml11Loader,
+ source_class=Toml11Source,
file_path=Path("config.toml"),
prefix=None,
split_symbols="__",
)
locs = resolve_source_location(["timeout"], ctx, file_content=content)
- assert locs[0].display_label == "FILE"
+ assert locs[0].location_label == "FILE"
assert locs[0].line_range == LineRange(start=1, end=1)
assert locs[0].line_content == ['timeout = "30"']
- def test_envfile_source(self):
+ def test_envfile_source(self):
content = "# comment\nAPP_TIMEOUT=30\nAPP_NAME=test"
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=EnvFileLoader,
+ source_class=EnvFileSource,
file_path=Path(".env"),
prefix="APP_",
split_symbols="__",
)
locs = resolve_source_location(["timeout"], ctx, file_content=content)
- assert locs[0].display_label == "ENV FILE"
+ assert locs[0].location_label == "ENV FILE"
assert locs[0].env_var_name == "APP_TIMEOUT"
assert locs[0].line_range == LineRange(start=2, end=2)
assert locs[0].line_content == ["APP_TIMEOUT=30"]
- def test_file_source_does_not_mask_non_secret_field(self):
+ def test_file_source_does_not_mask_non_secret_field(self):
content = '{\n "password": "secret123",\n "timeout": "30"\n}'
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=JsonLoader,
+ source_class=JsonSource,
file_path=Path("config.json"),
prefix=None,
split_symbols="__",
@@ -100,11 +98,11 @@ def test_file_source_does_not_mask_non_secret_field(self):
locs = resolve_source_location(["timeout"], ctx, file_content=content)
assert locs[0].line_content == ['"timeout": "30"']
- def test_file_source_masks_secret_field(self):
+ def test_file_source_masks_secret_field(self):
content = '{\n "password": "secret123",\n "timeout": "30"\n}'
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=JsonLoader,
+ source_class=JsonSource,
file_path=Path("config.json"),
prefix=None,
split_symbols="__",
@@ -113,11 +111,11 @@ def test_file_source_masks_secret_field(self):
locs = resolve_source_location(["password"], ctx, file_content=content)
assert locs[0].line_content == ['"password": "",']
- def test_file_source_masks_line_when_secret_on_same_line(self):
+ def test_file_source_masks_line_when_secret_on_same_line(self):
content = '{"password": "secret123", "timeout": "30"}'
ctx = ErrorContext(
dataclass_name="Config",
- loader_class=JsonLoader,
+ source_class=JsonSource,
file_path=Path("config.json"),
prefix=None,
split_symbols="__",
diff --git a/tests/expansion/test_env_expand.py b/tests/expansion/test_env_expand.py
index a5d3179..461aa5f 100644
--- a/tests/expansion/test_env_expand.py
+++ b/tests/expansion/test_env_expand.py
@@ -1,6 +1,6 @@
import pytest
-from dature.errors.exceptions import EnvVarExpandError
+from dature.errors import EnvVarExpandError
from dature.expansion.env_expand import expand_env_vars, expand_string
from dature.types import JSONValue
diff --git a/tests/expansion/test_expand_file_path.py b/tests/expansion/test_expand_file_path.py
index fa2fd23..8823ed5 100644
--- a/tests/expansion/test_expand_file_path.py
+++ b/tests/expansion/test_expand_file_path.py
@@ -3,9 +3,9 @@
import pytest
-from dature.errors.exceptions import EnvVarExpandError
+from dature import EnvSource, Toml11Source
+from dature.errors import EnvVarExpandError
from dature.expansion.env_expand import expand_file_path
-from dature.metadata import Source
SEP = os.sep
@@ -100,7 +100,7 @@ def test_disabled_no_expansion(self) -> None:
class TestSourceFileExpansion:
@pytest.mark.parametrize(
- ("file_", "env_vars", "expected"),
+ ("file", "env_vars", "expected"),
[
("$DATURE_DIR/config.toml", {"DATURE_DIR": "/etc/app"}, "/etc/app/config.toml"),
(
@@ -130,10 +130,10 @@ class TestSourceFileExpansion:
],
ids=["str-dir", "path-dir", "str-filename-env", "no-vars", "str-windows-percent", "path-dir-and-filename"],
)
- def test_file_expanded(
+ def test_file_expanded(
self,
monkeypatch: pytest.MonkeyPatch,
- file_: str | Path,
+ file: str | Path,
env_vars: dict[str, str],
expected: str,
) -> None:
@@ -142,17 +142,17 @@ def test_file_expanded(
for key, value in env_vars.items():
monkeypatch.setenv(key, value)
- source = Source(file_=file_)
+ source = Toml11Source(file=file)
- assert source.file_ == expected
+ assert source.file == expected
- def test_none_file_unchanged(self) -> None:
- source = Source()
+ def test_none_file_unchanged(self) -> None:
+ source = EnvSource()
- assert source.file_ is None
+ assert not hasattr(source, "file")
def test_missing_var_raises(self, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.delenv("DATURE_MISSING", raising=False)
with pytest.raises(EnvVarExpandError):
- Source(file_="$DATURE_MISSING/config.toml")
+ Toml11Source(file="$DATURE_MISSING/config.toml")
diff --git a/tests/sources_loader/loaders/__init__.py b/tests/loaders/__init__.py
similarity index 100%
rename from tests/sources_loader/loaders/__init__.py
rename to tests/loaders/__init__.py
diff --git a/tests/sources_loader/loaders/test_base.py b/tests/loaders/test_base.py
similarity index 82%
rename from tests/sources_loader/loaders/test_base.py
rename to tests/loaders/test_base.py
index afdff07..0e7405e 100644
--- a/tests/sources_loader/loaders/test_base.py
+++ b/tests/loaders/test_base.py
@@ -8,7 +8,7 @@
from dature.fields.byte_size import ByteSize
from dature.fields.payment_card import PaymentCardNumber
from dature.fields.secret_str import SecretStr
-from dature.sources_loader.loaders.base import (
+from dature.loaders.base import (
base64url_bytes_from_string,
base64url_str_from_string,
byte_size_from_string,
@@ -35,12 +35,19 @@ def test_bytes_from_string(input_value, expected):
("input_value", "expected"),
[
("1+2j", 1 + 2j),
+ ("1 + 2j", 1 + 2j),
],
+ ids=["compact", "with-spaces"],
)
def test_complex_from_string(input_value, expected):
assert complex_from_string(input_value) == expected
+def test_complex_from_string_invalid():
+ with pytest.raises(ValueError, match="complex\\(\\) arg is a malformed string"):
+ complex_from_string("not-a-complex")
+
+
@pytest.mark.parametrize(
("input_value", "expected"),
[
@@ -49,6 +56,7 @@ def test_complex_from_string(input_value, expected):
("0:00:01", timedelta(seconds=1)),
("0:45:00", timedelta(minutes=45)),
("2:03:04.500000", timedelta(hours=2, minutes=3, seconds=4, microseconds=500000)),
+ ("0:00:01.5", timedelta(seconds=1, microseconds=500000)),
("1 day, 2:30:00", timedelta(days=1, hours=2, minutes=30)),
("2 days, 0:00:00", timedelta(days=2)),
("1 day, 2:03:04.500000", timedelta(days=1, hours=2, minutes=3, seconds=4, microseconds=500000)),
@@ -83,9 +91,14 @@ def test_timedelta_from_string(input_value: str, expected: timedelta):
assert timedelta_from_string(input_value) == expected
-def test_timedelta_from_string_invalid():
+@pytest.mark.parametrize(
+ "input_value",
+ ["not a timedelta", ""],
+ ids=["invalid-text", "empty-string"],
+)
+def test_timedelta_from_string_invalid(input_value: str):
with pytest.raises(ValueError, match="Invalid timedelta format"):
- timedelta_from_string("not a timedelta")
+ timedelta_from_string(input_value)
@pytest.mark.parametrize(
@@ -146,18 +159,24 @@ def test_secret_str_from_string(input_value: str, expected: SecretStr):
@pytest.mark.parametrize(
- ("input_value", "expected_brand"),
+ ("card_number", "expected_brand"),
[
- ("4111111111111111", "Visa"),
- ("5500000000000004", "Mastercard"),
+ ("4000000000000002", "Visa"),
+ ("5100000000000008", "Mastercard"),
],
+ ids=["visa", "mastercard"],
)
-def test_payment_card_number_from_string(input_value: str, expected_brand: str):
- result = payment_card_number_from_string(input_value)
+def test_payment_card_number_from_string(card_number, expected_brand: str):
+ result = payment_card_number_from_string(card_number)
assert isinstance(result, PaymentCardNumber)
assert result.brand == expected_brand
+def test_payment_card_number_from_string_invalid():
+ with pytest.raises(ValueError, match="Card number must be 12-19 digits"):
+ payment_card_number_from_string("1234")
+
+
@pytest.mark.parametrize(
("input_value", "expected"),
[
@@ -167,3 +186,8 @@ def test_payment_card_number_from_string(input_value: str, expected_brand: str):
)
def test_byte_size_from_string(input_value: str | int, expected: ByteSize):
assert byte_size_from_string(input_value) == expected
+
+
+def test_byte_size_from_string_invalid():
+ with pytest.raises(ValueError, match="Invalid byte size format"):
+ byte_size_from_string("not-a-size")
diff --git a/tests/sources_loader/loaders/test_common.py b/tests/loaders/test_common.py
similarity index 68%
rename from tests/sources_loader/loaders/test_common.py
rename to tests/loaders/test_common.py
index f300faf..7876dc4 100644
--- a/tests/sources_loader/loaders/test_common.py
+++ b/tests/loaders/test_common.py
@@ -1,11 +1,12 @@
"""Tests for common loader functions (used across multiple formats)."""
+import math
from datetime import date, datetime, time
import pytest
from adaptix.load_error import TypeLoadError
-from dature.sources_loader.loaders.common import (
+from dature.loaders.common import (
bool_loader,
bytearray_from_json_string,
bytearray_from_string,
@@ -13,10 +14,12 @@
date_passthrough,
datetime_from_string,
datetime_passthrough,
+ float_from_string,
float_passthrough,
int_from_string,
none_from_empty_string,
optional_from_empty_string,
+ str_from_scalar,
time_from_string,
)
@@ -54,6 +57,16 @@ def test_time_from_string(input_value, expected):
assert time_from_string(input_value) == expected
+@pytest.mark.parametrize(
+ "input_value",
+ ["10", "10:30:45:99", "abc"],
+ ids=["one-part", "four-parts", "non-numeric"],
+)
+def test_time_from_string_invalid(input_value):
+ with pytest.raises(ValueError, match="Invalid time format"):
+ time_from_string(input_value)
+
+
@pytest.mark.parametrize(
("input_value", "expected"),
[
@@ -97,6 +110,11 @@ def test_none_from_empty_string(input_value, expected):
assert none_from_empty_string(input_value) is expected
+def test_none_from_empty_string_non_empty_raises():
+ with pytest.raises(TypeLoadError):
+ none_from_empty_string("not empty")
+
+
@pytest.mark.parametrize(
("input_value", "expected"),
[
@@ -108,6 +126,22 @@ def test_optional_from_empty_string(input_value, expected):
assert optional_from_empty_string(input_value) == expected
+# === str_from_scalar ===
+
+
+@pytest.mark.parametrize(
+ ("input_value", "expected"),
+ [
+ ("hello", "hello"),
+ (3.14, "3.14"),
+ (True, "True"),
+ ],
+ ids=["string", "float", "bool"],
+)
+def test_str_from_scalar(input_value, expected):
+ assert str_from_scalar(input_value) == expected
+
+
# === Bool converter ===
@@ -133,6 +167,11 @@ def test_bool_loader(input_value, expected):
assert bool_loader(input_value) is expected
+def test_bool_loader_invalid_string():
+ with pytest.raises(TypeLoadError):
+ bool_loader("maybe")
+
+
# === Int converter ===
@@ -158,6 +197,11 @@ def test_int_from_string_rejects_invalid(input_value):
int_from_string(input_value)
+def test_int_from_string_invalid_string():
+ with pytest.raises(ValueError, match="invalid literal for int"):
+ int_from_string("not-a-number")
+
+
# === Float passthrough ===
@@ -183,6 +227,35 @@ def test_float_passthrough_rejects_invalid(input_value):
float_passthrough(input_value)
+# === Float from string ===
+
+
+@pytest.mark.parametrize(
+ ("input_value", "expected"),
+ [
+ ("3.14", 3.14),
+ ("inf", float("inf")),
+ ("+inf", float("inf")),
+ ("-inf", float("-inf")),
+ (3.14, 3.14),
+ (42, 42.0),
+ ],
+ ids=["string", "inf", "plus-inf", "minus-inf", "float-passthrough", "int-to-float"],
+)
+def test_float_from_string(input_value, expected):
+ assert float_from_string(input_value) == expected
+
+
+def test_float_from_string_nan():
+ result = float_from_string("nan")
+ assert math.isnan(result)
+
+
+def test_float_from_string_invalid():
+ with pytest.raises(ValueError, match="could not convert string to float"):
+ float_from_string("not-a-number")
+
+
# === JSON string converters ===
@@ -191,7 +264,10 @@ def test_float_passthrough_rejects_invalid(input_value):
[
("hello", bytearray(b"hello")),
("", bytearray()),
+ ("[72, 101, 108]", bytearray([72, 101, 108])),
+ ('{"key": "val"}', bytearray(b'{"key": "val"}')),
],
+ ids=["plain-string", "empty", "json-array", "non-bracket-string"],
)
def test_bytearray_from_json_string(input_value, expected):
assert bytearray_from_json_string(input_value) == expected
diff --git a/tests/sources_loader/loaders/test_json5_.py b/tests/loaders/test_json5_.py
similarity index 84%
rename from tests/sources_loader/loaders/test_json5_.py
rename to tests/loaders/test_json5_.py
index 0430f0b..8a095b6 100644
--- a/tests/sources_loader/loaders/test_json5_.py
+++ b/tests/loaders/test_json5_.py
@@ -3,7 +3,7 @@
import pytest
from json5 import JsonIdentifier
-from dature.sources_loader.loaders.json5_ import str_from_json_identifier
+from dature.loaders.json5_ import str_from_json_identifier
@pytest.mark.parametrize(
diff --git a/tests/sources_loader/loaders/test_toml_.py b/tests/loaders/test_toml_.py
similarity index 84%
rename from tests/sources_loader/loaders/test_toml_.py
rename to tests/loaders/test_toml_.py
index 4948c85..cb15714 100644
--- a/tests/sources_loader/loaders/test_toml_.py
+++ b/tests/loaders/test_toml_.py
@@ -4,7 +4,7 @@
import pytest
-from dature.sources_loader.loaders.toml_ import time_passthrough
+from dature.loaders.toml_ import time_passthrough
@pytest.mark.parametrize(
diff --git a/tests/sources_loader/loaders/test_yaml_.py b/tests/loaders/test_yaml_.py
similarity index 84%
rename from tests/sources_loader/loaders/test_yaml_.py
rename to tests/loaders/test_yaml_.py
index 39c363f..16f7fff 100644
--- a/tests/sources_loader/loaders/test_yaml_.py
+++ b/tests/loaders/test_yaml_.py
@@ -4,7 +4,7 @@
import pytest
-from dature.sources_loader.loaders.yaml_ import time_from_int
+from dature.loaders.yaml_ import time_from_int
@pytest.mark.parametrize(
diff --git a/tests/loading/test_context.py b/tests/loading/test_context.py
index 2f6d6b7..0ba8804 100644
--- a/tests/loading/test_context.py
+++ b/tests/loading/test_context.py
@@ -2,9 +2,24 @@
from dataclasses import dataclass, fields
from enum import Flag
+from pathlib import Path
from typing import Any
+from unittest.mock import MagicMock
-from dature.loading.context import coerce_flag_fields, merge_fields
+import pytest
+
+from dature.field_path import FieldPath
+from dature.loading.context import (
+ apply_skip_invalid,
+ build_error_ctx,
+ coerce_flag_fields,
+ get_allowed_fields,
+ make_validating_post_init,
+ merge_fields,
+)
+from dature.sources.env_ import EnvSource
+from dature.sources.json_ import JsonSource
+from dature.sources.retort import _retort_cache_key, ensure_retort
class TestMergeFields:
@@ -141,3 +156,134 @@ def test_flag_object_coerced_to_int(self):
result = coerce_flag_fields(data, self.FlagConfig)
assert result == {"name": "test", "perms": 3}
+
+
+class TestBuildErrorCtx:
+ def test_file_source_no_split_symbols(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text("{}")
+ source = JsonSource(file=json_file, prefix="app")
+
+ ctx = build_error_ctx(source, "MyConfig")
+
+ assert ctx.dataclass_name == "MyConfig"
+ assert ctx.source_class is JsonSource
+ assert ctx.prefix == "app"
+ assert ctx.split_symbols is None
+
+ def test_flat_key_source_has_split_symbols(self):
+ source = EnvSource(prefix="APP", split_symbols="__")
+
+ ctx = build_error_ctx(source, "MyConfig")
+
+ assert ctx.split_symbols == "__"
+
+
+class TestGetAllowedFields:
+ def test_bool_returns_none(self):
+ assert get_allowed_fields(skip_value=True) is None
+ assert get_allowed_fields(skip_value=False) is None
+
+ def test_tuple_of_field_paths(self):
+ @dataclass
+ class Cfg:
+ name: str
+ port: int
+
+ fp = FieldPath(owner=Cfg, parts=("name",))
+
+ result = get_allowed_fields(skip_value=(fp,), schema=Cfg)
+
+ assert result == {"name"}
+
+
+class TestApplySkipInvalid:
+ @pytest.mark.parametrize("skip_if_invalid", [False, None], ids=["false", "none"])
+ def test_falsy_returns_raw_unchanged(self, tmp_path: Path, skip_if_invalid):
+ json_file = tmp_path / "config.json"
+ json_file.write_text("{}")
+
+ @dataclass
+ class Cfg:
+ name: str
+
+ source = JsonSource(file=json_file)
+ raw = {"name": "hello"}
+
+ result = apply_skip_invalid(
+ raw=raw,
+ skip_if_invalid=skip_if_invalid,
+ source=source,
+ schema=Cfg,
+ log_prefix="[test]",
+ )
+
+ assert result.cleaned_dict == raw
+ assert result.skipped_paths == []
+
+
+class TestEnsureRetort:
+ def test_creates_and_caches_retort(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text("{}")
+
+ @dataclass
+ class Cfg:
+ name: str
+
+ source = JsonSource(file=json_file)
+ key = _retort_cache_key(Cfg, None)
+ assert key not in source.retorts
+
+ ensure_retort(source, Cfg)
+ assert key in source.retorts
+
+ first = source.retorts[key]
+ ensure_retort(source, Cfg)
+ assert source.retorts[key] is first
+
+
+class TestMakeValidatingPostInit:
+ @dataclass
+ class Cfg:
+ name: str
+
+ def test_loading_flag_skips_validation(self):
+ ctx = MagicMock()
+ ctx.loading = True
+ ctx.validating = False
+ ctx.original_post_init = None
+
+ post_init = make_validating_post_init(ctx)
+ instance = MagicMock()
+ post_init(instance)
+
+ ctx.validation_loader.assert_not_called()
+
+ def test_validating_flag_skips_reentrant(self):
+ ctx = MagicMock()
+ ctx.loading = False
+ ctx.validating = True
+ ctx.original_post_init = None
+
+ post_init = make_validating_post_init(ctx)
+ instance = MagicMock()
+ post_init(instance)
+
+ ctx.validation_loader.assert_not_called()
+
+ def test_calls_original_post_init(self):
+ original = MagicMock()
+ ctx = MagicMock()
+ ctx.loading = False
+ ctx.validating = False
+ ctx.original_post_init = original
+ ctx.cls = self.Cfg
+ ctx.validation_loader = MagicMock()
+ ctx.error_ctx = MagicMock()
+
+ post_init = make_validating_post_init(ctx)
+ instance = self.Cfg(name="test")
+ post_init(instance)
+
+ original.assert_called_once_with(instance)
diff --git a/tests/loading/test_field_merges.py b/tests/loading/test_field_merges.py
index 67cf939..a56684e 100644
--- a/tests/loading/test_field_merges.py
+++ b/tests/loading/test_field_merges.py
@@ -1,13 +1,17 @@
"""Tests for per-field merge strategies (field_merges)."""
+import logging
+from collections.abc import Callable
from dataclasses import dataclass
from pathlib import Path
+from typing import Any
import pytest
-from dature import FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load
-from dature.errors.exceptions import MergeConflictError
+from dature import JsonSource, load
+from dature.errors import MergeConflictError
from dature.field_path import F
+from dature.types import FieldMergeStrategyName
class TestFieldMergesFunction:
@@ -24,13 +28,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- strategy=MergeStrategy.LAST_WINS,
- field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ strategy="last_wins",
+ field_merges={F[Config].host: "first_wins"},
)
assert result.host == "default-host"
@@ -49,13 +51,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_WINS,
- field_merges=(MergeRule(F[Config].port, FieldMergeStrategy.LAST_WINS),),
- ),
- Config,
+ JsonSource(file=first),
+ JsonSource(file=second),
+ schema=Config,
+ strategy="first_wins",
+ field_merges={F[Config].port: "last_wins"},
)
assert result.host == "first-host"
@@ -74,12 +74,10 @@ class Config:
name: str
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].tags: "append"},
)
assert result.tags == ["a", "b", "c", "d"]
@@ -97,12 +95,10 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND_UNIQUE),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].tags: "append_unique"},
)
assert result.tags == ["a", "b", "c", "d"]
@@ -119,12 +115,10 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].tags: "prepend"},
)
assert result.tags == ["c", "d", "a", "b"]
@@ -141,12 +135,10 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.PREPEND_UNIQUE),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].tags: "prepend_unique"},
)
assert result.tags == ["b", "c", "d", "a"]
@@ -168,12 +160,10 @@ class Config:
database: Database
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.FIRST_WINS),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].database.host: "first_wins"},
)
assert result.database.host == "localhost"
@@ -192,12 +182,10 @@ class Config:
with pytest.raises(TypeError, match="APPEND strategy requires both values to be lists"):
load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].value, FieldMergeStrategy.APPEND),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].value: "append"},
)
def test_multiple_merge_rules(self, tmp_path: Path):
@@ -214,16 +202,14 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- strategy=MergeStrategy.LAST_WINS,
- field_merges=(
- MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),
- ),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ strategy="last_wins",
+ field_merges={
+ F[Config].host: "first_wins",
+ F[Config].tags: "append",
+ },
)
assert result.host == "default-host"
@@ -243,12 +229,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={},
)
assert result.host == "localhost"
@@ -263,16 +247,14 @@ def test_decorator_with_field_merges(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "override-host", "port": 9090, "tags": ["b"]}')
- meta = Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(
- MergeRule(F["Config"].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F["Config"].tags, FieldMergeStrategy.APPEND),
- ),
+ @load(
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ field_merges={
+ F["Config"].host: "first_wins",
+ F["Config"].tags: "append",
+ },
)
-
- @load(meta)
@dataclass
class Config:
host: str
@@ -299,13 +281,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={F[Config].host: "last_wins"},
)
assert result.host == "host-b"
@@ -324,13 +304,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={F[Config].host: "first_wins"},
)
assert result.host == "host-a"
@@ -350,13 +328,11 @@ class Config:
with pytest.raises(MergeConflictError):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.LAST_WINS),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={F[Config].host: "last_wins"},
)
def test_nested_field_merge_suppresses_conflict(self, tmp_path: Path):
@@ -376,13 +352,11 @@ class Config:
name: str
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={F[Config].database.host: "last_wins"},
)
assert result.database.host == "host-b"
@@ -401,16 +375,14 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(
- MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].port, max),
- ),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={
+ F[Config].host: "first_wins",
+ F[Config].port: max,
+ },
)
assert result.host == "host-a"
@@ -422,22 +394,22 @@ class TestFieldMergesErrors:
("strategy", "match"),
[
pytest.param(
- FieldMergeStrategy.APPEND,
+ "append",
"APPEND strategy requires both values to be lists",
id="append",
),
pytest.param(
- FieldMergeStrategy.APPEND_UNIQUE,
+ "append_unique",
"APPEND_UNIQUE strategy requires both values to be lists",
id="append_unique",
),
pytest.param(
- FieldMergeStrategy.PREPEND,
+ "prepend",
"PREPEND strategy requires both values to be lists",
id="prepend",
),
pytest.param(
- FieldMergeStrategy.PREPEND_UNIQUE,
+ "prepend_unique",
"PREPEND_UNIQUE strategy requires both values to be lists",
id="prepend_unique",
),
@@ -446,7 +418,7 @@ class TestFieldMergesErrors:
def test_list_strategy_on_strings_raises_type_error(
self,
tmp_path: Path,
- strategy: FieldMergeStrategy,
+ strategy: FieldMergeStrategyName,
match: str,
):
a = tmp_path / "a.json"
@@ -461,34 +433,32 @@ class Config:
with pytest.raises(TypeError, match=match):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
@pytest.mark.parametrize(
("strategy", "match"),
[
pytest.param(
- FieldMergeStrategy.APPEND,
+ "append",
"APPEND strategy requires both values to be lists",
id="append",
),
pytest.param(
- FieldMergeStrategy.APPEND_UNIQUE,
+ "append_unique",
"APPEND_UNIQUE strategy requires both values to be lists",
id="append_unique",
),
pytest.param(
- FieldMergeStrategy.PREPEND,
+ "prepend",
"PREPEND strategy requires both values to be lists",
id="prepend",
),
pytest.param(
- FieldMergeStrategy.PREPEND_UNIQUE,
+ "prepend_unique",
"PREPEND_UNIQUE strategy requires both values to be lists",
id="prepend_unique",
),
@@ -497,7 +467,7 @@ class Config:
def test_list_strategy_on_integers_raises_type_error(
self,
tmp_path: Path,
- strategy: FieldMergeStrategy,
+ strategy: FieldMergeStrategyName,
match: str,
):
a = tmp_path / "a.json"
@@ -512,24 +482,22 @@ class Config:
with pytest.raises(TypeError, match=match):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
@pytest.mark.parametrize(
("strategy", "match"),
[
pytest.param(
- FieldMergeStrategy.APPEND,
+ "append",
"APPEND strategy requires both values to be lists, got list and str",
id="append",
),
pytest.param(
- FieldMergeStrategy.PREPEND,
+ "prepend",
"PREPEND strategy requires both values to be lists, got list and str",
id="prepend",
),
@@ -538,7 +506,7 @@ class Config:
def test_list_strategy_mixed_types_raises_type_error(
self,
tmp_path: Path,
- strategy: FieldMergeStrategy,
+ strategy: FieldMergeStrategyName,
match: str,
):
a = tmp_path / "a.json"
@@ -553,12 +521,10 @@ class Config:
with pytest.raises(TypeError, match=match):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
@pytest.mark.parametrize(
@@ -571,7 +537,7 @@ class Config:
def test_max_min_on_lists_compares_elementwise(
self,
tmp_path: Path,
- strategy: object,
+ strategy: Callable[..., Any],
expected: list[int],
):
a = tmp_path / "a.json"
@@ -585,12 +551,10 @@ class Config:
value: list[int]
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
assert result.value == expected
@@ -605,7 +569,7 @@ class Config:
def test_max_min_on_dicts_raises_type_error(
self,
tmp_path: Path,
- strategy: object,
+ strategy: Callable[..., Any],
match: str,
):
a = tmp_path / "a.json"
@@ -620,12 +584,10 @@ class Config:
with pytest.raises(TypeError, match=match):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
@pytest.mark.parametrize(
@@ -638,7 +600,7 @@ class Config:
def test_max_min_on_null_raises_type_error(
self,
tmp_path: Path,
- strategy: object,
+ strategy: Callable[..., Any],
match: str,
):
a = tmp_path / "a.json"
@@ -653,12 +615,10 @@ class Config:
with pytest.raises(TypeError, match=match):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].value, strategy),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].value: strategy},
)
def test_field_merge_on_missing_key_in_one_source(self, tmp_path: Path):
@@ -674,12 +634,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].host: "first_wins"},
)
assert result.host == "localhost"
@@ -700,13 +658,11 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].tags: "append"},
)
assert result.tags == ["a", "b", "c"]
@@ -726,13 +682,11 @@ class Config:
priority: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].priority, max),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].priority: max},
)
assert result.priority == 15
@@ -752,13 +706,11 @@ class Config:
priority: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].priority, min),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].priority: min},
)
assert result.priority == 5
@@ -782,15 +734,13 @@ class Config:
inner: Inner
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(
- MergeRule(F[Config].user_name, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].inner.user_name, FieldMergeStrategy.LAST_WINS),
- ),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={
+ F[Config].user_name: "first_wins",
+ F[Config].inner.user_name: "last_wins",
+ },
)
assert result.user_name == "root-first"
@@ -813,15 +763,13 @@ class Config:
inner: Inner
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(
- MergeRule(F[Config].user_name, FieldMergeStrategy.LAST_WINS),
- MergeRule(F[Config].inner.user_name, FieldMergeStrategy.FIRST_WINS),
- ),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={
+ F[Config].user_name: "last_wins",
+ F[Config].inner.user_name: "first_wins",
+ },
)
assert result.user_name == "root-second"
@@ -841,12 +789,10 @@ class Config:
score: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(MergeRule(F[Config].score, sum),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={F[Config].score: sum},
)
assert result.score == 30
@@ -866,13 +812,11 @@ class Config:
score: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].score, sum),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].score: sum},
)
assert result.score == 30
@@ -892,13 +836,11 @@ class Config:
weight: float
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].weight, lambda vals: sum(vals) / len(vals)),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].weight: lambda vals: sum(vals) / len(vals)},
)
assert result.weight == 6.0
@@ -918,13 +860,11 @@ class Config:
priority: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].priority, max),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].priority: max},
)
assert result.priority == 15
@@ -948,13 +888,11 @@ class Config:
database: Database
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].database.port, max),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].database.port: max},
)
assert result.database.port == 7000
@@ -968,15 +906,35 @@ class Config:
score: int
result = load(
- Merge(
- Source(file_=a),
- field_merges=(MergeRule(F[Config].score, sum),),
- ),
- Config,
+ JsonSource(file=a),
+ schema=Config,
+ field_merges={F[Config].score: sum},
)
assert result.score == 42
+ def test_single_source_merge_params_warning(
+ self,
+ tmp_path: Path,
+ caplog: pytest.LogCaptureFixture,
+ ) -> None:
+ a = tmp_path / "a.json"
+ a.write_text('{"score": 42}')
+
+ @dataclass
+ class Config:
+ score: int
+
+ with caplog.at_level(logging.WARNING, logger="dature"):
+ load(
+ JsonSource(file=a),
+ schema=Config,
+ field_merges={F[Config].score: sum},
+ )
+
+ messages = [r.message for r in caplog.records if r.name == "dature"]
+ assert messages == ["Merge-related parameters have no effect with a single source"]
+
def test_callable_with_raise_on_conflict(self, tmp_path: Path):
a = tmp_path / "a.json"
a.write_text('{"score": 10, "name": "app"}')
@@ -990,13 +948,11 @@ class Config:
name: str
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_merges=(MergeRule(F[Config].score, sum),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_merges={F[Config].score: sum},
)
assert result.score == 30
@@ -1016,16 +972,14 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- field_merges=(
- MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].score, sum),
- MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),
- ),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ field_merges={
+ F[Config].host: "first_wins",
+ F[Config].score: sum,
+ F[Config].tags: "append",
+ },
)
assert result.host == "host-a"
@@ -1048,13 +1002,11 @@ class Config:
name: str
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_merges=(MergeRule(F[Config].score, sum),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_merges={F[Config].score: sum},
)
assert result.score == 30
diff --git a/tests/loading/test_loading_common.py b/tests/loading/test_loading_common.py
new file mode 100644
index 0000000..d7a8c8a
--- /dev/null
+++ b/tests/loading/test_loading_common.py
@@ -0,0 +1,38 @@
+import pytest
+
+from dature.config import DatureConfig, MaskingConfig
+from dature.loading.common import resolve_mask_secrets
+
+
+@pytest.mark.parametrize(
+ ("source_level", "load_level", "config_default", "expected"),
+ [
+ (True, None, False, True),
+ (False, None, True, False),
+ (True, False, False, True),
+ (None, True, False, True),
+ (None, False, True, False),
+ (None, None, True, True),
+ (None, None, False, False),
+ ],
+ ids=[
+ "source_true_wins",
+ "source_false_wins",
+ "source_beats_load",
+ "load_true_wins",
+ "load_false_wins",
+ "config_true_default",
+ "config_false_default",
+ ],
+)
+def test_resolve_mask_secrets(
+ monkeypatch: pytest.MonkeyPatch,
+ source_level: bool | None,
+ load_level: bool | None,
+ config_default: bool,
+ expected: bool,
+) -> None:
+ fake_config = DatureConfig(masking=MaskingConfig(mask_secrets=config_default))
+ monkeypatch.setattr("dature.loading.common.config", fake_config)
+ result = resolve_mask_secrets(source_level=source_level, load_level=load_level)
+ assert result == expected
diff --git a/tests/loading/test_multi.py b/tests/loading/test_multi.py
index c5856b2..06d6bd1 100644
--- a/tests/loading/test_multi.py
+++ b/tests/loading/test_multi.py
@@ -8,8 +8,8 @@
import pytest
-from dature import Merge, MergeStrategy, Source, load
-from dature.errors.exceptions import DatureConfigError, MergeConflictError
+from dature import EnvFileSource, EnvSource, JsonSource, Yaml12Source, load
+from dature.errors import DatureConfigError, MergeConflictError
from dature.validators.number import Ge
@@ -27,11 +27,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
)
assert result.host == "localhost"
@@ -50,23 +48,21 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_WINS,
- ),
- Config,
+ JsonSource(file=first),
+ JsonSource(file=second),
+ schema=Config,
+ strategy="first_wins",
)
assert result.host == "first-host"
assert result.port == 3000
def test_partial_sources(self, tmp_path: Path):
- file_a = tmp_path / "a.json"
- file_a.write_text('{"host": "myhost"}')
+ filea = tmp_path / "a.json"
+ filea.write_text('{"host": "myhost"}')
- file_b = tmp_path / "b.json"
- file_b.write_text('{"port": 9090}')
+ fileb = tmp_path / "b.json"
+ fileb.write_text('{"port": 9090}')
@dataclass
class Config:
@@ -74,11 +70,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=file_a),
- Source(file_=file_b),
- ),
- Config,
+ JsonSource(file=filea),
+ JsonSource(file=fileb),
+ schema=Config,
)
assert result.host == "myhost"
@@ -101,11 +95,9 @@ class Config:
database: Database
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
)
assert result.database.host == "prod-host"
@@ -128,12 +120,10 @@ class Config:
debug: bool
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
)
assert result.host == "a-host"
@@ -153,11 +143,9 @@ class Config:
port: int
result = load(
- (
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
)
assert result.host == "localhost"
@@ -176,11 +164,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(prefix="APP_"),
- ),
- Config,
+ JsonSource(file=defaults),
+ EnvSource(prefix="APP_"),
+ schema=Config,
)
assert result.host == "env-host"
@@ -200,11 +186,9 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=defaults),
- Source(prefix="APP_"),
- ),
- Config,
+ JsonSource(file=defaults),
+ EnvSource(prefix="APP_"),
+ schema=Config,
)
err = exc_info.value
@@ -226,11 +210,9 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
)
err = exc_info.value
@@ -247,7 +229,7 @@ class Config:
name: str
port: int
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.name == "test"
assert result.port == 8080
@@ -259,7 +241,7 @@ def test_backward_compat_none_metadata(self, monkeypatch):
class Config:
my_var: str
- result = load(None, Config)
+ result = load(EnvSource(), schema=Config)
assert result.my_var == "from_env"
@@ -272,12 +254,10 @@ def test_decorator_with_merge(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"port": 9090}')
- meta = Merge(
- Source(file_=defaults),
- Source(file_=overrides),
+ @load(
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
)
-
- @load(meta)
@dataclass
class Config:
host: str
@@ -291,9 +271,7 @@ def test_decorator_cache(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
defaults.write_text('{"host": "original", "port": 3000}')
- meta = Merge(Source(file_=defaults))
-
- @load(meta)
+ @load(JsonSource(file=defaults))
@dataclass
class Config:
host: str
@@ -310,9 +288,7 @@ def test_decorator_no_cache(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
defaults.write_text('{"host": "original", "port": 3000}')
- meta = Merge(Source(file_=defaults))
-
- @load(meta, cache=False)
+ @load(JsonSource(file=defaults), cache=False)
@dataclass
class Config:
host: str
@@ -333,10 +309,8 @@ def test_decorator_with_tuple(self, tmp_path: Path):
overrides.write_text('{"port": 8080}')
@load(
- (
- Source(file_=defaults),
- Source(file_=overrides),
- ),
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
)
@dataclass
class Config:
@@ -351,9 +325,7 @@ def test_decorator_init_override(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
defaults.write_text('{"host": "localhost", "port": 3000}')
- meta = Merge(Source(file_=defaults))
-
- @load(meta)
+ @load(JsonSource(file=defaults))
@dataclass
class Config:
host: str
@@ -364,11 +336,9 @@ class Config:
assert config.port == 3000
def test_decorator_not_dataclass(self):
- meta = Merge(Source())
-
with pytest.raises(TypeError, match="must be a dataclass"):
- @load(meta)
+ @load(EnvSource())
class NotDataclass:
pass
@@ -379,13 +349,11 @@ def test_decorator_first_wins(self, tmp_path: Path):
second = tmp_path / "second.json"
second.write_text('{"host": "second-host", "port": 2000}')
- meta = Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_WINS,
+ @load(
+ JsonSource(file=first),
+ JsonSource(file=second),
+ strategy="first_wins",
)
-
- @load(meta)
@dataclass
class Config:
host: str
@@ -411,12 +379,10 @@ class Config:
with pytest.raises(MergeConflictError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert str(exc_info.value) == dedent(f"""\
@@ -442,12 +408,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert result.host == "localhost"
@@ -466,12 +430,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert result.host == "same"
@@ -495,12 +457,10 @@ class Config:
with pytest.raises(MergeConflictError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert str(exc_info.value) == dedent(f"""\
@@ -526,12 +486,10 @@ class Config:
with pytest.raises(MergeConflictError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert str(exc_info.value) == dedent(f"""\
@@ -557,12 +515,10 @@ class Config:
with pytest.raises(MergeConflictError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(prefix="APP_"),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ EnvSource(prefix="APP_"),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert str(exc_info.value) == dedent(f"""\
@@ -588,12 +544,10 @@ class Config:
with pytest.raises(MergeConflictError) as exc_info:
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
+ strategy="raise_on_conflict",
)
assert len(exc_info.value.exceptions) == 2
@@ -631,11 +585,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=yaml_file),
- Source(file_=env_file),
- ),
- Config,
+ Yaml12Source(file=yaml_file),
+ EnvFileSource(file=env_file),
+ schema=Config,
)
assert result.host == "localhost"
@@ -649,7 +601,7 @@ class _Permission(Flag):
class TestCoerceFlagFieldsMergeMode:
- def test_flag_from_env_file_merge(self, tmp_path: Path):
+ def test_flag_from_env_file_merge(self, tmp_path: Path):
json_file = tmp_path / "defaults.json"
json_file.write_text('{"name": "app"}')
@@ -662,11 +614,9 @@ class Config:
perms: _Permission
result = load(
- Merge(
- Source(file_=json_file),
- Source(file_=env_file),
- ),
- Config,
+ JsonSource(file=json_file),
+ EnvFileSource(file=env_file),
+ schema=Config,
)
assert result.perms == _Permission.READ | _Permission.WRITE
@@ -683,11 +633,9 @@ class Config:
perms: _Permission
result = load(
- Merge(
- Source(file_=json_file),
- Source(prefix="APP_"),
- ),
- Config,
+ JsonSource(file=json_file),
+ EnvSource(prefix="APP_"),
+ schema=Config,
)
assert result.perms == _Permission.READ | _Permission.EXECUTE
@@ -705,11 +653,9 @@ class Config:
perms: _Permission
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
)
assert result.perms == _Permission.READ | _Permission.WRITE | _Permission.EXECUTE
@@ -726,12 +672,10 @@ class Config:
name: str
perms: _Permission
- meta = Merge(
- Source(file_=json_file),
- Source(file_=env_file),
+ @load(
+ JsonSource(file=json_file),
+ EnvFileSource(file=env_file),
)
-
- @load(meta)
@dataclass
class MergedConfig:
name: str
@@ -755,12 +699,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=first),
+ Yaml12Source(file=second),
+ schema=Config,
+ strategy="first_found",
)
assert result.host == "first-host"
@@ -777,12 +719,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=missing),
- Source(file_=fallback),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=missing),
+ Yaml12Source(file=fallback),
+ schema=Config,
+ strategy="first_found",
)
assert result.host == "fallback-host"
@@ -801,12 +741,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=broken),
- Source(file_=fallback),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=broken),
+ Yaml12Source(file=fallback),
+ schema=Config,
+ strategy="first_found",
)
assert result.host == "fallback-host"
@@ -823,12 +761,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=missing1),
- Source(file_=missing2),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=missing1),
+ Yaml12Source(file=missing2),
+ schema=Config,
+ strategy="first_found",
)
err = exc_info.value
@@ -850,12 +786,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=partial),
- Source(file_=full),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=partial),
+ Yaml12Source(file=full),
+ schema=Config,
+ strategy="first_found",
)
err = exc_info.value
@@ -877,12 +811,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=bad_type),
- Source(file_=fallback),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=bad_type),
+ Yaml12Source(file=fallback),
+ schema=Config,
+ strategy="first_found",
)
err = exc_info.value
@@ -905,16 +837,14 @@ def test_validation_error_references_correct_source(self, tmp_path: Path):
@dataclass
class Config:
host: str
- port: Annotated[int, Ge(value=1)]
+ port: Annotated[int, Ge(1)]
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
- Config,
+ Yaml12Source(file=first),
+ Yaml12Source(file=second),
+ schema=Config,
+ strategy="first_found",
)
err = exc_info.value
@@ -935,17 +865,15 @@ def test_validation_error_references_correct_source_decorator(self, tmp_path: Pa
second.write_text("host: second-host\nport: 5000\n")
@load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_FOUND,
- ),
+ Yaml12Source(file=first),
+ Yaml12Source(file=second),
+ strategy="first_found",
cache=False,
)
@dataclass
class Config:
host: str
- port: Annotated[int, Ge(value=1)]
+ port: Annotated[int, Ge(1)]
with pytest.raises(DatureConfigError) as exc_info:
Config()
diff --git a/tests/loading/test_resolver.py b/tests/loading/test_resolver.py
deleted file mode 100644
index 3f941e1..0000000
--- a/tests/loading/test_resolver.py
+++ /dev/null
@@ -1,188 +0,0 @@
-from collections.abc import Buffer
-from dataclasses import dataclass
-from io import BytesIO, RawIOBase, StringIO
-from pathlib import Path
-
-import pytest
-
-from dature.field_path import F
-from dature.loading.resolver import resolve_loader, resolve_loader_class
-from dature.metadata import Source
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
-from dature.sources_loader.env_ import EnvFileLoader, EnvLoader
-from dature.sources_loader.ini_ import IniLoader
-from dature.sources_loader.json5_ import Json5Loader
-from dature.sources_loader.json_ import JsonLoader
-from dature.sources_loader.toml_ import Toml11Loader
-from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader
-
-
-class _DummyRawIO(RawIOBase):
- def readinto(self, b: Buffer) -> int: # noqa: ARG002
- return 0
-
-
-class TestResolveLoaderClass:
- def test_explicit_loader(self) -> None:
- assert resolve_loader_class(loader=Yaml11Loader, file_="config.json") is Yaml11Loader
-
- def test_no_file_returns_env(self) -> None:
- assert resolve_loader_class(loader=None, file_=None) is EnvLoader
-
- @pytest.mark.parametrize(
- ("extension", "expected"),
- [
- (".env", EnvFileLoader),
- (".yaml", Yaml12Loader),
- (".yml", Yaml12Loader),
- (".json", JsonLoader),
- (".json5", Json5Loader),
- (".toml", Toml11Loader),
- (".ini", IniLoader),
- (".cfg", IniLoader),
- ],
- )
- def test_extension_mapping(self, extension: str, expected: type) -> None:
- assert resolve_loader_class(loader=None, file_=f"config{extension}") is expected
-
- @pytest.mark.parametrize(
- "filename",
- [".env.local", ".env.development", ".env.production"],
- )
- def test_dotenv_patterns(self, filename: str) -> None:
- assert resolve_loader_class(loader=None, file_=filename) is EnvFileLoader
-
- def test_unknown_extension_raises(self) -> None:
- with pytest.raises(ValueError, match="Cannot determine loader type"):
- resolve_loader_class(loader=None, file_="config.xyz")
-
- def test_uppercase_extension(self) -> None:
- assert resolve_loader_class(loader=None, file_="config.JSON") is JsonLoader
-
- def test_env_loader_with_file_raises(self) -> None:
- with pytest.raises(ValueError, match="EnvLoader reads from environment variables") as exc_info:
- resolve_loader_class(loader=EnvLoader, file_="config.json")
-
- assert str(exc_info.value) == (
- "EnvLoader reads from environment variables and does not use files. "
- "Remove file_ or use a file-based loader instead (e.g. EnvFileLoader)."
- )
-
- def test_env_file_loader_with_file_allowed(self) -> None:
- assert resolve_loader_class(loader=EnvFileLoader, file_=".env.local") is EnvFileLoader
-
- def test_directory_returns_docker_secrets(self, tmp_path) -> None:
- assert resolve_loader_class(loader=None, file_=tmp_path) is DockerSecretsLoader
-
-
-class TestMissingOptionalDependency:
- @pytest.mark.parametrize(
- ("extension", "extra", "blocked_module"),
- [
- (".toml", "toml", "toml_rs"),
- (".yaml", "yaml", "ruamel"),
- (".yml", "yaml", "ruamel"),
- (".json5", "json5", "json5"),
- ],
- )
- def test_missing_extra_raises_helpful_error(
- self,
- extension,
- extra,
- blocked_module,
- block_import,
- ) -> None:
- with block_import(blocked_module):
- with pytest.raises(ImportError) as exc_info:
- resolve_loader_class(loader=None, file_=f"config{extension}")
-
- assert str(exc_info.value) == (
- f"To use '{extension}' files, install the '{extra}' extra: pip install dature[{extra}]"
- )
-
-
-class TestResolveLoader:
- def test_returns_correct_loader_type(self) -> None:
- metadata = Source(file_="config.json")
-
- loader = resolve_loader(metadata)
-
- assert isinstance(loader, JsonLoader)
-
- def test_passes_prefix(self) -> None:
- metadata = Source(prefix="APP_")
-
- loader = resolve_loader(metadata)
-
- assert loader._prefix == "APP_"
-
- def test_passes_name_style(self) -> None:
- metadata = Source(file_="config.json", name_style="lower_snake")
-
- loader = resolve_loader(metadata)
-
- assert loader._name_style == "lower_snake"
-
- def test_passes_field_mapping(self) -> None:
- @dataclass
- class Config:
- key: str
-
- mapping = {F[Config].key: "value"}
- metadata = Source(file_="config.json", field_mapping=mapping)
-
- loader = resolve_loader(metadata)
-
- assert loader._field_mapping == mapping
-
- def test_default_metadata_returns_env_loader(self) -> None:
- metadata = Source()
-
- loader = resolve_loader(metadata)
-
- assert isinstance(loader, EnvLoader)
-
- def test_env_with_file_path(self, tmp_path: Path) -> None:
- env_file = tmp_path / ".env"
- env_file.write_text("KEY=VALUE")
- metadata = Source(file_=env_file)
-
- loader = resolve_loader(metadata)
-
- assert isinstance(loader, EnvFileLoader)
-
-
-class TestFilelikeResolverValidation:
- @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()])
- def test_file_like_without_loader_raises(self, stream) -> None:
- with pytest.raises(TypeError) as exc_info:
- resolve_loader_class(loader=None, file_=stream)
-
- assert str(exc_info.value) == (
- "Cannot determine loader type for a file-like object. "
- "Please specify loader explicitly (e.g. loader=JsonLoader)."
- )
-
- @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()])
- def test_file_like_with_env_loader_raises(self, stream) -> None:
- with pytest.raises(ValueError, match="EnvLoader does not support file-like objects") as exc_info:
- resolve_loader_class(loader=EnvLoader, file_=stream)
-
- assert str(exc_info.value) == (
- "EnvLoader does not support file-like objects. "
- "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects."
- )
-
- @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()])
- def test_file_like_with_docker_secrets_loader_raises(self, stream) -> None:
- with pytest.raises(ValueError, match="DockerSecretsLoader does not support file-like objects") as exc_info:
- resolve_loader_class(loader=DockerSecretsLoader, file_=stream)
-
- assert str(exc_info.value) == (
- "DockerSecretsLoader does not support file-like objects. "
- "Use a file-based loader (e.g. JsonLoader, TomlLoader) with file-like objects."
- )
-
- @pytest.mark.parametrize("stream", [StringIO(), BytesIO(), _DummyRawIO()])
- def test_file_like_with_explicit_loader_allowed(self, stream) -> None:
- assert resolve_loader_class(loader=JsonLoader, file_=stream) is JsonLoader
diff --git a/tests/loading/test_single.py b/tests/loading/test_single.py
index 796b60a..1c0f4ac 100644
--- a/tests/loading/test_single.py
+++ b/tests/loading/test_single.py
@@ -7,22 +7,18 @@
import pytest
+from dature import EnvFileSource, JsonSource
from dature.loading.single import load_as_function, make_decorator
-from dature.metadata import Source
-from dature.sources_loader.env_ import EnvFileLoader
-from dature.sources_loader.json_ import JsonLoader
class TestMakeDecorator:
def test_not_dataclass_raises(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -36,7 +32,7 @@ class NotADataclass:
def test_patches_init(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -44,9 +40,7 @@ class Config:
original_init = Config.__init__
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -57,16 +51,14 @@ class Config:
def test_patches_post_init(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
name: str
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -77,7 +69,7 @@ class Config:
def test_loads_on_init(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "from_file", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -85,9 +77,7 @@ class Config:
port: int
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -100,7 +90,7 @@ class Config:
def test_init_args_override_loaded(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "from_file", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -108,9 +98,7 @@ class Config:
port: int
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -123,16 +111,14 @@ class Config:
def test_returns_same_class(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
name: str
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -143,7 +129,7 @@ class Config:
def test_preserves_original_post_init(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
post_init_called = []
@@ -155,9 +141,7 @@ def __post_init__(self):
post_init_called.append(True)
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -171,7 +155,7 @@ class TestCache:
def test_cache_returns_same_data(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "original", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -179,9 +163,7 @@ class Config:
port: int
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -198,7 +180,7 @@ class Config:
def test_no_cache_rereads_file(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "original", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -206,9 +188,7 @@ class Config:
port: int
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=False,
debug=False,
)
@@ -225,7 +205,7 @@ class Config:
def test_cache_allows_override(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "original", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -233,9 +213,7 @@ class Config:
port: int
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -254,7 +232,7 @@ class TestLoadAsFunction:
def test_returns_loaded_dataclass(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test", "port": 3000}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -262,10 +240,8 @@ class Config:
port: int
result = load_as_function(
- loader_instance=JsonLoader(),
- file_path=json_file,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
@@ -275,17 +251,15 @@ class Config:
def test_with_prefix(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"app": {"name": "nested"}}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file, prefix="app")
@dataclass
class Config:
name: str
result = load_as_function(
- loader_instance=JsonLoader(prefix="app"),
- file_path=json_file,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
@@ -302,7 +276,7 @@ class TestCoerceFlagFieldsFunctionMode:
def test_flag_from_env_file(self, tmp_path: Path):
env_file = tmp_path / "config.env"
env_file.write_text("NAME=test\nPERMS=3\n")
- metadata = Source(file_=env_file, loader=EnvFileLoader)
+ metadata = EnvFileSource(file=env_file)
@dataclass
class Config:
@@ -310,10 +284,8 @@ class Config:
perms: _Permission
result = load_as_function(
- loader_instance=EnvFileLoader(),
- file_path=env_file,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
@@ -322,7 +294,7 @@ class Config:
def test_flag_from_json_as_int(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test", "perms": 3}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -330,10 +302,8 @@ class Config:
perms: _Permission
result = load_as_function(
- loader_instance=JsonLoader(),
- file_path=json_file,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
@@ -344,7 +314,7 @@ class TestCoerceFlagFieldsDecoratorMode:
def test_flag_from_env_file(self, tmp_path: Path):
env_file = tmp_path / "config.env"
env_file.write_text("NAME=test\nPERMS=5\n")
- metadata = Source(file_=env_file, loader=EnvFileLoader)
+ metadata = EnvFileSource(file=env_file)
@dataclass
class Config:
@@ -352,9 +322,7 @@ class Config:
perms: _Permission
decorator = make_decorator(
- loader_instance=EnvFileLoader(),
- file_path=env_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -366,7 +334,7 @@ class Config:
def test_flag_from_json_as_int(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "test", "perms": 7}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
@@ -374,9 +342,7 @@ class Config:
perms: _Permission
decorator = make_decorator(
- loader_instance=JsonLoader(),
- file_path=json_file,
- metadata=metadata,
+ source=metadata,
cache=True,
debug=False,
)
@@ -394,8 +360,8 @@ class TestFilelikeLoadAsFunction:
StringIO('{"name": "test", "port": 3000}'),
],
)
- def test_json_from_file_like(self, stream) -> None:
- metadata = Source(file_=stream, loader=JsonLoader)
+ def test_json_from_file_like(self, stream) -> None:
+ metadata = JsonSource(file=stream)
@dataclass
class Config:
@@ -403,10 +369,8 @@ class Config:
port: int
result = load_as_function(
- loader_instance=JsonLoader(),
- file_path=stream,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
@@ -416,17 +380,15 @@ class Config:
def test_path_object_directly(self, tmp_path: Path) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "direct_path"}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@dataclass
class Config:
name: str
result = load_as_function(
- loader_instance=JsonLoader(),
- file_path=json_file,
- dataclass_=Config,
- metadata=metadata,
+ source=metadata,
+ schema=Config,
debug=False,
)
diff --git a/tests/loading/test_skip_invalid_fields.py b/tests/loading/test_skip_invalid_fields.py
index 1ec2899..d39a756 100644
--- a/tests/loading/test_skip_invalid_fields.py
+++ b/tests/loading/test_skip_invalid_fields.py
@@ -6,8 +6,8 @@
import pytest
-from dature import F, Merge, MergeStrategy, Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import F, JsonSource, load
+from dature.errors import DatureConfigError
class TestMergeSkipInvalidFields:
@@ -24,12 +24,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
assert result.host == "localhost"
@@ -48,12 +46,10 @@ class Config:
port: int = 9090
result = load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
assert result.host == "localhost"
@@ -73,12 +69,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
err = exc_info.value
@@ -109,12 +103,10 @@ class Config:
db: Database
result = load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
assert result.db.host == "s2-host"
@@ -133,11 +125,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=source1, skip_if_invalid=True),
- Source(file_=source2),
- ),
- Config,
+ JsonSource(file=source1, skip_if_invalid=True),
+ JsonSource(file=source2),
+ schema=Config,
)
assert result.host == "localhost"
@@ -156,12 +146,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
assert result.host == "localhost"
@@ -178,10 +166,8 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=source1),
- ),
- Config,
+ JsonSource(file=source1),
+ schema=Config,
)
err = exc_info.value
@@ -207,13 +193,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ strategy="raise_on_conflict",
+ skip_invalid_fields=True,
)
assert result.host == "localhost"
@@ -233,14 +217,12 @@ class Config:
timeout: int = 30
result = load(
- Merge(
- Source(
- file_=source1,
- skip_if_invalid=(F[Config].port, F[Config].timeout),
- ),
- Source(file_=source2),
+ JsonSource(
+ file=source1,
+ skip_if_invalid=(F[Config].port, F[Config].timeout),
),
- Config,
+ JsonSource(file=source2),
+ schema=Config,
)
assert result.host == "localhost"
@@ -258,13 +240,11 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(
- file_=source1,
- skip_if_invalid=(F[Config].port,),
- ),
+ JsonSource(
+ file=source1,
+ skip_if_invalid=(F[Config].port,),
),
- Config,
+ schema=Config,
)
err = exc_info.value
@@ -296,12 +276,10 @@ class Config:
with caplog.at_level(logging.WARNING, logger="dature"):
load(
- Merge(
- Source(file_=source1),
- Source(file_=source2),
- skip_invalid_fields=True,
- ),
- Config,
+ JsonSource(file=source1),
+ JsonSource(file=source2),
+ schema=Config,
+ skip_invalid_fields=True,
)
warning_messages = [r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING]
@@ -322,8 +300,8 @@ class Config:
port: int = 8080
result = load(
- Source(file_=json_file, skip_if_invalid=True),
- Config,
+ JsonSource(file=json_file, skip_if_invalid=True),
+ schema=Config,
)
assert result.host == "localhost"
@@ -340,8 +318,8 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=json_file, skip_if_invalid=True),
- Config,
+ JsonSource(file=json_file, skip_if_invalid=True),
+ schema=Config,
)
err = exc_info.value
@@ -357,7 +335,7 @@ def test_single_source_decorator_skip(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "localhost", "port": "abc"}')
- @load(Source(file_=json_file, skip_if_invalid=True))
+ @load(JsonSource(file=json_file, skip_if_invalid=True))
@dataclass
class Config:
host: str
@@ -378,11 +356,11 @@ class Config:
timeout: int = 30
result = load(
- Source(
- file_=json_file,
+ JsonSource(
+ file=json_file,
skip_if_invalid=(F[Config].port,),
),
- Config,
+ schema=Config,
)
assert result.host == "localhost"
@@ -400,8 +378,8 @@ class Config:
with caplog.at_level(logging.WARNING, logger="dature"):
load(
- Source(file_=json_file, skip_if_invalid=True),
- Config,
+ JsonSource(file=json_file, skip_if_invalid=True),
+ schema=Config,
)
warning_messages = [r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING]
@@ -423,11 +401,11 @@ class Config:
inner: Inner = None # type: ignore[assignment]
result = load(
- Source(
- file_=source,
+ JsonSource(
+ file=source,
skip_if_invalid=(F[Config].port,),
),
- Config,
+ schema=Config,
)
assert result.port == 3000
@@ -450,14 +428,12 @@ class Config:
inner: Inner
result = load(
- Merge(
- Source(
- file_=source1,
- skip_if_invalid=(F[Config].inner.port,),
- ),
- Source(file_=source2),
+ JsonSource(
+ file=source1,
+ skip_if_invalid=(F[Config].inner.port,),
),
- Config,
+ JsonSource(file=source2),
+ schema=Config,
)
assert result.port == 8080
@@ -480,14 +456,12 @@ class Config:
inner: Inner
result = load(
- Merge(
- Source(
- file_=source1,
- skip_if_invalid=(F[Config].port, F[Config].inner.port),
- ),
- Source(file_=source2),
+ JsonSource(
+ file=source1,
+ skip_if_invalid=(F[Config].port, F[Config].inner.port),
),
- Config,
+ JsonSource(file=source2),
+ schema=Config,
)
assert result.port == 8080
diff --git a/tests/loading/test_source_loading.py b/tests/loading/test_source_loading.py
index 55155a2..adb05b6 100644
--- a/tests/loading/test_source_loading.py
+++ b/tests/loading/test_source_loading.py
@@ -6,8 +6,19 @@
import pytest
-from dature import Merge, Source, load
-from dature.errors.exceptions import DatureConfigError, EnvVarExpandError
+from dature import EnvFileSource, IniSource, JsonSource, Toml11Source, Yaml12Source, load
+from dature.errors import DatureConfigError, EnvVarExpandError
+from dature.loading.merge_config import MergeConfig
+from dature.loading.source_loading import (
+ apply_merge_skip_invalid,
+ resolve_expand_env_vars,
+ resolve_mask_secrets,
+ resolve_secret_field_names,
+ resolve_skip_invalid,
+ resolve_source_params,
+ should_skip_broken,
+)
+from dature.sources.env_ import EnvSource
class TestSkipBrokenSources:
@@ -23,12 +34,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=valid),
- Source(file_=missing),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=missing),
+ schema=Config,
+ skip_broken_sources=True,
)
assert result.host == "localhost"
@@ -47,12 +56,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=valid),
- Source(file_=broken),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=broken),
+ schema=Config,
+ skip_broken_sources=True,
)
assert result.host == "localhost"
@@ -71,12 +78,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=broken_a),
- Source(file_=broken_b),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=broken_a),
+ JsonSource(file=broken_b),
+ schema=Config,
+ skip_broken_sources=True,
)
assert str(exc_info.value) == "Config loading errors (1)"
@@ -95,11 +100,9 @@ class Config:
with pytest.raises(DatureConfigError):
load(
- Merge(
- Source(file_=valid),
- Source(file_=broken),
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=broken),
+ schema=Config,
)
def test_skip_middle_source(self, tmp_path: Path):
@@ -118,13 +121,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=broken),
- Source(file_=c),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=broken),
+ JsonSource(file=c),
+ schema=Config,
+ skip_broken_sources=True,
)
assert result.host == "a-host"
@@ -143,12 +144,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=valid),
- Source(file_=broken, skip_if_broken=True),
- skip_broken_sources=False,
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=broken, skip_if_broken=True),
+ schema=Config,
+ skip_broken_sources=False,
)
assert result.host == "localhost"
@@ -168,12 +167,10 @@ class Config:
with pytest.raises(DatureConfigError):
load(
- Merge(
- Source(file_=valid),
- Source(file_=broken, skip_if_broken=False),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=broken, skip_if_broken=False),
+ schema=Config,
+ skip_broken_sources=True,
)
def test_per_source_none_uses_global(self, tmp_path: Path):
@@ -189,20 +186,18 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=valid),
- Source(file_=broken, skip_if_broken=None),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=valid),
+ JsonSource(file=broken, skip_if_broken=None),
+ schema=Config,
+ skip_broken_sources=True,
)
assert result.host == "localhost"
assert result.port == 3000
def test_empty_sources_raises(self):
- with pytest.raises(TypeError, match="Merge\\(\\) requires at least one Source"):
- Merge()
+ with pytest.raises(TypeError, match="load\\(\\) requires at least one Source"):
+ load(schema=int)
def test_all_sources_broken_mixed_errors(self, tmp_path: Path):
missing = str(tmp_path / "does_not_exist.json")
@@ -216,12 +211,10 @@ class Config:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Merge(
- Source(file_=missing),
- Source(file_=broken),
- skip_broken_sources=True,
- ),
- Config,
+ JsonSource(file=missing),
+ JsonSource(file=broken),
+ schema=Config,
+ skip_broken_sources=True,
)
assert str(exc_info.value) == "Config loading errors (1)"
@@ -240,10 +233,8 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=json_file),
- ),
- Config,
+ JsonSource(file=json_file),
+ schema=Config,
)
assert result.host == "from-env"
@@ -259,11 +250,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=json_file),
- expand_env_vars="disabled",
- ),
- Config,
+ JsonSource(file=json_file),
+ schema=Config,
+ expand_env_vars="disabled",
)
assert result.host == "$DATURE_HOST"
@@ -280,11 +269,9 @@ class Config:
with pytest.raises(EnvVarExpandError):
load(
- Merge(
- Source(file_=json_file),
- expand_env_vars="strict",
- ),
- Config,
+ JsonSource(file=json_file),
+ schema=Config,
+ expand_env_vars="strict",
)
def test_source_overrides_merge(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch):
@@ -298,11 +285,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=json_file, expand_env_vars="disabled"),
- expand_env_vars="default",
- ),
- Config,
+ JsonSource(file=json_file, expand_env_vars="disabled"),
+ schema=Config,
+ expand_env_vars="default",
)
assert result.host == "$DATURE_HOST"
@@ -318,11 +303,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=json_file, expand_env_vars=None),
- expand_env_vars="disabled",
- ),
- Config,
+ JsonSource(file=json_file, expand_env_vars=None),
+ schema=Config,
+ expand_env_vars="disabled",
)
assert result.host == "$DATURE_HOST"
@@ -338,11 +321,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=json_file),
- expand_env_vars="empty",
- ),
- Config,
+ JsonSource(file=json_file),
+ schema=Config,
+ expand_env_vars="empty",
)
assert result.host == ""
@@ -359,32 +340,44 @@ class StrictConfig:
class TestEnvVarExpandErrorFormat:
@pytest.mark.parametrize(
- ("ext", "prefix", "source_label", "line", "line_content"),
+ ("source_cls", "source_kwargs", "source_label", "line", "line_content"),
[
- ("yaml", None, "FILE", 1, 'host: "$MISSING_HOST"'),
- ("json", None, "FILE", 1, '{"host": "$MISSING_HOST", "port": 8080}'),
- ("toml", None, "FILE", 1, 'host = "$MISSING_HOST"'),
- ("ini", "section", "FILE", 2, "host = $MISSING_HOST"),
- ("env", None, "ENV FILE", 1, "HOST=$MISSING_HOST"),
+ (Yaml12Source, {"file": FIXTURES_DIR / "env_expand_strict.yaml"}, "FILE", 1, 'host: "$MISSING_HOST"'),
+ (
+ JsonSource,
+ {"file": FIXTURES_DIR / "env_expand_strict.json"},
+ "FILE",
+ 1,
+ '{"host": "$MISSING_HOST", "port": 8080}',
+ ),
+ (Toml11Source, {"file": FIXTURES_DIR / "env_expand_strict.toml"}, "FILE", 1, 'host = "$MISSING_HOST"'),
+ (
+ IniSource,
+ {"file": FIXTURES_DIR / "env_expand_strict.ini", "prefix": "section"},
+ "FILE",
+ 2,
+ "host = $MISSING_HOST",
+ ),
+ (EnvFileSource, {"file": FIXTURES_DIR / "env_expand_strict.env"}, "ENV FILE", 1, "HOST=$MISSING_HOST"),
],
ids=["yaml", "json", "toml", "ini", "env"],
)
def test_error_format(
self,
monkeypatch: pytest.MonkeyPatch,
- ext: str,
- prefix: str | None,
+ source_cls: type,
+ source_kwargs: dict[str, object],
source_label: str,
line: int,
line_content: str,
) -> None:
monkeypatch.delenv("MISSING_HOST", raising=False)
- file = FIXTURES_DIR / f"env_expand_strict.{ext}"
+ file = source_kwargs["file"]
with pytest.raises(EnvVarExpandError) as exc_info:
load(
- Source(file_=file, prefix=prefix, expand_env_vars="strict"),
- StrictConfig,
+ source_cls(**source_kwargs, expand_env_vars="strict"),
+ schema=StrictConfig,
)
assert str(exc_info.value) == dedent(f"""\
@@ -394,3 +387,200 @@ def test_error_format(
├── {line_content}
└── {source_label} '{file}', line {line}
""")
+
+
+class TestShouldSkipBroken:
+ @pytest.mark.parametrize(
+ ("skip_if_broken", "skip_broken_sources", "expected"),
+ [
+ (True, False, True),
+ (False, True, False),
+ (None, True, True),
+ ],
+ ids=["source-true", "source-false", "source-none-uses-merge"],
+ )
+ def test_resolve(
+ self,
+ tmp_path: Path,
+ skip_if_broken: bool | None,
+ skip_broken_sources: bool,
+ expected: bool,
+ ):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ kwargs = {} if skip_if_broken is None else {"skip_if_broken": skip_if_broken}
+ source = JsonSource(file=json_file, **kwargs)
+ merge = MergeConfig(sources=(source,), skip_broken_sources=skip_broken_sources)
+
+ assert should_skip_broken(source, merge) is expected
+
+ def test_env_source_warns(self, caplog: pytest.LogCaptureFixture):
+ source = EnvSource(skip_if_broken=True)
+ merge = MergeConfig(sources=(source,))
+
+ should_skip_broken(source, merge)
+
+ assert "skip_if_broken has no effect on environment variable sources" in caplog.text
+
+
+class TestResolveExpandEnvVars:
+ @pytest.mark.parametrize(
+ ("source_expand", "merge_expand", "expected"),
+ [
+ ("disabled", "strict", "disabled"),
+ (None, "strict", "strict"),
+ ],
+ ids=["source-overrides", "source-none-inherits"],
+ )
+ def test_resolve(
+ self,
+ tmp_path: Path,
+ source_expand: str | None,
+ merge_expand: str,
+ expected: str,
+ ):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ kwargs = {} if source_expand is None else {"expand_env_vars": source_expand}
+ source = JsonSource(file=json_file, **kwargs)
+ merge = MergeConfig(sources=(source,), expand_env_vars=merge_expand)
+
+ assert resolve_expand_env_vars(source, merge) == expected
+
+
+class TestResolveSkipInvalid:
+ @pytest.mark.parametrize(
+ ("source_skip", "merge_skip", "expected"),
+ [
+ (True, False, True),
+ (None, True, True),
+ ],
+ ids=["source-overrides", "source-none-inherits"],
+ )
+ def test_resolve(
+ self,
+ tmp_path: Path,
+ source_skip: bool | None,
+ merge_skip: bool,
+ expected: bool,
+ ):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ kwargs = {} if source_skip is None else {"skip_if_invalid": source_skip}
+ source = JsonSource(file=json_file, **kwargs)
+ merge = MergeConfig(sources=(source,), skip_invalid_fields=merge_skip)
+
+ assert resolve_skip_invalid(source, merge) is expected
+
+
+class TestResolveMaskSecrets:
+ @pytest.mark.usefixtures("_reset_config")
+ def test_source_overrides_all(self, tmp_path: Path):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ source = JsonSource(file=json_file, mask_secrets=False)
+ merge = MergeConfig(sources=(source,), mask_secrets=True)
+
+ assert resolve_mask_secrets(source, merge) is False
+
+ @pytest.mark.usefixtures("_reset_config")
+ def test_merge_overrides_config(self, tmp_path: Path):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ source = JsonSource(file=json_file)
+ merge = MergeConfig(sources=(source,), mask_secrets=False)
+
+ assert resolve_mask_secrets(source, merge) is False
+
+ @pytest.mark.usefixtures("_reset_config")
+ def test_falls_back_to_config(self, tmp_path: Path):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ source = JsonSource(file=json_file)
+ merge = MergeConfig(sources=(source,))
+
+ result = resolve_mask_secrets(source, merge)
+
+ assert isinstance(result, bool)
+
+
+class TestResolveSecretFieldNames:
+ @pytest.mark.parametrize(
+ ("source_names", "merge_names", "expected"),
+ [
+ (("api_key",), ("token",), ("api_key", "token")),
+ (None, ("token",), ("token",)),
+ (None, None, ()),
+ ],
+ ids=["combines-both", "source-none", "both-none"],
+ )
+ def test_resolve(
+ self,
+ tmp_path: Path,
+ source_names: tuple[str, ...] | None,
+ merge_names: tuple[str, ...] | None,
+ expected: tuple[str, ...],
+ ):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+ kwargs = {} if source_names is None else {"secret_field_names": source_names}
+ source = JsonSource(file=json_file, **kwargs)
+ merge_kwargs = {} if merge_names is None else {"secret_field_names": merge_names}
+ merge = MergeConfig(sources=(source,), **merge_kwargs)
+
+ assert resolve_secret_field_names(source, merge) == expected
+
+
+class TestApplyMergeSkipInvalid:
+ def test_skip_false_returns_raw(self, tmp_path: Path):
+ json_file = tmp_path / "c.json"
+ json_file.write_text("{}")
+
+ @dataclass
+ class Cfg:
+ name: str
+
+ source = JsonSource(file=json_file)
+ merge = MergeConfig(sources=(source,), skip_invalid_fields=False)
+ raw = {"name": "hello"}
+
+ result = apply_merge_skip_invalid(
+ raw=raw,
+ source=source,
+ merge_meta=merge,
+ schema=Cfg,
+ source_index=0,
+ )
+
+ assert result.cleaned_dict == raw
+ assert result.skipped_paths == []
+
+
+class TestResolveSourceParamsNestedStrategy:
+ @pytest.mark.parametrize(
+ ("source_strategy", "load_strategy", "expected"),
+ [
+ (None, "json", "json"),
+ ("flat", "json", "flat"),
+ ("json", "flat", "json"),
+ (None, None, "flat"),
+ ],
+ ids=[
+ "source-none-uses-load-level",
+ "source-explicit-flat-overrides-load-level",
+ "source-explicit-json-overrides-load-level",
+ "source-none-no-load-level-uses-config-default",
+ ],
+ )
+ def test_resolve(
+ self,
+ source_strategy: str | None,
+ load_strategy: str | None,
+ expected: str,
+ ):
+ kwargs = {} if source_strategy is None else {"nested_resolve_strategy": source_strategy}
+ source = EnvSource(**kwargs)
+
+ resolved = resolve_source_params(source, load_nested_resolve_strategy=load_strategy)
+
+ assert resolved.nested_resolve_strategy == expected
diff --git a/tests/masking/test_detection.py b/tests/masking/test_detection.py
index 7c391ce..7f2b3c1 100644
--- a/tests/masking/test_detection.py
+++ b/tests/masking/test_detection.py
@@ -1,9 +1,15 @@
from dataclasses import dataclass
from typing import Annotated
+import pytest
+
from dature.fields.payment_card import PaymentCardNumber
from dature.fields.secret_str import SecretStr
-from dature.masking.detection import build_secret_paths
+from dature.masking.detection import (
+ _is_secret_type,
+ _matches_secret_pattern,
+ build_secret_paths,
+)
class TestBuildSecretPaths:
@@ -97,3 +103,50 @@ class Cfg:
paths1 = build_secret_paths(Cfg)
paths2 = build_secret_paths(Cfg)
assert paths1 is paths2
+
+ def test_non_dataclass_returns_empty(self):
+ result = build_secret_paths(str)
+
+ assert result == frozenset()
+
+ def test_cache_differs_by_extra_patterns(self):
+ @dataclass
+ class Cfg2:
+ my_field: str
+
+ paths_without = build_secret_paths(Cfg2)
+ paths_with = build_secret_paths(Cfg2, extra_patterns=("my_field",))
+
+ assert paths_without == frozenset()
+ assert paths_with == frozenset({"my_field"})
+
+
+class TestIsSecretType:
+ @pytest.mark.parametrize(
+ ("field_type", "expected"),
+ [
+ (str, False),
+ (SecretStr, True),
+ (PaymentCardNumber, True),
+ (SecretStr | None, True),
+ (Annotated[SecretStr, "meta"], True),
+ (Annotated[SecretStr | None, "meta"], True),
+ ],
+ ids=["plain-str", "secret-str", "payment-card", "optional", "annotated", "annotated-optional"],
+ )
+ def test_detection(self, field_type: type, expected: bool):
+ assert _is_secret_type(field_type) is expected
+
+
+class TestMatchesSecretPattern:
+ @pytest.mark.parametrize(
+ ("name", "patterns", "expected"),
+ [
+ ("DB_PASSWORD", ("password",), True),
+ ("my_api_key_v2", ("api_key",), True),
+ ("hostname", ("password", "secret"), False),
+ ],
+ ids=["case-insensitive", "substring", "no-match"],
+ )
+ def test_matching(self, name: str, patterns: tuple[str, ...], expected: bool):
+ assert _matches_secret_pattern(name, patterns) is expected
diff --git a/tests/masking/test_masking.py b/tests/masking/test_masking.py
index ea87625..b52f9a8 100644
--- a/tests/masking/test_masking.py
+++ b/tests/masking/test_masking.py
@@ -5,9 +5,8 @@
import pytest
-from dature import Merge, Source, configure, get_load_report, load
-from dature.config import MaskingConfig
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, configure, get_load_report, load
+from dature.errors import DatureConfigError
from dature.fields.secret_str import SecretStr
from dature.load_report import FieldOrigin, SourceEntry
from dature.masking.masking import mask_env_line, mask_field_origins, mask_json_value, mask_source_entries, mask_value
@@ -58,11 +57,11 @@ def test_mask_value_with_custom_config(
expected: str,
):
configure(
- masking=MaskingConfig(
- mask=mask,
- visible_prefix=visible_prefix,
- visible_suffix=visible_suffix,
- ),
+ masking={
+ "mask": mask,
+ "visible_prefix": visible_prefix,
+ "visible_suffix": visible_suffix,
+ },
)
assert mask_value(input_value) == expected
@@ -216,7 +215,7 @@ class Cfg:
password: str
host: str
- result = load(Source(file_=json_file), Cfg, debug=True)
+ result = load(JsonSource(file=json_file), schema=Cfg, debug=True)
report = get_load_report(result)
assert report is not None
@@ -241,11 +240,9 @@ class Cfg:
host: str
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Cfg,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Cfg,
debug=True,
)
@@ -269,7 +266,7 @@ class Cfg:
api_key: SecretStr
host: str
- result = load(Source(file_=json_file), Cfg, debug=True)
+ result = load(JsonSource(file=json_file), schema=Cfg, debug=True)
report = get_load_report(result)
assert report is not None
@@ -290,7 +287,7 @@ class Cfg:
host: str
with caplog.at_level("DEBUG", logger="dature"):
- load(Source(file_=json_file), Cfg, debug=True)
+ load(JsonSource(file=json_file), schema=Cfg, debug=True)
assert _SECRET_VALUE not in caplog.text
@@ -308,11 +305,9 @@ class Cfg:
with caplog.at_level("DEBUG", logger="dature"):
load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Cfg,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Cfg,
debug=True,
)
@@ -328,7 +323,7 @@ class Cfg:
port: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file), Cfg)
+ load(JsonSource(file=json_file), schema=Cfg)
assert _SECRET_VALUE not in str(exc_info.value)
@@ -336,11 +331,7 @@ def test_merge_decorator_error_message_masks_secrets(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"password": "allowed", "host": "prod"}')
- meta = Merge(
- Source(file_=json_file),
- )
-
- @load(meta)
+ @load(JsonSource(file=json_file))
@dataclass
class Cfg:
password: Literal["allowed"]
@@ -363,7 +354,7 @@ class Cfg:
host: str
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file, mask_secrets=True), Cfg)
+ load(JsonSource(file=json_file, mask_secrets=True), schema=Cfg)
assert str(exc_info.value) == "Cfg loading errors (1)"
assert str(exc_info.value.exceptions[0]) == (
@@ -385,7 +376,7 @@ class Cfg:
host: str
with patch("dature.masking.masking._heuristic_detector", None), pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file, mask_secrets=True), Cfg)
+ load(JsonSource(file=json_file, mask_secrets=True), schema=Cfg)
assert str(exc_info.value) == "Cfg loading errors (1)"
assert str(exc_info.value.exceptions[0]) == (
@@ -417,8 +408,8 @@ class Cfg:
password: str
host: str
- configure(masking=MaskingConfig(mask_secrets=mask_secrets))
- result = load(Source(file_=json_file), Cfg, debug=True)
+ configure(masking={"mask_secrets": mask_secrets})
+ result = load(JsonSource(file=json_file), schema=Cfg, debug=True)
report = get_load_report(result)
assert report is not None
@@ -448,10 +439,10 @@ class Cfg:
password: str
port: int
- configure(masking=MaskingConfig(mask_secrets=mask_secrets))
+ configure(masking={"mask_secrets": mask_secrets})
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file), Cfg)
+ load(JsonSource(file=json_file), schema=Cfg)
assert str(exc_info.value) == "Cfg loading errors (1)"
content = f'{{"password": "{expected_password}", "port": "not_a_number"}}'
@@ -462,3 +453,88 @@ class Cfg:
f" │ {' ' * caret_pos}{'^^^^^^^^^^^^'}\n"
f" └── FILE '{json_file}', line 1"
)
+
+
+@pytest.mark.usefixtures("_reset_config")
+class TestLoadLevelMaskingParams:
+ def test_load_level_mask_secrets(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text(f'{{"password": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}')
+
+ @dataclass
+ class Cfg:
+ password: str
+ host: str
+
+ result = load(JsonSource(file=json_file), schema=Cfg, debug=True, mask_secrets=True)
+
+ report = get_load_report(result)
+ assert report is not None
+ assert report.merged_data == {"password": _MASKED_SECRET, "host": _PUBLIC_VALUE}
+
+ def test_load_level_secret_field_names(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text(f'{{"my_token": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}')
+
+ @dataclass
+ class Cfg:
+ my_token: str
+ host: str
+
+ result = load(
+ JsonSource(file=json_file),
+ schema=Cfg,
+ debug=True,
+ mask_secrets=True,
+ secret_field_names=("my_token",),
+ )
+
+ report = get_load_report(result)
+ assert report is not None
+ assert report.merged_data == {"my_token": _MASKED_SECRET, "host": _PUBLIC_VALUE}
+
+ def test_source_mask_secrets_overrides_load_level(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text(f'{{"password": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}')
+
+ @dataclass
+ class Cfg:
+ password: str
+ host: str
+
+ result = load(
+ JsonSource(file=json_file, mask_secrets=False),
+ schema=Cfg,
+ debug=True,
+ mask_secrets=True,
+ )
+
+ report = get_load_report(result)
+ assert report is not None
+ assert report.merged_data == {"password": _SECRET_VALUE, "host": _PUBLIC_VALUE}
+
+ def test_source_and_load_secret_field_names_combined(self, tmp_path: Path):
+ json_file = tmp_path / "config.json"
+ json_file.write_text(
+ f'{{"nickname": "{_SECRET_VALUE}", "label": "{_SECRET_VALUE}", "host": "{_PUBLIC_VALUE}"}}',
+ )
+
+ @dataclass
+ class Cfg:
+ nickname: str
+ label: str
+ host: str
+
+ result = load(
+ JsonSource(file=json_file, secret_field_names=("label",)),
+ schema=Cfg,
+ debug=True,
+ mask_secrets=True,
+ secret_field_names=("nickname",),
+ )
+
+ report = get_load_report(result)
+ assert report is not None
+ assert report.merged_data["label"] == _MASKED_SECRET
+ assert report.merged_data["nickname"] == _MASKED_SECRET
+ assert report.merged_data["host"] == _PUBLIC_VALUE
diff --git a/tests/merging/test_deep_merge.py b/tests/merging/test_deep_merge.py
index b502f29..02eeb36 100644
--- a/tests/merging/test_deep_merge.py
+++ b/tests/merging/test_deep_merge.py
@@ -3,7 +3,6 @@
import pytest
from dature.merging.deep_merge import deep_merge, deep_merge_first_wins, deep_merge_last_wins
-from dature.metadata import MergeStrategy
class TestDeepMerge:
@@ -13,112 +12,112 @@ class TestDeepMerge:
pytest.param(
{"a": 1, "b": 2},
{"b": 3, "c": 4},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": 1, "b": 3, "c": 4},
id="flat_last_wins",
),
pytest.param(
{"a": 1, "b": 2},
{"b": 3, "c": 4},
- MergeStrategy.FIRST_WINS,
+ "first_wins",
{"a": 1, "b": 2, "c": 4},
id="flat_first_wins",
),
pytest.param(
{"db": {"host": "localhost", "port": 5432}},
{"db": {"host": "prod-host", "name": "mydb"}},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"db": {"host": "prod-host", "port": 5432, "name": "mydb"}},
id="nested_last_wins",
),
pytest.param(
{"db": {"host": "localhost", "port": 5432}},
{"db": {"host": "prod-host", "name": "mydb"}},
- MergeStrategy.FIRST_WINS,
+ "first_wins",
{"db": {"host": "localhost", "port": 5432, "name": "mydb"}},
id="nested_first_wins",
),
pytest.param(
{"a": {"b": {"c": 1, "d": 2}}},
{"a": {"b": {"c": 99, "e": 3}}},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": {"b": {"c": 99, "d": 2, "e": 3}}},
id="deeply_nested",
),
pytest.param(
{"tags": ["a", "b"]},
{"tags": ["c"]},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"tags": ["c"]},
id="lists_replaced_entirely",
),
pytest.param(
{},
{"a": 1},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": 1},
id="empty_base",
),
pytest.param(
{"a": 1},
{},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": 1},
id="empty_override",
),
pytest.param(
{},
{},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{},
id="both_empty",
),
pytest.param(
"old",
"new",
- MergeStrategy.LAST_WINS,
+ "last_wins",
"new",
id="scalar_last_wins",
),
pytest.param(
"old",
"new",
- MergeStrategy.FIRST_WINS,
+ "first_wins",
"old",
id="scalar_first_wins",
),
pytest.param(
{"a": None},
{"a": 1},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": 1},
id="none_value_last_wins",
),
pytest.param(
{"a": None},
{"a": 1},
- MergeStrategy.FIRST_WINS,
+ "first_wins",
{"a": None},
id="none_value_first_wins",
),
pytest.param(
{"a": {"nested": 1}},
{"a": "scalar"},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": "scalar"},
id="dict_vs_scalar_last_wins",
),
pytest.param(
{"a": "scalar"},
{"a": {"nested": 1}},
- MergeStrategy.LAST_WINS,
+ "last_wins",
{"a": {"nested": 1}},
id="scalar_vs_dict_last_wins",
),
pytest.param(
{"a": {"nested": 1}},
{"a": "scalar"},
- MergeStrategy.FIRST_WINS,
+ "first_wins",
{"a": {"nested": 1}},
id="dict_vs_scalar_first_wins",
),
@@ -129,7 +128,7 @@ def test_merge(self, base, override, strategy, expected):
def test_raise_on_conflict_strategy_raises_value_error(self):
with pytest.raises(ValueError, match="RAISE_ON_CONFLICT"):
- deep_merge({"a": 1}, {"a": 2}, strategy=MergeStrategy.RAISE_ON_CONFLICT)
+ deep_merge({"a": 1}, {"a": 2}, strategy="raise_on_conflict")
class TestDeepMergeLastWins:
diff --git a/tests/merging/test_field_group.py b/tests/merging/test_field_group.py
index eae4bbc..547c0d4 100644
--- a/tests/merging/test_field_group.py
+++ b/tests/merging/test_field_group.py
@@ -2,12 +2,11 @@
from dataclasses import dataclass
from pathlib import Path
-from textwrap import dedent
import pytest
-from dature import FieldGroup, FieldMergeStrategy, Merge, MergeRule, MergeStrategy, Source, load
-from dature.errors.exceptions import FieldGroupError
+from dature import JsonSource, load
+from dature.errors import FieldGroupError
from dature.field_path import F
@@ -25,13 +24,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- strategy=MergeStrategy.LAST_WINS,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ strategy="last_wins",
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "remote"
@@ -50,13 +47,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_WINS,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=first),
+ JsonSource(file=second),
+ schema=Config,
+ strategy="first_wins",
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "first-host"
@@ -77,12 +72,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "localhost"
@@ -102,12 +95,10 @@ class Config:
debug: bool
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "localhost"
@@ -123,8 +114,8 @@ def test_partial_change_raises(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote"}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Config:
@@ -133,21 +124,18 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source {overrides_meta!r})
- unchanged: port (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (host, port) partially overridden in source 1\n"
+ f" changed: host (from source {overrides_meta!r})\n"
+ f" unchanged: port (from source {defaults_meta!r})"
+ )
def test_partial_change_field_present_but_equal(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -156,8 +144,8 @@ def test_partial_change_field_present_but_equal(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote", "port": 3000}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Config:
@@ -166,21 +154,18 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source {overrides_meta!r})
- unchanged: port (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (host, port) partially overridden in source 1\n"
+ f" changed: host (from source {overrides_meta!r})\n"
+ f" unchanged: port (from source {defaults_meta!r})"
+ )
def test_partial_change_with_first_wins(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -196,13 +181,11 @@ class Config:
with pytest.raises(FieldGroupError):
load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- strategy=MergeStrategy.FIRST_WINS,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ strategy="first_wins",
+ field_groups=((F[Config].host, F[Config].port),),
)
def test_partial_change_with_raise_on_conflict(self, tmp_path: Path):
@@ -219,13 +202,11 @@ class Config:
with pytest.raises(FieldGroupError):
load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- strategy=MergeStrategy.RAISE_ON_CONFLICT,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ strategy="raise_on_conflict",
+ field_groups=((F[Config].host, F[Config].port),),
)
@@ -237,8 +218,8 @@ def test_auto_expand_nested_dataclass(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"database": {"host": "remote"}}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Database:
@@ -251,21 +232,18 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].database),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].database,),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (database.host, database.port) partially overridden in source 1
- changed: database.host (from source {overrides_meta!r})
- unchanged: database.port (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (database.host, database.port) partially overridden in source 1\n"
+ f" changed: database.host (from source {overrides_meta!r})\n"
+ f" unchanged: database.port (from source {defaults_meta!r})"
+ )
def test_auto_expand_all_changed_ok(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -284,12 +262,10 @@ class Config:
database: Database
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].database),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].database,),),
)
assert result.database.host == "remote"
@@ -307,9 +283,9 @@ def test_three_sources_violation_on_second(self, tmp_path: Path):
c = tmp_path / "c.json"
c.write_text('{"host": "c-host", "port": 3000}')
- a_meta = Source(file_=a)
- b_meta = Source(file_=b)
- c_meta = Source(file_=c)
+ a_meta = JsonSource(file=a)
+ b_meta = JsonSource(file=b)
+ c_meta = JsonSource(file=c)
@dataclass
class Config:
@@ -318,22 +294,19 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- a_meta,
- b_meta,
- c_meta,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ a_meta,
+ b_meta,
+ c_meta,
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source {b_meta!r})
- unchanged: port (from source {a_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (host, port) partially overridden in source 1\n"
+ f" changed: host (from source {b_meta!r})\n"
+ f" unchanged: port (from source {a_meta!r})"
+ )
def test_three_sources_all_ok(self, tmp_path: Path):
a = tmp_path / "a.json"
@@ -351,13 +324,11 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- Source(file_=c),
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ JsonSource(file=c),
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "c-host"
@@ -372,8 +343,8 @@ def test_one_ok_one_violated(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote", "port": 9090, "user": "root"}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Config:
@@ -384,24 +355,21 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(
- FieldGroup(F[Config].host, F[Config].port),
- FieldGroup(F[Config].user, F[Config].password),
- ),
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=(
+ (F[Config].host, F[Config].port),
+ (F[Config].user, F[Config].password),
),
- Config,
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (user, password) partially overridden in source 1
- changed: user (from source {overrides_meta!r})
- unchanged: password (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (user, password) partially overridden in source 1\n"
+ f" changed: user (from source {overrides_meta!r})\n"
+ f" unchanged: password (from source {defaults_meta!r})"
+ )
class TestFieldGroupWithFieldMerges:
@@ -419,13 +387,11 @@ class Config:
tags: list[str]
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_merges=(MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),),
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_merges={F[Config].tags: "append"},
+ field_groups=((F[Config].host, F[Config].port),),
)
assert result.host == "remote"
@@ -441,13 +407,11 @@ def test_decorator_with_field_groups(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote", "port": 9090}')
- meta = Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F["Config"].host, F["Config"].port),),
+ @load(
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ field_groups=((F["Config"].host, F["Config"].port),),
)
-
- @load(meta)
@dataclass
class Config:
host: str
@@ -464,13 +428,11 @@ def test_decorator_partial_change_raises(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote"}')
- meta = Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F["Config"].host, F["Config"].port),),
+ @load(
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ field_groups=((F["Config"].host, F["Config"].port),),
)
-
- @load(meta)
@dataclass
class Config:
host: str
@@ -488,8 +450,8 @@ def test_error_message_format(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote", "debug": true}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Config:
@@ -499,21 +461,18 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].host, F[Config].port),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].host, F[Config].port),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source {overrides_meta!r})
- unchanged: port (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (host, port) partially overridden in source 1\n"
+ f" changed: host (from source {overrides_meta!r})\n"
+ f" unchanged: port (from source {defaults_meta!r})"
+ )
def test_multiple_violations_message(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -522,8 +481,8 @@ def test_multiple_violations_message(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"host": "remote", "user": "root"}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Config:
@@ -534,28 +493,26 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(
- FieldGroup(F[Config].host, F[Config].port),
- FieldGroup(F[Config].user, F[Config].password),
- ),
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=(
+ (F[Config].host, F[Config].port),
+ (F[Config].user, F[Config].password),
),
- Config,
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (2)
-
- Field group (host, port) partially overridden in source 1
- changed: host (from source {overrides_meta!r})
- unchanged: port (from source {defaults_meta!r})
-
- Field group (user, password) partially overridden in source 1
- changed: user (from source {overrides_meta!r})
- unchanged: password (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (2)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (host, port) partially overridden in source 1\n"
+ f" changed: host (from source {overrides_meta!r})\n"
+ f" unchanged: port (from source {defaults_meta!r})"
+ )
+ assert str(exc_info.value.exceptions[1]) == (
+ f" Field group (user, password) partially overridden in source 1\n"
+ f" changed: user (from source {overrides_meta!r})\n"
+ f" unchanged: password (from source {defaults_meta!r})"
+ )
class TestFieldGroupMixedExpandAndFlat:
@@ -581,12 +538,10 @@ class Config:
timeout: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].database, F[Config].timeout),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].database, F[Config].timeout),),
)
assert result.database.host == "remote"
@@ -615,12 +570,10 @@ class Config:
timeout: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].database, F[Config].timeout),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].database, F[Config].timeout),),
)
assert result.database.host == "localhost"
@@ -636,8 +589,8 @@ def test_flat_changed_nested_not(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"timeout": 60}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Database:
@@ -651,21 +604,20 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].database, F[Config].timeout),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].database, F[Config].timeout),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (database.host, database.port, timeout) partially overridden in source 1
- changed: timeout (from source {overrides_meta!r})
- unchanged: database.host (from source {defaults_meta!r}), database.port (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ defaults_repr = repr(defaults_meta)
+ overrides_repr = repr(overrides_meta)
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (database.host, database.port, timeout) partially overridden in source 1\n"
+ f" changed: timeout (from source {overrides_repr})\n"
+ f" unchanged: database.host (from source {defaults_repr}), database.port (from source {defaults_repr})"
+ )
def test_nested_partial_flat_not(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -676,8 +628,8 @@ def test_nested_partial_flat_not(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"database": {"host": "remote"}}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Database:
@@ -691,21 +643,18 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].database, F[Config].timeout),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].database, F[Config].timeout),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (database.host, database.port, timeout) partially overridden in source 1
- changed: database.host (from source {overrides_meta!r})
- unchanged: database.port (from source {defaults_meta!r}), timeout (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (database.host, database.port, timeout) partially overridden in source 1\n"
+ f" changed: database.host (from source {overrides_meta!r})\n"
+ f" unchanged: database.port (from source {defaults_meta!r}), timeout (from source {defaults_meta!r})"
+ )
def test_nested_all_changed_flat_not(self, tmp_path: Path):
defaults = tmp_path / "defaults.json"
@@ -716,10 +665,10 @@ def test_nested_all_changed_flat_not(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"database": {"host": "remote", "port": 3306}}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
- d = repr(defaults_meta)
- o = repr(overrides_meta)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
+ defaults_repr = repr(defaults_meta)
+ overrides_repr = repr(overrides_meta)
@dataclass
class Database:
@@ -733,21 +682,19 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].database, F[Config].timeout),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].database, F[Config].timeout),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (database.host, database.port, timeout) partially overridden in source 1
- changed: database.host (from source {o}), database.port (from source {o})
- unchanged: timeout (from source {d})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ changed = f"database.host (from source {overrides_repr}), database.port (from source {overrides_repr})"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (database.host, database.port, timeout) partially overridden in source 1\n"
+ f" changed: {changed}\n"
+ f" unchanged: timeout (from source {defaults_repr})"
+ )
class TestFieldGroupSameFieldNameNested:
@@ -772,12 +719,10 @@ class Config:
inner: Inner
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
+ field_groups=((F[Config].user_name, F[Config].inner.user_name),),
)
assert result.user_name == "root-new"
@@ -792,8 +737,8 @@ def test_only_root_changed_raises(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"user_name": "root-new"}')
- defaults_meta = Source(file_=defaults)
- overrides_meta = Source(file_=overrides)
+ defaults_meta = JsonSource(file=defaults)
+ overrides_meta = JsonSource(file=overrides)
@dataclass
class Inner:
@@ -806,18 +751,15 @@ class Config:
with pytest.raises(FieldGroupError) as exc_info:
load(
- Merge(
- defaults_meta,
- overrides_meta,
- field_groups=(FieldGroup(F[Config].user_name, F[Config].inner.user_name),),
- ),
- Config,
+ defaults_meta,
+ overrides_meta,
+ schema=Config,
+ field_groups=((F[Config].user_name, F[Config].inner.user_name),),
)
- assert str(exc_info.value) == dedent(f"""\
- Config field group errors (1)
-
- Field group (user_name, inner.user_name) partially overridden in source 1
- changed: user_name (from source {overrides_meta!r})
- unchanged: inner.user_name (from source {defaults_meta!r})
- """)
+ assert str(exc_info.value) == "Config field group errors (1)"
+ assert str(exc_info.value.exceptions[0]) == (
+ f" Field group (user_name, inner.user_name) partially overridden in source 1\n"
+ f" changed: user_name (from source {overrides_meta!r})\n"
+ f" unchanged: inner.user_name (from source {defaults_meta!r})"
+ )
diff --git a/tests/merging/test_predicate.py b/tests/merging/test_predicate.py
index 9b812ca..5872c91 100644
--- a/tests/merging/test_predicate.py
+++ b/tests/merging/test_predicate.py
@@ -6,7 +6,6 @@
from dature.field_path import F
from dature.merging.predicate import build_field_merge_map, extract_field_path
-from dature.metadata import FieldMergeStrategy, MergeRule
class TestExtractFieldPath:
@@ -45,21 +44,21 @@ class Config:
port: int
tags: list[str]
- rules = (
- MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].tags, FieldMergeStrategy.APPEND),
- )
+ field_merges = {
+ F[Config].host: "first_wins",
+ F[Config].tags: "append",
+ }
- result = build_field_merge_map(rules)
+ result = build_field_merge_map(field_merges)
assert result.enum_map == {
- "host": FieldMergeStrategy.FIRST_WINS,
- "tags": FieldMergeStrategy.APPEND,
+ "host": "first_wins",
+ "tags": "append",
}
assert result.callable_map == {}
def test_empty_rules(self):
- result = build_field_merge_map(())
+ result = build_field_merge_map({})
assert result.enum_map == {}
assert result.callable_map == {}
@@ -72,11 +71,11 @@ class Database:
class Config:
database: Database
- rules = (MergeRule(F[Config].database.host, FieldMergeStrategy.LAST_WINS),)
+ field_merges = {F[Config].database.host: "last_wins"}
- result = build_field_merge_map(rules)
+ result = build_field_merge_map(field_merges)
- assert result.enum_map == {"database.host": FieldMergeStrategy.LAST_WINS}
+ assert result.enum_map == {"database.host": "last_wins"}
assert result.callable_map == {}
def test_callable_strategy(self):
@@ -85,14 +84,14 @@ class Config:
host: str
score: int
- rules = (
- MergeRule(F[Config].host, FieldMergeStrategy.FIRST_WINS),
- MergeRule(F[Config].score, sum),
- )
+ field_merges = {
+ F[Config].host: "first_wins",
+ F[Config].score: sum,
+ }
- result = build_field_merge_map(rules)
+ result = build_field_merge_map(field_merges)
- assert result.enum_map == {"host": FieldMergeStrategy.FIRST_WINS}
+ assert result.enum_map == {"host": "first_wins"}
assert result.callable_map == {"score": sum}
def test_validates_owner_mismatch(self):
@@ -104,10 +103,10 @@ class Config:
class Other:
host: str
- rules = (MergeRule(F[Other].host, FieldMergeStrategy.FIRST_WINS),)
+ field_merges = {F[Other].host: "first_wins"}
with pytest.raises(TypeError) as exc_info:
- build_field_merge_map(rules, Config)
+ build_field_merge_map(field_merges, schema=Config)
assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'"
@@ -118,7 +117,7 @@ class Config:
host: str
with pytest.raises(TypeError) as exc_info:
- extract_field_path(F["Other"].host, Config)
+ extract_field_path(F["Other"].host, schema=Config)
assert str(exc_info.value) == "FieldPath owner 'Other' does not match target dataclass 'Config'"
def test_passes_with_correct_string_owner(self):
@@ -126,4 +125,4 @@ def test_passes_with_correct_string_owner(self):
class Config:
host: str
- assert extract_field_path(F["Config"].host, Config) == "host"
+ assert extract_field_path(F["Config"].host, schema=Config) == "host"
diff --git a/tests/path_finders/test_ini.py b/tests/path_finders/test_ini.py
index 8c68d36..2871e61 100644
--- a/tests/path_finders/test_ini.py
+++ b/tests/path_finders/test_ini.py
@@ -1,4 +1,4 @@
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.ini_ import TablePathFinder
diff --git a/tests/path_finders/test_json.py b/tests/path_finders/test_json.py
index 887ffcd..82094a2 100644
--- a/tests/path_finders/test_json.py
+++ b/tests/path_finders/test_json.py
@@ -1,4 +1,4 @@
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.json_ import JsonPathFinder
diff --git a/tests/path_finders/test_json5.py b/tests/path_finders/test_json5.py
index 40727fc..eddb51a 100644
--- a/tests/path_finders/test_json5.py
+++ b/tests/path_finders/test_json5.py
@@ -1,4 +1,4 @@
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.json5_ import Json5PathFinder
diff --git a/tests/path_finders/test_toml.py b/tests/path_finders/test_toml.py
index b7b55dd..d798f0b 100644
--- a/tests/path_finders/test_toml.py
+++ b/tests/path_finders/test_toml.py
@@ -1,4 +1,4 @@
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.toml_ import Toml10PathFinder, Toml11PathFinder
diff --git a/tests/path_finders/test_yaml.py b/tests/path_finders/test_yaml.py
index 2b5a069..83f02cf 100644
--- a/tests/path_finders/test_yaml.py
+++ b/tests/path_finders/test_yaml.py
@@ -1,4 +1,4 @@
-from dature.errors.exceptions import LineRange
+from dature.errors import LineRange
from dature.path_finders.yaml_ import Yaml11PathFinder, Yaml12PathFinder
diff --git a/tests/sources/__init__.py b/tests/sources/__init__.py
new file mode 100644
index 0000000..d949306
--- /dev/null
+++ b/tests/sources/__init__.py
@@ -0,0 +1 @@
+"""Tests for sources."""
diff --git a/tests/sources_loader/checker.py b/tests/sources/checker.py
similarity index 100%
rename from tests/sources_loader/checker.py
rename to tests/sources/checker.py
diff --git a/tests/sources_loader/test_base.py b/tests/sources/test_base.py
similarity index 54%
rename from tests/sources_loader/test_base.py
rename to tests/sources/test_base.py
index e73869b..83293a7 100644
--- a/tests/sources_loader/test_base.py
+++ b/tests/sources/test_base.py
@@ -1,43 +1,41 @@
from dataclasses import dataclass
+from io import BytesIO, StringIO
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import EnvVarExpandError
+from dature import JsonSource, Source, load
+from dature.errors import EnvVarExpandError
from dature.field_path import F
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.json_ import JsonLoader
-from dature.types import ExpandEnvVarsMode, FileOrStream, JSONValue
+from dature.sources.base import FileFieldMixin, _string_value_loaders
+from dature.sources.retort import transform_to_dataclass
+from dature.types import JSONValue
-class MockLoader(BaseLoader):
- """Mock loader for testing base class functionality."""
+@dataclass(kw_only=True)
+class MockSource(Source):
+ """Mock source for testing base class functionality."""
- display_name = "mock"
+ format_name = "mock"
+ location_label = "MOCK"
+ test_data: JSONValue = None
- def __init__(
- self,
- *,
- prefix: str | None = None,
- test_data: JSONValue = None,
- expand_env_vars: ExpandEnvVarsMode = "default",
- ):
- super().__init__(prefix=prefix, expand_env_vars=expand_env_vars)
- self._test_data = test_data or {}
+ def __post_init__(self) -> None:
+ if self.test_data is None:
+ self.test_data = {}
- def _load(self, path: FileOrStream) -> JSONValue: # noqa: ARG002
+ def _load(self) -> JSONValue:
"""Return test data."""
- return self._test_data
+ return self.test_data
-class TestBaseLoader:
- """Tests for BaseLoader base class."""
+class TestBaseSource:
+ """Tests for Source base class."""
def test_apply_prefix_simple(self):
"""Test applying simple prefix."""
data = {"app": {"name": "Test", "port": 8080}, "other": "value"}
- loader = MockLoader(prefix="app", test_data=data)
+ loader = MockSource(prefix="app", test_data=data)
result = loader._apply_prefix(data)
@@ -46,7 +44,7 @@ def test_apply_prefix_simple(self):
def test_apply_prefix_nested(self):
"""Test applying nested prefix with dots."""
data = {"app": {"database": {"host": "localhost", "port": 5432}}}
- loader = MockLoader(prefix="app.database", test_data=data)
+ loader = MockSource(prefix="app.database", test_data=data)
result = loader._apply_prefix(data)
@@ -55,7 +53,7 @@ def test_apply_prefix_nested(self):
def test_apply_prefix_none(self):
"""Test that None prefix returns original data."""
data = {"key": "value"}
- loader = MockLoader(test_data=data)
+ loader = MockSource(test_data=data)
result = loader._apply_prefix(data)
@@ -64,7 +62,7 @@ def test_apply_prefix_none(self):
def test_apply_prefix_empty_string(self):
"""Test that empty string prefix returns original data."""
data = {"key": "value"}
- loader = MockLoader(prefix="", test_data=data)
+ loader = MockSource(prefix="", test_data=data)
result = loader._apply_prefix(data)
@@ -73,7 +71,7 @@ def test_apply_prefix_empty_string(self):
def test_apply_prefix_nonexistent(self):
"""Test applying nonexistent prefix returns empty dict."""
data = {"app": {"name": "Test"}}
- loader = MockLoader(prefix="nonexistent", test_data=data)
+ loader = MockSource(prefix="nonexistent", test_data=data)
result = loader._apply_prefix(data)
@@ -82,7 +80,7 @@ def test_apply_prefix_nonexistent(self):
def test_apply_prefix_deep_nesting(self):
"""Test applying deeply nested prefix."""
data = {"a": {"b": {"c": {"d": {"value": "deep"}}}}}
- loader = MockLoader(prefix="a.b.c.d", test_data=data)
+ loader = MockSource(prefix="a.b.c.d", test_data=data)
result = loader._apply_prefix(data)
@@ -91,7 +89,7 @@ def test_apply_prefix_deep_nesting(self):
def test_apply_prefix_invalid_path(self):
"""Test applying prefix with invalid path."""
data = {"app": "not_a_dict"}
- loader = MockLoader(prefix="app.nested", test_data=data)
+ loader = MockSource(prefix="app.nested", test_data=data)
result = loader._apply_prefix(data)
@@ -107,9 +105,9 @@ class Config:
expected_data = Config(name="TestApp", port=8080)
data = {"name": "TestApp", "port": 8080}
- loader = MockLoader(test_data=data)
+ loader = MockSource(test_data=data)
- result = loader.transform_to_dataclass(data, Config)
+ result = transform_to_dataclass(loader, data, schema=Config)
assert result == expected_data
@@ -127,9 +125,9 @@ class Config:
expected_data = Config(database=DatabaseConfig(host="localhost", port=5432))
data = {"database": {"host": "localhost", "port": 5432}}
- loader = MockLoader(test_data=data)
+ loader = MockSource(test_data=data)
- result = loader.transform_to_dataclass(data, Config)
+ result = transform_to_dataclass(loader, data, schema=Config)
assert result == expected_data
@@ -145,17 +143,17 @@ class Config:
expected_data = Config(name="TestApp", port=8080, debug=True, default="value")
data = {"app": {"name": "TestApp", "port": 8080, "debug": True}}
- loader = MockLoader(prefix="app", test_data=data)
+ loader = MockSource(prefix="app", test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, Config)
+ load_result = loader.load_raw()
+ result = transform_to_dataclass(loader, load_result.data, schema=Config)
assert result == expected_data
def test_apply_prefix_with_list(self):
"""Test that apply_prefix returns data as-is when prefix points to non-dict."""
data = {"items": [1, 2, 3]}
- loader = MockLoader(prefix="items", test_data=data)
+ loader = MockSource(prefix="items", test_data=data)
result = loader._apply_prefix(data)
@@ -174,8 +172,8 @@ class Config:
json_file.write_text('{"userName": "John", "userAge": 25, "isActive": true}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="lower_camel"),
- Config,
+ JsonSource(file=json_file, name_style="lower_camel"),
+ schema=Config,
)
assert result.user_name == "John"
@@ -192,8 +190,8 @@ class Config:
json_file.write_text('{"user_name": "Alice", "user_age": 30}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="lower_snake"),
- Config,
+ JsonSource(file=json_file, name_style="lower_snake"),
+ schema=Config,
)
assert result.user_name == "Alice"
@@ -209,8 +207,8 @@ class Config:
json_file.write_text('{"UserName": "Bob", "TotalCount": 100}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="upper_camel"),
- Config,
+ JsonSource(file=json_file, name_style="upper_camel"),
+ schema=Config,
)
assert result.user_name == "Bob"
@@ -226,8 +224,8 @@ class Config:
json_file.write_text('{"user-name": "Charlie", "api-key": "secret123"}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="lower_kebab"),
- Config,
+ JsonSource(file=json_file, name_style="lower_kebab"),
+ schema=Config,
)
assert result.user_name == "Charlie"
@@ -243,8 +241,8 @@ class Config:
json_file.write_text('{"USER-NAME": "Dave", "API-KEY": "secret456"}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="upper_kebab"),
- Config,
+ JsonSource(file=json_file, name_style="upper_kebab"),
+ schema=Config,
)
assert result.user_name == "Dave"
@@ -260,8 +258,8 @@ class Config:
json_file.write_text('{"USER_NAME": "Eve", "MAX_RETRIES": 3}')
result = load(
- Source(file_=json_file, loader=JsonLoader, name_style="upper_snake"),
- Config,
+ JsonSource(file=json_file, name_style="upper_snake"),
+ schema=Config,
)
assert result.user_name == "Eve"
@@ -286,8 +284,8 @@ class Config:
}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "John Doe"
@@ -307,8 +305,8 @@ class Config:
field_mapping = {F[Config].name: "userName"}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "Alice"
@@ -328,13 +326,8 @@ class Config:
field_mapping = {F[Config].special_field: "customKey"}
result = load(
- Source(
- file_=json_file,
- loader=JsonLoader,
- name_style="lower_camel",
- field_mapping=field_mapping,
- ),
- Config,
+ JsonSource(file=json_file, name_style="lower_camel", field_mapping=field_mapping),
+ schema=Config,
)
assert result.user_name == "Bob"
@@ -365,8 +358,8 @@ class User:
}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- User,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=User,
)
assert result.name == "Charlie"
@@ -384,8 +377,8 @@ class Config:
field_mapping = {F[Config].name: ("fullName", "userName")}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "Alice"
@@ -401,8 +394,8 @@ class Config:
field_mapping = {F[Config].name: ("fullName", "userName")}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "Bob"
@@ -422,8 +415,8 @@ class User:
field_mapping = {F[User].address.city: "cityName"}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- User,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=User,
)
assert result.address.city == "LA"
@@ -439,8 +432,8 @@ class Config:
field_mapping = {F["Config"].name: "fullName"}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "Eve"
@@ -456,8 +449,8 @@ class Config:
field_mapping = {F[Config].name: "fullName"}
result = load(
- Source(file_=json_file, loader=JsonLoader, field_mapping=field_mapping),
- Config,
+ JsonSource(file=json_file, field_mapping=field_mapping),
+ schema=Config,
)
assert result.name == "Direct"
@@ -484,13 +477,8 @@ class Config:
}
result = load(
- Source(
- file_=json_file,
- loader=JsonLoader,
- name_style="lower_camel",
- field_mapping=field_mapping,
- ),
- Config,
+ JsonSource(file=json_file, name_style="lower_camel", field_mapping=field_mapping),
+ schema=Config,
)
assert result.user_name == "Alice"
@@ -519,13 +507,8 @@ class Config:
}
result = load(
- Source(
- file_=json_file,
- loader=JsonLoader,
- name_style="lower_camel",
- field_mapping=field_mapping,
- ),
- Config,
+ JsonSource(file=json_file, name_style="lower_camel", field_mapping=field_mapping),
+ schema=Config,
)
assert result.user_name == "Alice"
@@ -537,57 +520,300 @@ class TestExpandEnvVars:
def test_default_expands_existing(self, monkeypatch):
monkeypatch.setenv("DATURE_TEST_HOST", "localhost")
data = {"host": "$DATURE_TEST_HOST", "port": 8080}
- loader = MockLoader(test_data=data)
+ loader = MockSource(test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, dict)
+ load_result = loader.load_raw()
+ result = transform_to_dataclass(loader, load_result.data, dict)
assert result == {"host": "localhost", "port": 8080}
def test_default_keeps_missing(self, monkeypatch):
monkeypatch.delenv("DATURE_MISSING", raising=False)
data = {"host": "$DATURE_MISSING", "port": 8080}
- loader = MockLoader(test_data=data)
+ loader = MockSource(test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, dict)
+ load_result = loader.load_raw()
+ result = transform_to_dataclass(loader, load_result.data, dict)
assert result == {"host": "$DATURE_MISSING", "port": 8080}
def test_disabled(self, monkeypatch):
monkeypatch.setenv("DATURE_TEST_HOST", "localhost")
data = {"host": "$DATURE_TEST_HOST", "port": 8080}
- loader = MockLoader(test_data=data, expand_env_vars="disabled")
+ loader = MockSource(test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, dict)
+ load_result = loader.load_raw(resolved_expand="disabled")
+ result = transform_to_dataclass(loader, load_result.data, dict)
assert result == {"host": "$DATURE_TEST_HOST", "port": 8080}
def test_empty_replaces_missing_with_empty_string(self, monkeypatch):
monkeypatch.delenv("DATURE_MISSING", raising=False)
data = {"host": "$DATURE_MISSING", "port": 8080}
- loader = MockLoader(test_data=data, expand_env_vars="empty")
+ loader = MockSource(test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, dict)
+ load_result = loader.load_raw(resolved_expand="empty")
+ result = transform_to_dataclass(loader, load_result.data, dict)
assert result == {"host": "", "port": 8080}
def test_strict_raises_on_missing(self, monkeypatch):
monkeypatch.delenv("DATURE_MISSING", raising=False)
data = {"host": "$DATURE_MISSING", "port": 8080}
- loader = MockLoader(test_data=data, expand_env_vars="strict")
+ loader = MockSource(test_data=data)
with pytest.raises(EnvVarExpandError):
- loader.load_raw(Path())
+ loader.load_raw(resolved_expand="strict")
def test_strict_expands_existing(self, monkeypatch):
monkeypatch.setenv("DATURE_TEST_HOST", "localhost")
data = {"host": "$DATURE_TEST_HOST", "port": 8080}
- loader = MockLoader(test_data=data, expand_env_vars="strict")
+ loader = MockSource(test_data=data)
- load_result = loader.load_raw(Path())
- result = loader.transform_to_dataclass(load_result.data, dict)
+ load_result = loader.load_raw(resolved_expand="strict")
+ result = transform_to_dataclass(loader, load_result.data, dict)
assert result == {"host": "localhost", "port": 8080}
+
+
+class TestFileFieldMixin:
+ def test_init_file_field_str(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file="/data/test.json")
+ src._init_file_field()
+
+ assert src.file == "/data/test.json"
+ assert isinstance(src.file, str)
+
+ def test_init_file_field_path(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file=Path("/data/test.json"))
+ src._init_file_field()
+
+ assert src.file == str(Path("/data/test.json"))
+ assert isinstance(src.file, str)
+
+ def test_init_file_field_none(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file=None)
+ src._init_file_field()
+
+ assert src.file is None
+
+ def test_init_file_field_stream(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ stream = StringIO("data")
+ src = Src(file=stream)
+ src._init_file_field()
+
+ assert src.file is stream
+
+ @pytest.mark.parametrize(
+ ("file_input", "expected_type"),
+ [
+ ("config.json", Path),
+ (Path("config.json"), Path),
+ (None, Path),
+ ],
+ )
+ def test_resolve_file_field_path_types(self, file_input, expected_type):
+ result = FileFieldMixin.resolve_file_field(file_input)
+
+ assert isinstance(result, expected_type)
+
+ def test_resolve_file_field_stream(self):
+ stream = StringIO("data")
+
+ result = FileFieldMixin.resolve_file_field(stream)
+
+ assert result is stream
+
+ def test_resolve_file_field_binary_stream(self):
+ stream = BytesIO(b"data")
+
+ result = FileFieldMixin.resolve_file_field(stream)
+
+ assert result is stream
+
+ @pytest.mark.parametrize(
+ ("file_input", "expected"),
+ [
+ ("config.json", "config.json"),
+ (Path("config.json"), "config.json"),
+ (None, None),
+ ],
+ )
+ def test_file_field_display(self, file_input, expected):
+ result = FileFieldMixin.file_field_display(file_input)
+
+ assert result == expected
+
+ def test_file_field_display_stream(self):
+ result = FileFieldMixin.file_field_display(StringIO("data"))
+
+ assert result == ""
+
+ def test_file_field_display_binary_stream(self):
+ result = FileFieldMixin.file_field_display(BytesIO(b"data"))
+
+ assert result == ""
+
+ @pytest.mark.parametrize(
+ ("file_input", "expected"),
+ [
+ ("config.json", Path("config.json")),
+ (Path("config.json"), Path("config.json")),
+ (None, None),
+ ],
+ )
+ def test_file_field_path_for_errors(self, file_input, expected):
+ result = FileFieldMixin.file_field_path_for_errors(file_input)
+
+ assert result == expected
+
+ def test_file_field_path_for_errors_stream(self):
+ result = FileFieldMixin.file_field_path_for_errors(StringIO("data"))
+
+ assert result is None
+
+ def test_file_field_path_for_errors_binary_stream(self):
+ result = FileFieldMixin.file_field_path_for_errors(BytesIO(b"data"))
+
+ assert result is None
+
+ def test_file_display_delegates(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file="config.json")
+
+ assert src.file_display() == "config.json"
+
+ def test_file_display_none(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file=None)
+
+ assert src.file_display() is None
+
+ def test_file_path_for_errors_delegates(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file=Path("config.json"))
+
+ assert src.file_path_for_errors() == Path("config.json")
+
+ def test_file_path_for_errors_none(self):
+ @dataclass
+ class Src(FileFieldMixin):
+ pass
+
+ src = Src(file=None)
+
+ assert src.file_path_for_errors() is None
+
+
+class TestStringValueLoaders:
+ def test_returns_nine_providers(self):
+ loaders = _string_value_loaders()
+
+ assert len(loaders) == 9
+
+
+class TestResolveLocation:
+ def test_file_content_none_returns_empty(self):
+ locations = MockSource.resolve_location(
+ field_path=["name"],
+ file_path=Path("config.json"),
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is None
+ assert locations[0].line_content is None
+
+ def test_empty_field_path_returns_empty(self):
+ locations = MockSource.resolve_location(
+ field_path=[],
+ file_path=Path("config.json"),
+ file_content='{"name": "test"}',
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is None
+
+ def test_path_finder_none_returns_empty(self):
+ locations = MockSource.resolve_location(
+ field_path=["name"],
+ file_path=Path("config.json"),
+ file_content='{"name": "test"}',
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is None
+
+ def test_json_source_finds_line_range(self, tmp_path):
+ content = '{\n "name": "test",\n "port": 8080\n}'
+
+ locations = JsonSource.resolve_location(
+ field_path=["name"],
+ file_path=tmp_path / "config.json",
+ file_content=content,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is not None
+ assert locations[0].line_content is not None
+
+ def test_json_source_with_prefix(self, tmp_path):
+ content = '{\n "app": {\n "name": "test"\n }\n}'
+
+ locations = JsonSource.resolve_location(
+ field_path=["name"],
+ file_path=tmp_path / "config.json",
+ file_content=content,
+ prefix="app",
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is not None
+
+ def test_json_source_field_not_found_returns_empty(self, tmp_path):
+ content = '{\n "name": "test"\n}'
+
+ locations = JsonSource.resolve_location(
+ field_path=["nonexistent"],
+ file_path=tmp_path / "config.json",
+ file_content=content,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is None
diff --git a/tests/sources/test_docker_secrets.py b/tests/sources/test_docker_secrets.py
new file mode 100644
index 0000000..60bf140
--- /dev/null
+++ b/tests/sources/test_docker_secrets.py
@@ -0,0 +1,137 @@
+from dataclasses import dataclass
+from pathlib import Path
+
+from dature import DockerSecretsSource, load
+from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
+from tests.sources.checker import assert_all_types_equal
+
+
+class TestDockerSecretsSource:
+ def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path):
+ result = load(
+ DockerSecretsSource(dir_=all_types_docker_secrets_dir),
+ schema=AllPythonTypesCompact,
+ )
+
+ assert_all_types_equal(result, EXPECTED_ALL_TYPES)
+
+ def test_custom_split_symbols(self, tmp_path: Path):
+ (tmp_path / "db.host").write_text("localhost")
+ (tmp_path / "db.port").write_text("5432")
+
+ loader = DockerSecretsSource(dir_=tmp_path, split_symbols=".")
+ result = loader.load_raw()
+
+ assert result.data == {"db": {"host": "localhost", "port": 5432}}
+
+ def test_prefix_filtering(self, tmp_path: Path):
+ (tmp_path / "APP_name").write_text("myapp")
+ (tmp_path / "APP_port").write_text("8080")
+ (tmp_path / "OTHER_key").write_text("ignored")
+
+ loader = DockerSecretsSource(dir_=tmp_path, prefix="APP_")
+ data = loader._load()
+
+ assert data == {"name": "myapp", "port": "8080"}
+
+ def test_skip_subdirectories(self, tmp_path: Path):
+ (tmp_path / "name").write_text("myapp")
+ subdir = tmp_path / "subdir"
+ subdir.mkdir()
+ (subdir / "nested_file").write_text("should_be_ignored")
+
+ loader = DockerSecretsSource(dir_=tmp_path)
+ data = loader._load()
+
+ assert data == {"name": "myapp"}
+
+ def test_empty_directory(self, tmp_path: Path):
+ loader = DockerSecretsSource(dir_=tmp_path)
+ data = loader._load()
+
+ assert data == {}
+
+ def test_strip_file_content(self, tmp_path: Path):
+ (tmp_path / "secret").write_text(" password123\n")
+
+ loader = DockerSecretsSource(dir_=tmp_path)
+ data = loader._load()
+
+ assert data == {"secret": "password123"}
+
+ def test_env_var_substitution(self, tmp_path: Path, monkeypatch):
+ monkeypatch.setenv("BASE_URL", "https://api.example.com")
+
+ (tmp_path / "api_url").write_text("$BASE_URL/v1")
+ (tmp_path / "base").write_text("$BASE_URL")
+
+ @dataclass
+ class Config:
+ api_url: str
+ base: str
+
+ result = load(
+ DockerSecretsSource(dir_=tmp_path),
+ schema=Config,
+ )
+
+ assert result.api_url == "https://api.example.com/v1"
+ assert result.base == "https://api.example.com"
+
+
+class TestDockerSecretsDisplayProperties:
+ def test_format_name_and_label(self):
+ assert DockerSecretsSource.format_name == "docker_secrets"
+ assert DockerSecretsSource.location_label == "SECRET FILE"
+
+
+class TestDockerSecretsResolveLocation:
+ def test_resolve_builds_secret_path(self, tmp_path: Path):
+ locations = DockerSecretsSource.resolve_location(
+ field_path=["db_password"],
+ file_path=tmp_path,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].file_path == tmp_path / "db_password"
+ assert locations[0].line_range is None
+ assert locations[0].location_label == "SECRET FILE"
+
+ def test_resolve_with_prefix(self, tmp_path: Path):
+ locations = DockerSecretsSource.resolve_location(
+ field_path=["password"],
+ file_path=tmp_path,
+ file_content=None,
+ prefix="APP_",
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].file_path == tmp_path / "APP_password"
+
+ def test_resolve_nested_path(self, tmp_path: Path):
+ locations = DockerSecretsSource.resolve_location(
+ field_path=["database", "host"],
+ file_path=tmp_path,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].file_path == tmp_path / "database__host"
+
+ def test_resolve_file_path_none(self):
+ locations = DockerSecretsSource.resolve_location(
+ field_path=["secret"],
+ file_path=None,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].file_path is None
diff --git a/tests/sources_loader/test_env_.py b/tests/sources/test_env_.py
similarity index 65%
rename from tests/sources_loader/test_env_.py
rename to tests/sources/test_env_.py
index 68540c9..a70e979 100644
--- a/tests/sources_loader/test_env_.py
+++ b/tests/sources/test_env_.py
@@ -1,23 +1,23 @@
-"""Tests for env_ module (EnvLoader and EnvFileLoader)."""
+"""Tests for env_ module (EnvSource and EnvFileSource)."""
from dataclasses import dataclass
+from io import StringIO
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.sources_loader.env_ import EnvFileLoader, EnvLoader
+from dature import EnvFileSource, EnvSource, load
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestEnvFileLoader:
- """Tests for EnvFileLoader class."""
+class TestEnvFileSource:
+ """Tests for EnvFileSource class."""
def test_prefix_filtering(self, prefixed_env_file: Path):
"""Test prefix filtering with nested structures."""
- loader = EnvFileLoader(prefix="APP_")
- result = loader.load_raw(prefixed_env_file)
+ loader = EnvFileSource(file=prefixed_env_file, prefix="APP_")
+ result = loader.load_raw()
assert result.data == {
"name": "PrefixedApp",
@@ -30,8 +30,8 @@ def test_prefix_filtering(self, prefixed_env_file: Path):
def test_custom_split_symbols(self, custom_separator_env_file: Path):
"""Test custom separator for nested keys."""
- loader = EnvFileLoader(prefix="APP_", split_symbols=".")
- result = loader.load_raw(custom_separator_env_file)
+ loader = EnvFileSource(file=custom_separator_env_file, prefix="APP_", split_symbols=".")
+ result = loader.load_raw()
assert result.data == {
"name": "CustomApp",
@@ -44,7 +44,7 @@ def test_custom_split_symbols(self, custom_separator_env_file: Path):
def test_comprehensive_type_conversion(self, all_types_env_file: Path):
"""Test loading ENV with full type coercion to dataclass."""
- result = load(Source(file_=all_types_env_file, loader=EnvFileLoader), AllPythonTypesCompact)
+ result = load(EnvFileSource(file=all_types_env_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -53,12 +53,12 @@ def test_empty_file(self, tmp_path: Path):
env_file = tmp_path / ".env"
env_file.write_text("")
- loader = EnvFileLoader()
- data = loader._load(env_file)
+ loader = EnvFileSource(file=env_file)
+ data = loader._load()
assert data == {}
- def test_env_file_env_var_substitution(self, tmp_path: Path, monkeypatch):
+ def test_env_file_env_var_substitution(self, tmp_path: Path, monkeypatch):
monkeypatch.setenv("BASE_URL", "https://api.example.com")
env_file = tmp_path / ".env"
@@ -69,12 +69,12 @@ class Config:
api_url: str
base: str
- result = load(Source(file_=env_file, loader=EnvFileLoader), Config)
+ result = load(EnvFileSource(file=env_file), schema=Config)
assert result.api_url == "https://api.example.com/v1"
assert result.base == "https://api.example.com"
- def test_env_file_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
+ def test_env_file_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
monkeypatch.setenv("HOST", "localhost")
monkeypatch.setenv("PORT", "8080")
@@ -85,11 +85,11 @@ def test_env_file_env_var_partial_substitution(self, tmp_path: Path, monkeypatch
class Config:
url: str
- result = load(Source(file_=env_file, loader=EnvFileLoader), Config)
+ result = load(EnvFileSource(file=env_file), schema=Config)
assert result.url == "http://localhost:8080/api"
- def test_env_file_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypatch):
+ def test_env_file_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypatch):
monkeypatch.setenv("abc", "replaced")
env_file = tmp_path / ".env"
@@ -99,7 +99,7 @@ def test_env_file_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monk
class Config:
value: str
- result = load(Source(file_=env_file, loader=EnvFileLoader), Config)
+ result = load(EnvFileSource(file=env_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -125,12 +125,12 @@ def test_quote_stripping(self, tmp_path: Path, raw_value: str, expected: str):
env_file = tmp_path / ".env"
env_file.write_text(f"value={raw_value}")
- loader = EnvFileLoader()
- data = loader._load(env_file)
+ loader = EnvFileSource(file=env_file)
+ data = loader._load()
assert data == {"value": expected}
- def test_env_file_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypatch):
+ def test_env_file_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypatch):
monkeypatch.delenv("nonexistent", raising=False)
env_file = tmp_path / ".env"
@@ -140,13 +140,13 @@ def test_env_file_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monke
class Config:
value: str
- result = load(Source(file_=env_file, loader=EnvFileLoader), Config)
+ result = load(EnvFileSource(file=env_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
-class TestEnvLoader:
- """Tests for EnvLoader class."""
+class TestEnvSource:
+ """Tests for EnvSource class."""
def test_comprehensive_type_conversion(self, monkeypatch):
"""Test loading from os.environ with full type coercion to dataclass."""
@@ -256,7 +256,7 @@ def test_comprehensive_type_conversion(self, monkeypatch):
for key, value in env_vars.items():
monkeypatch.setenv(key, value)
- result = load(Source(loader=EnvLoader, prefix="APP_"), AllPythonTypesCompact)
+ result = load(EnvSource(prefix="APP_"), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -273,7 +273,7 @@ class TestConfig:
expected_data = TestConfig(var="included", key="also_included")
- data = load(Source(loader=EnvLoader, prefix="APP_"), TestConfig)
+ data = load(EnvSource(prefix="APP_"), schema=TestConfig)
assert data == expected_data
@@ -294,9 +294,118 @@ class TestConfig:
expected_data = TestConfig(db=TestData(host="localhost", port="5432"))
data = load(
- Source(loader=EnvLoader, prefix="APP_", split_symbols="."),
- TestConfig,
+ EnvSource(prefix="APP_", split_symbols="."),
+ schema=TestConfig,
)
assert data == expected_data
assert data == expected_data
+
+
+class TestEnvSourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert EnvSource.format_name == "env"
+ assert EnvSource.location_label == "ENV"
+
+
+class TestEnvFileSourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert EnvFileSource.format_name == "envfile"
+ assert EnvFileSource.location_label == "ENV FILE"
+
+
+class TestEnvSourceResolveLocation:
+ def test_resolve_returns_env_var_name(self):
+ locations = EnvSource.resolve_location(
+ field_path=["host"],
+ file_path=None,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].env_var_name == "HOST"
+ assert locations[0].file_path is None
+ assert locations[0].location_label == "ENV"
+
+ def test_resolve_with_prefix(self):
+ locations = EnvSource.resolve_location(
+ field_path=["host"],
+ file_path=None,
+ file_content=None,
+ prefix="APP_",
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].env_var_name == "APP_HOST"
+
+ def test_resolve_nested_path(self):
+ locations = EnvSource.resolve_location(
+ field_path=["database", "host"],
+ file_path=None,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].env_var_name == "DATABASE__HOST"
+
+ def test_resolve_with_custom_split_symbols(self):
+ locations = EnvSource.resolve_location(
+ field_path=["database", "host"],
+ file_path=None,
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ split_symbols=".",
+ )
+
+ assert len(locations) == 1
+ assert locations[0].env_var_name == "DATABASE.HOST"
+
+
+class TestEnvFileSourceResolveLocation:
+ def test_resolve_finds_line_in_content(self):
+ content = "HOST=localhost\nPORT=8080"
+
+ locations = EnvFileSource.resolve_location(
+ field_path=["port"],
+ file_path=Path(".env"),
+ file_content=content,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].env_var_name == "PORT"
+ assert locations[0].line_range is not None
+ assert locations[0].line_range.start == 2
+
+ def test_resolve_no_content(self):
+ locations = EnvFileSource.resolve_location(
+ field_path=["host"],
+ file_path=Path(".env"),
+ file_content=None,
+ prefix=None,
+ nested_conflict=None,
+ )
+
+ assert len(locations) == 1
+ assert locations[0].line_range is None
+ assert locations[0].location_label == "ENV FILE"
+
+
+class TestEnvFileSourceStream:
+ def test_load_from_string_stream(self):
+ @dataclass
+ class Config:
+ name: str
+ port: int
+
+ result = load(EnvFileSource(file=StringIO("NAME=test\nPORT=8080")), schema=Config)
+
+ assert result.name == "test"
+ assert result.port == 8080
diff --git a/tests/sources_loader/test_ini_.py b/tests/sources/test_ini_.py
similarity index 68%
rename from tests/sources_loader/test_ini_.py
rename to tests/sources/test_ini_.py
index 61bc789..d9788f0 100644
--- a/tests/sources_loader/test_ini_.py
+++ b/tests/sources/test_ini_.py
@@ -1,33 +1,33 @@
-"""Tests for ini_ module (IniLoader)."""
+"""Tests for ini_ module (IniSource)."""
import configparser
from dataclasses import dataclass
+from io import StringIO
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.sources_loader.ini_ import IniLoader
+from dature import IniSource, load
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestIniLoader:
- """Tests for IniLoader class."""
+class TestIniSource:
+ """Tests for IniSource class."""
def test_comprehensive_type_conversion(self, all_types_ini_file: Path):
"""Test loading INI with full type coercion to dataclass."""
result = load(
- Source(file_=all_types_ini_file, loader=IniLoader, prefix="all_types"),
- AllPythonTypesCompact,
+ IniSource(file=all_types_ini_file, prefix="all_types"),
+ schema=AllPythonTypesCompact,
)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
def test_ini_sections(self, ini_sections_file: Path):
"""Test INI sections and DEFAULT inheritance."""
- loader = IniLoader()
- data = loader._load(ini_sections_file)
+ loader = IniSource(file=ini_sections_file)
+ data = loader._load()
assert data == {
"DEFAULT": {
@@ -59,8 +59,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_ini_file, loader=IniLoader, prefix="app"),
- PrefixedConfig,
+ IniSource(file=prefixed_ini_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -70,10 +70,10 @@ def test_ini_requires_sections(self, tmp_path: Path):
ini_file = tmp_path / "nosection.ini"
ini_file.write_text("key = value")
- loader = IniLoader()
+ loader = IniSource(file=ini_file)
with pytest.raises(configparser.MissingSectionHeaderError):
- loader._load(ini_file)
+ loader._load()
def test_ini_env_var_substitution(self, tmp_path: Path, monkeypatch):
monkeypatch.setenv("DB_HOST", "db.example.com")
@@ -88,8 +88,8 @@ class DbConfig:
port: int
result = load(
- Source(file_=ini_file, loader=IniLoader, prefix="database"),
- DbConfig,
+ IniSource(file=ini_file, prefix="database"),
+ schema=DbConfig,
)
assert result.host == "db.example.com"
@@ -107,8 +107,8 @@ class Config:
url: str
result = load(
- Source(file_=ini_file, loader=IniLoader, prefix="section"),
- Config,
+ IniSource(file=ini_file, prefix="section"),
+ schema=Config,
)
assert result.url == "http://localhost:8080/api"
@@ -124,8 +124,8 @@ class Config:
value: str
result = load(
- Source(file_=ini_file, loader=IniLoader, prefix="section"),
- Config,
+ IniSource(file=ini_file, prefix="section"),
+ schema=Config,
)
assert result.value == "prefixreplaced/suffix"
@@ -141,8 +141,30 @@ class Config:
value: str
result = load(
- Source(file_=ini_file, loader=IniLoader, prefix="section"),
- Config,
+ IniSource(file=ini_file, prefix="section"),
+ schema=Config,
)
assert result.value == "prefix$nonexistent/suffix"
+
+
+class TestIniSourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert IniSource.format_name == "ini"
+ assert IniSource.location_label == "FILE"
+
+
+class TestIniSourceStream:
+ def test_load_from_string_stream(self):
+ @dataclass
+ class Config:
+ key: str
+ value: str
+
+ result = load(
+ IniSource(file=StringIO("[section]\nkey = hello\nvalue = world"), prefix="section"),
+ schema=Config,
+ )
+
+ assert result.key == "hello"
+ assert result.value == "world"
diff --git a/tests/sources_loader/test_json5_.py b/tests/sources/test_json5_.py
similarity index 78%
rename from tests/sources_loader/test_json5_.py
rename to tests/sources/test_json5_.py
index de53b5f..664cfdf 100644
--- a/tests/sources_loader/test_json5_.py
+++ b/tests/sources/test_json5_.py
@@ -1,23 +1,28 @@
-"""Tests for json5_ module (Json5Loader)."""
+"""Tests for json5_ module (Json5Source)."""
from dataclasses import dataclass
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.json5_ import Json5Loader
+from dature import Json5Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestJson5Loader:
- """Tests for Json5Loader class."""
+class TestJson5SourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert Json5Source.format_name == "json5"
+ assert Json5Source.location_label == "FILE"
+
+
+class TestJson5Source:
+ """Tests for Json5Source class."""
def test_comprehensive_type_conversion(self, all_types_json5_file: Path):
"""Test loading JSON5 with full type coercion to dataclass."""
- result = load(Source(file_=all_types_json5_file, loader=Json5Loader), AllPythonTypesCompact)
+ result = load(Json5Source(file=all_types_json5_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +42,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_json5_file, loader=Json5Loader, prefix="app"),
- PrefixedConfig,
+ Json5Source(file=prefixed_json5_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -48,8 +53,8 @@ def test_json5_empty_object(self, tmp_path: Path):
json5_file = tmp_path / "empty.json5"
json5_file.write_text("{}")
- loader = Json5Loader()
- data = loader._load(json5_file)
+ loader = Json5Source(file=json5_file)
+ data = loader._load()
assert data == {}
@@ -65,7 +70,7 @@ class DbConfig:
host: str
port: int
- result = load(Source(file_=json5_file, loader=Json5Loader), DbConfig)
+ result = load(Json5Source(file=json5_file), schema=DbConfig)
assert result.host == "db.example.com"
assert result.port == 5432
@@ -81,7 +86,7 @@ def test_json5_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=json5_file, loader=Json5Loader), Config)
+ result = load(Json5Source(file=json5_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -95,7 +100,7 @@ def test_json5_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeyp
class Config:
value: str
- result = load(Source(file_=json5_file, loader=Json5Loader), Config)
+ result = load(Json5Source(file=json5_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -109,7 +114,7 @@ def test_json5_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=json5_file, loader=Json5Loader), Config)
+ result = load(Json5Source(file=json5_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -122,7 +127,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json5_file, loader=Json5Loader), Config)
+ load(Json5Source(file=json5_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -145,7 +150,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json5_file, loader=Json5Loader), Config)
+ load(Json5Source(file=json5_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/sources_loader/test_json_.py b/tests/sources/test_json_.py
similarity index 74%
rename from tests/sources_loader/test_json_.py
rename to tests/sources/test_json_.py
index fae8cef..c1671a6 100644
--- a/tests/sources_loader/test_json_.py
+++ b/tests/sources/test_json_.py
@@ -1,23 +1,23 @@
-"""Tests for json_ module (JsonLoader)."""
+"""Tests for json_ module (JsonSource)."""
from dataclasses import dataclass
+from io import StringIO
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.json_ import JsonLoader
+from dature import JsonSource, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestJsonLoader:
- """Tests for JsonLoader class."""
+class TestJsonSource:
+ """Tests for JsonSource class."""
def test_comprehensive_type_conversion(self, all_types_json_file: Path):
"""Test loading JSON with full type coercion to dataclass."""
- result = load(Source(file_=all_types_json_file, loader=JsonLoader), AllPythonTypesCompact)
+ result = load(JsonSource(file=all_types_json_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +37,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_json_file, loader=JsonLoader, prefix="app"),
- PrefixedConfig,
+ JsonSource(file=prefixed_json_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -48,8 +48,8 @@ def test_json_empty_object(self, tmp_path: Path):
json_file = tmp_path / "empty.json"
json_file.write_text("{}")
- loader = JsonLoader()
- data = loader._load(json_file)
+ loader = JsonSource(file=json_file)
+ data = loader._load()
assert data == {}
@@ -65,7 +65,7 @@ class DbConfig:
host: str
port: int
- result = load(Source(file_=json_file, loader=JsonLoader), DbConfig)
+ result = load(JsonSource(file=json_file), schema=DbConfig)
assert result.host == "db.example.com"
assert result.port == 5432
@@ -81,7 +81,7 @@ def test_json_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=json_file, loader=JsonLoader), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -95,7 +95,7 @@ def test_json_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=json_file, loader=JsonLoader), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -109,7 +109,7 @@ def test_json_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat
class Config:
value: str
- result = load(Source(file_=json_file, loader=JsonLoader), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -122,7 +122,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file, loader=JsonLoader), Config)
+ load(JsonSource(file=json_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -145,7 +145,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=json_file, loader=JsonLoader), Config)
+ load(JsonSource(file=json_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -158,3 +158,22 @@ class Config:
f" │ ^\n"
f" └── FILE '{json_file}', line 1"
)
+
+
+class TestJsonSourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert JsonSource.format_name == "json"
+ assert JsonSource.location_label == "FILE"
+
+
+class TestJsonSourceStream:
+ def test_load_from_string_stream(self):
+ @dataclass
+ class Config:
+ name: str
+ port: int
+
+ result = load(JsonSource(file=StringIO('{"name": "test", "port": 8080}')), schema=Config)
+
+ assert result.name == "test"
+ assert result.port == 8080
diff --git a/tests/sources_loader/test_nested_resolve.py b/tests/sources/test_nested_resolve.py
similarity index 83%
rename from tests/sources_loader/test_nested_resolve.py
rename to tests/sources/test_nested_resolve.py
index 892a0ed..07da684 100644
--- a/tests/sources_loader/test_nested_resolve.py
+++ b/tests/sources/test_nested_resolve.py
@@ -7,10 +7,8 @@
import pytest
-from dature import F, Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
-from dature.sources_loader.env_ import EnvFileLoader, EnvLoader
+from dature import DockerSecretsSource, EnvFileSource, EnvSource, F, Source, load
+from dature.errors import DatureConfigError, FieldLoadError
@dataclass
@@ -83,10 +81,10 @@ def set_data(data: dict[str, str]) -> None:
def make_metadata(**kwargs: Any) -> Source:
if loader_type == "env":
- return Source(loader=EnvLoader, prefix="MYAPP__", **kwargs)
+ return EnvSource(prefix="MYAPP__", **kwargs)
if loader_type == "envfile":
- return Source(file_=tmp_path / ".env", loader=EnvFileLoader, prefix="MYAPP__", **kwargs)
- return Source(file_=tmp_path, loader=DockerSecretsLoader, **kwargs)
+ return EnvFileSource(file=tmp_path / ".env", prefix="MYAPP__", **kwargs)
+ return DockerSecretsSource(dir_=tmp_path, **kwargs)
return FlatLoaderSetup(set_data=set_data, make_metadata=make_metadata)
@@ -103,14 +101,14 @@ class TestNestedResolve:
def test_json_only(self, flat_loader_setup: FlatLoaderSetup) -> None:
flat_loader_setup.set_data({"var": '{"foo": "from_json", "bar": "from_json"}'})
- result = load(flat_loader_setup.make_metadata(), NestedConfig)
+ result = load(flat_loader_setup.make_metadata(), schema=NestedConfig)
assert result == NestedConfig(var=NestedVar(foo="from_json", bar="from_json"))
def test_flat_only(self, flat_loader_setup: FlatLoaderSetup) -> None:
flat_loader_setup.set_data({"var__foo": "from_flat", "var__bar": "from_flat"})
- result = load(flat_loader_setup.make_metadata(), NestedConfig)
+ result = load(flat_loader_setup.make_metadata(), schema=NestedConfig)
assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat"))
@@ -141,7 +139,7 @@ def test_both_sources(
result = load(
flat_loader_setup.make_metadata(**_strategy_kwargs(strategy, local=local)),
- NestedConfig,
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source))
@@ -180,8 +178,8 @@ def test_partial_missing_field(
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(loader=EnvLoader, prefix="MYAPP__", **_strategy_kwargs(strategy, local=local)),
- NestedConfig,
+ EnvSource(prefix="MYAPP__", **_strategy_kwargs(strategy, local=local)),
+ schema=NestedConfig,
)
err = exc_info.value
@@ -202,13 +200,12 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- file_=env_file,
- loader=EnvFileLoader,
+ EnvFileSource(
+ file=env_file,
prefix="MYAPP__",
**_strategy_kwargs("flat", local=local),
),
- NestedConfig,
+ schema=NestedConfig,
)
err = exc_info.value
@@ -225,13 +222,12 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- file_=env_file,
- loader=EnvFileLoader,
+ EnvFileSource(
+ file=env_file,
prefix="MYAPP__",
**_strategy_kwargs("json", local=local),
),
- NestedConfig,
+ schema=NestedConfig,
)
err = exc_info.value
@@ -256,8 +252,8 @@ def test_partial_missing_field_flat(self, tmp_path: Path, local: bool) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("flat", local=local)),
- NestedConfig,
+ DockerSecretsSource(dir_=tmp_path, **_strategy_kwargs("flat", local=local)),
+ schema=NestedConfig,
)
err = exc_info.value
@@ -274,8 +270,8 @@ def test_partial_missing_field_json(self, tmp_path: Path, local: bool) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, **_strategy_kwargs("json", local=local)),
- NestedConfig,
+ DockerSecretsSource(dir_=tmp_path, **_strategy_kwargs("json", local=local)),
+ schema=NestedConfig,
)
err = exc_info.value
@@ -295,8 +291,8 @@ def test_json_invalid_flat_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat
monkeypatch.setenv("MYAPP__VAR__BAR", "20")
result = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -308,8 +304,8 @@ def test_json_invalid_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -334,8 +330,8 @@ def test_flat_invalid_json_strategy_succeeds(self, monkeypatch: pytest.MonkeyPat
monkeypatch.setenv("MYAPP__VAR__BAR", "not_a_number")
result = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -347,8 +343,8 @@ def test_flat_invalid_flat_strategy_errors(self, monkeypatch: pytest.MonkeyPatch
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -376,8 +372,8 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None:
)
result = load(
- Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedIntConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -390,8 +386,8 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedIntConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -421,8 +417,8 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None:
)
result = load(
- Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedIntConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -435,8 +431,8 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=env_file, loader=EnvFileLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedIntConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -469,8 +465,8 @@ def test_json_invalid_flat_strategy_succeeds(self, tmp_path: Path) -> None:
(tmp_path / "var__bar").write_text("20")
result = load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -482,8 +478,8 @@ def test_json_invalid_json_strategy_errors(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -508,8 +504,8 @@ def test_flat_invalid_json_strategy_succeeds(self, tmp_path: Path) -> None:
(tmp_path / "var__bar").write_text("not_a_number")
result = load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -521,8 +517,8 @@ def test_flat_invalid_flat_strategy_errors(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="flat"),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -553,8 +549,8 @@ def test_multiline_json_strategy_errors(self, monkeypatch: pytest.MonkeyPatch) -
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -582,8 +578,8 @@ def test_multiline_flat_strategy_ignores_json(self, monkeypatch: pytest.MonkeyPa
monkeypatch.setenv("MYAPP__VAR__BAR", "20")
result = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedIntConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
assert result == NestedIntConfig(var=NestedIntVar(foo=10, bar=20))
@@ -616,15 +612,14 @@ def test_different_strategies(
monkeypatch.setenv("MYAPP__VAR2__BAR", "flat2")
result = load(
- Source(
- loader=EnvLoader,
+ EnvSource(
prefix="MYAPP__",
nested_resolve={
var1_strategy: (F[TwoNestedConfig].var1,),
var2_strategy: (F[TwoNestedConfig].var2,),
},
),
- TwoNestedConfig,
+ schema=TwoNestedConfig,
)
assert result == TwoNestedConfig(var1=expected_var1, var2=expected_var2)
@@ -653,13 +648,12 @@ def test_local_overrides_global(
monkeypatch.setenv("MYAPP__VAR__BAR", "from_flat")
result = load(
- Source(
- loader=EnvLoader,
+ EnvSource(
prefix="MYAPP__",
nested_resolve_strategy=global_strategy,
nested_resolve={local_strategy: (F[NestedConfig].var,)},
),
- NestedConfig,
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source))
@@ -674,13 +668,8 @@ def test_flat_strategy_single_underscore(self, monkeypatch: pytest.MonkeyPatch)
monkeypatch.setenv("APP_VAR_BAR", "from_flat")
result = load(
- Source(
- loader=EnvLoader,
- prefix="APP_",
- split_symbols="_",
- nested_resolve_strategy="flat",
- ),
- NestedConfig,
+ EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="flat"),
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat"))
@@ -692,13 +681,8 @@ def test_json_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- loader=EnvLoader,
- prefix="APP_",
- split_symbols="_",
- nested_resolve_strategy="json",
- ),
- NestedIntConfig,
+ EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -716,13 +700,8 @@ def test_flat_strategy_single_underscore_error(self, monkeypatch: pytest.MonkeyP
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- loader=EnvLoader,
- prefix="APP_",
- split_symbols="_",
- nested_resolve_strategy="flat",
- ),
- NestedIntConfig,
+ EnvSource(prefix="APP_", split_symbols="_", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -746,7 +725,7 @@ def test_only_json_no_conflict(
result = load(
flat_loader_setup.make_metadata(nested_resolve_strategy=strategy),
- NestedConfig,
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2"))
@@ -761,7 +740,7 @@ def test_only_flat_no_conflict(
result = load(
flat_loader_setup.make_metadata(nested_resolve_strategy=strategy),
- NestedConfig,
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo="val1", bar="val2"))
@@ -780,8 +759,8 @@ def test_deep_env(self, monkeypatch: pytest.MonkeyPatch, strategy: str, expected
monkeypatch.setenv("MYAPP__VAR__SUB__KEY", "from_flat")
result = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy=strategy),
- DeepConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy=strategy),
+ schema=DeepConfig,
)
assert result == DeepConfig(var=DeepVar(sub=DeepSub(key=expected_key)))
@@ -793,13 +772,8 @@ def test_flat_strategy_deep_envfile(self, tmp_path: Path) -> None:
)
result = load(
- Source(
- file_=env_file,
- loader=EnvFileLoader,
- prefix="MYAPP__",
- nested_resolve_strategy="flat",
- ),
- DeepConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=DeepConfig,
)
assert result == DeepConfig(var=DeepVar(sub=DeepSub(key="from_flat")))
@@ -809,8 +783,8 @@ def test_json_strategy_deep_docker_secrets(self, tmp_path: Path) -> None:
(tmp_path / "var__sub__key").write_text("from_flat")
result = load(
- Source(file_=tmp_path, loader=DockerSecretsLoader, nested_resolve_strategy="json"),
- DeepConfig,
+ DockerSecretsSource(dir_=tmp_path, nested_resolve_strategy="json"),
+ schema=DeepConfig,
)
assert result == DeepConfig(var=DeepVar(sub=DeepSub(key="from_json")))
@@ -826,13 +800,8 @@ def test_flat_strategy_error(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- file_=tmp_path,
- loader=DockerSecretsLoader,
- prefix="myapp__",
- nested_resolve_strategy="flat",
- ),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, prefix="myapp__", nested_resolve_strategy="flat"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -851,13 +820,8 @@ def test_json_strategy_error(self, tmp_path: Path) -> None:
with pytest.raises(DatureConfigError) as exc_info:
load(
- Source(
- file_=tmp_path,
- loader=DockerSecretsLoader,
- prefix="myapp__",
- nested_resolve_strategy="json",
- ),
- NestedIntConfig,
+ DockerSecretsSource(dir_=tmp_path, prefix="myapp__", nested_resolve_strategy="json"),
+ schema=NestedIntConfig,
)
err = exc_info.value
@@ -879,40 +843,30 @@ def test_flat_first_then_json(self, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setenv("MYAPP__VAR", '{"foo": "from_json", "bar": "from_json"}')
result_flat = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="flat"),
- NestedConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedConfig,
)
result_json = load(
- Source(loader=EnvLoader, prefix="MYAPP__", nested_resolve_strategy="json"),
- NestedConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedConfig,
)
assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat"))
assert result_json == NestedConfig(var=NestedVar(foo="from_json", bar="from_json"))
- def test_envfile_reversed_order(self, tmp_path: Path) -> None:
+ def test_envfile_reversed_order(self, tmp_path: Path) -> None:
env_file = tmp_path / ".env"
env_file.write_text(
'MYAPP__VAR__FOO=from_flat\nMYAPP__VAR__BAR=from_flat\nMYAPP__VAR={"foo": "from_json", "bar": "from_json"}',
)
result_flat = load(
- Source(
- file_=env_file,
- loader=EnvFileLoader,
- prefix="MYAPP__",
- nested_resolve_strategy="flat",
- ),
- NestedConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="flat"),
+ schema=NestedConfig,
)
result_json = load(
- Source(
- file_=env_file,
- loader=EnvFileLoader,
- prefix="MYAPP__",
- nested_resolve_strategy="json",
- ),
- NestedConfig,
+ EnvFileSource(file=env_file, prefix="MYAPP__", nested_resolve_strategy="json"),
+ schema=NestedConfig,
)
assert result_flat == NestedConfig(var=NestedVar(foo="from_flat", bar="from_flat"))
@@ -938,13 +892,9 @@ def test_empty_dict_uses_global(
monkeypatch.setenv("MYAPP__VAR__BAR", "from_flat")
result = load(
- Source(
- loader=EnvLoader,
- prefix="MYAPP__",
- nested_resolve_strategy=strategy,
- nested_resolve={},
- ),
- NestedConfig,
+ EnvSource(prefix="MYAPP__", nested_resolve_strategy=strategy, nested_resolve={}),
+ schema=NestedConfig,
)
assert result == NestedConfig(var=NestedVar(foo=expected_source, bar=expected_source))
diff --git a/tests/sources/test_retort.py b/tests/sources/test_retort.py
new file mode 100644
index 0000000..5ac326c
--- /dev/null
+++ b/tests/sources/test_retort.py
@@ -0,0 +1,404 @@
+from dataclasses import dataclass
+
+import pytest
+from adaptix import NameStyle as AdaptixNameStyle
+from adaptix import Retort
+
+from dature.field_path import F
+from dature.sources.base import Source
+from dature.sources.retort import (
+ _retort_cache_key,
+ build_base_recipe,
+ create_probe_retort,
+ create_retort,
+ create_validating_retort,
+ ensure_retort,
+ get_adaptix_name_style,
+ get_name_mapping_providers,
+ get_validator_providers,
+ transform_to_dataclass,
+)
+from dature.types import JSONValue
+
+
+@dataclass(kw_only=True)
+class MockSource(Source):
+ format_name = "mock"
+ location_label = "MOCK"
+ test_data: JSONValue = None
+
+ def __post_init__(self) -> None:
+ if self.test_data is None:
+ self.test_data = {}
+
+ def _load(self) -> JSONValue:
+ return self.test_data
+
+
+class TestGetAdaptixNameStyle:
+ @pytest.mark.parametrize(
+ ("name_style", "expected"),
+ [
+ ("lower_snake", AdaptixNameStyle.LOWER_SNAKE),
+ ("upper_snake", AdaptixNameStyle.UPPER_SNAKE),
+ ("lower_camel", AdaptixNameStyle.CAMEL),
+ ("upper_camel", AdaptixNameStyle.PASCAL),
+ ("lower_kebab", AdaptixNameStyle.LOWER_KEBAB),
+ ("upper_kebab", AdaptixNameStyle.UPPER_KEBAB),
+ ],
+ )
+ def test_maps_style(self, name_style, expected):
+ result = get_adaptix_name_style(name_style)
+
+ assert result == expected
+
+ def test_none_returns_none(self):
+ result = get_adaptix_name_style(None)
+
+ assert result is None
+
+
+class TestGetNameMappingProviders:
+ def test_none_none_returns_empty(self):
+ result = get_name_mapping_providers(None, None)
+
+ assert result == []
+
+ def test_name_style_only(self):
+ result = get_name_mapping_providers("lower_camel", None)
+
+ assert len(result) == 1
+
+ def test_field_mapping_with_field_path(self):
+ @dataclass
+ class Config:
+ name: str
+
+ field_mapping = {F[Config].name: "fullName"}
+ result = get_name_mapping_providers(None, field_mapping)
+
+ assert len(result) >= 1
+
+ def test_field_mapping_with_string_owner(self):
+ field_mapping = {F["Config"].name: "fullName"}
+ result = get_name_mapping_providers(None, field_mapping)
+
+ assert len(result) >= 1
+
+ def test_combined_name_style_and_field_mapping(self):
+ @dataclass
+ class Config:
+ user_name: str
+
+ field_mapping = {F[Config].user_name: "full_name"}
+ result = get_name_mapping_providers("lower_camel", field_mapping)
+
+ assert len(result) >= 2
+
+ def test_nested_field_path(self):
+ @dataclass
+ class Inner:
+ city: str
+
+ @dataclass
+ class Outer:
+ inner: Inner
+
+ field_mapping = {F[Outer].inner.city: "cityName"}
+ result = get_name_mapping_providers(None, field_mapping)
+
+ assert len(result) >= 1
+
+
+class TestGetValidatorProviders:
+ def test_no_validators_returns_empty(self):
+ @dataclass
+ class Config:
+ name: str
+ port: int
+
+ result = get_validator_providers(Config)
+
+ assert result == []
+
+
+class TestBuildBaseRecipe:
+ def test_default_source(self):
+ source = MockSource()
+ result = build_base_recipe(source)
+
+ assert len(result) > 0
+
+ def test_with_resolved_type_loaders(self):
+ source = MockSource()
+ custom_loaders = {str: lambda x: str(x).upper()}
+
+ result_default = build_base_recipe(source)
+ result_custom = build_base_recipe(source, resolved_type_loaders=custom_loaders)
+
+ assert len(result_custom) == len(result_default) + 1
+
+ def test_with_source_type_loaders(self):
+ source = MockSource(type_loaders={str: lambda x: str(x).upper()})
+
+ result_with = build_base_recipe(source)
+ result_without = build_base_recipe(MockSource())
+
+ assert len(result_with) == len(result_without) + 1
+
+ def test_resolved_type_loaders_override_source(self):
+ source = MockSource(type_loaders={str: lambda _: "source"})
+ resolved = {int: lambda x: x + 1}
+
+ result = build_base_recipe(source, resolved_type_loaders=resolved)
+
+ result_with_source_loaders = build_base_recipe(
+ MockSource(type_loaders={str: lambda _: "source"}),
+ )
+ result_with_resolved = build_base_recipe(
+ MockSource(),
+ resolved_type_loaders=resolved,
+ )
+
+ assert len(result) == len(result_with_resolved)
+ assert len(result) != len(result_with_source_loaders) or len(resolved) == len(source.type_loaders or {})
+
+
+class TestCreateRetort:
+ def test_returns_retort(self):
+ source = MockSource()
+
+ result = create_retort(source)
+
+ assert isinstance(result, Retort)
+
+
+class TestCreateProbeRetort:
+ def test_returns_retort(self):
+ source = MockSource()
+
+ result = create_probe_retort(source)
+
+ assert isinstance(result, Retort)
+
+
+class TestCreateValidatingRetort:
+ def test_returns_retort(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+
+ result = create_validating_retort(source, Config)
+
+ assert isinstance(result, Retort)
+
+ def test_with_root_validators(self):
+ @dataclass
+ class Config:
+ name: str
+
+ @dataclass(frozen=True, slots=True)
+ class AlwaysTrue:
+ def get_validator_func(self):
+ return lambda _: True
+
+ def get_error_message(self):
+ return "always true"
+
+ source = MockSource(root_validators=(AlwaysTrue(),))
+
+ result = create_validating_retort(source, Config)
+
+ assert isinstance(result, Retort)
+
+
+class TestRetortCacheKey:
+ def test_none_loaders_produces_empty_frozenset(self):
+ @dataclass
+ class Config:
+ name: str
+
+ key = _retort_cache_key(Config, None)
+
+ assert key == (Config, frozenset())
+
+ def test_same_loaders_produce_equal_keys(self):
+ @dataclass
+ class Config:
+ name: str
+
+ loaders = {str: lambda x: x}
+
+ key1 = _retort_cache_key(Config, loaders)
+ key2 = _retort_cache_key(Config, loaders)
+
+ assert key1 == key2
+
+ def test_different_loaders_produce_different_keys(self):
+ @dataclass
+ class Config:
+ name: str
+
+ loaders_a = {str: lambda x: x}
+ loaders_b = {int: lambda x: x}
+
+ key_a = _retort_cache_key(Config, loaders_a)
+ key_b = _retort_cache_key(Config, loaders_b)
+
+ assert key_a != key_b
+
+ def test_different_schemas_produce_different_keys(self):
+ @dataclass
+ class ConfigA:
+ name: str
+
+ @dataclass
+ class ConfigB:
+ name: str
+
+ key_a = _retort_cache_key(ConfigA, None)
+ key_b = _retort_cache_key(ConfigB, None)
+
+ assert key_a != key_b
+
+
+class TestTransformToDataclass:
+ def test_basic_transform(self):
+ @dataclass
+ class Config:
+ name: str
+ port: int
+
+ source = MockSource()
+ data = {"name": "TestApp", "port": 8080}
+
+ result = transform_to_dataclass(source, data, Config)
+
+ assert result == Config(name="TestApp", port=8080)
+
+ def test_caches_retort(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ key = _retort_cache_key(Config, None)
+ assert key not in source.retorts
+
+ transform_to_dataclass(source, {"name": "a"}, Config)
+
+ assert key in source.retorts
+
+ def test_reuses_cached_retort(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ transform_to_dataclass(source, {"name": "a"}, Config)
+ key = _retort_cache_key(Config, None)
+ cached = source.retorts[key]
+
+ transform_to_dataclass(source, {"name": "b"}, Config)
+
+ assert source.retorts[key] is cached
+
+ def test_different_type_loaders_create_separate_cache_entries(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ loaders_a = {str: lambda x: str(x).upper()}
+ loaders_b = {str: lambda x: str(x).lower()}
+
+ transform_to_dataclass(source, {"name": "hello"}, Config, resolved_type_loaders=loaders_a)
+ transform_to_dataclass(source, {"name": "hello"}, Config, resolved_type_loaders=loaders_b)
+
+ key_a = _retort_cache_key(Config, loaders_a)
+ key_b = _retort_cache_key(Config, loaders_b)
+ assert key_a in source.retorts
+ assert key_b in source.retorts
+ assert source.retorts[key_a] is not source.retorts[key_b]
+
+ def test_type_loaders_vs_none_create_separate_cache_entries(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ custom_loaders = {str: lambda x: str(x).upper()}
+
+ transform_to_dataclass(source, {"name": "a"}, Config)
+ transform_to_dataclass(source, {"name": "a"}, Config, resolved_type_loaders=custom_loaders)
+
+ key_none = _retort_cache_key(Config, None)
+ key_custom = _retort_cache_key(Config, custom_loaders)
+ assert key_none in source.retorts
+ assert key_custom in source.retorts
+ assert source.retorts[key_none] is not source.retorts[key_custom]
+
+ def test_same_type_loaders_reuse_cached_retort(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ custom_loaders = {str: lambda x: str(x).upper()}
+
+ transform_to_dataclass(source, {"name": "a"}, Config, resolved_type_loaders=custom_loaders)
+ key = _retort_cache_key(Config, custom_loaders)
+ cached = source.retorts[key]
+
+ transform_to_dataclass(source, {"name": "b"}, Config, resolved_type_loaders=custom_loaders)
+
+ assert source.retorts[key] is cached
+
+
+class TestEnsureRetort:
+ def test_creates_retort(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ key = _retort_cache_key(Config, None)
+ assert key not in source.retorts
+
+ ensure_retort(source, Config)
+
+ assert key in source.retorts
+
+ def test_does_not_overwrite_existing(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ ensure_retort(source, Config)
+ key = _retort_cache_key(Config, None)
+ existing = source.retorts[key]
+
+ ensure_retort(source, Config)
+
+ assert source.retorts[key] is existing
+
+ def test_different_type_loaders_create_separate_cache_entries(self):
+ @dataclass
+ class Config:
+ name: str
+
+ source = MockSource()
+ loaders_a = {str: lambda x: str(x).upper()}
+ loaders_b = {str: lambda x: str(x).lower()}
+
+ ensure_retort(source, Config, resolved_type_loaders=loaders_a)
+ ensure_retort(source, Config, resolved_type_loaders=loaders_b)
+
+ key_a = _retort_cache_key(Config, loaders_a)
+ key_b = _retort_cache_key(Config, loaders_b)
+ assert key_a in source.retorts
+ assert key_b in source.retorts
+ assert source.retorts[key_a] is not source.retorts[key_b]
diff --git a/tests/sources_loader/test_toml10_.py b/tests/sources/test_toml10_.py
similarity index 78%
rename from tests/sources_loader/test_toml10_.py
rename to tests/sources/test_toml10_.py
index cb49094..cf18ce1 100644
--- a/tests/sources_loader/test_toml10_.py
+++ b/tests/sources/test_toml10_.py
@@ -1,23 +1,28 @@
-"""Tests for toml_ module (Toml10Loader)."""
+"""Tests for toml_ module (Toml10Source)."""
from dataclasses import dataclass
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.toml_ import Toml10Loader
+from dature import Toml10Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestToml10Loader:
- """Tests for Toml10Loader class."""
+class TestToml10SourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert Toml10Source.format_name == "toml1.0"
+ assert Toml10Source.location_label == "FILE"
+
+
+class TestToml10Source:
+ """Tests for Toml10Source class."""
def test_comprehensive_type_conversion(self, all_types_toml10_file: Path):
"""Test loading TOML with full type coercion to dataclass."""
- result = load(Source(file_=all_types_toml10_file, loader=Toml10Loader), AllPythonTypesCompact)
+ result = load(Toml10Source(file=all_types_toml10_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +42,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_toml_file, loader=Toml10Loader, prefix="app"),
- PrefixedConfig,
+ Toml10Source(file=prefixed_toml_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -48,8 +53,8 @@ def test_toml_empty_file(self, tmp_path: Path):
toml_file = tmp_path / "empty.toml"
toml_file.write_text("")
- loader = Toml10Loader()
- data = loader._load(toml_file)
+ loader = Toml10Source(file=toml_file)
+ data = loader._load()
assert data == {}
@@ -65,7 +70,7 @@ class Config:
name: str
port: int
- result = load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ result = load(Toml10Source(file=toml_file), schema=Config)
assert result.name == "MyApp"
assert result.port == 9090
@@ -81,7 +86,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ result = load(Toml10Source(file=toml_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -95,7 +100,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ result = load(Toml10Source(file=toml_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -109,7 +114,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat
class Config:
value: str
- result = load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ result = load(Toml10Source(file=toml_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -122,7 +127,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ load(Toml10Source(file=toml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -145,7 +150,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=toml_file, loader=Toml10Loader), Config)
+ load(Toml10Source(file=toml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/sources_loader/test_toml11_.py b/tests/sources/test_toml11_.py
similarity index 78%
rename from tests/sources_loader/test_toml11_.py
rename to tests/sources/test_toml11_.py
index b96ca89..d9cf1c3 100644
--- a/tests/sources_loader/test_toml11_.py
+++ b/tests/sources/test_toml11_.py
@@ -1,23 +1,28 @@
-"""Tests for toml_ module (Toml11Loader)."""
+"""Tests for toml_ module (Toml11Source)."""
from dataclasses import dataclass
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.toml_ import Toml11Loader
+from dature import Toml11Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestToml11Loader:
- """Tests for Toml11Loader class."""
+class TestToml11SourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert Toml11Source.format_name == "toml1.1"
+ assert Toml11Source.location_label == "FILE"
+
+
+class TestToml11Source:
+ """Tests for Toml11Source class."""
def test_comprehensive_type_conversion(self, all_types_toml11_file: Path):
"""Test loading TOML with full type coercion to dataclass."""
- result = load(Source(file_=all_types_toml11_file, loader=Toml11Loader), AllPythonTypesCompact)
+ result = load(Toml11Source(file=all_types_toml11_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +42,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_toml_file, loader=Toml11Loader, prefix="app"),
- PrefixedConfig,
+ Toml11Source(file=prefixed_toml_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -48,8 +53,8 @@ def test_toml_empty_file(self, tmp_path: Path):
toml_file = tmp_path / "empty.toml"
toml_file.write_text("")
- loader = Toml11Loader()
- data = loader._load(toml_file)
+ loader = Toml11Source(file=toml_file)
+ data = loader._load()
assert data == {}
@@ -65,7 +70,7 @@ class Config:
name: str
port: int
- result = load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ result = load(Toml11Source(file=toml_file), schema=Config)
assert result.name == "MyApp"
assert result.port == 9090
@@ -81,7 +86,7 @@ def test_toml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ result = load(Toml11Source(file=toml_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -95,7 +100,7 @@ def test_toml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ result = load(Toml11Source(file=toml_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -109,7 +114,7 @@ def test_toml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat
class Config:
value: str
- result = load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ result = load(Toml11Source(file=toml_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -122,7 +127,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ load(Toml11Source(file=toml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -145,7 +150,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=toml_file, loader=Toml11Loader), Config)
+ load(Toml11Source(file=toml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/sources_loader/test_yaml11_.py b/tests/sources/test_yaml11_.py
similarity index 79%
rename from tests/sources_loader/test_yaml11_.py
rename to tests/sources/test_yaml11_.py
index 836feb3..ec913a1 100644
--- a/tests/sources_loader/test_yaml11_.py
+++ b/tests/sources/test_yaml11_.py
@@ -1,23 +1,28 @@
-"""Tests for yaml_ module (Yaml11Loader)."""
+"""Tests for yaml_ module (Yaml11Source)."""
from dataclasses import dataclass
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.yaml_ import Yaml11Loader
+from dature import Yaml11Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestYaml11Loader:
- """Tests for Yaml11Loader class."""
+class TestYaml11SourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert Yaml11Source.format_name == "yaml1.1"
+ assert Yaml11Source.location_label == "FILE"
+
+
+class TestYaml11Source:
+ """Tests for Yaml11Source class."""
def test_comprehensive_type_conversion(self, all_types_yaml11_file: Path):
"""Test loading YAML with full type coercion to dataclass."""
- result = load(Source(file_=all_types_yaml11_file, loader=Yaml11Loader), AllPythonTypesCompact)
+ result = load(Yaml11Source(file=all_types_yaml11_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +42,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_yaml_file, loader=Yaml11Loader, prefix="app"),
- PrefixedConfig,
+ Yaml11Source(file=prefixed_yaml_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -62,8 +67,8 @@ class EnvConfig:
services: Services
result = load(
- Source(file_=yaml_config_with_env_vars_file, loader=Yaml11Loader),
- EnvConfig,
+ Yaml11Source(file=yaml_config_with_env_vars_file),
+ schema=EnvConfig,
)
assert result.database_url == "postgresql://localhost/db"
@@ -82,7 +87,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config)
+ result = load(Yaml11Source(file=yaml_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -96,7 +101,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config)
+ result = load(Yaml11Source(file=yaml_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -110,7 +115,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat
class Config:
value: str
- result = load(Source(file_=yaml_file, loader=Yaml11Loader), Config)
+ result = load(Yaml11Source(file=yaml_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -119,8 +124,8 @@ def test_yaml_empty_file(self, tmp_path: Path):
yaml_file = tmp_path / "empty.yaml"
yaml_file.write_text("")
- loader = Yaml11Loader()
- data = loader._load(yaml_file)
+ loader = Yaml11Source(file=yaml_file)
+ data = loader._load()
assert data is None
@@ -133,7 +138,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=yaml_file, loader=Yaml11Loader), Config)
+ load(Yaml11Source(file=yaml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -156,7 +161,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=yaml_file, loader=Yaml11Loader), Config)
+ load(Yaml11Source(file=yaml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/sources_loader/test_yaml12_.py b/tests/sources/test_yaml12_.py
similarity index 79%
rename from tests/sources_loader/test_yaml12_.py
rename to tests/sources/test_yaml12_.py
index a7fa425..c55d1e8 100644
--- a/tests/sources_loader/test_yaml12_.py
+++ b/tests/sources/test_yaml12_.py
@@ -1,23 +1,28 @@
-"""Tests for yaml_ module (Yaml12Loader)."""
+"""Tests for yaml_ module (Yaml12Source)."""
from dataclasses import dataclass
from pathlib import Path
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError, FieldLoadError
-from dature.sources_loader.yaml_ import Yaml12Loader
+from dature import Yaml12Source, load
+from dature.errors import DatureConfigError, FieldLoadError
from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
+from tests.sources.checker import assert_all_types_equal
-class TestYaml12Loader:
- """Tests for Yaml12Loader class."""
+class TestYaml12SourceDisplayProperties:
+ def test_format_name_and_label(self):
+ assert Yaml12Source.format_name == "yaml1.2"
+ assert Yaml12Source.location_label == "FILE"
+
+
+class TestYaml12Source:
+ """Tests for Yaml12Source class."""
def test_comprehensive_type_conversion(self, all_types_yaml12_file: Path):
"""Test loading YAML with full type coercion to dataclass."""
- result = load(Source(file_=all_types_yaml12_file, loader=Yaml12Loader), AllPythonTypesCompact)
+ result = load(Yaml12Source(file=all_types_yaml12_file), schema=AllPythonTypesCompact)
assert_all_types_equal(result, EXPECTED_ALL_TYPES)
@@ -37,8 +42,8 @@ class PrefixedConfig:
)
result = load(
- Source(file_=prefixed_yaml_file, loader=Yaml12Loader, prefix="app"),
- PrefixedConfig,
+ Yaml12Source(file=prefixed_yaml_file, prefix="app"),
+ schema=PrefixedConfig,
)
assert result == expected_data
@@ -62,8 +67,8 @@ class EnvConfig:
services: Services
result = load(
- Source(file_=yaml_config_with_env_vars_file, loader=Yaml12Loader),
- EnvConfig,
+ Yaml12Source(file=yaml_config_with_env_vars_file),
+ schema=EnvConfig,
)
assert result.database_url == "postgresql://localhost/db"
@@ -82,7 +87,7 @@ def test_yaml_env_var_partial_substitution(self, tmp_path: Path, monkeypatch):
class Config:
url: str
- result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config)
+ result = load(Yaml12Source(file=yaml_file), schema=Config)
assert result.url == "http://localhost:8080/api"
@@ -96,7 +101,7 @@ def test_yaml_dollar_sign_mid_string_existing_var(self, tmp_path: Path, monkeypa
class Config:
value: str
- result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config)
+ result = load(Yaml12Source(file=yaml_file), schema=Config)
assert result.value == "prefixreplaced/suffix"
@@ -110,7 +115,7 @@ def test_yaml_dollar_sign_mid_string_missing_var(self, tmp_path: Path, monkeypat
class Config:
value: str
- result = load(Source(file_=yaml_file, loader=Yaml12Loader), Config)
+ result = load(Yaml12Source(file=yaml_file), schema=Config)
assert result.value == "prefix$nonexistent/suffix"
@@ -119,8 +124,8 @@ def test_yaml_empty_file(self, tmp_path: Path):
yaml_file = tmp_path / "empty.yaml"
yaml_file.write_text("")
- loader = Yaml12Loader()
- data = loader._load(yaml_file)
+ loader = Yaml12Source(file=yaml_file)
+ data = loader._load()
assert data is None
@@ -133,7 +138,7 @@ class Config:
count: int
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=yaml_file, loader=Yaml12Loader), Config)
+ load(Yaml12Source(file=yaml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
@@ -156,7 +161,7 @@ class Config:
flag: bool
with pytest.raises(DatureConfigError) as exc_info:
- load(Source(file_=yaml_file, loader=Yaml12Loader), Config)
+ load(Yaml12Source(file=yaml_file), schema=Config)
err = exc_info.value
assert len(err.exceptions) == 1
diff --git a/tests/sources_loader/__init__.py b/tests/sources_loader/__init__.py
deleted file mode 100644
index f03c2b7..0000000
--- a/tests/sources_loader/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Tests for sources_loader."""
diff --git a/tests/sources_loader/test_docker_secrets.py b/tests/sources_loader/test_docker_secrets.py
deleted file mode 100644
index c130f70..0000000
--- a/tests/sources_loader/test_docker_secrets.py
+++ /dev/null
@@ -1,81 +0,0 @@
-from dataclasses import dataclass
-from pathlib import Path
-
-from dature import Source, load
-from dature.sources_loader.docker_secrets import DockerSecretsLoader
-from examples.all_types_dataclass import EXPECTED_ALL_TYPES, AllPythonTypesCompact
-from tests.sources_loader.checker import assert_all_types_equal
-
-
-class TestDockerSecretsLoader:
- def test_comprehensive_type_conversion(self, all_types_docker_secrets_dir: Path):
- result = load(
- Source(file_=all_types_docker_secrets_dir, loader=DockerSecretsLoader),
- AllPythonTypesCompact,
- )
-
- assert_all_types_equal(result, EXPECTED_ALL_TYPES)
-
- def test_custom_split_symbols(self, tmp_path: Path):
- (tmp_path / "db.host").write_text("localhost")
- (tmp_path / "db.port").write_text("5432")
-
- loader = DockerSecretsLoader(split_symbols=".")
- result = loader.load_raw(tmp_path)
-
- assert result.data == {"db": {"host": "localhost", "port": 5432}}
-
- def test_prefix_filtering(self, tmp_path: Path):
- (tmp_path / "APP_name").write_text("myapp")
- (tmp_path / "APP_port").write_text("8080")
- (tmp_path / "OTHER_key").write_text("ignored")
-
- loader = DockerSecretsLoader(prefix="APP_")
- data = loader._load(tmp_path)
-
- assert data == {"name": "myapp", "port": "8080"}
-
- def test_skip_subdirectories(self, tmp_path: Path):
- (tmp_path / "name").write_text("myapp")
- subdir = tmp_path / "subdir"
- subdir.mkdir()
- (subdir / "nested_file").write_text("should_be_ignored")
-
- loader = DockerSecretsLoader()
- data = loader._load(tmp_path)
-
- assert data == {"name": "myapp"}
-
- def test_empty_directory(self, tmp_path: Path):
- loader = DockerSecretsLoader()
- data = loader._load(tmp_path)
-
- assert data == {}
-
- def test_strip_file_content(self, tmp_path: Path):
- (tmp_path / "secret").write_text(" password123\n")
-
- loader = DockerSecretsLoader()
- data = loader._load(tmp_path)
-
- assert data == {"secret": "password123"}
-
- def test_env_var_substitution(self, tmp_path: Path, monkeypatch):
- monkeypatch.setenv("BASE_URL", "https://api.example.com")
-
- (tmp_path / "api_url").write_text("$BASE_URL/v1")
- (tmp_path / "base").write_text("$BASE_URL")
-
- @dataclass
- class Config:
- api_url: str
- base: str
-
- result = load(
- Source(file_=tmp_path, loader=DockerSecretsLoader),
- Config,
- )
-
- assert result.api_url == "https://api.example.com/v1"
- assert result.base == "https://api.example.com"
- assert result.base == "https://api.example.com"
diff --git a/tests/test_config.py b/tests/test_config.py
index d801687..3f6cb5f 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,3 +1,5 @@
+from typing import Any
+
import pytest
from dature.config import (
@@ -8,7 +10,7 @@
config,
configure,
)
-from dature.errors.exceptions import DatureConfigError
+from dature.errors import DatureConfigError
@pytest.mark.usefixtures("_reset_config")
@@ -27,27 +29,27 @@ class TestConfigure:
("kwargs", "attr_path", "expected"),
[
(
- {"masking": MaskingConfig(mask="[HIDDEN]")},
+ {"masking": {"mask": "[HIDDEN]"}},
("masking", "mask"),
"[HIDDEN]",
),
(
- {"masking": MaskingConfig(visible_prefix=3)},
+ {"masking": {"visible_prefix": 3}},
("masking", "visible_prefix"),
3,
),
(
- {"error_display": ErrorDisplayConfig(max_visible_lines=10)},
+ {"error_display": {"max_visible_lines": 10}},
("error_display", "max_visible_lines"),
10,
),
(
- {"loading": LoadingConfig(cache=False, debug=True)},
+ {"loading": {"cache": False, "debug": True}},
("loading", "cache"),
False,
),
(
- {"loading": LoadingConfig(cache=False, debug=True)},
+ {"loading": {"cache": False, "debug": True}},
("loading", "debug"),
True,
),
@@ -61,7 +63,7 @@ class TestConfigure:
],
)
def test_configure_overrides(
- kwargs: dict[str, MaskingConfig | ErrorDisplayConfig | LoadingConfig],
+ kwargs: dict[str, Any],
attr_path: tuple[str, str],
expected: str | int | bool,
) -> None:
@@ -74,17 +76,17 @@ def test_configure_overrides(
("kwargs", "unchanged_group", "expected_default"),
[
(
- {"masking": MaskingConfig(mask="###")},
+ {"masking": {"mask": "###"}},
"error_display",
ErrorDisplayConfig(),
),
(
- {"masking": MaskingConfig(mask="###")},
+ {"masking": {"mask": "###"}},
"loading",
LoadingConfig(),
),
(
- {"error_display": ErrorDisplayConfig(max_visible_lines=10)},
+ {"error_display": {"max_visible_lines": 10}},
"masking",
MaskingConfig(),
),
@@ -96,7 +98,7 @@ def test_configure_overrides(
],
)
def test_configure_preserves_other_groups(
- kwargs: dict[str, MaskingConfig | ErrorDisplayConfig | LoadingConfig],
+ kwargs: dict[str, Any],
unchanged_group: str,
expected_default: MaskingConfig | ErrorDisplayConfig | LoadingConfig,
) -> None:
@@ -104,6 +106,51 @@ def test_configure_preserves_other_groups(
assert getattr(config, unchanged_group) == expected_default
+@pytest.mark.usefixtures("_reset_config")
+class TestConfigureEmptyDictReset:
+ @staticmethod
+ @pytest.mark.parametrize(
+ ("group", "override", "expected_default"),
+ [
+ (
+ "masking",
+ {"mask": "*****", "visible_prefix": 2, "visible_suffix": 2},
+ MaskingConfig(),
+ ),
+ (
+ "error_display",
+ {"max_visible_lines": 10, "max_line_length": 200},
+ ErrorDisplayConfig(),
+ ),
+ (
+ "loading",
+ {"cache": False, "debug": True},
+ LoadingConfig(),
+ ),
+ ],
+ ids=["masking", "error_display", "loading"],
+ )
+ def test_empty_dict_resets_group_to_defaults(
+ group: str,
+ override: dict[str, Any],
+ expected_default: MaskingConfig | ErrorDisplayConfig | LoadingConfig,
+ ) -> None:
+ configure(**{group: override})
+ assert getattr(config, group) != expected_default
+
+ configure(**{group: {}})
+ assert getattr(config, group) == expected_default
+
+ @staticmethod
+ def test_empty_dict_preserves_other_groups() -> None:
+ configure(masking={"mask": "*****"}, error_display={"max_visible_lines": 10})
+
+ configure(masking={})
+
+ assert config.masking == MaskingConfig()
+ assert config.error_display.max_visible_lines == 10
+
+
@pytest.mark.usefixtures("_reset_config")
class TestEnvLoading:
@staticmethod
diff --git a/tests/test_custom_loader.py b/tests/test_custom_loader.py
index 70a5761..644141f 100644
--- a/tests/test_custom_loader.py
+++ b/tests/test_custom_loader.py
@@ -1,4 +1,4 @@
-"""Tests for custom loaders — subclassing BaseLoader."""
+"""Tests for custom sources — subclassing Source."""
import xml.etree.ElementTree as ET
from dataclasses import dataclass
@@ -7,24 +7,25 @@
from adaptix import Provider, loader
-from dature import Source, load
-from dature.sources_loader.base import BaseLoader
-from dature.sources_loader.loaders import bool_loader, float_from_string
+from dature import FileSource, load
+from dature.loaders import bool_loader, float_from_string
from dature.types import FileOrStream, JSONValue
-class XmlLoader(BaseLoader):
- display_name: ClassVar[str] = "xml"
+@dataclass(kw_only=True)
+class XmlSource(FileSource):
+ format_name: ClassVar[str] = "xml"
+ path_finder_class = None
- def _load(self, path: FileOrStream) -> JSONValue:
+ def _load_file(self, path: FileOrStream) -> JSONValue:
if not isinstance(path, Path):
- msg = "XmlLoader only supports file paths"
+ msg = "XmlSource only supports file paths"
raise TypeError(msg)
tree = ET.parse(path) # noqa: S314
root = tree.getroot()
return {child.tag: child.text or "" for child in root}
- def _additional_loaders(self) -> list[Provider]:
+ def additional_loaders(self) -> list[Provider]:
return [
loader(bool, bool_loader),
loader(float, float_from_string),
@@ -46,8 +47,8 @@ def test_xml_loader(self, tmp_path: Path) -> None:
)
result = load(
- Source(file_=xml_file, loader=XmlLoader),
- XmlConfig,
+ XmlSource(file=xml_file),
+ schema=XmlConfig,
)
assert result.host == "localhost"
diff --git a/tests/test_examples.py b/tests/test_examples.py
index 5343ec3..c03d911 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -2,25 +2,105 @@
import pathlib
import subprocess
import sys
+from dataclasses import dataclass
import pytest
examples_dir = pathlib.Path(__file__).parent.parent / "examples"
example_scripts = sorted(examples_dir.rglob("*.py"))
+_IS_POSIX = hasattr(os, "posix_spawn")
-@pytest.mark.parametrize("script_path", example_scripts, ids=lambda p: p.name)
-def test_example_execution(script_path):
- env = os.environ.copy()
- project_root = pathlib.Path(__file__).parent.parent / "src"
- env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "")
+@dataclass
+class ScriptResult:
+ returncode: int
+ stderr: str
+
+
+def _run_via_posix_spawn(script_path: pathlib.Path, env: dict[str, str]) -> ScriptResult:
+ """Use posix_spawn to avoid fork() segfaults on macOS CI."""
+ devnull = os.open(os.devnull, os.O_WRONLY)
+ stderr_r, stderr_w = os.pipe()
+
+ file_actions = [
+ (os.POSIX_SPAWN_CLOSE, 0),
+ (os.POSIX_SPAWN_DUP2, devnull, 1),
+ (os.POSIX_SPAWN_DUP2, stderr_w, 2),
+ ]
+
+ pid = os.posix_spawn(
+ sys.executable,
+ [sys.executable, str(script_path)],
+ env,
+ file_actions=file_actions,
+ )
+
+ os.close(devnull)
+ os.close(stderr_w)
+
+ with os.fdopen(stderr_r) as stderr_f:
+ stderr = stderr_f.read()
+
+ _, wait_status = os.waitpid(pid, 0)
+ returncode = os.waitstatus_to_exitcode(wait_status)
+ return ScriptResult(returncode=returncode, stderr=stderr)
+
+
+def _run_via_subprocess(script_path: pathlib.Path, env: dict[str, str]) -> ScriptResult:
result = subprocess.run( # noqa: PLW1510, S603
[sys.executable, str(script_path)],
capture_output=True,
text=True,
env=env,
)
+ return ScriptResult(returncode=result.returncode, stderr=result.stderr)
+
+
+def _run_example(script_path: pathlib.Path) -> ScriptResult:
+ project_root = pathlib.Path(__file__).parent.parent / "src"
+ env = os.environ.copy()
+ env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "")
+ env["PYTHONIOENCODING"] = "utf-8"
+
+ if _IS_POSIX:
+ return _run_via_posix_spawn(script_path, env)
+ return _run_via_subprocess(script_path, env)
+
+
+def _resolve_stderr_placeholders(template: str, script_path: pathlib.Path) -> str:
+ sources_dir = str(script_path.parent / "sources") + os.sep
+ shared_dir = str(script_path.parents[2] / "shared") + os.sep
+
+ return template.replace("{SOURCES_DIR}", sources_dir).replace("{SHARED_DIR}", shared_dir)
+
+def _normalize_trailing_whitespace(text: str) -> str:
+ return "\n".join(line.rstrip() for line in text.splitlines())
+
+
+_success_scripts = [s for s in example_scripts if not s.with_suffix(".stderr").exists()]
+_error_scripts = [s for s in example_scripts if s.with_suffix(".stderr").exists()]
+
+
+@pytest.mark.parametrize("script_path", _success_scripts, ids=lambda p: p.name)
+def test_example_execution(script_path: pathlib.Path) -> None:
+ result = _run_example(script_path)
assert result.returncode == 0, f"Script {script_path.name} Failed!\n\nError:\n{result.stderr}"
+
+
+@pytest.mark.parametrize("script_path", _error_scripts, ids=lambda p: p.name)
+def test_example_expected_error(script_path: pathlib.Path) -> None:
+ result = _run_example(script_path)
+ assert result.returncode != 0, f"Script {script_path.name} should have failed but exited with 0"
+
+ stderr_file = script_path.with_suffix(".stderr")
+ expected = _resolve_stderr_placeholders(stderr_file.read_text(), script_path)
+ normalized_stderr = _normalize_trailing_whitespace(result.stderr)
+ normalized_expected = _normalize_trailing_whitespace(expected.strip())
+ assert normalized_expected in normalized_stderr, (
+ f"Script {script_path.name} stderr mismatch.\n\n"
+ f"Expected fragment:\n{expected.strip()}\n\n"
+ f"Actual stderr:\n{result.stderr}"
+ )
diff --git a/tests/test_load_report.py b/tests/test_load_report.py
index e56f2dc..c1a98c4 100644
--- a/tests/test_load_report.py
+++ b/tests/test_load_report.py
@@ -8,8 +8,8 @@
import pytest
-from dature import Merge, MergeStrategy, Source, get_load_report, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, get_load_report, load
+from dature.errors import DatureConfigError
from dature.load_report import FieldOrigin, LoadReport, SourceEntry
from dature.validators.number import Ge
@@ -28,11 +28,9 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
debug=True,
)
@@ -40,7 +38,7 @@ class Config:
expected = LoadReport(
dataclass_name="Config",
- strategy=MergeStrategy.LAST_WINS,
+ strategy="last_wins",
sources=(
SourceEntry(
index=0,
@@ -88,12 +86,10 @@ class Config:
port: int
result = load(
- Merge(
- Source(file_=first),
- Source(file_=second),
- strategy=MergeStrategy.FIRST_WINS,
- ),
- Config,
+ JsonSource(file=first),
+ JsonSource(file=second),
+ schema=Config,
+ strategy="first_wins",
debug=True,
)
@@ -101,7 +97,7 @@ class Config:
expected = LoadReport(
dataclass_name="Config",
- strategy=MergeStrategy.FIRST_WINS,
+ strategy="first_wins",
sources=(
SourceEntry(
index=0,
@@ -153,11 +149,9 @@ class Config:
database: Database
result = load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
debug=True,
)
@@ -193,7 +187,7 @@ class Config:
name: str
port: int
- result = load(Source(file_=json_file), Config, debug=True)
+ result = load(JsonSource(file=json_file), schema=Config, debug=True)
report = get_load_report(result)
@@ -237,12 +231,7 @@ def test_merge_decorator(self, tmp_path: Path):
overrides = tmp_path / "overrides.json"
overrides.write_text('{"port": 9090}')
- meta = Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- )
-
- @load(meta, debug=True)
+ @load(JsonSource(file=defaults), JsonSource(file=overrides), debug=True)
@dataclass
class Config:
host: str
@@ -251,14 +240,14 @@ class Config:
config = Config()
report = get_load_report(config)
assert report is not None
- assert report.strategy == MergeStrategy.LAST_WINS
+ assert report.strategy == "last_wins"
assert len(report.sources) == 2
def test_single_source_decorator(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "localhost", "port": 3000}')
- @load(Source(file_=json_file), debug=True)
+ @load(JsonSource(file=json_file), debug=True)
@dataclass
class Config:
host: str
@@ -290,7 +279,7 @@ class Config:
host: str
port: int
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
@@ -316,21 +305,19 @@ class Config:
with caplog.at_level(logging.DEBUG, logger="dature"):
load(
- Merge(
- Source(file_=defaults),
- Source(file_=overrides),
- ),
- Config,
+ JsonSource(file=defaults),
+ JsonSource(file=overrides),
+ schema=Config,
)
messages = [r.message for r in caplog.records if r.name == "dature"]
expected = [
- f"[JsonLoader] load_raw: path={defaults},"
+ f"[JsonSource] load_raw: source={defaults},"
" raw_keys=['host', 'port'], after_preprocessing_keys=['host', 'port']",
f"[Config] Source 0 loaded: loader=json, file={defaults}, keys=['host', 'port']",
"[Config] Source 0 raw data: {'host': 'localhost', 'port': 3000}",
- f"[JsonLoader] load_raw: path={overrides}, raw_keys=['port'], after_preprocessing_keys=['port']",
+ f"[JsonSource] load_raw: source={overrides}, raw_keys=['port'], after_preprocessing_keys=['port']",
f"[Config] Source 1 loaded: loader=json, file={overrides}, keys=['port']",
"[Config] Source 1 raw data: {'port': 8080}",
"[Config] Merge step 0 (strategy=last_wins): added=['host', 'port'], overwritten=[]",
@@ -353,12 +340,12 @@ class Config:
port: int
with caplog.at_level(logging.DEBUG, logger="dature"):
- load(Source(file_=json_file), Config)
+ load(JsonSource(file=json_file), schema=Config)
messages = [r.message for r in caplog.records if r.name == "dature"]
expected = [
- f"[JsonLoader] load_raw: path={json_file},"
+ f"[JsonSource] load_raw: source={json_file},"
" raw_keys=['host', 'port'], after_preprocessing_keys=['host', 'port']",
f"[Config] Single-source load: loader=json, file={json_file}",
"[Config] Loaded data: {'host': 'localhost', 'port': 3000}",
@@ -381,17 +368,15 @@ class Config:
with pytest.raises(DatureConfigError):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
debug=True,
)
expected = LoadReport(
dataclass_name="Config",
- strategy=MergeStrategy.LAST_WINS,
+ strategy="last_wins",
sources=(
SourceEntry(index=0, file_path=str(a), loader_type="json", raw_data={"host": "localhost"}),
SourceEntry(index=1, file_path=str(b), loader_type="json", raw_data={"host": "override"}),
@@ -419,21 +404,19 @@ def test_merge_validation_error(self, tmp_path: Path):
@dataclass
class Config:
host: str
- port: Annotated[int, Ge(value=0)]
+ port: Annotated[int, Ge(0)]
with pytest.raises(DatureConfigError):
load(
- Merge(
- Source(file_=a),
- Source(file_=b),
- ),
- Config,
+ JsonSource(file=a),
+ JsonSource(file=b),
+ schema=Config,
debug=True,
)
expected = LoadReport(
dataclass_name="Config",
- strategy=MergeStrategy.LAST_WINS,
+ strategy="last_wins",
sources=(
SourceEntry(index=0, file_path=str(a), loader_type="json", raw_data={"port": -5}),
SourceEntry(index=1, file_path=str(b), loader_type="json", raw_data={"host": "localhost"}),
@@ -462,7 +445,7 @@ class Config:
port: int
with pytest.raises(DatureConfigError):
- load(Source(file_=json_file), Config, debug=True)
+ load(JsonSource(file=json_file), schema=Config, debug=True)
expected = LoadReport(
dataclass_name="Config",
@@ -489,10 +472,10 @@ def test_single_source_validation_error(self, tmp_path: Path):
@dataclass
class Config:
- port: Annotated[int, Ge(value=0)]
+ port: Annotated[int, Ge(0)]
with pytest.raises(DatureConfigError):
- load(Source(file_=json_file), Config, debug=True)
+ load(JsonSource(file=json_file), schema=Config, debug=True)
expected = LoadReport(
dataclass_name="Config",
diff --git a/tests/test_main.py b/tests/test_main.py
index e25ec0e..f94f932 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -5,17 +5,23 @@
import pytest
-from dature import Source, load
-from dature.sources_loader.env_ import EnvFileLoader
-from dature.sources_loader.ini_ import IniLoader
-from dature.sources_loader.json5_ import Json5Loader
-from dature.sources_loader.json_ import JsonLoader
-from dature.sources_loader.toml_ import Toml10Loader, Toml11Loader
-from dature.sources_loader.yaml_ import Yaml11Loader, Yaml12Loader
+from dature import (
+ EnvFileSource,
+ EnvSource,
+ IniSource,
+ Json5Source,
+ JsonSource,
+ Source,
+ Toml10Source,
+ Toml11Source,
+ Yaml11Source,
+ Yaml12Source,
+ load,
+)
-def _all_file_loaders() -> list[type]:
- return [EnvFileLoader, Yaml11Loader, Yaml12Loader, JsonLoader, Json5Loader, Toml10Loader, Toml11Loader, IniLoader]
+def _all_file_sources() -> list[type[Source]]:
+ return [EnvFileSource, Yaml11Source, Yaml12Source, JsonSource, Json5Source, Toml10Source, Toml11Source, IniSource]
class TestLoadAsDecorator:
@@ -23,7 +29,7 @@ def test_loads_from_file(self, tmp_path: Path) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "FromFile", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@load(metadata)
@dataclass
@@ -39,7 +45,7 @@ def test_loads_from_env(self, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setenv("APP_NAME", "EnvApp")
monkeypatch.setenv("APP_PORT", "3000")
- metadata = Source(prefix="APP_")
+ metadata = EnvSource(prefix="APP_")
@load(metadata)
@dataclass
@@ -54,7 +60,7 @@ class Config:
def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setenv("MY_VAR", "test_value")
- @load()
+ @load(EnvSource())
@dataclass
class Config:
my_var: str
@@ -66,7 +72,7 @@ def test_explicit_loader_overrides_extension(self, tmp_path: Path) -> None:
txt_file = tmp_path / "config.txt"
txt_file.write_text('{"app_name": "OverrideApp"}')
- metadata = Source(file_=txt_file, loader=JsonLoader)
+ metadata = JsonSource(file=txt_file)
@load(metadata)
@dataclass
@@ -80,7 +86,7 @@ def test_priority(self, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setenv("LOADED_VAR", "loaded")
monkeypatch.setenv("OVERRIDDEN_VAR", "loaded")
- @load()
+ @load(EnvSource())
@dataclass
class Config:
overridden_var: str
@@ -97,7 +103,7 @@ def test_invalid_decorator_order(self) -> None:
with pytest.raises(TypeError, match="Config must be a dataclass"):
@dataclass
- @load()
+ @load(EnvSource())
class Config:
pass
@@ -107,7 +113,7 @@ def test_cache_enabled_by_default(self, tmp_path: Path) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "original", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@load(metadata)
@dataclass
@@ -126,7 +132,7 @@ def test_cache_disabled(self, tmp_path: Path) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "original", "port": 8080}')
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
@load(metadata, cache=False)
@dataclass
@@ -152,8 +158,8 @@ class Config:
name: str
port: int
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.name == "FromFile"
assert result.port == 9090
@@ -167,8 +173,8 @@ class Config:
name: str
debug: bool
- metadata = Source(prefix="APP_")
- result = load(metadata, Config)
+ metadata = EnvSource(prefix="APP_")
+ result = load(metadata, schema=Config)
assert result.name == "EnvFunc"
assert result.debug is True
@@ -180,33 +186,33 @@ def test_default_metadata(self, monkeypatch: pytest.MonkeyPatch) -> None:
class Config:
my_var: str
- result = load(None, Config)
+ result = load(EnvSource(), schema=Config)
assert result.my_var == "from_env"
class TestFileNotFoundWithLoad:
@pytest.mark.parametrize(
- "loader_class",
- _all_file_loaders(),
+ "source_class",
+ _all_file_sources(),
)
- def test_load_function_single_source_file_not_found(self, loader_class: type) -> None:
+ def test_load_function_single_source_file_not_found(self, source_class: type[Source]) -> None:
@dataclass
class Config:
name: str
- metadata = Source(file_="/non/existent/file.json", loader=loader_class)
+ metadata = source_class(file="/non/existent/file.json")
with pytest.raises(FileNotFoundError):
- load(metadata, Config)
+ load(metadata, schema=Config)
@pytest.mark.parametrize(
- "loader_class",
- _all_file_loaders(),
+ "source_class",
+ _all_file_sources(),
)
- def test_load_decorator_single_source_file_not_found(self, loader_class: type) -> None:
- metadata = Source(file_="/non/existent/config.json", loader=loader_class)
+ def test_load_decorator_single_source_file_not_found(self, source_class: type[Source]) -> None:
+ metadata = source_class(file="/non/existent/config.json")
@load(metadata)
@dataclass
diff --git a/tests/test_type_loaders.py b/tests/test_type_loaders.py
index eeba86a..1801ba0 100644
--- a/tests/test_type_loaders.py
+++ b/tests/test_type_loaders.py
@@ -1,4 +1,4 @@
-"""Tests for TypeLoader — custom type loading via Source, configure(), and Merge."""
+"""Tests for TypeLoader — custom type loading via Source, configure(), and load()."""
from collections.abc import Generator
from dataclasses import dataclass
@@ -6,7 +6,7 @@
import pytest
-from dature import Merge, Source, TypeLoader, configure, load
+from dature import Yaml12Source, configure, load
from dature.config import _ConfigProxy
@@ -31,10 +31,10 @@ class ConfigWithRgb:
@pytest.fixture
def _reset_config() -> Generator[None]:
_ConfigProxy.set_instance(None)
- _ConfigProxy.set_type_loaders(())
+ _ConfigProxy.set_type_loaders({})
yield
_ConfigProxy.set_instance(None)
- _ConfigProxy.set_type_loaders(())
+ _ConfigProxy.set_type_loaders({})
@pytest.fixture
@@ -47,11 +47,11 @@ def yaml_with_rgb(tmp_path: Path) -> Path:
class TestTypeLoadersInSource:
def test_single_source_with_type_loader(self, yaml_with_rgb: Path) -> None:
result = load(
- Source(
- file_=yaml_with_rgb,
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
+ Yaml12Source(
+ file=yaml_with_rgb,
+ type_loaders={Rgb: rgb_from_string},
),
- ConfigWithRgb,
+ schema=ConfigWithRgb,
)
assert result.name == "test"
assert result.color == Rgb(r=255, g=128, b=0)
@@ -66,11 +66,11 @@ def int_times_two(value: str) -> int:
p.write_text("name: app\ncolor: '10,20,30'\n")
result = load(
- Source(
- file_=p,
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
+ Yaml12Source(
+ file=p,
+ type_loaders={Rgb: rgb_from_string},
),
- ConfigWithRgb,
+ schema=ConfigWithRgb,
)
assert result.color == Rgb(r=10, g=20, b=30)
@@ -79,9 +79,9 @@ class TestTypeLoadersInConfigure:
@pytest.mark.usefixtures("_reset_config")
def test_global_type_loaders_via_configure(self, yaml_with_rgb: Path) -> None:
configure(
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
+ type_loaders={Rgb: rgb_from_string},
)
- result = load(Source(file_=yaml_with_rgb), ConfigWithRgb)
+ result = load(Yaml12Source(file=yaml_with_rgb), schema=ConfigWithRgb)
assert result.color == Rgb(r=255, g=128, b=0)
@@ -93,12 +93,10 @@ def test_merge_metadata_type_loaders(self, tmp_path: Path) -> None:
override.write_text("name: override\n")
result = load(
- Merge(
- Source(file_=base),
- Source(file_=override),
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
- ),
- ConfigWithRgb,
+ Yaml12Source(file=base),
+ Yaml12Source(file=override),
+ schema=ConfigWithRgb,
+ type_loaders={Rgb: rgb_from_string},
)
assert result.name == "override"
assert result.color == Rgb(r=1, g=2, b=3)
@@ -116,18 +114,18 @@ def tag_upper(value: str) -> str:
return value.upper()
configure(
- type_loaders=(TypeLoader(type_=Rgb, func=rgb_from_string),),
+ type_loaders={Rgb: rgb_from_string},
)
p = tmp_path / "cfg.yaml"
p.write_text("color: '10,20,30'\ntag: hello\n")
result = load(
- Source(
- file_=p,
- type_loaders=(TypeLoader(type_=str, func=tag_upper),),
+ Yaml12Source(
+ file=p,
+ type_loaders={str: tag_upper},
),
- TwoCustom,
+ schema=TwoCustom,
)
assert result.color == Rgb(r=10, g=20, b=30)
assert result.tag == "HELLO"
diff --git a/tests/test_type_utils.py b/tests/test_type_utils.py
new file mode 100644
index 0000000..f926eb5
--- /dev/null
+++ b/tests/test_type_utils.py
@@ -0,0 +1,58 @@
+from dataclasses import dataclass
+from typing import Annotated
+
+from dature.type_utils import find_nested_dataclasses
+
+
+class TestFindNestedDataclasses:
+ def test_plain_dataclass(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(Inner)
+ assert result == [Inner]
+
+ def test_list_of_dataclasses(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(list[Inner])
+ assert result == [Inner]
+
+ def test_plain_type_no_dataclass(self):
+ result = find_nested_dataclasses(str)
+ assert result == []
+
+ def test_optional_dataclass(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(Inner | None)
+ assert result == [Inner]
+
+ def test_annotated_dataclass(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(Annotated[Inner, "some_meta"])
+ assert result == [Inner]
+
+ def test_dict_value_dataclass(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(dict[str, Inner])
+ assert result == [Inner]
+
+ def test_nested_generic(self):
+ @dataclass
+ class Inner:
+ name: str
+
+ result = find_nested_dataclasses(list[Inner | None])
+ assert result == [Inner]
diff --git a/tests/validators/test_complex.py b/tests/validators/test_complex.py
index 0362a75..d5a21c7 100644
--- a/tests/validators/test_complex.py
+++ b/tests/validators/test_complex.py
@@ -4,8 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.validators.number import Ge, Le
from dature.validators.sequence import MinItems, UniqueItems
from dature.validators.string import MaxLength, MinLength, RegexPattern
@@ -15,15 +15,15 @@ class TestMultipleFields:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3), MaxLength(value=50)]
- age: Annotated[int, Ge(value=0), Le(value=150)]
- tags: Annotated[list[str], MinItems(value=1), UniqueItems()]
+ name: Annotated[str, MinLength(3), MaxLength(50)]
+ age: Annotated[int, Ge(0), Le(150)]
+ tags: Annotated[list[str], MinItems(1), UniqueItems()]
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice", "age": 30, "tags": ["python", "coding"]}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
assert result.age == 30
@@ -32,18 +32,18 @@ class Config:
def test_all_invalid(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3), MaxLength(value=50)]
- age: Annotated[int, Ge(value=0), Le(value=150)]
- tags: Annotated[list[str], MinItems(value=1), UniqueItems()]
+ name: Annotated[str, MinLength(3), MaxLength(50)]
+ age: Annotated[int, Ge(0), Le(150)]
+ tags: Annotated[list[str], MinItems(1), UniqueItems()]
json_file = tmp_path / "config.json"
content = '{"name": "AB", "age": 200, "tags": []}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 3
@@ -72,13 +72,13 @@ class TestNestedDataclass:
def test_success(self, tmp_path: Path):
@dataclass
class Address:
- city: Annotated[str, MinLength(value=2)]
- zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")]
+ city: Annotated[str, MinLength(2)]
+ zip_code: Annotated[str, RegexPattern(r"^\d{5}$")]
@dataclass
class User:
- name: Annotated[str, MinLength(value=3)]
- age: Annotated[int, Ge(value=18)]
+ name: Annotated[str, MinLength(3)]
+ age: Annotated[int, Ge(18)]
address: Address
json_file = tmp_path / "config.json"
@@ -86,8 +86,8 @@ class User:
'{"name": "Alice", "age": 30, "address": {"city": "NYC", "zip_code": "12345"}}',
)
- metadata = Source(file_=json_file)
- result = load(metadata, User)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=User)
assert result.name == "Alice"
assert result.age == 30
@@ -97,23 +97,23 @@ class User:
def test_all_invalid(self, tmp_path: Path):
@dataclass
class Address:
- city: Annotated[str, MinLength(value=2)]
- zip_code: Annotated[str, RegexPattern(pattern=r"^\d{5}$")]
+ city: Annotated[str, MinLength(2)]
+ zip_code: Annotated[str, RegexPattern(r"^\d{5}$")]
@dataclass
class User:
- name: Annotated[str, MinLength(value=3)]
- age: Annotated[int, Ge(value=18)]
+ name: Annotated[str, MinLength(3)]
+ age: Annotated[int, Ge(18)]
address: Address
json_file = tmp_path / "config.json"
content = '{"name": "Al", "age": 15, "address": {"city": "N", "zip_code": "ABCDE"}}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, User)
+ load(metadata, schema=User)
e = exc_info.value
assert len(e.exceptions) == 4
@@ -148,16 +148,16 @@ class TestCustomErrorMessage:
def test_custom_error_message(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Ge(value=18, error_message="Age must be 18 or older")]
+ age: Annotated[int, Ge(18, error_message="Age must be 18 or older")]
json_file = tmp_path / "config.json"
content = '{"age": 15}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -171,31 +171,31 @@ class TestDictListDict:
def test_raw_dict_field_validator_success(self, tmp_path: Path):
@dataclass
class Config:
- groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(value=1)]
+ groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(1)]
json_file = tmp_path / "config.json"
json_file.write_text(
'{"groups": {"admins": [{"name": "Alice"}]}}',
)
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.groups == {"admins": [{"name": "Alice"}]}
def test_raw_dict_field_validator_failure(self, tmp_path: Path):
@dataclass
class Config:
- groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(value=1)]
+ groups: Annotated[dict[str, list[dict[str, Any]]], MinItems(1)]
json_file = tmp_path / "config.json"
content = '{"groups": {}}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -210,8 +210,8 @@ class Config:
def test_nested_dataclass_in_dict_list_success(self, tmp_path: Path):
@dataclass
class Member:
- name: Annotated[str, MinLength(value=2)]
- role: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(2)]
+ role: Annotated[str, MinLength(3)]
@dataclass
class Config:
@@ -222,8 +222,8 @@ class Config:
'{"teams": {"backend": [{"name": "Alice", "role": "admin"}]}}',
)
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.teams["backend"][0].name == "Alice"
assert result.teams["backend"][0].role == "admin"
@@ -231,8 +231,8 @@ class Config:
def test_nested_dataclass_in_dict_list_validation_fails(self, tmp_path: Path):
@dataclass
class Member:
- name: Annotated[str, MinLength(value=2)]
- role: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(2)]
+ role: Annotated[str, MinLength(3)]
@dataclass
class Config:
@@ -242,10 +242,10 @@ class Config:
content = '{"teams": {"backend": [{"name": "A", "role": "ab"}]}}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 2
diff --git a/tests/validators/test_custom_validator.py b/tests/validators/test_custom_validator.py
index ee7b55a..cc5c45e 100644
--- a/tests/validators/test_custom_validator.py
+++ b/tests/validators/test_custom_validator.py
@@ -5,11 +5,11 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class Divisible:
value: int
error_message: str = "Value must be divisible by {value}"
@@ -24,7 +24,7 @@ def get_error_message(self) -> str:
return self.error_message.format(value=self.value)
-@dataclass(frozen=True, slots=True, kw_only=True)
+@dataclass(frozen=True, slots=True)
class StartsWith:
prefix: str
error_message: str = "Value must start with '{prefix}'"
@@ -43,29 +43,29 @@ class TestCustomFieldValidator:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- count: Annotated[int, Divisible(value=5)]
+ count: Annotated[int, Divisible(5)]
json_file = tmp_path / "config.json"
json_file.write_text('{"count": 10}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.count == 10
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- count: Annotated[int, Divisible(value=5)]
+ count: Annotated[int, Divisible(5)]
json_file = tmp_path / "config.json"
content = '{"count": 7}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -80,16 +80,16 @@ class Config:
def test_custom_error_message(self, tmp_path: Path):
@dataclass
class Config:
- count: Annotated[int, Divisible(value=3, error_message="Must be a multiple of {value}")]
+ count: Annotated[int, Divisible(3, error_message="Must be a multiple of {value}")]
json_file = tmp_path / "config.json"
content = '{"count": 7}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -106,29 +106,29 @@ class TestCustomStringValidator:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- url: Annotated[str, StartsWith(prefix="https://")]
+ url: Annotated[str, StartsWith("https://")]
json_file = tmp_path / "config.json"
json_file.write_text('{"url": "https://example.com"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.url == "https://example.com"
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- url: Annotated[str, StartsWith(prefix="https://")]
+ url: Annotated[str, StartsWith("https://")]
json_file = tmp_path / "config.json"
content = '{"url": "http://example.com"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -146,10 +146,10 @@ def test_success(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
- port: Annotated[int, Divisible(value=10)]
+ port: Annotated[int, Divisible(10)]
config = Config()
assert config.port == 8080
@@ -159,10 +159,10 @@ def test_failure(self, tmp_path: Path):
content = '{"port": 8081}'
json_file.write_text(content)
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
- port: Annotated[int, Divisible(value=10)]
+ port: Annotated[int, Divisible(10)]
with pytest.raises(DatureConfigError) as exc_info:
Config()
@@ -182,10 +182,10 @@ def test_direct_instantiation_validates(self, tmp_path: Path):
content = '{"port": 8080}'
json_file.write_text(content)
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
- port: Annotated[int, Divisible(value=10)]
+ port: Annotated[int, Divisible(10)]
with pytest.raises(DatureConfigError) as exc_info:
Config(port=8081)
@@ -202,14 +202,14 @@ class TestMultipleCustomValidators:
def test_combined_success(self, tmp_path: Path):
@dataclass
class Config:
- count: Annotated[int, Divisible(value=5)]
- url: Annotated[str, StartsWith(prefix="https://")]
+ count: Annotated[int, Divisible(5)]
+ url: Annotated[str, StartsWith("https://")]
json_file = tmp_path / "config.json"
json_file.write_text('{"count": 15, "url": "https://example.com"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.count == 15
assert result.url == "https://example.com"
@@ -217,17 +217,17 @@ class Config:
def test_all_fail(self, tmp_path: Path):
@dataclass
class Config:
- count: Annotated[int, Divisible(value=5)]
- url: Annotated[str, StartsWith(prefix="https://")]
+ count: Annotated[int, Divisible(5)]
+ url: Annotated[str, StartsWith("https://")]
json_file = tmp_path / "config.json"
content = '{"count": 7, "url": "http://example.com"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 2
diff --git a/tests/validators/test_metadata_validators.py b/tests/validators/test_metadata_validators.py
index 2ab49ec..8d033e6 100644
--- a/tests/validators/test_metadata_validators.py
+++ b/tests/validators/test_metadata_validators.py
@@ -4,8 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.field_path import F
from dature.validators.number import Ge, Gt, Lt
from dature.validators.root import RootValidator
@@ -21,13 +21,13 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice"}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MinLength(value=3),
+ F[Config].name: MinLength(3),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
@@ -39,13 +39,13 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].port: (Gt(value=0), Lt(value=65536)),
+ F[Config].port: (Gt(0), Lt(65536)),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.port == 8080
@@ -58,14 +58,14 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice", "port": 8080}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MinLength(value=3),
- F[Config].port: Gt(value=0),
+ F[Config].name: MinLength(3),
+ F[Config].port: Gt(0),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
assert result.port == 8080
@@ -81,15 +81,15 @@ class Config:
content = '{"name": "Al"}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MinLength(value=3),
+ F[Config].name: MinLength(3),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -110,15 +110,15 @@ class Config:
content = '{"port": -1}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].port: (Gt(value=0), Lt(value=65536)),
+ F[Config].port: (Gt(0), Lt(65536)),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -145,14 +145,14 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"database": {"host": "localhost", "port": 5432}}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].database.host: MinLength(value=1),
- F[Config].database.port: Gt(value=0),
+ F[Config].database.host: MinLength(1),
+ F[Config].database.port: Gt(0),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.database.host == "localhost"
assert result.database.port == 5432
@@ -171,15 +171,15 @@ class Config:
content = '{"database": {"host": "", "port": 5432}}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].database.host: MinLength(value=1),
+ F[Config].database.host: MinLength(1),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -196,20 +196,20 @@ class TestMetadataValidatorsComplement:
def test_metadata_validators_complement_annotated(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(3)]
port: int
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice", "port": 8080}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MaxLength(value=50),
- F[Config].port: Gt(value=0),
+ F[Config].name: MaxLength(50),
+ F[Config].port: Gt(0),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
assert result.port == 8080
@@ -217,21 +217,21 @@ class Config:
def test_annotated_still_validates(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=5)]
+ name: Annotated[str, MinLength(5)]
json_file = tmp_path / "config.json"
content = '{"name": "Al"}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MaxLength(value=50),
+ F[Config].name: MaxLength(50),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -246,21 +246,21 @@ class Config:
def test_metadata_validator_fails_with_annotated_present(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(3)]
json_file = tmp_path / "config.json"
content = '{"name": "This is a very long name that exceeds the limit"}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MaxLength(value=10),
+ F[Config].name: MaxLength(10),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -275,39 +275,39 @@ class Config:
def test_both_annotated_and_metadata_on_same_field_pass(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(3)]
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice"}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MaxLength(value=10),
+ F[Config].name: MaxLength(10),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
def test_annotated_fails_while_metadata_would_pass(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=5)]
+ name: Annotated[str, MinLength(5)]
json_file = tmp_path / "config.json"
content = '{"name": "AB"}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MaxLength(value=50),
+ F[Config].name: MaxLength(50),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -322,39 +322,39 @@ class Config:
def test_same_validator_type_in_annotated_and_metadata(self, tmp_path: Path):
@dataclass
class Config:
- port: Annotated[int, Ge(value=0)]
+ port: Annotated[int, Ge(0)]
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].port: Lt(value=65536),
+ F[Config].port: Lt(65536),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.port == 8080
def test_same_validator_type_in_annotated_and_metadata_fails(self, tmp_path: Path):
@dataclass
class Config:
- port: Annotated[int, Ge(value=1024)]
+ port: Annotated[int, Ge(1024)]
json_file = tmp_path / "config.json"
content = '{"port": 80}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].port: Lt(value=65536),
+ F[Config].port: Lt(65536),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -369,21 +369,21 @@ class Config:
def test_metadata_fails_while_annotated_passes(self, tmp_path: Path):
@dataclass
class Config:
- port: Annotated[int, Ge(value=0)]
+ port: Annotated[int, Ge(0)]
json_file = tmp_path / "config.json"
content = '{"port": 70000}'
json_file.write_text(content)
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].port: Lt(value=65536),
+ F[Config].port: Lt(65536),
},
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -405,8 +405,8 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
@@ -426,14 +426,14 @@ def validate_config(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080, "user": "admin"}')
- metadata = Source(
- file_=json_file,
- root_validators=(RootValidator(func=validate_config),),
+ metadata = JsonSource(
+ file=json_file,
+ root_validators=(RootValidator(validate_config),),
validators={
- F[Config].port: Ge(value=0),
+ F[Config].port: Ge(0),
},
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.port == 8080
assert result.user == "admin"
@@ -449,11 +449,11 @@ class Config:
name: str
age: int
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
validators={
- F[Config].name: MinLength(value=2),
- F[Config].age: Ge(value=0),
+ F[Config].name: MinLength(2),
+ F[Config].age: Ge(0),
},
)
diff --git a/tests/validators/test_number.py b/tests/validators/test_number.py
index d4e91e5..8fe42d9 100644
--- a/tests/validators/test_number.py
+++ b/tests/validators/test_number.py
@@ -4,8 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.validators.number import Ge, Gt, Le, Lt
@@ -13,29 +13,29 @@ class TestGt:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Gt(value=0)]
+ age: Annotated[int, Gt(0)]
json_file = tmp_path / "config.json"
json_file.write_text('{"age": 25}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.age == 25
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Gt(value=18)]
+ age: Annotated[int, Gt(18)]
json_file = tmp_path / "config.json"
content = '{"age": 18}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -52,29 +52,29 @@ class TestGe:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Ge(value=18)]
+ age: Annotated[int, Ge(18)]
json_file = tmp_path / "config.json"
json_file.write_text('{"age": 18}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.age == 18
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Ge(value=18)]
+ age: Annotated[int, Ge(18)]
json_file = tmp_path / "config.json"
content = '{"age": 17}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -91,29 +91,29 @@ class TestLt:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Lt(value=100)]
+ age: Annotated[int, Lt(100)]
json_file = tmp_path / "config.json"
json_file.write_text('{"age": 99}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.age == 99
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Lt(value=100)]
+ age: Annotated[int, Lt(100)]
json_file = tmp_path / "config.json"
content = '{"age": 100}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -130,29 +130,29 @@ class TestLe:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Le(value=100)]
+ age: Annotated[int, Le(100)]
json_file = tmp_path / "config.json"
json_file.write_text('{"age": 100}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.age == 100
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Le(value=100)]
+ age: Annotated[int, Le(100)]
json_file = tmp_path / "config.json"
content = '{"age": 101}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -169,29 +169,29 @@ class TestCombined:
def test_combined_numeric_validators(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Ge(value=18), Le(value=65)]
+ age: Annotated[int, Ge(18), Le(65)]
json_file = tmp_path / "config.json"
json_file.write_text('{"age": 30}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.age == 30
def test_combined_numeric_validators_failure(self, tmp_path: Path):
@dataclass
class Config:
- age: Annotated[int, Ge(value=18), Le(value=65)]
+ age: Annotated[int, Ge(18), Le(65)]
json_file = tmp_path / "config.json"
content = '{"age": 70}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
diff --git a/tests/validators/test_post_init_and_property.py b/tests/validators/test_post_init_and_property.py
index c133189..40d27d8 100644
--- a/tests/validators/test_post_init_and_property.py
+++ b/tests/validators/test_post_init_and_property.py
@@ -3,7 +3,7 @@
import pytest
-from dature import Source, load
+from dature import JsonSource, load
class TestPostInitValidationFunctionMode:
@@ -21,7 +21,7 @@ def __post_init__(self) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080, "host": "localhost"}')
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.port == 8080
assert result.host == "localhost"
@@ -41,7 +41,7 @@ def __post_init__(self) -> None:
json_file.write_text('{"port": 99999, "host": "localhost"}')
with pytest.raises(ValueError, match="Invalid port: 99999"):
- load(Source(file_=json_file), Config)
+ load(JsonSource(file=json_file), schema=Config)
def test_post_init_cross_field_validation(self, tmp_path: Path):
@dataclass
@@ -58,7 +58,7 @@ def __post_init__(self) -> None:
json_file.write_text('{"min_value": 100, "max_value": 10}')
with pytest.raises(ValueError, match=r"min_value \(100\) must be less than max_value \(10\)"):
- load(Source(file_=json_file), Config)
+ load(JsonSource(file=json_file), schema=Config)
def test_post_init_cross_field_success(self, tmp_path: Path):
@dataclass
@@ -74,7 +74,7 @@ def __post_init__(self) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"min_value": 1, "max_value": 100}')
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.min_value == 1
assert result.max_value == 100
@@ -85,7 +85,7 @@ def test_post_init_success(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080, "host": "localhost"}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
port: int
@@ -105,7 +105,7 @@ def test_post_init_failure_from_file(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 99999, "host": "localhost"}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
port: int
@@ -123,7 +123,7 @@ def test_post_init_failure_from_override(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 8080, "host": "localhost"}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
port: int
@@ -141,7 +141,7 @@ def test_post_init_cross_field(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"min_value": 50, "max_value": 10}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
min_value: int
@@ -170,7 +170,7 @@ def __post_init__(self) -> None:
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "localhost", "port": 8080}')
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.base_url == "http://localhost:8080"
@@ -178,7 +178,7 @@ def test_computed_field_via_post_init_decorator(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "example.com", "port": 443}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
host: str
@@ -207,7 +207,7 @@ def address(self) -> str:
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "localhost", "port": 8080}')
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.address == "localhost:8080"
@@ -215,7 +215,7 @@ def test_property_computed_value_decorator(self, tmp_path: Path):
json_file = tmp_path / "config.json"
json_file.write_text('{"host": "localhost", "port": 3000}')
- @load(Source(file_=json_file))
+ @load(JsonSource(file=json_file))
@dataclass
class Config:
host: str
@@ -241,6 +241,6 @@ def email(self) -> str:
json_file = tmp_path / "config.json"
json_file.write_text('{"_email": " Admin@Example.COM "}')
- result = load(Source(file_=json_file), Config)
+ result = load(JsonSource(file=json_file), schema=Config)
assert result.email == "admin@example.com"
diff --git a/tests/validators/test_root_validator.py b/tests/validators/test_root_validator.py
index 1b70fb1..42a4f09 100644
--- a/tests/validators/test_root_validator.py
+++ b/tests/validators/test_root_validator.py
@@ -3,8 +3,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.validators.root import RootValidator
@@ -23,11 +23,11 @@ def validate_config(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 80, "user": "root"}')
- metadata = Source(
- file_=json_file,
- root_validators=(RootValidator(func=validate_config),),
+ metadata = JsonSource(
+ file=json_file,
+ root_validators=(RootValidator(validate_config),),
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.port == 80
assert result.user == "root"
@@ -46,13 +46,13 @@ def validate_config(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 80, "user": "admin"}')
- metadata = Source(
- file_=json_file,
- root_validators=(RootValidator(func=validate_config),),
+ metadata = JsonSource(
+ file=json_file,
+ root_validators=(RootValidator(validate_config),),
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -75,14 +75,14 @@ def validate_step(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"min_value": 10, "max_value": 100, "step": 5}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
root_validators=(
- RootValidator(func=validate_min_max),
- RootValidator(func=validate_step),
+ RootValidator(validate_min_max),
+ RootValidator(validate_step),
),
)
- result = load(metadata, Config)
+ result = load(metadata, schema=Config)
assert result.min_value == 10
assert result.max_value == 100
@@ -104,16 +104,16 @@ def validate_step(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"min_value": 100, "max_value": 10, "step": -5}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
root_validators=(
- RootValidator(func=validate_min_max),
- RootValidator(func=validate_step),
+ RootValidator(validate_min_max),
+ RootValidator(validate_step),
),
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -132,13 +132,13 @@ def validate_config(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 80, "host": "localhost"}')
- metadata = Source(
- file_=json_file,
- root_validators=(RootValidator(func=validate_config),),
+ metadata = JsonSource(
+ file=json_file,
+ root_validators=(RootValidator(validate_config),),
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -154,9 +154,9 @@ def validate_credentials(obj) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"username": "admin", "password": "short"}')
- metadata = Source(
- file_=json_file,
- root_validators=(RootValidator(func=validate_credentials),),
+ metadata = JsonSource(
+ file=json_file,
+ root_validators=(RootValidator(validate_credentials),),
)
@load(metadata)
@@ -187,8 +187,8 @@ def validate_config(obj: Config) -> bool:
json_file = tmp_path / "config.json"
json_file.write_text('{"port": 80, "user": "admin"}')
- metadata = Source(
- file_=json_file,
+ metadata = JsonSource(
+ file=json_file,
root_validators=(
RootValidator(
func=validate_config,
@@ -198,7 +198,7 @@ def validate_config(obj: Config) -> bool:
)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
diff --git a/tests/validators/test_sequence.py b/tests/validators/test_sequence.py
index 1b90bea..cd40ce9 100644
--- a/tests/validators/test_sequence.py
+++ b/tests/validators/test_sequence.py
@@ -4,8 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.validators.sequence import MaxItems, MinItems, UniqueItems
@@ -13,29 +13,29 @@ class TestMinItems:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MinItems(value=2)]
+ tags: Annotated[list[str], MinItems(2)]
json_file = tmp_path / "config.json"
json_file.write_text('{"tags": ["python", "typing"]}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.tags == ["python", "typing"]
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MinItems(value=3)]
+ tags: Annotated[list[str], MinItems(3)]
json_file = tmp_path / "config.json"
content = '{"tags": ["python"]}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -52,29 +52,29 @@ class TestMaxItems:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MaxItems(value=5)]
+ tags: Annotated[list[str], MaxItems(5)]
json_file = tmp_path / "config.json"
json_file.write_text('{"tags": ["python", "typing"]}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.tags == ["python", "typing"]
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MaxItems(value=2)]
+ tags: Annotated[list[str], MaxItems(2)]
json_file = tmp_path / "config.json"
content = '{"tags": ["python", "typing", "validation"]}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -96,8 +96,8 @@ class Config:
json_file = tmp_path / "config.json"
json_file.write_text('{"tags": ["python", "typing", "validation"]}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.tags == ["python", "typing", "validation"]
@@ -110,10 +110,10 @@ class Config:
content = '{"tags": ["python", "typing", "python"]}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -130,29 +130,29 @@ class TestCombined:
def test_combined_list_validators(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MinItems(value=2), MaxItems(value=5), UniqueItems()]
+ tags: Annotated[list[str], MinItems(2), MaxItems(5), UniqueItems()]
json_file = tmp_path / "config.json"
json_file.write_text('{"tags": ["python", "typing", "validation"]}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.tags == ["python", "typing", "validation"]
def test_combined_list_validators_failure(self, tmp_path: Path):
@dataclass
class Config:
- tags: Annotated[list[str], MinItems(value=2), MaxItems(value=5), UniqueItems()]
+ tags: Annotated[list[str], MinItems(2), MaxItems(5), UniqueItems()]
json_file = tmp_path / "config.json"
content = '{"tags": ["python", "typing", "validation", "testing", "coding", "extra"]}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
diff --git a/tests/validators/test_string.py b/tests/validators/test_string.py
index 6128b9e..eddd5ea 100644
--- a/tests/validators/test_string.py
+++ b/tests/validators/test_string.py
@@ -4,8 +4,8 @@
import pytest
-from dature import Source, load
-from dature.errors.exceptions import DatureConfigError
+from dature import JsonSource, load
+from dature.errors import DatureConfigError
from dature.validators.string import MaxLength, MinLength, RegexPattern
@@ -13,29 +13,29 @@ class TestMinLength:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=3)]
+ name: Annotated[str, MinLength(3)]
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MinLength(value=5)]
+ name: Annotated[str, MinLength(5)]
json_file = tmp_path / "config.json"
content = '{"name": "Bob"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -52,29 +52,29 @@ class TestMaxLength:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MaxLength(value=10)]
+ name: Annotated[str, MaxLength(10)]
json_file = tmp_path / "config.json"
json_file.write_text('{"name": "Alice"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.name == "Alice"
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- name: Annotated[str, MaxLength(value=5)]
+ name: Annotated[str, MaxLength(5)]
json_file = tmp_path / "config.json"
content = '{"name": "Alexander"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -91,29 +91,29 @@ class TestRegexPattern:
def test_success(self, tmp_path: Path):
@dataclass
class Config:
- email: Annotated[str, RegexPattern(pattern=r"^[\w\.-]+@[\w\.-]+\.\w+$")]
+ email: Annotated[str, RegexPattern(r"^[\w\.-]+@[\w\.-]+\.\w+$")]
json_file = tmp_path / "config.json"
json_file.write_text('{"email": "test@example.com"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.email == "test@example.com"
def test_failure(self, tmp_path: Path):
@dataclass
class Config:
- email: Annotated[str, RegexPattern(pattern=r"^[\w\.-]+@[\w\.-]+\.\w+$")]
+ email: Annotated[str, RegexPattern(r"^[\w\.-]+@[\w\.-]+\.\w+$")]
json_file = tmp_path / "config.json"
content = '{"email": "invalid-email"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
@@ -130,29 +130,29 @@ class TestCombined:
def test_combined_string_validators(self, tmp_path: Path):
@dataclass
class Config:
- username: Annotated[str, MinLength(value=3), MaxLength(value=20)]
+ username: Annotated[str, MinLength(3), MaxLength(20)]
json_file = tmp_path / "config.json"
json_file.write_text('{"username": "john_doe"}')
- metadata = Source(file_=json_file)
- result = load(metadata, Config)
+ metadata = JsonSource(file=json_file)
+ result = load(metadata, schema=Config)
assert result.username == "john_doe"
def test_combined_string_validators_failure(self, tmp_path: Path):
@dataclass
class Config:
- username: Annotated[str, MinLength(value=3), MaxLength(value=20)]
+ username: Annotated[str, MinLength(3), MaxLength(20)]
json_file = tmp_path / "config.json"
content = '{"username": "this_is_a_very_long_username_that_exceeds_limit"}'
json_file.write_text(content)
- metadata = Source(file_=json_file)
+ metadata = JsonSource(file=json_file)
with pytest.raises(DatureConfigError) as exc_info:
- load(metadata, Config)
+ load(metadata, schema=Config)
e = exc_info.value
assert len(e.exceptions) == 1
diff --git a/tests/validators/test_validators_base.py b/tests/validators/test_validators_base.py
new file mode 100644
index 0000000..1f37085
--- /dev/null
+++ b/tests/validators/test_validators_base.py
@@ -0,0 +1,142 @@
+"""Tests for validators/base.py — extract and create validator providers."""
+
+from dataclasses import dataclass
+from typing import Annotated
+
+import pytest
+
+from dature.field_path import FieldPath
+from dature.validators.base import (
+ create_metadata_validator_providers,
+ create_root_validator_providers,
+ create_validator_providers,
+ extract_validators_from_type,
+)
+from dature.validators.number import Ge, Gt
+from dature.validators.root import RootValidator
+from dature.validators.string import MinLength
+
+
+class TestExtractValidatorsFromType:
+ def test_plain_type_returns_empty(self):
+ result = extract_validators_from_type(str)
+
+ assert result == []
+
+ def test_annotated_without_validators_returns_empty(self):
+ result = extract_validators_from_type(Annotated[int, "some metadata"])
+
+ assert result == []
+
+ def test_annotated_with_validator(self):
+ result = extract_validators_from_type(Annotated[str, MinLength(3)])
+
+ assert len(result) == 1
+ assert isinstance(result[0], MinLength)
+
+ def test_annotated_with_multiple_validators(self):
+ result = extract_validators_from_type(Annotated[int, Gt(0), Ge(0)])
+
+ assert len(result) == 2
+
+ def test_annotated_mixed_metadata_and_validators(self):
+ result = extract_validators_from_type(Annotated[str, "description", MinLength(1)])
+
+ assert len(result) == 1
+ assert isinstance(result[0], MinLength)
+
+
+class TestCreateValidatorProviders:
+ def test_creates_providers_from_validators(self):
+ @dataclass
+ class Cfg:
+ name: str
+
+ validators = [MinLength(3)]
+ result = create_validator_providers(Cfg, "name", validators)
+
+ assert len(result) == 1
+
+
+class TestCreateMetadataValidatorProviders:
+ def test_single_field_validator(self):
+ @dataclass
+ class Cfg:
+ name: str
+
+ fp = FieldPath(owner=Cfg, parts=("name",))
+ result = create_metadata_validator_providers({fp: MinLength(3)})
+
+ assert len(result) == 1
+
+ def test_tuple_validators(self):
+ @dataclass
+ class Cfg:
+ value: int
+
+ fp = FieldPath(owner=Cfg, parts=("value",))
+ result = create_metadata_validator_providers({fp: (Gt(0), Ge(0))})
+
+ assert len(result) == 2
+
+ def test_empty_field_path_raises(self):
+ @dataclass
+ class Cfg:
+ name: str
+
+ fp = FieldPath(owner=Cfg, parts=())
+
+ with pytest.raises(ValueError, match="FieldPath must contain at least one field name"):
+ create_metadata_validator_providers({fp: MinLength(3)})
+
+ @pytest.mark.parametrize(
+ "parts",
+ [("name",), ("inner", "name")],
+ ids=["single", "nested"],
+ )
+ def test_string_owner_raises(self, parts: tuple[str, ...]):
+ fp = FieldPath(owner="Cfg", parts=parts)
+
+ with pytest.raises(TypeError, match="string owner"):
+ create_metadata_validator_providers({fp: MinLength(3)})
+
+ def test_non_fieldpath_key_raises(self):
+ with pytest.raises(TypeError, match="validators key must be a FieldPath"):
+ create_metadata_validator_providers({"name": MinLength(3)})
+
+ def test_nested_field_path(self):
+ @dataclass
+ class Inner:
+ value: str
+
+ @dataclass
+ class Outer:
+ inner: Inner
+
+ fp = FieldPath(owner=Outer, parts=("inner", "value"))
+ result = create_metadata_validator_providers({fp: MinLength(1)})
+
+ assert len(result) == 1
+
+
+class TestCreateRootValidatorProviders:
+ def test_creates_providers(self):
+ @dataclass
+ class Cfg:
+ name: str
+
+ rv = RootValidator(func=lambda _self: True)
+ result = create_root_validator_providers(Cfg, (rv,))
+
+ assert len(result) == 1
+
+ def test_multiple_root_validators(self):
+ @dataclass
+ class Cfg:
+ name: str
+
+ rv1 = RootValidator(func=lambda _self: True)
+ rv2 = RootValidator(func=lambda _self: True, error_message="custom")
+ result = create_root_validator_providers(Cfg, (rv1, rv2))
+
+ assert len(result) == 2