diff --git a/README.md b/README.md
index 2cdbe27..7176dd2 100644
--- a/README.md
+++ b/README.md
@@ -1,129 +1,288 @@
-# chidian - chimeric data interchange
+# chidian
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
-> Declarative, type-safe data mapping for savvy data engineers
+> Dict-to-dict data mappings that look like dicts
-**chidian** is a composable framework for building readable data transformations with **Pydantic v2**.
+**chidian** lets you write data transformations as plain dictionaries. Your mapping *looks like* your output.
## Quick Start
+
```python
-from pydantic import BaseModel
-from chidian import Mapper
-import chidian.partials as p
-
-# Source data (nested)
-source_data = {
- "name": {"first": "Gandalf", "given": ["the", "Grey"], "suffix": None},
- "address": {
- "street": ["Bag End", "Hobbiton"],
- "city": "The Shire",
- "postal_code": "ME001",
- "country": "Middle Earth"
+from chidian import mapper, grab
+
+@mapper
+def patient_summary(d):
+ return {
+ "patient_id": grab(d, "data.patient.id"),
+ "is_active": grab(d, "data.patient.active"),
+ "latest_visit": grab(d, "data.visits[0].date"),
}
-}
-# Target data (flat)
-target = {
- "full_name": "Gandalf the Grey",
- "address": "Bag End\nHobbiton\nThe Shire\nME001\nMiddle Earth"
+source = {
+ "data": {
+ "patient": {"id": "p-123", "active": True},
+ "visits": [
+ {"date": "2024-01-15", "type": "checkup"},
+ {"date": "2024-02-20", "type": "followup"}
+ ]
+ }
}
-# Define schemas
-class SourceSchema(BaseModel):
- name: dict
- address: dict
-
-class TargetSchema(BaseModel):
- full_name: str
- address: str
-
-# Create type-safe mapper
-person_mapping = Mapper(
- {
- "full_name": p.get([
- "name.first",
- "name.given[*]",
- "name.suffix"
- ]).join(" ", flatten=True),
-
- "address": p.get([
- "address.street[*]",
- "address.city",
- "address.postal_code",
- "address.country"
- ]).join("\n", flatten=True),
- },
- min_input_schemas=[SourceSchema],
- output_schema=TargetSchema,
-)
+result = patient_summary(source)
+# {"patient_id": "p-123", "is_active": True, "latest_visit": "2024-01-15"}
+```
+
+## Core Idea
+
+Write your mapping as the dict you want back:
+
+```python
+from chidian import mapper, grab, DROP
+
+@mapper
+def normalize_user(d):
+ return {
+ # Static values — just write them
+ "version": "2.0",
+
+ # Pull from source
+ "name": grab(d, "user.name"),
+
+ # Nested output — nest your mapping
+ "address": {
+ "city": grab(d, "location.city"),
+ "zip": grab(d, "location.postal"),
+ },
+
+        # Conditional drop: DROP.THIS_OBJECT removes its containing dict,
+        # so give conditionally-dropped fields their own nested object
+        "extras": {
+            "data": grab(d, "data") if grab(d, "verified") else DROP.THIS_OBJECT,
+        },
+ }
+```
+
+Decorated functions are shareable, testable, and composable:
+
+```python
+# Import and use in your codebase
+from myproject.mappings import normalize_user
+
+result = normalize_user(raw_data)
+```
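+
+Because a mapper is an ordinary function returning a dict, mappers also nest. A sketch; the `full_record` name and `account.user` path are illustrative:
+
+```python
+@mapper
+def full_record(d):
+    # Reuse normalize_user for the nested part of the output
+    return {
+        "user": normalize_user(grab(d, "account.user")),
+        "source": "crm",
+    }
+```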
+
+## `grab(data, path)`
+
+Extract values using dot notation and bracket indexing:
+
+```python
+grab(d, "user.name") # Nested access
+grab(d, "items[0]") # List index
+grab(d, "items[-1]") # Negative index
+grab(d, "users[*].name") # Map over list
+```
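+
+The `[*]` wildcard collects the value from every list element. A quick illustration with inline data:
+
+```python
+d = {"users": [{"name": "Ada"}, {"name": "Grace"}]}
+grab(d, "users[*].name")  # ["Ada", "Grace"]
+```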
+
+## `DROP` — Conditional Removal
+
+Control what gets excluded from output. `DROP` propagates upward through the structure:
+
+| Sentinel | Effect |
+|----------|--------|
+| `DROP.THIS_OBJECT` | Remove the containing dict (used directly in a list, removes just that item) |
+| `DROP.PARENT` | Remove the parent container |
+| `DROP.GRANDPARENT` | Remove two levels up |
+| `DROP.GREATGRANDPARENT` | Remove three levels up (raises if out of bounds) |
+
+```python
+@mapper
+def with_drops(d):
+ return {
+ "kept": {"id": grab(d, "data.patient.id")},
+ "dropped": {
+ "trigger": DROP.THIS_OBJECT, # This whole dict removed
+ "ignored": "never appears",
+ },
+ "items": [
+ {"bad": DROP.PARENT, "also_ignored": "x"}, # Removes entire list
+ {"good": "value"},
+ ],
+ }
+
+# Result: {"kept": {"id": "..."}}
+```
+
+**In lists**, `DROP.THIS_OBJECT` removes just that item:
+
+```python
+@mapper
+def filter_list(d):
+ return {
+ "tags": [
+ "first_kept",
+ DROP.THIS_OBJECT, # Removed
+ "third_kept",
+ {"nested": DROP.THIS_OBJECT}, # Entire dict removed
+ ],
+ }
+
+# Result: {"tags": ["first_kept", "third_kept"]}
+```
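+
+Higher levels reach correspondingly further up. A sketch built from the propagation rules above:
+
+```python
+@mapper
+def deep_drop(d):
+    return {
+        "kept": "stays",
+        "wrapper": {
+            # THIS_OBJECT would remove the inner dict, PARENT the list,
+            # and GRANDPARENT removes the "wrapper" object itself
+            "items": [{"bad": DROP.GRANDPARENT}],
+        },
+    }
+
+# Result: {"kept": "stays"}
+```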
+
+## `KEEP` — Preserve Empty Values
-# Execute
-result = person_mapping(SourceSchema(**source_data))
-assert result == TargetSchema(**target)
+By default, empty values (`{}`, `[]`, `""`, `None`) are removed. Wrap with `KEEP()` to preserve them:
+
+```python
+from chidian import KEEP
+
+@mapper
+def with_empties(d):
+ return {
+ "explicit_empty": KEEP({}), # Preserved as {}
+ "explicit_none": KEEP(None), # Preserved as None
+ "implicit_empty": {}, # Removed by default
+ "normal_value": "hello",
+ }
+
+# Result: {"explicit_empty": {}, "explicit_none": None, "normal_value": "hello"}
+```
+
+## Decorator Options
+
+```python
+@mapper(remove_empty=False)
+def keep_all_empties(d):
+ return {
+ "empty_dict": {}, # Kept
+ "empty_list": [], # Kept
+ "none_val": None, # Kept
+ }
+```
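+
+With empty removal disabled, the output keeps every field:
+
+```python
+keep_all_empties({})
+# {"empty_dict": {}, "empty_list": [], "none_val": None}
+```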
+
+## Strict Mode
+
+Catch missing keys during development:
+
+```python
+from chidian import mapper, grab, mapping_context
+
+@mapper
+def risky_mapping(d):
+ return {
+ "id": grab(d, "data.patient.id"),
+ "missing": grab(d, "key.not.found"), # Doesn't exist
+ }
+
+# Normal — missing keys yield None and are removed from the output
+result = risky_mapping(source)
+
+# Strict — raises KeyError on missing keys
+with mapping_context(strict=True):
+ risky_mapping(source) # KeyError!
+```
+
+**Note**: Strict mode distinguishes between "key not found" and "key exists with `None` value":
+
+```python
+source = {"has_none": None}
+
+@mapper
+def check_none(d):
+ return {
+ "explicit_none": grab(d, "has_none"), # OK — key exists, value is None
+ "missing": grab(d, "does.not.exist"), # Raises in strict mode
+ }
```
-## Core Features
+## Validation
-| Component | Purpose |
-| ---------------- | ------------------------------------------------------------------------ |
-| **Mapper** | Dict→dict transformations with optional schema validation |
-| **DataMapping** | Pydantic-validated, type-safe transformations |
-| **Partials API** | Composable operators for concise transformation chains |
-| **Table** | Sparse tables with path queries, joins, pandas/polars interop |
-| **Lexicon** | Bidirectional code lookups (e.g., LOINC ↔ SNOMED) with metadata |
+chidian includes a dict-like validation DSL that mirrors your data structure:
-## Table & DataFrames
+```python
+from chidian.validation import Required, Optional, validate, to_pydantic, Gte, InSet
+
+schema = {
+ "name": Required(str),
+ "email": Optional(str),
+ "age": int & Gte(0),
+ "role": InSet({"admin", "user"}),
+ "tags": [str],
+ "profile": {
+ "bio": Optional(str),
+ "avatar_url": str,
+ },
+}
-Seamless conversion between chidian Tables and pandas/polars:
+# Validate data
+data = {"name": "Alice", "age": 30, "role": "admin", "tags": ["python"]}
+result = validate(data, schema)
-```bash
-pip install 'chidian[pandas]' # pandas support
-pip install 'chidian[polars]' # polars support
-pip install 'chidian[df]' # both
+if result.is_ok():
+ print("Valid!", result.value)
+else:
+ for path, msg in result.error:
+ print(f" {'.'.join(map(str, path))}: {msg}")
```
+### Composing Validators
+
+Use `&` (and) and `|` (or) to combine validators:
+
```python
-from chidian.table import Table
+from chidian.validation import IsType, Gt, Matches
+
+# Both must pass
+positive_int = IsType(int) & Gt(0)
-table = Table([
- {"name": "Alice", "age": 30},
- {"name": "Bob", "age": 25}
-])
+# Either can pass
+str_or_int = str | int
-df_pd = table.to_pandas(index=True)
-df_pl = table.to_polars(add_index=True)
+# With regex
+email = str & Matches(r"^[\w.-]+@[\w.-]+\.\w+$")
```
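+
+Composed validators drop into a schema like any other entry. A sketch reusing the definitions above:
+
+```python
+schema = {"email": email, "attempts": positive_int}
+result = validate({"email": "ada@example.com", "attempts": 3}, schema)
+assert result.is_ok()
+```
+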
-### Flatten Nested Data
+### Pydantic Integration
-Convert nested structures into flat, column-based tables:
+Compile schemas to Pydantic models for runtime validation:
```python
-table = Table([
- {"user": {"name": "John", "prefs": ["email", "sms"]}, "id": 123},
- {"user": {"name": "Jane", "prefs": ["phone"]}, "id": 456}
-])
-
-# Flatten with intuitive path notation
-flat = table.flatten()
-print(flat.columns)
-# {'id', 'user.name', 'user.prefs[0]', 'user.prefs[1]'}
-
-# Export flattened data
-table.to_pandas(flatten=True)
-table.to_polars(flatten=True)
-table.to_csv("flat.csv", flatten=True)
-
-# Control flattening behavior
-table.flatten(max_depth=2, array_index_limit=5)
+User = to_pydantic("User", {
+ "name": Required(str),
+ "email": Optional(str),
+ "age": int,
+})
+
+user = User(name="Alice", age=30) # Full Pydantic validation
```
-**Features:**
-- Path notation: `user.name`, `items[0]`, `data.settings.theme`
-- Handles sparse data (different nesting per row)
-- Special key escaping for dots/brackets
-- Depth and array size controls
+### Built-in Validators
+
+| Validator | Description |
+|-----------|-------------|
+| `Required(v)` | Field cannot be None |
+| `Optional(v)` | Field can be None |
+| `IsType(t)` | Value must be instance of type |
+| `InRange(lo, hi)` | Length must be in range |
+| `InSet(values)` | Value must be in set |
+| `Matches(pattern)` | String must match regex |
+| `Gt`, `Gte`, `Lt`, `Lte` | Numeric comparisons |
+| `Between(lo, hi)` | Value between bounds |
+| `Predicate(fn, msg)` | Custom validation function |
+
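+`Predicate` wraps any boolean function with a custom message. A sketch, assuming the constructor signature listed above:
+
+```python
+from chidian.validation import Between, Predicate, validate
+
+even = Predicate(lambda x: x % 2 == 0, "must be even")
+schema = {"port": int & Between(1024, 65535), "workers": even}
+
+assert validate({"port": 8080, "workers": 4}, schema).is_ok()
+```
+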
+## API Reference
+
+### `@mapper` / `@mapper(remove_empty=True)`
+
+Decorator that transforms a mapping function into a callable mapper.
+
+### `grab(data, path)`
+
+Extract values using dot notation and bracket indexing; see [`grab(data, path)`](#grabdata-path) above for the full set of examples.
+
## Design Philosophy
@@ -137,7 +296,7 @@ Built by data engineers, for data engineers. chidian solves common pain points:
**Solutions:**
- **Iterate over perfection**: Learn and adapt as you build
- **Functions as first-class objects**: Compose transformations cleanly
-- **JSON-first**: Simple, universal data structures
+- **Keep things dict-like**: Simple, universal structure that's quick to read
chidian applies functional programming principles to data mappings, drawing inspiration from [Pydantic](https://github.com/pydantic/pydantic), [JMESPath](https://github.com/jmespath), [funcy](https://github.com/Suor/funcy), and others.
@@ -147,4 +306,4 @@ Contributions welcome! Open an issue to discuss your idea before submitting a PR
---
-See [tests](/chidian/tests) for more examples.
+See [tests](/tests) for more examples.
diff --git a/chidian/__init__.py b/chidian/__init__.py
index 208f3b9..68beaaa 100644
--- a/chidian/__init__.py
+++ b/chidian/__init__.py
@@ -1,22 +1,16 @@
-from .core import get, put
-from .lexicon import Lexicon, LexiconBuilder
-from .lib.get_dsl_parser import parse_path_peg as parse_path
-from .mapper import DROP, KEEP, Mapper, MapperResult, ValidationMode
-from .partials import ChainableFunction, FunctionChain
-from .table import Table
+from .context import mapping_context
+from .core import grab
+from .decorator import mapper
+from .drop import DROP, process_drops
+from .keep import KEEP
+from .process import process_output
__all__ = [
- "get",
- "put",
- "parse_path",
- "Table",
- "Mapper",
- "Lexicon",
- "LexiconBuilder",
+ "grab",
+ "mapper",
+ "mapping_context",
"DROP",
"KEEP",
- "ValidationMode",
- "MapperResult",
- "FunctionChain",
- "ChainableFunction",
+ "process_drops",
+ "process_output",
]
diff --git a/chidian/context.py b/chidian/context.py
new file mode 100644
index 0000000..b32c68c
--- /dev/null
+++ b/chidian/context.py
@@ -0,0 +1,48 @@
+"""
+Context manager for mapping configuration (e.g., strict mode).
+"""
+
+from contextlib import contextmanager
+from contextvars import ContextVar
+
+# Context variable for strict mode
+_strict_mode: ContextVar[bool] = ContextVar("strict_mode", default=False)
+
+
+def is_strict() -> bool:
+ """Check if strict mode is currently enabled."""
+ return _strict_mode.get()
+
+
+@contextmanager
+def mapping_context(*, strict: bool = False):
+ """
+ Context manager for mapping configuration.
+
+ Args:
+        strict: If True, grab() raises KeyError on missing keys instead of
+ returning None. Distinguishes between "key not found" and
+ "key exists with None value".
+
+ Example:
+ from chidian import mapper, grab, mapping_context
+
+ @mapper
+ def risky_mapping(d):
+ return {
+ "id": grab(d, "data.patient.id"),
+ "missing": grab(d, "key.not.found"), # Doesn't exist
+ }
+
+ # Normal — missing keys become None/removed
+ result = risky_mapping(source)
+
+        # Strict — raises KeyError on missing keys
+        with mapping_context(strict=True):
+            risky_mapping(source)  # KeyError!
+ """
+ token = _strict_mode.set(strict)
+ try:
+ yield
+ finally:
+ _strict_mode.reset(token)
diff --git a/chidian/core.py b/chidian/core.py
index ffa12ef..c4f2ffa 100644
--- a/chidian/core.py
+++ b/chidian/core.py
@@ -1,48 +1,59 @@
"""
-Core get/put functions for chidian data traversal and mutation.
+Core grab function for chidian data traversal.
"""
-import copy
from typing import Any, Callable
-from .lib.core_helpers import (
- apply_functions,
- mutate_path,
- traverse_path,
- validate_mutation_path,
-)
+from .context import is_strict
+from .lib.core_helpers import apply_functions, traverse_path
from .lib.parser import parse_path
-def get(
+def grab(
source: dict | list,
- key: str,
+ path: str,
default: Any = None,
apply: Callable | list[Callable] | None = None,
- strict: bool = False,
) -> Any:
"""
Extract values from nested data structures using path notation.
Args:
source: Source data to traverse
- key: Path string (e.g., "data.items[0].name")
+ path: Path string (e.g., "data.items[0].name")
default: Default value if path not found
apply: Function(s) to apply to the result
- strict: If True, raise errors on missing paths
Returns:
Value at path or default if not found
+
+ Raises:
+ KeyError: In strict mode, if a dict key is not found
+ IndexError: In strict mode, if a list index is out of range
+        TypeError: In strict mode, if a type mismatch occurs during traversal
+        ValueError: In strict mode, if the path syntax is invalid
+
+ Note:
+ Strict mode distinguishes between "key not found" and "key exists with None":
+ - {"has_none": None} -> grab(d, "has_none") returns None (OK in strict mode)
+ - {} -> grab(d, "missing") raises KeyError in strict mode
+
+ Examples:
+ grab(d, "user.name") # Nested access
+ grab(d, "items[0]") # List index
+ grab(d, "items[-1]") # Negative index
+ grab(d, "users[*].name") # Map over list
"""
+ strict = is_strict()
+
try:
- path = parse_path(key)
+ parsed = parse_path(path)
except ValueError as e:
if strict:
- raise ValueError(f"Invalid path syntax: {key}") from e
+ raise ValueError(f"Invalid path syntax: {path}") from e
return default
try:
- result = traverse_path(source, path, strict=strict)
+ result = traverse_path(source, parsed, strict=strict)
except Exception:
if strict:
raise
@@ -57,45 +68,3 @@ def get(
result = apply_functions(result, apply)
return result
-
-
-def put(
- target: Any,
- path: str,
- value: Any,
- strict: bool = False,
-) -> Any:
- """
- Set a value in a nested data structure, creating containers as needed.
-
- Args:
- target: Target data structure to modify
- path: Path string (e.g., "data.items[0].name")
- value: Value to set
- strict: If True, raise errors on invalid operations
-
- Returns:
- Modified copy of the target data
- """
- try:
- parsed_path = parse_path(path)
- except ValueError as e:
- raise ValueError(f"Invalid path syntax: {path}") from e
-
- # Validate path for mutation
- if not validate_mutation_path(parsed_path):
- if strict:
- raise ValueError(f"Invalid mutation path: {path}")
- return target
-
- # Deep copy for copy-on-write semantics
- result = copy.deepcopy(target)
-
- try:
- mutate_path(result, parsed_path, value, strict=strict)
- except Exception:
- if strict:
- raise
- return target
-
- return result
diff --git a/chidian/decorator.py b/chidian/decorator.py
new file mode 100644
index 0000000..47340e3
--- /dev/null
+++ b/chidian/decorator.py
@@ -0,0 +1,52 @@
+"""
+The @mapper decorator for transforming functions into data mappers.
+"""
+
+from functools import wraps
+from typing import Any, Callable
+
+from .process import process_output
+
+
+def mapper(_func: Callable | None = None, *, remove_empty: bool = True) -> Callable:
+ """
+ Decorator that transforms a mapping function into a callable mapper.
+
+ The decorated function should return a dict. The decorator automatically:
+ - Processes DROP sentinels (removes marked values/containers)
+ - Unwraps KEEP wrappers (preserves explicitly kept values)
+ - Removes empty values by default ({}, [], "", None)
+
+ Can be used with or without arguments:
+ @mapper
+ def my_mapping(d): ...
+
+ @mapper(remove_empty=False)
+ def my_mapping(d): ...
+
+ Args:
+ remove_empty: If True (default), remove empty values from output.
+ KEEP-wrapped values are always preserved.
+
+ Returns:
+ Decorated function that processes its output through the mapper pipeline.
+ """
+
+ def decorator(func: Callable) -> Callable:
+ @wraps(func)
+ def wrapper(*args: Any, **kwargs: Any) -> Any:
+ # Call the original function to get the raw mapping result
+ result = func(*args, **kwargs)
+
+ # Process the result (DROP, KEEP, empty removal)
+ return process_output(result, remove_empty=remove_empty)
+
+ return wrapper
+
+ # Handle both @mapper and @mapper(...) syntax
+ if _func is not None:
+ # Called as @mapper without parentheses
+ return decorator(_func)
+ else:
+ # Called as @mapper(...) with arguments
+ return decorator
diff --git a/chidian/drop.py b/chidian/drop.py
new file mode 100644
index 0000000..9c569f3
--- /dev/null
+++ b/chidian/drop.py
@@ -0,0 +1,137 @@
+"""
+DROP sentinel for conditional removal of values from output.
+"""
+
+from enum import Enum
+
+
+class DROP(Enum):
+ """
+ Sentinel indicating a value (or its container) should be dropped from output.
+
+ DROP propagates upward through the structure:
+ - THIS_OBJECT: Remove the dict/list containing this DROP value
+ - PARENT: Remove the parent of that container
+ - GRANDPARENT: Remove two levels up
+ - GREATGRANDPARENT: Remove three levels up (raises if out of bounds)
+
+ In lists, DROP.THIS_OBJECT removes just that item (not the whole list).
+
+ Examples:
+ # DROP.THIS_OBJECT removes the containing dict
+ {"dropped": {"trigger": DROP.THIS_OBJECT, "ignored": "x"}}
+        # Result: {} (the inner dict is dropped, and the "dropped" key with it)
+
+ # In a list, DROP.THIS_OBJECT removes just that item
+ ["first", DROP.THIS_OBJECT, "third"]
+ # Result: ["first", "third"]
+
+ # DROP.PARENT removes the parent container
+ {"items": [{"bad": DROP.PARENT}, {"good": "value"}]}
+        # Result: {} (the list is dropped, and the "items" key with it)
+ """
+
+ THIS_OBJECT = 1
+ PARENT = 2
+ GRANDPARENT = 3
+ GREATGRANDPARENT = 4
+
+
+class _DropSignal(Exception):
+ """Internal signal for DROP propagation."""
+
+ def __init__(self, levels: int):
+ self.levels = levels
+
+
+def process_drops(data):
+ """
+ Recursively process a data structure, handling DROP sentinels.
+
+ Returns the processed data with DROPs applied.
+ If DROP propagates to the top level, returns {} for dict input or [] for list input.
+ """
+ try:
+ return _process_value(data)
+ except _DropSignal as signal:
+ if signal.levels > 0:
+ raise ValueError(
+ f"DROP level exceeds structure depth (levels remaining: {signal.levels})"
+ )
+ # Top-level container was dropped
+ if isinstance(data, dict):
+ return {}
+ elif isinstance(data, list):
+ return []
+ else:
+ return None
+
+
+def _process_value(data):
+ """Internal processor that may raise _DropSignal."""
+ if isinstance(data, DROP):
+ raise _DropSignal(data.value)
+
+ if isinstance(data, dict):
+ return _process_dict(data)
+
+ if isinstance(data, list):
+ return _process_list(data)
+
+ return data
+
+
+def _process_dict(d: dict) -> dict:
+ """Process a dict, handling DROP sentinels in values."""
+ result = {}
+
+ for key, value in d.items():
+ try:
+ processed = _process_value(value)
+ result[key] = processed
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ # Remove this key (don't add to result)
+ pass
+ elif signal.levels == 1:
+ # Remove this dict from its parent
+ raise _DropSignal(0)
+ else:
+ # Propagate further up
+ raise _DropSignal(signal.levels - 1)
+
+ return result
+
+
+def _process_list(lst: list) -> list:
+ """Process a list, handling DROP sentinels in items."""
+ result = []
+
+ for item in lst:
+ # Special case: DROP directly in list
+ if isinstance(item, DROP):
+ if item == DROP.THIS_OBJECT:
+ # Just skip this item
+ continue
+ elif item == DROP.PARENT:
+ # Remove this list's parent container
+ raise _DropSignal(1)
+ else:
+ # GRANDPARENT or higher - propagate up
+ raise _DropSignal(item.value - 1)
+
+ try:
+ processed = _process_value(item)
+ result.append(processed)
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ # Remove this item (don't add to result)
+ pass
+ elif signal.levels == 1:
+ # Remove this list from its parent
+ raise _DropSignal(0)
+ else:
+ # Propagate further up
+ raise _DropSignal(signal.levels - 1)
+
+ return result
diff --git a/chidian/keep.py b/chidian/keep.py
new file mode 100644
index 0000000..825dba6
--- /dev/null
+++ b/chidian/keep.py
@@ -0,0 +1,31 @@
+"""
+KEEP wrapper to preserve empty values from automatic removal.
+"""
+
+from typing import Any
+
+
+class KEEP:
+ """
+ Wrapper to preserve empty values that would otherwise be removed.
+
+ By default, empty values ({}, [], "", None) are removed during processing.
+ Wrap with KEEP() to explicitly preserve them.
+
+ Examples:
+ KEEP({}) # Preserved as {}
+ KEEP(None) # Preserved as None
+ KEEP([]) # Preserved as []
+ KEEP("") # Preserved as ""
+ """
+
+ def __init__(self, value: Any):
+ self.value = value
+
+ def __repr__(self) -> str:
+ return f"KEEP({self.value!r})"
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, KEEP):
+ return self.value == other.value
+ return False
diff --git a/chidian/lexicon.py b/chidian/lexicon.py
deleted file mode 100644
index 53d2802..0000000
--- a/chidian/lexicon.py
+++ /dev/null
@@ -1,169 +0,0 @@
-"""
-Bidirectional string mapper for code/terminology translations.
-
-Primary use case: Medical code system mappings (e.g., LOINC ↔ SNOMED).
-Supports both one-to-one and many-to-one relationships with automatic
-reverse lookup generation.
-
-Examples:
- Simple code mapping:
- >>> loinc_to_snomed = Lexicon({'8480-6': '271649006'})
- >>> loinc_to_snomed['8480-6'] # Forward lookup
- '271649006'
- >>> loinc_to_snomed['271649006'] # Reverse lookup
- '8480-6'
-
- Many-to-one mapping (first value is default):
- >>> mapper = Lexicon({('LA6699-8', 'LA6700-4'): 'absent'})
- >>> mapper['absent'] # Returns first key as default
- 'LA6699-8'
-"""
-
-from __future__ import annotations
-
-from collections.abc import Iterator, Mapping, Sequence
-from typing import Optional
-
-
-class Lexicon(Mapping[str, str]):
- """
- Bidirectional string mapper for code/terminology translations.
-
- - Many->one supported by allowing tuple/list of keys.
- - Reverse lookup returns the first key seen for a given value.
- - Default (if set) is returned when a key/value isn't found instead of KeyError.
- """
-
- def __init__(
- self,
- mappings: Mapping[str | Sequence[str], str],
- default: Optional[str] = None,
- metadata: Optional[Mapping[str, str]] = None,
- ) -> None:
- fwd: dict[str, str] = {}
- rev: dict[str, str] = {}
-
- for key, value in mappings.items():
- if not isinstance(value, str):
- raise TypeError("Values must be strings")
-
- if isinstance(key, str):
- fwd[key] = value
- if value not in rev:
- rev[value] = key
- continue
-
- if isinstance(key, Sequence) and not isinstance(key, str):
- if len(key) == 0:
- raise ValueError("Empty tuple keys are not allowed")
- for i, k in enumerate(key):
- if not isinstance(k, str):
- raise TypeError("All keys in tuples must be strings")
- fwd[k] = value
- if i == 0 and value not in rev:
- rev[value] = k
- continue
-
- raise TypeError("Keys must be strings or tuples of strings")
-
- self._fwd = fwd
- self._rev = rev
- self._default = default
- self.metadata: dict[str, str] = dict(metadata or {})
-
- # Mapping interface
- def __len__(self) -> int:
- return len(self._fwd)
-
- def __iter__(self) -> Iterator[str]:
- return iter(self._fwd)
-
- def __getitem__(self, key: str) -> str:
- if key in self._fwd:
- return self._fwd[key]
- if key in self._rev:
- return self._rev[key]
- if self._default is not None:
- return self._default
- raise KeyError(f"Key '{key}' not found")
-
- # Dict-like conveniences with bidirectional semantics
- def get(self, key: str, default: Optional[str] = None) -> Optional[str]: # type: ignore[override]
- if key in self._fwd:
- return self._fwd[key]
- if key in self._rev:
- return self._rev[key]
- return default if default is not None else self._default
-
- def __contains__(self, key: object) -> bool:
- return isinstance(key, str) and (key in self._fwd or key in self._rev)
-
- # Explicit helpers
- def forward(self, key: str) -> Optional[str]:
- return self._fwd.get(key)
-
- def reverse(self, value: str) -> Optional[str]:
- return self._rev.get(value)
-
- def prefer(self, value: str, primary_key: str) -> None:
- """Override which key is returned for reverse lookup of a value."""
- if self._fwd.get(primary_key) != value:
- raise ValueError(f"Key '{primary_key}' must map to value '{value}'")
- self._rev[value] = primary_key
-
- @classmethod
- def builder(cls) -> LexiconBuilder:
- return LexiconBuilder()
-
-
-class LexiconBuilder:
- """Fluent builder for creating Lexicon instances."""
-
- def __init__(self) -> None:
- self._mappings: dict[str, str] = {}
- self._reverse_priorities: dict[str, str] = {}
- self._default: Optional[str] = None
- self._metadata: dict[str, str] = {}
-
- def add(self, key: str, value: str) -> LexiconBuilder:
- if not isinstance(key, str) or not isinstance(value, str):
- raise TypeError("Keys and values must be strings")
- self._mappings[key] = value
- if value not in self._reverse_priorities:
- self._reverse_priorities[value] = key
- return self
-
- def add_many(self, keys: Sequence[str], value: str) -> LexiconBuilder:
- if not isinstance(value, str):
- raise TypeError("Value must be a string")
- if len(keys) == 0:
- raise ValueError("Empty tuple keys are not allowed")
- for i, key in enumerate(keys):
- if not isinstance(key, str):
- raise TypeError("All keys must be strings")
- self._mappings[key] = value
- if i == 0 and value not in self._reverse_priorities:
- self._reverse_priorities[value] = key
- return self
-
- def set_primary_reverse(self, value: str, primary_key: str) -> LexiconBuilder:
- if self._mappings.get(primary_key) != value:
- raise ValueError(f"Key '{primary_key}' must map to value '{value}'")
- self._reverse_priorities[value] = primary_key
- return self
-
- def set_default(self, default: str) -> LexiconBuilder:
- if not isinstance(default, str):
- raise TypeError("Default must be a string")
- self._default = default
- return self
-
- def set_metadata(self, metadata: Mapping[str, str]) -> LexiconBuilder:
- self._metadata = dict(metadata)
- return self
-
- def build(self) -> Lexicon:
- lex = Lexicon(self._mappings, default=self._default, metadata=self._metadata) # type: ignore
- for value, primary in self._reverse_priorities.items():
- lex.prefer(value, primary)
- return lex
diff --git a/chidian/lib/core_helpers.py b/chidian/lib/core_helpers.py
index d03a5d2..efb4c0b 100644
--- a/chidian/lib/core_helpers.py
+++ b/chidian/lib/core_helpers.py
@@ -1,10 +1,10 @@
"""
-Helper functions for core get/put operations.
+Helper functions for core grab operations.
"""
from typing import Any, Callable
-from .parser import Path, PathSegment, PathSegmentType
+from .parser import Path, PathSegmentType
def traverse_path(data: Any, path: Path, strict: bool = False) -> Any:
@@ -24,8 +24,6 @@ def traverse_path(data: Any, path: Path, strict: bool = False) -> Any:
if segment.type == PathSegmentType.KEY:
assert isinstance(segment.value, str)
result = _traverse_key(item, segment.value, strict)
- # Only extend if we applied key to a list of dicts
- # (i.e., when item was a list and we distributed the key)
if isinstance(item, list) and isinstance(result, list):
next_items.extend(result)
else:
@@ -56,7 +54,6 @@ def traverse_path(data: Any, path: Path, strict: bool = False) -> Any:
current = next_items
- # Return single item if only one result
if len(current) == 1:
return current[0]
return current
@@ -73,7 +70,6 @@ def _traverse_key(data: Any, key: str, strict: bool) -> Any:
return None
elif isinstance(data, list):
- # Apply key to each dict in list
results = []
for item in data:
if isinstance(item, dict):
@@ -102,7 +98,6 @@ def _traverse_index(data: Any, idx: int, strict: bool) -> Any:
raise TypeError("Expected list but got different type")
return None
- # Handle negative indexing
length = len(data)
actual_idx = idx if idx >= 0 else length + idx
@@ -120,8 +115,6 @@ def _traverse_slice(data: Any, start: int | None, end: int | None, strict: bool)
if strict:
raise TypeError("Expected list but got different type")
return None
-
- # Python handles negative indices and None values in slices automatically
return data[start:end]
@@ -156,170 +149,3 @@ def apply_functions(value: Any, functions: Callable | list[Callable]) -> Any:
return None
return current
-
-
-def validate_mutation_path(path: Path) -> bool:
- """Validate that a path is suitable for mutation operations."""
- if not path.segments:
- return False
-
- # Path must start with a key (not an index)
- if path.segments[0].type != PathSegmentType.KEY:
- return False
-
- # Check for unsupported segment types
- for segment in path.segments:
- if segment.type in (
- PathSegmentType.WILDCARD,
- PathSegmentType.SLICE,
- PathSegmentType.TUPLE,
- ):
- return False
-
- return True
-
-
-def mutate_path(data: Any, path: Path, value: Any, strict: bool = False) -> None:
- """Mutate data in-place at the specified path."""
- if not path.segments:
- raise ValueError("Empty path")
-
- # Navigate to parent of target
- current = data
- for i, segment in enumerate(path.segments[:-1]):
- if segment.type == PathSegmentType.KEY:
- assert isinstance(segment.value, str)
- current = _ensure_key_container(
- current, segment.value, path.segments, i, strict
- )
- elif segment.type == PathSegmentType.INDEX:
- assert isinstance(segment.value, int)
- current = _ensure_index_container(
- current, segment.value, path.segments, i, strict
- )
-
- # Set final value
- final_segment = path.segments[-1]
- if final_segment.type == PathSegmentType.KEY:
- assert isinstance(final_segment.value, str)
- if not isinstance(current, dict):
- if strict:
- raise TypeError(f"Cannot set key '{final_segment.value}' on non-dict")
- return
- current[final_segment.value] = value
-
- elif final_segment.type == PathSegmentType.INDEX:
- assert isinstance(final_segment.value, int)
- if not isinstance(current, list):
- if strict:
- raise TypeError(f"Cannot set index {final_segment.value} on non-list")
- return
-
- idx = final_segment.value
- # Expand list if needed for positive indices
- if idx >= 0:
- while len(current) <= idx:
- current.append(None)
- current[idx] = value
- else:
- # Negative index
- actual_idx = len(current) + idx
- if actual_idx < 0:
- if strict:
- raise IndexError(f"Index {idx} out of range")
- else:
- current[actual_idx] = value
-
-
-def _ensure_key_container(
- current: Any, key: str, segments: list[PathSegment], index: int, strict: bool
-) -> Any:
- """Ensure a dict exists at key, creating if needed."""
- if not isinstance(current, dict):
- if strict:
- raise TypeError(f"Cannot traverse into non-dict at '{key}'")
- return current
-
- # Determine what type of container we need
- next_segment = segments[index + 1]
- container_type = _determine_container_type(next_segment)
-
- if key not in current:
- # Create appropriate container
- if container_type == "list":
- current[key] = []
- else:
- current[key] = {}
- else:
- # Validate existing container type
- existing = current[key]
- if container_type == "list" and not isinstance(existing, list):
- if strict:
- raise TypeError(
- f"Expected list at '{key}' but found {type(existing).__name__}"
- )
- current[key] = []
- elif container_type == "dict" and not isinstance(existing, dict):
- if strict:
- raise TypeError(
- f"Expected dict at '{key}' but found {type(existing).__name__}"
- )
- current[key] = {}
-
- return current[key]
-
-
-def _ensure_index_container(
- current: Any, idx: int, segments: list[PathSegment], index: int, strict: bool
-) -> Any:
- """Ensure a list exists and has capacity for index."""
- if not isinstance(current, list):
- if strict:
- raise TypeError("Cannot index into non-list")
- return current
-
- # Handle negative indexing
- actual_idx = idx if idx >= 0 else len(current) + idx
- if actual_idx < 0:
- if strict:
- raise IndexError(f"Index {idx} out of range")
- return current
-
- # Expand list if needed
- while len(current) <= actual_idx:
- current.append(None)
-
- # Determine container type for this index
- next_segment = segments[index + 1]
- container_type = _determine_container_type(next_segment)
-
- if current[actual_idx] is None:
- # Create appropriate container
- if container_type == "list":
- current[actual_idx] = []
- else:
- current[actual_idx] = {}
- else:
- # Validate existing container type
- existing = current[actual_idx]
- if container_type == "list" and not isinstance(existing, list):
- if strict:
- raise TypeError(
- f"Expected list at index {idx} but found {type(existing).__name__}"
- )
- current[actual_idx] = []
- elif container_type == "dict" and not isinstance(existing, dict):
- if strict:
- raise TypeError(
- f"Expected dict at index {idx} but found {type(existing).__name__}"
- )
- current[actual_idx] = {}
-
- return current[actual_idx]
-
-
-def _determine_container_type(segment: PathSegment) -> str:
- """Determine whether we need a dict or list container."""
- if segment.type == PathSegmentType.INDEX:
- return "list"
- return "dict"
diff --git a/chidian/lib/data_mapping_helpers.py b/chidian/lib/data_mapping_helpers.py
deleted file mode 100644
index adf6238..0000000
--- a/chidian/lib/data_mapping_helpers.py
+++ /dev/null
@@ -1,33 +0,0 @@
-"""
-Helper functions for DataMapping validation and processing.
-"""
-
-from typing import Any, Type, TypeVar
-
-from pydantic import BaseModel
-
-# Define generic type variables bounded to BaseModel
-_InModel = TypeVar("_InModel", bound=BaseModel)
-_OutModel = TypeVar("_OutModel", bound=BaseModel)
-
-
-def is_pydantic_model(model_class: Type) -> bool:
- """Check if a class is a Pydantic BaseModel."""
- try:
- return (
- isinstance(model_class, type)
- and issubclass(model_class, BaseModel)
- and hasattr(model_class, "model_fields")
- )
- except TypeError:
- return False
-
-
-def to_dict(model: _InModel) -> dict[str, Any]:
- """Convert Pydantic model to dictionary."""
- return model.model_dump()
-
-
-def validate_output(data: dict[str, Any], output_schema: Type[_OutModel]) -> _OutModel:
- """Validate output data against output schema."""
- return output_schema.model_validate(data) # type: ignore[return-value]
diff --git a/chidian/lib/dsl/filter.peg b/chidian/lib/dsl/filter.peg
deleted file mode 100644
index cac9c08..0000000
--- a/chidian/lib/dsl/filter.peg
+++ /dev/null
@@ -1,61 +0,0 @@
-# === Table Filter DSL ===
-# NOTE: Assume whitespace is removed beforehand
-
-filter_expr = or_expr
-
-# === Logical Expressions ===
-or_expr = and_expr (whitespace or_op whitespace and_expr)*
-and_expr = comparison (whitespace and_op whitespace comparison)*
-
-# === Comparisons ===
-comparison = path whitespace compare_op whitespace value
-
-# === Operators ===
-compare_op = lte / gte / lt / gt / ne / eq / contains / in_op
-or_op = ~"(?i)OR"
-and_op = ~"(?i)AND"
-
-# Order matters for these (>= before >, <= before <, != before =)
-lte = "<="
-gte = ">="
-lt = "<"
-gt = ">"
-ne = "!="
-eq = "="
-contains = ~"(?i)CONTAINS"
-in_op = ~"(?i)IN"
-
-# === Path Expression ===
-# Reuse path syntax from select
-path = nested_path / simple_name
-nested_path = simple_name (dot path_segment)+
-path_segment = simple_name array_index?
-array_index = lbrack index_content rbrack
-index_content = number / star
-
-# === Values ===
-value = string / number / boolean / null / list_value
-string = single_quoted / double_quoted
-single_quoted = single_quote string_content_single single_quote
-double_quoted = double_quote string_content_double double_quote
-string_content_single = ~"[^']*"
-string_content_double = ~"[^\"]*"
-boolean = true / false
-true = ~"(?i)true"
-false = ~"(?i)false"
-null = ~"(?i)null" / ~"(?i)none"
-list_value = lbrack (value (comma value)*)? rbrack
-
-# === Primitives ===
-lbrack = "["
-rbrack = "]"
-comma = ","
-dot = "."
-star = "*"
-single_quote = "'"
-double_quote = "\""
-whitespace = ~"\\s+"
-
-# === Lexemes ===
-simple_name = ~"[a-zA-Z_][a-zA-Z0-9_-]*"
-number = ~"-?[0-9]+(\\.[0-9]+)?"
diff --git a/chidian/lib/dsl/select.peg b/chidian/lib/dsl/select.peg
deleted file mode 100644
index c785314..0000000
--- a/chidian/lib/dsl/select.peg
+++ /dev/null
@@ -1,32 +0,0 @@
-# === Table Select DSL ===
-# NOTE: Assume whitespace is removed beforehand
-
-select_expr = star / column_list
-
-# === Column Lists ===
-column_list = column_spec (whitespace? comma whitespace? column_spec)*
-
-# === Column Specification ===
-column_spec = path (whitespace? rename_op)?
-rename_op = arrow whitespace? name
-
-# === Path Expression ===
-# Reuse existing get.peg path syntax but simplified
-path = nested_path / simple_name
-nested_path = simple_name (dot path_segment)+
-path_segment = simple_name array_index?
-array_index = lbrack (number / star) rbrack
-
-# === Primitives ===
-lbrack = "["
-rbrack = "]"
-comma = ","
-arrow = "->"
-dot = "."
-star = "*"
-whitespace = ~"\\s+"
-
-# === Lexemes ===
-simple_name = ~"[a-zA-Z_][a-zA-Z0-9_-]*"
-name = ~"[a-zA-Z_][a-zA-Z0-9_-]*"
-number = ~"-?[0-9]+"
diff --git a/chidian/lib/filter_parser.py b/chidian/lib/filter_parser.py
deleted file mode 100644
index 9ddb6c9..0000000
--- a/chidian/lib/filter_parser.py
+++ /dev/null
@@ -1,270 +0,0 @@
-"""
-Parser for Table filter DSL expressions.
-"""
-
-from pathlib import Path as PathLib
-from typing import Any, Callable, List, Union
-
-from parsimonious import Grammar, NodeVisitor
-from parsimonious.nodes import Node
-
-from ..core import get
-
-# Load the PEG grammar
-FILTER_PEG_PATH = PathLib(__file__).parent / "dsl" / "filter.peg"
-
-with open(FILTER_PEG_PATH, "r") as f:
- FILTER_GRAMMAR_TEXT = f.read()
-
-FILTER_GRAMMAR = Grammar(FILTER_GRAMMAR_TEXT)
-
-
-class FilterVisitor(NodeVisitor):
- """Transforms filter DSL parse tree into callable predicates."""
-
- def visit_filter_expr(
- self, node: Node, visited_children: List[Any]
- ) -> Callable[[dict], bool]:
- """Process the root filter expression."""
- return visited_children[0]
-
- def visit_or_expr(
- self, node: Node, visited_children: List[Any]
- ) -> Callable[[dict], bool]:
- """Process OR expressions."""
- first_expr, rest = visited_children
-
- if not rest:
- return first_expr
-
- # Build OR chain
- def or_predicate(row: dict) -> bool:
- if first_expr(row):
- return True
- for or_part in rest:
- # Extract expr from: whitespace or_op whitespace and_expr
- expr = or_part[3] if len(or_part) > 3 else or_part[-1]
- if expr(row):
- return True
- return False
-
- return or_predicate
-
- def visit_and_expr(
- self, node: Node, visited_children: List[Any]
- ) -> Callable[[dict], bool]:
- """Process AND expressions."""
- first_comp, rest = visited_children
-
- if not rest:
- return first_comp
-
- # Build AND chain
- def and_predicate(row: dict) -> bool:
- if not first_comp(row):
- return False
- for and_part in rest:
- # Extract comp from: whitespace and_op whitespace comparison
- comp = and_part[3] if len(and_part) > 3 else and_part[-1]
- if not comp(row):
- return False
- return True
-
- return and_predicate
-
- def visit_comparison(
- self, node: Node, visited_children: List[Any]
- ) -> Callable[[dict], bool]:
- """Process a single comparison."""
- # Extract path, op, value from: path whitespace op whitespace value
- path = visited_children[0]
- op = visited_children[2]
- value = visited_children[4]
-
- def compare(row: dict) -> bool:
- try:
- row_value = get(row, path)
-
- # Handle different operators
- if op == "=":
- return row_value == value
- elif op == "!=":
- return row_value != value
- elif op == ">":
- return row_value > value
- elif op == "<":
- return row_value < value
- elif op == ">=":
- return row_value >= value
- elif op == "<=":
- return row_value <= value
- elif op == "CONTAINS":
- # String contains or list contains
- if isinstance(row_value, str) and isinstance(value, str):
- return value in row_value
- elif isinstance(row_value, list):
- return value in row_value
- return False
- elif op == "IN":
- # Value in list
- return row_value in value if isinstance(value, list) else False
-
- return False
- except Exception:
- # Path not found or comparison failed
- return False
-
- return compare
-
- def visit_compare_op(self, node: Node, visited_children: List[Any]) -> str:
- """Process comparison operator."""
- op = visited_children[0]
- # Normalize to uppercase for CONTAINS/IN
- if isinstance(op, str) and op.upper() in ["CONTAINS", "IN"]:
- return op.upper()
- return op
-
- def visit_path(self, node: Node, visited_children: List[Any]) -> str:
- """Process a path expression."""
- result = visited_children[0]
- if isinstance(result, list):
- return result[0]
- return result
-
- def visit_nested_path(self, node: Node, visited_children: List[Any]) -> str:
- """Process a nested path."""
- base_name, segments = visited_children
- parts = [base_name]
-
- for dot_segment in segments:
- _, segment = dot_segment
- parts.append(segment)
-
- return ".".join(parts)
-
- def visit_path_segment(self, node: Node, visited_children: List[Any]) -> str:
- """Process a path segment."""
- name, array_index = visited_children
-
- if array_index:
- [index_str] = array_index
- return f"{name}{index_str}"
-
- return name
-
- def visit_array_index(self, node: Node, visited_children: List[Any]) -> str:
- """Process array index."""
- lbrack, index_content, rbrack = visited_children
- return f"[{index_content}]"
-
- def visit_index_content(self, node: Node, visited_children: List[Any]) -> str:
- """Process index content."""
- return visited_children[0]
-
- def visit_value(self, node: Node, visited_children: List[Any]) -> Any:
- """Process a value."""
- return visited_children[0]
-
- def visit_string(self, node: Node, visited_children: List[Any]) -> str:
- """Process string value."""
- # Either single_quoted or double_quoted
- return visited_children[0]
-
- def visit_single_quoted(self, node: Node, visited_children: List[Any]) -> str:
- """Process single quoted string."""
- _, content, _ = visited_children
- return content
-
- def visit_double_quoted(self, node: Node, visited_children: List[Any]) -> str:
- """Process double quoted string."""
- _, content, _ = visited_children
- return content
-
- def visit_string_content_single(
- self, node: Node, visited_children: List[Any]
- ) -> str:
- """Process single quoted string content."""
- return node.text
-
- def visit_string_content_double(
- self, node: Node, visited_children: List[Any]
- ) -> str:
- """Process double quoted string content."""
- return node.text
-
- def visit_number(
- self, node: Node, visited_children: List[Any]
- ) -> Union[int, float]:
- """Process numeric value."""
- text = node.text
- if "." in text:
- return float(text)
- return int(text)
-
- def visit_boolean(self, node: Node, visited_children: List[Any]) -> bool:
- """Process boolean value."""
- value = visited_children[0]
- return value.upper() == "TRUE"
-
- def visit_null(self, node: Node, visited_children: List[Any]) -> None:
- """Process null value."""
- return None
-
- def visit_list_value(self, node: Node, visited_children: List[Any]) -> List[Any]:
- """Process list value."""
- lbrack, content, rbrack = visited_children
-
- if not content:
- return []
-
- [values] = content
- if not isinstance(values, list):
- return [values]
-
- # Extract first value and rest
- result = []
- if len(values) >= 1:
- result.append(values[0])
-
- if len(values) > 1 and values[1]:
- for comma_value in values[1]:
- _, value = comma_value
- result.append(value)
-
- return result
-
- def visit_simple_name(self, node: Node, visited_children: List[Any]) -> str:
- """Process a simple name."""
- return node.text
-
- def generic_visit(self, node: Node, visited_children: List[Any]) -> Any:
- """Default handler."""
- return visited_children or node.text
-
-
-def parse_filter(expr: str) -> Callable[[dict], bool]:
- """
- Parse a filter expression into a callable predicate.
-
- Args:
- expr: The filter expression (e.g., "age > 25 AND city = 'NYC'")
-
- Returns:
- A callable that takes a dict and returns bool
-
- Examples:
- >>> predicate = parse_filter("age > 25")
- >>> predicate({"age": 30})
- True
- >>> predicate({"age": 20})
- False
- """
- # Remove extra whitespace but preserve spaces in operators
- clean_expr = " ".join(expr.split())
-
- if not clean_expr:
- raise ValueError("Empty filter expression")
-
- tree = FILTER_GRAMMAR.parse(clean_expr)
- visitor = FilterVisitor()
- return visitor.visit(tree)
diff --git a/chidian/lib/select_parser.py b/chidian/lib/select_parser.py
deleted file mode 100644
index 41e11f1..0000000
--- a/chidian/lib/select_parser.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""
-Parser for Table select DSL expressions.
-"""
-
-from pathlib import Path as PathLib
-from typing import Any, List, Optional, Union
-
-from parsimonious import Grammar, NodeVisitor
-from parsimonious.nodes import Node
-
-# Load the PEG grammar
-SELECT_PEG_PATH = PathLib(__file__).parent / "dsl" / "select.peg"
-
-with open(SELECT_PEG_PATH, "r") as f:
- SELECT_GRAMMAR_TEXT = f.read()
-
-SELECT_GRAMMAR = Grammar(SELECT_GRAMMAR_TEXT)
-
-
-class ColumnSpec:
- """Represents a single column specification in a select expression."""
-
- def __init__(self, path: str, rename_to: Optional[str] = None):
- self.path = path
- self.rename_to = rename_to
-
- def __repr__(self):
- if self.rename_to:
- return f"ColumnSpec({self.path!r} -> {self.rename_to!r})"
- return f"ColumnSpec({self.path!r})"
-
-
-class SelectVisitor(NodeVisitor):
- """Transforms select DSL parse tree into column specifications."""
-
- def visit_select_expr(
- self, node: Node, visited_children: List[Any]
- ) -> Union[str, List[ColumnSpec]]:
- """Process the root select expression."""
- # Either star or column_list
- return visited_children[0]
-
- def visit_star(self, node: Node, visited_children: List[Any]) -> str:
- """Handle wildcard selection."""
- return "*"
-
- def visit_column_list(
- self, node: Node, visited_children: List[Any]
- ) -> List[ColumnSpec]:
- """Process a list of column specifications."""
- first_spec, rest = visited_children
- specs = [first_spec]
-
- if rest:
- for comma_group in rest:
- # Extract the spec from the group (might have whitespace)
- spec = None
- for item in comma_group:
- if isinstance(item, ColumnSpec):
- spec = item
- break
- if spec:
- specs.append(spec)
-
- return specs
-
- def visit_column_spec(self, node: Node, visited_children: List[Any]) -> ColumnSpec:
- """Process a single column specification."""
- path, rename_op = visited_children
- rename_to = None
-
- if rename_op and rename_op[0]: # Check if rename_op exists and isn't empty
- # Extract the actual rename value from the nested structure
- if isinstance(rename_op[0], list):
- # It's wrapped in a list, extract from it
- for item in rename_op[0]:
- if isinstance(item, str) and item not in [" ", "\t", "\n", "->"]:
- rename_to = item
- break
- elif isinstance(rename_op[0], str):
- rename_to = rename_op[0]
-
- return ColumnSpec(path, rename_to)
-
- def visit_rename_op(self, node: Node, visited_children: List[Any]) -> str:
- """Process rename operation."""
- # Extract name from arrow, possible whitespace, name
- for item in visited_children:
- if isinstance(item, str) and item not in ["->", " ", "\t", "\n"]:
- return item
- return visited_children[-1] # Fallback to last item
-
- def visit_path(self, node: Node, visited_children: List[Any]) -> str:
- """Process a path expression."""
- # Can be nested_path or simple_name
- result = visited_children[0]
- if isinstance(result, list):
- # It's a simple_name wrapped in a list
- return result[0]
- return result
-
- def visit_nested_path(self, node: Node, visited_children: List[Any]) -> str:
- """Process a nested path like 'user.profile.name'."""
- base_name, segments = visited_children
- parts = [base_name]
-
- for dot_segment in segments:
- _, segment = dot_segment
- parts.append(segment)
-
- return ".".join(parts)
-
- def visit_path_segment(self, node: Node, visited_children: List[Any]) -> str:
- """Process a path segment with optional array index."""
- name, array_index = visited_children
-
- if array_index:
- [index_str] = array_index
- return f"{name}{index_str}"
-
- return name
-
- def visit_array_index(self, node: Node, visited_children: List[Any]) -> str:
- """Process array index notation."""
- lbrack, index_or_star, rbrack = visited_children
- return f"[{index_or_star}]"
-
- def visit_simple_name(self, node: Node, visited_children: List[Any]) -> str:
- """Process a simple name."""
- return node.text
-
- def visit_name(self, node: Node, visited_children: List[Any]) -> str:
- """Process a name (for rename targets)."""
- return node.text
-
- def visit_number(self, node: Node, visited_children: List[Any]) -> str:
- """Process a number."""
- return node.text
-
- def generic_visit(self, node: Node, visited_children: List[Any]) -> Any:
- """Default handler."""
- return visited_children or node.text
-
-
-def parse_select(expr: str) -> Union[str, List[ColumnSpec]]:
- """
- Parse a select expression into column specifications.
-
- Args:
- expr: The select expression (e.g., "name, age -> years, address.city")
-
- Returns:
- Either "*" for wildcard or a list of ColumnSpec objects
-
- Examples:
- >>> parse_select("*")
- "*"
- >>> parse_select("name")
- [ColumnSpec('name')]
- >>> parse_select("patient.id -> patient_id, status")
- [ColumnSpec('patient.id' -> 'patient_id'), ColumnSpec('status')]
- """
- # Trim but preserve internal spaces for proper parsing
- clean_expr = expr.strip()
-
- if not clean_expr:
- raise ValueError("Empty select expression")
-
- tree = SELECT_GRAMMAR.parse(clean_expr)
- visitor = SelectVisitor()
- return visitor.visit(tree)
diff --git a/chidian/mapper.py b/chidian/mapper.py
deleted file mode 100644
index 91b9fc1..0000000
--- a/chidian/mapper.py
+++ /dev/null
@@ -1,279 +0,0 @@
-from dataclasses import dataclass
-from enum import Enum
-from typing import (
- Any,
- Callable,
- Generic,
- List,
- Mapping,
- Optional,
- Type,
- TypeVar,
-)
-
-from pydantic import BaseModel, ValidationError
-
-"""
-Mapper class - execution engine for DataMapping with validation strategies.
-
-The Mapper class takes a DataMapping and executes it with different validation modes:
-- STRICT: Validate and throw errors
-- FLEXIBLE: Validate but continue on errors, collecting issues
-- AUTO: Use strict if schemas present, flexible otherwise
-
-Also contains special types for transformation control (DROP, KEEP).
-"""
-
-# Define generic type variable for output models
-_OutT = TypeVar("_OutT", bound=BaseModel)
-
-
-class ValidationMode(Enum):
- """Validation modes for mapper execution."""
-
- STRICT = "strict" # Validate and throw errors
- FLEXIBLE = "flexible" # Validate but continue on errors
- AUTO = "auto" # Strict if schemas present, flexible otherwise
-
-
-@dataclass
-class ValidationIssue:
- """Represents a validation issue in flexible mode."""
-
- stage: str # "input" or "output"
- field: Optional[str]
- error: str
- value: Any
-
-
-class MapperResult(Generic[_OutT]):
- """Result of a mapping operation, potentially with validation issues."""
-
- def __init__(
- self,
- data: _OutT | dict[str, Any] | Any,
- issues: Optional[List[ValidationIssue]] = None,
- ):
- self.data: _OutT | dict[str, Any] | Any = data
- self.issues = issues or []
-
- @property
- def has_issues(self) -> bool:
- return len(self.issues) > 0
-
- def raise_if_issues(self):
- """Raise an exception if there are validation issues."""
- if self.has_issues:
- messages = [f"{i.stage}: {i.error}" for i in self.issues]
- raise ValidationError(f"Validation issues: {'; '.join(messages)}")
-
-
-class Mapper(Generic[_OutT]):
- """
- Data transformation engine with validation strategies.
- Combines semantic transformation definition with execution logic.
- """
-
- def __init__(
- self,
- transformations: Mapping[str, Callable[..., Any] | Any],
- output_schema: Optional[Type[_OutT]] = None,
- mode: ValidationMode = ValidationMode.AUTO,
- min_input_schemas: Optional[List[Type[BaseModel]]] = None,
- other_input_schemas: Optional[List[Type[BaseModel]]] = None,
- collect_all_errors: bool = True,
- ):
- """
- Initialize a Mapper with transformations and validation configuration.
-
- Args:
- transformations: Dict mapping output fields to transformations
- output_schema: Optional Pydantic model for output validation
- mode: Validation mode (strict, flexible, or auto)
- min_input_schemas: Minimal set of source models (metadata-only)
- other_input_schemas: Additional source models (metadata-only)
- collect_all_errors: In flexible mode, whether to collect all errors
- """
- # Convert Mapping to dict if needed
- if isinstance(transformations, dict):
- self.transformations = transformations
- elif hasattr(transformations, "items"):
- # Support Mapping types by converting to dict
- self.transformations = dict(transformations)
- else:
- raise TypeError(
- f"Transformations must be dict or Mapping, got {type(transformations).__name__}"
- )
- self.output_schema = output_schema
- self.min_input_schemas = min_input_schemas or []
- self.other_input_schemas = other_input_schemas or []
- self._backward_compat = False
-
- self.collect_all_errors = collect_all_errors
-
- # Determine actual mode
- if mode == ValidationMode.AUTO:
- self.mode = (
- ValidationMode.STRICT if self.has_schemas else ValidationMode.FLEXIBLE
- )
- else:
- self.mode = mode
-
- def transform(self, data: dict) -> dict:
- """
- Apply the pure transformation logic.
- This is the core semantic transformation without any validation.
- """
- result = {}
-
- for target_field, transform_spec in self.transformations.items():
- if callable(transform_spec):
- result[target_field] = transform_spec(data)
- else:
- result[target_field] = transform_spec
-
- return result
-
- @property
- def has_schemas(self) -> bool:
- """Check if this mapping has output schema defined."""
- return self.output_schema is not None
-
- def __call__(self, data: Any) -> _OutT | MapperResult[_OutT] | Any:
- """
- Execute the mapping with the configured validation mode.
-
- Returns:
- - In strict mode: The transformed data (raises on validation errors)
- - In flexible mode: MapperResult with data and any validation issues
- - In backward compat mode with dict: Always returns dict
- """
- # For non-schema mode, just return dict
- if not self.has_schemas and self.mode == ValidationMode.FLEXIBLE:
- return self.transform(data)
-
- if self.mode == ValidationMode.STRICT:
- return self._execute_strict(data)
- else:
- return self._execute_flexible(data)
-
- def _execute_strict(self, data: Any) -> Any:
- """Execute with strict validation - raise on any errors."""
- # Import helpers here to avoid circular dependency
- from .lib.data_mapping_helpers import to_dict, validate_output
-
- # Convert input to dict if needed (no validation)
- input_dict = to_dict(data) if hasattr(data, "model_dump") else data
-
- # Apply transformation
- output_dict = self.transform(input_dict)
-
- # Validate output if schema provided
- if self.output_schema:
- return validate_output(output_dict, self.output_schema)
- return output_dict
-
- def _execute_flexible(self, data: Any) -> MapperResult:
- """Execute with flexible validation - collect errors but continue."""
- # Import helpers here to avoid circular dependency
- from .lib.data_mapping_helpers import to_dict, validate_output
-
- issues = []
-
- # Convert input to dict if needed (no validation)
- input_dict = to_dict(data) if hasattr(data, "model_dump") else data
-
- # Apply transformation (might fail if input validation failed)
- try:
- output_dict = self.transform(input_dict)
- except Exception as e:
- # If transformation fails, return with error
- issues.append(
- ValidationIssue(
- stage="transform", field=None, error=str(e), value=input_dict
- )
- )
- return MapperResult(None, issues)
-
- # Try to validate output
- final_output: Any = output_dict
- if self.output_schema:
- try:
- final_output = validate_output(output_dict, self.output_schema)
- except ValidationError as e:
- # Collect output validation errors
- for error in e.errors():
- issues.append(
- ValidationIssue(
- stage="output",
- field=".".join(str(loc) for loc in error["loc"]),
- error=error["msg"],
- value=error.get("input"),
- )
- )
- # Return raw output dict if validation fails
- final_output = output_dict
-
- return MapperResult(final_output, issues)
-
-
-class DROP(Enum):
- """
- A DROP placeholder object indicates that the object relative to the current value should be dropped.
- An "object" in this context is a dict or a list.
-
- This enum implements the transformation protocol without inheritance to avoid metaclass conflicts.
-
- Examples:
- ```
- { <-- Grandparent (rel to _value)
- 'A': { <-- Parent (rel to _value)
- 'B': { <-- This Object (rel to _value)
- 'C': _value
- }
- }
- }
- ```
-
- ```
- { <-- Grandparent (rel to _value1 and _value2)
- 'A': [ <-- Parent (rel to _value1 and _value2)
- { <-- This Object (rel to _value1)
- 'B': _value1
- },
- { <-- This Object (rel to _value2)
- 'B': _value2
- }
- ]
- }
- ```
- """
-
- THIS_OBJECT = -1
- PARENT = -2
- GRANDPARENT = -3
- GREATGRANDPARENT = -4
-
- def process(self, _data: Any, _context: dict[str, Any] | None = None) -> "DROP":
- """DROP sentinels are processed by Mapper, not directly."""
- return self
-
- @property
- def level(self) -> int:
- """Get the drop level value for compatibility."""
- return self.value
-
-
-class KEEP:
- """
- A value wrapped in a KEEP object should be ignored by the Mapper class when removing values.
-
- Partial keeping is _not_ supported (i.e. a KEEP object nested inside an object that is being DROP-ped will not be preserved).
- """
-
- def __init__(self, value: Any):
- self.value = value
-
- def process(self, _data: Any, _context: dict[str, Any] | None = None) -> Any:
- """KEEP sentinels preserve their value during processing."""
- return self.value
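-
-
-# Illustrative note (not in the original module): KEEP shields a value from
-# empty-value removal, e.g. a mapping that returns {"note": KEEP("")} keeps the
-# empty string in its output, while a bare {"note": ""} would be removed.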
diff --git a/chidian/partials.py b/chidian/partials.py
deleted file mode 100644
index ec90665..0000000
--- a/chidian/partials.py
+++ /dev/null
@@ -1,304 +0,0 @@
-"""
-The `partials` module provides a simplified set of core functions for data transformation.
-
-This focuses on basic operations that are Rust-friendly and essential for data processing.
-"""
-
-import operator
-from functools import partial, reduce
-from typing import Any, Callable, TypeVar
-
-from .core import get as _get
-
-T = TypeVar("T")
-
-
-class FunctionChain:
- """Composable function chain that consolidates operations."""
-
- def __init__(self, *operations: Callable):
- self.operations = list(operations)
-
- def __or__(
- self, other: Callable | "FunctionChain" | "ChainableFunction"
- ) -> "FunctionChain":
- """Chain operations with | operator."""
- if isinstance(other, FunctionChain):
- return FunctionChain(*self.operations, *other.operations)
- elif isinstance(other, ChainableFunction):
- return FunctionChain(*self.operations, other.func)
- else:
- return FunctionChain(*self.operations, other)
-
- def __call__(self, value: Any) -> Any:
- """Apply all operations in sequence."""
- return reduce(lambda v, f: f(v), self.operations, value)
-
- def __repr__(self) -> str:
- ops = " | ".join(
- f.__name__ if hasattr(f, "__name__") else str(f) for f in self.operations
- )
- return f"FunctionChain({ops})"
-
- def __len__(self) -> int:
- """Number of operations in the chain."""
- return len(self.operations)
-
- def __getattr__(self, name: str) -> Any:
- """
- Support method chaining by looking up chainable functions in the module.
-
- This allows: p.get("path").join(" ").upper() instead of p.get("path") | p.join(" ") | p.upper
- """
- # Get the current module's globals to look up functions
- import sys
-
- current_module = sys.modules[__name__]
-
- # Check if the attribute is a known chainable function
- if hasattr(current_module, name):
- attr = getattr(current_module, name)
- # If it's a ChainableFunction or a function that returns one, create a method
- if isinstance(attr, ChainableFunction):
- # Return a function that chains this operation
- def chain_method(*args, **kwargs):
- new_op = (
- attr
- if not args and not kwargs
- else (lambda v: attr(*args, **kwargs)(v))
- )
- return FunctionChain(*self.operations, new_op)
-
- return chain_method
- # If it's a function that returns a ChainableFunction (like split, join, etc.)
- elif callable(attr):
-
- def chain_method(*args, **kwargs):
- new_op = attr(*args, **kwargs)
- if isinstance(new_op, ChainableFunction):
- return FunctionChain(*self.operations, new_op.func)
- elif callable(new_op):
- return FunctionChain(*self.operations, new_op)
- return FunctionChain(*self.operations, lambda v: new_op)
-
- return chain_method
-
- raise AttributeError(
- f"'{type(self).__name__}' object has no attribute '{name}'"
- )
-
-
-class ChainableFunction:
- """Wrapper to make any function/partial chainable with | or method chaining."""
-
- def __init__(self, func: Callable):
- self.func = func
- # Preserve function metadata
- self.__name__ = getattr(func, "__name__", repr(func))
- self.__doc__ = getattr(func, "__doc__", None)
-
- def __or__(
- self, other: Callable | FunctionChain | "ChainableFunction"
- ) -> FunctionChain:
- """Start or extend a chain with | operator."""
- if isinstance(other, FunctionChain):
- return FunctionChain(self.func, *other.operations)
- elif isinstance(other, ChainableFunction):
- return FunctionChain(self.func, other.func)
- else:
- return FunctionChain(self.func, other)
-
- def __ror__(self, other: Callable | FunctionChain) -> FunctionChain:
- """Allow chaining when ChainableFunction is on the right side."""
- if isinstance(other, FunctionChain):
- return FunctionChain(*other.operations, self.func)
- else:
- return FunctionChain(other, self.func)
-
- def __call__(self, *args, **kwargs):
- """Call the wrapped function."""
- return self.func(*args, **kwargs)
-
- def __getattr__(self, name: str) -> Any:
- """Support method chaining by delegating to FunctionChain."""
- # Convert to FunctionChain and delegate
- return getattr(FunctionChain(self.func), name)
-
- def __repr__(self) -> str:
- return f"ChainableFunction({self.__name__})"
-
-
-def get(
- key: str | list[str], default: Any = None, apply: Any = None, strict: bool = False
-) -> FunctionChain:
- """
- Create a chainable function for get operations.
-
- Args:
- key: Path string (e.g., "data.items[0].name") or list of paths for multi-path extraction
- default: Default value if path not found
- apply: Function(s) to apply to the result (legacy parameter)
- strict: If True, raise errors on missing paths
-
- Returns:
- FunctionChain that extracts values from source data and supports method chaining
- like .join(), .upper(), etc.
-
- Examples:
- # Single path
- p.get("user.name")(data)
-
- # Multi-path with chaining
- p.get(["name.first", "name.given[*]", "name.suffix"]).join(" ", unwrap=True)(data)
- """
- # Multi-path extraction
- if isinstance(key, list):
-
- def multi_get(source):
- values = []
- for path in key:
- val = _get(source, path, default=default, strict=strict)
- if val is not None:
- # If val is a list, extend; otherwise append
- if isinstance(val, list):
- values.extend(val)
- else:
- values.append(val)
- return values
-
- return FunctionChain(multi_get)
-
- # Single path - keep backward compatibility
- def get_partial(source):
- return _get(source, key, default=default, apply=apply, strict=strict)
-
- return FunctionChain(get_partial)
-
-
-# Arithmetic operations
-def add(value: Any) -> Callable[[Any], Any]:
- """Add a value to the input."""
- return partial(lambda x, v: operator.add(x, v), v=value)
-
-
-def subtract(value: Any) -> Callable[[Any], Any]:
- """Subtract a value from the input."""
- return partial(lambda x, v: operator.sub(x, v), v=value)
-
-
-def multiply(value: Any) -> Callable[[Any], Any]:
- """Multiply the input by a value."""
- return partial(lambda x, v: operator.mul(x, v), v=value)
-
-
-def divide(value: Any) -> Callable[[Any], Any]:
- """Divide the input by a value."""
- return partial(lambda x, v: operator.truediv(x, v), v=value)
-
-
-# Boolean operations
-def equals(value: Any) -> Callable[[Any], bool]:
- """Check if input equals the given value."""
- return partial(operator.eq, value)
-
-
-def contains(value: Any) -> Callable[[Any], bool]:
- """Check if input contains the given value."""
- return partial(lambda x, v: operator.contains(x, v), v=value)
-
-
-def isinstance_of(type_or_types: type) -> Callable[[Any], bool]:
- """Check if input is an instance of the given type(s)."""
- return partial(lambda x, types: isinstance(x, types), types=type_or_types)
-
-
-# String manipulation functions as ChainableFunction
-upper = ChainableFunction(str.upper)
-lower = ChainableFunction(str.lower)
-strip = ChainableFunction(str.strip)
-
-
-def split(sep: str | None = None) -> ChainableFunction:
- """Create a chainable split function."""
- return ChainableFunction(partial(str.split, sep=sep))
-
-
-def replace(old: str, new: str) -> ChainableFunction:
- """Create a chainable replace function."""
- return ChainableFunction(
- partial(
- lambda s, old_val, new_val: s.replace(old_val, new_val),
- old_val=old,
- new_val=new,
- )
- )
-
-
-def join(sep: str, flatten: bool = False) -> ChainableFunction:
- """
- Create a chainable join function.
-
- Args:
- sep: Separator string
- flatten: If True, flatten nested lists and filter None values before joining
- """
- if flatten:
-
- def join_flatten(items):
- flat = []
- for item in items:
- if isinstance(item, list):
- # Flatten nested list
- flat.extend(str(x) for x in item if x is not None)
- elif item is not None:
- flat.append(str(item))
- return sep.join(flat)
-
- return ChainableFunction(join_flatten)
- else:
- return ChainableFunction(
- partial(
- lambda separator, items: separator.join(
- str(x) for x in items if x is not None
- ),
- sep,
- )
- )
-
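-# Illustrative (not in the original module): with flatten=True, join flattens
-# one level of nesting and filters None before joining, e.g.
-#   join(" ", flatten=True)(["Gandalf", ["the", "Grey"], None]) -> "Gandalf the Grey"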
-
-# Array/List operations as ChainableFunction
-first = ChainableFunction(lambda x: x[0] if x else None)
-last = ChainableFunction(lambda x: x[-1] if x else None)
-length = ChainableFunction(len)
-
-
-def at_index(i: int) -> ChainableFunction:
- """Get element at index."""
- return ChainableFunction(
- partial(lambda x, idx: x[idx] if len(x) > idx else None, idx=i)
- )
-
-
-def slice_range(start: int | None = None, end: int | None = None) -> ChainableFunction:
- """Slice a sequence."""
- return ChainableFunction(partial(lambda x, s, e: x[s:e], s=start, e=end))
-
-
-# Type conversions as ChainableFunction
-to_int = ChainableFunction(int)
-to_float = ChainableFunction(float)
-to_str = ChainableFunction(str)
-to_bool = ChainableFunction(bool)
-
-
-# Utility functions
-def round_to(decimals: int) -> ChainableFunction:
- """Round to specified decimals."""
- return ChainableFunction(partial(round, ndigits=decimals))
-
-
-def default_to(default_value: Any) -> ChainableFunction:
- """Replace None with default value."""
- return ChainableFunction(
- partial(lambda x, default: default if x is None else x, default=default_value)
- )
diff --git a/chidian/process.py b/chidian/process.py
new file mode 100644
index 0000000..4fcdf67
--- /dev/null
+++ b/chidian/process.py
@@ -0,0 +1,170 @@
+"""
+Processing utilities for chidian output transformation.
+
+Combines DROP handling, KEEP unwrapping, and empty value removal.
+"""
+
+from typing import Any
+
+from .drop import DROP, _DropSignal
+from .keep import KEEP
+
+
+def is_empty(value: Any) -> bool:
+ """Check if a value is considered empty."""
+ if value is None:
+ return True
+ if isinstance(value, dict) and len(value) == 0:
+ return True
+ if isinstance(value, list) and len(value) == 0:
+ return True
+ if isinstance(value, str) and len(value) == 0:
+ return True
+ return False
+
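+# Note (illustrative, not part of the module): falsy-but-meaningful values are
+# not "empty": is_empty(0) -> False and is_empty(False) -> False, while
+# is_empty("") -> True and is_empty([]) -> True.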
+
+def process_output(data: Any, remove_empty: bool = True) -> Any:
+ """
+ Process output data: handle DROPs, unwrap KEEPs, optionally remove empties.
+
+ Args:
+ data: The data structure to process
+ remove_empty: If True (default), remove empty values ({}, [], "", None).
+ KEEP-wrapped values are preserved regardless of this setting.
+
+ Returns:
+ Processed data with DROPs applied, KEEPs unwrapped, and empties removed.
+ """
+ try:
+ return _process_value(data, remove_empty)
+ except _DropSignal as signal:
+ if signal.levels > 0:
+ raise ValueError(
+ f"DROP level exceeds structure depth (levels remaining: {signal.levels})"
+ )
+ # Top-level container was dropped
+ if isinstance(data, dict):
+ return {}
+ elif isinstance(data, list):
+ return []
+ else:
+ return None
+
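+# A minimal illustrative sketch (not part of this module) of how process_output
+# treats a dict mixing the sentinels, following the semantics documented above:
+#
+#   process_output({"a": DROP.THIS_OBJECT, "b": KEEP(""), "c": None, "d": 1})
+#   # -> {"b": "", "d": 1}   ("a" is dropped, "c" is removed as empty)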
+
+def _process_value(data: Any, remove_empty: bool) -> Any:
+ """Internal processor that may raise _DropSignal."""
+ # Handle DROP sentinel
+ if isinstance(data, DROP):
+ raise _DropSignal(data.value)
+
+ # Handle KEEP wrapper - process inner value for DROPs but preserve from empty removal
+ if isinstance(data, KEEP):
+ # Process the inner value to handle any DROP sentinels, but skip empty removal
+ return _process_value(data.value, remove_empty=False)
+
+ # Process containers recursively
+ if isinstance(data, dict):
+ return _process_dict(data, remove_empty)
+
+ if isinstance(data, list):
+ return _process_list(data, remove_empty)
+
+ # For scalar values, check if empty and should be removed
+ if remove_empty and is_empty(data):
+ return None # Will be filtered out by parent
+
+ return data
+
+
+def _process_dict(d: dict, remove_empty: bool) -> dict:
+ """Process a dict, handling DROP/KEEP and optionally removing empties."""
+ result = {}
+
+ for key, value in d.items():
+ # Handle KEEP specially - process inner value for DROPs but preserve from empty removal
+ if isinstance(value, KEEP):
+ try:
+ result[key] = _process_value(value.value, remove_empty=False)
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ pass # Remove this key
+ elif signal.levels == 1:
+ raise _DropSignal(0)
+ else:
+ raise _DropSignal(signal.levels - 1)
+ continue
+
+ try:
+ processed = _process_value(value, remove_empty)
+
+ # Skip empty values if remove_empty is True
+ if remove_empty and is_empty(processed):
+ continue
+
+ result[key] = processed
+
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ # Remove this key (don't add to result)
+ pass
+ elif signal.levels == 1:
+ # Remove this dict from its parent
+ raise _DropSignal(0)
+ else:
+ # Propagate further up
+ raise _DropSignal(signal.levels - 1)
+
+ return result
+
+
+def _process_list(lst: list, remove_empty: bool) -> list:
+ """Process a list, handling DROP/KEEP and optionally removing empties."""
+ result = []
+
+ for item in lst:
+ # Handle KEEP specially - process inner value for DROPs but preserve from empty removal
+ if isinstance(item, KEEP):
+ try:
+ result.append(_process_value(item.value, remove_empty=False))
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ pass # Remove this item
+ elif signal.levels == 1:
+ raise _DropSignal(0)
+ else:
+ raise _DropSignal(signal.levels - 1)
+ continue
+
+ # Special case: DROP directly in list
+ if isinstance(item, DROP):
+ if item == DROP.THIS_OBJECT:
+ # Just skip this item
+ continue
+ elif item == DROP.PARENT:
+ # Remove this list's parent container
+ raise _DropSignal(1)
+ else:
+ # GRANDPARENT or higher - propagate up
+ raise _DropSignal(item.value - 1)
+
+ try:
+ processed = _process_value(item, remove_empty)
+
+ # Skip empty values if remove_empty is True
+ if remove_empty and is_empty(processed):
+ continue
+
+ result.append(processed)
+
+ except _DropSignal as signal:
+ if signal.levels == 0:
+ # Remove this item (don't add to result)
+ pass
+ elif signal.levels == 1:
+ # Remove this list from its parent
+ raise _DropSignal(0)
+ else:
+ # Propagate further up
+ raise _DropSignal(signal.levels - 1)
+
+ return result
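+
+
+# Propagation sketch (illustrative): each _DropSignal level unwinds one
+# enclosing container. Assuming the README semantics for DROP.PARENT:
+#
+#   process_output({"keep": 1, "gone": {"x": DROP.PARENT}})
+#   # -> {"keep": 1}   (the dict holding "x" is removed from its parent)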
diff --git a/chidian/table.py b/chidian/table.py
deleted file mode 100644
index 8dba9ab..0000000
--- a/chidian/table.py
+++ /dev/null
@@ -1,1931 +0,0 @@
-import csv
-import json
-from datetime import datetime
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Union
-
-from .core import get
-
-if TYPE_CHECKING:
- import pandas as pd
- import polars as pl
-
-"""
-A `Table` is a lightweight, sparse table implementation that treats a collection of dictionaries as rows.
-
-Each dictionary represents a row with potentially different keys (columns), making it ideal for heterogeneous,
-nested data. Provides a middle ground between the strictness of DataFrames and raw list[dict]/dict[str, dict].
-
-Supports path-based queries, filtering, mapping, and other functional operations.
-"""
-
-
-class Table:
- def __init__(
- self,
- rows: Union[list[dict[str, Any]], dict[str, dict[str, Any]], None] = None,
- ):
- """
- Initialize a Table from rows.
-
- Args:
- rows: Either:
- - list[dict]: Each dict is a row, auto-keyed by index ($0, $1, ...)
- - dict[str, dict]: Pre-keyed rows (keys preserved)
- - None: Empty table
- """
- self._rows: list[dict[str, Any]] = []
- self._row_keys: dict[str, int] = {} # Maps row keys to indices
- self._key_to_row: dict[str, dict[str, Any]] = {} # Maps $ keys to row dicts
-
- # Initialize rows based on input type
- if rows is not None:
- if isinstance(rows, list):
- self._rows = rows
- # Store rows by index using $-syntax
- for i, row in enumerate(rows):
- key = f"${i}"
- self._key_to_row[key] = row
- self._row_keys[key] = i
- elif isinstance(rows, dict):
- self._rows = list(rows.values())
- # Store rows by their original keys
- for i, (key, row) in enumerate(rows.items()):
- # Ensure keys start with $ for consistency
- if not key.startswith("$"):
- key = f"${key}"
- self._key_to_row[key] = row
- self._row_keys[key] = i
-
- def get(self, path: str, default: Any = None) -> Union[Any, list[Any]]:
- """
- Extract values from rows using a path expression.
-
- If path starts with $, extracts from a specific row only.
- Otherwise, extracts from all rows.
-
- Uses the existing chidian.core.get() engine to navigate nested structures.
-
- Args:
- path: Path expression:
- - "$0.name" or "$bob.name": Extract from specific row
- - "name" or "patient.id": Extract from all rows
- default: Value to use when path doesn't exist
-
- Returns:
- - Single value when using $-prefixed path for specific row
- - List of values (one per row) when extracting from all rows
-
- Examples:
- >>> t = Table([
- ... {"name": "John", "age": 30},
- ... {"name": "Jane", "age": 25},
- ... {"name": "Bob"} # Note: no age
- ... ])
- >>> t.get("name")
- ["John", "Jane", "Bob"]
- >>> t.get("$0.name")
- "John"
- >>> t.get("$1.age")
- 25
- >>> t.get("$2.age", default=0)
- 0
- >>> t.append({"name": "Alice"}, custom_key="alice")
- >>> t.get("$alice.name")
- "Alice"
- """
- # Check if path starts with $ (specific row access)
- if path.startswith("$"):
- # Extract row key and remaining path
- parts = path.split(".", 1)
- row_key = parts[0]
-
- # Check if this key exists
- if row_key not in self._key_to_row:
- return default
-
- # Get the specific row
- row = self._key_to_row[row_key]
-
- # If there's a remaining path, extract from the row
- if len(parts) > 1:
- return self._get_path_value(row, parts[1], default=default)
- else:
- # Just the row key itself, return the whole row
- return row
-
- # Original behavior: extract from all rows
- results = []
- for row in self._rows:
- value = self._get_path_value(row, path, default=default)
- results.append(value)
- return results
-
- @property
- def columns(self) -> set[str]:
- """
- Return the union of all keys across all rows.
-
- This represents the "sparse columns" of the table.
-
- Examples:
- >>> t = Table([
- ... {"name": "John", "age": 30},
- ... {"name": "Jane", "city": "NYC"}
- ... ])
- >>> t.columns
- {"name", "age", "city"}
- """
- all_keys: set[str] = set()
- for row in self._rows:
- all_keys.update(row.keys())
- return all_keys
-
- def to_list(self) -> list[dict[str, Any]]:
- """Return rows as a plain list of dicts."""
- return self._rows.copy()
-
- def to_dict(self) -> dict[str, dict[str, Any]]:
- """Return rows as a dict keyed by row identifiers."""
- return self._key_to_row.copy()
-
- def append(self, row: dict[str, Any], custom_key: Optional[str] = None) -> None:
- """
- Add a new row to the table.
-
- This operation may expand the logical column set if the new row
- contains keys not seen in existing rows.
-
- Args:
- row: Dictionary representing the new row
- custom_key: Optional row identifier (defaults to $n where n is the index)
- If provided and doesn't start with $, will be prefixed with $
-
- Examples:
- >>> t = Table([{"name": "John"}])
- >>> t.append({"name": "Jane", "age": 25}) # Adds 'age' column
- >>> t.append({"name": "Bob", "city": "NYC"}, custom_key="bob") # Adds 'city' column
- >>> len(t)
- 3
- """
- self._rows.append(row)
-
- if custom_key is None:
- # Use $-prefixed index as key
- key = f"${len(self._rows) - 1}"
- else:
- # Ensure custom keys start with $
- if not custom_key.startswith("$"):
- key = f"${custom_key}"
- else:
- key = custom_key
-
- self._key_to_row[key] = row
- self._row_keys[key] = len(self._rows) - 1
-
- def filter(self, predicate: Union[str, Callable[[dict], bool]]) -> "Table":
- """
- Filter rows based on a predicate.
-
- Args:
- predicate: Either:
- - Callable: Function that takes a row dict and returns bool
- - str: DSL filter expression
-
- Returns:
- New Table with only rows matching the predicate
-
- Examples:
- >>> t = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
- >>> t.filter(lambda row: row.get("age", 0) > 26) # Returns Table with just John
- >>> t.filter("age > 26")
- >>> t.filter("status = 'active' AND age >= 18")
- >>> t.filter("addresses[0].city = 'NYC'")
- """
- if isinstance(predicate, str):
- from .lib.filter_parser import parse_filter
-
- predicate = parse_filter(predicate)
-
- # Functional predicate implementation
- filtered_rows = [row for row in self._rows if predicate(row)]
- return Table(filtered_rows)
-
- def map(self, transform: Callable[[dict], dict]) -> "Table":
- """
- Transform each row using the provided function.
-
- Args:
- transform: Function that takes a row dict and returns a new dict
-
- Returns:
- New Table with transformed rows
-
- Examples:
- >>> t = Table([{"name": "john"}, {"name": "jane"}])
- >>> t2 = t.map(lambda row: {**row, "name": row["name"].upper()})
- >>> t2.get("name")
- ["JOHN", "JANE"]
-
- >>> # Add computed field
- >>> t3 = t.map(lambda row: {**row, "name_length": len(row.get("name", ""))})
- """
- transformed_rows = [transform(row) for row in self._rows]
- return Table(transformed_rows)
-
- def select(self, query: str) -> "Table":
- """
- Project columns and create a new Table using DSL syntax.
-
- Args:
- query: DSL column selection expression
-
- Returns:
- New Table with selected columns
-
- Examples:
- >>> t.select("name, age") # Select specific columns
- >>> t.select("*") # Select all columns
- >>> t.select("patient.id -> patient_id, status") # Rename column
- >>> t.select("name, addresses[0].city -> primary_city") # Nested + rename
- """
- from .lib.select_parser import parse_select
-
- parsed = parse_select(query)
-
- # Handle wildcard selection
- if parsed == "*":
- return Table(self._rows.copy())
-
- # Handle column specifications
- if not isinstance(parsed, list):
- # This shouldn't happen based on parse_select implementation
- raise ValueError("Unexpected parser result")
-
- new_rows = []
- for row in self._rows:
- new_row = {}
-
- for spec in parsed:
- # Get value using path
- value = self._get_path_value(row, spec.path, default=None)
-
- # Use rename if specified, otherwise use the last segment of path
- if spec.rename_to:
- key = spec.rename_to
- else:
- # Extract last part of path as column name
- # e.g., "patient.id" -> "id", "name" -> "name"
- path_parts = spec.path.split(".")
- # Remove array indices from last part
- last_part = path_parts[-1].split("[")[0]
- key = last_part
-
- new_row[key] = value
-
- new_rows.append(new_row)
-
- return Table(new_rows)
-
- def unique(self, path: str) -> list[Any]:
- """
- Get unique values from a column path.
-
- Args:
- path: Path expression to extract values from
-
- Returns:
- List of unique values found at the path
- """
- values = self.get(path)
- seen = set()
- unique_values = []
- for value in values:
- # Handle unhashable types by converting to string for dedup
- try:
- if value not in seen:
- seen.add(value)
- unique_values.append(value)
- except TypeError:
- # Unhashable type, use string representation for dedup
- str_value = str(value)
- if str_value not in seen:
- seen.add(str_value)
- unique_values.append(value)
- return unique_values
-
- def group_by(self, path: str) -> dict[Any, "Table"]:
- """
- Group rows by values at a given path.
-
- Args:
- path: Path expression to group by
-
- Returns:
- Dictionary mapping unique values to Tables containing matching rows
- """
- groups: dict[Any, list[dict[str, Any]]] = {}
-
- for row in self._rows:
- group_value = self._get_path_value(row, path, default=None)
- # Handle unhashable types by converting to string
- try:
- hash(group_value)
- key = group_value
- except TypeError:
- key = str(group_value)
-
- if key not in groups:
- groups[key] = []
- groups[key].append(row)
-
- return {key: Table(rows) for key, rows in groups.items()}
-
- def extract(self, path: str) -> "Table":
- """
- Extract values from all rows using a path and return as a new Table.
-
- This method is particularly useful for extracting nested structures
- like FHIR Bundle entries or other collections within your data.
-
- Args:
- path: Path expression to extract from each row
- - Supports wildcards: "entries[*].resource"
- - Supports nested paths: "patient.address[0].city"
-
- Returns:
- New Table where each extracted value becomes a row.
- If path uses wildcards and returns lists, the lists are flattened.
- None values are filtered out.
-
- Examples:
- >>> # FHIR Bundle example
- >>> bundle_table = Table([fhir_bundle])
- >>> resources = bundle_table.extract("entries[*].resource")
-
- >>> # Extract nested values from multiple rows
- >>> patients_table = Table([patient1, patient2, patient3])
- >>> addresses = patients_table.extract("address[*]")
-
- >>> # Simple field extraction
- >>> names = patients_table.extract("name.given[0]")
- """
- # Get extracted values using existing logic
- extracted = self.get(path)
-
- # Handle the case where get() returns a single value (shouldn't happen for non-$ paths, but be safe)
- if not isinstance(extracted, list):
- extracted = [extracted]
-
- # Flatten any nested lists and filter out None values
- flattened = []
- for item in extracted:
- if item is None:
- continue
- elif isinstance(item, list):
- # Flatten lists from wildcard extractions
- flattened.extend(item)
- else:
- flattened.append(item)
-
- # Return new Table with extracted values as rows
- return Table(flattened)
-
- @classmethod
- def from_path(cls, data: Any, path: str) -> "Table":
- """
- Create a Table by extracting a path from source data.
-
- This is a convenience constructor that's perfect for extracting
- collections from complex nested structures like FHIR Bundles.
-
- Args:
- data: Source data structure (dict, list, or any nested structure)
- path: Path expression to extract
- - "entries[*].resource" for FHIR Bundle resources
- - "results[*].observation" for lab results
- - "items[*]" for simple list extraction
-
- Returns:
- New Table with extracted data as rows
-
- Examples:
- >>> # Extract FHIR Bundle resources directly
- >>> resources_table = Table.from_path(fhir_bundle, "entries[*].resource")
-
- >>> # Extract nested arrays
- >>> observations = Table.from_path(lab_report, "results[*].observation")
-
- >>> # Extract from lists
- >>> items = Table.from_path({"data": [item1, item2]}, "data[*]")
- """
- # Create temporary single-row table with the source data
- temp_table = cls([data])
-
- # Extract the path and return the result
- return temp_table.extract(f"$0.{path}")
-
- def join(
- self,
- other: "Table",
- on: str | tuple[str, str] | list[str | tuple[str, str]] | None = None,
- how: str = "left",
- suffixes: tuple[str, str] = ("", "_2"),
- ) -> "Table":
- """
- Join two tables based on matching column values.
-
- Supports SQL-like join operations with flexible key specification
- and path-based column access.
-
- Args:
- other: The right table to join with
- on: Join key specification:
- - str: Same column name in both tables ("id")
- - tuple: Different names (("id", "patient_id"))
- - list[str]: Multiple columns, same names (["id", "type"])
- - list[tuple]: Multiple columns, different names
- ([("patient_id", "subject_id"), ("date", "visit_date")])
- - None: Natural join on all common columns
- how: Join type - "left" (default), "inner", "right", or "outer"
- suffixes: Tuple of suffixes for conflicting column names.
- Default ("", "_2") adds "_2" to right table conflicts.
-
- Returns:
- New Table with joined data
-
- Examples:
- >>> # Simple join on same column
- >>> patients.join(visits, on="patient_id")
-
- >>> # Join with different column names
- >>> patients.join(visits, on=("id", "patient_id"))
-
- >>> # Multiple join keys
- >>> orders.join(items, on=[("order_id", "oid"), "date"])
-
- >>> # Inner join with path expression
- >>> patients.join(observations,
- ... on=("id", "subject.reference"),
- ... how="inner")
- """
- # Parse the 'on' parameter to get join column specifications
- left_keys, right_keys = self._parse_join_keys(on, other)
-
- # Build lookup dictionary from right table for efficient joining
- right_lookup = self._build_join_lookup(other, right_keys)
-
- # Perform the join based on the specified type
- if how == "left":
- return self._left_join(left_keys, right_lookup, suffixes)
- elif how == "inner":
- return self._inner_join(left_keys, right_lookup, suffixes)
- elif how == "right":
- return self._right_join(
- other, left_keys, right_keys, right_lookup, suffixes
- )
- elif how == "outer":
- return self._outer_join(left_keys, right_lookup, suffixes)
- else:
- raise ValueError(
- f"Invalid join type: {how}. Must be 'left', 'inner', 'right', or 'outer'"
- )
-
- def _parse_join_keys(
- self,
- on: str | tuple[str, str] | list[str | tuple[str, str]] | None,
- other: "Table",
- ) -> tuple[list[str], list[str]]:
- """Parse the 'on' parameter to extract left and right join keys."""
- if on is None:
- # Natural join - find common columns
- common = self.columns & other.columns
- if not common:
- raise ValueError("No common columns found for natural join")
- left_keys = list(common)
- right_keys = list(common)
- elif isinstance(on, str):
- # Single column, same name
- left_keys = [on]
- right_keys = [on]
- elif isinstance(on, tuple) and len(on) == 2:
- # Single column, different names
- left_keys = [on[0]]
- right_keys = [on[1]]
- elif isinstance(on, list):
- left_keys = []
- right_keys = []
- for item in on:
- if isinstance(item, str):
- # Same column name
- left_keys.append(item)
- right_keys.append(item)
- elif isinstance(item, tuple) and len(item) == 2:
- # Different column names
- left_keys.append(item[0])
- right_keys.append(item[1])
- else:
- raise ValueError(f"Invalid join key specification: {item}")
- else:
- raise ValueError(f"Invalid 'on' parameter: {on}")
-
- return left_keys, right_keys
-
- def _build_join_lookup(
- self, table: "Table", keys: list[str]
- ) -> dict[tuple, list[dict[str, Any]]]:
- """Build a lookup dictionary for efficient joining."""
- lookup: dict[tuple, list[dict[str, Any]]] = {}
-
- for row in table._rows:
- # Extract key values using path resolution to support paths
- key_values = []
- for key in keys:
- value = self._get_path_value(row, key, default=None)
- # Convert unhashable types to strings for lookup
- try:
- hash(value)
- key_values.append(value)
- except TypeError:
- key_values.append(str(value))
-
- key_tuple = tuple(key_values)
- if key_tuple not in lookup:
- lookup[key_tuple] = []
- lookup[key_tuple].append(row)
-
- return lookup
-
- def _merge_rows(
- self,
- left_row: dict[str, Any],
- right_row: dict[str, Any] | None,
- suffixes: tuple[str, str],
- join_keys: set[str] | None = None,
- ) -> dict[str, Any]:
- """Merge two rows, handling column conflicts with suffixes."""
- if right_row is None:
- return left_row.copy()
-
- merged = left_row.copy()
- left_suffix, right_suffix = suffixes
- join_keys = join_keys or set()
-
- for key, value in right_row.items():
- if key in merged:
- # Check if this is a join key - if so, don't apply suffixes
- if key in join_keys:
- # Join key - keep as-is (left value takes precedence)
- continue
-
- # Column conflict - apply suffixes
- if left_suffix:
- # Rename left column
- merged[key + left_suffix] = merged[key]
- del merged[key]
-
- if right_suffix:
- merged[key + right_suffix] = value
- elif not left_suffix:
- # No suffixes - right overwrites left
- merged[key] = value
- else:
- # No conflict
- merged[key] = value
-
- return merged
-
- def _left_join(
- self,
- left_keys: list[str],
- right_lookup: dict[tuple, list[dict[str, Any]]],
- suffixes: tuple[str, str],
- ) -> "Table":
- """Perform a left outer join."""
- result_rows = []
-
- for left_row in self._rows:
- # Extract key values from left row
- key_values = []
- for key in left_keys:
- value = self._get_path_value(left_row, key, default=None)
- try:
- hash(value)
- key_values.append(value)
- except TypeError:
- key_values.append(str(value))
-
- key_tuple = tuple(key_values)
- matching_rows = right_lookup.get(key_tuple, [None])
-
- # Create a result row for each match (or one with None if no matches)
- for right_row in matching_rows:
- result_rows.append(
- self._merge_rows(left_row, right_row, suffixes, set(left_keys))
- )
-
- return Table(result_rows)
-
- def _inner_join(
- self,
- left_keys: list[str],
- right_lookup: dict[tuple, list[dict[str, Any]]],
- suffixes: tuple[str, str],
- ) -> "Table":
- """Perform an inner join."""
- result_rows = []
-
- for left_row in self._rows:
- # Extract key values from left row
- key_values = []
- for key in left_keys:
- value = self._get_path_value(left_row, key, default=None)
- try:
- hash(value)
- key_values.append(value)
- except TypeError:
- key_values.append(str(value))
-
- key_tuple = tuple(key_values)
- matching_rows = right_lookup.get(key_tuple, [])
-
- # Only add rows when there are matches
- for right_row in matching_rows:
- result_rows.append(
- self._merge_rows(left_row, right_row, suffixes, set(left_keys))
- )
-
- return Table(result_rows)
-
- def _right_join(
- self,
- other: "Table",
- left_keys: list[str],
- right_keys: list[str],
- right_lookup: dict[tuple, list[dict[str, Any]]],
- suffixes: tuple[str, str],
- ) -> "Table":
- """Perform a right outer join."""
- # Build lookup for left table
- left_lookup = self._build_join_lookup(self, left_keys)
-
- result_rows = []
- for right_row in other._rows:
- # Extract key values from right row
- key_values = []
- for key in right_keys:
- value = self._get_path_value(right_row, key, default=None)
- try:
- hash(value)
- key_values.append(value)
- except TypeError:
- key_values.append(str(value))
-
- key_tuple = tuple(key_values)
- matching_rows = left_lookup.get(key_tuple, [None])
-
- # Create a result row for each match (or one with None if no matches)
- for left_row in matching_rows:
- if left_row is None:
- # No match - just use right row
- result_rows.append(right_row.copy())
- else:
- result_rows.append(
- self._merge_rows(left_row, right_row, suffixes, set(left_keys))
- )
-
- return Table(result_rows)
-
- def _outer_join(
- self,
- left_keys: list[str],
- right_lookup: dict[tuple, list[dict[str, Any]]],
- suffixes: tuple[str, str],
- ) -> "Table":
- """Perform a full outer join."""
- result_rows = []
- seen_right_keys = set()
-
- # Process all left rows
- for left_row in self._rows:
- # Extract key values from left row
- key_values = []
- for key in left_keys:
- value = self._get_path_value(left_row, key, default=None)
- try:
- hash(value)
- key_values.append(value)
- except TypeError:
- key_values.append(str(value))
-
- key_tuple = tuple(key_values)
- matching_rows = right_lookup.get(key_tuple, [None])
-
- if matching_rows != [None]:
- seen_right_keys.add(key_tuple)
-
- for right_row in matching_rows:
- result_rows.append(
- self._merge_rows(left_row, right_row, suffixes, set(left_keys))
- )
-
- # Add unmatched right rows
- for key_tuple, right_rows in right_lookup.items():
- if key_tuple not in seen_right_keys:
- for right_row in right_rows:
- result_rows.append(right_row.copy())
-
- return Table(result_rows)
-
- def __getitem__(self, key: str) -> Any:
- """
- Enhanced access with dot syntax support.
-
- Supports both row access and path-based access:
- - table["$0"] → returns the row dict
- - table["$0.name"] → extracts value using path syntax
- - table["column"] → extracts column values from all rows (same as get())
-
- Args:
- key: Either a row key ("$0") or a path expression ("$0.name", "column")
-
- Returns:
- For row keys: the row dict
- For path expressions: the extracted value(s)
-
- Examples:
- >>> table["$0"] # Get entire row
- {"name": "John", "age": 30}
- >>> table["$0.name"] # Get specific field from row
- "John"
- >>> table["name"] # Get column from all rows
- ["John", "Jane", "Bob"]
- """
- # Check if this is a path expression (contains dot) or column access
- if "." in key or not key.startswith("$"):
- # Use the get() method which handles path syntax
- return self.get(key)
-
- # Row key access
- if key in self._key_to_row:
- return self._key_to_row[key]
- else:
- raise KeyError(key)
-
- def __contains__(self, key: str) -> bool:
- """Check if a row key exists in the table."""
- return key in self._key_to_row
-
- def __setitem__(self, key: str, value: dict[str, Any]) -> None:
- """Set a row by key (mainly for internal use)."""
- if not key.startswith("$"):
- raise ValueError("Row keys must start with '$'")
- self._key_to_row[key] = value
- # Note: This doesn't update _rows or _row_keys for simplicity
- # Main usage should be through append() method
-
- def head(self, n: int = 5) -> "Table":
- """
- Return first n rows.
-
- Args:
- n: Number of rows to return (default 5)
-
- Returns:
- New Table with first n rows
- """
- return Table(self._rows[:n])
-
- def tail(self, n: int = 5) -> "Table":
- """
- Return last n rows.
-
- Args:
- n: Number of rows to return (default 5)
-
- Returns:
- New Table with last n rows
- """
- return Table(self._rows[-n:])
-
- def __iter__(self) -> Iterator[dict[str, Any]]:
- """
- Iterate over rows in insertion order.
-
- Examples:
- >>> t = Table([{"id": 1}, {"id": 2}])
- >>> for row in t:
- ... print(row["id"])
- 1
- 2
- """
- return iter(self._rows)
-
- def __len__(self) -> int:
- """
- Return the number of rows in the table.
-
- Examples:
- >>> t = Table([{"id": 1}, {"id": 2}])
- >>> len(t)
- 2
- """
- return len(self._rows)
-
- def __repr__(self) -> str:
- """
- Return a concise representation of the Table.
-
- Examples:
- >>> t = Table([{"id": 1, "name": "John"}, {"id": 2, "name": "Jane"}])
- >>> repr(t)
- '<Table: 2 rows, 2 cols>'
- """
- num_rows = len(self._rows)
- num_cols = len(self.columns)
- return f""
-
- def __str__(self) -> str:
- """
- Return a pretty-printed string representation of the Table.
- Shows the first 5 rows by default.
-
- Examples:
- >>> t = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
- >>> print(t)
- $key | name | age
- -------|------|-----
- $0 | John | 30
- $1 | Jane | 25
- """
- return self.show(n=5)
-
- def show(
- self,
- n: int = 5,
- truncate: int = 50,
- max_col_width: int = 20,
- *,
- flatten: bool = False,
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> str:
- """
- Return a formatted string representation of the Table.
-
- Args:
- n: Number of rows to display (default: 5)
- truncate: Maximum length for cell values before truncation (default: 50)
- max_col_width: Maximum column width for display (default: 20)
- flatten: If True, flatten nested structures before display (default: False)
- max_depth: Maximum recursion depth when flattening (None for unlimited)
- array_index_limit: Maximum array indices when flattening (None for unlimited)
-
- Returns:
- Formatted string representation of the table
-
- Examples:
- >>> t = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
- >>> print(t.show(n=10)) # Show up to 10 rows
-
- >>> # Show flattened view
- >>> nested = Table([{"user": {"name": "John", "prefs": ["email", "sms"]}}])
- >>> print(nested.show(flatten=True))
- # Shows columns: user.name, user.prefs[0], user.prefs[1]
- """
- if len(self._rows) == 0:
- return ""
-
- # Apply flattening if requested
- if flatten:
- flattened_table = self.flatten(
- max_depth=max_depth, array_index_limit=array_index_limit
- )
- return flattened_table.show(
- n=n, truncate=truncate, max_col_width=max_col_width
- )
-
- # Get the rows to display
- display_rows = self._rows[:n]
- has_more = len(self._rows) > n
-
- # Get all columns from displayed rows
- columns: list[str] = []
- for row in display_rows:
- columns.extend(row.keys())
- columns = list(dict.fromkeys(columns)) # Preserve order, remove duplicates
-
- # Prepare data for display
- # First column is the row key
- headers = ["$key"] + columns
-
- # Format cell values
- formatted_rows: list[list[str]] = []
- for i, row in enumerate(display_rows):
- # Find the row key
- row_key = None
- for key, idx in self._row_keys.items():
- if idx == i:
- row_key = key
- break
- if row_key is None:
- row_key = f"${i}"
-
- formatted_row = [row_key]
- for col in columns:
- value = row.get(col)
- formatted_value = self._format_value(value, truncate)
- formatted_row.append(formatted_value)
- formatted_rows.append(formatted_row)
-
- # Calculate column widths
- col_widths: list[int] = []
- for i, header in enumerate(headers):
- width = min(len(str(header)), max_col_width)
- for row in formatted_rows: # type: ignore
- if i < len(row):
- width = max(width, min(len(str(row[i])), max_col_width)) # type: ignore
- col_widths.append(width)
-
- # Build the table string
- lines = []
-
- # Header row
- header_parts = []
- for header, width in zip(headers, col_widths):
- header_str = str(header)
- if len(header_str) > width:
- header_str = header_str[: width - 3] + "..."
- header_parts.append(f" {header_str:<{width}}")
- lines.append(" |".join(header_parts))
-
- # Separator
- separator_parts = ["-" * (width + 2) for width in col_widths]
- lines.append("-+-".join(separator_parts))
-
- # Data rows
- for formatted_row in formatted_rows:
- row_parts = []
- for value, width in zip(formatted_row, col_widths):
- value_str = str(value)
- if len(value_str) > width:
- value_str = value_str[: width - 3] + "..."
- row_parts.append(f" {value_str:<{width}}")
- lines.append(" |".join(row_parts))
-
- # Add indicator if there are more rows
- if has_more:
- remaining = len(self._rows) - n
- lines.append(f"... {remaining} more row{'s' if remaining != 1 else ''}")
-
- return "\n".join(lines)
-
- def _format_value(self, value: Any, max_length: int = 50) -> str:
- """
- Format a value for display in the table.
-
- Args:
- value: The value to format
- max_length: Maximum length before truncation
-
- Returns:
- Formatted string representation
- """
- if value is None:
- return "None"
- elif isinstance(value, (dict, list)):
- # Use compact JSON representation for complex types
- import json
-
- json_str = json.dumps(value, separators=(",", ":"))
- if len(json_str) > max_length:
- return json_str[: max_length - 3] + "..."
- return json_str
- elif isinstance(value, bool):
- return str(value)
- elif isinstance(value, (int, float)):
- return str(value)
- else:
- str_value = str(value)
- if len(str_value) > max_length:
- return str_value[: max_length - 3] + "..."
- return str_value
-
- def to_pandas(
- self,
- *,
- index: bool = False,
- index_name: str = "_index",
- flatten: bool = False,
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> "pd.DataFrame":
- """
- Convert Table to pandas DataFrame.
-
- Args:
- index: If True, use row keys ($0, $1, etc.) as DataFrame index
- index_name: Name for the index column when index=True
- flatten: If True, flatten nested structures before conversion (default: False)
- max_depth: Maximum recursion depth when flattening (None for unlimited)
- array_index_limit: Maximum array indices when flattening (None for unlimited)
-
- Returns:
- pandas.DataFrame with Table data
-
- Examples:
- >>> # Basic conversion
- >>> df = table.to_pandas()
-
- >>> # With index from row keys
- >>> df = table.to_pandas(index=True)
-
- >>> # Custom index name
- >>> df = table.to_pandas(index=True, index_name="row_id")
-
- >>> # Flatten nested structures
- >>> nested = Table([{"user": {"name": "John", "prefs": ["email"]}}])
- >>> df = nested.to_pandas(flatten=True)
- # Results in columns: user.name, user.prefs[0]
-
- Note: Requires pandas to be installed: pip install 'chidian[pandas]'
- """
- try:
- import pandas as pd # type: ignore
- except ModuleNotFoundError as e:
- raise ModuleNotFoundError(
- "pandas not installed. pip install 'chidian[pandas]'"
- ) from e
-
- # Apply flattening if requested
- if flatten:
- flattened_table = self.flatten(
- max_depth=max_depth, array_index_limit=array_index_limit
- )
- return flattened_table.to_pandas(index=index, index_name=index_name)
-
- if not index:
- return pd.DataFrame(self.to_list())
-
- rows = [
- {
- index_name: (k[1:] if isinstance(k, str) and k.startswith("$") else k),
- **row,
- }
- for k, row in self.to_dict().items()
- ]
- return pd.DataFrame(rows).set_index(index_name)
-
- def to_polars(
- self,
- *,
- add_index: bool = False,
- index_name: str = "_index",
- flatten: bool = False,
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> "pl.DataFrame":
- """
- Convert Table to polars DataFrame.
-
- Args:
- add_index: If True, add row keys ($0, $1, etc.) as a column
- index_name: Name for the index column when add_index=True
- flatten: If True, flatten nested structures before conversion (default: False)
- max_depth: Maximum recursion depth when flattening (None for unlimited)
- array_index_limit: Maximum array indices when flattening (None for unlimited)
-
- Returns:
- polars.DataFrame with Table data
-
- Examples:
- >>> # Basic conversion
- >>> df = table.to_polars()
-
- >>> # With index column from row keys
- >>> df = table.to_polars(add_index=True)
-
- >>> # Custom index column name
- >>> df = table.to_polars(add_index=True, index_name="row_id")
-
- >>> # Flatten nested structures
- >>> nested = Table([{"user": {"name": "John", "prefs": ["email"]}}])
- >>> df = nested.to_polars(flatten=True)
- # Results in columns: user.name, user.prefs[0]
-
- Note: Requires polars to be installed: pip install 'chidian[polars]'
- """
- try:
- import polars as pl # type: ignore
- except ModuleNotFoundError as e:
- raise ModuleNotFoundError(
- "polars not installed. pip install 'chidian[polars]'"
- ) from e
-
- # Apply flattening if requested
- if flatten:
- flattened_table = self.flatten(
- max_depth=max_depth, array_index_limit=array_index_limit
- )
- return flattened_table.to_polars(add_index=add_index, index_name=index_name)
-
- if len(self._rows) == 0:
- # Handle empty table case - polars needs schema for empty data
- return pl.DataFrame()
-
- if not add_index:
- return pl.from_dicts(self.to_list())
-
- rows = [
- {
- index_name: (k[1:] if isinstance(k, str) and k.startswith("$") else k),
- **row,
- }
- for k, row in self.to_dict().items()
- ]
- return pl.from_dicts(rows)
-
- def flatten(
- self, *, max_depth: int | None = None, array_index_limit: int | None = None
- ) -> "Table":
- """
- Return a new Table with nested data structures flattened into path-keyed columns.
-
- Converts nested dictionaries and lists into a flat structure using dot-notation
- for dictionaries (e.g., 'a.b') and bracket-notation for arrays (e.g., 'a[0]').
-
- Args:
- max_depth: Maximum recursion depth (None for unlimited). When reached,
- sub-structures are preserved as values.
- array_index_limit: Maximum array indices to process per array (None for unlimited).
- Helps prevent explosion with very large arrays.
-
- Returns:
- New Table with flattened structure
-
- Examples:
- >>> # Basic flattening
- >>> t = Table([{'a': 1, 'b': [2, 3], 'c': {'d': 4}}])
- >>> flat = t.flatten()
- >>> list(flat.columns)
- ['a', 'b[0]', 'b[1]', 'c.d']
-
- >>> # With depth limit
- >>> deep = Table([{'a': {'b': {'c': {'d': 1}}}}])
- >>> limited = deep.flatten(max_depth=2)
- >>> list(limited.columns) # 'a.b' contains {'c': {'d': 1}}
- ['a.b']
-
- >>> # With array limit
- >>> big_array = Table([{'items': list(range(100))}])
- >>> limited = big_array.flatten(array_index_limit=3)
- >>> sorted(limited.columns) # Only items[0], items[1], items[2]
- ['items[0]', 'items[1]', 'items[2]']
- """
- flattened_rows = [
- self._flatten_row(
- row, max_depth=max_depth, array_index_limit=array_index_limit
- )
- for row in self._rows
- ]
- return Table(flattened_rows)
-
- def columns_flattened(
- self,
- *,
- sample_rows: int | None = None,
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> set[str]:
- """
- Return the union of all flattened column names without modifying the table.
-
- Useful for previewing the column structure that would result from flattening,
- especially on large tables where you want to limit analysis to a sample.
-
- Args:
- sample_rows: If provided, only analyze this many rows (from the beginning)
- max_depth: Maximum recursion depth (None for unlimited)
- array_index_limit: Maximum array indices to process per array (None for unlimited)
-
- Returns:
- Set of flattened column names
-
- Examples:
- >>> t = Table([
- ... {'a': 1, 'b': [1, 2]},
- ... {'a': 2, 'b': [3], 'c': {'d': 4}}
- ... ])
- >>> t.columns_flattened()
- {'a', 'b[0]', 'b[1]', 'c.d'}
-
- >>> # Sample just first row
- >>> t.columns_flattened(sample_rows=1)
- {'a', 'b[0]', 'b[1]'}
- """
- rows_to_analyze = (
- self._rows[:sample_rows] if sample_rows is not None else self._rows
- )
-
- all_columns: set[str] = set()
- for row in rows_to_analyze:
- flattened = self._flatten_row(
- row, max_depth=max_depth, array_index_limit=array_index_limit
- )
- all_columns.update(flattened.keys())
-
- return all_columns
-
- def _get_path_value(
- self, row: dict[str, Any], path: str, default: Any = None
- ) -> Any:
- """
- Enhanced path value extraction that works with both regular and flattened tables.
-
- First tries standard chidian.core.get() for path resolution. If that returns the
- default value and the path looks like it could be a flattened key, tries direct
- dictionary lookup as a fallback.
-
- Args:
- row: Row dictionary to extract from
- path: Path string (e.g., "a.b", "items[0]", or flattened key like "c.d")
- default: Default value if path not found
-
- Returns:
- Value at path or default if not found
- """
- # First try standard path resolution
- result = get(row, path, default=default)
-
- # If we got the default and the path contains path syntax,
- # check if it's a direct key in a flattened table
- if (
- result is default
- and any(char in path for char in [".", "[", "]"])
- and path in row
- ):
- return row[path]
-
- return result
-
- def _encode_key_for_path_segment(self, key: str) -> str:
- """
- Encode a key for use in flattened path notation.
-
- Simple keys (alphanumeric + underscore) use plain notation.
- Complex keys use bracket-quoted notation with proper escaping.
-
- Args:
- key: The dictionary key to encode
-
- Returns:
- Encoded key suitable for path notation
- """
- # Check if key contains only safe characters
- if key and key[0].isalpha() and key.replace("_", "").replace("-", "").isalnum():
- return key
-
- # Need to use bracket notation - escape quotes and backslashes
- escaped = key.replace("\\", "\\\\").replace('"', '\\"')
- return f'["{escaped}"]'
-
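-# Illustrative examples (not in the original module):
-#   _encode_key_for_path_segment("given_name") -> "given_name"
-#   _encode_key_for_path_segment("a.b") -> '["a.b"]'   (bracket-quoted, escaped)
-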
- def _flatten_value(
- self,
- value: Any,
- prefix: str,
- out: dict[str, Any],
- max_depth: int | None,
- array_index_limit: int | None,
- depth: int,
- ) -> None:
- """
- Recursively flatten a value into path-keyed entries.
-
- Args:
- value: Value to flatten
- prefix: Current path prefix
- out: Output dictionary to populate
- max_depth: Maximum recursion depth (None for unlimited)
- array_index_limit: Maximum array indices to process (None for unlimited)
- depth: Current recursion depth
- """
- # Check depth limit
- if max_depth is not None and depth >= max_depth:
- out[prefix] = value
- return
-
- if isinstance(value, dict):
- # Flatten dictionary
- for k, v in value.items():
- encoded_key = self._encode_key_for_path_segment(k)
- new_prefix = f"{prefix}.{encoded_key}" if prefix else encoded_key
- self._flatten_value(
- v, new_prefix, out, max_depth, array_index_limit, depth + 1
- )
- elif isinstance(value, list):
- # Flatten list with index notation
- for i, v in enumerate(value):
- if array_index_limit is not None and i >= array_index_limit:
- break
- new_prefix = f"{prefix}[{i}]"
- self._flatten_value(
- v, new_prefix, out, max_depth, array_index_limit, depth + 1
- )
- else:
- # Leaf value - store as-is
- out[prefix] = value
-
- def _flatten_row(
- self,
- row: dict[str, Any],
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> dict[str, Any]:
- """
- Flatten a single row's nested structure into path-keyed columns.
-
- Args:
- row: Row dictionary to flatten
- max_depth: Maximum recursion depth (None for unlimited)
- array_index_limit: Maximum array indices to process (None for unlimited)
-
- Returns:
- Flattened row dictionary
- """
- out: dict[str, Any] = {}
- for k, v in row.items():
- encoded_key = self._encode_key_for_path_segment(k)
- self._flatten_value(v, encoded_key, out, max_depth, array_index_limit, 0)
- return out
-
- @classmethod
- def from_csv(
- cls,
- path: str | Path,
- *,
- delimiter: str = ",",
- encoding: str = "utf-8",
- header: bool | int = True,
- columns: list[str] | None = None,
- dtypes: dict[str, type] | None = None,
- parse_dates: bool | list[str] = False,
- null_values: list[str] | None = None,
- skip_rows: int = 0,
- max_rows: int | None = None,
- ) -> "Table":
- """
- Create a Table from a CSV file.
-
- Args:
- path: Path to the CSV file
- delimiter: Field delimiter (default: ",")
- encoding: File encoding (default: "utf-8")
- header: Whether first row contains headers (True),
- row index of headers (int), or no headers (False)
- columns: Column names to use (overrides file headers)
- dtypes: Dict mapping column names to types for parsing
- parse_dates: Parse date columns. If True, auto-detect.
- If list, parse specified columns as dates
- null_values: List of strings to interpret as null/None
- skip_rows: Number of rows to skip from beginning
- max_rows: Maximum number of rows to read
-
- Returns:
- New Table with CSV data as rows
-
- Examples:
- >>> # Basic usage
- >>> table = Table.from_csv("data.csv")
-
- >>> # Custom delimiter and encoding
- >>> table = Table.from_csv("data.tsv", delimiter="\t", encoding="latin-1")
-
- >>> # Specify column types
- >>> table = Table.from_csv("data.csv", dtypes={
- ... "age": int,
- ... "salary": float,
- ... "active": bool
- ... })
-
- >>> # Parse date columns
- >>> table = Table.from_csv("orders.csv", parse_dates=["order_date", "ship_date"])
-
- >>> # Handle missing values
- >>> table = Table.from_csv("data.csv", null_values=["NA", "N/A", "null", ""])
- """
- path = Path(path)
- if not path.exists():
- raise FileNotFoundError(f"CSV file not found: {path}")
-
- # Default null values if not specified
- if null_values is None:
- null_values = ["NA", "N/A", "null", ""]
-
- rows = []
-
- try:
- with open(path, "r", encoding=encoding, newline="") as f:
- # Skip initial rows if requested
- for _ in range(skip_rows):
- next(f, None)
-
- reader = csv.reader(f, delimiter=delimiter)
-
- # Handle header
- first_data_row = None
- if header is True:
- # First row is header
- file_columns = next(reader, [])
- elif isinstance(header, int) and header is not False:
- # Header at specific row index
- for _ in range(header):
- next(reader, None)
- file_columns = next(reader, [])
- else:
- # No header - first row is data
- file_columns = None
- first_data_row = next(reader, None)
-
- # Use provided columns or file columns or generate numeric columns
- if columns:
- column_names = columns
- elif file_columns:
- column_names = file_columns
- else:
- # Generate column names based on first row
- if first_data_row:
- column_names = [f"col_{i}" for i in range(len(first_data_row))]
- else:
- column_names = []
-
- # Process the first data row if we read it during header handling
- if first_data_row and column_names:
- # Handle column count mismatch
- row_data = first_data_row[:] # Make a copy
- if len(row_data) != len(column_names):
- if len(row_data) < len(column_names):
- # Pad with empty strings (will be converted to None later if in null_values)
- row_data.extend([""] * (len(column_names) - len(row_data)))
- else:
- # Truncate
- row_data = row_data[: len(column_names)]
-
- row_dict = cls._process_csv_row(
- row_data, column_names, dtypes, parse_dates, null_values
- )
- rows.append(row_dict)
-
- # Read remaining rows
- row_count = len(rows) # Account for any rows already processed
- for row_data in reader:
- if max_rows and row_count >= max_rows:
- break
-
- if len(row_data) != len(column_names):
- # Handle rows with different number of columns
- if len(row_data) < len(column_names):
- # Pad with empty strings (will be converted to None later if in null_values)
- row_data.extend([""] * (len(column_names) - len(row_data)))
- else:
- # Truncate
- row_data = row_data[: len(column_names)]
-
- row_dict = cls._process_csv_row(
- row_data, column_names, dtypes, parse_dates, null_values
- )
- rows.append(row_dict)
- row_count += 1
-
- except PermissionError:
- raise PermissionError(f"Permission denied reading file: {path}")
- except Exception as e:
- raise ValueError(f"Error reading CSV file {path}: {e}")
-
- return cls(rows)
-
- @classmethod
- def _process_csv_row(
- cls,
- row_data: list[str],
- column_names: list[str],
- dtypes: dict[str, type] | None,
- parse_dates: bool | list[str],
- null_values: list[str],
- ) -> dict[str, Any]:
- """Process a single CSV row, applying type conversions."""
- row_dict: dict[str, Any] = {}
-
- for col_name, value in zip(column_names, row_data):
- # Check for null values
- if value in null_values:
- row_dict[col_name] = None
- continue
-
- # Apply type conversion
- if dtypes and col_name in dtypes:
- # Explicit type conversion
- try:
- if dtypes[col_name] is bool:
- row_dict[col_name] = value.lower() in ("true", "1", "yes", "y")
- else:
- row_dict[col_name] = dtypes[col_name](value)
- except (ValueError, TypeError):
- # Keep as string if conversion fails
- row_dict[col_name] = value # type: ignore
- elif parse_dates:
- # Date parsing
- if (isinstance(parse_dates, list) and col_name in parse_dates) or (
- parse_dates is True and cls._looks_like_date(value)
- ):
- try:
- # Try common date formats
- for fmt in [
- "%Y-%m-%d",
- "%Y/%m/%d",
- "%m/%d/%Y",
- "%d/%m/%Y",
- "%Y-%m-%d %H:%M:%S",
- "%Y/%m/%d %H:%M:%S",
- ]:
- try:
- row_dict[col_name] = datetime.strptime(value, fmt) # type: ignore
- break
- except ValueError:
- continue
- else:
- # No format matched, keep as string
- row_dict[col_name] = value # type: ignore
- except Exception:
- row_dict[col_name] = value # type: ignore
- else:
- # Auto-infer type
- row_dict[col_name] = cls._infer_type(value)
- else:
- # Auto-infer type
- row_dict[col_name] = cls._infer_type(value)
-
- return row_dict
-
- @classmethod
- def _infer_type(cls, value: str) -> Any:
- """Attempt to infer and convert string value to appropriate type."""
- # Handle None case
- if value is None:
- return None
-
- # Check for boolean
- if value.lower() in ("true", "false"):
- return value.lower() == "true"
-
- # Try integer
- try:
- return int(value)
- except ValueError:
- pass
-
- # Try float
- try:
- return float(value)
- except ValueError:
- pass
-
- # Check if it looks like JSON
- if (value.startswith("{") and value.endswith("}")) or (
- value.startswith("[") and value.endswith("]")
- ):
- try:
- return json.loads(value)
- except json.JSONDecodeError:
- pass
-
- # Return as string
- return value
-
- @classmethod
- def _looks_like_date(cls, value: str) -> bool:
- """Heuristic to check if a string looks like a date."""
- # Simple heuristics
- if len(value) < 6 or len(value) > 30:
- return False
-
- # Check for common date separators and patterns
- date_indicators = ["-", "/", ":", "20", "19"]
- matches = sum(1 for indicator in date_indicators if indicator in value)
- return matches >= 2
-
- def to_csv(
- self,
- path: str | Path,
- *,
- delimiter: str = ",",
- encoding: str = "utf-8",
- header: bool = True,
- columns: list[str] | None = None,
- index: bool = False,
- null_value: str = "",
- float_format: str | None = None,
- date_format: str | None = None,
- mode: str = "w",
- flatten: bool = False,
- max_depth: int | None = None,
- array_index_limit: int | None = None,
- ) -> None:
- """
- Write the Table to a CSV file.
-
- Args:
- path: Output file path
- delimiter: Field delimiter (default: ",")
- encoding: File encoding (default: "utf-8")
- header: Whether to write column headers
- columns: Specific columns to write (default: all columns)
- index: Whether to write row keys as first column
- null_value: String representation for None values
- float_format: Format string for floating point numbers (e.g., "%.2f")
- date_format: Format string for datetime objects (e.g., "%Y-%m-%d")
- mode: File write mode ("w" for overwrite, "a" for append)
- flatten: If True, flatten nested structures before export (default: False)
- max_depth: Maximum recursion depth when flattening (None for unlimited)
- array_index_limit: Maximum array indices when flattening (None for unlimited)
-
- Examples:
- >>> # Basic export
- >>> table.to_csv("output.csv")
-
- >>> # Tab-separated with specific columns
- >>> table.to_csv("output.tsv", delimiter="\t", columns=["name", "age", "city"])
-
- >>> # Include row keys and format numbers
- >>> table.to_csv("output.csv", index=True, float_format="%.2f")
-
- >>> # Append to existing file
- >>> table.to_csv("output.csv", mode="a", header=False)
-
- >>> # Export with flattened structure
- >>> nested = Table([{"user": {"name": "John", "prefs": ["email"]}}])
- >>> nested.to_csv("flat.csv", flatten=True)
- # Results in columns: user.name, user.prefs[0]
- """
- # Apply flattening if requested
- if flatten:
- flattened_table = self.flatten(
- max_depth=max_depth, array_index_limit=array_index_limit
- )
- return flattened_table.to_csv(
- path,
- delimiter=delimiter,
- encoding=encoding,
- header=header,
- columns=columns,
- index=index,
- null_value=null_value,
- float_format=float_format,
- date_format=date_format,
- mode=mode,
- )
-
- path = Path(path)
-
- # Determine columns to write
- if columns:
- write_columns = columns
- else:
- # Get all columns from the table
- write_columns = list(self.columns)
-
- # Add index column if requested
- if index:
- write_columns = ["_index"] + write_columns
-
- try:
- with open(path, mode, encoding=encoding, newline="") as f:
- writer = csv.DictWriter(
- f,
- fieldnames=write_columns,
- delimiter=delimiter,
- extrasaction="ignore", # Ignore extra fields not in fieldnames
- )
-
- # Write header if requested
- if header:
- writer.writeheader()
-
- # Write rows
- for i, row in enumerate(self._rows):
- write_row = {}
-
- # Add index if requested
- if index:
- # Get the row key
- row_key = None
- for key, idx in self._row_keys.items():
- if idx == i:
- row_key = key
- break
- write_row["_index"] = row_key or f"${i}"
-
- # Process each column
- for col in write_columns:
- if col == "_index":
- continue # Already handled
-
- value = row.get(col)
-
- # Format the value
- if value is None:
- write_row[col] = null_value
- elif isinstance(value, float) and float_format:
- write_row[col] = float_format % value
- elif isinstance(value, datetime) and date_format:
- write_row[col] = value.strftime(date_format)
- elif isinstance(value, (dict, list)):
- # Serialize complex types as JSON
- write_row[col] = json.dumps(value)
- else:
- write_row[col] = str(value)
-
- writer.writerow(write_row)
-
- except PermissionError:
- raise PermissionError(f"Permission denied writing to file: {path}")
- except Exception as e:
- raise ValueError(f"Error writing CSV file {path}: {e}")
-
- @classmethod
- def from_parquet(
- cls,
- path: str | Path,
- *,
- columns: list[str] | None = None,
- filters: list[tuple] | None = None,
- use_nullable_dtypes: bool = True,
- ) -> "Table":
- """
- Create a Table from a Parquet file.
-
- Args:
- path: Path to the Parquet file
- columns: Specific columns to read (default: all)
- filters: Row-level filters using PyArrow syntax
- e.g., [("age", ">", 18), ("city", "in", ["NYC", "LA"])]
- use_nullable_dtypes: Use pandas nullable dtypes for better None handling
-
- Returns:
- New Table with Parquet data as rows
-
- Examples:
- >>> # Basic usage
- >>> table = Table.from_parquet("data.parquet")
-
- >>> # Read specific columns
- >>> table = Table.from_parquet("data.parquet", columns=["id", "name", "score"])
-
- >>> # Apply filters during read
- >>> table = Table.from_parquet("data.parquet", filters=[
- ... ("year", ">=", 2020),
- ... ("status", "==", "active")
- ... ])
-
- Note: Requires pyarrow to be installed
- """
- pa, pq = cls._check_pyarrow()
-
- path = Path(path)
- if not path.exists():
- raise FileNotFoundError(f"Parquet file not found: {path}")
-
- try:
- # Read parquet file
- table = pq.read_table(
- path,
- columns=columns,
- filters=filters,
- )
-
- # Convert to list of dicts using PyArrow directly
- rows = []
-
- # Convert PyArrow table to Python objects
- for i in range(table.num_rows):
- row_dict = {}
- for j, column_name in enumerate(table.column_names):
- column = table.column(j)
- value = column[i].as_py() # Convert to Python object
- row_dict[column_name] = value
- rows.append(row_dict)
-
- return cls(rows)
-
- except PermissionError:
- raise PermissionError(f"Permission denied reading file: {path}")
- except Exception as e:
- raise ValueError(f"Error reading Parquet file {path}: {e}")
-
- def to_parquet(
- self,
- path: str | Path,
- *,
- compression: str = "snappy",
- columns: list[str] | None = None,
- index: bool = False,
- partition_cols: list[str] | None = None,
- schema: Any | None = None,
- ) -> None:
- """
- Write the Table to a Parquet file.
-
- Args:
- path: Output file path or directory (if using partitions)
- compression: Compression codec ("snappy", "gzip", "brotli", "lz4", "zstd", or None)
- columns: Specific columns to write (default: all)
- index: Whether to write row keys as a column
- partition_cols: Columns to use for partitioning the dataset
- schema: PyArrow schema for explicit type control
-
- Examples:
- >>> # Basic export
- >>> table.to_parquet("output.parquet")
-
- >>> # With compression and specific columns
- >>> table.to_parquet("output.parquet",
- ... compression="gzip",
- ... columns=["id", "name", "metrics"])
-
- >>> # Partitioned dataset
- >>> table.to_parquet("output_dir/",
- ... partition_cols=["year", "month"])
-
- Note: Requires pyarrow to be installed
- """
- pa, pq = self._check_pyarrow()
-
- path = Path(path)
-
- # Prepare data for writing
- write_data = []
-
- for i, row in enumerate(self._rows):
- write_row = {}
-
- # Add index if requested
- if index:
- # Get the row key
- row_key = None
- for key, idx in self._row_keys.items():
- if idx == i:
- row_key = key
- break
- write_row["_index"] = row_key or f"${i}"
-
- # Add specified columns or all columns
- if columns:
- for col in columns:
- write_row[col] = row.get(col) # type: ignore
- else:
- write_row.update(row)
-
- write_data.append(write_row)
-
- try:
- # Convert to PyArrow table
- if schema:
- # Use provided schema
- pa_table = pa.Table.from_pylist(write_data, schema=schema)
- else:
- # Auto-infer schema
- pa_table = pa.Table.from_pylist(write_data)
-
- # Write to parquet
- if partition_cols:
- # Partitioned dataset
- pq.write_to_dataset(
- pa_table,
- root_path=path,
- partition_cols=partition_cols,
- compression=compression,
- )
- else:
- # Single file
- pq.write_table(
- pa_table,
- path,
- compression=compression,
- )
-
- except PermissionError:
- raise PermissionError(f"Permission denied writing to file: {path}")
- except Exception as e:
- raise ValueError(f"Error writing Parquet file {path}: {e}")
-
- @classmethod
- def _check_pyarrow(cls):
- """Check if pyarrow is available and return the modules."""
- try:
- import pyarrow as pa
- import pyarrow.parquet as pq
-
- return pa, pq
- except ImportError:
- raise ImportError(
- "Parquet support requires pyarrow. " "Install with: uv add pyarrow"
- )
diff --git a/chidian/validation/__init__.py b/chidian/validation/__init__.py
new file mode 100644
index 0000000..82e6c62
--- /dev/null
+++ b/chidian/validation/__init__.py
@@ -0,0 +1,66 @@
+"""
+Chidian Validation - Dict-like schema validation with Pydantic interop.
+
+Usage:
+ from chidian.validation import V, Required, Optional, validate, to_pydantic
+
+ schema = {
+ "name": Required(str),
+ "email": Optional(str),
+ "tags": [str],
+ }
+
+ result = validate(data, schema)
+ Model = to_pydantic("MyModel", schema)
+"""
+
+from .core import DictV, ListV, V, to_validator
+from .schema import to_pydantic, validate
+from .types import Err, Ok
+from .validators import (
+ Between,
+ Eq,
+ Gt,
+ Gte,
+ InRange,
+ InSet,
+ IsType,
+ Lt,
+ Lte,
+ Matches,
+ MaxLength,
+ MinLength,
+ Optional,
+ Predicate,
+ Required,
+)
+
+__all__ = [
+ # Result types
+ "Ok",
+ "Err",
+ # Core
+ "V",
+ "DictV",
+ "ListV",
+ "to_validator",
+ # Validators
+ "Required",
+ "Optional",
+ "IsType",
+ "InRange",
+ "MinLength",
+ "MaxLength",
+ "InSet",
+ "Matches",
+ "Predicate",
+ "Eq",
+ "Gt",
+ "Gte",
+ "Lt",
+ "Lte",
+ "Between",
+ # Schema
+ "validate",
+ "to_pydantic",
+]
diff --git a/chidian/validation/core.py b/chidian/validation/core.py
new file mode 100644
index 0000000..b0299b2
--- /dev/null
+++ b/chidian/validation/core.py
@@ -0,0 +1,214 @@
+"""
+Core validator classes for chidian validation.
+
+Provides V, DictV, ListV dataclasses with functional composition.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Callable
+
+from .types import CheckFn, Err, Ok, Path
+
+
+@dataclass(frozen=True, slots=True)
+class V:
+ """
+ Immutable validator node.
+
+ The fundamental building block. Wraps a check function with metadata
+ for composition and Pydantic generation.
+ """
+
+ check: CheckFn
+ required: bool = False
+ type_hint: type | None = None
+ message: str | None = None
+
+ def __call__(self, value: Any, path: Path = ()) -> Ok[Any] | Err[tuple[Path, str]]:
+ """
+ Validate a value.
+
+ Returns:
+ Ok(value) if validation passes
+ Err((path, message)) if validation fails
+ """
+ if value is None:
+ if self.required:
+ msg = self.message or "Required field is missing"
+ return Err((path, msg))
+ return Ok(None)
+
+ try:
+ passed = self.check(value)
+ except Exception as e:
+ return Err((path, f"Validation error: {e}"))
+
+ if not passed:
+ msg = self.message or f"Validation failed for value: {repr(value)[:50]}"
+ return Err((path, msg))
+
+ return Ok(value)
+
+ def __and__(self, other: V | type | Callable | dict | list) -> V:
+ """Combine with AND logic: both must pass."""
+ other_v = to_validator(other)
+ if not isinstance(other_v, V):
+ raise TypeError("Cannot combine V with nested structure using &")
+
+ def combined_check(x: Any) -> bool:
+ return self.check(x) and other_v.check(x)
+
+ return V(
+ check=combined_check,
+ required=self.required or other_v.required,
+ type_hint=self.type_hint or other_v.type_hint,
+ message=self.message or other_v.message,
+ )
+
+ def __rand__(self, other: type | Callable | dict | list) -> V:
+ """Support `str & Required()` where str comes first."""
+ return to_validator(other) & self
+
+ def __or__(self, other: V | type | Callable) -> V:
+ """Combine with OR logic: at least one must pass."""
+ other_v = to_validator(other)
+ if not isinstance(other_v, V):
+ raise TypeError("Cannot combine V with nested structure using |")
+
+ def combined_check(x: Any) -> bool:
+ return self.check(x) or other_v.check(x)
+
+ return V(
+ check=combined_check,
+ required=self.required and other_v.required,
+ type_hint=None, # Union type - defer to Pydantic
+ )
+
+ def __ror__(self, other: type | Callable) -> V:
+ """Support `str | int` where str comes first."""
+ return to_validator(other) | self
+
+ def with_message(self, msg: str) -> V:
+ """Return new validator with custom error message."""
+ return V(
+ check=self.check,
+ required=self.required,
+ type_hint=self.type_hint,
+ message=msg,
+ )
+
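+# A minimal sketch (comments only, not library code) of how the operators
+# above compose; it assumes nothing beyond the names defined in this module:
+#
+#     short_str = V(check=lambda x: isinstance(x, str)) & V(check=lambda x: len(x) <= 5)
+#     short_str("abc")       # -> Ok('abc')
+#     short_str("too long")  # -> Err(((), "Validation failed for value: 'too long'"))
+#
+#     int_or_str = V(check=lambda x: isinstance(x, int)) | V(check=lambda x: isinstance(x, str))
+#     int_or_str(3)          # -> Ok(3)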
+
+@dataclass(frozen=True, slots=True)
+class DictV:
+ """Validator for dict structures with nested field validators."""
+
+ fields: dict[str, V | DictV | ListV]
+ required: bool = False
+
+ def __call__(
+ self, value: Any, path: Path = ()
+ ) -> Ok[Any] | Err[list[tuple[Path, str]]]:
+ if value is None:
+ if self.required:
+ return Err([(path, "Required dict is missing")])
+ return Ok(None)
+
+ if not isinstance(value, dict):
+ return Err([(path, f"Expected dict, got {type(value).__name__}")])
+
+ errors: list[tuple[Path, str]] = []
+
+ for key, validator in self.fields.items():
+ field_path = (*path, key)
+ field_value = value.get(key)
+ result = validator(field_value, field_path)
+
+ if isinstance(result, Err):
+ err = result.error
+ if isinstance(err, list):
+ errors.extend(err)
+ else:
+ errors.append(err)
+
+ return Err(errors) if errors else Ok(value)
+
+
+@dataclass(frozen=True, slots=True)
+class ListV:
+ """Validator for list structures with item validation."""
+
+ items: V | DictV | ListV
+ min_length: int | None = None
+ max_length: int | None = None
+ required: bool = False
+
+ def __call__(
+ self, value: Any, path: Path = ()
+ ) -> Ok[Any] | Err[list[tuple[Path, str]]]:
+ if value is None:
+ if self.required:
+ return Err([(path, "Required list is missing")])
+ return Ok(None)
+
+ if not isinstance(value, list):
+ return Err([(path, f"Expected list, got {type(value).__name__}")])
+
+ errors: list[tuple[Path, str]] = []
+
+ if self.min_length is not None and len(value) < self.min_length:
+ errors.append((path, f"List too short: {len(value)} < {self.min_length}"))
+ if self.max_length is not None and len(value) > self.max_length:
+ errors.append((path, f"List too long: {len(value)} > {self.max_length}"))
+
+ for i, item in enumerate(value):
+ item_path = (*path, i)
+ result = self.items(item, item_path)
+ if isinstance(result, Err):
+ err = result.error
+ if isinstance(err, list):
+ errors.extend(err)
+ else:
+ errors.append(err)
+
+ return Err(errors) if errors else Ok(value)
+
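+# A minimal sketch of how DictV/ListV accumulate errors with full paths,
+# using only names defined in this module:
+#
+#     v = DictV(fields={"tags": ListV(items=V(check=lambda x: isinstance(x, str)))})
+#     v({"tags": ["ok", 1]})
+#     # -> Err([(('tags', 1), "Validation failed for value: 1")])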
+
+def to_validator(v: Any) -> V | DictV | ListV:
+ """
+ Coerce a value to a validator.
+
+ Conversion rules:
+ V | DictV | ListV -> pass through
+ type -> V(check=isinstance, type_hint=type)
+ dict -> DictV with recursive conversion
+        [item] -> ListV validating each element against item
+        [a, b, ...] -> ListV whose item check is an OR of the validators
+ Callable -> V(check=callable)
+ """
+ match v:
+ case V() | DictV() | ListV():
+ return v
+ case type():
+
+ def type_check(x: Any, t: type = v) -> bool:
+ return isinstance(x, t)
+
+ return V(check=type_check, type_hint=v)
+ case dict():
+ fields = {k: to_validator(val) for k, val in v.items()}
+ return DictV(fields=fields)
+ case list() if len(v) == 1:
+ return ListV(items=to_validator(v[0]))
+ case list() if len(v) > 1:
+ # Multiple items = OR logic for item types (must be V instances)
+ item_v: V = to_validator(v[0]) # type: ignore[assignment]
+ for other in v[1:]:
+ other_v = to_validator(other)
+ if isinstance(item_v, V) and isinstance(other_v, V):
+ item_v = item_v | other_v
+ return ListV(items=item_v)
+ case _ if callable(v):
+ return V(check=v)
+
+ raise TypeError(f"Cannot convert {type(v).__name__} to validator")
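+
+# A minimal sketch of what each input shape coerces to (illustrative only):
+#
+#     to_validator(str)              # V with an isinstance check, type_hint=str
+#     to_validator({"name": str})    # DictV({"name": V(...)})
+#     to_validator([str])            # ListV validating every item as str
+#     to_validator([int, str])       # ListV whose item check is int-or-str
+#     to_validator(lambda x: x > 0)  # V wrapping the predicate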
diff --git a/chidian/validation/schema.py b/chidian/validation/schema.py
new file mode 100644
index 0000000..bd94b01
--- /dev/null
+++ b/chidian/validation/schema.py
@@ -0,0 +1,96 @@
+"""
+Schema operations for chidian validation.
+
+Provides validate() and to_pydantic() functions.
+"""
+
+from __future__ import annotations
+
+from typing import Any
+from typing import Optional as TypingOptional
+
+from pydantic import create_model
+
+from .core import DictV, ListV, V, to_validator
+from .types import Err, Ok, Path
+
+
+def validate(
+ data: dict[str, Any], schema: dict[str, Any]
+) -> Ok[dict[str, Any]] | Err[list[tuple[Path, str]]]:
+ """
+ Validate data against a schema.
+
+ Args:
+ data: The dict to validate
+ schema: Dict-like schema definition
+
+ Returns:
+ Ok(data) if validation passes
+ Err([(path, message), ...]) if validation fails
+
+ Usage:
+ schema = {
+ "name": Required(str),
+ "email": str,
+ "age": int & Gte(0),
+ }
+ result = validate({"name": "Alice", "age": 30}, schema)
+ """
+ validator = to_validator(schema)
+
+ if not isinstance(validator, DictV):
+ raise TypeError("Schema must be a dict")
+
+ return validator(data)
+
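+# A minimal sketch of handling the result (Ok/Err come from .types):
+#
+#     result = validate({"name": "Alice"}, {"name": str})
+#     if isinstance(result, Err):
+#         for path, msg in result.error:
+#             print(path, msg)
+#     else:
+#         data = result.value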
+
+def to_pydantic(name: str, schema: dict[str, Any]) -> type:
+ """
+ Compile schema to a Pydantic model.
+
+ Args:
+ name: Name of the generated model class
+ schema: Dict-like schema definition
+
+ Returns:
+ A Pydantic BaseModel subclass
+
+ Usage:
+ User = to_pydantic("User", {
+ "name": Required(str),
+ "email": Optional(str),
+ })
+ user = User(name="Alice")
+ """
+ validator = to_validator(schema)
+ if not isinstance(validator, DictV):
+ raise TypeError("Schema must be a dict")
+
+ fields: dict[str, Any] = {}
+
+ for key, v in validator.fields.items():
+ field_type, default = _extract_pydantic_field(v)
+ fields[key] = (field_type, default)
+
+ return create_model(name, **fields)
+
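+# A minimal sketch of using the generated class; model_json_schema() is the
+# standard Pydantic v2 API:
+#
+#     User = to_pydantic("User", {"name": str, "age": int})
+#     User(name="Alice", age=30)  # validated by Pydantic on construction
+#     User.model_json_schema()    # JSON Schema for the generated model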
+
+def _extract_pydantic_field(v: V | DictV | ListV) -> tuple[Any, Any]:
+ """Extract Pydantic field type and default from validator."""
+ match v:
+ case V(required=True, type_hint=t):
+ return (t or Any, ...)
+ case V(required=False, type_hint=t):
+ return (TypingOptional[t or Any], None)
+ case DictV(required=req):
+ if req:
+ return (dict[str, Any], ...)
+ return (dict[str, Any] | None, None)
+ case ListV(required=req, items=items):
+ item_type, _ = _extract_pydantic_field(items)
+ if req:
+ return (list[item_type], ...) # type: ignore[valid-type]
+ return (list[item_type] | None, None) # type: ignore[valid-type]
+
+ return (Any, None)
diff --git a/chidian/validation/types.py b/chidian/validation/types.py
new file mode 100644
index 0000000..c7e4473
--- /dev/null
+++ b/chidian/validation/types.py
@@ -0,0 +1,46 @@
+"""
+Type definitions for chidian validation.
+
+Provides a minimal Result type (Ok/Err) and type aliases.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Callable, Generic, TypeVar
+
+T = TypeVar("T")
+E = TypeVar("E")
+
+
+@dataclass(frozen=True, slots=True)
+class Ok(Generic[T]):
+ """Success result containing a value."""
+
+ value: T
+
+ def is_ok(self) -> bool:
+ return True
+
+ def is_err(self) -> bool:
+ return False
+
+
+@dataclass(frozen=True, slots=True)
+class Err(Generic[E]):
+ """Error result containing an error value."""
+
+ error: E
+
+ def is_ok(self) -> bool:
+ return False
+
+ def is_err(self) -> bool:
+ return True
+
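+# A minimal sketch: both results support structural pattern matching, since
+# dataclasses generate __match_args__ automatically:
+#
+#     match Ok(42):
+#         case Ok(value=v):
+#             print("got", v)
+#         case Err(error=e):
+#             print("failed", e)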
+
+# Type aliases
+CheckFn = Callable[[Any], bool]
+Path = tuple[str | int, ...]
+ValidationError = tuple[Path, str]
+ValidationErrors = list[ValidationError]
diff --git a/chidian/validation/validators.py b/chidian/validation/validators.py
new file mode 100644
index 0000000..60e54f9
--- /dev/null
+++ b/chidian/validation/validators.py
@@ -0,0 +1,235 @@
+"""
+Built-in validators for chidian validation.
+
+Provides factory functions that return V instances.
+"""
+
+from __future__ import annotations
+
+import re
+from typing import Any
+
+from .core import V, to_validator
+
+
+def Required(v: V | type | None = None) -> V:
+ """
+ Mark a field as required (cannot be None).
+
+ Usage:
+ Required() # Just required, no type check
+ Required(str) # Required string
+ str & Required() # Same as above
+ """
+ if v is None:
+ return V(check=lambda _: True, required=True)
+
+ inner = to_validator(v)
+ if not isinstance(inner, V):
+ raise TypeError(
+ "Required() on nested structures: use the nested validator directly"
+ )
+
+ return V(
+ check=inner.check,
+ required=True,
+ type_hint=inner.type_hint,
+ message=inner.message,
+ )
+
+
+def Optional(v: V | type) -> V:
+ """
+ Allow None, validate if present.
+
+ Usage:
+ Optional(str) # None or valid string
+ """
+ inner = to_validator(v)
+ if not isinstance(inner, V):
+ raise TypeError("Optional() requires a simple validator, not nested structure")
+
+ def check(x: Any) -> bool:
+ return x is None or inner.check(x)
+
+ return V(
+ check=check,
+ required=False,
+ type_hint=inner.type_hint,
+ )
+
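+# A minimal sketch of the None-handling difference between the two:
+#
+#     Required(str)(None)  # -> Err(((), 'Required field is missing'))
+#     Optional(str)(None)  # -> Ok(None)
+#     Optional(str)("hi")  # -> Ok('hi')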
+
+def IsType(t: type) -> V:
+ """
+ Validate that value is an instance of type.
+
+ Usage:
+ IsType(str)
+ IsType(int) & InRange(0, 100)
+ """
+
+ def check(x: Any) -> bool:
+ return isinstance(x, t)
+
+ return V(
+ check=check,
+ type_hint=t,
+ message=f"Expected {t.__name__}",
+ )
+
+
+def InRange(lower: int | None = None, upper: int | None = None) -> V:
+ """
+    Validate that len(value) is within the given bounds (inclusive).
+    Works on strings, lists, and other sized containers; for numeric
+    value bounds, use Between().
+
+ Usage:
+ InRange(1, 10) # 1 to 10 items
+ InRange(lower=5) # At least 5
+ InRange(upper=20) # At most 20
+ """
+
+ def check(x: Any) -> bool:
+ try:
+ n = len(x)
+ except TypeError:
+ return False
+ if lower is not None and n < lower:
+ return False
+ if upper is not None and n > upper:
+ return False
+ return True
+
+ msg_parts = []
+ if lower is not None:
+ msg_parts.append(f">= {lower}")
+ if upper is not None:
+ msg_parts.append(f"<= {upper}")
+ msg = f"Length must be {' and '.join(msg_parts)}"
+
+ return V(check=check, message=msg)
+
+
+def MinLength(n: int) -> V:
+ """Validate minimum length."""
+ return InRange(lower=n)
+
+
+def MaxLength(n: int) -> V:
+ """Validate maximum length."""
+ return InRange(upper=n)
+
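+# A minimal sketch: the length validators accept anything with len():
+#
+#     MinLength(2)("ab")          # -> Ok('ab')
+#     MaxLength(3)([1, 2, 3, 4])  # -> Err with "Length must be <= 3"
+#     InRange(1, 3)("")           # -> Err with "Length must be >= 1 and <= 3"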
+
+def InSet(values: set | frozenset | list | tuple) -> V:
+ """
+ Validate value is in a set of allowed values.
+
+ Usage:
+ InSet({"active", "inactive", "pending"})
+ InSet([1, 2, 3])
+ """
+ container = frozenset(values)
+
+ def check(x: Any) -> bool:
+ return x in container
+
+ return V(
+ check=check,
+ message=f"Must be one of: {container}",
+ )
+
+
+def Matches(pattern: str) -> V:
+ """
+    Validate that a string matches a regex pattern, using re.match
+    (anchored at the start of the string, not the end).
+
+ Usage:
+ Matches(r"^[a-z]+$")
+ Matches(r"\\d{3}-\\d{4}")
+ """
+ compiled = re.compile(pattern)
+
+ def check(x: Any) -> bool:
+ return isinstance(x, str) and compiled.match(x) is not None
+
+ return V(
+ check=check,
+ type_hint=str,
+ message=f"Must match pattern: {pattern}",
+ )
+
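+# A minimal sketch of the re.match semantics (anchored at the start only):
+#
+#     Matches(r"^[a-z]+$")("abc")   # -> Ok('abc')
+#     Matches(r"[a-z]+")("abc123")  # -> Ok('abc123')  (prefix match suffices)
+#     Matches(r"[a-z]+")("123")     # -> Err (no match at position 0)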
+
+def Predicate(fn: Any, message: str | None = None) -> V:
+ """
+ Create validator from arbitrary predicate function.
+
+ Usage:
+ Predicate(lambda x: x > 0, "Must be positive")
+ Predicate(str.isalpha, "Must be alphabetic")
+ """
+ return V(check=fn, message=message)
+
+
+def Eq(value: Any) -> V:
+ """Validate exact equality."""
+
+ def check(x: Any) -> bool:
+ return x == value
+
+ return V(check=check, message=f"Must equal {repr(value)}")
+
+
+def Gt(value: Any) -> V:
+ """Validate greater than."""
+
+ def check(x: Any) -> bool:
+ return x > value
+
+ return V(check=check, message=f"Must be > {value}")
+
+
+def Gte(value: Any) -> V:
+ """Validate greater than or equal."""
+
+ def check(x: Any) -> bool:
+ return x >= value
+
+ return V(check=check, message=f"Must be >= {value}")
+
+
+def Lt(value: Any) -> V:
+ """Validate less than."""
+
+ def check(x: Any) -> bool:
+ return x < value
+
+ return V(check=check, message=f"Must be < {value}")
+
+
+def Lte(value: Any) -> V:
+ """Validate less than or equal."""
+
+ def check(x: Any) -> bool:
+ return x <= value
+
+ return V(check=check, message=f"Must be <= {value}")
+
+
+def Between(lower: Any, upper: Any, inclusive: bool = True) -> V:
+ """Validate value is between bounds."""
+ if inclusive:
+
+ def check(x: Any) -> bool:
+ return lower <= x <= upper
+
+ return V(
+ check=check,
+ message=f"Must be between {lower} and {upper}",
+ )
+
+ def check_exclusive(x: Any) -> bool:
+ return lower < x < upper
+
+ return V(
+ check=check_exclusive,
+ message=f"Must be between {lower} and {upper} (exclusive)",
+ )
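+
+# A minimal sketch: the comparison validators delegate to Python's comparison
+# operators, so they work for any mutually comparable values:
+#
+#     Gte(0)(5)                            # -> Ok(5)
+#     Between(1, 10)(10)                   # -> Ok(10)  (inclusive by default)
+#     Between(1, 10, inclusive=False)(10)  # -> Err with "... (exclusive)"
+#     Eq("active")("inactive")             # -> Err with "Must equal 'active'"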
diff --git a/pyproject.toml b/pyproject.toml
index 60d75d7..9566943 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,29 +4,13 @@ build-backend = "setuptools.build_meta"
[project]
name = "chidian"
-version = "0.1.5"
-requires-python = ">=3.8"
-classifiers = [
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
- "Programming Language :: Python :: Implementation :: CPython",
- "Programming Language :: Python :: Implementation :: PyPy",
-]
+version = "0.2.0"
+requires-python = ">=3.10"
dependencies = [
"parsimonious>=0.10.0",
- "pyarrow>=17.0.0",
"pydantic>=2.10.6,<3.0.0", # Only Pydantic v2 is supported
]
-[project.optional-dependencies]
-pandas = ["pandas>=2.0"]
-polars = ["polars>=0.20"]
-dfs = ["pandas>=2.0", "polars>=0.20"]
-
[tool.setuptools.package-data]
# relative to the chidian/lib/ package directory
"chidian.lib" = ["dsl/*.peg"]
diff --git a/tests/structstest.py b/tests/structstest.py
deleted file mode 100644
index 192f683..0000000
--- a/tests/structstest.py
+++ /dev/null
@@ -1,256 +0,0 @@
-"""
-Shared test models for all test files.
-
-Consolidates Pydantic models used across the test suite to avoid duplication
-and provide consistent test data structures. This prevents inline model
-definitions scattered across test files and makes it easier to reason about
-test data structures across the entire test suite.
-"""
-
-from typing import Optional
-
-from pydantic import BaseModel
-
-# =============================================================================
-# Medical Domain Models (Patient/Observation)
-# =============================================================================
-
-
-class Patient(BaseModel):
- """Sample Patient model for testing transformations."""
-
- id: str
- name: str
- active: bool
- age: Optional[int] = None
-
-
-class Observation(BaseModel):
- """Sample Observation model for testing transformations."""
-
- subject_ref: str
- performer: str
- status: Optional[str] = None
-
-
-# =============================================================================
-# Basic Processing Models
-# =============================================================================
-
-
-class SourceData(BaseModel):
- """Generic source data container."""
-
- data: dict = {}
-
-
-class ProcessedData(BaseModel):
- """Generic processed data with standard fields."""
-
- patient_id: str
- is_active: bool
- status: str
-
-
-class PersonSource(BaseModel):
- """Source model for person-based transformations."""
-
- firstName: str
- lastName: str
- status: str
- codes: list[str]
- address: str
-
-
-class PersonTarget(BaseModel):
- """Target model for person-based transformations."""
-
- name: str
- status_display: str
- all_codes: str
- city: str
- backup_name: str
-
-
-# =============================================================================
-# Consolidated Inline Models from Test Files
-# =============================================================================
-
-
-class SimpleTarget(BaseModel):
- """Basic target model for seed processing tests."""
-
- patient_id: str
- is_active: bool
-
-
-class KeepTestTarget(BaseModel):
- """Target model for testing KEEP object processing."""
-
- processed_value: str
- regular_value: str
-
-
-class SourceModel(BaseModel):
- """Simple source model for basic mapping tests."""
-
- value: str
-
-
-class TargetModel(BaseModel):
- """Simple target model for basic mapping tests."""
-
- result: str
-
-
-class BasicSource(BaseModel):
- """Generic source model for validation tests."""
-
- id: str
-
-
-class BasicTarget(BaseModel):
- """Generic target model for validation tests."""
-
- id: str
- required_field: str
-
-
-class BasicSourceWithName(BaseModel):
- """Source model with id and name fields."""
-
- id: str
- name: str
-
-
-class BasicTargetWithPersonId(BaseModel):
- """Target model with person_id and display_name."""
-
- person_id: str
- display_name: str
-
-
-class NestedSource(BaseModel):
- """Source model with nested dictionary fields."""
-
- subject: dict
- valueQuantity: dict
-
-
-class NestedTarget(BaseModel):
- """Target model for nested source transformations."""
-
- patient_id: str
- value: float
-
-
-class TransformSource(BaseModel):
- """Source model for transformation testing."""
-
- name: str
- reference: str
-
-
-class TransformTarget(BaseModel):
- """Target model with transformed fields."""
-
- name_upper: str
- id: int
-
-
-class ErrorTestSource(BaseModel):
- """Source model for error handling tests."""
-
- data: dict
-
-
-class ErrorTestTarget(BaseModel):
- """Target model for error handling tests."""
-
- safe: Optional[str] = None
- error: Optional[str] = None
-
-
-class NestedBidirectionalSource(BaseModel):
- """Model with nested structure for bidirectional tests."""
-
- patient: dict
- metadata: dict
-
-
-class NestedBidirectionalTarget(BaseModel):
- """Target with different nesting for bidirectional tests."""
-
- id: str
- name: str
- extra_data: dict
-
-
-# =============================================================================
-# A/B Test Data Models (Complex to Flat transformation)
-# =============================================================================
-
-
-class NameData(BaseModel):
- """Nested name structure for A.json test data."""
-
- first: str
- given: list[str]
- prefix: Optional[str] = None
- suffix: Optional[str] = None
-
-
-class AddressData(BaseModel):
- """Address structure for A.json test data."""
-
- street: list[str]
- city: str
- state: str
- postal_code: str
- country: str
-
-
-class AddressHistory(BaseModel):
- """Address history for A.json test data."""
-
- current: AddressData
- previous: list[AddressData]
-
-
-class ComplexPersonData(BaseModel):
- """Complex nested structure (A.json format)."""
-
- name: NameData
- address: AddressHistory
-
-
-class FlatPersonData(BaseModel):
- """Flattened structure (B.json format)."""
-
- full_name: str
- current_address: str
- last_previous_address: str
-
-
-# =============================================================================
-# FHIR-style Models (Available for realistic test scenarios)
-# =============================================================================
-
-
-class FHIRObservation(BaseModel):
- """FHIR-style Observation for realistic transformations."""
-
- id: str
- subject: dict
- code: dict
- valueQuantity: Optional[dict] = None
-
-
-class FlatObservation(BaseModel):
- """Flattened observation for analytics."""
-
- observation_id: str
- patient_id: str
- loinc_code: str
- value: Optional[float] = None
- unit: Optional[str] = None
diff --git a/tests/test_data_mapping.py b/tests/test_data_mapping.py
deleted file mode 100644
index 83fc3f4..0000000
--- a/tests/test_data_mapping.py
+++ /dev/null
@@ -1,392 +0,0 @@
-"""Test the new DataMapping class and Mapper with validation modes."""
-
-from typing import Any, Optional
-
-import pytest
-from pydantic import BaseModel
-
-import chidian.partials as p
-from chidian import Mapper, MapperResult, ValidationMode
-
-
-# Test models
-class Patient(BaseModel):
- id: str
- name: str
- active: bool
- internal_notes: Optional[str] = None
- age: Optional[int] = None
-
-
-class Observation(BaseModel):
- subject_ref: str
- performer: str
- status: Optional[str] = None
-
-
-class TestDataMappingBasic:
- """Test basic DataMapping functionality as forward-only validator."""
-
- def test_simple_mapping_with_mapper(self) -> None:
- """Test DataMapping with Mapper for basic field mapping."""
- # Create a Mapper for transformation
- mapper = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
-
- patient = Patient(id="123", name="John", active=True)
- obs = mapper(patient)
-
- assert isinstance(obs, Observation)
- assert obs.subject_ref == "123"
- assert obs.performer == "John"
-
- def test_complex_mapping_with_callable_mapper(self) -> None:
- """Test DataMapping with callable transformations."""
- mapper = Mapper(
- transformations={
- "subject_ref": lambda data: f"Patient/{data['id']}",
- "performer": lambda data: data["name"].upper(),
- "status": lambda data: "active" if data["active"] else "inactive",
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
-
- patient = Patient(id="123", name="john", active=True)
- obs = mapper(patient)
-
- assert isinstance(obs, Observation)
- assert obs.subject_ref == "Patient/123"
- assert obs.performer == "JOHN"
- assert obs.status == "active"
-
- def test_validation_modes(self) -> None:
- """Test different validation modes."""
- # Test strict mode
- strict_mapper = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
- patient = Patient(id="123", name="John", active=True)
- obs = strict_mapper(patient)
- assert isinstance(obs, Observation)
- assert obs.subject_ref == "123"
-
- # Test flexible mode
- flexible_mapper = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.FLEXIBLE,
- )
- result = flexible_mapper(patient)
- assert isinstance(result, MapperResult)
- assert not result.has_issues
- assert isinstance(result.data, Observation)
- assert result.data.subject_ref == "123"
-
-
-class TestDataMappingValidation:
- """Test DataMapping validation features."""
-
- def test_no_input_validation(self) -> None:
- """Test that Mapper no longer validates input (min_input_schemas is metadata-only)."""
- mapper = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
-
- # Valid input works
- patient = Patient(id="123", name="John", active=True)
- obs = mapper(patient)
- assert isinstance(obs, Observation)
- assert obs.subject_ref == "123"
-
- # Invalid input now works because no input validation occurs
- # Will fail on output validation due to missing required fields
- with pytest.raises(Exception): # Output validation error
- mapper({"invalid": "data"})
-
- def test_output_validation(self) -> None:
- """Test that Mapper validates output against output schema."""
- # Mapper that produces invalid output
- mapper = Mapper(
- transformations={
- "invalid_field": lambda data: "value", # Missing required fields
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
- patient = Patient(id="123", name="John", active=True)
-
- # Should raise ValidationError due to invalid output in strict mode
- with pytest.raises(Exception): # Pydantic ValidationError
- mapper(patient)
-
- def test_flexible_mode_validation(self) -> None:
- """Test flexible mode collects validation errors."""
- # Mapper that produces invalid output
- mapper = Mapper(
- transformations={
- "invalid_field": lambda data: "value", # Missing required fields
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.FLEXIBLE,
- )
- patient = Patient(id="123", name="John", active=True)
-
- # Should return MapperResult with issues
- result = mapper(patient)
- assert isinstance(result, MapperResult)
- assert result.has_issues
- assert len(result.issues) > 0
- assert result.issues[0].stage == "output"
-
- def test_dict_input_with_strict_mode(self) -> None:
- """Test handling of dict input in strict mode."""
- mapper = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
-
- # Dict input should be validated and converted
- dict_input = {"id": "123", "name": "John", "active": True}
- obs = mapper(dict_input)
- assert isinstance(obs, Observation)
- assert obs.subject_ref == "123"
- assert obs.performer == "John"
-
- def test_auto_mode(self) -> None:
- """Test auto mode behavior."""
- # With output schema - should use strict mode
- mapper_with_schemas = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- },
- min_input_schemas=[Patient],
- output_schema=Observation,
- ) # AUTO mode by default
- assert mapper_with_schemas.mode == ValidationMode.STRICT
-
- # Without schemas - should use flexible mode
- mapper_no_schemas: Mapper[Any] = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- }
- ) # AUTO mode by default
- assert mapper_no_schemas.mode == ValidationMode.FLEXIBLE
-
-
-class TestDataMappingWithoutSchemas:
- """Test DataMapping without schemas (pure transformation)."""
-
- def test_pure_transformation(self) -> None:
- """Test DataMapping as pure transformation without schemas."""
- mapper: Mapper[Any] = Mapper(
- transformations={
- "subject_ref": p.get("id"),
- "performer": p.get("name"),
- }
- )
-
- # Direct transformation
- result = mapper.transform({"id": "123", "name": "John"})
- assert result["subject_ref"] == "123"
- assert result["performer"] == "John"
-
- def test_with_flexible_mapper(self) -> None:
- """Test DataMapping without schemas using flexible Mapper."""
- mapper: Mapper[Any] = Mapper(
- transformations={
- "subject_ref": lambda data: f"Patient/{data.get('id', 'unknown')}",
- "performer": lambda data: data.get("name", "Unknown"),
- "status": lambda data: "processed",
- },
- mode=ValidationMode.FLEXIBLE,
- )
-
- # Should work with incomplete data
- result = mapper({"id": "123"})
- # Without schemas, returns dict directly
- assert isinstance(result, dict)
- assert result["subject_ref"] == "Patient/123"
- assert result["performer"] == "Unknown"
- assert result["status"] == "processed"
-
- def test_mapper_result_interface(self) -> None:
- """Test MapperResult interface."""
- mapper = Mapper(
- transformations={
- "missing_field": p.get("nonexistent"),
- },
- output_schema=Observation,
- mode=ValidationMode.FLEXIBLE,
- )
- result = mapper({"id": "123"})
-
- assert isinstance(result, MapperResult)
- assert result.has_issues
-
- # Test raise_if_issues
- with pytest.raises(Exception):
- result.raise_if_issues()
-
-
-class TestManyToOneMapping:
- """Test many-to-one mapping metadata functionality."""
-
- def test_min_input_schemas_metadata(self) -> None:
- """Test that min_input_schemas is stored as metadata."""
-
- class Encounter(BaseModel):
- id: str
- status: str
- period_start: str
-
- mapper = Mapper(
- transformations={
- "subject_ref": lambda data: f"Patient/{data.get('patient_id', 'unknown')}",
- "encounter_ref": lambda data: f"Encounter/{data.get('encounter_id', 'unknown')}",
- "status": lambda data: data.get("status", "unknown"),
- },
- min_input_schemas=[Patient, Encounter],
- output_schema=Observation,
- )
-
- # Verify metadata is stored
- assert mapper.min_input_schemas == [Patient, Encounter]
- assert len(mapper.min_input_schemas) == 2
-
- def test_other_input_schemas_metadata(self) -> None:
- """Test that other_input_schemas is stored as metadata."""
-
- class Encounter(BaseModel):
- id: str
- status: str
-
- class Practitioner(BaseModel):
- id: str
- name: str
-
- mapper = Mapper(
- transformations={
- "subject_ref": p.get("patient_id"),
- "performer": p.get("practitioner_name"),
- "encounter_ref": p.get("encounter_id"),
- },
- min_input_schemas=[Patient],
- other_input_schemas=[Encounter, Practitioner],
- output_schema=Observation,
- )
-
- # Verify metadata is stored
- assert mapper.min_input_schemas == [Patient]
- # Type: ignore for mypy - local classes are BaseModel subclasses
- assert mapper.other_input_schemas == [Encounter, Practitioner] # type: ignore[list-item]
- assert len(mapper.other_input_schemas) == 2
-
- def test_metadata_not_enforced_at_runtime(self) -> None:
- """Test that input schemas are not enforced during transformation."""
-
- class CompletelyDifferentModel(BaseModel):
- foo: str
- bar: int
-
- mapper = Mapper(
- transformations={
- "subject_ref": lambda data: f"Patient/{data.get('totally_different_field', '123')}",
- "performer": lambda data: "Dr. Smith",
- },
- min_input_schemas=[CompletelyDifferentModel], # This is just metadata
- output_schema=Observation,
- mode=ValidationMode.STRICT,
- )
-
- # Can pass any dict, not enforced to match CompletelyDifferentModel
- result = mapper(
- {"totally_different_field": "xyz", "some_other_field": "ignored"}
- )
-
- assert isinstance(result, Observation)
- assert result.subject_ref == "Patient/xyz"
- assert result.performer == "Dr. Smith"
-
- def test_empty_schemas_lists(self) -> None:
- """Test DataMapping with empty or None schema lists."""
- # Test with None (should default to empty lists)
- mapper1: Mapper[Any] = Mapper(
- transformations={
- "field1": p.get("source1"),
- },
- min_input_schemas=None,
- other_input_schemas=None,
- )
-
- assert mapper1.min_input_schemas == []
- assert mapper1.other_input_schemas == []
-
- # Test with explicit empty lists
- mapper2: Mapper[Any] = Mapper(
- transformations={
- "field2": p.get("source2"),
- },
- min_input_schemas=[],
- other_input_schemas=[],
- )
-
- assert mapper2.min_input_schemas == []
- assert mapper2.other_input_schemas == []
-
- def test_has_schemas_only_checks_output(self) -> None:
- """Test that has_schemas only checks for output_schema."""
- # With min_input_schemas but no output_schema
- mapper1: Mapper[Any] = Mapper(
- transformations={"field": p.get("source")},
- min_input_schemas=[Patient],
- )
- assert not mapper1.has_schemas
-
- # With output_schema
- mapper2 = Mapper(
- transformations={"field": p.get("source")},
- output_schema=Observation,
- )
- assert mapper2.has_schemas
-
- # With both min_input_schemas and output_schema
- mapper3 = Mapper(
- transformations={"field": p.get("source")},
- min_input_schemas=[Patient],
- output_schema=Observation,
- )
- assert mapper3.has_schemas
diff --git a/tests/test_get.py b/tests/test_grab.py
similarity index 68%
rename from tests/test_get.py
rename to tests/test_grab.py
index 2a691dc..70761d7 100644
--- a/tests/test_get.py
+++ b/tests/test_grab.py
@@ -1,14 +1,14 @@
-"""Consolidated tests for the get function."""
+"""Tests for the grab function."""
from typing import Any
import pytest
-from chidian import get
+from chidian import grab
-class TestGetBasic:
- """Test basic get operations."""
+class TestGrabBasic:
+ """Test basic grab operations."""
@pytest.mark.parametrize(
"path,expected_key",
@@ -20,7 +20,7 @@ class TestGetBasic:
)
def test_simple_paths(self, simple_data: dict[str, Any], path: str, expected_key):
"""Test basic dot notation paths."""
- result = get(simple_data, path)
+ result = grab(simple_data, path)
# Navigate to expected value
expected = simple_data
@@ -34,18 +34,18 @@ def test_simple_paths(self, simple_data: dict[str, Any], path: str, expected_key
def test_missing_paths(self, simple_data: dict[str, Any]):
"""Test behavior with missing paths."""
- assert get(simple_data, "missing") is None
- assert get(simple_data, "data.missing") is None
- assert get(simple_data, "missing", default="DEFAULT") == "DEFAULT"
+ assert grab(simple_data, "missing") is None
+ assert grab(simple_data, "data.missing") is None
+ assert grab(simple_data, "missing", default="DEFAULT") == "DEFAULT"
def test_apply_function(self, simple_data: dict[str, Any]):
"""Test applying transformation functions."""
- result = get(simple_data, "data.patient.id", apply=lambda x: x + "_modified")
+ result = grab(simple_data, "data.patient.id", apply=lambda x: x + "_modified")
assert result == simple_data["data"]["patient"]["id"] + "_modified"
-class TestGetArrays:
- """Test get operations on arrays."""
+class TestGrabArrays:
+ """Test grab operations on arrays."""
@pytest.mark.parametrize(
"path,indices",
@@ -59,13 +59,13 @@ def test_single_index(
self, simple_data: dict[str, Any], path: str, indices: list[int]
):
"""Test single array index access."""
- result = get(simple_data, path)
+ result = grab(simple_data, path)
expected = simple_data["list_data"][indices[0]]["patient"]
assert result == expected
def test_out_of_bounds_index(self, simple_data: dict[str, Any]):
"""Test behavior with out of bounds indices."""
- assert get(simple_data, "list_data[5000].patient") is None
+ assert grab(simple_data, "list_data[5000].patient") is None
@pytest.mark.parametrize(
"path,slice_params",
@@ -78,26 +78,26 @@ def test_out_of_bounds_index(self, simple_data: dict[str, Any]):
)
def test_array_slicing(self, simple_data: dict[str, Any], path: str, slice_params):
"""Test array slicing operations."""
- result = get(simple_data, path)
+ result = grab(simple_data, path)
start, stop = slice_params
expected = simple_data["list_data"][start:stop]
assert result == expected
def test_slice_then_access(self, simple_data: dict[str, Any]):
"""Test slicing followed by property access."""
- result = get(simple_data, "list_data[1:3].patient")
+ result = grab(simple_data, "list_data[1:3].patient")
expected = [item["patient"] for item in simple_data["list_data"][1:3]]
assert result == expected
def test_wildcard_on_list(self, list_data: list[Any]):
"""Test wildcard operations on lists."""
- assert get(list_data, "[*].patient") == [p["patient"] for p in list_data]
- assert get(list_data, "[:].patient.id") == [
+ assert grab(list_data, "[*].patient") == [p["patient"] for p in list_data]
+ assert grab(list_data, "[:].patient.id") == [
p["patient"]["id"] for p in list_data
]
-class TestGetEdgeCases:
+class TestGrabEdgeCases:
"""Test edge cases and error conditions."""
@pytest.mark.parametrize(
@@ -113,11 +113,11 @@ class TestGetEdgeCases:
)
def test_none_handling(self, data, path, expected):
"""Test handling of None values."""
- assert get(data, path) == expected
+ assert grab(data, path) == expected
def test_nested_arrays(self, deep_nested_list: list[Any]):
"""Test deeply nested array access."""
- result = get(deep_nested_list, "[0].patient.list_of_dicts[*].num")
+ result = grab(deep_nested_list, "[0].patient.list_of_dicts[*].num")
expected = [
item["num"] for item in deep_nested_list[0]["patient"]["list_of_dicts"]
]
@@ -128,9 +128,25 @@ def test_type_mismatches(self):
data = {"list": [1, 2, 3], "dict": {"key": "value"}}
# Trying to index a dict - should return None
- result = get(data, "dict[0]")
+ result = grab(data, "dict[0]")
assert result is None
# Trying to access property on list - returns list of None
- result = get(data, "list.property")
- assert result == [None, None, None] # One None for each item in list
+ result = grab(data, "list.property")
+ assert result == [None, None, None]
+
+
+class TestGrabIntegration:
+ """Integration tests for grab function."""
+
+ def test_fhir_data(self, fhir_bundle: dict[str, Any]):
+ """Test with FHIR-like data structures."""
+ # Get all observation IDs
+ ids = grab(fhir_bundle, "entry[*].resource.id")
+ assert ids == ["bp-1", "bp-2"]
+
+ # Get first observation's systolic value
+ systolic = grab(
+ fhir_bundle, "entry[0].resource.component[0].valueQuantity.value"
+ )
+ assert systolic == 120
diff --git a/tests/test_lexicon.py b/tests/test_lexicon.py
deleted file mode 100644
index 4d9c35a..0000000
--- a/tests/test_lexicon.py
+++ /dev/null
@@ -1,354 +0,0 @@
-"""Tests for the Lexicon class with tuple support."""
-
-import pytest
-
-from chidian.lexicon import Lexicon
-
-
-class TestLexiconBasic:
- """Test basic Lexicon functionality."""
-
- def test_simple_string_mappings(self):
- """Test basic one-to-one string mappings."""
- lexicon = Lexicon({"8480-6": "271649006", "8462-4": "271650006"})
-
- # Forward lookups (keys first)
- assert lexicon["8480-6"] == "271649006"
- assert lexicon["8462-4"] == "271650006"
-
- # Reverse lookups (values second)
- assert lexicon["271649006"] == "8480-6"
- assert lexicon["271650006"] == "8462-4"
-
- def test_tuple_many_to_one_mappings(self):
- """Test many-to-one mappings with tuples."""
- lexicon = Lexicon(
- {
- ("A", "B", "C"): "x",
- ("D", "E"): "y",
- "F": "z", # Can mix single and tuple mappings
- }
- )
-
- # Forward lookups - all keys map to value
- assert lexicon["A"] == "x"
- assert lexicon["B"] == "x"
- assert lexicon["C"] == "x"
- assert lexicon["D"] == "y"
- assert lexicon["E"] == "y"
- assert lexicon["F"] == "z"
-
- # Reverse lookups - first in tuple is default
- assert lexicon["x"] == "A" # First in tuple
- assert lexicon["y"] == "D" # First in tuple
- assert lexicon["z"] == "F"
-
- def test_lookup_priority(self):
- """Test that keys are scanned before values."""
- # If a value matches a key, the key lookup wins
- lexicon = Lexicon({"A": "B", "B": "C"})
-
- assert lexicon["A"] == "B" # Key lookup
- assert lexicon["B"] == "C" # Key lookup (takes priority over value)
- assert lexicon["C"] == "B" # Value lookup (reverse)
-
- def test_get_method(self):
- """Test get method with defaults."""
- lexicon = Lexicon(
- {
- "yes": "Y",
- "no": "N",
- ("true", "1", "on"): "T",
- ("false", "0", "off"): "F",
- }
- )
-
- # Key lookups
- assert lexicon.get("yes") == "Y"
- assert lexicon.get("true") == "T"
- assert lexicon.get("1") == "T"
-
- # Value lookups (reverse)
- assert lexicon.get("Y") == "yes"
- assert lexicon.get("T") == "true" # First in tuple
- assert lexicon.get("F") == "false" # First in tuple
-
- # Missing keys with default
- assert lexicon.get("missing") is None
- assert lexicon.get("missing", "DEFAULT") == "DEFAULT"
-
- def test_instance_default(self):
- """Test default value behavior."""
- lexicon = Lexicon({"yes": "Y"}, default="UNKNOWN")
-
- assert lexicon["yes"] == "Y"
- assert lexicon["Y"] == "yes"
- assert lexicon["missing"] == "UNKNOWN"
- assert lexicon.get("missing") == "UNKNOWN"
- assert lexicon.get("missing", "CUSTOM") == "CUSTOM" # Override default
-
- def test_contains(self):
- """Test membership checking."""
- lexicon = Lexicon({"a": "1", ("b", "c"): "2"})
-
- # Keys
- assert "a" in lexicon
- assert "b" in lexicon
- assert "c" in lexicon
-
- # Values (also searchable)
- assert "1" in lexicon
- assert "2" in lexicon
-
- # Missing
- assert "d" not in lexicon
- assert "3" not in lexicon
-
- def test_dict_interface(self):
- """Test that Lexicon maintains dict-like interface."""
- lexicon = Lexicon({"a": "1", "b": "2"})
-
- # Basic dict operations
- assert len(lexicon) == 2
- assert sorted(lexicon.keys()) == ["a", "b"]
- assert sorted(lexicon.values()) == ["1", "2"]
- assert dict(lexicon) == {"a": "1", "b": "2"}
-
- def test_empty_lexicon(self):
- """Test empty lexicon behavior."""
- lexicon = Lexicon({})
-
- assert len(lexicon) == 0
- assert lexicon.get("any") is None
-
- with pytest.raises(KeyError):
- _ = lexicon["any"]
-
- def test_no_key_error_with_default(self):
- """Test that KeyError is not raised when default is set."""
- lexicon = Lexicon({}, default="DEFAULT")
-
- # Should return default, not raise KeyError
- assert lexicon["missing"] == "DEFAULT"
-
-
-class TestLexiconBuilder:
- """Test the builder pattern interface."""
-
- def test_builder_basic(self):
- """Test basic builder usage."""
- lexicon = Lexicon.builder().add("A", "1").add("B", "2").build()
-
- assert lexicon["A"] == "1"
- assert lexicon["B"] == "2"
- assert lexicon["1"] == "A"
- assert lexicon["2"] == "B"
-
- def test_builder_with_many(self):
- """Test builder with many-to-one mappings."""
- lexicon = (
- Lexicon.builder()
- .add_many(["A", "B", "C"], "x")
- .add_many(["D", "E"], "y")
- .add("F", "z")
- .build()
- )
-
- # Forward mappings
- assert lexicon["A"] == "x"
- assert lexicon["B"] == "x"
- assert lexicon["C"] == "x"
- assert lexicon["F"] == "z"
-
- # Reverse mappings (first is default)
- assert lexicon["x"] == "A"
- assert lexicon["y"] == "D"
- assert lexicon["z"] == "F"
-
- def test_builder_with_default(self):
- """Test builder with default value."""
- lexicon = Lexicon.builder().add("A", "1").set_default("MISSING").build()
-
- assert lexicon["A"] == "1"
- assert lexicon["missing"] == "MISSING"
-
- def test_builder_with_metadata(self):
- """Test builder with metadata."""
- lexicon = (
- Lexicon.builder().add("A", "1").set_metadata({"version": "1.0"}).build()
- )
-
- assert lexicon.metadata["version"] == "1.0"
-
- def test_builder_primary_override(self):
- """Test that builder can override primary reverse mapping."""
- lexicon = (
- Lexicon.builder()
- .add_many(["A", "B", "C"], "x")
- .set_primary_reverse("x", "B") # Override default
- .build()
- )
-
- assert lexicon["x"] == "B" # Not "A"
-
-
-class TestLexiconEdgeCases:
- """Test edge cases and special scenarios."""
-
- def test_self_mapping(self):
- """Test when a key maps to itself."""
- lexicon = Lexicon({"A": "A", "B": "B"})
-
- # Should work normally
- assert lexicon["A"] == "A"
- assert lexicon["B"] == "B"
-
- def test_circular_mapping(self):
- """Test circular mappings."""
- lexicon = Lexicon({"A": "B", "B": "A"})
-
- # Forward lookups
- assert lexicon["A"] == "B"
- assert lexicon["B"] == "A"
-
- def test_chain_mapping(self):
- """Test chain-like mappings."""
- lexicon = Lexicon({"A": "B", "B": "C", "C": "D"})
-
- # Each lookup is independent
- assert lexicon["A"] == "B"
- assert lexicon["B"] == "C"
- assert lexicon["C"] == "D"
- assert lexicon["D"] == "C" # Reverse lookup
-
- def test_case_sensitivity(self):
- """Test that lookups are case-sensitive."""
- lexicon = Lexicon({"Code": "VALUE", "code": "value"})
-
- assert lexicon["Code"] == "VALUE"
- assert lexicon["code"] == "value"
- assert lexicon["VALUE"] == "Code"
- assert lexicon["value"] == "code"
-
- def test_whitespace_handling(self):
- """Test handling of whitespace in keys/values."""
- lexicon = Lexicon({" A ": " B ", "C": "D "})
-
- assert lexicon[" A "] == " B "
- assert lexicon[" B "] == " A "
- assert lexicon["C"] == "D "
- assert lexicon["D "] == "C"
-
- def test_overlapping_tuples(self):
- """Test when multiple tuples map to same value."""
- lexicon = Lexicon(
- {
- ("A", "B"): "x",
- ("C", "D"): "x", # Same value
- "E": "x", # Also same value
- }
- )
-
- # All forward mappings work
- assert lexicon["A"] == "x"
- assert lexicon["B"] == "x"
- assert lexicon["C"] == "x"
- assert lexicon["D"] == "x"
- assert lexicon["E"] == "x"
-
- # Reverse gives first occurrence
- assert lexicon["x"] == "A" # First key that mapped to "x"
-
- def test_empty_tuple(self):
- """Test that empty tuples are handled gracefully."""
- with pytest.raises(ValueError, match="Empty tuple"):
- Lexicon({(): "value"})
-
- def test_mixed_types_rejected(self):
- """Test that non-string types are rejected."""
- with pytest.raises(TypeError, match="must be strings"):
- Lexicon({123: "value"})
-
- with pytest.raises(TypeError, match="must be strings"):
- Lexicon({"key": 456})
-
- with pytest.raises(TypeError, match="must be strings"):
- Lexicon({("A", 123): "value"})
-
-
-class TestLexiconRealWorld:
- """Test real-world scenarios."""
-
- def test_medical_code_mapping(self):
- """Test LOINC to SNOMED mapping example."""
- lab_codes = Lexicon(
- {
- "8480-6": "271649006", # Systolic BP
- "8462-4": "271650006", # Diastolic BP
- "8867-4": "364075005", # Heart rate
- # Multiple LOINC codes for same concept
- ("2160-0", "38483-4", "14682-9"): "113075003", # Creatinine
- },
- metadata={"version": "2023-Q4", "source": "LOINC-SNOMED"},
- )
-
- # Forward mapping (LOINC to SNOMED)
- assert lab_codes["8480-6"] == "271649006"
- assert lab_codes["2160-0"] == "113075003"
- assert lab_codes["38483-4"] == "113075003"
- assert lab_codes["14682-9"] == "113075003"
-
- # Reverse mapping (SNOMED to LOINC)
- assert lab_codes["271649006"] == "8480-6"
- assert lab_codes["113075003"] == "2160-0" # First in tuple
-
- # Metadata
- assert lab_codes.metadata["version"] == "2023-Q4"
-
- def test_status_code_mapping(self):
- """Test status code transformations with aliases."""
- status_map = Lexicon(
- {
- ("active", "current", "live"): "A",
- ("inactive", "stopped", "discontinued"): "I",
- ("pending", "waiting"): "P",
- "completed": "C",
- },
- default="U", # Unknown
- )
-
- # Forward mapping with aliases
- assert status_map["active"] == "A"
- assert status_map["current"] == "A"
- assert status_map["live"] == "A"
- assert status_map["stopped"] == "I"
- assert status_map["completed"] == "C"
-
- # Reverse mapping (first alias is default)
- assert status_map["A"] == "active"
- assert status_map["I"] == "inactive"
- assert status_map["P"] == "pending"
-
- # Unknown status
- assert status_map["unknown"] == "U"
- assert status_map["X"] == "U"
-
- def test_unit_conversion_codes(self):
- """Test unit of measure mappings."""
- unit_map = Lexicon(
- {
- ("mg/dL", "mg/dl", "MG/DL"): "MG_PER_DL",
- ("mmol/L", "mmol/l", "MMOL/L"): "MMOL_PER_L",
- "g/dL": "G_PER_DL",
- "mEq/L": "MEQ_PER_L",
- }
- )
-
- # Case variations all map to canonical form
- assert unit_map["mg/dL"] == "MG_PER_DL"
- assert unit_map["mg/dl"] == "MG_PER_DL"
- assert unit_map["MG/DL"] == "MG_PER_DL"
-
- # Reverse gives the first (preferred) form
- assert unit_map["MG_PER_DL"] == "mg/dL"
- assert unit_map["MMOL_PER_L"] == "mmol/L"
diff --git a/tests/test_lib.py b/tests/test_lib.py
index b2ed01b..dea0b67 100644
--- a/tests/test_lib.py
+++ b/tests/test_lib.py
@@ -1,10 +1,10 @@
-"""Simplified integration tests for core functionality."""
+"""Integration tests for core functionality."""
-from chidian import get, put
+from chidian import grab
-def test_get_function_basic():
- """Test basic get functionality."""
+def test_grab_function_basic():
+ """Test basic grab functionality."""
data = {
"patient": {
"id": "123",
@@ -17,22 +17,9 @@ def test_get_function_basic():
}
# Basic path access
- assert get(data, "patient.id") == "123"
- assert get(data, "patient.name.given") == "John"
- assert get(data, "patient.contact[0].value") == "555-1234"
+ assert grab(data, "patient.id") == "123"
+ assert grab(data, "patient.name.given") == "John"
+ assert grab(data, "patient.contact[0].value") == "555-1234"
# Array operations
- assert get(data, "patient.contact[*].system") == ["phone", "email"]
-
-
-def test_put_function_basic():
- """Test basic put functionality."""
- data = {"patient": {"id": "123"}}
-
- # Basic put
- result = put(data, "patient.name", "John Doe")
- assert result["patient"]["name"] == "John Doe"
-
- # Nested put
- result = put(data, "patient.address.city", "Boston")
- assert result["patient"]["address"]["city"] == "Boston"
+ assert grab(data, "patient.contact[*].system") == ["phone", "email"]
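+
+    # Sketch: the wildcard result above implies the same element is reachable
+    # by a plain index (illustrative follow-up, not part of the original test).
+    assert grab(data, "patient.contact[1].system") == "email"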
diff --git a/tests/test_mapper.py b/tests/test_mapper.py
index 4de5b77..60e00b7 100644
--- a/tests/test_mapper.py
+++ b/tests/test_mapper.py
@@ -1,419 +1,376 @@
-"""Tests for Mapper as independent dict->dict transformer and validation engine."""
-
-from typing import Any
+"""Tests for the @mapper decorator and related functionality."""
import pytest
-from pydantic import BaseModel
-import chidian.partials as p
-from chidian import Mapper, MapperResult, ValidationMode, get
+from chidian import DROP, KEEP, grab, mapper, mapping_context
class TestMapperBasic:
- """Test basic Mapper functionality as dict->dict transformer."""
+ """Test basic @mapper decorator functionality."""
- def test_simple_dict_mapping(self) -> None:
- """Test basic Mapper with dict mapping."""
- mapping = {
- "patient_id": p.get("data.patient.id"),
- "is_active": p.get("data.patient.active"),
- }
- mapper: Mapper[Any] = Mapper(mapping)
+ def test_simple_mapping(self):
+ """Test basic mapper with grab."""
- input_data = {
- "data": {"patient": {"id": "abc123", "active": True}, "other": "value"}
- }
+        @mapper
+        def patient_summary(d):
+            return {
+                "patient_id": grab(d, "data.patient.id"),
+                "is_active": grab(d, "data.patient.active"),
+            }
- result = mapper(input_data)
+ source = {"data": {"patient": {"id": "p-123", "active": True}}}
+ result = patient_summary(source)
- assert isinstance(result, dict)
- assert result["patient_id"] == "abc123" # type: ignore[index]
- assert result["is_active"] is True # type: ignore[index]
+ assert result == {"patient_id": "p-123", "is_active": True}
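+
+    def test_returns_plain_dict(self):
+        """Sketch: assumes a @mapper-decorated function stays an ordinary
+        callable returning a plain dict (the equality asserts above imply it)."""
+
+        @mapper
+        def passthrough(d):
+            return {"id": grab(d, "id")}
+
+        result = passthrough({"id": "x"})
+        assert isinstance(result, dict)
+        assert result == {"id": "x"}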
- def test_callable_mapping(self) -> None:
- """Test Mapper with callable mapping values."""
- mapping = {
- "patient_id": lambda data: get(data, "data.patient.id"),
- "is_active": lambda data: get(data, "data.patient.active"),
- "status": lambda data: "processed",
- }
+ def test_nested_output(self):
+ """Test mapper with nested output structure."""
- mapper: Mapper[Any] = Mapper(mapping)
+        @mapper
+        def with_nested(d):
+            return {
+                "patient": {
+                    "id": grab(d, "data.id"),
+                    "name": grab(d, "data.name"),
+                },
+                "meta": {"version": "1.0"},
+            }
- input_data = {
- "data": {"patient": {"id": "abc123", "active": True}, "other": "value"}
- }
+ source = {"data": {"id": "123", "name": "John"}}
+ result = with_nested(source)
- result = mapper(input_data)
-
- assert isinstance(result, dict)
- assert result["patient_id"] == "abc123" # type: ignore[index]
- assert result["is_active"] is True # type: ignore[index]
- assert result["status"] == "processed" # type: ignore[index]
-
- def test_callable_mapping_with_partials(self) -> None:
- """Test Mapper with callable mapping values using simplified partials API."""
- # Use simplified partials API
- get_first = p.get("firstName")
- get_last = p.get("lastName")
-
- # Status mapping function
- def status_transform(data: dict) -> str:
- status_map = {"active": "✓ Active", "inactive": "✗ Inactive"}
- status_value = get(data, "status", default="unknown")
- return status_map.get(status_value, "Unknown")
-
- # Name concatenation function
- def full_name_transform(data: dict) -> str:
- first_name = get_first(data) or ""
- last_name = get_last(data) or ""
- return f"{first_name} {last_name}".strip()
-
- # Codes joining function
- def codes_transform(data: dict) -> str:
- codes = get(data, "codes", default=[])
- return ", ".join(str(c) for c in codes) if codes else ""
-
- # Backup name function
- def backup_name_transform(data: dict) -> str:
- return get(data, "nickname") or get(data, "firstName") or "Guest"
-
- mapping = {
- "name": full_name_transform,
- "status_display": status_transform,
- "all_codes": codes_transform,
- "city": p.get("address") | p.split("|") | p.at_index(1),
- "backup_name": backup_name_transform,
+ assert result == {
+ "patient": {"id": "123", "name": "John"},
+ "meta": {"version": "1.0"},
}
- mapper: Mapper[Any] = Mapper(mapping)
+ def test_static_values(self):
+ """Test mapper with static values."""
- input_data = {
- "firstName": "John",
- "lastName": "Doe",
- "status": "active",
- "codes": ["A", "B", "C"],
- "address": "123 Main St|Boston|02101",
- }
+        @mapper
+        def with_static(d):
+            return {
+                "version": "2.0",
+                "type": "patient",
+                "id": grab(d, "id"),
+            }
- result = mapper(input_data)
+        result = with_static({"id": "123"})
+        assert result == {"version": "2.0", "type": "patient", "id": "123"}
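+
+    def test_static_non_string_values(self):
+        """Sketch: assumes non-empty static values of any type pass through
+        unchanged; only empties and DROP sentinels are post-processed."""
+
+        @mapper
+        def with_types(d):
+            return {"count": 42, "ratio": 0.5, "flag": True}
+
+        assert with_types({}) == {"count": 42, "ratio": 0.5, "flag": True}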
- assert isinstance(result, dict)
- assert result["name"] == "John Doe" # type: ignore[index]
- assert result["status_display"] == "✓ Active" # type: ignore[index]
- assert result["all_codes"] == "A, B, C" # type: ignore[index]
- assert result["city"] == "Boston" # type: ignore[index]
- assert result["backup_name"] == "John" # type: ignore[index]
+ def test_composable_mappers(self):
+ """Test that mappers can be composed."""
+        @mapper
+        def first_transform(d):
+            return {"data": {"id": grab(d, "raw_id")}}
-class TestMapperMapping:
- """Test Mapper mapping functionality."""
+        @mapper
+        def second_transform(d):
+            return {"patient_id": grab(d, "data.id")}
- def test_mapper_with_invalid_mapping(self) -> None:
- """Test that Mapper rejects invalid mapping types."""
- with pytest.raises(TypeError):
- Mapper(123) # type: ignore # Invalid type
+ source = {"raw_id": "123"}
+ result = second_transform(first_transform(source))
- with pytest.raises(TypeError):
- Mapper("not a mapping") # type: ignore # Invalid type
+ assert result == {"patient_id": "123"}
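+
+    def test_pipeline_helper(self):
+        """Sketch: decorated mappers are plain callables, so they compose with
+        functools.reduce; `pipeline` is a local helper, not chidian API."""
+        from functools import reduce
+
+        @mapper
+        def extract(d):
+            return {"data": {"id": grab(d, "raw_id")}}
+
+        @mapper
+        def shape(d):
+            return {"patient_id": grab(d, "data.id")}
+
+        def pipeline(*fns):
+            # Feed each mapper's output into the next one.
+            return lambda d: reduce(lambda acc, fn: fn(acc), fns, d)
+
+        assert pipeline(extract, shape)({"raw_id": "123"}) == {"patient_id": "123"}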
- with pytest.raises(TypeError):
- Mapper(lambda x: x) # type: ignore # Callable not allowed
- def test_mapper_with_dict_mapping_containing_callable(self) -> None:
- """Test Mapper with dict mapping containing callable values."""
- mapping = {
- "simple": p.get("path.to.value"),
- "transformed": lambda data: data.get("value", "").upper(),
- "partial": p.get("nested.value") | p.upper,
- }
- mapper: Mapper[Any] = Mapper(mapping)
+class TestDrop:
+ """Test DROP sentinel functionality."""
- input_data = {
- "path": {"to": {"value": "hello"}},
- "value": "world",
- "nested": {"value": "test"},
- }
+ def test_drop_this_object_in_dict(self):
+ """Test DROP.THIS_OBJECT removes the containing dict."""
+
+        @mapper
+        def with_drop(d):
+            return {
+                "kept": {"id": grab(d, "id")},
+                "dropped": {"trigger": DROP.THIS_OBJECT, "ignored": "x"},
+            }
+
+        result = with_drop({"id": "123"})
+        assert result == {"kept": {"id": "123"}}
+
+ def test_drop_this_object_in_list(self):
+ """Test DROP.THIS_OBJECT in list removes just that item."""
- result = mapper(input_data)
+        @mapper
+        def filter_list(d):
+            return {
+                "tags": [
+                    "first",
+                    DROP.THIS_OBJECT,
+                    "third",
+                ]
+            }
+
+        result = filter_list({})
+        assert result == {"tags": ["first", "third"]}
+
+ def test_drop_nested_dict_in_list(self):
+ """Test DROP.THIS_OBJECT in nested dict removes the dict from list."""
+
+        @mapper
+        def filter_list(d):
+            return {
+                "items": [
+                    {"keep": "me"},
+                    {"drop": DROP.THIS_OBJECT, "ignored": "x"},
+                    {"also": "kept"},
+                ]
+            }
+
+        result = filter_list({})
+        assert result == {"items": [{"keep": "me"}, {"also": "kept"}]}
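+
+    def test_drop_in_comprehension(self):
+        """Sketch: emitting DROP.THIS_OBJECT from a list comprehension filters
+        source items; assumes per-item drops behave as in the tests above."""
+
+        @mapper
+        def active_names(d):
+            return {
+                "names": [
+                    u["name"] if u["active"] else DROP.THIS_OBJECT
+                    for u in grab(d, "users")
+                ]
+            }
+
+        source = {
+            "users": [
+                {"name": "a", "active": True},
+                {"name": "b", "active": False},
+            ]
+        }
+        assert active_names(source) == {"names": ["a"]}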
- assert result["simple"] == "hello" # type: ignore[index]
- assert result["transformed"] == "WORLD" # type: ignore[index]
- assert result["partial"] == "TEST" # type: ignore[index]
+ def test_drop_parent(self):
+ """Test DROP.PARENT removes the parent container."""
- def test_mapper_error_handling(self) -> None:
- """Test Mapper error handling."""
+        @mapper
+        def with_parent_drop(d):
+            return {
+                "kept": {"id": "123"},
+                "items": [
+                    {"trigger": DROP.PARENT},
+                    {"ignored": "never seen"},
+                ],
+            }
- def failing_mapper(data: dict) -> str:
- raise ValueError("Test error")
+        result = with_parent_drop({})
+        assert result == {"kept": {"id": "123"}}
- mapping: dict[str, Any] = {"result": failing_mapper}
- mapper: Mapper[Any] = Mapper(mapping)
+ def test_drop_grandparent(self):
+ """Test DROP.GRANDPARENT removes two levels up."""
- with pytest.raises(ValueError, match="Test error"):
- mapper({"test": "data"})
+        @mapper
+        def with_grandparent_drop(d):
+            return {
+                "outer": {
+                    "middle": {"trigger": DROP.GRANDPARENT},
+                }
+            }
- def test_mapper_with_empty_mapping(self) -> None:
- """Test Mapper with empty mapping."""
- mapper: Mapper[Any] = Mapper({})
- result = mapper({"input": "data"})
+        result = with_grandparent_drop({})
assert result == {}
- def test_mapper_with_constant_values(self) -> None:
- """Test Mapper with constant string and other values."""
- mapping = {
- "constant_string": "Hello, World!",
- "constant_number": 42,
- "constant_bool": True,
- "constant_none": None,
- "dynamic_value": p.get("input.value"),
- }
- mapper: Mapper[Any] = Mapper(mapping)
+ def test_drop_conditional(self):
+ """Test conditional DROP based on data."""
+
+        @mapper
+        def conditional_drop(d):
+            verified = grab(d, "verified")
+            return {
+                "id": grab(d, "id"),
+                # Use a nested dict so DROP.THIS_OBJECT removes only the "sensitive" key
+                "sensitive": {
+                    "data": DROP.THIS_OBJECT if not verified else grab(d, "data"),
+                },
+            }
- input_data = {"input": {"value": "dynamic"}, "ignored": "data"}
- result = mapper(input_data)
+        # Not verified - the sensitive dict is dropped (it contains a DROP)
+        result = conditional_drop({"id": "123", "verified": False, "data": "secret"})
+        assert result == {"id": "123"}
- assert result["constant_string"] == "Hello, World!" # type: ignore[index]
- assert result["constant_number"] == 42 # type: ignore[index]
- assert result["constant_bool"] is True # type: ignore[index]
- assert result["constant_none"] is None # type: ignore[index]
- assert result["dynamic_value"] == "dynamic" # type: ignore[index]
+        # Verified - sensitive is kept
+        result = conditional_drop({"id": "123", "verified": True, "data": "secret"})
+        assert result == {"id": "123", "sensitive": {"data": "secret"}}
- def test_mapper_preserves_dict_structure(self) -> None:
- """Test that Mapper preserves nested dict structure in results."""
- # Note: Mapper only supports flat dictionaries, not nested output structures
- # To achieve nested results, use callables that return nested dicts
- def nested_transform(data: dict) -> dict:
- return {"deep": get(data, "another.path"), "value": "direct_value"}
+class TestKeep:
+ """Test KEEP wrapper functionality."""
- mapping = {
- "flat": p.get("simple.value"),
- "nested": nested_transform,
- }
+ def test_keep_empty_dict(self):
+ """Test KEEP preserves empty dict."""
- mapper: Mapper[Any] = Mapper(mapping)
+        @mapper
+        def with_keep(d):
+            return {
+                "explicit_empty": KEEP({}),
+                "implicit_empty": {},
+            }
- input_data = {"simple": {"value": "test"}, "another": {"path": "nested_test"}}
+        result = with_keep({})
+        assert result == {"explicit_empty": {}}
- result = mapper(input_data)
+ def test_keep_none(self):
+ """Test KEEP preserves None."""
- assert result["flat"] == "test" # type: ignore[index]
- assert result["nested"]["deep"] == "nested_test" # type: ignore[index]
- assert result["nested"]["value"] == "direct_value" # type: ignore[index]
+        @mapper
+        def with_keep(d):
+            return {
+                "explicit_none": KEEP(None),
+                "implicit_none": None,
+            }
+
+        result = with_keep({})
+        assert result == {"explicit_none": None}
-class TestMapperCalling:
- """Test Mapper calling interface."""
+ def test_keep_empty_list(self):
+ """Test KEEP preserves empty list."""
- def test_mapper_callable_interface(self) -> None:
- """Test that Mapper can be called directly."""
- mapping = {"output": p.get("input")}
- mapper: Mapper[Any] = Mapper(mapping)
+        @mapper
+        def with_keep(d):
+            return {
+                "explicit_empty": KEEP([]),
+                "implicit_empty": [],
+            }
- input_data = {"input": "test_value"}
- result = mapper(input_data)
+        result = with_keep({})
+        assert result == {"explicit_empty": []}
- assert result["output"] == "test_value" # type: ignore[index]
+ def test_keep_empty_string(self):
+ """Test KEEP preserves empty string."""
- def test_mapper_callable_only(self) -> None:
- """Test that Mapper only has __call__ method (no forward method)."""
- mapping = {"output": p.get("input")}
- mapper: Mapper[Any] = Mapper(mapping)
+        @mapper
+        def with_keep(d):
+            return {
+                "explicit_empty": KEEP(""),
+                "implicit_empty": "",
+            }
- input_data = {"input": "test_value"}
+        result = with_keep({})
+        assert result == {"explicit_empty": ""}
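+
+    def test_keep_non_empty_value(self):
+        """Sketch: assumes KEEP is a no-op wrapper around values that would
+        survive anyway, so it can be applied unconditionally."""
+
+        @mapper
+        def with_keep(d):
+            return {"wrapped": KEEP("value")}
+
+        assert with_keep({}) == {"wrapped": "value"}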
- # Should work with __call__
- result = mapper(input_data)
- assert result == {"output": "test_value"}
- # Should not have forward method
- assert not hasattr(mapper, "forward")
+class TestEmptyRemoval:
+ """Test automatic empty value removal."""
- def test_mapper_no_reverse(self) -> None:
- """Test that Mapper doesn't support reverse operations."""
- mapping = {"output": p.get("input")}
- mapper: Mapper[Any] = Mapper(mapping)
+ def test_empty_values_removed_by_default(self):
+ """Test empty values are removed by default."""
- # Should not have reverse method
- assert not hasattr(mapper, "reverse")
+        @mapper
+        def with_empties(d):
+            return {
+                "kept": "value",
+                "empty_dict": {},
+                "empty_list": [],
+                "empty_string": "",
+                "none_value": None,
+            }
+
+        result = with_empties({})
+        assert result == {"kept": "value"}
+
+ def test_remove_empty_false(self):
+ """Test remove_empty=False keeps all values."""
+
+        @mapper(remove_empty=False)
+        def keep_empties(d):
+            return {
+                "kept": "value",
+                "empty_dict": {},
+                "empty_list": [],
+                "none_value": None,
+            }
+
+        result = keep_empties({})
+        assert result == {
+            "kept": "value",
+            "empty_dict": {},
+            "empty_list": [],
+            "none_value": None,
+        }
- # Should not have can_reverse method
- assert not hasattr(mapper, "can_reverse")
+class TestMappingContext:
+ """Test mapping_context strict mode."""
-class TestMapperNewSyntax:
- """Test Mapper with new ergonomic syntax from README."""
+ def test_normal_mode_missing_keys(self):
+ """Test missing keys return None in normal mode."""
- def test_readme_example(self) -> None:
- """Test the exact example from README with new syntax."""
- from pydantic import BaseModel
+        @mapper
+        def test_mapping(d):
+            return {
+                "exists": grab(d, "data.id"),
+                "missing": grab(d, "does.not.exist"),
+            }
- source_data = {
- "name": {
- "first": "Gandalf",
- "given": ["the", "Grey"],
- "suffix": None,
- },
- "address": {
- "street": ["Bag End", "Hobbiton"],
- "city": "The Shire",
- "postal_code": "ME001",
- "country": "Middle Earth",
- },
+        result = test_mapping({"data": {"id": "123"}})
+        assert result == {"exists": "123"}  # 'missing' removed as None
+
+ def test_strict_mode_raises_on_missing(self):
+ """Test strict mode raises on missing keys."""
+
+        @mapper
+        def test_mapping(d):
+            return {
+                "exists": grab(d, "data.id"),
+                "missing": grab(d, "does.not.exist"),
+            }
+
+        with pytest.raises(KeyError):
+            with mapping_context(strict=True):
+                test_mapping({"data": {"id": "123"}})
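+
+    def test_strict_mode_passes_when_paths_exist(self):
+        """Sketch: assumes strict mode only changes missing-path handling, so
+        a mapping whose grabs all resolve behaves as in normal mode."""
+
+        @mapper
+        def complete_mapping(d):
+            return {"id": grab(d, "data.id")}
+
+        with mapping_context(strict=True):
+            assert complete_mapping({"data": {"id": "123"}}) == {"id": "123"}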
+
+ def test_strict_mode_allows_existing_none(self):
+ """Test strict mode allows keys that exist with None value."""
+
+        @mapper(remove_empty=False)
+        def test_mapping(d):
+            return {
+                "has_none": grab(d, "value"),
+            }
+
+        source = {"value": None}
+        with mapping_context(strict=True):
+            result = test_mapping(source)
+
+        assert result == {"has_none": None}
+
+
+class TestReadmeExamples:
+ """Test examples from the README."""
+
+ def test_quick_start_example(self):
+ """Test the quick start example."""
+
+        @mapper
+        def patient_summary(d):
+            return {
+                "patient_id": grab(d, "data.patient.id"),
+                "is_active": grab(d, "data.patient.active"),
+                "latest_visit": grab(d, "data.visits[0].date"),
+            }
+
+        source = {
+            "data": {
+                "patient": {"id": "p-123", "active": True},
+                "visits": [
+                    {"date": "2024-01-15", "type": "checkup"},
+                    {"date": "2024-02-20", "type": "followup"},
+                ],
+            }
+        }
+
+        result = patient_summary(source)
+        assert result == {
+            "patient_id": "p-123",
+            "is_active": True,
+            "latest_visit": "2024-01-15",
}
- class SourceSchema(BaseModel):
- name: dict
- address: dict
-
- class TargetSchema(BaseModel):
- full_name: str
- address: str
-
- person_mapping = Mapper(
- {
- "full_name": p.get(
- [
- "name.first",
- "name.given[*]",
- "name.suffix",
- ]
- ).join(" ", flatten=True),
- "address": p.get(
- [
- "address.street[*]",
- "address.city",
- "address.postal_code",
- "address.country",
- ]
- ).join("\n", flatten=True),
- },
- min_input_schemas=[SourceSchema],
- output_schema=TargetSchema,
- )
-
- source_obj = SourceSchema(**source_data)
- result = person_mapping(source_obj)
-
- assert isinstance(result, TargetSchema)
- assert result.full_name == "Gandalf the Grey"
- assert result.address == "Bag End\nHobbiton\nThe Shire\nME001\nMiddle Earth"
-
-
-class TestMapperWithValidation:
- """Test Mapper functionality with validation modes."""
-
- def test_mapper_backward_compatibility(self) -> None:
- """Test that Mapper maintains backward compatibility with dict."""
- # Old-style dict mapping should still work
- mapper: Mapper[Any] = Mapper({"output": p.get("input")})
- result = mapper({"input": "test"})
- assert result == {"output": "test"}
-
- def test_mapper_with_data_mapping_strict(self) -> None:
- """Test Mapper with schema validation in strict mode."""
-
- class InputModel(BaseModel):
- name: str
- age: int
-
- class OutputModel(BaseModel):
- display_name: str
- age_group: str
-
- mapper = Mapper(
- transformations={
- "display_name": p.get("name") | p.upper,
- "age_group": lambda d: "adult" if d.get("age", 0) >= 18 else "child",
- },
- min_input_schemas=[InputModel],
- output_schema=OutputModel,
- mode=ValidationMode.STRICT,
- )
-
- # Valid input
- result = mapper({"name": "John", "age": 25})
- assert isinstance(result, OutputModel)
- assert result.display_name == "JOHN"
- assert result.age_group == "adult"
-
- # Invalid input no longer raises (input validation removed)
- # but output validation will fail due to missing field transformation
- result2 = mapper({"name": "John", "age": 10})
- assert isinstance(result2, OutputModel)
- assert result2.age_group == "child"
-
- def test_mapper_with_data_mapping_flexible(self) -> None:
- """Test Mapper with schema validation in flexible mode."""
-
- class InputModel(BaseModel):
- name: str
- age: int
-
- class OutputModel(BaseModel):
- display_name: str
- age_group: str
-
- mapper = Mapper(
- transformations={
- "display_name": p.get("name") | p.upper,
- "age_group": lambda d: "adult" if d.get("age", 0) >= 18 else "child",
- },
- min_input_schemas=[InputModel],
- output_schema=OutputModel,
- mode=ValidationMode.FLEXIBLE,
- )
-
- # Valid input
- result = mapper({"name": "John", "age": 25})
- assert isinstance(result, MapperResult)
- assert not result.has_issues
- assert isinstance(result.data, OutputModel)
- assert result.data.display_name == "JOHN"
-
- # Invalid input no longer causes input validation issues
- # but missing transformation data causes output issues
- result = mapper({"name": "John"}) # Missing age
- assert isinstance(result, MapperResult)
- # Since age is missing, age_group becomes "child" (default 0 < 18)
- assert isinstance(result.data, OutputModel)
- assert result.data.age_group == "child"
-
- def test_mapper_auto_mode(self) -> None:
- """Test Mapper auto mode selection."""
- # With schemas -> strict
- mapper_with_schemas: Mapper[BaseModel] = Mapper(
- transformations={"out": p.get("in")},
- min_input_schemas=[BaseModel],
- output_schema=BaseModel,
- )
- assert mapper_with_schemas.mode == ValidationMode.STRICT
-
- # Without schemas -> flexible
- mapper_no_schemas: Mapper[Any] = Mapper(transformations={"out": p.get("in")})
- assert mapper_no_schemas.mode == ValidationMode.FLEXIBLE
-
- def test_mapper_with_pure_transformation(self) -> None:
- """Test Mapper without schemas."""
- mapper: Mapper[Any] = Mapper(
- transformations={
- "id": p.get("patient.id"),
- "name": p.get("patient.name"),
- "provider": p.get("provider.name", default="Unknown"),
- },
- mode=ValidationMode.FLEXIBLE,
- )
-
- result = mapper(
- {
- "patient": {"id": "123", "name": "John"},
- "provider": {"name": "Dr. Smith"},
+ def test_normalize_user_example(self):
+ """Test the normalize_user example pattern."""
+
+ @mapper
+ def normalize_user(d):
+ return {
+ "version": "2.0",
+ "name": grab(d, "user.name"),
+ "address": {
+ "city": grab(d, "location.city"),
+ "zip": grab(d, "location.postal"),
+ },
}
- )
- # Without schemas, returns dict directly
- assert isinstance(result, dict)
- assert result["id"] == "123"
- assert result["name"] == "John"
- assert result["provider"] == "Dr. Smith"
+        source = {
+            "user": {"name": "John"},
+            "location": {"city": "Boston", "postal": "02101"},
+        }
+
+        result = normalize_user(source)
+        assert result == {
+            "version": "2.0",
+            "name": "John",
+            "address": {"city": "Boston", "zip": "02101"},
+        }
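+
+
+class TestFallbackSketch:
+    """Sketch: grab returns None for missing paths in normal mode, so plain
+    Python `or` gives an inline default; nothing here is chidian-specific."""
+
+    def test_or_fallback(self):
+        @mapper
+        def with_fallback(d):
+            return {"name": grab(d, "user.name") or "anonymous"}
+
+        assert with_fallback({}) == {"name": "anonymous"}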
diff --git a/tests/test_partials.py b/tests/test_partials.py
deleted file mode 100644
index e059088..0000000
--- a/tests/test_partials.py
+++ /dev/null
@@ -1,327 +0,0 @@
-import pytest
-
-import chidian.partials as p
-
-
-def test_basic_arithmetic():
- """Test basic arithmetic operations."""
- n = 100
- assert p.add(1)(n) == n + 1
- assert p.subtract(1)(n) == n - 1
- assert p.multiply(10)(n) == n * 10
- assert p.divide(10)(n) == n / 10
-
- # Test with lists
- lst = [1, 2, 3]
- assert p.add([4])(lst) == lst + [4]
-
-
-def test_boolean_checks():
- """Test basic boolean operations."""
- value = {"a": "b", "c": "d"}
-
- assert p.equals(value)(value) is True
- assert p.equals("test")("test") is True
- assert p.equals("test")("other") is False
-
- assert p.contains("a")(value) is True
- assert p.contains("z")(value) is False
-
- assert p.isinstance_of(dict)(value) is True
- assert p.isinstance_of(str)("test") is True
- assert p.isinstance_of(int)("test") is False
-
-
-def test_basic_chainable_fn():
- """Test basic ChainableFunction functionality."""
- # Single operations
- assert p.upper("hello") == "HELLO"
- assert p.lower("WORLD") == "world"
- assert p.strip(" test ") == "test"
-
-
-def test_function_chain_creation():
- """Test creating FunctionChain with | operator."""
- # ChainableFunction | ChainableFunction
- chain = p.upper | p.replace(" ", "_")
- assert isinstance(chain, p.FunctionChain)
- assert len(chain) == 2
- assert chain("hello world") == "HELLO_WORLD"
-
- # Regular function | ChainableFunction
- chain2 = str.strip | p.upper
- assert chain2(" test ") == "TEST"
-
-
-def test_complex_chains():
- """Test complex function chains."""
- # Multi-step string transformation
- normalize = p.strip | p.lower | p.replace(" ", "_")
- assert normalize(" Hello World ") == "hello_world"
-
- # Array operations
- get_last_word = p.split() | p.last | p.upper
- assert get_last_word("hello beautiful world") == "WORLD"
-
- # Mixed operations
- extract_number = p.split("-") | p.last | p.to_int | p.multiply(2)
- assert extract_number("item-42") == 84
-
-
-def test_string_operations():
- """Test string manipulation functions."""
- # Split with custom separator
- split_comma = p.split(",")
- assert split_comma("a,b,c") == ["a", "b", "c"]
-
- # Replace with parameters
- sanitize = p.replace("&", "and") | p.replace("@", "at")
- assert sanitize("tom & jerry @ home") == "tom and jerry at home"
-
- # Join
- join_with_dash = p.join("-")
- assert join_with_dash(["a", "b", "c"]) == "a-b-c"
-
-
-def test_array_operations():
- """Test array/list operations."""
- data = ["first", "second", "third", "fourth"]
-
- assert p.first(data) == "first"
- assert p.last(data) == "fourth"
- assert p.length(data) == 4
- assert p.at_index(2)(data) == "third"
- assert p.slice_range(1, 3)(data) == ["second", "third"]
-
- # Empty list handling
- assert p.first([]) is None
- assert p.last([]) is None
- assert p.at_index(10)([1, 2, 3]) is None
-
-
-def test_type_conversions():
- """Test type conversion chains."""
- # String to number
- parse_int = p.strip | p.to_int
- assert parse_int(" 42 ") == 42
-
- # Number to string
- format_num = p.to_float | p.round_to(2) | p.to_str
- assert format_num("19.999") == "20.0"
-
- # Boolean conversion
- assert p.to_bool("") is False
- assert p.to_bool("text") is True
- assert p.to_bool(0) is False
- assert p.to_bool(1) is True
-
-
-def test_get_operations():
- """Test get operations for data access."""
- data = {
- "user": {
- "name": "John",
- "age": 30,
- "emails": ["john@example.com", "john.doe@work.com"],
- }
- }
-
- # Basic get
- get_name = p.get("user.name")
- assert get_name(data) == "John"
-
- # Get with default
- get_missing = p.get("user.missing", default="N/A")
- assert get_missing(data) == "N/A"
-
- # Get from array
- get_email = p.get("user.emails[0]")
- assert get_email(data) == "john@example.com"
-
- # Chain with get
- get_upper_name = p.get("user.name") | p.upper
- assert get_upper_name(data) == "JOHN"
-
-
-def test_default_handling():
- """Test default value handling."""
- # Replace None with default
- safe_upper = p.default_to("") | p.upper
- assert safe_upper(None) == ""
- assert safe_upper("hello") == "HELLO"
-
- # Chain with null safety
- safe_process = p.default_to("0") | p.to_int | p.add(10)
- assert safe_process(None) == 10
- assert safe_process("5") == 15
-
-
-def test_numeric_operations():
- """Test numeric operations and rounding."""
- # Round to decimals
- round_2 = p.round_to(2)
- assert round_2(3.14159) == 3.14
-
- # Chain with arithmetic
- calculate = p.to_int | p.add(10) | p.multiply(2)
- assert calculate("5") == 30
-
-
-def test_chain_composition():
- """Test composing multiple chains."""
- # Create reusable chains
- normalize_text = p.strip | p.lower
-
- # Compose chains
- process_input = normalize_text | p.replace(" ", "_") | p.upper
- assert process_input(" Hello World ") == "HELLO_WORLD"
-
- # Chain of chains
- chain1 = p.upper | p.replace("A", "X")
- chain2 = p.replace("E", "Y") | p.lower
- combined = chain1 | chain2
- assert combined("apple") == "xpply"
-
-
-def test_error_propagation():
- """Test that errors propagate through chains."""
- chain = p.to_int | p.multiply(2)
-
- with pytest.raises(ValueError):
- chain("not a number")
-
- # Safe handling with default - first convert to "0" then to int
- safe_chain = p.default_to("0") | p.to_int | p.multiply(2)
- assert safe_chain(None) == 0
- assert safe_chain("42") == 84
-
-
-def test_function_chain_repr():
- """Test string representation of chains."""
- chain = p.upper | p.strip | p.replace(" ", "_")
- repr_str = repr(chain)
- assert "upper" in repr_str
- assert "strip" in repr_str
- assert "|" in repr_str
-
-
-def test_real_world_usage():
- """Test realistic data transformation scenarios."""
- # Clean and format user input
- clean_input = p.strip | p.lower | p.replace(" ", "_")
- assert clean_input(" User Name ") == "user_name"
-
- # Process numeric data
- process_score = p.to_float | p.round_to(1) | p.multiply(100) | p.to_int
- assert process_score("0.856") == 90
-
- # Extract and format
- extract_domain = p.split("@") | p.last | p.upper
- assert extract_domain("user@example.com") == "EXAMPLE.COM"
-
- # Complex nested data access
- data = {
- "users": [
- {"name": " john doe ", "score": "85.7"},
- {"name": "jane smith", "score": "92.3"},
- ]
- }
-
- get_first_user_score = (
- p.get("users[0].score") | p.to_float | p.round_to(0) | p.to_int
- )
- assert get_first_user_score(data) == 86
-
-
-def test_multi_path_get():
- """Test multi-path get() functionality."""
- data = {
- "name": {
- "first": "John",
- "middle": ["Robert", "James"],
- "last": "Doe",
- }
- }
-
- # Multi-path extraction returns list of all values
- get_names = p.get(["name.first", "name.middle[*]", "name.last"])
- result = get_names(data)
- assert result == ["John", "Robert", "James", "Doe"]
-
-
-def test_multi_path_get_with_join():
- """Test multi-path get() with join chaining."""
- data = {
- "name": {
- "first": "Gandalf",
- "given": ["the", "Grey"],
- "suffix": None,
- }
- }
-
- # Join without flatten - nested lists stay nested
- get_name = p.get(["name.first", "name.given", "name.suffix"]).join(" ")
- result = get_name(data)
- # Result: "Gandalf ['the', 'Grey']"
- assert "Gandalf" in result
-
- # Join with flatten - nested lists are flattened and None filtered
- get_name_flat = p.get(["name.first", "name.given[*]", "name.suffix"]).join(
- " ", flatten=True
- )
- result = get_name_flat(data)
- assert result == "Gandalf the Grey"
-
-
-def test_method_chaining_syntax():
- """Test the new . method chaining syntax."""
- data = {"user": {"email": "JOHN.DOE@EXAMPLE.COM", "tags": ["admin", "user"]}}
-
- # Old pipe syntax still works
- process_email_pipe = p.get("user.email") | p.lower | p.split("@") | p.first
- assert process_email_pipe(data) == "john.doe"
-
- # New dot syntax
- process_email_dot = p.get("user.email").lower().split("@").first()
- assert process_email_dot(data) == "john.doe"
-
- # Chaining with parameters
- process_tags = p.get("user.tags").join(", ")
- assert process_tags(data) == "admin, user"
-
-
-def test_join_flatten_nested_lists():
- """Test join() with flatten parameter for nested lists."""
- # Without flatten
- data = ["a", ["b", "c"], "d", None]
- result = p.join(", ")(data)
- assert "a" in result and "d" in result
-
- # With flatten
- result_flat = p.join(", ", flatten=True)(data)
- assert result_flat == "a, b, c, d"
-
-
-def test_multi_path_with_address():
- """Test multi-path example from README."""
- data = {
- "address": {
- "street": ["123 Main St", "Apt 4"],
- "city": "Boston",
- "postal_code": "02101",
- "country": "USA",
- }
- }
-
- get_address = p.get(
- [
- "address.street[*]",
- "address.city",
- "address.postal_code",
- "address.country",
- ]
- ).join("\n", flatten=True)
-
- result = get_address(data)
- expected = "123 Main St\nApt 4\nBoston\n02101\nUSA"
- assert result == expected
diff --git a/tests/test_property_based.py b/tests/test_property_based.py
deleted file mode 100644
index 0df1640..0000000
--- a/tests/test_property_based.py
+++ /dev/null
@@ -1,242 +0,0 @@
-"""Property-based tests for core chidian functionality."""
-
-from hypothesis import given
-from hypothesis import strategies as st
-
-import chidian.partials as p
-from chidian import get
-
-
-# Custom strategies for valid paths
-@st.composite
-def valid_path_strategy(draw):
- """Generate valid path strings for chidian."""
- # Simple paths like "field", "field.subfield", "field[0]", etc.
- path_parts = draw(
- st.lists(
- st.text(
- alphabet=st.characters(whitelist_categories=("Ll", "Lu", "Nd", "_")),
- min_size=1,
- max_size=10,
- ),
- min_size=1,
- max_size=3,
- )
- )
- return ".".join(part for part in path_parts if part)
-
-
-@st.composite
-def data_with_paths(draw):
- """Generate data dictionary with corresponding valid paths."""
- # Create simple field names
- field_names = draw(
- st.lists(
- st.text(
- alphabet=st.characters(whitelist_categories=("Ll", "Lu")),
- min_size=1,
- max_size=8,
- ),
- min_size=1,
- max_size=5,
- )
- )
-
- # Create data dict
- data = {}
- paths = []
-
- for field in field_names:
- if field: # Ensure field is not empty
- data[field] = draw(
- st.one_of(
- st.text(min_size=0, max_size=20),
- st.integers(),
- st.lists(st.text(min_size=0, max_size=10), max_size=3),
- )
- )
- paths.append(field)
-
- return data, paths
-
-
-class TestPropertyBasedCore:
- """Property-based tests for core functionality."""
-
- @given(data_with_paths())
- def test_get_always_returns_value_or_none(self, data_and_paths):
- """Test that get always returns a value or None, never crashes."""
- data, paths = data_and_paths
-
- # Test with valid paths
- for path in paths:
- result = get(data, path)
- # Should either return a value from data or None/default
- assert result is None or isinstance(
- result, (int, str, list, dict, bool, float)
- )
-
- # Test with invalid path - should not crash
- result = get(data, "nonexistent.path")
- assert result is None
-
- @given(st.text(alphabet=st.characters(blacklist_categories=["Z"]), max_size=50))
- def test_string_operations_property(self, text_value):
- """Test that string operations are consistent."""
- # Test upper/lower are reversible
- upper_result = p.upper(text_value)
- lower_result = p.lower(text_value)
-
- assert isinstance(upper_result, str)
- assert isinstance(lower_result, str)
-
- # Test strip functionality (strip removes all surrounding whitespace)
- if text_value:
- padded = f" {text_value} "
- stripped = p.strip(padded)
- assert isinstance(stripped, str)
- # strip removes leading/trailing whitespace from the original value too
- assert stripped == text_value.strip()
-
- @given(st.integers(min_value=1, max_value=100))
- def test_arithmetic_operations_property(self, value):
- """Test that arithmetic operations are consistent."""
- # Test basic arithmetic properties
- add_result = p.add(10)(value)
- assert add_result == value + 10
-
- multiply_result = p.multiply(2)(value)
- assert multiply_result == value * 2
-
- # Test chain consistency with ChainableFunction
- add_chainable = p.ChainableFunction(p.add(5))
- multiply_chainable = p.ChainableFunction(p.multiply(2))
- chain_result = (add_chainable | multiply_chainable)(value)
- assert chain_result == (value + 5) * 2
-
- @given(st.lists(st.text(min_size=1, max_size=10), min_size=1, max_size=5))
- def test_array_operations_property(self, test_list):
- """Test that array operations work consistently."""
- # Test first/last
- first_result = p.first(test_list)
- last_result = p.last(test_list)
-
- assert first_result == test_list[0]
- assert last_result == test_list[-1]
-
- # Test length
- length_result = p.length(test_list)
- assert length_result == len(test_list)
-
- # Test at_index
- if len(test_list) > 2:
- middle_result = p.at_index(1)(test_list)
- assert middle_result == test_list[1]
-
- @given(st.dictionaries(st.text(max_size=20), st.text(max_size=20), min_size=1))
- def test_boolean_operations_property(self, test_dict):
- """Test that boolean operations work consistently."""
- if not test_dict:
- return
-
- # Pick a key that exists
- test_key = list(test_dict.keys())[0]
- test_value = test_dict[test_key]
-
- # Test equals
- equals_func = p.equals(test_value)
- assert equals_func(test_value) is True
- assert equals_func("different_value") is False
-
- # Test contains
- contains_func = p.contains(test_key)
- assert contains_func(test_dict) is True
- assert contains_func({}) is False
-
- @given(st.text(max_size=100))
- def test_partials_chaining(self, input_text):
- """Test that partials chaining doesn't crash."""
- # Simple chain that should always work
- try:
- chain = p.strip | p.lower | p.upper
- result = chain(input_text)
- assert isinstance(result, str)
- assert result == input_text.strip().lower().upper()
- except AttributeError:
- # input_text might not be a string in some edge cases
- pass
-
-
-class TestPropertyBasedHelpers:
- """Property-based tests for helper functions."""
-
- @given(st.lists(st.integers(), min_size=1, max_size=10))
- def test_partials_list_operations(self, values):
- """Test list operations in partials."""
- # Test that basic list operations work
- assert p.first(values) == values[0]
- assert p.last(values) == values[-1]
- assert p.length(values) == len(values)
-
- if len(values) > 1:
- assert p.at_index(1)(values) == values[1]
-
- @given(st.text(min_size=1, max_size=50))
- def test_string_partials(self, text):
- """Test string operations."""
- # These should not crash
- assert isinstance(p.upper(text), str)
- assert isinstance(p.lower(text), str)
- assert isinstance(p.strip(text), str)
-
- # Chain them
- result = (p.strip | p.lower | p.upper)(text)
- assert isinstance(result, str)
-
-
-class TestPropertyBasedRobustness:
- """Test that core functions handle edge cases gracefully."""
-
- @given(
- st.dictionaries(
- st.text(),
- st.one_of(st.none(), st.text(), st.integers(), st.lists(st.text())),
- )
- )
- def test_get_robustness(self, data):
- """Test get function with various data types."""
- # Should never crash, regardless of input
- result = get(data, "any.path.here")
- # Result should be None or a valid type
- assert result is None or isinstance(result, (str, int, list, dict, bool, float))
-
- @given(st.text(min_size=1), st.text())
- def test_type_conversion_edge_cases(self, separator, input_value):
- """Test type conversions with various inputs."""
- # Test string conversions are robust
- str_result = p.to_str(input_value)
- assert isinstance(str_result, str)
-
- # Test split doesn't crash
- try:
- split_func = p.split(separator)
- result = split_func(str_result)
- assert isinstance(result, list)
- except (AttributeError, ValueError):
- # Some edge cases may fail, which is acceptable
- pass
-
- @given(st.lists(st.text(min_size=1, max_size=10), max_size=5))
- def test_join_operations(self, text_list):
- """Test join operations with various inputs."""
- # Should not crash even with empty or invalid inputs
- try:
- join_func = p.join(", ")
- result = join_func(text_list)
- assert isinstance(result, str)
- if text_list:
- # If we have content, result should contain it
- assert len(result) >= 0
- except (AttributeError, TypeError):
- # Some combinations may fail, which is acceptable for edge cases
- pass
diff --git a/tests/test_put.py b/tests/test_put.py
deleted file mode 100644
index fccd0f2..0000000
--- a/tests/test_put.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""Consolidated tests for the put function."""
-
-from typing import Any
-
-import pytest
-
-from chidian import put
-
-
-class TestPutBasic:
- """Test basic put operations."""
-
- @pytest.mark.parametrize(
- "path,value,expected",
- [
- ("simple", "value", {"simple": "value"}),
- ("nested.path", "value", {"nested": {"path": "value"}}),
- ("deep.nested.path", "value", {"deep": {"nested": {"path": "value"}}}),
- ],
- )
- def test_simple_paths(self, path: str, value: Any, expected: dict):
- """Test basic dot notation paths."""
- result = put({}, path, value)
- assert result == expected
-
- def test_overwrite_existing(self):
- """Test overwriting existing values."""
- data = {"a": {"b": "old"}}
- result = put(data, "a.b", "new")
- assert result == {"a": {"b": "new"}}
-
- def test_preserve_existing(self):
- """Test that existing data is preserved."""
- data = {"a": {"b": 1, "c": 2}}
- result = put(data, "a.d", 3)
- assert result == {"a": {"b": 1, "c": 2, "d": 3}}
-
-
-class TestPutArrays:
- """Test put operations on arrays."""
-
- @pytest.mark.parametrize(
- "path,value,expected",
- [
- ("arr[0]", "a", {"arr": ["a"]}),
- ("arr[2]", "c", {"arr": [None, None, "c"]}),
- ],
- )
- def test_array_creation(self, path: str, value: Any, expected: dict):
- """Test creating arrays with put."""
- result = put({}, path, value)
- assert result == expected
-
- def test_array_gap_filling(self):
- """Test that array gaps are filled with None."""
- result = put({}, "items[5]", "value")
- assert result == {"items": [None, None, None, None, None, "value"]}
- assert len(result["items"]) == 6
-
- def test_nested_array_paths(self):
- """Test complex nested array paths."""
- result = put({}, "data[0].items[1].value", "test")
- expected = {"data": [{"items": [None, {"value": "test"}]}]}
- assert result == expected
-
- def test_negative_indices(self):
- """Test negative array indices."""
- data = {"arr": [1, 2, 3]}
- result = put(data, "arr[-1]", "changed")
- assert result == {"arr": [1, 2, "changed"]}
-
-
-class TestPutEdgeCases:
- """Test edge cases and special behaviors."""
-
- def test_empty_path(self):
- """Test behavior with empty path."""
- with pytest.raises(ValueError):
- put({}, "", "value")
-
- def test_none_values(self):
- """Test putting None values."""
- result = put({}, "path", None)
- assert result == {"path": None}
-
- def test_complex_values(self):
- """Test putting complex values."""
- complex_value = {"nested": {"data": [1, 2, 3]}}
- result = put({}, "root", complex_value)
- assert result == {"root": complex_value}
-
- @pytest.mark.parametrize(
- "initial,path,value,expected",
- [
- ({"a": 1}, "b", 2, {"a": 1, "b": 2}),
- ({"a": {"b": 1}}, "a.c", 2, {"a": {"b": 1, "c": 2}}),
- ],
- )
- def test_various_updates(self, initial, path, value, expected):
- """Test various update scenarios."""
- result = put(initial, path, value)
- assert result == expected
-
-
-class TestPutIntegration:
- """Test put in combination with get."""
-
- def test_round_trip(self):
- """Test that put and get are inverse operations."""
- from chidian import get
-
- paths_and_values = [
- ("a.b.c", "value1"),
- ("x.y[0].z", "value2"),
- ("arr[2].nested.field", "value3"),
- ]
-
- data = {}
- for path, value in paths_and_values:
- data = put(data, path, value)
- assert get(data, path) == value
diff --git a/tests/test_table.py b/tests/test_table.py
deleted file mode 100644
index 7171bb0..0000000
--- a/tests/test_table.py
+++ /dev/null
@@ -1,581 +0,0 @@
-from chidian.table import Table
-
-
-def test_basic_table():
- """Test basic Table functionality."""
- # Create from list
- rows = [
- {"id": "p1", "name": "John", "age": 30},
- {"id": "p2", "name": "Jane", "age": 25},
- {"id": "p3", "name": "Bob", "age": 35},
- ]
-
- table = Table(rows)
-
- # Test length
- assert len(table) == 3
-
- # Test iteration
- assert list(table) == rows
-
-
-def test_dict_indexing():
- """Test dict-like access with $ syntax."""
- table = Table(
- [
- {"id": "p1", "name": "John", "age": 30},
- {"id": "p2", "name": "Jane", "age": 25},
- ]
- )
-
- # Test basic dict access
- assert table["$0.name"] == "John"
- assert table["$1.name"] == "Jane"
-
- # Test __contains__ method
- assert "$0" in table
- assert "$1" in table
- assert "$nonexistent" not in table
-
- # Test with custom keys
- table.append({"name": "Bob", "age": 35}, custom_key="bob")
- assert table["$bob"]["name"] == "Bob"
-
-
-def test_get_method_basic():
- """Test Table.get method for extracting values from all rows."""
- table = Table(
- [
- {"name": "John", "age": 30, "city": "NYC"},
- {"name": "Jane", "age": 25, "city": "LA"},
- {"name": "Bob", "age": 35}, # Note: no city
- ]
- )
-
- # Test simple field extraction
- assert table.get("name") == ["John", "Jane", "Bob"]
- assert table.get("age") == [30, 25, 35]
-
- # Test with missing fields and default
- assert table.get("city") == ["NYC", "LA", None]
- assert table.get("city", default="Unknown") == ["NYC", "LA", "Unknown"]
-
- # Test completely missing field
- assert table.get("phone") == [None, None, None]
- assert table.get("phone", default="N/A") == ["N/A", "N/A", "N/A"]
-
-
-def test_get_method_nested():
- """Test Table.get method with nested paths."""
- table = Table(
- [
- {"patient": {"id": "123", "name": "John"}, "status": "active"},
- {"patient": {"id": "456", "name": "Jane"}, "status": "inactive"},
- {"patient": {"id": "789", "name": "Bob"}, "status": "active"},
- ]
- )
-
- # Test nested path extraction
- assert table.get("patient.id") == ["123", "456", "789"]
- assert table.get("patient.name") == ["John", "Jane", "Bob"]
- assert table.get("status") == ["active", "inactive", "active"]
-
- # Test missing nested paths
- assert table.get("patient.age") == [None, None, None]
- assert table.get("patient.age", default=0) == [0, 0, 0]
-
- # Test partially missing nested structure
- table_mixed = Table(
- [
- {"patient": {"id": "123", "name": "John"}},
- {"status": "active"}, # No patient object
- {"patient": {"id": "789"}}, # No name
- ]
- )
- assert table_mixed.get("patient.name") == ["John", None, None]
- assert table_mixed.get("patient.name", default="Unknown") == [
- "John",
- "Unknown",
- "Unknown",
- ]
-
-
-def test_filter_method():
- """Test the filter method."""
- table = Table(
- [
- {"name": "John", "age": 30, "active": True},
- {"name": "Jane", "age": 25, "active": False},
- {"name": "Bob", "age": 35, "active": True},
- ]
- )
- table.append({"name": "Alice", "age": 28, "active": True}, custom_key="alice")
-
- # Filter by active status
- active_table = table.filter(lambda x: x.get("active", False))
- assert len(active_table) == 3
-
- # Check that new table has proper $ keys
- assert "$0" in active_table
- assert "$1" in active_table
- assert "$2" in active_table
- assert active_table["$0.name"] == "John"
- assert active_table["$1.name"] == "Bob"
- assert active_table["$2.name"] == "Alice"
-
- # Filter by age
- young_table = table.filter(lambda x: x.get("age", 0) < 30)
- assert len(young_table) == 2
- assert young_table["$0.name"] == "Jane"
- assert young_table["$1.name"] == "Alice"
-
-
-def test_map_method():
- """Test the map method."""
- table = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
-
- # Transform to add computed field
- enhanced = table.map(lambda x: {**x, "adult": x.get("age", 0) >= 18})
-
- assert all("adult" in row for row in enhanced)
- assert all(row["adult"] is True for row in enhanced)
-
-
-def test_columns_property():
- """Test the columns property."""
- table = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane", "city": "NYC"},
- {"id": "123", "name": "Bob", "age": 25, "country": "USA"},
- ]
- )
-
- expected_columns = {"name", "age", "city", "id", "country"}
- assert table.columns == expected_columns
-
-
-def test_to_list_to_dict():
- """Test conversion methods."""
- rows = [{"id": 1, "name": "Test"}, {"id": 2, "name": "Another"}]
- table = Table(rows)
-
- # Test to_list
- assert table.to_list() == rows
-
- # Test to_dict
- result_dict = table.to_dict()
- assert "$0" in result_dict
- assert "$1" in result_dict
- assert result_dict["$0"] == {"id": 1, "name": "Test"}
- assert result_dict["$1"] == {"id": 2, "name": "Another"}
-
-
-def test_append_method():
- """Test appending rows to table."""
- table = Table()
-
- # Append with auto-generated key
- table.append({"name": "John"})
- assert len(table) == 1
- assert table["$0.name"] == "John"
-
- # Append with specific key (should get $ prefix)
- table.append({"name": "Jane"}, custom_key="jane_key")
- assert table["$jane_key.name"] == "Jane"
- assert len(table) == 2
-
- # Append another auto-keyed row
- table.append({"name": "Bob"})
- assert table["$2.name"] == "Bob"
- assert len(table) == 3
-
- # Test accessing named row with dict access
- assert table["$jane_key.name"] == "Jane"
-
-
-def test_unique_method():
- """Test unique values extraction."""
- table = Table(
- [
- {"name": "John", "city": "NYC"},
- {"name": "Jane", "city": "LA"},
- {"name": "Bob", "city": "NYC"},
- {"name": "Alice", "city": "Chicago"},
- {"name": "Charlie", "city": "NYC"},
- ]
- )
-
- unique_cities = table.unique("city")
- assert set(unique_cities) == {"NYC", "LA", "Chicago"}
- assert len(unique_cities) == 3 # Should preserve order and uniqueness
-
- unique_names = table.unique("name")
- assert len(unique_names) == 5 # All names are unique
-
-
-def test_group_by_method():
- """Test grouping by a column."""
- table = Table(
- [
- {"name": "John", "city": "NYC", "age": 30},
- {"name": "Jane", "city": "LA", "age": 25},
- {"name": "Bob", "city": "NYC", "age": 35},
- {"name": "Alice", "city": "Chicago", "age": 28},
- {"name": "Charlie", "city": "NYC", "age": 40},
- ]
- )
-
- grouped = table.group_by("city")
-
- assert "NYC" in grouped
- assert "LA" in grouped
- assert "Chicago" in grouped
-
- nyc_table = grouped["NYC"]
- assert len(nyc_table) == 3
- assert nyc_table.get("name") == ["John", "Bob", "Charlie"]
-
- la_table = grouped["LA"]
- assert len(la_table) == 1
- assert la_table.get("name") == ["Jane"]
-
- chicago_table = grouped["Chicago"]
- assert len(chicago_table) == 1
- assert chicago_table.get("name") == ["Alice"]
-
-
-def test_head_tail_methods():
- """Test head and tail methods."""
- table = Table([{"id": i, "name": f"Person{i}"} for i in range(10)])
-
- # Test head
- head_3 = table.head(3)
- assert len(head_3) == 3
- assert head_3.get("id") == [0, 1, 2]
-
- head_default = table.head()
- assert len(head_default) == 5 # Default is 5
- assert head_default.get("id") == [0, 1, 2, 3, 4]
-
- # Test tail
- tail_3 = table.tail(3)
- assert len(tail_3) == 3
- assert tail_3.get("id") == [7, 8, 9]
-
- tail_default = table.tail()
- assert len(tail_default) == 5 # Default is 5
- assert tail_default.get("id") == [5, 6, 7, 8, 9]
-
-
-def test_get_method_arrays():
- """Test Table.get method with array paths and wildcards."""
- table = Table(
- [
- {
- "patient": {
- "id": "123",
- "identifiers": [
- {"system": "MRN", "value": "MRN123"},
- {"system": "SSN", "value": "SSN456"},
- ],
- },
- "encounters": [
- {"id": "e1", "date": "2024-01-01"},
- {"id": "e2", "date": "2024-02-01"},
- ],
- },
- {
- "patient": {
- "id": "456",
- "identifiers": [
- {"system": "MRN", "value": "MRN789"},
- ],
- },
- "encounters": [], # Empty encounters
- },
- ]
- )
-
- # Test array index access
- assert table.get("patient.identifiers[0].value") == ["MRN123", "MRN789"]
- assert table.get("patient.identifiers[1].value") == ["SSN456", None]
-
- # Test wildcard array access
- assert table.get("encounters[*].id") == [["e1", "e2"], []]
- # Note: When wildcard matches single item, it returns the item directly, not wrapped in a list
- assert table.get("patient.identifiers[*].system") == [["MRN", "SSN"], "MRN"]
-
- # Test getting entire array
- identifiers = table.get("patient.identifiers")
- assert len(identifiers) == 2
- assert len(identifiers[0]) == 2 # First patient has 2 identifiers
- assert len(identifiers[1]) == 1 # Second patient has 1 identifier
-
- # Test with missing array paths
- assert table.get("patient.addresses[0].city") == [None, None]
- assert table.get("patient.addresses[0].city", default="Unknown") == [
- "Unknown",
- "Unknown",
- ]
-
-
-def test_get_method_dollar_syntax():
- """Test Table.get method with $-prefixed paths for specific row access."""
- table = Table(
- [
- {"name": "John", "age": 30, "city": "NYC"},
- {"name": "Jane", "age": 25, "city": "LA"},
- {"name": "Bob", "age": 35}, # Note: no city
- ]
- )
-
- # Test basic $-prefixed access
- assert table.get("$0.name") == "John"
- assert table.get("$1.age") == 25
- assert table.get("$2.name") == "Bob"
-
- # Test missing fields with $-prefix
- assert table.get("$2.city") is None
- assert table.get("$2.city", default="Unknown") == "Unknown"
-
- # Test non-existent row keys
- assert table.get("$99.name") is None
- assert table.get("$99.name", default="N/A") == "N/A"
-
- # Test getting entire row with just $key
- row0 = table.get("$0")
- assert row0 == {"name": "John", "age": 30, "city": "NYC"}
-
- # Test with custom keys
- table.append({"name": "Alice", "age": 28}, custom_key="alice")
- assert table.get("$alice.name") == "Alice"
- assert table.get("$alice.age") == 28
-
- # Test nested paths with $-prefix
- table2 = Table(
- [
- {"patient": {"id": "123", "name": "John"}},
- {"patient": {"id": "456", "name": "Jane"}},
- ]
- )
- assert table2.get("$0.patient.id") == "123"
- assert table2.get("$1.patient.name") == "Jane"
-
- # Compare with non-$ behavior (returns list)
- assert table.get("name") == ["John", "Jane", "Bob", "Alice"]
- assert table2.get("patient.id") == ["123", "456"]
-
-
-def test_get_method_edge_cases():
- """Test Table.get method edge cases."""
- # Test with empty table
- empty_table = Table()
- assert empty_table.get("name") == []
- assert empty_table.get("name", default="N/A") == []
-
- # Test with heterogeneous data types
- table = Table(
- [
- {"value": "string"},
- {"value": 123},
- {"value": True},
- {"value": None},
- {"value": [1, 2, 3]},
- {"value": {"nested": "object"}},
- ]
- )
-
- values = table.get("value")
- assert values == ["string", 123, True, None, [1, 2, 3], {"nested": "object"}]
-
- # Test deep nesting with mixed types
- table2 = Table(
- [
- {"data": {"level1": {"level2": {"level3": "deep"}}}},
- {"data": {"level1": "shallow"}}, # Not nested as deep
- {"data": None}, # Null data
- {}, # Missing data entirely
- ]
- )
-
- assert table2.get("data.level1.level2.level3") == ["deep", None, None, None]
- assert table2.get("data.level1") == [
- {"level2": {"level3": "deep"}},
- "shallow",
- None,
- None,
- ]
-
-
-def test_init_with_dict():
- """Test initialization with dict instead of list."""
- rows = {"user1": {"name": "John", "age": 30}, "user2": {"name": "Jane", "age": 25}}
-
- table = Table(rows)
-
- assert len(table) == 2
- assert "$user1" in table
- assert "$user2" in table
- assert table["$user1.name"] == "John"
- assert table["$user2.name"] == "Jane"
-
-
-def test_empty_table():
- """Test empty table initialization."""
- table = Table()
-
- assert len(table) == 0
- assert table.columns == set()
- assert table.to_list() == []
- assert table.to_dict() == {}
-
-
-# DSL Tests (TDD - these will fail until DSL is implemented)
-
-
-def test_select_dsl_basic():
- """Test basic select DSL functionality."""
- table = Table(
- [
- {"name": "John", "age": 30, "city": "NYC"},
- {"name": "Jane", "age": 25, "city": "LA"},
- ]
- )
-
- # Test specific column selection
- result = table.select("name, age")
- assert len(result) == 2
- assert result.get("name") == ["John", "Jane"]
- assert result.get("age") == [30, 25]
- assert "city" not in result.columns
-
- # Test wildcard selection
- result = table.select("*")
- assert len(result) == 2
- assert result.columns == {"name", "age", "city"}
-
-
-def test_select_dsl_with_renaming():
- """Test select DSL with column renaming."""
- table = Table(
- [
- {"patient": {"id": "123", "name": "John"}},
- {"patient": {"id": "456", "name": "Jane"}},
- ]
- )
-
- # Test column renaming
- result = table.select("patient.id -> patient_id, patient.name -> patient_name")
- assert len(result) == 2
- assert result.get("patient_id") == ["123", "456"]
- assert result.get("patient_name") == ["John", "Jane"]
- assert result.columns == {"patient_id", "patient_name"}
-
-
-def test_filter_dsl_basic():
- """Test basic filter DSL functionality."""
- table = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane", "age": 25},
- {"name": "Bob", "age": 35},
- ]
- )
-
- # Test numeric comparison
- result = table.filter("age > 26")
- assert len(result) == 2
- assert result.get("name") == ["John", "Bob"]
-
- # Test string equality
- result = table.filter("name = 'John'")
- assert len(result) == 1
- assert result.get("name") == ["John"]
-
-
-def test_filter_dsl_complex():
- """Test complex filter DSL functionality."""
- table = Table(
- [
- {"name": "John", "age": 30, "status": "active"},
- {"name": "Jane", "age": 25, "status": "inactive"},
- {"name": "Bob", "age": 35, "status": "active"},
- ]
- )
-
- # Test AND operator
- result = table.filter("status = 'active' AND age >= 30")
- assert len(result) == 2
- assert result.get("name") == ["John", "Bob"]
-
- # Test OR operator
- result = table.filter("age > 25 OR name = 'Jane'")
- assert len(result) == 3 # All rows match
-
-
-def test_filter_dsl_nested_paths():
- """Test filter DSL with nested paths."""
- table = Table(
- [
- {"patient": {"name": "John", "addresses": [{"city": "NYC"}]}},
- {"patient": {"name": "Jane", "addresses": [{"city": "LA"}]}},
- ]
- )
-
- # Test nested path with array index
- result = table.filter("patient.addresses[0].city = 'NYC'")
- assert len(result) == 1
- assert result.get("patient.name") == ["John"]
-
- # Test CONTAINS with wildcard - note: this returns list from wildcard
- table2 = Table(
- [
- {"name": "John", "cities": ["NYC", "Boston"]},
- {"name": "Jane", "cities": ["LA", "SF"]},
- ]
- )
- result = table2.filter("cities CONTAINS 'NYC'")
- assert len(result) == 1
- assert result.get("name") == ["John"]
-
-
-# Integration tests showing expected DSL behavior (will pass once implemented)
-
-
-def test_full_workflow_with_dsl():
- """Test complete workflow combining functional and DSL APIs."""
- table = Table(
- [
- {"name": "John", "age": 30, "city": "NYC", "department": "Engineering"},
- {"name": "Jane", "age": 25, "city": "LA", "department": "Marketing"},
- {"name": "Bob", "age": 35, "city": "NYC", "department": "Engineering"},
- {"name": "Alice", "age": 28, "city": "Chicago", "department": "Sales"},
- ]
- )
-
- # This workflow combines DSL and functional APIs:
- # 1. Filter for NYC employees over 25
- # 2. Select specific columns with renaming
- # 3. Add computed field
- # 4. Get unique departments
-
- # Step 1: DSL filter
- nyc_employees = table.filter("city = 'NYC' AND age > 25")
- assert len(nyc_employees) == 2
-
- # Step 2: DSL select
- selected = nyc_employees.select("name -> employee_name, department, age")
- assert len(selected) == 2
- assert selected.columns == {"employee_name", "department", "age"}
- assert selected.get("employee_name") == ["John", "Bob"]
-
- # Step 3: Functional map
- enhanced = selected.map(
- lambda row: {**row, "seniority": "Senior" if row["age"] > 30 else "Junior"}
- )
- assert len(enhanced) == 2
-
- # Step 4: Functional unique
- departments = enhanced.unique("department")
- assert departments == ["Engineering"] # Both NYC employees are in Engineering
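
The table tests above all lean on the same dot/bracket path syntax (`patient.addresses[0].city`, `$0.name`). As a rough mental model, here is a minimal standalone sketch of the non-`$` traversal; `get_path` and `_TOKEN` are hypothetical names, not chidian's actual implementation:

```python
import re
from typing import Any

# Tokenize "a.b[0].c" into name segments and integer indices.
_TOKEN = re.compile(r"([^.\[\]]+)|\[(-?\d+)\]")

def get_path(obj: Any, path: str, default: Any = None) -> Any:
    current = obj
    for name, index in _TOKEN.findall(path):
        try:
            current = current[name] if name else current[int(index)]
        except (KeyError, IndexError, TypeError):
            return default
    return current

row = {"patient": {"id": "123", "addresses": [{"city": "NYC"}]}}
assert get_path(row, "patient.id") == "123"
assert get_path(row, "patient.addresses[0].city") == "NYC"
assert get_path(row, "patient.addresses[0].zip", default="?") == "?"
```

Judging by the assertions, `Table.get` applies this traversal per row and collects the results into a list, while the `$`-prefixed form first selects a single row by key and returns a scalar.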
diff --git a/tests/test_table_display.py b/tests/test_table_display.py
deleted file mode 100644
index 22c61e9..0000000
--- a/tests/test_table_display.py
+++ /dev/null
@@ -1,153 +0,0 @@
-"""Tests for Table display methods."""
-
-from chidian.table import Table
-
-
-def test_repr():
- """Test __repr__ method."""
- # Empty table
- t = Table()
- assert repr(t) == ""
-
- # Single row, single column
- t = Table([{"name": "John"}])
- assert repr(t) == ""
-
- # Multiple rows and columns
- t = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
- assert repr(t) == ""
-
- # Sparse table (different columns per row)
- t = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane", "city": "NYC"},
- {"email": "bob@example.com"},
- ]
- )
- assert repr(t) == ""
-
-
-def test_str_basic():
- """Test __str__ method with basic data."""
- t = Table([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}])
-
- result = str(t)
- assert "$key" in result
- assert "name" in result
- assert "age" in result
- assert "John" in result
- assert "Jane" in result
- assert "30" in result
- assert "25" in result
- assert "$0" in result
- assert "$1" in result
-
-
-def test_str_empty():
- """Test __str__ with empty table."""
- t = Table()
- assert str(t) == ""
-
-
-def test_str_truncation():
- """Test that __str__ shows only first 5 rows by default."""
- rows = [{"id": i, "value": f"item{i}"} for i in range(10)]
- t = Table(rows)
-
- result = str(t)
- # Should show first 5 rows
- assert "$0" in result
- assert "$4" in result
- # Should not show row 5 and beyond in the data
- assert "$5" not in result
- # Should show indicator for more rows
- assert "5 more rows" in result
-
-
-def test_show_method():
- """Test show() method with various parameters."""
- rows = [
- {"id": i, "description": f"A very long description for item {i}" * 3}
- for i in range(10)
- ]
- t = Table(rows)
-
- # Show only 2 rows
- result = t.show(n=2)
- assert "$0" in result
- assert "$1" in result
- assert "$2" not in result
- assert "8 more rows" in result
-
- # Show all rows
- result = t.show(n=20)
- assert "$0" in result
- assert "$9" in result
- assert "more row" not in result
-
- # Test truncation
- result = t.show(n=1, truncate=20)
- assert "..." in result # Description should be truncated
-
-
-def test_show_with_none_values():
- """Test display of None values."""
- t = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane", "age": None},
- {"name": None, "age": 25},
- ]
- )
-
- result = str(t)
- assert "None" in result
-
-
-def test_show_with_nested_data():
- """Test display of nested structures."""
- t = Table(
- [
- {"name": "John", "data": {"age": 30, "city": "NYC"}},
- {"name": "Jane", "data": [1, 2, 3]},
- ]
- )
-
- result = str(t)
- # Should show JSON representation
- assert '{"age":30' in result or '"city"' in result
- assert "[1,2,3]" in result
-
-
-def test_show_with_custom_keys():
- """Test display with custom row keys."""
- t = Table()
- t.append({"name": "Alice"}, custom_key="alice")
- t.append({"name": "Bob"}, custom_key="bob")
-
- result = str(t)
- assert "$alice" in result
- assert "$bob" in result
- assert "Alice" in result
- assert "Bob" in result
-
-
-def test_show_column_width():
- """Test that columns are properly aligned."""
- t = Table(
- [
- {"short": "a", "medium": "hello", "long": "this is a longer value"},
- {"short": "bb", "medium": "world", "long": "another long value"},
- ]
- )
-
- result = str(t)
- lines = result.split("\n")
-
- # Check that separator line exists
- assert any("-+-" in line for line in lines)
-
- # Check that all data lines have consistent structure
- data_lines = [line for line in lines if "|" in line]
- assert len(data_lines) >= 3 # Header + 2 data rows
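
Taken together, these display tests pin down a simple fixed-width format: a `$key` column, `|`-separated cells, a `-+-` separator row, compact-JSON rendering of nested values, `...` truncation, and an `N more rows` footer. A minimal sketch that produces the same shape (illustrative only; `render` and `_fmt` are hypothetical helpers, not chidian's renderer):

```python
import json
from typing import Any

def _fmt(value: Any, truncate: int) -> str:
    # Nested values render as compact JSON; long cells are cut with "..."
    text = (json.dumps(value, separators=(",", ":"))
            if isinstance(value, (dict, list)) else str(value))
    return text if len(text) <= truncate else text[: truncate - 3] + "..."

def render(rows: list[dict[str, Any]], n: int = 5, truncate: int = 40) -> str:
    cols = ["$key"] + sorted({key for row in rows for key in row})
    cells = [[f"${i}"] + [_fmt(row.get(c), truncate) for c in cols[1:]]
             for i, row in enumerate(rows[:n])]
    widths = [max([len(c)] + [len(line[j]) for line in cells])
              for j, c in enumerate(cols)]
    header = " | ".join(c.ljust(w) for c, w in zip(cols, widths))
    sep = "-+-".join("-" * w for w in widths)
    body = [" | ".join(v.ljust(w) for v, w in zip(line, widths)) for line in cells]
    footer = [f"... {len(rows) - n} more rows"] if len(rows) > n else []
    return "\n".join([header, sep, *body, *footer])

print(render([{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]))
# $key | age | name
# -----+-----+-----
# $0   | 30  | John
# $1   | 25  | Jane
```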
diff --git a/tests/test_table_flatten.py b/tests/test_table_flatten.py
deleted file mode 100644
index 82c6a66..0000000
--- a/tests/test_table_flatten.py
+++ /dev/null
@@ -1,438 +0,0 @@
-"""Tests for Table flattening functionality."""
-
-import pytest
-
-from chidian.table import Table
-
-
-def test_basic_flattening():
- """Test basic dict/list flattening functionality."""
- # Test the canonical example: {'a': 1, 'b': [2], 'c': {'d': 3}}
- t = Table([{"a": 1, "b": [2], "c": {"d": 3}}])
- flattened = t.flatten()
-
- # Should have flattened structure
- assert len(flattened) == 1
- row = flattened.to_list()[0]
-
- expected_keys = {"a", "b[0]", "c.d"}
- assert set(row.keys()) == expected_keys
- assert row["a"] == 1
- assert row["b[0]"] == 2
- assert row["c.d"] == 3
-
-
-def test_columns_flattened():
- """Test columns_flattened method for column preview."""
- t = Table([{"a": 1, "b": [1, 2]}, {"a": 2, "b": [3], "c": {"d": 4}}])
-
- # Should get union of all flattened columns
- columns = t.columns_flattened()
- expected = {"a", "b[0]", "b[1]", "c.d"}
- assert columns == expected
-
- # Test sampling
- columns_sample = t.columns_flattened(sample_rows=1)
- expected_sample = {"a", "b[0]", "b[1]"} # Only first row
- assert columns_sample == expected_sample
-
-
-def test_ragged_arrays():
- """Test arrays of different lengths across rows."""
- t = Table([{"items": [1, 2, 3]}, {"items": [4]}, {"items": [5, 6]}])
-
- flattened = t.flatten()
- columns = flattened.columns
-
- # Should have columns for all indices that appear
- expected_columns = {"items[0]", "items[1]", "items[2]"}
- assert columns == expected_columns
-
- rows = flattened.to_list()
- # First row has all values
- assert rows[0]["items[0]"] == 1
- assert rows[0]["items[1]"] == 2
- assert rows[0]["items[2]"] == 3
-
- # Second row only has first value, others should be missing
- assert rows[1]["items[0]"] == 4
- assert "items[1]" not in rows[1]
- assert "items[2]" not in rows[1]
-
- # Third row has first two values
- assert rows[2]["items[0]"] == 5
- assert rows[2]["items[1]"] == 6
- assert "items[2]" not in rows[2]
-
-
-def test_mixed_types():
- """Test same key as different types across rows."""
- t = Table(
- [
- {"data": {"nested": "value1"}},
- {"data": "simple_string"},
- {"data": {"nested": "value2", "other": 42}},
- ]
- )
-
- flattened = t.flatten()
- columns = flattened.columns
-
- # Should include both direct 'data' and nested 'data.nested', 'data.other'
- expected_columns = {"data", "data.nested", "data.other"}
- assert columns == expected_columns
-
- rows = flattened.to_list()
-
- # First row: only nested structure
- assert "data" not in rows[0] # No direct 'data' value
- assert rows[0]["data.nested"] == "value1"
- assert "data.other" not in rows[0]
-
- # Second row: only direct value
- assert rows[1]["data"] == "simple_string"
- assert "data.nested" not in rows[1]
- assert "data.other" not in rows[1]
-
- # Third row: nested structure with multiple keys
- assert "data" not in rows[2]
- assert rows[2]["data.nested"] == "value2"
- assert rows[2]["data.other"] == 42
-
-
-def test_special_keys():
- """Test keys with special characters that need bracket notation."""
- t = Table(
- [
- {
- "normal_key": 1,
- "key.with.dots": 2,
- "key with spaces": 3,
- "key[with]brackets": 4,
- 'key"with"quotes': 5,
- "nested": {"normal": "a", "special.key": "b"},
- }
- ]
- )
-
- flattened = t.flatten()
- columns = flattened.columns
-
- # Check that special keys are properly encoded
- assert "normal_key" in columns
- assert '["key.with.dots"]' in columns
- assert '["key with spaces"]' in columns
- assert '["key[with]brackets"]' in columns
- assert '["key\\"with\\"quotes"]' in columns
- assert "nested.normal" in columns
- assert 'nested.["special.key"]' in columns
-
- row = flattened.to_list()[0]
- assert row["normal_key"] == 1
- assert row['["key.with.dots"]'] == 2
- assert row['["key with spaces"]'] == 3
- assert row['["key[with]brackets"]'] == 4
- assert row['["key\\"with\\"quotes"]'] == 5
- assert row["nested.normal"] == "a"
- assert row['nested.["special.key"]'] == "b"
-
-
-def test_max_depth_limit():
- """Test depth limiting functionality."""
- deep_data = {"level1": {"level2": {"level3": {"level4": "deep_value"}}}}
-
- t = Table([deep_data])
-
- # No depth limit - should fully flatten
- unlimited = t.flatten()
- assert "level1.level2.level3.level4" in unlimited.columns
- assert unlimited.to_list()[0]["level1.level2.level3.level4"] == "deep_value"
-
- # Depth limit of 2 - should stop at level3 (depth 0,1,2)
- limited = t.flatten(max_depth=2)
- columns = limited.columns
- assert "level1.level2.level3" in columns
- assert "level1.level2.level3.level4" not in columns
-
- # The limited value should contain the remaining nested structure
- row = limited.to_list()[0]
- remaining = row["level1.level2.level3"]
- assert remaining == {"level4": "deep_value"}
-
-
-def test_array_index_limit():
- """Test array index limiting functionality."""
- t = Table([{"items": list(range(10))}]) # 0 through 9
-
- # No limit - should include all indices
- unlimited = t.flatten()
- for i in range(10):
- assert f"items[{i}]" in unlimited.columns
-
- # Limit to 3 - should only include 0, 1, 2
- limited = t.flatten(array_index_limit=3)
- columns = limited.columns
- expected_columns = {"items[0]", "items[1]", "items[2]"}
- assert columns == expected_columns
-
- row = limited.to_list()[0]
- assert row["items[0]"] == 0
- assert row["items[1]"] == 1
- assert row["items[2]"] == 2
- # Should not have items[3] and beyond
-
-
-def test_get_path_value_compatibility():
- """Test that _get_path_value works with flattened tables."""
- original = Table([{"user": {"name": "John", "prefs": ["email", "sms"]}, "id": 123}])
-
- flattened = original.flatten()
-
- # Test that path-based access works on flattened table
- # Should be able to access flattened keys directly
- row = flattened.to_list()[0]
- table_instance = flattened
-
- # Direct key access should work
- assert table_instance._get_path_value(row, "user.name") == "John"
- assert table_instance._get_path_value(row, "user.prefs[0]") == "email"
- assert table_instance._get_path_value(row, "user.prefs[1]") == "sms"
- assert table_instance._get_path_value(row, "id") == 123
-
- # Test that regular table operations work
- assert flattened.get("user.name") == ["John"]
- assert flattened.get("user.prefs[0]") == ["email"]
-
-
-def test_join_on_flattened_path():
- """Test that joins work with flattened path columns."""
- table1 = Table(
- [{"user": {"id": 1}, "name": "John"}, {"user": {"id": 2}, "name": "Jane"}]
- )
-
- table2 = Table([{"user": {"id": 1}, "score": 85}, {"user": {"id": 2}, "score": 92}])
-
- # Flatten both tables
- flat1 = table1.flatten()
- flat2 = table2.flatten()
-
- # Should be able to join on the flattened path
- joined = flat1.join(flat2, on="user.id")
-
- assert len(joined) == 2
- rows = joined.to_list()
-
- # Verify join worked correctly
- assert rows[0]["name"] == "John"
- assert rows[0]["score"] == 85
- assert rows[1]["name"] == "Jane"
- assert rows[1]["score"] == 92
-
-
-def test_select_on_flattened():
- """Test select operations on flattened tables."""
- original = Table(
- [
- {
- "user": {"name": "John", "age": 30},
- "meta": {"created": "2023-01-01"},
- "id": 123,
- }
- ]
- )
-
- flattened = original.flatten()
-
-    # Select specific flattened columns (avoiding bracket notation, which the select parser doesn't support yet)
- selected = flattened.select("user.name, user.age, meta.created")
-
- assert len(selected) == 1
- row = selected.to_list()[0]
- assert set(row.keys()) == {"name", "age", "created"} # Renamed from paths
- assert row["name"] == "John"
- assert row["age"] == 30
- assert row["created"] == "2023-01-01"
-
-
-def test_group_by_on_flattened():
- """Test group_by operations on flattened tables."""
- original = Table(
- [
- {"user": {"dept": "eng"}, "name": "John"},
- {"user": {"dept": "eng"}, "name": "Jane"},
- {"user": {"dept": "sales"}, "name": "Bob"},
- ]
- )
-
- flattened = original.flatten()
-
- # Group by flattened path
- groups = flattened.group_by("user.dept")
-
- assert len(groups) == 2
- assert "eng" in groups
- assert "sales" in groups
-
- eng_group = groups["eng"]
- assert len(eng_group) == 2
-
- sales_group = groups["sales"]
- assert len(sales_group) == 1
-
-
-def test_display_flattened():
- """Test show method with flatten=True."""
- nested = Table(
- [
- {
- "user": {"name": "John", "prefs": ["email", "sms"]},
- "meta": {"created": "2023-01-01"},
- }
- ]
- )
-
- # Regular display
- regular_display = nested.show()
- assert "user" in regular_display
- assert "user.name" not in regular_display
-
- # Flattened display
- flat_display = nested.show(flatten=True)
- assert "user.name" in flat_display
- assert "user.prefs[0]" in flat_display
- assert "user.prefs[1]" in flat_display
- assert "meta.created" in flat_display
- assert "John" in flat_display
- assert "email" in flat_display
-
-
-def test_to_pandas_flatten():
- """Test pandas export with flattening."""
-    pytest.importorskip("pandas")  # skip properly, rather than silently pass, when pandas is missing
-
- nested = Table([{"user": {"name": "John", "age": 30}, "tags": ["python", "data"]}])
-
- # Export with flattening
- df = nested.to_pandas(flatten=True)
-
- expected_columns = {"user.name", "user.age", "tags[0]", "tags[1]"}
- assert set(df.columns) == expected_columns
-
- assert df.iloc[0]["user.name"] == "John"
- assert df.iloc[0]["user.age"] == 30
- assert df.iloc[0]["tags[0]"] == "python"
- assert df.iloc[0]["tags[1]"] == "data"
-
-
-def test_to_polars_flatten():
- """Test polars export with flattening."""
-    pytest.importorskip("polars")  # skip properly, rather than silently pass, when polars is missing
-
- nested = Table([{"user": {"name": "John", "age": 30}, "tags": ["python", "data"]}])
-
- # Export with flattening
- df = nested.to_polars(flatten=True)
-
- expected_columns = {"user.name", "user.age", "tags[0]", "tags[1]"}
- assert set(df.columns) == expected_columns
-
- row_dict = df.to_dicts()[0]
- assert row_dict["user.name"] == "John"
- assert row_dict["user.age"] == 30
- assert row_dict["tags[0]"] == "python"
- assert row_dict["tags[1]"] == "data"
-
-
-def test_to_csv_flatten():
- """Test CSV export with flattening."""
- import csv
- import tempfile
-
- nested = Table([{"user": {"name": "John", "age": 30}, "tags": ["python", "data"]}])
-
- # Export with flattening
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- nested.to_csv(f.name, flatten=True)
-
- # Read back and verify
- with open(f.name, "r") as f:
- reader = csv.DictReader(f)
- rows = list(reader)
-
- assert len(rows) == 1
- row = rows[0]
-
- assert row["user.name"] == "John"
- assert row["user.age"] == "30"
- assert row["tags[0]"] == "python"
- assert row["tags[1]"] == "data"
-
-
-def test_empty_table_flatten():
- """Test flattening empty table."""
- empty = Table()
- flattened = empty.flatten()
-
- assert len(flattened) == 0
- assert flattened.columns == set()
- assert flattened.columns_flattened() == set()
-
-
-def test_none_values_flatten():
- """Test flattening with None values."""
- t = Table([{"data": None, "nested": {"value": None}, "array": [None, 1, None]}])
-
- flattened = t.flatten()
- row = flattened.to_list()[0]
-
- assert row["data"] is None
- assert row["nested.value"] is None
- assert row["array[0]"] is None
- assert row["array[1]"] == 1
- assert row["array[2]"] is None
-
-
-def test_complex_nested_structure():
- """Test flattening deeply nested and complex structures."""
- complex_data = {
- "users": [
- {
- "profile": {"name": "John", "settings": {"theme": "dark"}},
- "contacts": [{"type": "email", "value": "john@example.com"}],
- },
- {
- "profile": {"name": "Jane", "settings": {"theme": "light"}},
- "contacts": [
- {"type": "email", "value": "jane@example.com"},
- {"type": "phone", "value": "555-1234"},
- ],
- },
- ],
- "meta": {"version": 1},
- }
-
- t = Table([complex_data])
- flattened = t.flatten()
-
- columns = flattened.columns
-
- # Should have all the deeply nested paths
- assert "users[0].profile.name" in columns
- assert "users[0].profile.settings.theme" in columns
- assert "users[0].contacts[0].type" in columns
- assert "users[0].contacts[0].value" in columns
- assert "users[1].profile.name" in columns
- assert "users[1].contacts[1].type" in columns
- assert "meta.version" in columns
-
- row = flattened.to_list()[0]
- assert row["users[0].profile.name"] == "John"
- assert row["users[0].profile.settings.theme"] == "dark"
- assert row["users[1].contacts[1].value"] == "555-1234"
- assert row["meta.version"] == 1
diff --git a/tests/test_table_io.py b/tests/test_table_io.py
deleted file mode 100644
index 7f3f99d..0000000
--- a/tests/test_table_io.py
+++ /dev/null
@@ -1,664 +0,0 @@
-"""Tests for Table I/O functionality (CSV and Parquet)."""
-
-import csv
-import tempfile
-from datetime import datetime
-from pathlib import Path
-
-import pytest
-
-from chidian import Table
-
-
-class TestTableCSV:
- """Test CSV import/export functionality."""
-
- def test_csv_round_trip_basic(self):
- """Test basic CSV write and read."""
- data = [
- {"name": "Alice", "age": 30, "city": "NYC"},
- {"name": "Bob", "age": 25, "city": "LA"},
- {"name": "Charlie", "age": 35, "city": "Chicago"},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write to CSV
- table.to_csv(temp_path)
-
- # Read back
- table2 = Table.from_csv(temp_path)
-
- # Verify data
- assert len(table2) == len(table)
- assert table2.columns == table.columns
-
- # Check values
-        for row1, row2 in zip(table, table2):
- assert row1 == row2
- finally:
- temp_path.unlink()
-
- def test_csv_with_custom_delimiter(self):
- """Test CSV with tab delimiter."""
- data = [{"col1": "value1", "col2": "value2"}]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".tsv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with tab delimiter
- table.to_csv(temp_path, delimiter="\t")
-
- # Read back with tab delimiter
- table2 = Table.from_csv(temp_path, delimiter="\t")
-
- assert len(table2) == 1
- assert table2.get("$0") == data[0]
- finally:
- temp_path.unlink()
-
- def test_csv_with_null_values(self):
- """Test handling of null values in CSV."""
- data = [
- {"name": "Alice", "age": 30, "score": None},
- {"name": "Bob", "age": None, "score": 95.5},
- {"name": None, "age": 25, "score": 88.0},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with custom null value
- table.to_csv(temp_path, null_value="NULL")
-
- # Read back with null value handling
- table2 = Table.from_csv(temp_path, null_values=["NULL"])
-
- # Verify nulls are preserved
- assert table2.get("$0.score") is None
- assert table2.get("$1.age") is None
- assert table2.get("$2.name") is None
- finally:
- temp_path.unlink()
-
- def test_csv_with_type_specification(self):
- """Test CSV reading with explicit type specifications."""
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
- f.write("id,value,active\n")
- f.write("1,123.45,true\n")
- f.write("2,67.89,false\n")
-
- try:
- # Read with explicit types
- table = Table.from_csv(
- temp_path, dtypes={"id": int, "value": float, "active": bool}
- )
-
- # Verify types
- row0 = table.get("$0")
- assert isinstance(row0["id"], int)
- assert row0["id"] == 1
- assert isinstance(row0["value"], float)
- assert row0["value"] == 123.45
- assert isinstance(row0["active"], bool)
- assert row0["active"] is True
- finally:
- temp_path.unlink()
-
- def test_csv_with_date_parsing(self):
- """Test CSV date parsing."""
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
- f.write("order_id,order_date,ship_date\n")
- f.write("1,2024-01-15,2024-01-17\n")
- f.write("2,2024-02-20,2024-02-22\n")
-
- try:
- # Read with date parsing
- table = Table.from_csv(temp_path, parse_dates=["order_date", "ship_date"])
-
- # Verify dates are parsed
- row0 = table.get("$0")
- assert isinstance(row0["order_date"], datetime)
- assert row0["order_date"].year == 2024
- assert row0["order_date"].month == 1
- assert row0["order_date"].day == 15
- finally:
- temp_path.unlink()
-
- def test_csv_skip_rows_and_max_rows(self):
- """Test skipping rows and limiting rows read."""
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
- f.write("# Comment line\n")
- f.write("# Another comment\n")
- f.write("id,name\n")
- for i in range(10):
- f.write(f"{i},Name{i}\n")
-
- try:
- # Skip first 2 lines and read max 5 rows
- table = Table.from_csv(temp_path, skip_rows=2, max_rows=5)
-
- assert len(table) == 5
- assert table.get("$0.id") == 0
- assert table.get("$4.id") == 4
- finally:
- temp_path.unlink()
-
- def test_csv_with_index(self):
- """Test writing and reading CSV with row index."""
- # Create table with custom row keys
- table = Table()
- table.append({"name": "Alice", "age": 30}, custom_key="alice")
- table.append({"name": "Bob", "age": 25}, custom_key="bob")
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with index
- table.to_csv(temp_path, index=True)
-
- # Read back
- table2 = Table.from_csv(temp_path)
-
- # Verify index was written as column
- assert "_index" in table2.columns
- assert table2.get("$0._index") == "$alice"
- assert table2.get("$1._index") == "$bob"
- finally:
- temp_path.unlink()
-
- def test_csv_append_mode(self):
- """Test appending to existing CSV file."""
- data1 = [{"id": 1, "name": "Alice"}]
- data2 = [{"id": 2, "name": "Bob"}]
-
- table1 = Table(data1)
- table2 = Table(data2)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write first table
- table1.to_csv(temp_path)
-
- # Append second table
- table2.to_csv(temp_path, mode="a", header=False)
-
- # Read combined file
- combined = Table.from_csv(temp_path)
-
- assert len(combined) == 2
- assert combined.get("$0.name") == "Alice"
- assert combined.get("$1.name") == "Bob"
- finally:
- temp_path.unlink()
-
- def test_csv_with_nested_data(self):
- """Test CSV with nested dict/list data (JSON serialization)."""
- data = [
- {
- "id": 1,
- "metadata": {"type": "A", "count": 10},
- "tags": ["red", "blue"],
- },
- {
- "id": 2,
- "metadata": {"type": "B", "count": 20},
- "tags": ["green"],
- },
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with nested data
- table.to_csv(temp_path)
-
- # Read back
- table2 = Table.from_csv(temp_path)
-
- # Nested data should be preserved as JSON
- row0 = table2.get("$0")
- assert isinstance(row0["metadata"], dict)
- assert row0["metadata"]["type"] == "A"
- assert isinstance(row0["tags"], list)
- assert "red" in row0["tags"]
- finally:
- temp_path.unlink()
-
- def test_csv_column_selection(self):
- """Test writing specific columns to CSV."""
- data = [
- {"id": 1, "name": "Alice", "age": 30, "city": "NYC"},
- {"id": 2, "name": "Bob", "age": 25, "city": "LA"},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write only specific columns
- table.to_csv(temp_path, columns=["id", "name"])
-
- # Read back
- table2 = Table.from_csv(temp_path)
-
- # Only selected columns should be present
- assert table2.columns == {"id", "name"}
- assert "age" not in table2.columns
- assert "city" not in table2.columns
- finally:
- temp_path.unlink()
-
- def test_csv_float_formatting(self):
- """Test float formatting in CSV output."""
- data = [
- {"id": 1, "value": 123.456789},
- {"id": 2, "value": 987.654321},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with float formatting
- table.to_csv(temp_path, float_format="%.2f")
-
- # Read raw file to verify formatting
- with open(temp_path, "r") as f:
- lines = f.readlines()
- # Check that floats are formatted
- assert "123.46" in lines[1] # Rounded
- assert "987.65" in lines[2] # Rounded
- finally:
- temp_path.unlink()
-
- def test_csv_no_header(self):
- """Test CSV without header row."""
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
- f.write("1,Alice,30\n")
- f.write("2,Bob,25\n")
-
- try:
- # Read without header
- table = Table.from_csv(
- temp_path, header=False, columns=["id", "name", "age"]
- )
-
- assert len(table) == 2
- assert table.get("$0.name") == "Alice"
- assert table.get("$1.age") == 25
- finally:
- temp_path.unlink()
-
- def test_csv_error_handling(self):
- """Test error handling for CSV operations."""
- table = Table([{"id": 1}])
-
- # Test file not found
- with pytest.raises(FileNotFoundError):
- Table.from_csv("/nonexistent/file.csv")
-
-        # Permission errors are hard to simulate portably in a test,
-        # so they are not exercised here, but the CSV code handles them
-
- # Test malformed CSV handling
- with tempfile.NamedTemporaryFile(mode="w", suffix=".csv", delete=False) as f:
- temp_path = Path(f.name)
- f.write("id,name\n")
- f.write("1,Alice\n")
- f.write("2\n") # Missing column
-
- try:
- # Should handle gracefully by padding with None
- table = Table.from_csv(temp_path)
- assert len(table) == 2
- assert table.get("$1.name") is None
- finally:
- temp_path.unlink()
-
-
-class TestTableParquet:
- """Test Parquet import/export functionality."""
-
- @pytest.fixture(autouse=True)
- def check_pyarrow(self):
- """Skip tests if pyarrow is not installed."""
- pytest.importorskip("pyarrow")
-
- def test_parquet_round_trip_basic(self):
- """Test basic Parquet write and read."""
- data = [
- {"id": 1, "name": "Alice", "age": 30, "score": 95.5},
- {"id": 2, "name": "Bob", "age": 25, "score": 88.0},
- {"id": 3, "name": "Charlie", "age": 35, "score": 92.3},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write to Parquet
- table.to_parquet(temp_path)
-
- # Read back
- table2 = Table.from_parquet(temp_path)
-
- # Verify data and types are preserved
- assert len(table2) == len(table)
- assert table2.columns == table.columns
-
-            for row1, row2 in zip(table, table2):
- assert row1["id"] == row2["id"]
- assert row1["name"] == row2["name"]
- assert row1["age"] == row2["age"]
- assert abs(row1["score"] - row2["score"]) < 0.001
- finally:
- temp_path.unlink()
-
- def test_parquet_with_nested_data(self):
- """Test Parquet with nested structures."""
- data = [
- {
- "id": 1,
- "user": {"name": "Alice", "email": "alice@example.com"},
- "tags": ["python", "data"],
- "scores": [95, 88, 92],
- },
- {
- "id": 2,
- "user": {"name": "Bob", "email": "bob@example.com"},
- "tags": ["java", "web"],
- "scores": [88, 90, 85],
- },
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write nested data
- table.to_parquet(temp_path)
-
- # Read back
- table2 = Table.from_parquet(temp_path)
-
- # Verify nested structures are preserved
- row0 = table2.get("$0")
- assert isinstance(row0["user"], dict)
- assert row0["user"]["name"] == "Alice"
- assert isinstance(row0["tags"], list)
- assert "python" in row0["tags"]
- assert isinstance(row0["scores"], list)
- assert row0["scores"][0] == 95
- finally:
- temp_path.unlink()
-
- def test_parquet_column_selection(self):
- """Test reading specific columns from Parquet."""
- data = [
- {"id": 1, "name": "Alice", "age": 30, "city": "NYC", "country": "USA"},
- {"id": 2, "name": "Bob", "age": 25, "city": "LA", "country": "USA"},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write all data
- table.to_parquet(temp_path)
-
- # Read only specific columns
- table2 = Table.from_parquet(temp_path, columns=["id", "name", "age"])
-
- # Verify only selected columns are present
- assert table2.columns == {"id", "name", "age"}
- assert "city" not in table2.columns
- assert "country" not in table2.columns
- finally:
- temp_path.unlink()
-
- def test_parquet_compression_options(self):
- """Test different compression options for Parquet."""
- data = [{"id": i, "value": i * 1.5} for i in range(100)]
- table = Table(data)
-
- compressions = ["snappy", "gzip", None]
-
- for compression in compressions:
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with specific compression
- table.to_parquet(temp_path, compression=compression)
-
- # Read back
- table2 = Table.from_parquet(temp_path)
-
- # Verify data is preserved
- assert len(table2) == 100
- assert table2.get("$0.id") == 0
- assert table2.get("$99.id") == 99
- finally:
- temp_path.unlink()
-
- def test_parquet_with_null_values(self):
- """Test Parquet handling of null values."""
- data = [
- {"id": 1, "name": "Alice", "score": None},
- {"id": 2, "name": None, "score": 88.5},
- {"id": None, "name": "Charlie", "score": 92.0},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with nulls
- table.to_parquet(temp_path)
-
- # Read back
- table2 = Table.from_parquet(temp_path)
-
- # Verify nulls are preserved
- assert table2.get("$0.score") is None
- assert table2.get("$1.name") is None
- assert table2.get("$2.id") is None
- finally:
- temp_path.unlink()
-
- def test_parquet_with_index(self):
- """Test Parquet with row index preservation."""
- # Create table with custom row keys
- table = Table()
- table.append({"name": "Alice", "age": 30}, custom_key="alice")
- table.append({"name": "Bob", "age": 25}, custom_key="bob")
- table.append({"name": "Charlie", "age": 35}, custom_key="charlie")
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write with index
- table.to_parquet(temp_path, index=True)
-
- # Read back
- table2 = Table.from_parquet(temp_path)
-
- # Verify index was written
- assert "_index" in table2.columns
- assert table2.get("$0._index") == "$alice"
- assert table2.get("$1._index") == "$bob"
- assert table2.get("$2._index") == "$charlie"
- finally:
- temp_path.unlink()
-
- def test_parquet_error_handling(self):
- """Test error handling for Parquet operations."""
- # Test file not found
- with pytest.raises(FileNotFoundError):
- Table.from_parquet("/nonexistent/file.parquet")
-
- def test_parquet_filters(self):
- """Test row filtering when reading Parquet."""
- data = [
- {"year": 2020, "month": 1, "sales": 100},
- {"year": 2020, "month": 2, "sales": 150},
- {"year": 2021, "month": 1, "sales": 200},
- {"year": 2021, "month": 2, "sales": 250},
- {"year": 2022, "month": 1, "sales": 300},
- ]
- table = Table(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as f:
- temp_path = Path(f.name)
-
- try:
- # Write all data
- table.to_parquet(temp_path)
-
- # Read with filters
- table2 = Table.from_parquet(
- temp_path, filters=[("year", ">=", 2021), ("month", "==", 1)]
- )
-
- # Should only get 2021 and 2022 January data
- assert len(table2) == 2
- rows = list(table2)
- assert all(row["month"] == 1 for row in rows)
- assert all(row["year"] >= 2021 for row in rows)
- finally:
- temp_path.unlink()
-
- def test_parquet_partitioned_dataset(self):
- """Test writing partitioned Parquet dataset."""
- data = [
- {"year": 2020, "month": 1, "day": 1, "sales": 100},
- {"year": 2020, "month": 1, "day": 2, "sales": 150},
- {"year": 2020, "month": 2, "day": 1, "sales": 200},
- {"year": 2021, "month": 1, "day": 1, "sales": 250},
- {"year": 2021, "month": 2, "day": 1, "sales": 300},
- ]
- table = Table(data)
-
- with tempfile.TemporaryDirectory() as temp_dir:
- temp_path = Path(temp_dir) / "partitioned_data"
-
- # Write partitioned dataset
- table.to_parquet(temp_path, partition_cols=["year", "month"])
-
- # Verify directory structure was created
- assert temp_path.exists()
- year_dirs = list(temp_path.glob("year=*"))
- assert len(year_dirs) == 2 # 2020 and 2021
-
- # Read back the partitioned dataset
- table2 = Table.from_parquet(temp_path)
-
- # Data should be preserved (though order might differ)
- assert len(table2) == len(table)
- all_sales = sorted([row["sales"] for row in table2])
- expected_sales = sorted([row["sales"] for row in table])
- assert all_sales == expected_sales
-
-
-class TestTableIOIntegration:
- """Integration tests for Table I/O functionality."""
-
- def test_csv_to_parquet_conversion(self):
- """Test converting CSV to Parquet format."""
- pytest.importorskip("pyarrow")
-
- # Create CSV file
- with tempfile.NamedTemporaryFile(
- mode="w", suffix=".csv", delete=False
- ) as csv_file:
- csv_path = Path(csv_file.name)
- writer = csv.writer(csv_file)
- writer.writerow(["id", "name", "value"])
- writer.writerow([1, "Alice", 123.45])
- writer.writerow([2, "Bob", 678.90])
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as pq_file:
- parquet_path = Path(pq_file.name)
-
- try:
- # Read CSV and convert to Parquet
- table = Table.from_csv(csv_path, dtypes={"id": int, "value": float})
- table.to_parquet(parquet_path)
-
- # Read Parquet and verify
- table2 = Table.from_parquet(parquet_path)
-
- assert len(table2) == 2
- assert table2.get("$0.id") == 1
- assert table2.get("$0.name") == "Alice"
- assert abs(table2.get("$0.value") - 123.45) < 0.001
- finally:
- csv_path.unlink()
- parquet_path.unlink()
-
- def test_data_pipeline_example(self):
- """Test the data pipeline example from the spec."""
- pytest.importorskip("pyarrow")
-
- # Create sample data
- data = [
- {"id": 1, "name": "Alice", "status": "active", "value": 100},
- {"id": 2, "name": "Bob", "status": "inactive", "value": 200},
- {"id": 3, "name": "Charlie", "status": "active", "value": 300},
- ]
-
- with tempfile.NamedTemporaryFile(
- mode="w", suffix=".csv", delete=False
- ) as csv_file:
- csv_path = Path(csv_file.name)
- writer = csv.DictWriter(
- csv_file, fieldnames=["id", "name", "status", "value"]
- )
- writer.writeheader()
- writer.writerows(data)
-
- with tempfile.NamedTemporaryFile(suffix=".parquet", delete=False) as pq_file:
- parquet_path = Path(pq_file.name)
-
- try:
- # Load, transform, and save
- table = Table.from_csv(csv_path)
- processed = (
- table.filter("status = 'active'")
- .map(lambda row: {**row, "value_doubled": int(row["value"]) * 2})
- .select("id, name, value_doubled")
- )
- processed.to_parquet(parquet_path)
-
- # Verify results
- result = Table.from_parquet(parquet_path)
- assert len(result) == 2 # Only active records
- assert set(result.columns) == {"id", "name", "value_doubled"}
- assert result.get("value_doubled") == [200, 600] # 100*2, 300*2
- finally:
- csv_path.unlink()
- parquet_path.unlink()
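
One detail worth pulling out of the round-trip tests: nested dicts and lists survive CSV only because they are serialized as JSON strings on write and decoded on read. A standard-library sketch of that convention (this mirrors the behavior the tests assert, not chidian's actual `to_csv`/`from_csv` code):

```python
import csv
import io
import json

rows = [{"id": 1, "metadata": {"type": "A"}, "tags": ["red", "blue"]}]

# Write: JSON-encode any nested value so it fits in one CSV cell.
buf = io.StringIO()
writer = csv.DictWriter(buf, fieldnames=["id", "metadata", "tags"])
writer.writeheader()
for row in rows:
    writer.writerow({k: json.dumps(v) if isinstance(v, (dict, list)) else v
                     for k, v in row.items()})

# Read: decode cells that look like JSON containers.
buf.seek(0)
decoded = [{k: json.loads(v) if v.startswith(("{", "[")) else v
            for k, v in raw.items()}
           for raw in csv.DictReader(buf)]

assert decoded[0]["metadata"]["type"] == "A"
assert decoded[0]["tags"] == ["red", "blue"]
```

Scalar columns still need the separate `dtypes=` pass, since everything a `csv` reader yields is a string (note that `decoded[0]["id"]` is `"1"` here).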
diff --git a/tests/test_table_pandas.py b/tests/test_table_pandas.py
deleted file mode 100644
index c925f19..0000000
--- a/tests/test_table_pandas.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""Tests for Table pandas interoperability."""
-
-import pytest
-
-from chidian.table import Table
-
-pd = pytest.importorskip("pandas")
-
-
-def test_to_pandas_basic():
- """Test basic conversion to pandas DataFrame."""
- t = Table([{"a": 1}, {"b": 2}])
- df = t.to_pandas()
- assert set(df.columns) >= {"a", "b"}
- assert len(df) == 2
-
-
-def test_to_pandas_index():
- """Test conversion with index from row keys."""
- t = Table([{"a": 1}, {"a": 2}])
- df = t.to_pandas(index=True)
- assert df.index.name == "_index"
- assert len(df) == 2
- # Check that index values are correct (stripped of $)
- assert list(df.index) == ["0", "1"]
-
-
-def test_to_pandas_custom_index_name():
- """Test conversion with custom index name."""
- t = Table([{"value": 10}, {"value": 20}])
- df = t.to_pandas(index=True, index_name="row_id")
- assert df.index.name == "row_id"
- assert len(df) == 2
-
-
-def test_to_pandas_with_custom_keys():
- """Test conversion with custom row keys."""
- t = Table()
- t.append({"name": "Alice", "age": 30}, custom_key="alice")
- t.append({"name": "Bob", "age": 25}, custom_key="bob")
-
- df = t.to_pandas(index=True)
- assert df.index.name == "_index"
- assert list(df.index) == ["alice", "bob"]
- assert list(df["name"]) == ["Alice", "Bob"]
- assert list(df["age"]) == [30, 25]
-
-
-def test_to_pandas_sparse_data():
- """Test conversion with sparse/missing data."""
- t = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane"}, # Missing age
- {"age": 25}, # Missing name
- ]
- )
-
- df = t.to_pandas()
- assert len(df) == 3
- assert pd.isna(df.iloc[1]["age"])
- assert pd.isna(df.iloc[2]["name"])
-
-
-def test_to_pandas_empty_table():
- """Test conversion of empty table."""
- t = Table()
- df = t.to_pandas()
- assert len(df) == 0
- assert list(df.columns) == []
-
-
-def test_to_pandas_nested_data():
- """Test conversion with nested structures."""
- t = Table(
- [
- {"id": 1, "metadata": {"color": "red", "size": "large"}},
- {"id": 2, "metadata": {"color": "blue", "size": "small"}},
- ]
- )
-
- df = t.to_pandas()
- assert len(df) == 2
- # Nested dicts should be preserved as-is
- assert isinstance(df.iloc[0]["metadata"], dict)
- assert df.iloc[0]["metadata"]["color"] == "red"
diff --git a/tests/test_table_polars.py b/tests/test_table_polars.py
deleted file mode 100644
index a32c58b..0000000
--- a/tests/test_table_polars.py
+++ /dev/null
@@ -1,95 +0,0 @@
-"""Tests for Table polars interoperability."""
-
-import pytest
-
-from chidian.table import Table
-
-pl = pytest.importorskip("polars")
-
-
-def test_to_polars_basic():
- """Test basic conversion to polars DataFrame."""
- t = Table([{"a": 1}, {"b": 2}])
- df = t.to_polars()
- assert set(df.columns) >= {"a", "b"}
- assert df.height == 2
-
-
-def test_to_polars_add_index():
- """Test conversion with index column from row keys."""
- t = Table([{"a": 1}, {"a": 2}])
- df = t.to_polars(add_index=True)
- assert "_index" in df.columns
- assert df.height == 2
- # Check that index values are correct (stripped of $)
- assert df["_index"].to_list() == ["0", "1"]
-
-
-def test_to_polars_custom_index_name():
- """Test conversion with custom index column name."""
- t = Table([{"value": 10}, {"value": 20}])
- df = t.to_polars(add_index=True, index_name="row_id")
- assert "row_id" in df.columns
- assert df.height == 2
-
-
-def test_to_polars_with_custom_keys():
- """Test conversion with custom row keys."""
- t = Table()
- t.append({"name": "Alice", "age": 30}, custom_key="alice")
- t.append({"name": "Bob", "age": 25}, custom_key="bob")
-
- df = t.to_polars(add_index=True)
- assert "_index" in df.columns
- assert df["_index"].to_list() == ["alice", "bob"]
- assert df["name"].to_list() == ["Alice", "Bob"]
- assert df["age"].to_list() == [30, 25]
-
-
-def test_to_polars_sparse_data():
- """Test conversion with sparse/missing data."""
- t = Table(
- [
- {"name": "John", "age": 30},
- {"name": "Jane"}, # Missing age
- {"age": 25}, # Missing name
- ]
- )
-
- df = t.to_polars()
- assert df.height == 3
- assert df["age"][1] is None
- assert df["name"][2] is None
-
-
-def test_to_polars_empty_table():
- """Test conversion of empty table."""
- t = Table()
- df = t.to_polars()
- assert df.height == 0
- assert list(df.columns) == []
-
-
-def test_to_polars_nested_data():
- """Test conversion with nested structures."""
- t = Table(
- [
- {"id": 1, "metadata": {"color": "red", "size": "large"}},
- {"id": 2, "metadata": {"color": "blue", "size": "small"}},
- ]
- )
-
- df = t.to_polars()
- assert df.height == 2
- # Nested dicts should be preserved as struct type
- metadata_values = df["metadata"].to_list()
- assert isinstance(metadata_values[0], dict)
- assert metadata_values[0]["color"] == "red"
-
-
-def test_to_polars_no_index_by_default():
- """Test that index is not added by default."""
- t = Table([{"x": 1}, {"x": 2}, {"x": 3}])
- df = t.to_polars()
- assert "_index" not in df.columns
- assert list(df.columns) == ["x"]
diff --git a/tests/test_types.py b/tests/test_types.py
deleted file mode 100644
index b4ce275..0000000
--- a/tests/test_types.py
+++ /dev/null
@@ -1,177 +0,0 @@
-"""Comprehensive tests for special types (DROP, KEEP) with Mapper."""
-
-from typing import Any
-
-from chidian import DROP, KEEP, Mapper, get
-from tests.structstest import (
- ComplexPersonData,
- FlatPersonData,
- KeepTestTarget,
- SimpleTarget,
- SourceData,
-)
-
-
-class TestSeedProcessing:
- """Test special type value processing independently."""
-
- def test_drop_enum_values(self) -> None:
- """Test DROP enum values and level property."""
- assert DROP.THIS_OBJECT.value == -1
- assert DROP.PARENT.value == -2
- assert DROP.GRANDPARENT.value == -3
- assert DROP.GREATGRANDPARENT.value == -4
-
- assert DROP.THIS_OBJECT.level == -1
- assert DROP.PARENT.level == -2
-
- def test_drop_process_method(self) -> None:
- """Test DROP process method returns itself."""
- result = DROP.THIS_OBJECT.process({"test": "data"})
- # The Python implementation returns the DROP enum itself
- assert result == DROP.THIS_OBJECT
- assert result.level == DROP.THIS_OBJECT.value
-
- def test_keep_value_preservation(self) -> None:
- """Test KEEP preserves values correctly."""
- # Test basic value preservation
- assert KEEP({}).value == {}
- assert KEEP([]).value == []
- assert KEEP("").value == ""
- assert KEEP(None).value is None
-
- # Test process method returns the wrapped value
- keep_obj = KEEP("test_value")
- assert keep_obj.process({"irrelevant": "data"}) == "test_value"
-
- def test_keep_complex_values(self) -> None:
- """Test KEEP with complex data structures."""
- complex_data = {"nested": {"list": [1, 2, 3]}, "simple": "value"}
- keep_obj = KEEP(complex_data)
-
- assert keep_obj.value == complex_data
- assert keep_obj.process({}) == complex_data
-
-
-class TestSeedsWithMapper:
- """Test special type integration with Mapper."""
-
- def test_simple_data_flow_without_special_types(
- self, simple_data: dict[str, Any]
- ) -> None:
- """Test baseline data flow without any special type objects."""
- from chidian.partials import get as p_get
-
- mapping = {
- "patient_id": p_get("data.patient.id"),
- "is_active": p_get("data.patient.active"),
- }
-
- mapper = Mapper(
- transformations=mapping,
- min_input_schemas=[SourceData],
- output_schema=SimpleTarget,
- )
- result = mapper(SourceData.model_validate(simple_data))
-
- assert isinstance(result, SimpleTarget)
- assert result.patient_id == "abc123"
- assert result.is_active is True
-
- def test_keep_in_transformation(self) -> None:
- """Test KEEP objects in data transformations.
-
- Note: This test demonstrates that special type processing is not yet implemented
- in the current Mapper system. KEEP objects need to be processed
- to extract their values before Pydantic validation.
- """
- # For now, manually process KEEP objects since automatic processing isn't implemented
- keep_obj = KEEP("processed_string")
-
- mapping = {
- "processed_value": lambda _data: keep_obj.process({}),
- "regular_value": lambda _data: "regular_string",
- }
-
- mapper = Mapper(
- transformations=mapping,
- min_input_schemas=[SourceData],
- output_schema=KeepTestTarget,
- )
-
- source = SourceData(data={})
- result = mapper(source)
-
- # Manually processed KEEP objects work
- assert isinstance(result, KeepTestTarget)
- assert result.processed_value == "processed_string"
- assert result.regular_value == "regular_string"
-
- def test_complex_transformation_with_a_b_data(self, test_A: dict[str, Any]) -> None:
- """Test complex transformation using A.json data structure."""
-
- def full_name_transform(data: dict) -> str:
- """Build full name from name parts."""
- first_name = get(data, "name.first", default="")
- given_names = get(data, "name.given", default=[])
- suffix = get(data, "name.suffix", default="")
-
- name_parts = [first_name] + given_names
- if suffix:
- name_parts.append(suffix)
- return " ".join(filter(None, name_parts))
-
- def current_address_transform(data: dict) -> str:
- """Format current address."""
- current_addr = get(data, "address.current", default={})
- street_lines = get(current_addr, "street", default=[])
- city = get(current_addr, "city", default="")
- state = get(current_addr, "state", default="")
- postal = get(current_addr, "postal_code", default="")
- country = get(current_addr, "country", default="")
-
- return "\n".join(
- filter(None, street_lines + [city, state, postal, country])
- )
-
- def last_previous_address_transform(data: dict) -> str:
- """Get last previous address."""
- previous_addrs = get(data, "address.previous", default=[])
- if not previous_addrs:
- return ""
-
- last_addr = previous_addrs[-1]
- prev_street = get(last_addr, "street", default=[])
- prev_city = get(last_addr, "city", default="")
- prev_state = get(last_addr, "state", default="")
- prev_postal = get(last_addr, "postal_code", default="")
- prev_country = get(last_addr, "country", default="")
-
- return "\n".join(
- filter(
- None,
- prev_street + [prev_city, prev_state, prev_postal, prev_country],
- )
- )
-
- mapping: dict[str, Any] = {
- "full_name": full_name_transform,
- "current_address": current_address_transform,
- "last_previous_address": last_previous_address_transform,
- }
-
- mapper = Mapper(
- transformations=mapping,
- min_input_schemas=[ComplexPersonData],
- output_schema=FlatPersonData,
- )
-
- source = ComplexPersonData.model_validate(test_A)
- result = mapper(source)
-
- assert isinstance(result, FlatPersonData)
- assert "Bob" in result.full_name
- assert "Figgens" in result.full_name
- assert "Sr." in result.full_name
- assert "123 Privet Drive" in result.current_address
- assert "Surrey" in result.current_address
diff --git a/tests/test_validation.py b/tests/test_validation.py
new file mode 100644
index 0000000..93c7452
--- /dev/null
+++ b/tests/test_validation.py
@@ -0,0 +1,221 @@
+"""
+Tests for chidian.validation module.
+"""
+
+import pytest
+
+from chidian.validation import (
+ Between,
+ DictV,
+ Err,
+ Gt,
+ Gte,
+ InRange,
+ InSet,
+ IsType,
+ ListV,
+ Lt,
+ Lte,
+ Matches,
+ Ok,
+ Optional,
+ Predicate,
+ Required,
+ V,
+ to_pydantic,
+ to_validator,
+ validate,
+)
+
+
+class TestV:
+ def test_simple_check(self):
+ is_positive = V(check=lambda x: x > 0)
+ assert isinstance(is_positive(5), Ok)
+ assert isinstance(is_positive(-1), Err)
+
+ def test_required(self):
+ req = V(check=lambda _: True, required=True)
+ assert isinstance(req(None), Err)
+ assert isinstance(req("value"), Ok)
+
+ def test_optional_none(self):
+ opt = V(check=lambda x: isinstance(x, str), required=False)
+ assert isinstance(opt(None), Ok)
+ assert isinstance(opt("hello"), Ok)
+
+ def test_and_combination(self):
+ is_str_nonempty = IsType(str) & V(check=lambda x: len(x) > 0)
+ assert isinstance(is_str_nonempty("hello"), Ok)
+ assert isinstance(is_str_nonempty(""), Err)
+ assert isinstance(is_str_nonempty(123), Err)
+
+ def test_or_combination(self):
+ str_or_int = IsType(str) | IsType(int)
+ assert isinstance(str_or_int("hello"), Ok)
+ assert isinstance(str_or_int(42), Ok)
+ assert isinstance(str_or_int(3.14), Err)
+
+ def test_type_as_validator(self):
+ combined = str & Required()
+ assert isinstance(combined("hello"), Ok)
+ assert isinstance(combined(None), Err)
+
+ def test_with_message(self):
+ v = V(check=lambda x: x > 0).with_message("Must be positive")
+ result = v(-1)
+ assert isinstance(result, Err)
+ assert result.error[1] == "Must be positive"
+
+
+class TestValidators:
+ def test_required(self):
+ v = Required(str)
+ assert isinstance(v("hello"), Ok)
+ assert isinstance(v(None), Err)
+
+ def test_required_bare(self):
+ v = Required()
+ assert isinstance(v("anything"), Ok)
+ assert isinstance(v(None), Err)
+
+ def test_optional(self):
+ v = Optional(str)
+ assert isinstance(v(None), Ok)
+ assert isinstance(v("hello"), Ok)
+ assert isinstance(v(123), Err)
+
+ def test_istype(self):
+ v = IsType(int)
+ assert isinstance(v(42), Ok)
+ assert isinstance(v("42"), Err)
+
+ def test_inrange(self):
+ v = InRange(1, 5)
+ assert isinstance(v([1, 2, 3]), Ok)
+ assert isinstance(v([]), Err)
+ assert isinstance(v([1, 2, 3, 4, 5, 6]), Err)
+
+ def test_inset(self):
+ v = InSet({"a", "b", "c"})
+ assert isinstance(v("a"), Ok)
+ assert isinstance(v("d"), Err)
+
+ def test_matches(self):
+ v = Matches(r"^[a-z]+$")
+ assert isinstance(v("hello"), Ok)
+ assert isinstance(v("Hello"), Err)
+ assert isinstance(v(123), Err)
+
+ def test_predicate(self):
+ v = Predicate(lambda x: x > 0, "Must be positive")
+ assert isinstance(v(5), Ok)
+ assert isinstance(v(-5), Err)
+
+ def test_gt_gte_lt_lte(self):
+ assert isinstance(Gt(5)(6), Ok)
+ assert isinstance(Gt(5)(5), Err)
+ assert isinstance(Gte(5)(5), Ok)
+ assert isinstance(Lt(5)(4), Ok)
+ assert isinstance(Lt(5)(5), Err)
+ assert isinstance(Lte(5)(5), Ok)
+
+ def test_between(self):
+ v = Between(0, 10)
+ assert isinstance(v(5), Ok)
+ assert isinstance(v(0), Ok)
+ assert isinstance(v(10), Ok)
+ assert isinstance(v(-1), Err)
+ assert isinstance(v(11), Err)
+
+ def test_between_exclusive(self):
+ v = Between(0, 10, inclusive=False)
+ assert isinstance(v(5), Ok)
+ assert isinstance(v(0), Err)
+ assert isinstance(v(10), Err)
+
+
+class TestValidate:
+ def test_simple_schema(self):
+ schema = {"name": str, "age": int}
+ assert isinstance(validate({"name": "Alice", "age": 30}, schema), Ok)
+ result = validate({"name": 123, "age": 30}, schema)
+ assert isinstance(result, Err)
+
+ def test_nested_schema(self):
+ schema = {
+ "user": {
+ "name": Required(str),
+ "email": Optional(str),
+ }
+ }
+ valid = {"user": {"name": "Alice"}}
+ invalid = {"user": {"email": "a@b.com"}} # Missing required name
+
+ assert isinstance(validate(valid, schema), Ok)
+ assert isinstance(validate(invalid, schema), Err)
+
+ def test_list_schema(self):
+ schema = {"tags": [str]}
+ assert isinstance(validate({"tags": ["a", "b"]}, schema), Ok)
+ assert isinstance(validate({"tags": ["a", 1]}, schema), Err)
+
+ def test_error_paths(self):
+ schema = {"user": {"name": Required(str)}}
+ result = validate({"user": {"name": None}}, schema)
+ assert isinstance(result, Err)
+ errors = result.error
+ assert len(errors) == 1
+ path, _ = errors[0]
+ assert path == ("user", "name")
+
+
+class TestToPydantic:
+ def test_simple_model(self):
+ schema = {
+ "name": Required(str),
+ "age": int,
+ }
+ User = to_pydantic("User", schema)
+ user = User(name="Alice", age=30)
+ assert user.name == "Alice"
+ assert user.age == 30
+
+ def test_optional_fields(self):
+ schema = {
+ "name": Required(str),
+ "email": Optional(str),
+ }
+ User = to_pydantic("User", schema)
+ user = User(name="Alice")
+ assert user.name == "Alice"
+ assert user.email is None
+
+ def test_pydantic_validation(self):
+ from pydantic import ValidationError
+
+ schema = {"name": Required(str)}
+ User = to_pydantic("User", schema)
+
+ with pytest.raises(ValidationError):
+ User() # Missing required field
+
+
+class TestToValidator:
+ def test_type_coercion(self):
+ v = to_validator(str)
+ assert isinstance(v, V)
+ assert isinstance(v("hello"), Ok)
+
+ def test_dict_coercion(self):
+ v = to_validator({"name": str})
+ assert isinstance(v, DictV)
+
+ def test_list_coercion(self):
+ v = to_validator([str])
+ assert isinstance(v, ListV)
+
+ def test_callable_coercion(self):
+ v = to_validator(lambda x: x > 0)
+ assert isinstance(v, V)
+ assert isinstance(v(5), Ok)
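
The combinator tests describe a small algebra: validators are callables returning `Ok` or `Err`, composed with `&` (both must pass) and `|` (either may pass). A compact sketch of that pattern; these are hypothetical re-implementations that satisfy the same style of assertions, not `chidian.validation` itself:

```python
from dataclasses import dataclass
from typing import Any, Callable

@dataclass
class Ok:
    value: Any

@dataclass
class Err:
    error: Any

class V:
    def __init__(self, check: Callable[[Any], bool], message: str = "failed"):
        self.check, self.message = check, message

    def __call__(self, value: Any) -> Ok | Err:
        return Ok(value) if self.check(value) else Err(self.message)

    def __and__(self, other: "V") -> "V":
        # Both sides must accept; short-circuits like `and`.
        return V(lambda x: isinstance(self(x), Ok) and isinstance(other(x), Ok))

    def __or__(self, other: "V") -> "V":
        return V(lambda x: isinstance(self(x), Ok) or isinstance(other(x), Ok))

is_str = V(lambda x: isinstance(x, str), "not a str")
nonempty = V(lambda x: len(x) > 0, "empty")

assert isinstance((is_str & nonempty)("hello"), Ok)
assert isinstance((is_str & nonempty)(""), Err)
assert isinstance((is_str | V(lambda x: isinstance(x, int)))(42), Ok)
```

In this sketch's terms, conveniences like `Required`, `Between`, or `Matches` would just be prebuilt `V` instances over the corresponding predicates.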
diff --git a/uv.lock b/uv.lock
index 9b7e3c0..b7af557 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,19 +1,10 @@
version = 1
-requires-python = ">=3.8"
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
- "python_full_version < '3.9'",
-]
+requires-python = ">=3.10"
[[package]]
name = "annotated-types"
version = "0.7.0"
source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "typing-extensions", marker = "python_full_version < '3.9'" },
-]
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
@@ -30,50 +21,24 @@ wheels = [
[[package]]
name = "chidian"
-version = "0.1.5"
+version = "0.2.0"
source = { editable = "." }
dependencies = [
{ name = "parsimonious" },
- { name = "pyarrow", version = "17.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "pyarrow", version = "21.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
- { name = "pydantic", version = "2.10.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "pydantic", version = "2.11.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
-]
-
-[package.optional-dependencies]
-dfs = [
- { name = "pandas", version = "2.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "pandas", version = "2.3.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
- { name = "polars", version = "1.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "polars", version = "1.32.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
-]
-pandas = [
- { name = "pandas", version = "2.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "pandas", version = "2.3.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
-]
-polars = [
- { name = "polars", version = "1.8.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "polars", version = "1.32.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+ { name = "pydantic" },
]
[package.dev-dependencies]
dev = [
- { name = "hypothesis", version = "6.113.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "hypothesis", version = "6.131.32", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
- { name = "mypy", version = "1.14.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "mypy", version = "1.16.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+ { name = "hypothesis" },
+ { name = "mypy" },
{ name = "pytest" },
{ name = "ruff" },
]
[package.metadata]
requires-dist = [
- { name = "pandas", marker = "extra == 'dfs'", specifier = ">=2.0" },
- { name = "pandas", marker = "extra == 'pandas'", specifier = ">=2.0" },
{ name = "parsimonious", specifier = ">=0.10.0" },
- { name = "polars", marker = "extra == 'dfs'", specifier = ">=0.20" },
- { name = "polars", marker = "extra == 'polars'", specifier = ">=0.20" },
- { name = "pyarrow", specifier = ">=17.0.0" },
{ name = "pydantic", specifier = ">=2.10.6,<3.0.0" },
]
@@ -99,43 +64,21 @@ name = "exceptiongroup"
version = "1.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "typing-extensions", marker = "python_full_version < '3.11'" },
+ { name = "typing-extensions", marker = "python_full_version < '3.13'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674 },
]
-[[package]]
-name = "hypothesis"
-version = "6.113.0"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "attrs", marker = "python_full_version < '3.9'" },
- { name = "exceptiongroup", marker = "python_full_version < '3.9'" },
- { name = "sortedcontainers", marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/28/32/6513cd7256f38c19a6c8a1d5ce9792bcd35c7f11651989994731f0e97672/hypothesis-6.113.0.tar.gz", hash = "sha256:5556ac66fdf72a4ccd5d237810f7cf6bdcd00534a4485015ef881af26e20f7c7", size = 408897 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/14/fa/4acb477b86a94571958bd337eae5baf334d21b8c98a04b594d0dad381ba8/hypothesis-6.113.0-py3-none-any.whl", hash = "sha256:d539180eb2bb71ed28a23dfe94e67c851f9b09f3ccc4125afad43f17e32e2bad", size = 469790 },
-]
-
[[package]]
name = "hypothesis"
version = "6.131.32"
source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
dependencies = [
- { name = "attrs", marker = "python_full_version >= '3.9'" },
- { name = "exceptiongroup", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" },
- { name = "sortedcontainers", marker = "python_full_version >= '3.9'" },
+ { name = "attrs" },
+ { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
+ { name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8d/e6/de036f7b7a38807e7f39dfd56a1048220a27502e65546bfaa96fc278f369/hypothesis-6.131.32.tar.gz", hash = "sha256:9ef3376a6bf1f4a060fa150fe71fb5cabec93e6ebae0cb9bf93d44b55bd8f8d6", size = 443838 }
wheels = [
@@ -151,73 +94,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 },
]
-[[package]]
-name = "mypy"
-version = "1.14.1"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "mypy-extensions", marker = "python_full_version < '3.9'" },
- { name = "tomli", marker = "python_full_version < '3.9'" },
- { name = "typing-extensions", marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 },
- { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 },
- { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172 },
- { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 },
- { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 },
- { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 },
- { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 },
- { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 },
- { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 },
- { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 },
- { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 },
- { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 },
- { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 },
- { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 },
- { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 },
- { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 },
- { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 },
- { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 },
- { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097 },
- { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728 },
- { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965 },
- { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660 },
- { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198 },
- { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276 },
- { url = "https://files.pythonhosted.org/packages/39/02/1817328c1372be57c16148ce7d2bfcfa4a796bedaed897381b1aad9b267c/mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31", size = 11143050 },
- { url = "https://files.pythonhosted.org/packages/b9/07/99db9a95ece5e58eee1dd87ca456a7e7b5ced6798fd78182c59c35a7587b/mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6", size = 10321087 },
- { url = "https://files.pythonhosted.org/packages/9a/eb/85ea6086227b84bce79b3baf7f465b4732e0785830726ce4a51528173b71/mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319", size = 12066766 },
- { url = "https://files.pythonhosted.org/packages/4b/bb/f01bebf76811475d66359c259eabe40766d2f8ac8b8250d4e224bb6df379/mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac", size = 12787111 },
- { url = "https://files.pythonhosted.org/packages/2f/c9/84837ff891edcb6dcc3c27d85ea52aab0c4a34740ff5f0ccc0eb87c56139/mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b", size = 12974331 },
- { url = "https://files.pythonhosted.org/packages/84/5f/901e18464e6a13f8949b4909535be3fa7f823291b8ab4e4b36cfe57d6769/mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837", size = 9763210 },
- { url = "https://files.pythonhosted.org/packages/ca/1f/186d133ae2514633f8558e78cd658070ba686c0e9275c5a5c24a1e1f0d67/mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", size = 11200493 },
- { url = "https://files.pythonhosted.org/packages/af/fc/4842485d034e38a4646cccd1369f6b1ccd7bc86989c52770d75d719a9941/mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", size = 10357702 },
- { url = "https://files.pythonhosted.org/packages/b4/e6/457b83f2d701e23869cfec013a48a12638f75b9d37612a9ddf99072c1051/mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", size = 12091104 },
- { url = "https://files.pythonhosted.org/packages/f1/bf/76a569158db678fee59f4fd30b8e7a0d75bcbaeef49edd882a0d63af6d66/mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", size = 12830167 },
- { url = "https://files.pythonhosted.org/packages/43/bc/0bc6b694b3103de9fed61867f1c8bd33336b913d16831431e7cb48ef1c92/mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", size = 13013834 },
- { url = "https://files.pythonhosted.org/packages/b0/79/5f5ec47849b6df1e6943d5fd8e6632fbfc04b4fd4acfa5a5a9535d11b4e2/mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", size = 9781231 },
- { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 },
-]
-
[[package]]
name = "mypy"
version = "1.16.1"
source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
dependencies = [
- { name = "mypy-extensions", marker = "python_full_version >= '3.9'" },
- { name = "pathspec", marker = "python_full_version >= '3.9'" },
- { name = "tomli", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" },
- { name = "typing-extensions", marker = "python_full_version >= '3.9'" },
+ { name = "mypy-extensions" },
+ { name = "pathspec" },
+ { name = "tomli", marker = "python_full_version < '3.11'" },
+ { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/81/69/92c7fa98112e4d9eb075a239caa4ef4649ad7d441545ccffbd5e34607cbb/mypy-1.16.1.tar.gz", hash = "sha256:6bd00a0a2094841c5e47e7374bb42b83d64c527a502e3334e1173a0c24437bab", size = 3324747 }
wheels = [
@@ -245,12 +130,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/7e/81ca3b074021ad9775e5cb97ebe0089c0f13684b066a750b7dc208438403/mypy-1.16.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:051e1677689c9d9578b9c7f4d206d763f9bbd95723cd1416fad50db49d52f359", size = 12715634 },
{ url = "https://files.pythonhosted.org/packages/e9/95/bdd40c8be346fa4c70edb4081d727a54d0a05382d84966869738cfa8a497/mypy-1.16.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d5d2309511cc56c021b4b4e462907c2b12f669b2dbeb68300110ec27723971be", size = 12895584 },
{ url = "https://files.pythonhosted.org/packages/5a/fd/d486a0827a1c597b3b48b1bdef47228a6e9ee8102ab8c28f944cb83b65dc/mypy-1.16.1-cp313-cp313-win_amd64.whl", hash = "sha256:4f58ac32771341e38a853c5d0ec0dfe27e18e27da9cdb8bbc882d2249c71a3ee", size = 9573886 },
- { url = "https://files.pythonhosted.org/packages/49/5e/ed1e6a7344005df11dfd58b0fdd59ce939a0ba9f7ed37754bf20670b74db/mypy-1.16.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fc688329af6a287567f45cc1cefb9db662defeb14625213a5b7da6e692e2069", size = 10959511 },
- { url = "https://files.pythonhosted.org/packages/30/88/a7cbc2541e91fe04f43d9e4577264b260fecedb9bccb64ffb1a34b7e6c22/mypy-1.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e198ab3f55924c03ead626ff424cad1732d0d391478dfbf7bb97b34602395da", size = 10075555 },
- { url = "https://files.pythonhosted.org/packages/93/f7/c62b1e31a32fbd1546cca5e0a2e5f181be5761265ad1f2e94f2a306fa906/mypy-1.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09aa4f91ada245f0a45dbc47e548fd94e0dd5a8433e0114917dc3b526912a30c", size = 11874169 },
- { url = "https://files.pythonhosted.org/packages/c8/15/db580a28034657fb6cb87af2f8996435a5b19d429ea4dcd6e1c73d418e60/mypy-1.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13c7cd5b1cb2909aa318a90fd1b7e31f17c50b242953e7dd58345b2a814f6383", size = 12610060 },
- { url = "https://files.pythonhosted.org/packages/ec/78/c17f48f6843048fa92d1489d3095e99324f2a8c420f831a04ccc454e2e51/mypy-1.16.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:58e07fb958bc5d752a280da0e890c538f1515b79a65757bbdc54252ba82e0b40", size = 12875199 },
- { url = "https://files.pythonhosted.org/packages/bc/d6/ed42167d0a42680381653fd251d877382351e1bd2c6dd8a818764be3beb1/mypy-1.16.1-cp39-cp39-win_amd64.whl", hash = "sha256:f895078594d918f93337a505f8add9bd654d1a24962b4c6ed9390e12531eb31b", size = 9487033 },
{ url = "https://files.pythonhosted.org/packages/cf/d3/53e684e78e07c1a2bf7105715e5edd09ce951fc3f47cf9ed095ec1b7a037/mypy-1.16.1-py3-none-any.whl", hash = "sha256:5fc2ac4027d0ef28d6ba69a0343737a23c4d1b83672bf38d1fe237bdc0643b37", size = 2265923 },
]
@@ -263,130 +142,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 },
]
-[[package]]
-name = "numpy"
-version = "1.24.4"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
- "python_full_version < '3.9'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/a4/9b/027bec52c633f6556dba6b722d9a0befb40498b9ceddd29cbe67a45a127c/numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463", size = 10911229 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/6b/80/6cdfb3e275d95155a34659163b83c09e3a3ff9f1456880bec6cc63d71083/numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64", size = 19789140 },
- { url = "https://files.pythonhosted.org/packages/64/5f/3f01d753e2175cfade1013eea08db99ba1ee4bdb147ebcf3623b75d12aa7/numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1", size = 13854297 },
- { url = "https://files.pythonhosted.org/packages/5a/b3/2f9c21d799fa07053ffa151faccdceeb69beec5a010576b8991f614021f7/numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4", size = 13995611 },
- { url = "https://files.pythonhosted.org/packages/10/be/ae5bf4737cb79ba437879915791f6f26d92583c738d7d960ad94e5c36adf/numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6", size = 17282357 },
- { url = "https://files.pythonhosted.org/packages/c0/64/908c1087be6285f40e4b3e79454552a701664a079321cff519d8c7051d06/numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc", size = 12429222 },
- { url = "https://files.pythonhosted.org/packages/22/55/3d5a7c1142e0d9329ad27cece17933b0e2ab4e54ddc5c1861fbfeb3f7693/numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e", size = 14841514 },
- { url = "https://files.pythonhosted.org/packages/a9/cc/5ed2280a27e5dab12994c884f1f4d8c3bd4d885d02ae9e52a9d213a6a5e2/numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810", size = 19775508 },
- { url = "https://files.pythonhosted.org/packages/c0/bc/77635c657a3668cf652806210b8662e1aff84b818a55ba88257abf6637a8/numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254", size = 13840033 },
- { url = "https://files.pythonhosted.org/packages/a7/4c/96cdaa34f54c05e97c1c50f39f98d608f96f0677a6589e64e53104e22904/numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7", size = 13991951 },
- { url = "https://files.pythonhosted.org/packages/22/97/dfb1a31bb46686f09e68ea6ac5c63fdee0d22d7b23b8f3f7ea07712869ef/numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5", size = 17278923 },
- { url = "https://files.pythonhosted.org/packages/35/e2/76a11e54139654a324d107da1d98f99e7aa2a7ef97cfd7c631fba7dbde71/numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d", size = 12422446 },
- { url = "https://files.pythonhosted.org/packages/d8/ec/ebef2f7d7c28503f958f0f8b992e7ce606fb74f9e891199329d5f5f87404/numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694", size = 14834466 },
- { url = "https://files.pythonhosted.org/packages/11/10/943cfb579f1a02909ff96464c69893b1d25be3731b5d3652c2e0cf1281ea/numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61", size = 19780722 },
- { url = "https://files.pythonhosted.org/packages/a7/ae/f53b7b265fdc701e663fbb322a8e9d4b14d9cb7b2385f45ddfabfc4327e4/numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f", size = 13843102 },
- { url = "https://files.pythonhosted.org/packages/25/6f/2586a50ad72e8dbb1d8381f837008a0321a3516dfd7cb57fc8cf7e4bb06b/numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e", size = 14039616 },
- { url = "https://files.pythonhosted.org/packages/98/5d/5738903efe0ecb73e51eb44feafba32bdba2081263d40c5043568ff60faf/numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc", size = 17316263 },
- { url = "https://files.pythonhosted.org/packages/d1/57/8d328f0b91c733aa9aa7ee540dbc49b58796c862b4fbcb1146c701e888da/numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2", size = 12455660 },
- { url = "https://files.pythonhosted.org/packages/69/65/0d47953afa0ad569d12de5f65d964321c208492064c38fe3b0b9744f8d44/numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706", size = 14868112 },
- { url = "https://files.pythonhosted.org/packages/9a/cd/d5b0402b801c8a8b56b04c1e85c6165efab298d2f0ab741c2406516ede3a/numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400", size = 19816549 },
- { url = "https://files.pythonhosted.org/packages/14/27/638aaa446f39113a3ed38b37a66243e21b38110d021bfcb940c383e120f2/numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f", size = 13879950 },
- { url = "https://files.pythonhosted.org/packages/8f/27/91894916e50627476cff1a4e4363ab6179d01077d71b9afed41d9e1f18bf/numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9", size = 14030228 },
- { url = "https://files.pythonhosted.org/packages/7a/7c/d7b2a0417af6428440c0ad7cb9799073e507b1a465f827d058b826236964/numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d", size = 17311170 },
- { url = "https://files.pythonhosted.org/packages/18/9d/e02ace5d7dfccee796c37b995c63322674daf88ae2f4a4724c5dd0afcc91/numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835", size = 12454918 },
- { url = "https://files.pythonhosted.org/packages/63/38/6cc19d6b8bfa1d1a459daf2b3fe325453153ca7019976274b6f33d8b5663/numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8", size = 14867441 },
- { url = "https://files.pythonhosted.org/packages/a4/fd/8dff40e25e937c94257455c237b9b6bf5a30d42dd1cc11555533be099492/numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef", size = 19156590 },
- { url = "https://files.pythonhosted.org/packages/42/e7/4bf953c6e05df90c6d351af69966384fed8e988d0e8c54dad7103b59f3ba/numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a", size = 16705744 },
- { url = "https://files.pythonhosted.org/packages/fc/dd/9106005eb477d022b60b3817ed5937a43dad8fd1f20b0610ea8a32fcb407/numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2", size = 14734290 },
-]
-
-[[package]]
-name = "numpy"
-version = "2.3.2"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/96/26/1320083986108998bd487e2931eed2aeedf914b6e8905431487543ec911d/numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9", size = 21259016 },
- { url = "https://files.pythonhosted.org/packages/c4/2b/792b341463fa93fc7e55abbdbe87dac316c5b8cb5e94fb7a59fb6fa0cda5/numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168", size = 14451158 },
- { url = "https://files.pythonhosted.org/packages/b7/13/e792d7209261afb0c9f4759ffef6135b35c77c6349a151f488f531d13595/numpy-2.3.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b", size = 5379817 },
- { url = "https://files.pythonhosted.org/packages/49/ce/055274fcba4107c022b2113a213c7287346563f48d62e8d2a5176ad93217/numpy-2.3.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8", size = 6913606 },
- { url = "https://files.pythonhosted.org/packages/17/f2/e4d72e6bc5ff01e2ab613dc198d560714971900c03674b41947e38606502/numpy-2.3.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d", size = 14589652 },
- { url = "https://files.pythonhosted.org/packages/c8/b0/fbeee3000a51ebf7222016e2939b5c5ecf8000a19555d04a18f1e02521b8/numpy-2.3.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3", size = 16938816 },
- { url = "https://files.pythonhosted.org/packages/a9/ec/2f6c45c3484cc159621ea8fc000ac5a86f1575f090cac78ac27193ce82cd/numpy-2.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f", size = 16370512 },
- { url = "https://files.pythonhosted.org/packages/b5/01/dd67cf511850bd7aefd6347aaae0956ed415abea741ae107834aae7d6d4e/numpy-2.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097", size = 18884947 },
- { url = "https://files.pythonhosted.org/packages/a7/17/2cf60fd3e6a61d006778735edf67a222787a8c1a7842aed43ef96d777446/numpy-2.3.2-cp311-cp311-win32.whl", hash = "sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220", size = 6599494 },
- { url = "https://files.pythonhosted.org/packages/d5/03/0eade211c504bda872a594f045f98ddcc6caef2b7c63610946845e304d3f/numpy-2.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170", size = 13087889 },
- { url = "https://files.pythonhosted.org/packages/13/32/2c7979d39dafb2a25087e12310fc7f3b9d3c7d960df4f4bc97955ae0ce1d/numpy-2.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89", size = 10459560 },
- { url = "https://files.pythonhosted.org/packages/00/6d/745dd1c1c5c284d17725e5c802ca4d45cfc6803519d777f087b71c9f4069/numpy-2.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b", size = 20956420 },
- { url = "https://files.pythonhosted.org/packages/bc/96/e7b533ea5740641dd62b07a790af5d9d8fec36000b8e2d0472bd7574105f/numpy-2.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f", size = 14184660 },
- { url = "https://files.pythonhosted.org/packages/2b/53/102c6122db45a62aa20d1b18c9986f67e6b97e0d6fbc1ae13e3e4c84430c/numpy-2.3.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0", size = 5113382 },
- { url = "https://files.pythonhosted.org/packages/2b/21/376257efcbf63e624250717e82b4fae93d60178f09eb03ed766dbb48ec9c/numpy-2.3.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b", size = 6647258 },
- { url = "https://files.pythonhosted.org/packages/91/ba/f4ebf257f08affa464fe6036e13f2bf9d4642a40228781dc1235da81be9f/numpy-2.3.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370", size = 14281409 },
- { url = "https://files.pythonhosted.org/packages/59/ef/f96536f1df42c668cbacb727a8c6da7afc9c05ece6d558927fb1722693e1/numpy-2.3.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73", size = 16641317 },
- { url = "https://files.pythonhosted.org/packages/f6/a7/af813a7b4f9a42f498dde8a4c6fcbff8100eed00182cc91dbaf095645f38/numpy-2.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc", size = 16056262 },
- { url = "https://files.pythonhosted.org/packages/8b/5d/41c4ef8404caaa7f05ed1cfb06afe16a25895260eacbd29b4d84dff2920b/numpy-2.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be", size = 18579342 },
- { url = "https://files.pythonhosted.org/packages/a1/4f/9950e44c5a11636f4a3af6e825ec23003475cc9a466edb7a759ed3ea63bd/numpy-2.3.2-cp312-cp312-win32.whl", hash = "sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036", size = 6320610 },
- { url = "https://files.pythonhosted.org/packages/7c/2f/244643a5ce54a94f0a9a2ab578189c061e4a87c002e037b0829dd77293b6/numpy-2.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f", size = 12786292 },
- { url = "https://files.pythonhosted.org/packages/54/cd/7b5f49d5d78db7badab22d8323c1b6ae458fbf86c4fdfa194ab3cd4eb39b/numpy-2.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07", size = 10194071 },
- { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074 },
- { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311 },
- { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022 },
- { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135 },
- { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147 },
- { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989 },
- { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052 },
- { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955 },
- { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843 },
- { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876 },
- { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786 },
- { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395 },
- { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374 },
- { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864 },
- { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533 },
- { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007 },
- { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914 },
- { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708 },
- { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678 },
- { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832 },
- { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049 },
- { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935 },
- { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906 },
- { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607 },
- { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110 },
- { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050 },
- { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292 },
- { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913 },
- { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180 },
- { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809 },
- { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410 },
- { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821 },
- { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303 },
- { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524 },
- { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519 },
- { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972 },
- { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439 },
- { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479 },
- { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805 },
- { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830 },
- { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665 },
- { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777 },
- { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856 },
- { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226 },
- { url = "https://files.pythonhosted.org/packages/cf/ea/50ebc91d28b275b23b7128ef25c3d08152bc4068f42742867e07a870a42a/numpy-2.3.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15", size = 21130338 },
- { url = "https://files.pythonhosted.org/packages/9f/57/cdd5eac00dd5f137277355c318a955c0d8fb8aa486020c22afd305f8b88f/numpy-2.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec", size = 14375776 },
- { url = "https://files.pythonhosted.org/packages/83/85/27280c7f34fcd305c2209c0cdca4d70775e4859a9eaa92f850087f8dea50/numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712", size = 5304882 },
- { url = "https://files.pythonhosted.org/packages/48/b4/6500b24d278e15dd796f43824e69939d00981d37d9779e32499e823aa0aa/numpy-2.3.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c", size = 6818405 },
- { url = "https://files.pythonhosted.org/packages/9b/c9/142c1e03f199d202da8e980c2496213509291b6024fd2735ad28ae7065c7/numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296", size = 14419651 },
- { url = "https://files.pythonhosted.org/packages/8b/95/8023e87cbea31a750a6c00ff9427d65ebc5fef104a136bfa69f76266d614/numpy-2.3.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981", size = 16760166 },
- { url = "https://files.pythonhosted.org/packages/78/e3/6690b3f85a05506733c7e90b577e4762517404ea78bab2ca3a5cb1aeb78d/numpy-2.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619", size = 12977811 },
-]
-
[[package]]
name = "packaging"
version = "25.0"
@@ -396,108 +151,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 },
]
-[[package]]
-name = "pandas"
-version = "2.0.3"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "numpy", version = "1.24.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "python-dateutil", marker = "python_full_version < '3.9'" },
- { name = "pytz", marker = "python_full_version < '3.9'" },
- { name = "tzdata", marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b1/a7/824332581e258b5aa4f3763ecb2a797e5f9a54269044ba2e50ac19936b32/pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c", size = 5284455 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/3c/b2/0d4a5729ce1ce11630c4fc5d5522a33b967b3ca146c210f58efde7c40e99/pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8", size = 11760908 },
- { url = "https://files.pythonhosted.org/packages/4a/f6/f620ca62365d83e663a255a41b08d2fc2eaf304e0b8b21bb6d62a7390fe3/pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f", size = 10823486 },
- { url = "https://files.pythonhosted.org/packages/c2/59/cb4234bc9b968c57e81861b306b10cd8170272c57b098b724d3de5eda124/pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183", size = 11571897 },
- { url = "https://files.pythonhosted.org/packages/e3/59/35a2892bf09ded9c1bf3804461efe772836a5261ef5dfb4e264ce813ff99/pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0", size = 12306421 },
- { url = "https://files.pythonhosted.org/packages/94/71/3a0c25433c54bb29b48e3155b959ac78f4c4f2f06f94d8318aac612cb80f/pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210", size = 9540792 },
- { url = "https://files.pythonhosted.org/packages/ed/30/b97456e7063edac0e5a405128065f0cd2033adfe3716fb2256c186bd41d0/pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e", size = 10664333 },
- { url = "https://files.pythonhosted.org/packages/b3/92/a5e5133421b49e901a12e02a6a7ef3a0130e10d13db8cb657fdd0cba3b90/pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8", size = 11645672 },
- { url = "https://files.pythonhosted.org/packages/8f/bb/aea1fbeed5b474cb8634364718abe9030d7cc7a30bf51f40bd494bbc89a2/pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26", size = 10693229 },
- { url = "https://files.pythonhosted.org/packages/d6/90/e7d387f1a416b14e59290baa7a454a90d719baebbf77433ff1bdcc727800/pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d", size = 11581591 },
- { url = "https://files.pythonhosted.org/packages/d0/28/88b81881c056376254618fad622a5e94b5126db8c61157ea1910cd1c040a/pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df", size = 12219370 },
- { url = "https://files.pythonhosted.org/packages/e4/a5/212b9039e25bf8ebb97e417a96660e3dc925dacd3f8653d531b8f7fd9be4/pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd", size = 9482935 },
- { url = "https://files.pythonhosted.org/packages/9e/71/756a1be6bee0209d8c0d8c5e3b9fc72c00373f384a4017095ec404aec3ad/pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b", size = 10607692 },
- { url = "https://files.pythonhosted.org/packages/78/a8/07dd10f90ca915ed914853cd57f79bfc22e1ef4384ab56cb4336d2fc1f2a/pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061", size = 11653303 },
- { url = "https://files.pythonhosted.org/packages/53/c3/f8e87361f7fdf42012def602bfa2a593423c729f5cb7c97aed7f51be66ac/pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5", size = 10710932 },
- { url = "https://files.pythonhosted.org/packages/a7/87/828d50c81ce0f434163bf70b925a0eec6076808e0bca312a79322b141f66/pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089", size = 11684018 },
- { url = "https://files.pythonhosted.org/packages/f8/7f/5b047effafbdd34e52c9e2d7e44f729a0655efafb22198c45cf692cdc157/pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0", size = 12353723 },
- { url = "https://files.pythonhosted.org/packages/ea/ae/26a2eda7fa581347d69e51f93892493b2074ef3352ac71033c9f32c52389/pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02", size = 9646403 },
- { url = "https://files.pythonhosted.org/packages/c3/6c/ea362eef61f05553aaf1a24b3e96b2d0603f5dc71a3bd35688a24ed88843/pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78", size = 10777638 },
- { url = "https://files.pythonhosted.org/packages/f8/c7/cfef920b7b457dff6928e824896cb82367650ea127d048ee0b820026db4f/pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b", size = 11834160 },
- { url = "https://files.pythonhosted.org/packages/6c/1c/689c9d99bc4e5d366a5fd871f0bcdee98a6581e240f96b78d2d08f103774/pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e", size = 10862752 },
- { url = "https://files.pythonhosted.org/packages/cc/b8/4d082f41c27c95bf90485d1447b647cc7e5680fea75e315669dc6e4cb398/pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b", size = 11715852 },
- { url = "https://files.pythonhosted.org/packages/9e/0d/91a9fd2c202f2b1d97a38ab591890f86480ecbb596cbc56d035f6f23fdcc/pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641", size = 12398496 },
- { url = "https://files.pythonhosted.org/packages/26/7d/d8aa0a2c4f3f5f8ea59fb946c8eafe8f508090ca73e2b08a9af853c1103e/pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682", size = 9630766 },
- { url = "https://files.pythonhosted.org/packages/9a/f2/0ad053856debbe90c83de1b4f05915f85fd2146f20faf9daa3b320d36df3/pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc", size = 10755902 },
-]
-
-[[package]]
-name = "pandas"
-version = "2.3.1"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
-dependencies = [
- { name = "numpy", version = "1.24.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.12'" },
- { name = "numpy", version = "2.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
- { name = "python-dateutil", marker = "python_full_version >= '3.9'" },
- { name = "pytz", marker = "python_full_version >= '3.9'" },
- { name = "tzdata", marker = "python_full_version >= '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c4/ca/aa97b47287221fa37a49634532e520300088e290b20d690b21ce3e448143/pandas-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22c2e866f7209ebc3a8f08d75766566aae02bcc91d196935a1d9e59c7b990ac9", size = 11542731 },
- { url = "https://files.pythonhosted.org/packages/80/bf/7938dddc5f01e18e573dcfb0f1b8c9357d9b5fa6ffdee6e605b92efbdff2/pandas-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3583d348546201aff730c8c47e49bc159833f971c2899d6097bce68b9112a4f1", size = 10790031 },
- { url = "https://files.pythonhosted.org/packages/ee/2f/9af748366763b2a494fed477f88051dbf06f56053d5c00eba652697e3f94/pandas-2.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f951fbb702dacd390561e0ea45cdd8ecfa7fb56935eb3dd78e306c19104b9b0", size = 11724083 },
- { url = "https://files.pythonhosted.org/packages/2c/95/79ab37aa4c25d1e7df953dde407bb9c3e4ae47d154bc0dd1692f3a6dcf8c/pandas-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd05b72ec02ebfb993569b4931b2e16fbb4d6ad6ce80224a3ee838387d83a191", size = 12342360 },
- { url = "https://files.pythonhosted.org/packages/75/a7/d65e5d8665c12c3c6ff5edd9709d5836ec9b6f80071b7f4a718c6106e86e/pandas-2.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1b916a627919a247d865aed068eb65eb91a344b13f5b57ab9f610b7716c92de1", size = 13202098 },
- { url = "https://files.pythonhosted.org/packages/65/f3/4c1dbd754dbaa79dbf8b537800cb2fa1a6e534764fef50ab1f7533226c5c/pandas-2.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fe67dc676818c186d5a3d5425250e40f179c2a89145df477dd82945eaea89e97", size = 13837228 },
- { url = "https://files.pythonhosted.org/packages/3f/d6/d7f5777162aa9b48ec3910bca5a58c9b5927cfd9cfde3aa64322f5ba4b9f/pandas-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:2eb789ae0274672acbd3c575b0598d213345660120a257b47b5dafdc618aec83", size = 11336561 },
- { url = "https://files.pythonhosted.org/packages/76/1c/ccf70029e927e473a4476c00e0d5b32e623bff27f0402d0a92b7fc29bb9f/pandas-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2b0540963d83431f5ce8870ea02a7430adca100cec8a050f0811f8e31035541b", size = 11566608 },
- { url = "https://files.pythonhosted.org/packages/ec/d3/3c37cb724d76a841f14b8f5fe57e5e3645207cc67370e4f84717e8bb7657/pandas-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fe7317f578c6a153912bd2292f02e40c1d8f253e93c599e82620c7f69755c74f", size = 10823181 },
- { url = "https://files.pythonhosted.org/packages/8a/4c/367c98854a1251940edf54a4df0826dcacfb987f9068abf3e3064081a382/pandas-2.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6723a27ad7b244c0c79d8e7007092d7c8f0f11305770e2f4cd778b3ad5f9f85", size = 11793570 },
- { url = "https://files.pythonhosted.org/packages/07/5f/63760ff107bcf5146eee41b38b3985f9055e710a72fdd637b791dea3495c/pandas-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3462c3735fe19f2638f2c3a40bd94ec2dc5ba13abbb032dd2fa1f540a075509d", size = 12378887 },
- { url = "https://files.pythonhosted.org/packages/15/53/f31a9b4dfe73fe4711c3a609bd8e60238022f48eacedc257cd13ae9327a7/pandas-2.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:98bcc8b5bf7afed22cc753a28bc4d9e26e078e777066bc53fac7904ddef9a678", size = 13230957 },
- { url = "https://files.pythonhosted.org/packages/e0/94/6fce6bf85b5056d065e0a7933cba2616dcb48596f7ba3c6341ec4bcc529d/pandas-2.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d544806b485ddf29e52d75b1f559142514e60ef58a832f74fb38e48d757b299", size = 13883883 },
- { url = "https://files.pythonhosted.org/packages/c8/7b/bdcb1ed8fccb63d04bdb7635161d0ec26596d92c9d7a6cce964e7876b6c1/pandas-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b3cd4273d3cb3707b6fffd217204c52ed92859533e31dc03b7c5008aa933aaab", size = 11340212 },
- { url = "https://files.pythonhosted.org/packages/46/de/b8445e0f5d217a99fe0eeb2f4988070908979bec3587c0633e5428ab596c/pandas-2.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:689968e841136f9e542020698ee1c4fbe9caa2ed2213ae2388dc7b81721510d3", size = 11588172 },
- { url = "https://files.pythonhosted.org/packages/1e/e0/801cdb3564e65a5ac041ab99ea6f1d802a6c325bb6e58c79c06a3f1cd010/pandas-2.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:025e92411c16cbe5bb2a4abc99732a6b132f439b8aab23a59fa593eb00704232", size = 10717365 },
- { url = "https://files.pythonhosted.org/packages/51/a5/c76a8311833c24ae61a376dbf360eb1b1c9247a5d9c1e8b356563b31b80c/pandas-2.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b7ff55f31c4fcb3e316e8f7fa194566b286d6ac430afec0d461163312c5841e", size = 11280411 },
- { url = "https://files.pythonhosted.org/packages/da/01/e383018feba0a1ead6cf5fe8728e5d767fee02f06a3d800e82c489e5daaf/pandas-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dcb79bf373a47d2a40cf7232928eb7540155abbc460925c2c96d2d30b006eb4", size = 11988013 },
- { url = "https://files.pythonhosted.org/packages/5b/14/cec7760d7c9507f11c97d64f29022e12a6cc4fc03ac694535e89f88ad2ec/pandas-2.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:56a342b231e8862c96bdb6ab97170e203ce511f4d0429589c8ede1ee8ece48b8", size = 12767210 },
- { url = "https://files.pythonhosted.org/packages/50/b9/6e2d2c6728ed29fb3d4d4d302504fb66f1a543e37eb2e43f352a86365cdf/pandas-2.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ca7ed14832bce68baef331f4d7f294411bed8efd032f8109d690df45e00c4679", size = 13440571 },
- { url = "https://files.pythonhosted.org/packages/80/a5/3a92893e7399a691bad7664d977cb5e7c81cf666c81f89ea76ba2bff483d/pandas-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ac942bfd0aca577bef61f2bc8da8147c4ef6879965ef883d8e8d5d2dc3e744b8", size = 10987601 },
- { url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393 },
- { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750 },
- { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004 },
- { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869 },
- { url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218 },
- { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763 },
- { url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482 },
- { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159 },
- { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287 },
- { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381 },
- { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998 },
- { url = "https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705 },
- { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044 },
- { url = "https://files.pythonhosted.org/packages/6e/21/ecf2df680982616459409b09962a8c2065330c7151dc6538069f3b634acf/pandas-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4645f770f98d656f11c69e81aeb21c6fca076a44bed3dcbb9396a4311bc7f6d8", size = 11567275 },
- { url = "https://files.pythonhosted.org/packages/1e/1a/dcb50e44b75419e96b276c9fb023b0f147b3c411be1cd517492aa2a184d4/pandas-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:342e59589cc454aaff7484d75b816a433350b3d7964d7847327edda4d532a2e3", size = 10811488 },
- { url = "https://files.pythonhosted.org/packages/2d/55/66cd2b679f6a27398380eac7574bc24746128f74626a3c02b978ea00e5ce/pandas-2.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d12f618d80379fde6af007f65f0c25bd3e40251dbd1636480dfffce2cf1e6da", size = 11763000 },
- { url = "https://files.pythonhosted.org/packages/ae/1c/5b9b263c80fd5e231b77df6f78cd7426d1d4ad3a4e858e85b7b3d93d0e9c/pandas-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd71c47a911da120d72ef173aeac0bf5241423f9bfea57320110a978457e069e", size = 12361395 },
- { url = "https://files.pythonhosted.org/packages/f7/74/7e817b31413fbb96366ea327d43d1926a9c48c58074e27e094e2839a0e36/pandas-2.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09e3b1587f0f3b0913e21e8b32c3119174551deb4a4eba4a89bc7377947977e7", size = 13225086 },
- { url = "https://files.pythonhosted.org/packages/1f/0f/bc0a44b47eba2f22ae4235719a573d552ef7ad76ed3ea39ae62d554e040b/pandas-2.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2323294c73ed50f612f67e2bf3ae45aea04dce5690778e08a09391897f35ff88", size = 13871698 },
- { url = "https://files.pythonhosted.org/packages/fa/cb/6c32f8fadefa4314b740fbe8f74f6a02423bd1549e7c930826df35ac3c1b/pandas-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b4b0de34dc8499c2db34000ef8baad684cfa4cbd836ecee05f323ebfba348c7d", size = 11357186 },
-]
-
[[package]]
name = "parsimonious"
version = "0.10.0"
@@ -519,332 +172,36 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 },
]

-[[package]]
-name = "pluggy"
-version = "1.5.0"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
-]
-
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 },
]

-[[package]]
-name = "polars"
-version = "1.8.2"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3d/75/2196c26fe049ecce55a0fa87b22ab3d9477bc9bab38116ed04854fc65ecb/polars-1.8.2.tar.gz", hash = "sha256:42f69277d5be2833b0b826af5e75dcf430222d65c9633872856e176a0bed27a0", size = 4010537 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b5/8b/6829e22a0f4c6e754c2e2b5d81025ab14d7b214018119762f52bad7325aa/polars-1.8.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:114be1ebfb051b794fb9e1f15999430c79cc0824595e237d3f45632be3e56d73", size = 31165933 },
- { url = "https://files.pythonhosted.org/packages/8f/cd/5d6b837f42c1b6d87012beca940a075e450a352ab717a649000c2ec57d71/polars-1.8.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:e4fc36cfe48972d4c5be21a7cb119d6378fb7af0bb3eeb61456b66a1f43228e3", size = 27488552 },
- { url = "https://files.pythonhosted.org/packages/a7/f3/c317b1bc6759d1ec343c25d5ebd376a07a2e1fd2bd04fdc07ce6b2a855c4/polars-1.8.2-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67c1e448d6e38697650b22dd359f13c40b567c0b66686c8602e4367400e87801", size = 32548666 },
- { url = "https://files.pythonhosted.org/packages/1d/df/5ccf44218728caecda9f555879b40fe4ab34ff629c81b9117a1107437fdc/polars-1.8.2-cp38-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:570ee86b033dc5a6dbe2cb0df48522301642f304dda3da48f53d7488899a2206", size = 29187225 },
- { url = "https://files.pythonhosted.org/packages/9c/45/77e4fda23368907c06bf70fc722de28d442c5087bbc8a60c29b8396750ea/polars-1.8.2-cp38-abi3-win_amd64.whl", hash = "sha256:ce1a1c1e2150ffcc44a5f1c461d738e1dcd95abbd0f210af0271c7ac0c9f7ef9", size = 32394690 },
-]
-
-[[package]]
-name = "polars"
-version = "1.32.3"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/aa/f2/1a76a8bd902bc4942e435a480f362c8687bba60d438ff3283191e38568fa/polars-1.32.3.tar.gz", hash = "sha256:57c500dc1b5cba49b0589034478db031815f3d57a20cb830b05ecee1a9ba56b1", size = 4838448 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/4c/9b/5937ab9f8fa49c8e00617aeb817a5ffa5740434d5bb8a90f2afa657875aa/polars-1.32.3-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c7c472ea1d50a5104079cb64e34f78f85774bcc69b875ba8daf21233f4c70d42", size = 37935794 },
- { url = "https://files.pythonhosted.org/packages/6e/e9/88f5332001b9dd5c8e0a4fab51015f740e01715a081c41bc0f7ad2bf76a5/polars-1.32.3-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:fd87275f0cc795e72a2030b58293198cfa748d4b009cf52218e27db5397ed07f", size = 34621102 },
- { url = "https://files.pythonhosted.org/packages/ab/8a/6f56af7e535c34c95decc8654786bfce4632ba32817dc2f8bad18571ef9a/polars-1.32.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a9b9668ef310e5a77a7e7daa9c753874779c8da52e93f654bfd7953eb4b60b", size = 38443071 },
- { url = "https://files.pythonhosted.org/packages/46/aa/63536ea5780edc0ef6850679dc81d519f3966c7bb11a5cf10ccecb541095/polars-1.32.3-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:c8f5d2f43b80b68e39bfaa2948ce632563633466576f12e74e8560d6481f5851", size = 35639598 },
- { url = "https://files.pythonhosted.org/packages/d7/c8/226953cda6cf9ae63aa9714d396a9138029e31db3c504c15d6711b618f8f/polars-1.32.3-cp39-abi3-win_amd64.whl", hash = "sha256:db56a7cb4898e173d62634e182f74bdff744c62be5470e0fe20df8d10f659af7", size = 38038192 },
- { url = "https://files.pythonhosted.org/packages/ec/99/6b93c854e602927a778eabd7550204f700cc4e6c07be73372371583dda3e/polars-1.32.3-cp39-abi3-win_arm64.whl", hash = "sha256:a2e3f87c60f54eefe67b1bebd3105918d84df0fd6d59cc6b870c2f16d2d26ca1", size = 34198919 },
-]
-
-[[package]]
-name = "pyarrow"
-version = "17.0.0"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "numpy", version = "1.24.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/27/4e/ea6d43f324169f8aec0e57569443a38bab4b398d09769ca64f7b4d467de3/pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28", size = 1112479 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/39/5d/78d4b040bc5ff2fc6c3d03e80fca396b742f6c125b8af06bcf7427f931bc/pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07", size = 28994846 },
- { url = "https://files.pythonhosted.org/packages/3b/73/8ed168db7642e91180330e4ea9f3ff8bab404678f00d32d7df0871a4933b/pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655", size = 27165908 },
- { url = "https://files.pythonhosted.org/packages/81/36/e78c24be99242063f6d0590ef68c857ea07bdea470242c361e9a15bd57a4/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545", size = 39264209 },
- { url = "https://files.pythonhosted.org/packages/18/4c/3db637d7578f683b0a8fb8999b436bdbedd6e3517bd4f90c70853cf3ad20/pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2", size = 39862883 },
- { url = "https://files.pythonhosted.org/packages/81/3c/0580626896c842614a523e66b351181ed5bb14e5dfc263cd68cea2c46d90/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8", size = 38723009 },
- { url = "https://files.pythonhosted.org/packages/ee/fb/c1b47f0ada36d856a352da261a44d7344d8f22e2f7db3945f8c3b81be5dd/pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047", size = 39855626 },
- { url = "https://files.pythonhosted.org/packages/19/09/b0a02908180a25d57312ab5919069c39fddf30602568980419f4b02393f6/pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087", size = 25147242 },
- { url = "https://files.pythonhosted.org/packages/f9/46/ce89f87c2936f5bb9d879473b9663ce7a4b1f4359acc2f0eb39865eaa1af/pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977", size = 29028748 },
- { url = "https://files.pythonhosted.org/packages/8d/8e/ce2e9b2146de422f6638333c01903140e9ada244a2a477918a368306c64c/pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3", size = 27190965 },
- { url = "https://files.pythonhosted.org/packages/3b/c8/5675719570eb1acd809481c6d64e2136ffb340bc387f4ca62dce79516cea/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15", size = 39269081 },
- { url = "https://files.pythonhosted.org/packages/5e/78/3931194f16ab681ebb87ad252e7b8d2c8b23dad49706cadc865dff4a1dd3/pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597", size = 39864921 },
- { url = "https://files.pythonhosted.org/packages/d8/81/69b6606093363f55a2a574c018901c40952d4e902e670656d18213c71ad7/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420", size = 38740798 },
- { url = "https://files.pythonhosted.org/packages/4c/21/9ca93b84b92ef927814cb7ba37f0774a484c849d58f0b692b16af8eebcfb/pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4", size = 39871877 },
- { url = "https://files.pythonhosted.org/packages/30/d1/63a7c248432c71c7d3ee803e706590a0b81ce1a8d2b2ae49677774b813bb/pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03", size = 25151089 },
- { url = "https://files.pythonhosted.org/packages/d4/62/ce6ac1275a432b4a27c55fe96c58147f111d8ba1ad800a112d31859fae2f/pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22", size = 29019418 },
- { url = "https://files.pythonhosted.org/packages/8e/0a/dbd0c134e7a0c30bea439675cc120012337202e5fac7163ba839aa3691d2/pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053", size = 27152197 },
- { url = "https://files.pythonhosted.org/packages/cb/05/3f4a16498349db79090767620d6dc23c1ec0c658a668d61d76b87706c65d/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a", size = 39263026 },
- { url = "https://files.pythonhosted.org/packages/c2/0c/ea2107236740be8fa0e0d4a293a095c9f43546a2465bb7df34eee9126b09/pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc", size = 39880798 },
- { url = "https://files.pythonhosted.org/packages/f6/b0/b9164a8bc495083c10c281cc65064553ec87b7537d6f742a89d5953a2a3e/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a", size = 38715172 },
- { url = "https://files.pythonhosted.org/packages/f1/c4/9625418a1413005e486c006e56675334929fad864347c5ae7c1b2e7fe639/pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b", size = 39874508 },
- { url = "https://files.pythonhosted.org/packages/ae/49/baafe2a964f663413be3bd1cf5c45ed98c5e42e804e2328e18f4570027c1/pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7", size = 25099235 },
- { url = "https://files.pythonhosted.org/packages/8d/bd/8f52c1d7b430260f80a349cffa2df351750a737b5336313d56dcadeb9ae1/pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204", size = 28999345 },
- { url = "https://files.pythonhosted.org/packages/64/d9/51e35550f2f18b8815a2ab25948f735434db32000c0e91eba3a32634782a/pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8", size = 27168441 },
- { url = "https://files.pythonhosted.org/packages/18/d8/7161d87d07ea51be70c49f615004c1446d5723622a18b2681f7e4b71bf6e/pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155", size = 39363163 },
- { url = "https://files.pythonhosted.org/packages/3f/08/bc497130789833de09e345e3ce4647e3ce86517c4f70f2144f0367ca378b/pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145", size = 39965253 },
- { url = "https://files.pythonhosted.org/packages/d3/2e/493dd7db889402b4c7871ca7dfdd20f2c5deedbff802d3eb8576359930f9/pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c", size = 38805378 },
- { url = "https://files.pythonhosted.org/packages/e6/c1/4c6bcdf7a820034aa91a8b4d25fef38809be79b42ca7aaa16d4680b0bbac/pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c", size = 39958364 },
- { url = "https://files.pythonhosted.org/packages/d1/db/42ac644453cfdfc60fe002b46d647fe7a6dfad753ef7b28e99b4c936ad5d/pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca", size = 25229211 },
- { url = "https://files.pythonhosted.org/packages/43/e0/a898096d35be240aa61fb2d54db58b86d664b10e1e51256f9300f47565e8/pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb", size = 29007881 },
- { url = "https://files.pythonhosted.org/packages/59/22/f7d14907ed0697b5dd488d393129f2738629fa5bcba863e00931b7975946/pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df", size = 27178117 },
- { url = "https://files.pythonhosted.org/packages/bf/ee/661211feac0ed48467b1d5c57298c91403809ec3ab78b1d175e1d6ad03cf/pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687", size = 39273896 },
- { url = "https://files.pythonhosted.org/packages/af/61/bcd9b58e38ead6ad42b9ed00da33a3f862bc1d445e3d3164799c25550ac2/pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b", size = 39875438 },
- { url = "https://files.pythonhosted.org/packages/75/63/29d1bfcc57af73cde3fc3baccab2f37548de512dbe0ab294b033cd203516/pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5", size = 38735092 },
- { url = "https://files.pythonhosted.org/packages/39/f4/90258b4de753df7cc61cefb0312f8abcf226672e96cc64996e66afce817a/pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda", size = 39867610 },
- { url = "https://files.pythonhosted.org/packages/e7/f6/b75d4816c32f1618ed31a005ee635dd1d91d8164495d94f2ea092f594661/pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204", size = 25148611 },
-]
-
-[[package]]
-name = "pyarrow"
-version = "21.0.0"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
-sdist = { url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26", size = 31196837 },
- { url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79", size = 32659470 },
- { url = "https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb", size = 41055619 },
- { url = "https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51", size = 42733488 },
- { url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a", size = 43329159 },
- { url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594", size = 45050567 },
- { url = "https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634", size = 26217959 },
- { url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b", size = 31243234 },
- { url = "https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10", size = 32714370 },
- { url = "https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e", size = 41135424 },
- { url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569", size = 42823810 },
- { url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e", size = 43391538 },
- { url = "https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c", size = 45120056 },
- { url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6", size = 26220568 },
- { url = "https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd", size = 31160305 },
- { url = "https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876", size = 32684264 },
- { url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d", size = 41108099 },
- { url = "https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e", size = 42829529 },
- { url = "https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82", size = 43367883 },
- { url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623", size = 45133802 },
- { url = "https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18", size = 26203175 },
- { url = "https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a", size = 31154306 },
- { url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe", size = 32680622 },
- { url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd", size = 41104094 },
- { url = "https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61", size = 42825576 },
- { url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d", size = 43368342 },
- { url = "https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99", size = 45131218 },
- { url = "https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636", size = 26087551 },
- { url = "https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da", size = 31290064 },
- { url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7", size = 32727837 },
- { url = "https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6", size = 41014158 },
- { url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8", size = 42667885 },
- { url = "https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503", size = 43276625 },
- { url = "https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79", size = 44951890 },
- { url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10", size = 26371006 },
- { url = "https://files.pythonhosted.org/packages/3e/cc/ce4939f4b316457a083dc5718b3982801e8c33f921b3c98e7a93b7c7491f/pyarrow-21.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a7f6524e3747e35f80744537c78e7302cd41deee8baa668d56d55f77d9c464b3", size = 31211248 },
- { url = "https://files.pythonhosted.org/packages/1f/c2/7a860931420d73985e2f340f06516b21740c15b28d24a0e99a900bb27d2b/pyarrow-21.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:203003786c9fd253ebcafa44b03c06983c9c8d06c3145e37f1b76a1f317aeae1", size = 32676896 },
- { url = "https://files.pythonhosted.org/packages/68/a8/197f989b9a75e59b4ca0db6a13c56f19a0ad8a298c68da9cc28145e0bb97/pyarrow-21.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b4d97e297741796fead24867a8dabf86c87e4584ccc03167e4a811f50fdf74d", size = 41067862 },
- { url = "https://files.pythonhosted.org/packages/fa/82/6ecfa89487b35aa21accb014b64e0a6b814cc860d5e3170287bf5135c7d8/pyarrow-21.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:898afce396b80fdda05e3086b4256f8677c671f7b1d27a6976fa011d3fd0a86e", size = 42747508 },
- { url = "https://files.pythonhosted.org/packages/3b/b7/ba252f399bbf3addc731e8643c05532cf32e74cebb5e32f8f7409bc243cf/pyarrow-21.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:067c66ca29aaedae08218569a114e413b26e742171f526e828e1064fcdec13f4", size = 43345293 },
- { url = "https://files.pythonhosted.org/packages/ff/0a/a20819795bd702b9486f536a8eeb70a6aa64046fce32071c19ec8230dbaa/pyarrow-21.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0c4e75d13eb76295a49e0ea056eb18dbd87d81450bfeb8afa19a7e5a75ae2ad7", size = 45060670 },
- { url = "https://files.pythonhosted.org/packages/10/15/6b30e77872012bbfe8265d42a01d5b3c17ef0ac0f2fae531ad91b6a6c02e/pyarrow-21.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdc4c17afda4dab2a9c0b79148a43a7f4e1094916b3e18d8975bfd6d6d52241f", size = 26227521 },
-]
-
-[[package]]
-name = "pydantic"
-version = "2.10.6"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "annotated-types", marker = "python_full_version < '3.9'" },
- { name = "pydantic-core", version = "2.27.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "typing-extensions", marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 },
-]
-
[[package]]
name = "pydantic"
version = "2.11.5"
source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
dependencies = [
- { name = "annotated-types", marker = "python_full_version >= '3.9'" },
- { name = "pydantic-core", version = "2.33.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
- { name = "typing-extensions", marker = "python_full_version >= '3.9'" },
- { name = "typing-inspection", marker = "python_full_version >= '3.9'" },
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229 },
]

-[[package]]
-name = "pydantic-core"
-version = "2.27.2"
-source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version < '3.9'",
-]
-dependencies = [
- { name = "typing-extensions", marker = "python_full_version < '3.9'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 },
- { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 },
- { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 },
- { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 },
- { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 },
- { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 },
- { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 },
- { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 },
- { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 },
- { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 },
- { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 },
- { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 },
- { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 },
- { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 },
- { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 },
- { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 },
- { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 },
- { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 },
- { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 },
- { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 },
- { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 },
- { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 },
- { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 },
- { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 },
- { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 },
- { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 },
- { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 },
- { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 },
- { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 },
- { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 },
- { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 },
- { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 },
- { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 },
- { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 },
- { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 },
- { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 },
- { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 },
- { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 },
- { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 },
- { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 },
- { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 },
- { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 },
- { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 },
- { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 },
- { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 },
- { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 },
- { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 },
- { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 },
- { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 },
- { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 },
- { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 },
- { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 },
- { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 },
- { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 },
- { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 },
- { url = "https://files.pythonhosted.org/packages/43/53/13e9917fc69c0a4aea06fd63ed6a8d6cda9cf140ca9584d49c1650b0ef5e/pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506", size = 1899595 },
- { url = "https://files.pythonhosted.org/packages/f4/20/26c549249769ed84877f862f7bb93f89a6ee08b4bee1ed8781616b7fbb5e/pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320", size = 1775010 },
- { url = "https://files.pythonhosted.org/packages/35/eb/8234e05452d92d2b102ffa1b56d801c3567e628fdc63f02080fdfc68fd5e/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145", size = 1830727 },
- { url = "https://files.pythonhosted.org/packages/8f/df/59f915c8b929d5f61e5a46accf748a87110ba145156f9326d1a7d28912b2/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1", size = 1868393 },
- { url = "https://files.pythonhosted.org/packages/d5/52/81cf4071dca654d485c277c581db368b0c95b2b883f4d7b736ab54f72ddf/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228", size = 2040300 },
- { url = "https://files.pythonhosted.org/packages/9c/00/05197ce1614f5c08d7a06e1d39d5d8e704dc81971b2719af134b844e2eaf/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046", size = 2738785 },
- { url = "https://files.pythonhosted.org/packages/f7/a3/5f19bc495793546825ab160e530330c2afcee2281c02b5ffafd0b32ac05e/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5", size = 1996493 },
- { url = "https://files.pythonhosted.org/packages/ed/e8/e0102c2ec153dc3eed88aea03990e1b06cfbca532916b8a48173245afe60/pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a", size = 1998544 },
- { url = "https://files.pythonhosted.org/packages/fb/a3/4be70845b555bd80aaee9f9812a7cf3df81550bce6dadb3cfee9c5d8421d/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d", size = 2007449 },
- { url = "https://files.pythonhosted.org/packages/e3/9f/b779ed2480ba355c054e6d7ea77792467631d674b13d8257085a4bc7dcda/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9", size = 2129460 },
- { url = "https://files.pythonhosted.org/packages/a0/f0/a6ab0681f6e95260c7fbf552874af7302f2ea37b459f9b7f00698f875492/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da", size = 2159609 },
- { url = "https://files.pythonhosted.org/packages/8a/2b/e1059506795104349712fbca647b18b3f4a7fd541c099e6259717441e1e0/pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b", size = 1819886 },
- { url = "https://files.pythonhosted.org/packages/aa/6d/df49c17f024dfc58db0bacc7b03610058018dd2ea2eaf748ccbada4c3d06/pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad", size = 1980773 },
- { url = "https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475 },
- { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279 },
- { url = "https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112 },
- { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780 },
- { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943 },
- { url = "https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492 },
- { url = "https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714 },
- { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163 },
- { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217 },
- { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899 },
- { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726 },
- { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219 },
- { url = "https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382 },
- { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 },
- { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 },
- { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 },
- { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 },
- { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 },
- { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 },
- { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 },
- { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 },
- { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 },
- { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733 },
- { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375 },
- { url = "https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307 },
- { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", size = 1979971 },
- { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616 },
- { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943 },
- { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654 },
- { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292 },
- { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961 },
-]
-
[[package]]
name = "pydantic-core"
version = "2.33.2"
source = { registry = "https://pypi.org/simple" }
-resolution-markers = [
- "python_full_version >= '3.12'",
- "python_full_version == '3.11.*'",
- "python_full_version >= '3.9' and python_full_version < '3.11'",
-]
dependencies = [
- { name = "typing-extensions", marker = "python_full_version >= '3.9'" },
+ { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 }
wheels = [
@@ -906,19 +263,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 },
- { url = "https://files.pythonhosted.org/packages/53/ea/bbe9095cdd771987d13c82d104a9c8559ae9aec1e29f139e286fd2e9256e/pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d", size = 2028677 },
- { url = "https://files.pythonhosted.org/packages/49/1d/4ac5ed228078737d457a609013e8f7edc64adc37b91d619ea965758369e5/pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954", size = 1864735 },
- { url = "https://files.pythonhosted.org/packages/23/9a/2e70d6388d7cda488ae38f57bc2f7b03ee442fbcf0d75d848304ac7e405b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb", size = 1898467 },
- { url = "https://files.pythonhosted.org/packages/ff/2e/1568934feb43370c1ffb78a77f0baaa5a8b6897513e7a91051af707ffdc4/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7", size = 1983041 },
- { url = "https://files.pythonhosted.org/packages/01/1a/1a1118f38ab64eac2f6269eb8c120ab915be30e387bb561e3af904b12499/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4", size = 2136503 },
- { url = "https://files.pythonhosted.org/packages/5c/da/44754d1d7ae0f22d6d3ce6c6b1486fc07ac2c524ed8f6eca636e2e1ee49b/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b", size = 2736079 },
- { url = "https://files.pythonhosted.org/packages/4d/98/f43cd89172220ec5aa86654967b22d862146bc4d736b1350b4c41e7c9c03/pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3", size = 2006508 },
- { url = "https://files.pythonhosted.org/packages/2b/cc/f77e8e242171d2158309f830f7d5d07e0531b756106f36bc18712dc439df/pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a", size = 2113693 },
- { url = "https://files.pythonhosted.org/packages/54/7a/7be6a7bd43e0a47c147ba7fbf124fe8aaf1200bc587da925509641113b2d/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782", size = 2074224 },
- { url = "https://files.pythonhosted.org/packages/2a/07/31cf8fadffbb03be1cb520850e00a8490c0927ec456e8293cafda0726184/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9", size = 2245403 },
- { url = "https://files.pythonhosted.org/packages/b6/8d/bbaf4c6721b668d44f01861f297eb01c9b35f612f6b8e14173cb204e6240/pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e", size = 2242331 },
- { url = "https://files.pythonhosted.org/packages/bb/93/3cc157026bca8f5006250e74515119fcaa6d6858aceee8f67ab6dc548c16/pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9", size = 1910571 },
- { url = "https://files.pythonhosted.org/packages/5b/90/7edc3b2a0d9f0dda8806c04e511a67b0b7a41d2187e2003673a996fb4310/pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3", size = 1956504 },
{ url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982 },
{ url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412 },
{ url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749 },
@@ -937,15 +281,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 },
{ url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 },
{ url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 },
- { url = "https://files.pythonhosted.org/packages/08/98/dbf3fdfabaf81cda5622154fda78ea9965ac467e3239078e0dcd6df159e7/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101", size = 2024034 },
- { url = "https://files.pythonhosted.org/packages/8d/99/7810aa9256e7f2ccd492590f86b79d370df1e9292f1f80b000b6a75bd2fb/pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64", size = 1858578 },
- { url = "https://files.pythonhosted.org/packages/d8/60/bc06fa9027c7006cc6dd21e48dbf39076dc39d9abbaf718a1604973a9670/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d", size = 1892858 },
- { url = "https://files.pythonhosted.org/packages/f2/40/9d03997d9518816c68b4dfccb88969756b9146031b61cd37f781c74c9b6a/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535", size = 2068498 },
- { url = "https://files.pythonhosted.org/packages/d8/62/d490198d05d2d86672dc269f52579cad7261ced64c2df213d5c16e0aecb1/pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d", size = 2108428 },
- { url = "https://files.pythonhosted.org/packages/9a/ec/4cd215534fd10b8549015f12ea650a1a973da20ce46430b68fc3185573e8/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6", size = 2069854 },
- { url = "https://files.pythonhosted.org/packages/1a/1a/abbd63d47e1d9b0d632fee6bb15785d0889c8a6e0a6c3b5a8e28ac1ec5d2/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca", size = 2237859 },
- { url = "https://files.pythonhosted.org/packages/80/1c/fa883643429908b1c90598fd2642af8839efd1d835b65af1f75fba4d94fe/pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039", size = 2239059 },
- { url = "https://files.pythonhosted.org/packages/d4/29/3cade8a924a61f60ccfa10842f75eb12787e1440e2b8660ceffeb26685e7/pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27", size = 2066661 },
]

[[package]]
@@ -957,8 +292,7 @@ dependencies = [
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
{ name = "iniconfig" },
{ name = "packaging" },
- { name = "pluggy", version = "1.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" },
- { name = "pluggy", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" },
+ { name = "pluggy" },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 }
@@ -966,27 +300,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 },
]

-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "six" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
-]
-
-[[package]]
-name = "pytz"
-version = "2025.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 },
-]
-
[[package]]
name = "regex"
version = "2024.11.6"
@@ -1054,38 +367,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ed/e3/c446a64984ea9f69982ba1a69d4658d5014bc7a0ea468a07e1a1265db6e2/regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d", size = 787733 },
{ url = "https://files.pythonhosted.org/packages/2b/f1/e40c8373e3480e4f29f2692bd21b3e05f296d3afebc7e5dcf21b9756ca1c/regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff", size = 262122 },
{ url = "https://files.pythonhosted.org/packages/45/94/bc295babb3062a731f52621cdc992d123111282e291abaf23faa413443ea/regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a", size = 273545 },
- { url = "https://files.pythonhosted.org/packages/44/0f/207b37e6e08d548fac0aa00bf0b7464126315d58ab5161216b8cb3abb2aa/regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b", size = 482777 },
- { url = "https://files.pythonhosted.org/packages/5a/5a/586bafa294c5d2451265d3685815606c61e620f469cac3b946fff0a4aa48/regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3", size = 287751 },
- { url = "https://files.pythonhosted.org/packages/08/92/9df786fad8a4e0766bfc9a2e334c5f0757356070c9639b2ec776b8cdef3d/regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467", size = 284552 },
- { url = "https://files.pythonhosted.org/packages/0a/27/0b3cf7d9fbe43301aa3473d54406019a7380abe4e3c9ae250bac13c4fdb3/regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd", size = 783587 },
- { url = "https://files.pythonhosted.org/packages/89/38/499b32cbb61163af60a5c5ff26aacea7836fe7e3d821e76af216e996088c/regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf", size = 822904 },
- { url = "https://files.pythonhosted.org/packages/3f/a4/e3b11c643e5ae1059a08aeef971973f0c803d2a9ae2e7a86f97c68146a6c/regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd", size = 809900 },
- { url = "https://files.pythonhosted.org/packages/5a/c8/dc7153ceb5bcc344f5c4f0291ea45925a5f00009afa3849e91561ac2e847/regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6", size = 785105 },
- { url = "https://files.pythonhosted.org/packages/2a/29/841489ea52013062b22625fbaf49b0916aeb62bae2e56425ac30f9dead46/regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f", size = 773033 },
- { url = "https://files.pythonhosted.org/packages/3e/4e/4a0da5e87f7c2dc73a8505785d5af2b1a19c66f4645b93caa50b7eb08242/regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5", size = 702374 },
- { url = "https://files.pythonhosted.org/packages/94/6e/444e66346600d11e8a0f4bb31611973cffa772d5033ba1cf1f15de8a0d52/regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df", size = 769990 },
- { url = "https://files.pythonhosted.org/packages/da/28/95c3ed6cd51b27f54e59940400e2a3ddd3f8bbbc3aaf947e57a67104ecbd/regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773", size = 775345 },
- { url = "https://files.pythonhosted.org/packages/07/5d/0cd19cf44d96a7aa31526611c24235d21d27c23b65201cb2c5cac508dd42/regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c", size = 840379 },
- { url = "https://files.pythonhosted.org/packages/2a/13/ec3f8d85b789ee1c6ffbdfd4092fd901416716317ee17bf51aa2890bac96/regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc", size = 845842 },
- { url = "https://files.pythonhosted.org/packages/50/cb/7170247e65afea2bf9204bcb2682f292b0a3a57d112478da199b84d59792/regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f", size = 775026 },
- { url = "https://files.pythonhosted.org/packages/cc/06/c817c9201f09b7d9dd033039ba90d8197c91e9fe2984141f2d1de270c159/regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4", size = 261738 },
- { url = "https://files.pythonhosted.org/packages/cf/69/c39e16320400842eb4358c982ef5fc680800866f35ebfd4dd38a22967ce0/regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001", size = 274094 },
- { url = "https://files.pythonhosted.org/packages/89/23/c4a86df398e57e26f93b13ae63acce58771e04bdde86092502496fa57f9c/regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839", size = 482682 },
- { url = "https://files.pythonhosted.org/packages/3c/8b/45c24ab7a51a1658441b961b86209c43e6bb9d39caf1e63f46ce6ea03bc7/regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e", size = 287679 },
- { url = "https://files.pythonhosted.org/packages/7a/d1/598de10b17fdafc452d11f7dada11c3be4e379a8671393e4e3da3c4070df/regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf", size = 284578 },
- { url = "https://files.pythonhosted.org/packages/49/70/c7eaa219efa67a215846766fde18d92d54cb590b6a04ffe43cef30057622/regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b", size = 782012 },
- { url = "https://files.pythonhosted.org/packages/89/e5/ef52c7eb117dd20ff1697968219971d052138965a4d3d9b95e92e549f505/regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0", size = 820580 },
- { url = "https://files.pythonhosted.org/packages/5f/3f/9f5da81aff1d4167ac52711acf789df13e789fe6ac9545552e49138e3282/regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b", size = 809110 },
- { url = "https://files.pythonhosted.org/packages/86/44/2101cc0890c3621b90365c9ee8d7291a597c0722ad66eccd6ffa7f1bcc09/regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef", size = 780919 },
- { url = "https://files.pythonhosted.org/packages/ce/2e/3e0668d8d1c7c3c0d397bf54d92fc182575b3a26939aed5000d3cc78760f/regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48", size = 771515 },
- { url = "https://files.pythonhosted.org/packages/a6/49/1bc4584254355e3dba930a3a2fd7ad26ccba3ebbab7d9100db0aff2eedb0/regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13", size = 696957 },
- { url = "https://files.pythonhosted.org/packages/c8/dd/42879c1fc8a37a887cd08e358af3d3ba9e23038cd77c7fe044a86d9450ba/regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2", size = 768088 },
- { url = "https://files.pythonhosted.org/packages/89/96/c05a0fe173cd2acd29d5e13c1adad8b706bcaa71b169e1ee57dcf2e74584/regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95", size = 774752 },
- { url = "https://files.pythonhosted.org/packages/b5/f3/a757748066255f97f14506483436c5f6aded7af9e37bca04ec30c90ca683/regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9", size = 838862 },
- { url = "https://files.pythonhosted.org/packages/5c/93/c6d2092fd479dcaeea40fc8fa673822829181ded77d294a7f950f1dda6e2/regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f", size = 842622 },
- { url = "https://files.pythonhosted.org/packages/ff/9c/daa99532c72f25051a90ef90e1413a8d54413a9e64614d9095b0c1c154d0/regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b", size = 772713 },
- { url = "https://files.pythonhosted.org/packages/13/5d/61a533ccb8c231b474ac8e3a7d70155b00dfc61af6cafdccd1947df6d735/regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57", size = 261756 },
- { url = "https://files.pythonhosted.org/packages/dc/7b/e59b7f7c91ae110d154370c24133f947262525b5d6406df65f23422acc17/regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983", size = 274110 },
]

[[package]]
@@ -1113,15 +394,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/42/d58086ec20f52d2b0140752ae54b355ea2be2ed46f914231136dd1effcc7/ruff-0.11.12-py3-none-win_arm64.whl", hash = "sha256:65194e37853158d368e333ba282217941029a28ea90913c67e558c611d04daa5", size = 10697770 },
]

-[[package]]
-name = "six"
-version = "1.17.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
-]
-
[[package]]
name = "sortedcontainers"
version = "2.4.0"
@@ -1184,18 +456,9 @@ name = "typing-inspection"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "typing-extensions", marker = "python_full_version >= '3.9'" },
+ { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552 },
]
-
-[[package]]
-name = "tzdata"
-version = "2025.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
-]