Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
52 changes: 0 additions & 52 deletions AGENTS.md
Original file line number Diff line number Diff line change
Expand Up @@ -451,58 +451,6 @@ if supports_where(obj):
result = obj.where("condition")
```

### Mypyc-Compatible Metadata Class Pattern

When defining data-holding classes intended for core modules (`sqlspec/core/`, `sqlspec/driver/`) that will be compiled with MyPyC, use regular classes with `__slots__` and explicitly implement `__init__`, `__repr__`, `__eq__`, and `__hash__`. This approach ensures optimal performance and MyPyC compatibility, as `dataclasses` are not directly supported by MyPyC for compilation.

**Key Principles:**

- **`__slots__`**: Reduces memory footprint and speeds up attribute access.
- **Explicit `__init__`**: Defines the constructor for the class.
- **Explicit `__repr__`**: Provides a clear string representation for debugging.
- **Explicit `__eq__`**: Enables correct equality comparisons.
- **Explicit `__hash__`**: Makes instances hashable, allowing them to be used in sets or as dictionary keys. The hash implementation should be based on all fields that define the object's identity.

**Example Implementation:**

```python
class MyMetadata:
__slots__ = ("field1", "field2", "optional_field")

def __init__(self, field1: str, field2: int, optional_field: str | None = None) -> None:
self.field1 = field1
self.field2 = field2
self.optional_field = optional_field

def __repr__(self) -> str:
return f"MyMetadata(field1={self.field1!r}, field2={self.field2!r}, optional_field={self.optional_field!r})"

def __eq__(self, other: object) -> bool:
if not isinstance(other, MyMetadata):
return NotImplemented
return (
self.field1 == other.field1
and self.field2 == other.field2
and self.optional_field == other.optional_field
)

def __hash__(self) -> int:
return hash((self.field1, self.field2, self.optional_field))
```

**When to Use:**

- For all new data-holding classes in performance-critical paths (e.g., `sqlspec/driver/_common.py`).
- When MyPyC compilation is enabled for the module containing the class.

**Anti-Patterns to Avoid:**

- Using `@dataclass` decorators for classes intended for MyPyC compilation.
- Omitting `__slots__` when defining performance-critical data structures.
- Relying on default `__eq__` or `__hash__` behavior for complex objects, especially for equality comparisons in collections.

---

### Performance Patterns (MANDATORY)

**PERF401 - List Operations**:
Expand Down
32 changes: 0 additions & 32 deletions docs/examples/usage/usage_cli_1.py

This file was deleted.

28 changes: 0 additions & 28 deletions docs/examples/usage/usage_cli_2.py

This file was deleted.

73 changes: 23 additions & 50 deletions docs/examples/usage/usage_drivers_and_querying_10.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,57 +12,30 @@

def test_example_10_duckdb_config(tmp_path: Path) -> None:
    """Docs example smoke test: DuckDB configs (in-memory and file-backed).

    Uses pytest's ``tmp_path`` fixture for the persistent database so the
    test never writes into the working directory.
    """
    # start-example
    from sqlspec import SQLSpec
    from sqlspec.adapters.duckdb import DuckDBConfig

    spec = SQLSpec()
    # In-memory
    config = DuckDBConfig()

    # Persistent
    database_file = tmp_path / "analytics.duckdb"
    # BUG FIX: pass the full path. ``database_file.name`` is only the bare
    # filename ("analytics.duckdb"), which would create the database in the
    # current working directory and defeat tmp_path isolation.
    config = DuckDBConfig(pool_config={"database": str(database_file), "read_only": False})

    with spec.provide_session(config) as session:
        # Create table from Parquet
        session.execute(f"""
            CREATE TABLE if not exists users AS
            SELECT * FROM read_parquet('{Path(__file__).parent.parent / "queries/users.parquet"}')
        """)

        # Analytical query
        session.execute("""
            SELECT date_trunc('day', created_at) as day,
                   count(*) as user_count
            FROM users
            GROUP BY day
            ORDER BY day
        """)
    # end-example
53 changes: 21 additions & 32 deletions docs/examples/usage/usage_drivers_and_querying_6.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
# Test module converted from docs example - code-block 6
"""Minimal smoke test for drivers_and_querying example 6."""

import tempfile
from pathlib import Path

from sqlspec import SQLSpec
def test_example_6_sqlite_config(tmp_path: Path) -> None:
    """Docs example smoke test: SQLite config with basic DDL/DML/query.

    Uses pytest's ``tmp_path`` fixture so the database file is isolated
    per test run.
    """
    # start-example
    from sqlspec.adapters.sqlite import SqliteConfig

    spec = SQLSpec()

    database_file = tmp_path / "myapp.db"
    # BUG FIX: pass the full path. ``database_file.name`` is only the bare
    # filename ("myapp.db"), which would create the database in the current
    # working directory and defeat tmp_path isolation.
    config = SqliteConfig(pool_config={"database": str(database_file), "timeout": 5.0, "check_same_thread": False})

    with spec.provide_session(config) as session:
        # Create table
        session.execute("""
            CREATE TABLE IF NOT EXISTS usage6_users (
                id INTEGER PRIMARY KEY,
                name TEXT NOT NULL
            )
        """)

        # Insert with parameters
        session.execute("INSERT INTO usage6_users (name) VALUES (?)", "Alice")

        # Query
        result = session.execute("SELECT * FROM usage6_users")
        result.all()
    # end-example
52 changes: 52 additions & 0 deletions docs/examples/usage/usage_migrations_1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
__all__ = ("test_async_methods",)


from pytest_databases.docker.postgres import PostgresService


async def test_async_methods(postgres_service: PostgresService) -> None:
    """Docs example: async migration commands on ``AsyncpgConfig``.

    NOTE(review): the awaited calls below are real operations against the
    pytest-databases Postgres service, and ``create_migration`` /
    ``init_migrations`` presumably write to a relative ``migrations``
    directory — confirm CWD isolation in CI.
    """
    # start-example
    from sqlspec.adapters.asyncpg import AsyncpgConfig

    # Build a DSN from the pytest-databases service fixture.
    dsn = (
        f"postgresql://{postgres_service.user}:{postgres_service.password}"
        f"@{postgres_service.host}:{postgres_service.port}/{postgres_service.database}"
    )
    config = AsyncpgConfig(
        pool_config={"dsn": dsn}, migration_config={"enabled": True, "script_location": "migrations"}
    )

    # Apply migrations
    await config.migrate_up("head")
    # Or use the alias
    # await config.upgrade("head")

    # Rollback one revision
    await config.migrate_down("-1")
    # Or use the alias
    # await config.downgrade("-1")

    # Check current version
    await config.get_current_migration(verbose=True)
    # Create new migration
    await config.create_migration("add users table", file_type="sql")

    # Initialize migrations directory
    await config.init_migrations()

    # Stamp database to specific revision
    await config.stamp_migration("0003")

    # Convert timestamp to sequential migrations
    await config.fix_migrations(dry_run=False, update_database=True, yes=True)
    # end-example
    # The hasattr checks below only verify method presence; the awaited
    # calls above already exercised the real migration commands.
    assert hasattr(config, "migrate_up")
    assert hasattr(config, "upgrade")
    assert hasattr(config, "migrate_down")
    assert hasattr(config, "downgrade")
    assert hasattr(config, "get_current_migration")
    assert hasattr(config, "create_migration")
    assert hasattr(config, "init_migrations")
    assert hasattr(config, "stamp_migration")
    assert hasattr(config, "fix_migrations")
49 changes: 49 additions & 0 deletions docs/examples/usage/usage_migrations_2.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
__all__ = ("test_sync_methods",)


def test_sync_methods() -> None:
    """Docs example: synchronous migration commands on ``SqliteConfig``.

    BUG FIX: the example previously lived at module scope, so importing this
    module (e.g. during pytest collection) executed real migration commands
    against ``myapp.db`` and wrote a ``migrations/`` directory as a side
    effect. Running the example inside the test function keeps import/
    collection side-effect free, matching the sibling docs-example tests.
    """
    # start-example
    from sqlspec.adapters.sqlite import SqliteConfig

    config = SqliteConfig(
        pool_config={"database": "myapp.db"}, migration_config={"enabled": True, "script_location": "migrations"}
    )

    # Apply migrations (no await needed)
    config.migrate_up("head")
    # Or use the alias
    config.upgrade("head")

    # Rollback one revision
    config.migrate_down("-1")
    # Or use the alias
    config.downgrade("-1")

    # Check current version
    current = config.get_current_migration(verbose=True)
    print(current)

    # Create new migration
    config.create_migration("add users table", file_type="sql")

    # Initialize migrations directory
    config.init_migrations()

    # Stamp database to specific revision
    config.stamp_migration("0003")

    # Convert timestamp to sequential migrations
    config.fix_migrations(dry_run=False, update_database=True, yes=True)
    # end-example

    # Smoke tests for method presence
    assert hasattr(config, "migrate_up")
    assert hasattr(config, "upgrade")
    assert hasattr(config, "migrate_down")
    assert hasattr(config, "downgrade")
    assert hasattr(config, "get_current_migration")
    assert hasattr(config, "create_migration")
    assert hasattr(config, "init_migrations")
    assert hasattr(config, "stamp_migration")
    assert hasattr(config, "fix_migrations")
Loading
Loading