Skip to content

Commit 8e1197f

Browse files
committed
black project
1 parent e2e131f commit 8e1197f

10 files changed

Lines changed: 72 additions & 60 deletions

File tree

pycds/alembic/versions/33179b5ae85a_add_network_key_column_to_meta_network.py

Lines changed: 26 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
Create Date: 2026-01-07 20:25:34.314026
66
77
Notes: This process was made more complicated by some assumptions made by the history tracking code.
8-
In particular it assumes that the primary and history table have the same column order with the
8+
In particular it assumes that the primary and history table have the same column order with the
99
exception that history tables have additional columns at the end. When adding a new column it is added
1010
at the end and therefore breaks the assumption. To work around this, we have to recreate the history table
1111
with the correct column order. This involves renaming the existing history table, creating a new one with
@@ -55,19 +55,17 @@ def upgrade():
5555
"""
5656
)
5757
)
58-
58+
5959
# Drop existing triggers before modifying table structure so that we don't accidentally track
6060
# the intermediate states
6161
drop_history_triggers("meta_network")
62-
62+
6363
# Rename the existing history table to preserve existing history data
6464
# We'll copy data from this into the new table with the correct column order
6565
op.execute(
66-
text(
67-
f"ALTER TABLE {schema_name}.meta_network_hx RENAME TO meta_network_hx_old"
68-
)
66+
text(f"ALTER TABLE {schema_name}.meta_network_hx RENAME TO meta_network_hx_old")
6967
)
70-
68+
7169
op.add_column(
7270
"meta_network",
7371
sa.Column(
@@ -77,7 +75,7 @@ def upgrade():
7775
),
7876
schema=schema_name,
7977
)
80-
78+
8179
op.execute(
8280
text(
8381
f"""
@@ -86,14 +84,14 @@ def upgrade():
8684
"""
8785
)
8886
)
89-
87+
9088
op.create_unique_constraint(
9189
"uq_meta_network_network_key",
9290
"meta_network",
9391
["network_key"],
9492
schema=schema_name,
9593
)
96-
94+
9795
# Create a trigger function to auto-populate network_key on INSERT. Must be a trigger as
9896
# Default values can't call functions that access other columns.
9997
op.execute(
@@ -113,7 +111,7 @@ def upgrade():
113111
"""
114112
)
115113
)
116-
114+
117115
# Create trigger to run before INSERT
118116
op.execute(
119117
text(
@@ -125,11 +123,11 @@ def upgrade():
125123
"""
126124
)
127125
)
128-
126+
129127
# Recreate the history table with the new column structure
130128
create_history_table("meta_network", foreign_tables=None)
131129
grant_standard_table_privileges(f"{schema_name}.meta_network_hx")
132-
130+
133131
# Copy existing history data from the old table to the new one
134132
op.execute(
135133
text(
@@ -148,7 +146,7 @@ def upgrade():
148146
"""
149147
)
150148
)
151-
149+
152150
# Reset the sequence to continue from the last ID
153151
op.execute(
154152
text(
@@ -160,10 +158,10 @@ def upgrade():
160158
"""
161159
)
162160
)
163-
161+
164162
# Update foreign key references in dependent tables to point to the new history table
165163
# meta_station_hx and meta_vars_hx have foreign keys to meta_network_hx
166-
164+
167165
# Drop the foreign key constraints from dependent tables
168166
op.execute(
169167
text(
@@ -175,10 +173,10 @@ def upgrade():
175173
f"ALTER TABLE {schema_name}.meta_vars_hx DROP CONSTRAINT meta_vars_hx_meta_network_hx_id_fkey"
176174
)
177175
)
178-
176+
179177
# Drop the old history table now that data has been copied and FKs removed
180178
op.execute(text(f"DROP TABLE {schema_name}.meta_network_hx_old"))
181-
179+
182180
# Recreate the foreign key constraints pointing to the new history table
183181
op.execute(
184182
text(
@@ -200,13 +198,15 @@ def upgrade():
200198
"""
201199
)
202200
)
203-
201+
204202
# Recreate the history tracking triggers
205203
create_primary_table_triggers("meta_network")
206204
create_history_table_triggers("meta_network", foreign_tables=None)
207-
205+
208206
# Create indexes on the history table
209-
create_history_table_indexes("meta_network", "network_id", foreign_tables=None, extras=None)
207+
create_history_table_indexes(
208+
"meta_network", "network_id", foreign_tables=None, extras=None
209+
)
210210

211211

212212
def downgrade():
@@ -216,10 +216,8 @@ def downgrade():
216216
f"DROP TRIGGER IF EXISTS set_network_key_default_trigger ON {schema_name}.meta_network"
217217
)
218218
)
219-
op.execute(
220-
text(f"DROP FUNCTION IF EXISTS {schema_name}.set_network_key_default()")
221-
)
222-
219+
op.execute(text(f"DROP FUNCTION IF EXISTS {schema_name}.set_network_key_default()"))
220+
223221
# Drop the constraint and column from primary table
224222
op.drop_constraint(
225223
"uq_meta_network_network_key",
@@ -228,13 +226,13 @@ def downgrade():
228226
schema=schema_name,
229227
)
230228

231-
# When dropping we don't have the same issues with column order so we can safely just drop the
229+
# When dropping we don't have the same issues with column order so we can safely just drop the
232230
# column to return to the pre-migration state
233231
op.drop_column("meta_network", "network_key", schema=schema_name)
234-
232+
235233
# Drop the column from history table
236234
op.drop_column("meta_network_hx", "network_key", schema=schema_name)
237-
235+
238236
# Drop the key generation function
239237
op.execute(
240238
text(f"DROP FUNCTION IF EXISTS {schema_name}.gen_network_key_from_name(text)")

pycds/orm/tables/__init__.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,11 +21,14 @@
2121
else:
2222
# Specific version requested - import from that version module
2323
import importlib
24-
_version_module = importlib.import_module(f"pycds.orm.tables.version_{_requested_version}")
25-
24+
25+
_version_module = importlib.import_module(
26+
f"pycds.orm.tables.version_{_requested_version}"
27+
)
28+
2629
# Import all public members from the version module
2730
for _name in dir(_version_module):
28-
if not _name.startswith('_'):
31+
if not _name.startswith("_"):
2932
globals()[_name] = getattr(_version_module, _name)
30-
33+
3134
del importlib, _version_module, _name

pycds/orm/tables/base.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,6 @@
4646
metadata = Base.metadata
4747

4848

49-
5049
# string templating functions for check functions applied against multiple columns
5150
def no_newline_ck_name(column):
5251
return f"{column}_nolinebreak"

pycds/orm/tables/version_33179b5ae85a.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,5 +120,5 @@ class NetworkHistory(Base):
120120
def __str__(self):
121121
return f"<CRMP NetworkHistory {self.name}>"
122122

123-
# import other tables from base
124123

124+
# import other tables from base

pycds/orm/versioning.py

Lines changed: 26 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -13,34 +13,36 @@
1313
class TableVersionManager:
1414
"""
1515
Manages versioned ORM table definitions.
16-
16+
1717
Usage:
1818
# Get tables at specific revision
1919
tables = get_tables_at_revision("a59d64cf16ca")
2020
Network = tables.Network
2121
NetworkHistory = tables.NetworkHistory
22-
22+
2323
# Or use current version (default)
2424
tables = get_tables_at_revision() # Returns current version
2525
Network = tables.Network
2626
"""
27-
27+
2828
def __init__(self, revision: Optional[str] = None):
2929
self.revision = revision
3030
self._module = None
31-
31+
3232
def _load_module(self):
3333
"""Lazily load the versioned module."""
3434
if self._module is None:
3535
if self.revision is None:
3636
# Import current/head version from head version module
37-
self._module = importlib.import_module("pycds.orm.tables.version_33179b5ae85a")
37+
self._module = importlib.import_module(
38+
"pycds.orm.tables.version_33179b5ae85a"
39+
)
3840
else:
3941
# Import specific version
4042
module_name = f"pycds.orm.tables.version_{self.revision}"
4143
self._module = importlib.import_module(module_name)
4244
return self._module
43-
45+
4446
def __getattr__(self, name):
4547
"""Dynamically access classes from the versioned module."""
4648
module = self._load_module()
@@ -56,20 +58,20 @@ def __getattr__(self, name):
5658
def get_tables_at_revision(revision: Optional[str] = None) -> TableVersionManager:
5759
"""
5860
Get ORM table classes at a specific migration revision.
59-
61+
6062
Args:
6163
revision: Alembic revision ID (e.g., "a59d64cf16ca").
6264
If None, returns current version.
63-
65+
6466
Returns:
6567
TableVersionManager that provides access to versioned table classes.
66-
68+
6769
Examples:
6870
# Get tables at old revision (before network_key)
6971
tables = get_tables_at_revision("a59d64cf16ca")
7072
Network = tables.Network
7173
assert not hasattr(Network, 'key')
72-
74+
7375
# Get current tables
7476
tables = get_tables_at_revision()
7577
Network = tables.Network
@@ -85,30 +87,33 @@ def get_tables_at_revision(revision: Optional[str] = None) -> TableVersionManage
8587
def set_global_table_version(revision: Optional[str] = None):
8688
"""
8789
Set the global table version that will be used by default.
88-
90+
8991
This is useful for test fixtures that need to ensure all table
9092
references use a specific version.
91-
93+
9294
IMPORTANT: This clears the module cache for pycds.orm.tables and related
9395
modules to ensure the new version is loaded on next import.
94-
96+
9597
Args:
9698
revision: Alembic revision ID, or None for current version.
9799
"""
98100
global _global_version
99101
_global_version = revision
100-
102+
101103
# Clear module cache for tables/views/matviews modules to force reload with new version
102104
# We need to clear modules that import from pycds.orm.tables (like main pycds module)
103105
# but NOT modules that don't depend on tables (like pycds.database, pycds.context, etc.)
104106
# since they may have been imported by test files and clearing them breaks references.
105107
modules_to_clear = [
106-
key for key in sys.modules.keys()
107-
if (key.startswith('pycds.orm.tables') or
108-
key.startswith('pycds.orm.views') or
109-
key.startswith('pycds.orm.native_matviews') or
110-
key.startswith('pycds.orm.manual_matviews') or
111-
key == 'pycds')
108+
key
109+
for key in sys.modules.keys()
110+
if (
111+
key.startswith("pycds.orm.tables")
112+
or key.startswith("pycds.orm.views")
113+
or key.startswith("pycds.orm.native_matviews")
114+
or key.startswith("pycds.orm.manual_matviews")
115+
or key == "pycds"
116+
)
112117
]
113118
for module_name in modules_to_clear:
114119
del sys.modules[module_name]
@@ -122,7 +127,7 @@ def get_global_table_version() -> Optional[str]:
122127
def get_default_tables() -> TableVersionManager:
123128
"""
124129
Get tables using the global version if set, otherwise current version.
125-
130+
126131
This respects the global version set by set_global_table_version().
127132
"""
128133
return TableVersionManager(_global_version)

tests/alembic_migrations/versions/v_33179b5ae85a_add_network_key_column_to_meta_network/test_smoke.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def test_upgrade(
3838
)
3939
col = check_if_column_exists(column_name, meta_network_table)
4040
assert (col["nullable"] == True) and (col["name"] == column_name)
41-
41+
4242
# Check that column has been added to meta_network_hx
4343
meta_network_hx_table = inspect(alembic_engine).get_columns(
4444
history_table_name, schema=schema_name
@@ -61,7 +61,7 @@ def test_downgrade(alembic_engine, alembic_runner, schema_name):
6161
)
6262
col = check_if_column_exists(column_name, meta_network_table)
6363
assert col == null
64-
64+
6565
# Check that column has been removed from meta_network_hx
6666
meta_network_hx_table = inspect(alembic_engine).get_columns(
6767
history_table_name, schema=schema_name

tests/alembic_migrations/versions/v_7a3b247c577b_add_varsperhistory_native_matview/test_smoke.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,9 @@
2222
@pytest.mark.update20
2323
@pytest.mark.parametrize("supports_matviews", [True, False])
2424
def test_mock(mocker, supports_matviews):
25-
mock_func = mocker.patch("pycds.database.db_supports_matviews", return_value=supports_matviews)
25+
mock_func = mocker.patch(
26+
"pycds.database.db_supports_matviews", return_value=supports_matviews
27+
)
2628
# Call the mock directly since pycds module reference may be stale
2729
assert mock_func() is supports_matviews
2830

tests/alembic_migrations/versions/v_a59d64cf16ca_add_hx_tkg_to_main_metadata_tables/test_on_real_tables/test_on_real_tables.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
# IMPORTANT: Set table version BEFORE any pycds imports
1616
# This test needs the table schema at revision a59d64cf16ca (before network_key was added)
1717
from pycds.orm.versioning import set_global_table_version
18+
1819
set_global_table_version("a59d64cf16ca")
1920

2021
import logging

tests/alembic_migrations/versions/v_bdc28573df56_add_obs_raw_indexes/test_smoke.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,9 @@
2323
@pytest.mark.update20
2424
@pytest.mark.parametrize("item_names", [set(), {"alpha", "beta"}])
2525
def test_mock(mocker, item_names):
26-
mock_func = mocker.patch("pycds.database.get_schema_item_names", return_value=item_names)
26+
mock_func = mocker.patch(
27+
"pycds.database.get_schema_item_names", return_value=item_names
28+
)
2729
# Call the mock directly since pycds module reference may be stale
2830
assert mock_func() == item_names
2931

tests/climate_baseline_helpers/test_climate_baseline_helpers.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -152,7 +152,9 @@ def it_creates_precip_variable(sesh_with_climate_baseline_variables):
152152
def it_creates_no_more_than_one_of_each(pycds_sesh):
153153
sesh = pycds_sesh
154154
get_or_create_pcic_climate_baseline_variables(sesh)
155-
get_or_create_pcic_climate_baseline_variables(sesh) # TODO: Should this be run twice?
155+
get_or_create_pcic_climate_baseline_variables(
156+
sesh
157+
) # TODO: Should this be run twice?
156158
results = sesh.query(Variable).filter(Variable.name.like("%_Climatology"))
157159
assert results.count() == 3
158160

0 commit comments

Comments
 (0)