
Commit

…grid-model-io into feature/converters-doc
nitbharambe committed Nov 8, 2022
2 parents 966aa05 + 8d19666 commit 50809d0
Showing 27 changed files with 1,829 additions and 258 deletions.
2 changes: 1 addition & 1 deletion docs/examples/data/tiny-net/sym_output.json
@@ -35,4 +35,4 @@
     {"id": 201, "energized": 1, "u_residual": 8.033307352661723e-08, "u_angle_residual": -1.543556948924163e-14},
     {"id": 301, "energized": 1, "u_residual": 8.038902876705833e-08, "u_angle_residual": -2.7651492207070305e-15}
   ]
-}
+}
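
For context, the file above is plain structured JSON: each component name maps to a list of records. A minimal sketch of reading it with the standard library (the path is the one from the file header; adjust it to your checkout):

import json

# Load the structured sym_output data shown above.
with open("docs/examples/data/tiny-net/sym_output.json") as f:
    result = json.load(f)

# Each component maps to a list of records; the diff shows records carrying
# per-object residuals from the calculation.
for component, records in result.items():
    for record in records:
        print(component, record["id"], record.get("u_residual"))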
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -78,7 +78,7 @@ power_grid_model_io = ["config/**/*.yaml"]

 [tool.pytest.ini_options]
 testpaths = ["tests/unit"]
-addopts = ["--cov=power_grid_model_io", "--cov-report=term", "--cov-report=html:cov_html", "--cov-fail-under=20"]
+addopts = ["--cov=power_grid_model_io", "--cov-report=term", "--cov-report=html:cov_html", "--cov-fail-under=82.67"]

 [tool.black]
 line-length = 120
@@ -91,6 +91,7 @@ line_length = 120
 [tool.pylint]
 max-line-length = 120
 ignore-paths = [
+    "docs/",
     "examples/",
     "tests/",
     "setup.py",
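The notable change here is the coverage gate: --cov-fail-under is raised from 20 to 82.67, so any test run below that line-coverage percentage now fails. As a sketch, a plain pytest run under this config is equivalent to passing the configured options explicitly, for example programmatically:

import pytest

# Explicit equivalent of the [tool.pytest.ini_options] entries above
# (a sketch; normally you simply run `pytest` and addopts are applied).
exit_code = pytest.main(
    [
        "tests/unit",                  # testpaths
        "--cov=power_grid_model_io",   # measure coverage of the package
        "--cov-report=term",           # terminal summary
        "--cov-report=html:cov_html",  # HTML report written to cov_html/
        "--cov-fail-under=82.67",      # fail the run below 82.67% coverage
    ]
)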
8 changes: 8 additions & 0 deletions src/power_grid_model_io/config/examples/multipliers.yaml
@@ -0,0 +1,8 @@
+# SPDX-FileCopyrightText: 2022 Contributors to the Power Grid Model project <[email protected]>
+#
+# SPDX-License-Identifier: MPL-2.0
+---
+multipliers:
+  .+_percent: 0.01
+  .+_k(a|v|w): 1_000.0
+  .+_m(w|va|var): 1_000_000.0
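
The keys in this new file are regular expressions matched against field names, and the values are the factors that bring matching fields to SI units. A minimal sketch of how such a table could be applied (an illustration, not the library's actual implementation):

import re

# The mapping from the YAML above: regex pattern -> multiplier.
multipliers = {
    r".+_percent": 0.01,
    r".+_k(a|v|w)": 1_000.0,
    r".+_m(w|va|var)": 1_000_000.0,
}

def apply_multiplier(column: str, value: float) -> float:
    # Scale the value if the column name fully matches one of the patterns.
    for pattern, factor in multipliers.items():
        if re.fullmatch(pattern, column):
            return value * factor
    return value

assert apply_multiplier("p_mw", 1.5) == 1_500_000.0  # MW -> W
assert apply_multiplier("u_kv", 10.0) == 10_000.0    # kV -> V
assert apply_multiplier("angle", 0.3) == 0.3         # no match: unchanged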
14 changes: 7 additions & 7 deletions src/power_grid_model_io/config/excel/vision_en.yaml
@@ -373,18 +373,18 @@ grid:
 units:
   A:
   F:
-  µF: 0.000001
+  µF: 0.000_001
   V:
-  kV: 1000.0
+  kV: 1_000.0
   VA:
-  MVA: 1000000.0
+  MVA: 1_000_000.0
   VAR:
-  Mvar: 1000000.0
+  Mvar: 1_000_000.0
   W:
-  kW: 1000.0
-  MW: 1000000.0
+  kW: 1_000.0
+  MW: 1_000_000.0
   Wp:
-  MWp: 1000000.0
+  MWp: 1_000_000.0
   m/s:
   ohm:
   Ohm: 1.0
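These entries map a unit symbol to its SI multiplier; an empty entry means the unit is already the SI base (factor 1.0). A small sketch of the lookup (illustrative only, not the converter's code):

# Subset of the units table above; None stands for an empty (SI) entry.
units = {
    "A": None,
    "µF": 0.000_001,
    "kV": 1_000.0,
    "MVA": 1_000_000.0,
    "kW": 1_000.0,
    "MW": 1_000_000.0,
}

def to_si(value: float, unit: str) -> float:
    # Convert a value in `unit` to the corresponding SI base unit.
    factor = units.get(unit)
    return value if factor is None else value * factor

print(to_si(10.5, "MW"))  # 10500000.0 (W)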
71 changes: 51 additions & 20 deletions src/power_grid_model_io/converters/base_converter.py
@@ -18,9 +18,7 @@


 class BaseConverter(Generic[T], ABC):
-    """
-    Abstract converter class
-    """
+    """Abstract converter class"""

     def __init__(self, source: Optional[BaseDataStore[T]] = None, destination: Optional[BaseDataStore[T]] = None):
         """
@@ -32,10 +30,15 @@ def __init__(self, source: Optional[BaseDataStore[T]] = None, destination: Optional[BaseDataStore[T]] = None):
         self._lookup = AutoID()

     def load_input_data(self, data: Optional[T] = None) -> Tuple[SingleDataset, ExtraInfoLookup]:
-        """
-        Load input data and extra info
+        """Load input data and extra info
+        Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
+        Args:
+            data: Optional[T]: (Default value = None)
+        Returns:
         """
         data = self._load_data(data)
         extra_info: ExtraInfoLookup = {}
@@ -45,37 +48,58 @@ def load_input_data(self, data: Optional[T] = None) -> Tuple[SingleDataset, ExtraInfoLookup]:
         return data, extra_info

     def load_update_data(self, data: Optional[T] = None) -> Dataset:
-        """
-        Load update data
+        """Load update data
+        Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
+        Args:
+            data: Optional[T]: (Default value = None)
+        Returns:
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type="update", extra_info=None)

     def load_sym_output_data(self, data: Optional[T] = None) -> Dataset:
-        """
-        Load symmetric output data
+        """Load symmetric output data
+        Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
+        Args:
+            data: Optional[T]: (Default value = None)
+        Returns:
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type="sym_output", extra_info=None)

     def load_asym_output_data(self, data: Optional[T] = None) -> Dataset:
-        """
-        Load asymmetric output data
+        """Load asymmetric output data
+        Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
+        Args:
+            data: Optional[T]: (Default value = None)
+        Returns:
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type="asym_output", extra_info=None)

     def convert(self, data: Dataset, extra_info: Optional[ExtraInfoLookup] = None) -> T:
-        """
-        Convert input/update/(a)sym_output data and optionally extra info.
+        """Convert input/update/(a)sym_output data and optionally extra info.
+        Note: You shouldn't have to overwrite this method. Check _serialize_data() instead.
+        Args:
+            data: Dataset:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+        Returns:
         """
         return self._serialize_data(data=data, extra_info=extra_info)

@@ -85,16 +109,23 @@ def save(
         extra_info: Optional[ExtraInfoLookup] = None,
         destination: Optional[BaseDataStore[T]] = None,
     ) -> None:
-        """
-        Save input/update/(a)sym_output data and optionally extra info.
+        """Save input/update/(a)sym_output data and optionally extra info.
+        Note: You shouldn't have to overwrite this method. Check _serialize_data() instead.
+        Args:
+            data: Dataset:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+            destination: Optional[BaseDataStore[T]]: (Default value = None)
+        Returns:
         """
-        data = self.convert(data=data, extra_info=extra_info)
+        data_converted = self.convert(data=data, extra_info=extra_info)
         if destination is not None:
-            destination.save(data=data)
+            destination.save(data=data_converted)
         elif self._destination is not None:
-            self._destination.save(data=data)
+            self._destination.save(data=data_converted)
         else:
             raise ValueError("No destination supplied!")

@@ -109,9 +140,9 @@ def _id_lookup(self, component: str, row: List[Hashable]) -> int:
         return self._lookup(item=(component,) + tuple(row))

     @abstractmethod  # pragma: nocover
-    def _parse_data(self, data: T, data_type: str, extra_info: Optional[ExtraInfoLookup] = None) -> Dataset:
+    def _parse_data(self, data: T, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
         pass

     @abstractmethod  # pragma: nocover
-    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup] = None) -> T:
+    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> T:
         pass
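
Two behavioral changes stand out in this file: save() now writes the converted result (data_converted) to the destination instead of the raw dataset, and the abstract _parse_data()/_serialize_data() hooks take extra_info as an explicit parameter rather than defaulting it to None. A hypothetical minimal subclass illustrating the tightened contract (the import paths are assumptions, not taken from this diff):

from typing import Optional

from power_grid_model.data_types import Dataset

from power_grid_model_io.converters.base_converter import BaseConverter
from power_grid_model_io.data_types import ExtraInfoLookup


class DictConverter(BaseConverter[dict]):
    # Toy converter whose native format T is a plain dict.

    def _parse_data(self, data: dict, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
        raise NotImplementedError  # would build numpy arrays from `data`

    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> dict:
        raise NotImplementedError  # would turn the arrays back into plain dicts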
125 changes: 113 additions & 12 deletions src/power_grid_model_io/converters/pgm_json_converter.py
@@ -27,8 +27,13 @@ class PgmJsonConverter(BaseConverter[StructuredData]):
     A 'converter' class to load and store power grid model data in the native PGM JSON format. The methods are similar
     to the utils in power_grid_model, with the addition of storing and loading 'extra info'. Extra info is the set of
     attributes that don't match the power grid model's internal structure, but are important to keep close to the data.
-    The most common example is the original object ID, if the original IDs are not numeric, or not unique oover all
+    The most common example is the original object ID, if the original IDs are not numeric, or not unique over all
     components.
+    Args:
+    Returns:
     """

     def __init__(
@@ -38,9 +43,26 @@ def __init__(
         destination = JsonFileStore(file_path=Path(destination_file)) if destination_file else None
         super().__init__(source=source, destination=destination)

-    def _parse_data(
-        self, data: StructuredData, data_type: str, extra_info: Optional[ExtraInfoLookup] = None
-    ) -> Dataset:
+    def _parse_data(self, data: StructuredData, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
+        """This function expects Structured data, which can either be a dictionary (single dataset) or a list of
+        dictionaries (batch dataset). The structured dataset consists of components + attributes that exist within
+        power-grid-model, but can also contain other data. If this data should be saved for later usage, an extra_info
+        dictionary can be provided when calling this function.
+        Args:
+            data: Structured data, which can either be a dictionary or a list of dictionaries
+            data_type: the data type of the dataset, i.e. "input", "update", "sym_output" or "asym_output"
+            extra_info: an optional dictionary where extra component info (that can't be specified in
+                power-grid-model data) can be specified
+            data: StructuredData:
+            data_type: str:
+            extra_info: Optional[ExtraInfoLookup]:
+        Returns:
+            a dictionary containing the components as keys and their corresponding numpy arrays as values: a
+            power-grid-model "input" or "update" dataset
+        """
         self._log.debug(f"Loading PGM {data_type} data")
         if isinstance(data, list):
             parsed_data = [
@@ -52,18 +74,53 @@
         return self._parse_dataset(data=data, data_type=data_type, extra_info=extra_info)

     def _parse_dataset(
-        self, data: SinglePythonDataset, data_type: str, extra_info: Optional[ExtraInfoLookup] = None
+        self, data: SinglePythonDataset, data_type: str, extra_info: Optional[ExtraInfoLookup]
     ) -> SingleDataset:
+        """This function parses a single Python dataset and returns a power-grid-model input or update dictionary.
+        Args:
+            data: a single Python dataset
+            data_type: the data type of the dataset, i.e. "input" or "update"
+            extra_info: an optional dictionary where extra component info (that can't be specified in
+                power-grid-model data) can be specified
+            data: SinglePythonDataset:
+            data_type: str:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+        Returns:
+            a dictionary containing the components as keys and their corresponding numpy arrays as values: a
+            power-grid-model "input" or "update" dataset
+        """
         return {
             component: self._parse_component(
                 objects=objects, component=component, data_type=data_type, extra_info=extra_info
             )
             for component, objects in data.items()
         }

+    @staticmethod
     def _parse_component(
-        self, objects: ComponentList, component: str, data_type: str, extra_info: Optional[ExtraInfoLookup] = None
+        objects: ComponentList, component: str, data_type: str, extra_info: Optional[ExtraInfoLookup]
     ) -> np.ndarray:
+        """This function generates a structured numpy array (power-grid-model native) from a structured dataset.
+        Args:
+            objects: a list with dictionaries, where each dictionary contains all attributes of a component
+            component: the type of component, e.g. node, line, etc. Note: it should be a valid power-grid-model
+                component
+            data_type: a string specifying the data type: input/update
+            extra_info: an optional dictionary where extra component info (that can't be specified in
+                power-grid-model data) can be specified
+            objects: ComponentList:
+            component: str:
+            data_type: str:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+        Returns:
+            a numpy structured array for a power-grid-model component
+        """
         # We'll initialize a 1d-array with NaN values for all the objects of this component type
         array = initialize_array(data_type, component, len(objects))
@@ -87,7 +144,23 @@ def _parse_component(
                     extra_info[obj["id"]][attribute] = value
         return array

-    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup] = None) -> StructuredData:
+    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> StructuredData:
+        """This function converts a power-grid-model dataset to a structured dataset. First, the function checks if the
+        dataset is a single dataset or batch dataset. If it is a batch, the batch data is converted to a list of
+        batches, then each batch is converted individually.
+        Args:
+            data: a power-grid-model dataset
+            extra_info: an optional dictionary with extra information. If supplied, the extra info is added to the
+                structured dataset. The keys in this dictionary should match the ids of components in the
+                power-grid-model dataset. Note: extra info can only be supplied for single datasets
+            data: Dataset:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+        Returns:
+            the function returns a structured dataset
+        """
         # Check if the dataset is a single dataset or batch dataset
         # It is a batch dataset if it is a 2D array or an indptr/data structure
@@ -100,11 +173,26 @@
             list_data = convert_batch_dataset_to_batch_list(data)
             return [self._serialize_dataset(data=x) for x in list_data]

-        # We have established that this is not batch data, so let's tell the type checker that this is a BatchDataset
+        # We have established that this is not batch data, so let's tell the type checker that this is a SingleDataset
         data = cast(SingleDataset, data)
         return self._serialize_dataset(data=data, extra_info=extra_info)

-    def _is_batch(self, data: Dataset) -> bool:
+    @staticmethod
+    def _is_batch(data: Dataset) -> bool:
+        """
+        This function checks if a dataset is single or batch. The function loops through all components in the dataset
+        and checks for each component if the corresponding dataset is single or batch. All components should have the
+        same array type (single or batch); if this is not the case, a ValueError will be raised. An array is a batch
+        dataset if it is a 2D array or has an indptr/data structure.
+        Args:
+            data: a power-grid-model dataset which is either single or a batch
+            data: Dataset:
+        Returns:
+            returns True if the dataset is a batch dataset, False if it is a single dataset
+        """
         is_batch: Optional[bool] = None
         for component, array in data.items():
             is_dense_batch = isinstance(array, np.ndarray) and array.ndim == 2
@@ -117,9 +205,22 @@ def _is_batch(self, data: Dataset) -> bool:
             is_batch = is_dense_batch or is_sparse_batch
         return bool(is_batch)

-    def _serialize_dataset(
-        self, data: SingleDataset, extra_info: Optional[ExtraInfoLookup] = None
-    ) -> SinglePythonDataset:
+    @staticmethod
+    def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfoLookup] = None) -> SinglePythonDataset:
+        """This function converts a single power-grid-model dataset to a structured dataset.
+        Args:
+            data: a power-grid-model (single) dataset
+            extra_info: an optional dictionary with extra information. If supplied, the extra info is added to the
+                structured dataset. The keys in this dictionary should match the ids of components in the
+                power-grid-model dataset
+            data: SingleDataset:
+            extra_info: Optional[ExtraInfoLookup]: (Default value = None)
+        Returns:
+            the function returns a structured dataset
+        """

         # This should be a single data set
         for component, array in data.items():
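Putting the converter to use: a round-trip sketch based on the constructor shown above (the file paths are hypothetical, and the source_file/destination_file parameter names are assumed from the JsonFileStore wiring in __init__):

from power_grid_model_io.converters.pgm_json_converter import PgmJsonConverter

converter = PgmJsonConverter(
    source_file="docs/examples/data/tiny-net/input.json",  # hypothetical input path
    destination_file="out/input_copy.json",                # hypothetical output path
)

# Parse the structured JSON into power-grid-model numpy arrays; attributes
# that don't fit the PGM structure end up in extra_info.
input_data, extra_info = converter.load_input_data()

# Serialize the dataset back to structured JSON, re-attaching the extra info,
# and write it to the destination configured above.
converter.save(data=input_data, extra_info=extra_info)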