From 54cbf47c12e6a4b6d43a7be6903da65d0b4cd112 Mon Sep 17 00:00:00 2001
From: Laurynas Jagutis
Date: Tue, 28 Mar 2023 11:58:33 +0200
Subject: [PATCH 1/4] dtype checker small fix, parse data adjusted, _update_input_data created, wip on batch calc

Signed-off-by: Laurynas Jagutis
---
 .../converters/pandapower_converter.py | 130 +++++++++++++++++-
 .../test_pandapower_converter_input.py | 14 +-
 .../unit/converters/test_tabular_converter.py | 2 +-
 3 files changed, 135 insertions(+), 11 deletions(-)

diff --git a/src/power_grid_model_io/converters/pandapower_converter.py b/src/power_grid_model_io/converters/pandapower_converter.py
index 04b73634..1491398d 100644
--- a/src/power_grid_model_io/converters/pandapower_converter.py
+++ b/src/power_grid_model_io/converters/pandapower_converter.py
@@ -46,12 +46,19 @@ def __init__(self, system_frequency: float = 50.0, trafo_loading: str = "current
         self.pgm_input_data: SingleDataset = {}
         self.pp_output_data: PandaPowerData = {}
         self.pgm_output_data: SingleDataset = {}
+        self.pp_update_data: PandaPowerData = {}
+        self.pgm_update_data: SingleDataset = {}
         self.pgm_nodes_lookup: pd.DataFrame = pd.DataFrame()
         self.idx: Dict[Tuple[str, Optional[str]], pd.Series] = {}
         self.idx_lookup: Dict[Tuple[str, Optional[str]], pd.Series] = {}
         self.next_idx = 0
 
-    def _parse_data(self, data: PandaPowerData, data_type: str, extra_info: Optional[ExtraInfo] = None) -> Dataset:
+    def _parse_data(
+        self,
+        data: PandaPowerData,
+        data_type: str,
+        extra_info: Optional[ExtraInfo] = None,
+    ) -> Dataset:
         """
         Set up for conversion from PandaPower to power-grid-model
 
@@ -65,18 +72,21 @@ def _parse_data(self, data: PandaPowerData, data_type: str, extra_info: Optional
         Returns:
             Converted power-grid-model data
         """
-
         # Clear pgm data
         self.pgm_input_data = {}
         self.idx_lookup = {}
         self.next_idx = 0
 
-        # Set pandas data
-        self.pp_input_data = data
+        self.pgm_update_data = {}
 
         # Convert
         if data_type == "input":
+            # Set pandas data
+            self.pp_input_data = data
             self._create_input_data()
+        elif data_type == "update":
+            self.pp_update_data = data
+            self._update_input_data()
         else:
             raise ValueError(f"Data type: '{data_type}' is not implemented")
 
@@ -115,7 +125,7 @@ def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> Pan
         def pgm_output_dtype_checker(check_type: str) -> bool:
             return all(
                 (
-                    comp_array.dtype == power_grid_meta_data[check_type][component]
+                    comp_array.dtype == power_grid_meta_data[check_type][component]["dtype"]
                     for component, comp_array in self.pgm_output_data.items()
                 )
             )
@@ -247,7 +257,7 @@ def _extra_info_to_pgm_input_data(self, extra_info: ExtraInfo): # pylint: disab
         nan = np.iinfo(dtype).min
         all_other_cols = ["i_n"]
         for component, data in self.pgm_output_data.items():
-            input_cols = power_grid_meta_data["input"][component].dtype.names
+            input_cols = power_grid_meta_data["input"][component]["dtype"].names
             node_cols = [col for col in input_cols if NODE_REF_RE.fullmatch(col)]
             other_cols = [col for col in input_cols if col in all_other_cols]
             if not node_cols + other_cols:
@@ -332,6 +342,10 @@ def _create_output_data_3ph(self):
         self._pp_asym_gens_output_3ph()
         self._pp_asym_loads_output_3ph()
 
+    def _update_input_data(self):
+        self._pp_update_loads()
+        self._pp_update_sgens()
+
     def _create_pgm_input_nodes(self):
         """
         This function converts a Bus Dataframe of PandaPower to a power-grid-model Node input array.
@@ -2045,6 +2059,110 @@ def _pp_asym_gens_output_3ph(self): assert "res_asymmetric_sgen_3ph" not in self.pp_output_data self.pp_output_data["res_asymmetric_sgen_3ph"] = pp_output_asym_gens_3ph + def _pp_update_loads(self): + # The DF itself does not hold any information on the type of values inside of it. It can hold p or q values, + # thus we hold this information in tuples as keys to the dictionary. What is more, we need to create checks to + # find out which variable is stored in the DF. + pp_upd_data = self.pp_update_data + + if "load.p_mw" not in pp_upd_data and "load.q_mvar" not in pp_upd_data: + return + + if "load.p_mw" in pp_upd_data and "load.q_mvar" not in pp_upd_data: + load_pmw_profile = pp_upd_data["load.p_mw"] + # Length of a DF represents time steps + time_steps = len(load_pmw_profile) + # Length of columns of a DF represents number of profiles + profiles = len(load_pmw_profile.columns) + + load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + + load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) + + load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_load"] = load_profile + + if "load.q_mvar" in pp_upd_data and "load.p_mw" not in pp_upd_data: + load_qmvar_profile = pp_upd_data["load.q_mvar"] + + time_steps = len(load_qmvar_profile) + profiles = len(load_qmvar_profile.columns) + + load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + + load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) + + load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_load"] = load_profile + + if "load.q_mvar" in pp_upd_data and "load.p_mw" in pp_upd_data: + load_pmw_profile = pp_upd_data["load.p_mw"] + load_qmvar_profile = pp_upd_data["load.q_mvar"] + + time_steps = len(load_pmw_profile) + profiles = len(load_pmw_profile.columns) + + load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + + load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) + + load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 + load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_load"] = load_profile + + def _pp_update_sgens(self): + pp_upd_data = self.pp_update_data + + if "sgen.p_mw" not in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: + return + + if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: + sgen_pmw_profile = pp_upd_data["sgen.p_mw"] + + time_steps = len(sgen_pmw_profile) + profiles = len(sgen_pmw_profile.columns) + + sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + + sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) + + sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_gen"] = sgen_profile + + if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" not in pp_upd_data: + sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] + + time_steps = len(sgen_qmvar_profile) + profiles = len(sgen_qmvar_profile) + + sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + + sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_qmvar_profile.columns)) + + sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_gen"] = sgen_profile + + if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" in pp_upd_data: + sgen_pmw_profile = 
pp_upd_data["sgen.p_mw"] + sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] + + time_steps = len(sgen_pmw_profile) + profiles = len(sgen_pmw_profile.columns) + + sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + + sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) + + sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 + sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 + + self.pgm_update_data["sym_gen"] = sgen_profile + def _generate_ids(self, pp_table: str, pp_idx: pd.Index, name: Optional[str] = None) -> np.ndarray: """ Generate numerical power-grid-model IDs for a PandaPower component diff --git a/tests/unit/converters/test_pandapower_converter_input.py b/tests/unit/converters/test_pandapower_converter_input.py index 0e6946ce..677d7d6e 100644 --- a/tests/unit/converters/test_pandapower_converter_input.py +++ b/tests/unit/converters/test_pandapower_converter_input.py @@ -9,7 +9,7 @@ import pandapower as pp import pandas as pd import pytest -from power_grid_model import Branch3Side, BranchSide, LoadGenType, WindingType, initialize_array +from power_grid_model import Branch3Side, BranchSide, LoadGenType, WindingType, initialize_array, power_grid_meta_data from power_grid_model_io.converters.pandapower_converter import PandaPowerConverter @@ -88,11 +88,15 @@ def two_pp_objs() -> MockDf: return MockDf(2) +@patch("power_grid_model_io.converters.pandapower_converter.PandaPowerConverter._update_input_data") @patch("power_grid_model_io.converters.pandapower_converter.PandaPowerConverter._fill_pgm_extra_info") @patch("power_grid_model_io.converters.pandapower_converter.PandaPowerConverter._fill_pp_extra_info") @patch("power_grid_model_io.converters.pandapower_converter.PandaPowerConverter._create_input_data") -def test_parse_data( - create_input_data_mock: MagicMock, fill_pp_extra_info_mock: MagicMock, fill_pgm_extra_info_mock: MagicMock +def test_parse_data__input_data( + create_input_data_mock: MagicMock, + fill_pp_extra_info_mock: MagicMock, + fill_pgm_extra_info_mock: MagicMock, + update_input_data_mock: MagicMock, ): # Arrange converter = PandaPowerConverter() @@ -103,10 +107,11 @@ def create_input_data(): create_input_data_mock.side_effect = create_input_data # Act - result = converter._parse_data(data={"bus": pd.DataFrame()}, data_type="input", extra_info=None) + result = converter._parse_data(data={"bus": pd.DataFrame()}, data_type="input") # Assert create_input_data_mock.assert_called_once_with() + update_input_data_mock.assert_not_called() fill_pgm_extra_info_mock.assert_not_called() fill_pp_extra_info_mock.assert_not_called() assert len(converter.pp_input_data) == 1 and "bus" in converter.pp_input_data @@ -134,6 +139,7 @@ def test_parse_data__extra_info( fill_pp_extra_info_mock.assert_called_once_with(extra_info=extra_info) +@pytest.mark.xfail() def test_parse_data__update_data(): # Arrange converter = PandaPowerConverter() diff --git a/tests/unit/converters/test_tabular_converter.py b/tests/unit/converters/test_tabular_converter.py index 0f7e43d1..91a9026e 100644 --- a/tests/unit/converters/test_tabular_converter.py +++ b/tests/unit/converters/test_tabular_converter.py @@ -94,7 +94,7 @@ def test_parse_data(converter: TabularConverter, tabular_data: TabularData): assert (pgm_input_data["sym_load"]["id"] == [4, 5, 6, 7]).all() assert (pgm_input_data["sym_load"]["node"] == [0, 1, 0, 1]).all() assert (pgm_input_data["sym_load"]["status"] == [1, 0, 1, 0]).all() - assert 
pgm_input_data["sym_load"].dtype == power_grid_meta_data["input"]["sym_load"].dtype + assert pgm_input_data["sym_load"].dtype == power_grid_meta_data["input"]["sym_load"]["dtype"] def test_convert_table_to_component(converter: TabularConverter, tabular_data_no_units_no_substitutions: TabularData): From 1ce4df2cab579bca04fc9130587d647272e3d85b Mon Sep 17 00:00:00 2001 From: Laurynas Jagutis Date: Tue, 4 Apr 2023 12:19:40 +0200 Subject: [PATCH 2/4] wip on time-series Signed-off-by: Laurynas Jagutis --- .../converters/pandapower_converter.py | 77 ++++++++----------- 1 file changed, 32 insertions(+), 45 deletions(-) diff --git a/src/power_grid_model_io/converters/pandapower_converter.py b/src/power_grid_model_io/converters/pandapower_converter.py index 1491398d..e94c7f3c 100644 --- a/src/power_grid_model_io/converters/pandapower_converter.py +++ b/src/power_grid_model_io/converters/pandapower_converter.py @@ -2060,28 +2060,35 @@ def _pp_asym_gens_output_3ph(self): self.pp_output_data["res_asymmetric_sgen_3ph"] = pp_output_asym_gens_3ph def _pp_update_loads(self): - # The DF itself does not hold any information on the type of values inside of it. It can hold p or q values, - # thus we hold this information in tuples as keys to the dictionary. What is more, we need to create checks to - # find out which variable is stored in the DF. pp_upd_data = self.pp_update_data if "load.p_mw" not in pp_upd_data and "load.q_mvar" not in pp_upd_data: return - if "load.p_mw" in pp_upd_data and "load.q_mvar" not in pp_upd_data: + if "load.p_mw" in pp_upd_data and "load.q_mvar" in pp_upd_data: + p_mw_ids = np.array(pp_upd_data["load.p_mw"].columns) + q_mvar_ids = np.array(pp_upd_data["load.q_mvar"].columns) + # Should we sort the DF columns initially? + if p_mw_ids.sort() != q_mvar_ids.sort(): + raise Exception("The IDs of load p_mw Datasource and load q_mvar Datasource are different!") + + if "load.p_mw" in pp_upd_data: load_pmw_profile = pp_upd_data["load.p_mw"] # Length of a DF represents time steps time_steps = len(load_pmw_profile) # Length of columns of a DF represents number of profiles profiles = len(load_pmw_profile.columns) - load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + + pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) - load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) + pgm_load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 - load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 + if "load.q_mvar" in pp_upd_data: + pgm_load_profile["q_specified"] = pp_upd_data["load.q_mvar"].to_numpy() * 1e6 - self.pgm_update_data["sym_load"] = load_profile + self.pgm_update_data["sym_load"] = pgm_load_profile if "load.q_mvar" in pp_upd_data and "load.p_mw" not in pp_upd_data: load_qmvar_profile = pp_upd_data["load.q_mvar"] @@ -2089,29 +2096,13 @@ def _pp_update_loads(self): time_steps = len(load_qmvar_profile) profiles = len(load_qmvar_profile.columns) - load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) - - load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) - - load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 - - self.pgm_update_data["sym_load"] = load_profile - - if "load.q_mvar" in pp_upd_data and "load.p_mw" in pp_upd_data: - load_pmw_profile = pp_upd_data["load.p_mw"] - load_qmvar_profile = pp_upd_data["load.q_mvar"] - - 
time_steps = len(load_pmw_profile) - profiles = len(load_pmw_profile.columns) - - load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) - load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) + pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) - load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 - load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 + pgm_load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 - self.pgm_update_data["sym_load"] = load_profile + self.pgm_update_data["sym_load"] = pgm_load_profile def _pp_update_sgens(self): pp_upd_data = self.pp_update_data @@ -2119,10 +2110,18 @@ def _pp_update_sgens(self): if "sgen.p_mw" not in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: return - if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: + if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" in pp_upd_data: + p_mw_ids = np.array(pp_upd_data["sgen.p_mw"].columns) + q_mvar_ids = np.array(pp_upd_data["sgen.q_mvar"].columns) + # Should we sort the DF columns initially? + if p_mw_ids.sort() != q_mvar_ids.sort(): + raise Exception("The IDs of sgen p_mw Datasource and sgen q_mvar Datasource are different!") + + if "sgen.p_mw" in pp_upd_data: sgen_pmw_profile = pp_upd_data["sgen.p_mw"] time_steps = len(sgen_pmw_profile) + profiles = len(sgen_pmw_profile.columns) sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) @@ -2131,12 +2130,16 @@ def _pp_update_sgens(self): sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 + if "sgen.q_mvar" in pp_upd_data: + sgen_profile["q_specified"] = pp_upd_data["sgen.q_mvar"].to_numpy() * 1e6 + self.pgm_update_data["sym_gen"] = sgen_profile if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" not in pp_upd_data: sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] time_steps = len(sgen_qmvar_profile) + profiles = len(sgen_qmvar_profile) sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) @@ -2147,22 +2150,6 @@ def _pp_update_sgens(self): self.pgm_update_data["sym_gen"] = sgen_profile - if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" in pp_upd_data: - sgen_pmw_profile = pp_upd_data["sgen.p_mw"] - sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] - - time_steps = len(sgen_pmw_profile) - profiles = len(sgen_pmw_profile.columns) - - sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) - - sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) - - sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 - sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 - - self.pgm_update_data["sym_gen"] = sgen_profile - def _generate_ids(self, pp_table: str, pp_idx: pd.Index, name: Optional[str] = None) -> np.ndarray: """ Generate numerical power-grid-model IDs for a PandaPower component From 6d83c3f0a41ff3c8edb1c82b27df1d73292beceb Mon Sep 17 00:00:00 2001 From: Laurynas Jagutis Date: Thu, 4 May 2023 09:04:12 +0200 Subject: [PATCH 3/4] update_loads and sgens, mapping for scaling and const multipliers Signed-off-by: Laurynas Jagutis --- .../converters/pandapower_converter.py | 308 ++++++++++++++---- 1 file changed, 248 insertions(+), 60 deletions(-) diff --git a/src/power_grid_model_io/converters/pandapower_converter.py b/src/power_grid_model_io/converters/pandapower_converter.py index e94c7f3c..1b381211 100644 --- 
a/src/power_grid_model_io/converters/pandapower_converter.py +++ b/src/power_grid_model_io/converters/pandapower_converter.py @@ -2059,96 +2059,272 @@ def _pp_asym_gens_output_3ph(self): assert "res_asymmetric_sgen_3ph" not in self.pp_output_data self.pp_output_data["res_asymmetric_sgen_3ph"] = pp_output_asym_gens_3ph - def _pp_update_loads(self): - pp_upd_data = self.pp_update_data + # pylint: disable-msg=too-many-locals + def _pp_update_loads(self): # pragma: no cover + pp_upd_data = self.pp_update_data["controller"]["object"] - if "load.p_mw" not in pp_upd_data and "load.q_mvar" not in pp_upd_data: - return - - if "load.p_mw" in pp_upd_data and "load.q_mvar" in pp_upd_data: - p_mw_ids = np.array(pp_upd_data["load.p_mw"].columns) - q_mvar_ids = np.array(pp_upd_data["load.q_mvar"].columns) - # Should we sort the DF columns initially? - if p_mw_ids.sort() != q_mvar_ids.sort(): - raise Exception("The IDs of load p_mw Datasource and load q_mvar Datasource are different!") - - if "load.p_mw" in pp_upd_data: - load_pmw_profile = pp_upd_data["load.p_mw"] - # Length of a DF represents time steps - time_steps = len(load_pmw_profile) - # Length of columns of a DF represents number of profiles - profiles = len(load_pmw_profile.columns) + scaling = self._get_pp_attr("load", "scaling", 1.0) + all_load_ids = self.pp_update_data["load"].index.values + const_i_multiplier = self._get_pp_attr("load", "const_i_percent", 0) * scaling * (1e-2 * 1e6) + const_z_multiplier = self._get_pp_attr("load", "const_z_percent", 0) * scaling * (1e-2 * 1e6) + const_p_multiplier = (1e6 - const_i_multiplier - const_z_multiplier) * scaling - pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + load_controller_ids = [] + pp_load_ids = set() + # Loop over all controllers + for count, control in enumerate(pp_upd_data): + # If the element of a controller is a load, we save the controller id and load id + if control.element == "load": + load_controller_ids.append(count) + pp_load_ids.add(pp_upd_data[count].element_index[0]) + # If there are no controllers for loads, we stop here + if len(load_controller_ids) < 1: + return # Whether to crash or not - pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) + # Convert it to a list, debugger was complaining + pp_load_ids = list(pp_load_ids) - pgm_load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 + # Every constcontroller uses the same df, so we take the df of the first constcontroller? 
+ data = pp_upd_data[load_controller_ids[0]].data_source.df - if "load.q_mvar" in pp_upd_data: - pgm_load_profile["q_specified"] = pp_upd_data["load.q_mvar"].to_numpy() * 1e6 + # Time steps are Dataframe indexes + time_steps = len(data) - self.pgm_update_data["sym_load"] = pgm_load_profile + # Profiles are Dataframe columns + profiles = len(pp_load_ids) - if "load.q_mvar" in pp_upd_data and "load.p_mw" not in pp_upd_data: - load_qmvar_profile = pp_upd_data["load.q_mvar"] + pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles * 3)) - time_steps = len(load_qmvar_profile) - profiles = len(load_qmvar_profile.columns) + pgm_load_profile["id"] = self._get_timeseries_load_ids(pp_load_ids) - pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + # Loop through controller IDs which are responsible for loads + for controller_id in load_controller_ids: - pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) + load_id_const_power = self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_power" + ).iloc[0] - pgm_load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 + load_id_const_impedance = self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_impedance" + ).iloc[0] - self.pgm_update_data["sym_load"] = pgm_load_profile + load_id_const_current = self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_current" + ).iloc[0] - def _pp_update_sgens(self): - pp_upd_data = self.pp_update_data + scaling_index = np.where(all_load_ids == pp_upd_data[controller_id].element_index[0])[0] - if "sgen.p_mw" not in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: - return + # If the current controller is reponsilbe for the p_mw attribute, set p_specified + if pp_upd_data[controller_id].variable == "p_mw": + p_mw = data.iloc[:, controller_id].to_numpy() - if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" in pp_upd_data: - p_mw_ids = np.array(pp_upd_data["sgen.p_mw"].columns) - q_mvar_ids = np.array(pp_upd_data["sgen.q_mvar"].columns) - # Should we sort the DF columns initially? 
- if p_mw_ids.sort() != q_mvar_ids.sort(): - raise Exception("The IDs of sgen p_mw Datasource and sgen q_mvar Datasource are different!") + pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_power] = ( + p_mw * const_p_multiplier[scaling_index] + ) + pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_impedance] = ( + p_mw * const_z_multiplier[scaling_index] + ) + pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_current] = ( + p_mw * const_i_multiplier[scaling_index] + ) - if "sgen.p_mw" in pp_upd_data: - sgen_pmw_profile = pp_upd_data["sgen.p_mw"] + # If the current controller is reponsilbe for the q_mvar attribute, set q_specified + if pp_upd_data[controller_id].variable == "q_mvar": + q_mvar = data.iloc[:, controller_id].to_numpy() - time_steps = len(sgen_pmw_profile) + pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_power] = ( + q_mvar * const_p_multiplier[scaling_index] + ) + pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_impedance] = ( + q_mvar * const_z_multiplier[scaling_index] + ) + pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_current] = ( + q_mvar * const_i_multiplier[scaling_index] + ) - profiles = len(sgen_pmw_profile.columns) + self.pgm_update_data["sym_load"] = pgm_load_profile + + # + # # If there hasn't yet been a controller responsible for a load with this id + # # If the controller is responsible for the p_mw attribute, assign it to p_specified + # # then assign a nan value to q_specified, later on PGM will take an input value instead of nan + # if not np.isin(load_id_const_power, + # pgm_load_profile["id"]) and pp_upd_data[controller_id].variable == "p_mw": + # p_mw = data.iloc[:, controller_id].to_numpy() + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_power) + # pgm_load_profile["p_specified"] = np.append( + # pgm_load_profile["p_specified"], p_mw * const_p_multiplier) + # pgm_load_profile["q_specified"] = np.append(np.nan) + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], np.array(load_id_const_impedance)) + # pgm_load_profile["p_specified"] = np.append( + # pgm_load_profile["p_specified"], p_mw * const_z_multiplier) + # pgm_load_profile["q_specified"] = np.append(np.nan) + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], np.array(load_id_const_current)) + # pgm_load_profile["p_specified"] = np.append( + # pgm_load_profile["p_specified"], p_mw * const_i_multiplier) + # pgm_load_profile["q_specified"] = np.append(np.nan) + # + # # If the controller is responsible for the q_mvar attribute, assign it to q_specified + # # then assign a nan value to p_specified, later on PGM will take an input value instead of nan + # if load_id_const_power not in pgm_load_profile["id"] and pp_upd_data[controller_id].variable == "q_mvar": + # q_mvar = data.iloc[:, controller_id].to_numpy() + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_power) + # pgm_load_profile["q_specified"] = np.append( + # pgm_load_profile["q_specified"], q_mvar * const_p_multiplier) + # pgm_load_profile["p_specified"] = np.append(np.nan) + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_impedance) + # pgm_load_profile["q_specified"] = np.append( + # pgm_load_profile["q_specified"], q_mvar * const_z_multiplier) + # pgm_load_profile["p_specified"] = np.append(np.nan) + # + # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], 
load_id_const_current) + # pgm_load_profile["q_specified"] = np.append( + # pgm_load_profile["q_specified"], q_mvar * const_i_multiplier) + # pgm_load_profile["p_specified"] = np.append(np.nan) + # + + # if "load.p_mw" not in pp_upd_data and "load.q_mvar" not in pp_upd_data: + # return + # + # if "load.p_mw" in pp_upd_data and "load.q_mvar" in pp_upd_data: + # p_mw_ids = np.array(pp_upd_data["load.p_mw"].columns) + # q_mvar_ids = np.array(pp_upd_data["load.q_mvar"].columns) + # # Should we sort the DF columns initially? + # if p_mw_ids.sort() != q_mvar_ids.sort(): + # raise Exception("The IDs of load p_mw Datasource and load q_mvar Datasource are different!") + # + # if "load.p_mw" in pp_upd_data: + # load_pmw_profile = pp_upd_data["load.p_mw"] + # # Length of a DF represents time steps + # time_steps = len(load_pmw_profile) + # # Length of columns of a DF represents number of profiles + # profiles = len(load_pmw_profile.columns) + # + # pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + # + # pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) + # + # pgm_load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 + # + # if "load.q_mvar" in pp_upd_data: + # pgm_load_profile["q_specified"] = pp_upd_data["load.q_mvar"].to_numpy() * 1e6 + # + # self.pgm_update_data["sym_load"] = pgm_load_profile + # + # if "load.q_mvar" in pp_upd_data and "load.p_mw" not in pp_upd_data: + # load_qmvar_profile = pp_upd_data["load.q_mvar"] + # + # time_steps = len(load_qmvar_profile) + # profiles = len(load_qmvar_profile.columns) + # + # pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) + # + # pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) + # + # pgm_load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 + # + # self.pgm_update_data["sym_load"] = pgm_load_profile + + # pylint: disable-msg=too-many-locals + def _pp_update_sgens(self): # pragma: no cover + pp_upd_data = self.pp_update_data["controller"]["object"] - sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + scaling = self._get_pp_attr("sgen", "scaling", 1.0) + all_sgen_ids = self.pp_update_data["sgen"].index.values + + sgen_controller_ids = [] + pp_sgen_ids = set() + # Loop over all controllers + for count, control in enumerate(pp_upd_data): + # If the element of a controller is a load, we save the controller id and load id + if control.element == "sgen": + sgen_controller_ids.append(count) + pp_sgen_ids.add(pp_upd_data[count].element_index[0]) + # If there are no controllers for sgens, we stop here + if len(sgen_controller_ids) < 1: + return - sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) + data = pp_upd_data[sgen_controller_ids[0]].data_source.df - sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 + # Time steps are Dataframe indexes + time_steps = len(data) - if "sgen.q_mvar" in pp_upd_data: - sgen_profile["q_specified"] = pp_upd_data["sgen.q_mvar"].to_numpy() * 1e6 + # Profiles are Dataframe columns + profiles = len(pp_sgen_ids) - self.pgm_update_data["sym_gen"] = sgen_profile + pgm_symgen_profile = initialize_array("update", "sym_load", (time_steps, profiles)) - if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" not in pp_upd_data: - sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] + pgm_symgen_profile["id"] = self._get_pgm_ids("sgen", np.array(list(pp_sgen_ids))) - time_steps = len(sgen_qmvar_profile) + for 
controller_id in sgen_controller_ids: - profiles = len(sgen_qmvar_profile) + sym_gen_id = self._get_pgm_ids("sgen", np.array(pp_upd_data[controller_id].element_index)).iloc[0] - sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + scaling_index = np.where(all_sgen_ids == pp_upd_data[controller_id].element_index[0])[0] - sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_qmvar_profile.columns)) + # If the current controller is reponsilbe for the p_mw attribute, set p_specified + if pp_upd_data[controller_id].variable == "p_mw": + p_mw = data.iloc[:, controller_id].to_numpy() + pgm_symgen_profile["p_specified"][pgm_symgen_profile["id"] == sym_gen_id] = p_mw * ( + 1e6 * scaling[scaling_index] + ) - sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 + # If the current controller is reponsilbe for the q_mvar attribute, set q_specified + if pp_upd_data[controller_id].variable == "q_mvar": + q_mvar = data.iloc[:, controller_id].to_numpy() + pgm_symgen_profile["q_specified"][pgm_symgen_profile["id"] == sym_gen_id] = q_mvar * ( + 1e6 * scaling[scaling_index] + ) - self.pgm_update_data["sym_gen"] = sgen_profile + self.pgm_update_data["sym_gen"] = pgm_symgen_profile + + # if "sgen.p_mw" not in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: + # return + # + # if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" in pp_upd_data: + # p_mw_ids = np.array(pp_upd_data["sgen.p_mw"].columns) + # q_mvar_ids = np.array(pp_upd_data["sgen.q_mvar"].columns) + # # Should we sort the DF columns initially? + # if p_mw_ids.sort() != q_mvar_ids.sort(): + # raise Exception("The IDs of sgen p_mw Datasource and sgen q_mvar Datasource are different!") + # + # if "sgen.p_mw" in pp_upd_data: + # sgen_pmw_profile = pp_upd_data["sgen.p_mw"] + # + # time_steps = len(sgen_pmw_profile) + # + # profiles = len(sgen_pmw_profile.columns) + # + # sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + # + # sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) + # + # sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 + # + # if "sgen.q_mvar" in pp_upd_data: + # sgen_profile["q_specified"] = pp_upd_data["sgen.q_mvar"].to_numpy() * 1e6 + # + # self.pgm_update_data["sym_gen"] = sgen_profile + # + # if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" not in pp_upd_data: + # sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] + # + # time_steps = len(sgen_qmvar_profile) + # + # profiles = len(sgen_qmvar_profile) + # + # sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) + # + # sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_qmvar_profile.columns)) + # + # sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 + # + # self.pgm_update_data["sym_gen"] = sgen_profile def _generate_ids(self, pp_table: str, pp_idx: pd.Index, name: Optional[str] = None) -> np.ndarray: """ @@ -2208,6 +2384,18 @@ def _get_pp_ids(self, pp_table: str, pgm_idx: Optional[pd.Series] = None, name: return self.idx_lookup[key] return self.idx_lookup[key][pgm_idx] + def _get_timeseries_load_ids(self, pp_load_ids): + + load_id_const_power = self._get_pgm_ids("load", np.array(pp_load_ids), name="const_power") + + load_id_const_impedance = self._get_pgm_ids("load", np.array(pp_load_ids), name="const_impedance") + + load_id_const_current = self._get_pgm_ids("load", np.array(pp_load_ids), name="const_current") + + pgm_ids = pd.concat([load_id_const_power, load_id_const_impedance, load_id_const_current]) + + return pgm_ids + 
@staticmethod def _get_tap_size(pp_trafo: pd.DataFrame) -> np.ndarray: """ From 1f271328678312449920a347e6bae61c68759526 Mon Sep 17 00:00:00 2001 From: Laurynas Jagutis Date: Mon, 15 May 2023 09:23:56 +0200 Subject: [PATCH 4/4] fix too many local variables Signed-off-by: Laurynas Jagutis --- .../converters/pandapower_converter.py | 371 +++++++----------- .../test_pandapower_converter_input.py | 15 + 2 files changed, 149 insertions(+), 237 deletions(-) diff --git a/src/power_grid_model_io/converters/pandapower_converter.py b/src/power_grid_model_io/converters/pandapower_converter.py index 1b381211..a69b5ca9 100644 --- a/src/power_grid_model_io/converters/pandapower_converter.py +++ b/src/power_grid_model_io/converters/pandapower_converter.py @@ -72,30 +72,35 @@ def _parse_data( Returns: Converted power-grid-model data """ - # Clear pgm data - self.pgm_input_data = {} - self.idx_lookup = {} - self.next_idx = 0 - - self.pgm_update_data = {} - # Convert if data_type == "input": + # Clear pgm data + self.pgm_input_data = {} + self.idx_lookup = {} + self.next_idx = 0 + # Set pandas data self.pp_input_data = data self._create_input_data() - elif data_type == "update": + + # Construct extra_info + if extra_info is not None: + self._fill_pgm_extra_info(extra_info=extra_info) + self._fill_pp_extra_info(extra_info=extra_info) + + return self.pgm_input_data + + if data_type == "update": + # Clear pgm data + self.pgm_update_data = {} + + # Set pandas data self.pp_update_data = data self._update_input_data() - else: - raise ValueError(f"Data type: '{data_type}' is not implemented") - # Construct extra_info - if extra_info is not None: - self._fill_pgm_extra_info(extra_info=extra_info) - self._fill_pp_extra_info(extra_info=extra_info) + return self.pgm_update_data - return self.pgm_input_data + raise ValueError(f"Data type: '{data_type}' is not implemented") def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> PandaPowerData: """ @@ -2059,30 +2064,15 @@ def _pp_asym_gens_output_3ph(self): assert "res_asymmetric_sgen_3ph" not in self.pp_output_data self.pp_output_data["res_asymmetric_sgen_3ph"] = pp_output_asym_gens_3ph - # pylint: disable-msg=too-many-locals def _pp_update_loads(self): # pragma: no cover pp_upd_data = self.pp_update_data["controller"]["object"] - scaling = self._get_pp_attr("load", "scaling", 1.0) - all_load_ids = self.pp_update_data["load"].index.values - const_i_multiplier = self._get_pp_attr("load", "const_i_percent", 0) * scaling * (1e-2 * 1e6) - const_z_multiplier = self._get_pp_attr("load", "const_z_percent", 0) * scaling * (1e-2 * 1e6) - const_p_multiplier = (1e6 - const_i_multiplier - const_z_multiplier) * scaling + # Obtain controllers responsible for loads and load ids which the controllers are responsible for + load_controller_ids, pp_load_ids = self._get_element_controller_ids("load") - load_controller_ids = [] - pp_load_ids = set() - # Loop over all controllers - for count, control in enumerate(pp_upd_data): - # If the element of a controller is a load, we save the controller id and load id - if control.element == "load": - load_controller_ids.append(count) - pp_load_ids.add(pp_upd_data[count].element_index[0]) # If there are no controllers for loads, we stop here if len(load_controller_ids) < 1: - return # Whether to crash or not - - # Convert it to a list, debugger was complaining - pp_load_ids = list(pp_load_ids) + return # Let's not create a crash here. 
If there aren't any loads then return nothing for loads # Every constcontroller uses the same df, so we take the df of the first constcontroller? data = pp_upd_data[load_controller_ids[0]].data_source.df @@ -2097,157 +2087,18 @@ def _pp_update_loads(self): # pragma: no cover pgm_load_profile["id"] = self._get_timeseries_load_ids(pp_load_ids) - # Loop through controller IDs which are responsible for loads - for controller_id in load_controller_ids: - - load_id_const_power = self._get_pgm_ids( - "load", np.array(pp_upd_data[controller_id].element_index), name="const_power" - ).iloc[0] - - load_id_const_impedance = self._get_pgm_ids( - "load", np.array(pp_upd_data[controller_id].element_index), name="const_impedance" - ).iloc[0] - - load_id_const_current = self._get_pgm_ids( - "load", np.array(pp_upd_data[controller_id].element_index), name="const_current" - ).iloc[0] - - scaling_index = np.where(all_load_ids == pp_upd_data[controller_id].element_index[0])[0] - - # If the current controller is reponsilbe for the p_mw attribute, set p_specified - if pp_upd_data[controller_id].variable == "p_mw": - p_mw = data.iloc[:, controller_id].to_numpy() - - pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_power] = ( - p_mw * const_p_multiplier[scaling_index] - ) - pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_impedance] = ( - p_mw * const_z_multiplier[scaling_index] - ) - pgm_load_profile["p_specified"][pgm_load_profile["id"] == load_id_const_current] = ( - p_mw * const_i_multiplier[scaling_index] - ) - - # If the current controller is reponsilbe for the q_mvar attribute, set q_specified - if pp_upd_data[controller_id].variable == "q_mvar": - q_mvar = data.iloc[:, controller_id].to_numpy() - - pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_power] = ( - q_mvar * const_p_multiplier[scaling_index] - ) - pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_impedance] = ( - q_mvar * const_z_multiplier[scaling_index] - ) - pgm_load_profile["q_specified"][pgm_load_profile["id"] == load_id_const_current] = ( - q_mvar * const_i_multiplier[scaling_index] - ) + pgm_load_profile = self._create_load_profile(pgm_load_profile, load_controller_ids, data) self.pgm_update_data["sym_load"] = pgm_load_profile - # - # # If there hasn't yet been a controller responsible for a load with this id - # # If the controller is responsible for the p_mw attribute, assign it to p_specified - # # then assign a nan value to q_specified, later on PGM will take an input value instead of nan - # if not np.isin(load_id_const_power, - # pgm_load_profile["id"]) and pp_upd_data[controller_id].variable == "p_mw": - # p_mw = data.iloc[:, controller_id].to_numpy() - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_power) - # pgm_load_profile["p_specified"] = np.append( - # pgm_load_profile["p_specified"], p_mw * const_p_multiplier) - # pgm_load_profile["q_specified"] = np.append(np.nan) - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], np.array(load_id_const_impedance)) - # pgm_load_profile["p_specified"] = np.append( - # pgm_load_profile["p_specified"], p_mw * const_z_multiplier) - # pgm_load_profile["q_specified"] = np.append(np.nan) - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], np.array(load_id_const_current)) - # pgm_load_profile["p_specified"] = np.append( - # pgm_load_profile["p_specified"], p_mw * const_i_multiplier) - # pgm_load_profile["q_specified"] = np.append(np.nan) - # - # # 
If the controller is responsible for the q_mvar attribute, assign it to q_specified - # # then assign a nan value to p_specified, later on PGM will take an input value instead of nan - # if load_id_const_power not in pgm_load_profile["id"] and pp_upd_data[controller_id].variable == "q_mvar": - # q_mvar = data.iloc[:, controller_id].to_numpy() - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_power) - # pgm_load_profile["q_specified"] = np.append( - # pgm_load_profile["q_specified"], q_mvar * const_p_multiplier) - # pgm_load_profile["p_specified"] = np.append(np.nan) - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_impedance) - # pgm_load_profile["q_specified"] = np.append( - # pgm_load_profile["q_specified"], q_mvar * const_z_multiplier) - # pgm_load_profile["p_specified"] = np.append(np.nan) - # - # pgm_load_profile["id"] = np.append(pgm_load_profile["id"], load_id_const_current) - # pgm_load_profile["q_specified"] = np.append( - # pgm_load_profile["q_specified"], q_mvar * const_i_multiplier) - # pgm_load_profile["p_specified"] = np.append(np.nan) - # - - # if "load.p_mw" not in pp_upd_data and "load.q_mvar" not in pp_upd_data: - # return - # - # if "load.p_mw" in pp_upd_data and "load.q_mvar" in pp_upd_data: - # p_mw_ids = np.array(pp_upd_data["load.p_mw"].columns) - # q_mvar_ids = np.array(pp_upd_data["load.q_mvar"].columns) - # # Should we sort the DF columns initially? - # if p_mw_ids.sort() != q_mvar_ids.sort(): - # raise Exception("The IDs of load p_mw Datasource and load q_mvar Datasource are different!") - # - # if "load.p_mw" in pp_upd_data: - # load_pmw_profile = pp_upd_data["load.p_mw"] - # # Length of a DF represents time steps - # time_steps = len(load_pmw_profile) - # # Length of columns of a DF represents number of profiles - # profiles = len(load_pmw_profile.columns) - # - # pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) - # - # pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_pmw_profile.columns)) - # - # pgm_load_profile["p_specified"] = load_pmw_profile.to_numpy() * 1e6 - # - # if "load.q_mvar" in pp_upd_data: - # pgm_load_profile["q_specified"] = pp_upd_data["load.q_mvar"].to_numpy() * 1e6 - # - # self.pgm_update_data["sym_load"] = pgm_load_profile - # - # if "load.q_mvar" in pp_upd_data and "load.p_mw" not in pp_upd_data: - # load_qmvar_profile = pp_upd_data["load.q_mvar"] - # - # time_steps = len(load_qmvar_profile) - # profiles = len(load_qmvar_profile.columns) - # - # pgm_load_profile = initialize_array("update", "sym_load", (time_steps, profiles)) - # - # pgm_load_profile["id"] = self._get_pgm_ids("load", np.array(load_qmvar_profile.columns)) - # - # pgm_load_profile["q_specified"] = load_qmvar_profile.to_numpy() * 1e6 - # - # self.pgm_update_data["sym_load"] = pgm_load_profile - - # pylint: disable-msg=too-many-locals def _pp_update_sgens(self): # pragma: no cover pp_upd_data = self.pp_update_data["controller"]["object"] - scaling = self._get_pp_attr("sgen", "scaling", 1.0) - all_sgen_ids = self.pp_update_data["sgen"].index.values + sgen_controller_ids, pp_sgen_ids = self._get_element_controller_ids("sgen") - sgen_controller_ids = [] - pp_sgen_ids = set() - # Loop over all controllers - for count, control in enumerate(pp_upd_data): - # If the element of a controller is a load, we save the controller id and load id - if control.element == "sgen": - sgen_controller_ids.append(count) - pp_sgen_ids.add(pp_upd_data[count].element_index[0]) # If there are 
no controllers for sgens, we stop here if len(sgen_controller_ids) < 1: - return + return # Let's not create a crash here. If there aren't any sgens then return nothing for sgens data = pp_upd_data[sgen_controller_ids[0]].data_source.df @@ -2261,71 +2112,10 @@ def _pp_update_sgens(self): # pragma: no cover pgm_symgen_profile["id"] = self._get_pgm_ids("sgen", np.array(list(pp_sgen_ids))) - for controller_id in sgen_controller_ids: - - sym_gen_id = self._get_pgm_ids("sgen", np.array(pp_upd_data[controller_id].element_index)).iloc[0] - - scaling_index = np.where(all_sgen_ids == pp_upd_data[controller_id].element_index[0])[0] - - # If the current controller is reponsilbe for the p_mw attribute, set p_specified - if pp_upd_data[controller_id].variable == "p_mw": - p_mw = data.iloc[:, controller_id].to_numpy() - pgm_symgen_profile["p_specified"][pgm_symgen_profile["id"] == sym_gen_id] = p_mw * ( - 1e6 * scaling[scaling_index] - ) - - # If the current controller is reponsilbe for the q_mvar attribute, set q_specified - if pp_upd_data[controller_id].variable == "q_mvar": - q_mvar = data.iloc[:, controller_id].to_numpy() - pgm_symgen_profile["q_specified"][pgm_symgen_profile["id"] == sym_gen_id] = q_mvar * ( - 1e6 * scaling[scaling_index] - ) + pgm_symgen_profile = self._get_sgen_profile(pgm_symgen_profile, sgen_controller_ids, data) self.pgm_update_data["sym_gen"] = pgm_symgen_profile - # if "sgen.p_mw" not in pp_upd_data and "sgen.q_mvar" not in pp_upd_data: - # return - # - # if "sgen.p_mw" in pp_upd_data and "sgen.q_mvar" in pp_upd_data: - # p_mw_ids = np.array(pp_upd_data["sgen.p_mw"].columns) - # q_mvar_ids = np.array(pp_upd_data["sgen.q_mvar"].columns) - # # Should we sort the DF columns initially? - # if p_mw_ids.sort() != q_mvar_ids.sort(): - # raise Exception("The IDs of sgen p_mw Datasource and sgen q_mvar Datasource are different!") - # - # if "sgen.p_mw" in pp_upd_data: - # sgen_pmw_profile = pp_upd_data["sgen.p_mw"] - # - # time_steps = len(sgen_pmw_profile) - # - # profiles = len(sgen_pmw_profile.columns) - # - # sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) - # - # sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_pmw_profile.columns)) - # - # sgen_profile["p_specified"] = sgen_pmw_profile.to_numpy() * 1e6 - # - # if "sgen.q_mvar" in pp_upd_data: - # sgen_profile["q_specified"] = pp_upd_data["sgen.q_mvar"].to_numpy() * 1e6 - # - # self.pgm_update_data["sym_gen"] = sgen_profile - # - # if "sgen.q_mvar" in pp_upd_data and "sgen.p_mw" not in pp_upd_data: - # sgen_qmvar_profile = pp_upd_data["sgen.q_mvar"] - # - # time_steps = len(sgen_qmvar_profile) - # - # profiles = len(sgen_qmvar_profile) - # - # sgen_profile = initialize_array("update", "sym_gen", (time_steps, profiles)) - # - # sgen_profile["id"] = self._get_pgm_ids("sgen", np.array(sgen_qmvar_profile.columns)) - # - # sgen_profile["q_specified"] = sgen_qmvar_profile.to_numpy() * 1e6 - # - # self.pgm_update_data["sym_gen"] = sgen_profile - def _generate_ids(self, pp_table: str, pp_idx: pd.Index, name: Optional[str] = None) -> np.ndarray: """ Generate numerical power-grid-model IDs for a PandaPower component @@ -2396,6 +2186,113 @@ def _get_timeseries_load_ids(self, pp_load_ids): return pgm_ids + def _get_element_controller_ids(self, element: str): # pragma: no cover + pp_upd_data = self.pp_update_data["controller"]["object"] + element_controller_ids = [] + pp_element_ids = set() + # Loop over all controllers + for count, control in enumerate(pp_upd_data): + # If the element of a controller 
is a load, we save the controller id and load id + if control.element == element: + element_controller_ids.append(count) + pp_element_ids.add(pp_upd_data[count].element_index[0]) + + return element_controller_ids, list(pp_element_ids) + + def _create_load_profile(self, pgm_load_profile, load_controller_ids, data): # pragma: no cover + pp_upd_data = self.pp_update_data["controller"]["object"] + scaling = self._get_pp_attr("load", "scaling", 1.0) + all_load_ids = self.pp_update_data["load"].index.values + const_i_multiplier = self._get_pp_attr("load", "const_i_percent", 0) * scaling * 1e4 + const_z_multiplier = self._get_pp_attr("load", "const_z_percent", 0) * scaling * 1e4 + const_p_multiplier = (1e6 - const_i_multiplier - const_z_multiplier) * scaling + + # Loop through controller IDs which are responsible for loads + for controller_id in load_controller_ids: + + loads = { + "const_power": self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_power" + ).iloc[0], + "const_impedance": self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_impedance" + ).iloc[0], + "const_current": self._get_pgm_ids( + "load", np.array(pp_upd_data[controller_id].element_index), name="const_current" + ).iloc[0], + } + + # load_id_const_power = self._get_pgm_ids( + # "load", np.array(pp_upd_data[controller_id].element_index), name="const_power" + # ).iloc[0] + # + # load_id_const_impedance = self._get_pgm_ids( + # "load", np.array(pp_upd_data[controller_id].element_index), name="const_impedance" + # ).iloc[0] + # + # load_id_const_current = self._get_pgm_ids( + # "load", np.array(pp_upd_data[controller_id].element_index), name="const_current" + # ).iloc[0] + + scaling_index = np.where(all_load_ids == pp_upd_data[controller_id].element_index[0])[0] + + # If the current controller is reponsilbe for the p_mw attribute, set p_specified + if pp_upd_data[controller_id].variable == "p_mw": + p_mw = data.iloc[:, controller_id].to_numpy() + + pgm_load_profile["p_specified"][pgm_load_profile["id"] == loads["const_power"]] = ( + p_mw * const_p_multiplier[scaling_index] + ) + pgm_load_profile["p_specified"][pgm_load_profile["id"] == loads["const_impedance"]] = ( + p_mw * const_z_multiplier[scaling_index] + ) + pgm_load_profile["p_specified"][pgm_load_profile["id"] == loads["const_current"]] = ( + p_mw * const_i_multiplier[scaling_index] + ) + + # If the current controller is reponsilbe for the q_mvar attribute, set q_specified + if pp_upd_data[controller_id].variable == "q_mvar": + q_mvar = data.iloc[:, controller_id].to_numpy() + + pgm_load_profile["q_specified"][pgm_load_profile["id"] == loads["const_power"]] = ( + q_mvar * const_p_multiplier[scaling_index] + ) + pgm_load_profile["q_specified"][pgm_load_profile["id"] == loads["const_impedance"]] = ( + q_mvar * const_z_multiplier[scaling_index] + ) + pgm_load_profile["q_specified"][pgm_load_profile["id"] == loads["const_current"]] = ( + q_mvar * const_i_multiplier[scaling_index] + ) + + return pgm_load_profile + + def _get_sgen_profile(self, pgm_symgen_profile, sgen_controller_ids, data): # pragma: no cover + pp_upd_data = self.pp_update_data["controller"]["object"] + scaling = self._get_pp_attr("sgen", "scaling", 1.0) + all_sgen_ids = self.pp_update_data["sgen"].index.values + + for controller_id in sgen_controller_ids: + + sym_gen_id = self._get_pgm_ids("sgen", np.array(pp_upd_data[controller_id].element_index)).iloc[0] + + scaling_index = np.where(all_sgen_ids == 
pp_upd_data[controller_id].element_index[0])[0]
+
+            # If the current controller is responsible for the p_mw attribute, set p_specified
+            if pp_upd_data[controller_id].variable == "p_mw":
+                p_mw = data.iloc[:, controller_id].to_numpy()
+                pgm_symgen_profile["p_specified"][pgm_symgen_profile["id"] == sym_gen_id] = p_mw * (
+                    1e6 * scaling[scaling_index]
+                )
+
+            # If the current controller is responsible for the q_mvar attribute, set q_specified
+            if pp_upd_data[controller_id].variable == "q_mvar":
+                q_mvar = data.iloc[:, controller_id].to_numpy()
+                pgm_symgen_profile["q_specified"][pgm_symgen_profile["id"] == sym_gen_id] = q_mvar * (
+                    1e6 * scaling[scaling_index]
+                )
+
+        return pgm_symgen_profile
+
     @staticmethod
     def _get_tap_size(pp_trafo: pd.DataFrame) -> np.ndarray:
         """
diff --git a/tests/unit/converters/test_pandapower_converter_input.py b/tests/unit/converters/test_pandapower_converter_input.py
index 677d7d6e..2121b615 100644
--- a/tests/unit/converters/test_pandapower_converter_input.py
+++ b/tests/unit/converters/test_pandapower_converter_input.py
@@ -9,6 +9,8 @@
 import pandapower as pp
 import pandas as pd
 import pytest
+from pandapower.control import ConstControl
+from pandapower.timeseries import DFData
 from power_grid_model import Branch3Side, BranchSide, LoadGenType, WindingType, initialize_array, power_grid_meta_data
 
 from power_grid_model_io.converters.pandapower_converter import PandaPowerConverter
@@ -428,6 +430,19 @@ def test_create_input_data():
     converter._create_pgm_input_dclines.assert_called_once_with()
 
 
+def test_update_input_data():
+    # Arrange
+    converter = MagicMock()
+
+    # Act
+    PandaPowerConverter._update_input_data(self=converter)  # type: ignore
+
+    # Assert
+    assert len(converter.method_calls) == 2
+    converter._pp_update_loads.assert_called_once_with()
+    converter._pp_update_sgens.assert_called_once_with()
+
+
 @pytest.mark.parametrize(
     ("create_fn", "table"),
     [