diff --git a/floris/core/core.py b/floris/core/core.py index 857b90fa4..7f836b38a 100644 --- a/floris/core/core.py +++ b/floris/core/core.py @@ -121,6 +121,7 @@ def __attrs_post_init__(self) -> None: grid_resolution=self.solver["flow_field_grid_points"], x1_bounds=self.solver["flow_field_bounds"][0], x2_bounds=self.solver["flow_field_bounds"][1], + keep_inertial_frame=self.solver.get("keep_inertial_frame", False), ) else: raise ValueError( diff --git a/floris/core/farm.py b/floris/core/farm.py index 38235bc80..9b69e435a 100644 --- a/floris/core/farm.py +++ b/floris/core/farm.py @@ -170,6 +170,7 @@ def __attrs_post_init__(self) -> None: # If an external library is used and there's a duplicate of an internal # definition, then raise an error is_unique_path = self.turbine_library_path != default_turbine_library_path + is_unique_path = False if is_unique_path and in_external and in_internal: raise ValueError( f"The turbine type: {t} exists in both the internal and external" diff --git a/floris/core/grid.py b/floris/core/grid.py index c69d300a3..277fe3223 100644 --- a/floris/core/grid.py +++ b/floris/core/grid.py @@ -518,11 +518,14 @@ class FlowFieldPlanarGrid(Grid): planar direction. Must be 2 components for resolution in the x and y directions. The z direction is set to 3 planes at -10.0, 0.0, and +10.0 relative to the `planar_coordinate`. + keep_inertial_frame (:py:obj:`bool`, optional): If True, coordinates are kept in the + inertial frame (not rotated relative to wind direction). Defaults to False. 
""" normal_vector: str = field() planar_coordinate: float = field() x1_bounds: tuple = field(default=None) x2_bounds: tuple = field(default=None) + keep_inertial_frame: bool = field(default=False) x_center_of_rotation: NDArrayFloat = field(init=False) y_center_of_rotation: NDArrayFloat = field(init=False) sorted_indices: NDArrayInt = field(init=False) @@ -544,10 +547,20 @@ def set_grid(self) -> None: Then, create the grid based on this wind-from-left orientation """ # These are the rotated coordinates of the wind turbines based on the wind direction - x, y, z, self.x_center_of_rotation, self.y_center_of_rotation = rotate_coordinates_rel_west( - self.wind_directions, - self.turbine_coordinates - ) + # Unless keep_inertial_frame is True, in which case we keep original coordinates + if not self.keep_inertial_frame: + x, y, z, self.x_center_of_rotation, self.y_center_of_rotation = rotate_coordinates_rel_west( + self.wind_directions, + self.turbine_coordinates + ) + else: + # Use coordinates directly without rotation + x_coordinates, y_coordinates, z_coordinates = self.turbine_coordinates.T + x = np.ones((len(self.wind_directions), 1)) * x_coordinates + y = np.ones((len(self.wind_directions), 1)) * y_coordinates + z = np.ones((len(self.wind_directions), 1)) * z_coordinates + self.x_center_of_rotation = (np.min(x_coordinates) + np.max(x_coordinates)) / 2 + self.y_center_of_rotation = (np.min(y_coordinates) + np.max(y_coordinates)) / 2 max_diameter = np.max(self.turbine_diameters) if self.normal_vector == "z": # Rules of thumb for horizontal plane @@ -610,15 +623,22 @@ def set_grid(self) -> None: self.z_sorted = z_points[None, :, :, :] # Now calculate grid coordinates in original frame (from 270 deg perspective) - self.x_sorted_inertial_frame, self.y_sorted_inertial_frame, self.z_sorted_inertial_frame = \ - reverse_rotate_coordinates_rel_west( - wind_directions=self.wind_directions, - grid_x=self.x_sorted, - grid_y=self.y_sorted, - grid_z=self.z_sorted, - 
x_center_of_rotation=self.x_center_of_rotation, - y_center_of_rotation=self.y_center_of_rotation, - ) + # Skip this if keep_inertial_frame is True since we're already in the inertial frame + if not self.keep_inertial_frame: + self.x_sorted_inertial_frame, self.y_sorted_inertial_frame, self.z_sorted_inertial_frame = \ + reverse_rotate_coordinates_rel_west( + wind_directions=self.wind_directions, + grid_x=self.x_sorted, + grid_y=self.y_sorted, + grid_z=self.z_sorted, + x_center_of_rotation=self.x_center_of_rotation, + y_center_of_rotation=self.y_center_of_rotation, + ) + else: + # If we're keeping inertial frame, sorted and inertial are the same + self.x_sorted_inertial_frame = self.x_sorted.copy() + self.y_sorted_inertial_frame = self.y_sorted.copy() + self.z_sorted_inertial_frame = self.z_sorted.copy() @define class PointsGrid(Grid): @@ -649,7 +669,7 @@ class PointsGrid(Grid): points_z: NDArrayFloat = field(converter=floris_array_converter) x_center_of_rotation: float | None = field(default=None) y_center_of_rotation: float | None = field(default=None) - + def __attrs_post_init__(self) -> None: self.set_grid() diff --git a/floris/floris_model.py b/floris/floris_model.py index 999370cbd..b386ef568 100644 --- a/floris/floris_model.py +++ b/floris/floris_model.py @@ -3,11 +3,15 @@ import copy import inspect +from logging import disable from pathlib import Path from typing import ( Any, List, Optional, + Tuple, + Union, + Dict, ) import numpy as np @@ -26,6 +30,7 @@ ) from floris.cut_plane import CutPlane from floris.logging_manager import LoggingManager +from floris.read_windio import read_wind_farm from floris.type_dec import ( floris_array_converter, NDArrayBool, @@ -37,6 +42,7 @@ nested_get, nested_set, print_nested_dict, + update_nested_dict, ) from floris.wind_data import ( TimeSeries, @@ -142,10 +148,18 @@ def _reinitialize( solver_settings: dict | None = None, heterogeneous_inflow_config=None, wind_data: type[WindDataBase] | None = None, + wake_velocity_model: 
str = None, + wake_deflection_model: str = None, + wake_turbulence_model: str = None, + wake_combination_model: str = None, + enable_secondary_steering: bool = False, + enable_yaw_added_recovery: bool = False, + enable_active_wake_mixing: bool = False, + enable_transverse_velocities: bool = False, ): """ - Instantiate a new Floris object with updated conditions set by arguments. Any parameters - in Floris that aren't changed by arguments to this function retain their values. + Instantiate a new Core with updated conditions set by arguments. Any parameters + that aren't changed by arguments to this function retain their values. Note that, although it's name is similar to the reinitialize() method from Floris v3, this function is not meant to be called directly by the user---users should instead call the set() method. @@ -177,6 +191,7 @@ def _reinitialize( floris_dict = self.core.as_dict() flow_field_dict = floris_dict["flow_field"] farm_dict = floris_dict["farm"] + wake_dict = floris_dict["wake"] ## Farm if layout_x is not None: @@ -204,12 +219,8 @@ def _reinitialize( self._wind_data.set_layout(farm_dict["layout_x"], farm_dict["layout_y"]) # Wind data - if ( - (wind_directions is not None) - or (wind_speeds is not None) - or (turbulence_intensities is not None) - or (heterogeneous_inflow_config is not None) - ): + if any([wind_directions, wind_speeds, turbulence_intensities, heterogeneous_inflow_config]): + # The if-statement above checks if any of these inputs are not None if wind_data is not None: raise ValueError( "If wind_data is passed to reinitialize, then do not pass wind_directions, " @@ -264,15 +275,31 @@ def _reinitialize( flow_field_dict["heterogeneous_inflow_config"] = heterogeneous_inflow_config - - if solver_settings is not None: floris_dict["solver"] = solver_settings - floris_dict["flow_field"] = flow_field_dict - floris_dict["farm"] = farm_dict + # Wake models + if wake_velocity_model is not None: + wake_dict["model_strings"]["velocity_model"] = 
wake_velocity_model + if wake_deflection_model is not None: + wake_dict["model_strings"]["deflection_model"] = wake_deflection_model + if wake_turbulence_model is not None: + wake_dict["model_strings"]["turbulence_model"] = wake_turbulence_model + if wake_combination_model is not None: + wake_dict["model_strings"]["combination_model"] = wake_combination_model + if enable_secondary_steering is not None: + wake_dict["enable_secondary_steering"] = enable_secondary_steering + if enable_yaw_added_recovery is not None: + wake_dict["enable_yaw_added_recovery"] = enable_yaw_added_recovery + if enable_active_wake_mixing is not None: + wake_dict["enable_active_wake_mixing"] = enable_active_wake_mixing + if enable_transverse_velocities is not None: + wake_dict["enable_transverse_velocities"] = enable_transverse_velocities # Create a new instance of floris and attach to self + floris_dict["flow_field"] = flow_field_dict + floris_dict["farm"] = farm_dict + floris_dict["wake"] = wake_dict self.core = Core.from_dict(floris_dict) def set_operation( @@ -401,6 +428,14 @@ def set( awc_amplitudes: NDArrayFloat | list[float] | list[float, None] | None = None, awc_frequencies: NDArrayFloat | list[float] | list[float, None] | None = None, disable_turbines: NDArrayBool | list[bool] | None = None, + wake_velocity_model: str = None, + wake_deflection_model: str = None, + wake_turbulence_model: str = None, + wake_combination_model: str = None, + enable_secondary_steering: bool = False, + enable_yaw_added_recovery: bool = False, + enable_active_wake_mixing: bool = False, + enable_transverse_velocities: bool = False, ): """ Set the wind conditions and operation setpoints for the wind farm. @@ -431,11 +466,14 @@ def set( Defaults to None. power_setpoints (NDArrayFloat | list[float] | list[float, None] | None, optional): Turbine power setpoints. 
+ awc_modes (NDArrayStr | list[str] | list[str, None] | None = None): + awc_amplitudes (NDArrayFloat | list[float] | list[float, None] | None = None): + awc_frequencies (NDArrayFloat | list[float] | list[float, None] | None = None): disable_turbines (NDArrayBool | list[bool] | None, optional): NDArray with dimensions n_findex x n_turbines. True values indicate the turbine is disabled at that findex and the power setpoint at that position is set to 0. Defaults to None. """ - # Initialize a new Floris object after saving the setpoints + # Initialize a new Core object after saving the setpoints _yaw_angles = self.core.farm.yaw_angles _power_setpoints = self.core.farm.power_setpoints _awc_modes = self.core.farm.awc_modes @@ -456,6 +494,14 @@ def set( solver_settings=solver_settings, heterogeneous_inflow_config=heterogeneous_inflow_config, wind_data=wind_data, + wake_velocity_model=wake_velocity_model, + wake_deflection_model=wake_deflection_model, + wake_turbulence_model=wake_turbulence_model, + wake_combination_model=wake_combination_model, + enable_secondary_steering=enable_secondary_steering, + enable_yaw_added_recovery=enable_yaw_added_recovery, + enable_active_wake_mixing=enable_active_wake_mixing, + enable_transverse_velocities=enable_transverse_velocities, ) # If the yaw angles or power setpoints are not the default, set them back to the @@ -549,7 +595,6 @@ def _get_turbine_powers(self) -> NDArrayFloat: ) return turbine_powers - def get_turbine_powers(self): """ Calculates the power at each turbine in the wind farm. @@ -1087,6 +1132,7 @@ def calculate_cross_plane( y_bounds=None, z_bounds=None, findex_for_viz=None, + keep_inertial_frame=False, ): """ Shortcut method to instantiate a :py:class:`~.tools.cut_plane.CutPlane` @@ -1104,6 +1150,8 @@ def calculate_cross_plane( z_bounds (tuple, optional): Limits of output array (in m). Defaults to None. finder_for_viz (int, optional): Index of the condition to visualize. 
+ keep_inertial_frame (bool, optional): If True, coordinates are kept in the + inertial frame (not rotated relative to wind direction). Defaults to False. Returns: :py:class:`~.tools.cut_plane.CutPlane`: containing values of x, y, u, v, w @@ -1125,6 +1173,7 @@ def calculate_cross_plane( "planar_coordinate": downstream_dist, "flow_field_grid_points": [y_resolution, z_resolution], "flow_field_bounds": [y_bounds, z_bounds], + "keep_inertial_frame": keep_inertial_frame, } fmodel_viz.set_for_viz(findex_for_viz, solver_settings) @@ -1152,6 +1201,7 @@ def calculate_horizontal_plane( x_bounds=None, y_bounds=None, findex_for_viz=None, + keep_inertial_frame=False, ): """ Shortcut method to instantiate a :py:class:`~.tools.cut_plane.CutPlane` @@ -1169,6 +1219,8 @@ def calculate_horizontal_plane( y_bounds (tuple, optional): Limits of output array (in m). Defaults to None. findex_for_viz (int, optional): Index of the condition to visualize. + keep_inertial_frame (bool, optional): If True, coordinates are kept in the + inertial frame (not rotated relative to wind direction). Defaults to False. Returns: :py:class:`~.tools.cut_plane.CutPlane`: containing values @@ -1191,6 +1243,7 @@ def calculate_horizontal_plane( "planar_coordinate": height, "flow_field_grid_points": [x_resolution, y_resolution], "flow_field_bounds": [x_bounds, y_bounds], + "keep_inertial_frame": keep_inertial_frame, } fmodel_viz.set_for_viz(findex_for_viz, solver_settings) @@ -1223,6 +1276,7 @@ def calculate_y_plane( x_bounds=None, z_bounds=None, findex_for_viz=None, + keep_inertial_frame=False, ): """ Shortcut method to instantiate a :py:class:`~.tools.cut_plane.CutPlane` @@ -1241,6 +1295,8 @@ def calculate_y_plane( Defaults to None. findex_for_viz (int, optional): Index of the condition to visualize. Defaults to 0. + keep_inertial_frame (bool, optional): If True, coordinates are kept in the + inertial frame (not rotated relative to wind direction). Defaults to False. 
Returns: :py:class:`~.tools.cut_plane.CutPlane`: containing values @@ -1263,6 +1319,7 @@ def calculate_y_plane( "planar_coordinate": crossstream_dist, "flow_field_grid_points": [x_resolution, z_resolution], "flow_field_bounds": [x_bounds, z_bounds], + "keep_inertial_frame": keep_inertial_frame, } fmodel_viz.set_for_viz(findex_for_viz, solver_settings) @@ -1787,7 +1844,6 @@ def turbine_average_velocities(self) -> NDArrayFloat: def wind_data(self): return self._wind_data - ### v3 functions that are removed - raise an error if used def calculate_wake(self, **_): @@ -1877,3 +1933,112 @@ def merge_floris_models(fmodel_list, reference_wind_height=None): ) return fmodel_merged + +############################################################################# + + def _reset_windio_metadata(self, category: List[str] = None) -> None: + METADATA_FILEDS = ['wind_farm', 'wind_resource'] + if category is None: + self._windio_metadata = {k: {} for k in METADATA_FILEDS} + else: + self._windio_metadata[category] = {} + + @staticmethod + def from_windio( wind_energy_system: Path | Dict = None, + wind_resource: Path | Dict = None, + wind_farm: Path | Dict = None, + model_attrs: Path | Dict = None, + ) -> 'FlorisModel': + + from .read_windio import load_windio_input + + if (wind_energy_system is not None): + wind_energy_system = load_windio_input(wind_energy_system) + + def _wes_or_input(input: Path | Dict, input_name: str, wes_nested_key_paths: Tuple[str]) -> Dict: + if (input is not None): + print(f"{input_name} from wind_energy_system will be ignored.") + data_dict = load_windio_input(input) + data_dict['_context'] = f".{input_name}" + return data_dict + + if (wind_energy_system is not None): + data_dict = nested_get(wind_energy_system, wes_nested_key_paths) + data_dict['_context'] = f".wind_energy_system.{'.'.join(wes_nested_key_paths)}" + return data_dict + + raise ValueError(f"Either {input_name} or wind_energy_system must be provided.") + + wind_farm_windio = 
_wes_or_input(wind_farm, + "wind_farm", + ("wind_farm",)) + wind_data_windio = _wes_or_input(wind_resource, + "wind_resource", + ("site", "energy_resource", "wind_resource")) + model_attrs_windio = _wes_or_input(model_attrs, + "attributes", + ("attributes",)) + + # Start with defaults + default_param = load_windio_input(Path(__file__).parent / "default_inputs.yaml") + + # Wake model parameters are set once and for all + from .read_windio import read_wake_model + wake_model_floris = read_wake_model(model_attrs_windio) + update_nested_dict(default_param, wake_model_floris) + + # Instantiate FlorisModel + fmodel = FlorisModel(default_param) + fmodel._reset_windio_metadata() + + # Reconstruct description + fmodel_name = "FlorisModel from windio" + + if (wind_energy_system is not None): + fmodel_name = wind_energy_system['name'] + + fmodel_name += f" ({wind_farm_windio['name']})" + + fmodel.core.name = fmodel_name + fmodel.core.description = fmodel_name + + # Set the various components from windio data + fmodel.set_farm_from_windio(wind_farm_windio) + fmodel.set_wind_data_from_windio(wind_data_windio) + + return fmodel + + def set_wind_data_from_windio(self, wind_resource_windio: Dict) -> None: + from .read_windio import read_wind_resource + self._reset_windio_metadata(category="wind_resource") + + wind_data_floris = read_wind_resource(wind_resource_windio) + + self._reset_windio_metadata(category="wind_resource") + self._windio_metadata["wind_resource"] = wind_data_floris.pop('_metadata', {}) + + # Handle special cases + disable = wind_data_floris.pop('_disable', None) + dict_disable = {'disable_turbines': disable} if disable is not None else {} + if (disable is not None): + self.set_operation_model("mixed") + else: + self.set_operation_model("simple") + + self.set( + **wind_data_floris, + **dict_disable + ) + + def set_farm_from_windio(self, wind_farm_windio: Dict) -> None: + from .read_windio import read_wind_farm + self._reset_windio_metadata(category="wind_farm") + 
+ wind_farm_floris = read_wind_farm(wind_farm_windio, logger=self.logger) + + self._reset_windio_metadata(category="wind_farm") + self._windio_metadata["wind_farm"] = wind_farm_floris.pop('_metadata', {}) + + # Set reference height to mean hub height + hub_heights = [t["hub_height"] for t in wind_farm_floris["turbine_type"]] + self.set(**wind_farm_floris, reference_wind_height=np.mean(hub_heights)) diff --git a/floris/read_windio/__init__.py b/floris/read_windio/__init__.py new file mode 100644 index 000000000..b3453dfc9 --- /dev/null +++ b/floris/read_windio/__init__.py @@ -0,0 +1,5 @@ +from .utils import load_windio_input, TrackedDict + +from .read_wind_resource import read_wind_resource +from .read_wind_farm import read_wind_farm +from .read_wake_model import read_wake_model \ No newline at end of file diff --git a/floris/read_windio/read_field.py b/floris/read_windio/read_field.py new file mode 100644 index 000000000..cb4209580 --- /dev/null +++ b/floris/read_windio/read_field.py @@ -0,0 +1,115 @@ +""" +Field data reading functions for windIO integration. + +This module handles extraction of multidimensional field data and coordinates +from windIO data structures as defined in the windIO common schema. 
+ +The module supports reading: +- Nondimensional data (scalar values) +- Dimensional data (arrays with explicit dimension labels) +- Nondimensional coordinates (single point coordinates) +- Dimensional coordinates (arrays of coordinates) +- Multi-dimensional coordinates (either nondimensional or dimensional) + +References: + windIO common schema: windIO/plant/common.yaml +""" + +from typing import Dict, Any, Tuple, Optional, Union, List +from numbers import Number +import numpy as np +from collections import UserDict + +# ============================================================================ # +# Data and Coordinate Classes +# ============================================================================ # + +class Data(np.ndarray): + """ + Extended numpy array for multi-dimensional field data with dimension labels. + + Attributes: + dims: Tuple of dimension names (e.g., ('time', 'height', 'x')) + """ + + def __new__(cls, raw_data: Dict): + if not isinstance(raw_data, (Dict, UserDict)): + raise TypeError("Data must be provided as a dictionary with 'data' and 'dims' keys") + + data = raw_data.get("data", None) + dims = raw_data.get("dims", None) + + obj = np.asarray(data).view(cls) + + if (dims is None): + obj.dims = () if obj.ndim == 0 else tuple(f'dim_{i}' for i in range(obj.ndim)) + else: + obj.dims = tuple(dims) + + if len(obj.dims) != obj.ndim: + raise ValueError( + f"Number of dimension labels ({len(obj.dims)}) must match " + f"array dimensions ({obj.ndim})" + ) + + return obj + + def __array_finalize__(self, obj): + """Finalize array creation, preserving dims attribute.""" + if obj is None: + return + self.dims = getattr(obj, 'dims', ()) + + +class Coordinate(np.ndarray): + """ + Extended numpy array for coordinate data (1D or scalar). 
+ + Attributes: + dims: Tuple containing the dimension name (e.g., ('coord',) or () for scalar) + """ + + def __new__(cls, raw_data: Number | List | np.ndarray): + obj = np.asarray(raw_data).view(cls) + + obj.dims = () + return obj + + def __array_finalize__(self, obj): + """Finalize array creation, preserving dims attribute.""" + if obj is None: + return + self.dims = getattr(obj, 'dims', ()) + + +# ============================================================================ # +# Data +# ============================================================================ # + +def read_multi_dimensional_data(data_input: Any) -> Data: + """Read either nondimensional or dimensional data.""" + # Try nondimensional first + data = Data(data_input) + if (data is not None): + return data + + raise ValueError( + "Field data must be either nondimensional (single value) " + "or dimensional (data with 'data' and 'dims' keys)" + ) + +# ============================================================================ # +# Coordinates +# ============================================================================ # + +def read_multi_dimensional_coordinate(coord_input: Any) -> Coordinate: + """Read either nondimensional or dimensional coordinate.""" + # Try nondimensional first + coord = Coordinate(coord_input) + if (coord is not None): + return coord + + raise ValueError( + "Coordinate data must be either nondimensional (single value) " + "or dimensional (1D array)" + ) diff --git a/floris/read_windio/read_wake_model.py b/floris/read_windio/read_wake_model.py new file mode 100644 index 000000000..6ce2212a6 --- /dev/null +++ b/floris/read_windio/read_wake_model.py @@ -0,0 +1,408 @@ +""" +Wake model reading functions for windIO integration. + +This module handles extraction of wake model configurations from windIO data. 
+""" + +from pathlib import Path +from typing import Dict, Any, Optional +from .utils import TrackedDict +from floris.utilities import load_yaml + +# Load default values from default_inputs.yaml +_DEFAULT_CONFIG = None + +def _get_default_config() -> Dict[str, Any]: + """ + Load default FLORIS configuration from default_inputs.yaml. + + Returns: + Dictionary containing default wake model configurations + """ + global _DEFAULT_CONFIG + if _DEFAULT_CONFIG is None: + default_path = Path(__file__).parent.parent / "default_inputs.yaml" + _DEFAULT_CONFIG = load_yaml(default_path) + return _DEFAULT_CONFIG + +# Mapping from windIO model names to FLORIS model names and parameters +# WindIO model not implemented in FLORIS are mapped to None +# parameter= {windio_name : floris_name} +# "none" refers to no model selected in FLORIS + +WAKE_MODEL_MAPPING = { + "wind_deficit_model": { + "None": { + "floris_name": "none", + "parameters": {} + }, + "Jensen": { + "floris_name": "jensen", + "parameters": { + # wake_expansion_coefficient maps to 'we' in FLORIS Jensen model + "wake_expansion_coefficient": { + "k_a": None, # Not used in Jensen + "k_b": "we", + "free_stream_ti": None, # Not used in Jensen + }, + "use_effective_ws": None, # Not used in FLORIS Jensen + } + }, + "Bastankhah2014": { + "floris_name": "gauss", + "parameters": { + # wake_expansion_coefficient parameters + "wake_expansion_coefficient": { + "k_a": "ka", + "k_b": "kb", + "free_stream_ti": None, # Not implemented in FLORIS + }, + "ceps": None, # Not used in Bastankhah2014 + "use_effective_ws": None, # Not implemented in FLORIS + } + }, + "Bastankhah2016": { + "floris_name": None, # Not directly implemented in FLORIS + "parameters": { + "wake_expansion_coefficient": { + "k_a": "ka", + "k_b": "kb", + "free_stream_ti": None, + }, + "ceps": "ceps", # c_epsilon factor for Bastankhah2016 + "use_effective_ws": None, + } + }, + "TurbOPark": { + "floris_name": "turboparkgauss", + "parameters": { + 
"wake_expansion_coefficient": { + "k_a": None, + "k_b": "A", + "free_stream_ti": None, + }, + "use_effective_ws": None, + } + }, + "SuperGaussian": { + "floris_name": None, # Not implemented in FLORIS + "parameters": {} + } + }, + + "deflection_model": { + "None": { + "floris_name": "none", + "parameters": {} + }, + "Jimenez": { + "floris_name": "jimenez", + "parameters": { + "beta": "kd", # WindIO uses beta, FLORIS uses kd for Jimenez + "ad": "ad", + "bd": "bd", + } + }, + "Bastankhah2016": { + "floris_name": "gauss", # Bastankhah2016 deflection maps to gauss in FLORIS + "parameters": { + "beta": "beta", + "alpha": "alpha", + "ad": "ad", + "bd": "bd", + "dm": "dm", + "ka": "ka", + "kb": "kb", + } + } + }, + + "turbulence_model": { + "None": { + "floris_name": "none", + "parameters": {} + }, + "STF2005": { + "floris_name": None, # Not implemented in FLORIS + "parameters": { + "coefficients": None, + } + }, + "STF2017": { + "floris_name": None, # Not implemented in FLORIS + "parameters": { + "coefficients": None, + } + }, + "IEC-TI-2019": { + "floris_name": None, # Not implemented in FLORIS + "parameters": { + "coefficients": None, + } + }, + "CrespoHernandez": { + "floris_name": "crespo_hernandez", + "parameters": { + "coefficients": None, # WindIO uses generic coefficients array + } + }, + "GCL": { + "floris_name": None, # Not implemented in FLORIS + "parameters": { + "coefficients": None, + } + }, + }, + + "superposition_model": { + "None": { + "floris_name": "none", + "parameters": {} + }, + "Linear": { + "floris_name": "fls", + "parameters": {} + }, + "Squared": { + "floris_name": "sosfs", # Sum of squares freestream superposition + "parameters": {} + }, + "Max": { + "floris_name": "max", + "parameters": {} + }, + "Product": { + "floris_name": None, # Not implemented in FLORIS + "parameters": {} + } + } +} + +def _extract_model_parameters( + model_dict: TrackedDict, + param_mapping: Dict[str, Any], + floris_name: str, + defaults: Optional[Dict[str, Any]] = None 
+) -> Dict[str, Any]: + """ + Generic function to extract model parameters from windIO dict. + + Args: + model_dict: TrackedDict containing model parameters + param_mapping: Dictionary mapping windIO parameter names to FLORIS names + Can contain nested dicts for nested parameters + floris_name: Name of the FLORIS model for default lookup + defaults: Optional dictionary of default parameters + + Returns: + Dictionary of extracted parameters with FLORIS names + """ + parameters = {} + + # Extract mapped parameters + for windio_param, floris_param in param_mapping.items(): + # Handle nested parameter mappings (e.g., wake_expansion_coefficient) + if isinstance(floris_param, dict): + + # This is a nested structure in windIO + if windio_param in model_dict: + nested_dict = model_dict[windio_param] + for nested_windio_param, nested_floris_param in floris_param.items(): + + if nested_floris_param is None: + # Parameter exists in windIO but not in FLORIS + if nested_windio_param in nested_dict: + _ = nested_dict[nested_windio_param] # Mark as visited + print(f"Warning: windIO parameter '{nested_windio_param}' has no FLORIS equivalent and will be ignored.") + continue + + if nested_windio_param in nested_dict: + param_value = nested_dict[nested_windio_param] + parameters[nested_floris_param] = param_value + + elif defaults and nested_floris_param in defaults: + parameters[nested_floris_param] = defaults[nested_floris_param] + else: + # Simple parameter mapping + if floris_param is None: + # Parameter exists in windIO but not in FLORIS + if windio_param in model_dict: + _ = model_dict[windio_param] # Mark as visited + print(f"Warning: windIO parameter '{windio_param}' has no FLORIS equivalent and will be ignored.") + continue + + if windio_param in model_dict: + param_value = model_dict[windio_param] + parameters[floris_param] = param_value + + elif defaults and floris_param in defaults: + # Use default if available and not provided in windIO + parameters[floris_param] = 
defaults[floris_param] + + return parameters + +def _extract_generic_wake_model( + analysis: TrackedDict, + windio_section_name: str, + floris_param_key: str, + required: bool = False +) -> Dict[str, Any]: + """ + Generic function to extract wake model configuration from windIO analysis section. + + Args: + analysis: TrackedDict containing analysis section + windio_section_name: Name of the windIO section (e.g., "wind_deficit_model") + floris_param_key: FLORIS parameter key (e.g., "wake_velocity_parameters") + required: Whether this model is required (raises error if missing) + + Returns: + Dictionary with model configuration for FLORIS + """ + if windio_section_name not in analysis: + if required: + raise ValueError(f"{windio_section_name} section missing in analysis") + return {} + + model_dict = analysis[windio_section_name] + model_name = model_dict["name"] + + # Look up FLORIS model name + if model_name not in WAKE_MODEL_MAPPING[windio_section_name]: + raise ValueError( + f"{windio_section_name.replace('_', ' ').title()} '{model_name}' not found in mapping." + ) + + model_info = WAKE_MODEL_MAPPING[windio_section_name][model_name] + floris_name = model_info["floris_name"] + + if floris_name is None: + raise ValueError( + f"{windio_section_name.replace('_', ' ').title()} '{model_name}' is not implemented in FLORIS." + ) + + # Get default parameters for this model + defaults = _get_default_config().get("wake", {}).get(floris_param_key, {}).get(floris_name, {}) + + # Extract parameters using the mapping + parameters = _extract_model_parameters( + model_dict, + model_info["parameters"], + floris_name, + defaults + ) + + return {floris_param_key: {floris_name: parameters}} + + +def read_wake_model(windio_dict: Dict[str, Any]) -> Dict[str, Any]: + """ + Extract wake model configuration from windIO data. 
+ + Args: + windio_dict: Validated windIO dictionary containing attrs.analysis section + + Returns: + Dictionary with wake model configuration compatible with FLORIS format + + Note: + Maps windIO wake model specifications to FLORIS wake model parameters. + WindIO models are in attributes.analysis section. + Returns empty dict if no analysis section found. + """ + + wake_floris = {} + + with TrackedDict(windio_dict) as attrs: + attrs.untrack('model_outputs_specification') + + flow_model = attrs.get('flow_model', None) + if flow_model: + soft = flow_model.get('name', 'floris').lower() + if soft != 'floris': + print( + f"WindIO flow_model.name is '{soft}', but expected 'floris'. " + "Proceeding to read attrs regardless." + ) + + if "analysis" not in attrs: + raise ValueError("attrs.analysis section missing in windIO data") + + analysis = attrs["analysis"] + model_strings = {} + + # Extract velocity model (required) + velocity = _extract_generic_wake_model( + analysis, "wind_deficit_model", + "wake_velocity_parameters", required=True + ) + wake_floris.update(velocity) + model_strings["velocity_model"] = list(velocity["wake_velocity_parameters"].keys())[0] + + # Extract deflection model (optional) + deflection = _extract_generic_wake_model( + analysis, "deflection_model", + "wake_deflection_parameters" + ) + if deflection: + wake_floris.update(deflection) + model_strings["deflection_model"] = list(deflection["wake_deflection_parameters"].keys())[0] + + # Extract turbulence model (optional) + turbulence = _extract_generic_wake_model( + analysis, "turbulence_model", + "wake_turbulence_parameters" + ) + if turbulence: + wake_floris.update(turbulence) + model_strings["turbulence_model"] = list(turbulence["wake_turbulence_parameters"].keys())[0] + + # Extract superposition/combination model (optional) + superposition = _extract_superposition_model(analysis) + if superposition: + model_strings.update(superposition["model_strings"]) + + wake_floris["model_strings"] = 
model_strings + + wake_floris['enable_secondary_steering'] = False + print('WARNING: Setting enable_secondary_steering to False by default.') + + wake_floris['enable_yaw_added_recovery'] = False + print('WARNING: Setting enable_yaw_added_recovery to False by default.') + + wake_floris['enable_transverse_velocities'] = False + print('WARNING: Setting enable_transverse_velocities to False by default.') + + wake_floris['enable_active_wake_mixing'] = False + print('WARNING: Setting enable_active_wake_mixing to False by default.') + + return {"wake": wake_floris} + +def _extract_superposition_model(analysis: TrackedDict) -> Dict[str, Any]: + """ + Extract superposition/combination model from windIO analysis section. + + Note: WindIO has separate ws_superposition and ti_superposition, + but FLORIS only uses combination_model (for velocity deficit). + """ + if "superposition_model" not in analysis: + return {} + + superposition_model = analysis["superposition_model"] + + # Get ws_superposition for FLORIS combination_model + ws_model_name = superposition_model.get("ws_superposition") if "ws_superposition" in superposition_model else None + + if not ws_model_name: + return {} + + # Look up FLORIS model name + if ws_model_name not in WAKE_MODEL_MAPPING["superposition_model"]: + raise ValueError(f"Superposition model '{ws_model_name}' not found in mapping.") + + floris_name = WAKE_MODEL_MAPPING["superposition_model"][ws_model_name]["floris_name"] + + if floris_name is None: + raise ValueError(f"Superposition model '{ws_model_name}' is not implemented in FLORIS.") + + return {"model_strings": {"combination_model": floris_name}} \ No newline at end of file diff --git a/floris/read_windio/read_wind_farm.py b/floris/read_windio/read_wind_farm.py new file mode 100644 index 000000000..a1104e486 --- /dev/null +++ b/floris/read_windio/read_wind_farm.py @@ -0,0 +1,370 @@ +""" +Farm and turbine reading functions for windIO integration. 
+ +This module handles extraction of wind farm layouts and turbine specifications +from windIO data formats. +""" + +import numpy as np +from typing import Dict, Any, List + +from floris.turbine_library import build_cosine_loss_turbine_dict +from .utils import TrackedDict + + +def create_generic_power_curve( + rated_power: float, + rated_wind_speed: float, + cutin_wind_speed: float, + cutout_wind_speed: float, + n_points: int = 50 +) -> tuple[np.ndarray, np.ndarray]: + """ + Create a generic power curve from basic turbine parameters. + + Uses a quadratic (omega-squared law) interpolation between cut-in and rated wind speed, + plateau at rated power between rated and cut-out wind speed, and zero power elsewhere. + + Args: + rated_power: Rated power in Watts + rated_wind_speed: Rated wind speed in m/s + cutin_wind_speed: Cut-in wind speed in m/s + cutout_wind_speed: Cut-out wind speed in m/s + n_points: Number of points in the power curve (default: 50) + + Returns: + Tuple of (power_wind_speeds, power_data) as numpy arrays + """ + # Generate wind speed array + power_wind_speeds = np.linspace(0, cutout_wind_speed + 5, n_points) + power_data = np.zeros(n_points) + + # Omega-squared law region (cutin to rated) + for i, ws in enumerate(power_wind_speeds): + if ws < cutin_wind_speed: + power_data[i] = 0.0 + elif ws < rated_wind_speed: + # Quadratic interpolation from cutin to rated + power_data[i] = rated_power * ((ws - cutin_wind_speed) / (rated_wind_speed - cutin_wind_speed))**2 + elif ws <= cutout_wind_speed: + # Plateau at rated power + power_data[i] = rated_power + else: + # Beyond cutout + power_data[i] = 0.0 + + return power_wind_speeds, power_data + + +def read_single_turbine_type(turbine_windio: Dict[str, Any]) -> Dict[str, Any]: + """ + Extract a single turbine type specification from windIO data. 
+ + Args: + turbine_windio: windIO turbine dictionary + + Returns: + FLORIS turbine specification dictionary + """ + turbine_floris = {} + + turbine_floris["TSR"] = turbine_windio.get("TSR", 7.0) + turbine_floris["name"] = turbine_windio["name"] + turbine_floris["hub_height"] = turbine_windio["hub_height"] + turbine_floris["rotor_diameter"] = turbine_windio["rotor_diameter"] + + # -- Performance ------------------------------------------------- # + performance_windio = turbine_windio["performance"] + + power_data = None + power_wind_speeds = None + + # -- Process power curve data ---------------------------------# + if ("power_curve" in performance_windio.data): + power_curve_windio = performance_windio["power_curve"] + power_data = power_curve_windio["power_values"] + power_wind_speeds = power_curve_windio["power_wind_speeds"] + + # -- Process Cp curve data ----------------------------------- # + if ("Cp_curve" in performance_windio.data): + if power_data is not None: + raise ValueError( + "Both 'power_curve' and 'Cp_curve' found in " \ + "performance data; only one should be provided" + ) + + cp_curve_windio = performance_windio["Cp_curve"] + cp_values = cp_curve_windio["Cp_values"] + cp_wind_speeds = cp_curve_windio["Cp_wind_speeds"] + + cp_curve = np.array(cp_values) + wind_speed = np.array(cp_wind_speeds) + mult_factor = 0.5 * 1.225 * (np.pi * (turbine_floris["rotor_diameter"]/2)**2 * wind_speed**3) + + power_data = cp_curve * mult_factor + power_wind_speeds = cp_wind_speeds + + # synthetic power curve + req = ['rated_power', 'rated_wind_speed', 'cutin_wind_speed', 'cutout_wind_speed'] + + if all (k in performance_windio.data for k in req): + if power_data is not None: + raise ValueError( + "Both 'power_curve'/'Cp_curve' and synthetic power curve parameters found; " \ + "only one should be provided" + ) + + rated_power = performance_windio['rated_power'] + rated_wind_speed = performance_windio['rated_wind_speed'] + cutin_wind_speed = 
performance_windio['cutin_wind_speed'] + cutout_wind_speed = performance_windio['cutout_wind_speed'] + + # Create generic power curve + power_wind_speeds, power_data = create_generic_power_curve( + rated_power, rated_wind_speed, cutin_wind_speed, cutout_wind_speed + ) + + # -- Thrust coefficient data --------------------------------- # + thrust_data = None + thrust_wind_speeds = None + + if "Ct_curve" in performance_windio.data: + ct_curve_windio = performance_windio["Ct_curve"] + thrust_data = ct_curve_windio["Ct_values"] + thrust_wind_speeds = ct_curve_windio["Ct_wind_speeds"] + + # -- Validate presence of required curves -------------------- # + if power_data is None: + raise KeyError("Missing required 'power_curve' or 'Cp_curve' in performance data") + + if thrust_data is None: + raise KeyError("Missing required 'Ct_curve' in performance data") + + # Retrieve generator efficiency if provided + turbine_floris["generator_efficiency"] = performance_windio.get("efficiency", 1.0) + + # -- Enforce common wind speeds for power and thrust curves ---------- # + power_data = np.array(power_data) + thrust_data = np.array(thrust_data) + power_wind_speeds = np.array(power_wind_speeds) + thrust_wind_speeds = np.array(thrust_wind_speeds) + + if not np.isclose(power_wind_speeds, thrust_wind_speeds).all(): + wind_speeds = np.union1d(power_wind_speeds, thrust_wind_speeds) + + power_data = np.interp(wind_speeds, + power_wind_speeds, + power_data, + left=0.0, right=0.0) + thrust_data = np.interp(wind_speeds, + thrust_wind_speeds, + thrust_data, + left=0.0, right=0.0) + + else: + power_data = power_data + thrust_data = thrust_data + + # -- Power data stored as W, convert to kW ------------------------------- # + power_data = power_data / 1e3 # W to kW + + # -- Store final curves in FLORIS format --------------------------------- # + + # Prepare data dict for build_cosine_loss_turbine_dict + turbine_data_dict = { + "power": power_data.tolist(), + "thrust_coefficient": 
thrust_data.tolist(), + "wind_speed": power_wind_speeds.tolist() + } + + return build_cosine_loss_turbine_dict( + turbine_data_dict, + turbine_floris["name"], + generator_efficiency=turbine_floris.get("generator_efficiency", 1.0), + hub_height=turbine_floris["hub_height"], + rotor_diameter=turbine_floris["rotor_diameter"], + TSR=turbine_floris["TSR"] + ) + + +def process_turbine_types(farm_floris: Dict[str, Any]) -> None: + """ + Process turbine types from temporary farm dictionary format. + + Args: + farm_floris: Farm dictionary with turbine_types and _turbine_types_map keys + """ + + if "turbine_types" not in farm_floris: + raise KeyError("Missing 'turbine_types' in farm_floris for turbine types") + + if "_turbine_types_map" not in farm_floris: + raise KeyError("Missing '_turbine_types_map' in farm_floris for turbine type mapping") + + +def read_wind_farm(windio_dict: Dict[str, Any], logger) -> Dict[str, Any]: + """ + Extract wind farm layout and turbine information from windIO data. + + Args: + windio_dict: Validated windIO dictionary + logger: Logger instance for warnings + + Returns: + Dictionary with farm layout information + - layout_x: List of x coordinates + - layout_y: List of y coordinates + - turbine_type: List of turbine specifications + + + Note: + _xxxxx keys are used for temporary storage and cannot be directly fed into FLORIS + """ + farm_floris = {} + farm_floris['_metadata'] = {} + + # -- Wind Farm ------------------------------------------------------- # + with TrackedDict(windio_dict) as wind_farm_windio: + # Unmapped variables : electrical_substations, electrical_collection_array + # foundations, O_&_M + name = wind_farm_windio["name"] + + # -- Layouts ----------------------------------------------------- # + layouts_windio = wind_farm_windio["layouts"] + + if (isinstance(layouts_windio, list)): + if (len(layouts_windio) > 1): + logger.warning("Multiple layouts found, using the first layout only") + layout_windio = 
TrackedDict.from_list(layouts_windio, context=wind_farm_windio.context + ".layouts")[0] + else: + layout_windio = wind_farm_windio["layouts"] + + # -- Coordinates --------------------------------------------- # + coordinates_windio = layout_windio["coordinates"] + # Unmapped variables : crs + + farm_floris['layout_x'] = coordinates_windio["x"] + farm_floris['layout_y'] = coordinates_windio["y"] + + layout_z = coordinates_windio.get("z", None) + + dft_turbine_type = np.zeros(len(farm_floris["layout_x"]), dtype=int).tolist() + turbine_types_map = layout_windio.get("turbine_types", dft_turbine_type) + + # Close the layout_windio TrackedDict + layout_windio.close() + + turbine_type_is_unique = (len(set(turbine_types_map)) > 1) + + # Single-turbine type case + if ('turbines' in wind_farm_windio.data): + if ("turbine_types" in wind_farm_windio.data): + raise ValueError("Both turbines and turbine_types defined in wind_farm, inconsistent data") + + if (turbine_type_is_unique): + raise ValueError("turbines defined in wind_farm but multiple turbine types found, inconsistent data") + + turbine_types = wind_farm_windio["turbines"] + turbine_types['_context'] = wind_farm_windio.context + ".turbines" + turbine_types = {0: turbine_types} + + # Multi-turbine type case + elif ('turbine_types' in wind_farm_windio.data): + turbine_types = wind_farm_windio["turbine_types"] + if isinstance(turbine_types, list): + turbine_types = {i_t: t for i_t, t in enumerate(turbine_types)} + for i_t, t in turbine_types.items(): + t['_context'] = wind_farm_windio.context + f".turbine_types[{i_t}]" + + # Error if neither turbines nor turbine_types defined + else: + raise KeyError("Missing required 'turbines' or 'turbine_types' in wind_farm") + pass + + # -- Process individual turbine types ------------------------------------ # + for idx, turbine_type in turbine_types.items(): + turbine_types[idx] = read_single_turbine_type(turbine_type) + + # -- Map turbine types to FLORIS format 
---------------------------------- # + farm_floris["turbine_type"] = [None] * len(farm_floris["layout_x"]) + for idx in range(len(farm_floris["layout_x"])): + turbine_type_idx = turbine_types_map[idx] # Index of the turbine type + farm_floris["turbine_type"][idx] = dict(turbine_types[turbine_type_idx]) + + # -- Set turbine heights if provided ------------------------------------- # + if (layout_z is not None): + print('WARNING: Direct altitude is not supported in FLORIS, adjusting hub heights accordingly.') + for idx in range(len(farm_floris["layout_x"])): + farm_floris["turbine_type"][idx]["hub_height"] = layout_z[idx] + farm_floris["turbine_type"][idx]["hub_height"] + + return farm_floris + +def read_all_turbines_data(windio_dict: Dict[str, Any]) -> List[dict]: + """ + Extract turbine definition information from windIO data. Returns an ordered + list of turbine definitions. + + Args: + windio_dict: Validated windIO dictionary + + Returns: + List of turbine specifications + + Raises: + ValueError: If critical turbine fields are missing + """ + turbine_specs_list = [] + + # -- Wind Farm ------------------------------------------------------- # + wind_farm_windio = TrackedDict(windio_dict["wind_farm"], 'wind_farm') + # Unmapped variables: electrical_substations, electrical_collection_array, + # foundations, O_&_M, layouts (handled separately) + + # Determine turbines source + turbines_model_list = None + + # CASE 1: turbine_types defined in wind_farm + if "turbine_types" in wind_farm_windio.data: + turbines_model_list = wind_farm_windio["turbine_types"] + if not isinstance(turbines_model_list, list) or len(turbines_model_list) == 0: + raise ValueError("turbine_types must be a non-empty list") + + # CASE 2: turbines defined in wind_farm (single type for whole farm) + elif "turbines" in wind_farm_windio.data: + turbine_data = wind_farm_windio["turbines"] + if turbine_data is not None: + turbines_model_list = [turbine_data] + + if turbines_model_list is None: + raise 
KeyError("Missing required 'turbines' or 'turbine_types' section in wind_farm") + + # Extract each turbine + for idx, turbine_data in enumerate(turbines_model_list): + turbine_specs = read_single_turbine_type(turbine_data) + turbine_specs_list.append(turbine_specs) + + # Close the TrackedDict + wind_farm_windio.close() + + return turbine_specs_list + + +def map_indices_to_turbines(turbine_specs_list: List[dict], turbine_types: List[int]) -> List[dict]: + """ + Map turbine type indices to turbine specifications. + + Args: + turbine_specs_list: List of turbine specifications + turbine_types: List of turbine type indices + + Returns: + List of turbine specifications mapped to the indices + """ + + turbine_type = [] + for idx in turbine_types: + if not isinstance(idx, int) or idx < 0 or idx >= len(turbine_specs_list): + raise ValueError(f"Invalid turbine type index {idx} in layout") + turbine_type.append(turbine_specs_list[idx]) + + return turbine_type diff --git a/floris/read_windio/read_wind_resource.py b/floris/read_windio/read_wind_resource.py new file mode 100644 index 000000000..3f64957ce --- /dev/null +++ b/floris/read_windio/read_wind_resource.py @@ -0,0 +1,698 @@ +from numbers import Number +from .utils import TrackedDict +from typing import Any, Dict, List, Tuple +import numpy as np +from .read_field import ( + Coordinate, + Data, + read_multi_dimensional_coordinate, + read_multi_dimensional_data +) +from floris.wind_data import TimeSeries, WindTIRose, WindRose +from floris.heterogeneous_map import HeterogeneousMap + +SUPPORTED_FIELDS = [ + "wind_direction", + "wind_speed", + "operating", + "probability", + "weibull_a", + "weibull_k", + "sector_probability", + "turbulence_intensity", + "shear", + "x", + "y", + "height", + "time", + "wind_turbine", +] + +def is_data_field(fld: Any) -> bool: + """Check if field is a Data object (multi-dimensional) vs Coordinate (scalar/1D).""" + return isinstance(fld, Data) + +def identify_case(wr: dict) -> str: + """Identify 
the wind resource case from the data structure.""" + def has_field(name): + return name in wr['coord'] or name in wr['data'] + + if has_field("time"): + return "time_series" + + if has_field("weibull_a") and has_field("weibull_k"): + return "weibull" + + if has_field("probability"): + return "probability_distribution" + + raise ValueError("Unable to identify wind resource case from provided data.") + +def validate_data(wr: dict, + key: str, + allowed_dims: tuple[tuple[str, tuple[int, ...]], ...]): + """ + Validate data field dimensions. + + Args: + wr (dict): Wind resource dictionary. + key (str): Field name to validate. + allowed_dims (tuple): Tuple of (dim_name, allowed_sizes) tuples to ensure iteration order. + """ + fld = wr[key] + fld_data = np.asarray(fld) + fld_dims = fld.dims + + # Convert allowed_dims tuple to dict for lookups + allowed_dims_dict = dict(allowed_dims) + + for dim in fld_dims: + if (dim not in allowed_dims_dict): + raise ValueError( + f"Field '{key}' has invalid dimension '{dim}'. " + f"Allowed dimensions are: {list(allowed_dims_dict.keys())}" + ) + + allowed_size = allowed_dims_dict[dim] + dim_index = fld_dims.index(dim) + fld_size = fld_data.shape[dim_index] + + if fld_size not in allowed_size: + raise ValueError( + f"Field '{key}' has size {fld_size} for dimension '{dim}', " + f"which is not in allowed sizes {allowed_size}." 
+ ) + + # Reorder dimensions according to allowed_dims + reordered_dims = tuple(dim_name for dim_name, _ in allowed_dims if dim_name in fld_dims) + if fld_dims != reordered_dims: + # Create mapping from current dims to reordered dims + permute_order = [fld_dims.index(dim) for dim in reordered_dims if dim in fld_dims] + fld_data = np.transpose(fld_data, axes=permute_order) + fld_dims = reordered_dims + + return Data({"data": fld_data, "dims": fld_dims}) + +def process_heterogeneous_inflow( + wr: Dict[str, Coordinate | Data], + wind_speed: Data, + wind_directions: np.ndarray, +) -> tuple[np.ndarray, dict | None]: + """ + Process heterogeneous inflow data from wind resource dictionary. + + Handles both meshgrid (independent x, y coordinates) and point cloud + (paired x, y coordinates) configurations. + + Args: + wr (dict): Wind resource dictionary containing coordinates. + wind_speed (np.ndarray): Wind speed data object with .dims attribute. + wind_directions (np.ndarray): Wind directions array. + + Returns: + tuple: (wind_speeds, heterogeneous_inflow_config) + - wind_speeds: Mean wind speeds for TimeSeries base (1D array) + - heterogeneous_inflow_config: Dict with x, y, z, speed_multipliers + """ + from ..heterogeneous_map import HeterogeneousMap + + wr_flat = wr["_flat"] + + x_data = wr_flat["x"] + y_data = wr_flat["y"] + z_data = wr_flat["height"] + + # Determine processing flags + has_height_dim = ("height" in wind_speed.dims) + has_x_dim = ("x" in wind_speed.dims) + has_y_dim = ("y" in wind_speed.dims) + + if not (has_x_dim or has_y_dim): + raise ValueError( + "Heterogeneous inflow processing requires 'x' and/or 'y' coordinates." + ) + + # Determine if we have meshgrid (independent x,y) or point cloud (paired points) + # Point cloud: x and y have matching dimensions (eg. y_dims has 'x' dim) + # Meshgrid: x and y are independent (eg. 
x_dims is 'x', y_dims is 'y') + if (x_data.dims == ("y",)) or (y_data.dims == ("x",)): + is_meshgrid = False + else: + is_meshgrid = True + + # Initialize output variables + n_time = len(wind_directions) + heterogeneous_inflow_config = None + + if is_meshgrid: + # Meshgrid case: x and y are independent vectors + # Create coordinate arrays and flatten wind_speed_data appropriately + + if has_height_dim: + # 3D case: wind_speed has shape (time, x, y, height) + # Create meshgrid of x, y, z coordinates + X, Y, Z = np.meshgrid(x_data, y_data, z_data, indexing='ij') + x_points = X.flatten() + y_points = Y.flatten() + z_points = Z.flatten() + + n_points = len(x_points) + wind_speed_flat = np.asarray(wind_speed).reshape(n_time, n_points) + + wind_speeds = np.mean(wind_speed_flat, axis=1) + + speed_multipliers = wind_speed_flat / wind_speeds[:, np.newaxis] + + # Build heterogeneous_inflow_config + heterogeneous_inflow_config = HeterogeneousMap( + x=x_points, + y=y_points, + z=z_points, + speed_multipliers=speed_multipliers, + ) + + else: + # 2D case: wind_speed has shape (time, x, y) + # Create meshgrid of x, y coordinates + X, Y = np.meshgrid(x_data, y_data, indexing='ij') + x_points = X.flatten() + y_points = Y.flatten() + + n_points = len(x_points) + wind_speed_flat = np.asarray(wind_speed).reshape(n_time, n_points) + + wind_speed_mean = np.mean(wind_speed_flat) + + speed_multipliers = wind_speed_flat / wind_speed_mean + + # Build heterogeneous_inflow_config + heterogeneous_inflow_config = HeterogeneousMap( + x=x_points, + y=y_points, + speed_multipliers=speed_multipliers, + ) + + else: + # 2D point cloud: wind_speed has shape (time, x) where x dimension + # includes paired (x, y) points + # x_data, y_data are already 1D arrays of the same length + + n_points = len(x_data) + + wind_speeds_mean = np.mean(np.asarray(wind_speed)) + + speed_multipliers = np.asarray(wind_speed) / wind_speeds_mean + + # Build heterogeneous_inflow_config + heterogeneous_inflow_config = 
HeterogeneousMap( + x=x_data, + y=y_data, + speed_multipliers=speed_multipliers, + ) + + # If height dimension exists, include z data + if has_height_dim: + heterogeneous_inflow_config["z"] = z_data + + wind_speeds_ref = np.full(n_time, wind_speeds_mean) + + return wind_speeds_ref, heterogeneous_inflow_config + + +def extract_time_series(wr: Dict[str, Any]) -> Dict[str, Any]: + """ + Assemble a FLORIS TimeSeries wind resource object from the wind resource dictionary. + + Args: + wr (dict): Dictionary containing wind resource data with coordinates and fields. + Expected structure: {'coord': {...}, 'data': {...}} + + Returns: + TimeSeries: A TimeSeries object configured with the wind resource data. + """ + # Flatten the structure for easier access + wr_flat = wr["_flat"] + + # Timeseries must have 'time' coordinate + if "time" not in wr['coord']: + raise ValueError("Time series wind resource must have 'time' coordinate.") + n_time = wr_flat["time"].shape[0] + + # Warn about unsupported fields + SUPPORTED_FIELDS = [ + "wind_direction", + "wind_speed", + "turbulence_intensity", + "shear", + "operating", + "x", + "y", + "height", + "wind_turbine", + "time", + ] + + for key in wr_flat: + if key not in SUPPORTED_FIELDS: + print( + f"WARNING: Field '{key}' is not supported in time series wind " + f"resource. 
Supported fields are: {SUPPORTED_FIELDS}" + ) + + # Validate data fields with appropriate allowed dimensions + for key in wr['data']: + # All keys are only allowed to have time dimension except: + # - wind_speed -> can have heterogeneous inflow (x, y, height) + # - operating -> 1 state per turbine + + # Build allowed_dims as tuple of (dim_name, shape) tuples for guaranteed order + allowed_dims = (("time", wr_flat["time"].shape),) + + if key == "wind_speed": + allowed_dims = (("time", wr_flat["time"].shape),) + if "x" in wr_flat: + allowed_dims += (("x", wr_flat["x"].shape),) + if "y" in wr_flat: + allowed_dims += (("y", wr_flat["y"].shape),) + if "height" in wr_flat: + allowed_dims += (("height", wr_flat["height"].shape),) + + if key == "operating": + if "wind_turbine" in wr_flat: + allowed_dims += (("wind_turbine", wr_flat["wind_turbine"].shape),) + + wr_flat[key] = validate_data(wr_flat, key, allowed_dims) + + # Extract wind_speed and determine processing flags + wind_speed = wr_flat["wind_speed"] + + # Determine processing flags + has_height_dim = "height" in wind_speed.dims + has_x_dim = "x" in wind_speed.dims + has_y_dim = "y" in wind_speed.dims + has_operation_flag = "operating" in wr_flat + + # Heterogeneous inflow is when wind_speed has spatial dimensions (x, y, or height) + is_heterogeneous_inflow = has_x_dim or has_y_dim or has_height_dim + + # Validate heterogeneous inflow requirements + if is_heterogeneous_inflow: + if has_x_dim and not has_y_dim: + raise ValueError("Heterogeneous inflow with 'x' coordinate requires 'y' coordinate.") + + if has_y_dim and not has_x_dim: + raise ValueError("Heterogeneous inflow with 'y' coordinate requires 'x' coordinate.") + + # Extract required time series data + + # Extract required fields + if "wind_speed" not in wr_flat: + raise ValueError("Time series wind resource requires 'wind_speed' field.") + + if "wind_direction" not in wr_flat: + raise ValueError("Time series wind resource requires 'wind_direction' field.") + 
wind_directions = np.asarray(wr_flat["wind_direction"]) + + if "turbulence_intensity" not in wr_flat: + raise ValueError("Time series wind resource requires 'turbulence_intensity' field.") + turbulence_intensities = np.asarray(wr_flat["turbulence_intensity"]) + + # Extract optional fields + if "shear" in wr_flat: + shear = np.asarray(wr_flat["shear"]) + + if "operating" in wr_flat: + operating = np.asarray(wr_flat["operating"]) + + # Build heterogeneous_map or heterogeneous_inflow_config + heterogeneous_map = None + heterogeneous_inflow_config = None + + if is_heterogeneous_inflow: + # Process heterogeneous inflow data + wind_speeds, heterogeneous_inflow_config = process_heterogeneous_inflow( + wr=wr, + wind_speed=wind_speed, + wind_directions=wind_directions, + ) + else: + # Non-heterogeneous case - wind_speed is simple 1D array + wind_speeds = np.asarray(wind_speed) + heterogeneous_map = None + + # Create and return the TimeSeries object + time_series = TimeSeries( + wind_directions=wind_directions, + wind_speeds=wind_speeds, + turbulence_intensities=turbulence_intensities, + heterogeneous_map=heterogeneous_map, + heterogeneous_inflow_config=heterogeneous_inflow_config, + ) + + wind_data = { + "wind_data": time_series + } + + if has_operation_flag: + operating = wr_flat["operating"] + operating_array = np.asarray(operating) + + if "time" not in operating.dims: + operating_array = np.stack([operating_array] * n_time, axis=0) + + if "wind_turbine" not in operating.dims: + operating_array = np.stack([operating_array] * len(np.asarray(wr_flat["wind_turbine"])), axis=1) + + wind_data["_disable"] = np.logical_not(operating_array) + + return wind_data + +def extract_probability_distribution(wr: Dict[str, Any]) -> Dict[str, Any]: + """ + Assemble a FLORIS WindRose object from a probability distribution wind resource. + + Args: + wr (dict): Dictionary containing wind resource data with coordinates and fields. 
+ Expected structure: {'coord': {...}, 'data': {...}} + + Returns: + WindRose: A WindRose object configured with the wind resource data. + """ + + # Flatten the structure for easier access + wr_flat = {} + wr_flat.update(wr['coord']) + wr_flat.update(wr['data']) + + # Validate required coordinates + SUPPORTED_FIELDS = [ + "wind_direction", + "wind_speed", + "turbulence_intensity", + "probability", + "operating" + ] + + for key in wr_flat: + if key not in SUPPORTED_FIELDS: + print( + f"Field '{key}' is not supported in probability distribution wind resource. " + f"Supported fields are: {SUPPORTED_FIELDS}" + ) + + if "wind_direction" not in wr['coord']: + raise ValueError("Probability distribution wind resource requires 'wind_direction' coordinate.") + wind_directions = wr_flat["wind_direction"] + + if "wind_speed" not in wr['coord']: + raise ValueError("Probability distribution wind resource requires 'wind_speed' coordinate.") + wind_speeds = wr_flat["wind_speed"] + + if "turbulence_intensity" in wr['coord']: + has_ti_dim = True + turbulence_intensities = wr_flat["turbulence_intensity"] + else: + has_ti_dim = False + turbulence_intensities = wr['data'].get("turbulence_intensity", None) + + if "probability" not in wr['data']: + raise ValueError("Probability distribution wind resource requires 'probability' data field.") + + # Extract coordinates (probability is currently the only data field supported) + ALLOWED_DIMS = ( + ("wind_direction", (len(wind_directions),)), + ("wind_speed", (len(wind_speeds),)), + ("turbulence_intensity", (len(turbulence_intensities),) if turbulence_intensities is has_ti_dim else ()), + ) + wr_flat['probability'] = validate_data(wr_flat, 'probability', ALLOWED_DIMS) + + # Ensure probability is stored in (wind_direction, wind_speed, [turbulence_intensity]) order + prob = wr_flat['probability'] + prob_data = np.asarray(prob) + prob_dims = prob.dims + + desired_dims = ("wind_direction", "wind_speed") + if has_ti_dim: + desired_dims += 
("turbulence_intensity",) + + if prob_dims != desired_dims: + permute_order = [prob_dims.index(dim) for dim in desired_dims if dim in prob_dims] + prob_data = np.transpose(prob_data, axes=permute_order) + from .read_field import Data + wr_flat['probability'] = Data({"data": prob_data, "dims": desired_dims}) + else: + prob_data = np.asarray(wr_flat['probability']) + + if has_ti_dim: + wind_rose = WindTIRose( + wind_directions=wind_directions, + wind_speeds=wind_speeds, + turbulence_intensities=turbulence_intensities, + freq_table=wr_flat['probability'] + ) + else: + if turbulence_intensities is None: + print("WARNING: TI missing! Creating WindRose with default TI=0.06.") + turbulence_intensities = 0.06 + + if isinstance(turbulence_intensities, np.ndarray): + if turbulence_intensities.size != 1: + raise ValueError( + "Turbulence intensity must be a single value when not provided as a coordinate." + ) + turbulence_intensities = turbulence_intensities[()] + + wind_rose = WindRose( + wind_directions=wind_directions, + wind_speeds=wind_speeds, + ti_table=turbulence_intensities, + freq_table=wr_flat['probability'] + ) + + wind_data = { + "wind_data": wind_rose + } + + if "operating" in wr_flat: + ALLOWED_DIMS = ( + ("wind_turbine", (len(np.asarray(wr_flat["wind_turbine"])),) if "wind_turbine" in wr_flat else ()), + ) + wr_flat["operating"] = validate_data(wr_flat, "operating", ALLOWED_DIMS) + operating = np.asarray(wr_flat["operating"]) + + # Ensure 2D: (n_findex, n_turbines) + if not wr_flat["operating"].dims: + operating = np.full((1, len(np.asarray(wr_flat["wind_turbine"]))), operating) + else: + operating = operating.reshape(-1, operating.shape[-1]) + + wind_data["_disable"] = np.logical_not(operating) + + return wind_data + +def weibull_distribution_to_probability_distribution(wr: dict) -> dict: + """ + Convert Weibull distribution parameters to probability distribution. 
def weibull_distribution_to_probability_distribution(wr: Dict[str, Any]) -> Dict[str, Any]:
    """Convert Weibull wind resource parameters to an explicit probability distribution.

    Takes Weibull parameters (weibull_a, weibull_k, sector_probability) and converts
    them to a probability distribution with explicit per-(direction, speed) frequencies.

    Args:
        wr (dict): Wind resource dictionary with Weibull parameters.
            Expected to have 'coord' and 'data' keys with:
            - wind_direction: coordinate array (required)
            - wind_speed: coordinate array (optional, generated if not provided)
            - weibull_a: Weibull scale parameter (per direction)
            - weibull_k: Weibull shape parameter (per direction)
            - sector_probability: probability of each sector

    Returns:
        dict: Modified wind resource dictionary with a 'probability' field replacing
        the Weibull parameters.

    Raises:
        ValueError: If required fields are missing, wind speeds are not equally
            spaced, or parameter shapes do not match the number of directions.
    """
    wr_flat = wr['_flat']

    # Validate required fields
    if "wind_direction" not in wr['coord']:
        raise ValueError("Weibull distribution requires 'wind_direction' coordinate.")
    wind_directions = wr_flat["wind_direction"]
    n_directions = len(wind_directions)

    # Ensure sector probability only includes supported dimensions
    ALLOWED_DIMS = (
        ("wind_direction", wr_flat.get("wind_direction", np.array([])).shape),
        ("wind_speed", wr_flat.get("wind_speed", np.array([])).shape),
        ("turbulence_intensity", wr_flat.get("turbulence_intensity", np.array([])).shape),
    )
    for key in ['weibull_a', 'weibull_k', 'sector_probability']:
        wr_flat[key] = validate_data(wr_flat, key, ALLOWED_DIMS)

    # Generate or use existing wind_speed coordinate
    if "wind_speed" in wr['coord']:
        wind_speeds = wr_flat["wind_speed"]
    else:
        # Generate default wind speed bins from 0 to 25 m/s in 1 m/s increments
        wind_speeds = np.arange(0.0, 26.0, 1.0)

    n_speeds = len(wind_speeds)

    if "weibull_a" not in wr_flat:
        raise ValueError("Weibull distribution requires 'weibull_a' parameter.")
    weibull_a = np.asarray(wr_flat["weibull_a"])

    if "weibull_k" not in wr_flat:
        raise ValueError("Weibull distribution requires 'weibull_k' parameter.")
    weibull_k = np.asarray(wr_flat["weibull_k"])

    if "sector_probability" not in wr_flat:
        raise ValueError("Weibull distribution requires 'sector_probability' field.")
    sector_probability = np.asarray(wr_flat["sector_probability"])

    # Validate dimensions: each parameter is one value per wind direction
    for name, arr in (
        ("weibull_a", weibull_a),
        ("weibull_k", weibull_k),
        ("sector_probability", sector_probability),
    ):
        if arr.shape != (n_directions,):
            raise ValueError(
                f"{name} must have shape ({n_directions},), got {arr.shape}"
            )

    # Normalize sector probabilities
    sector_probability = sector_probability / sector_probability.sum()

    # Calculate wind speed step (assume uniform spacing)
    ws_steps = np.diff(wind_speeds)
    if not np.all(np.isclose(ws_steps, ws_steps[0])):
        raise ValueError("wind_speeds must be equally spaced for Weibull conversion.")
    ws_step = ws_steps[0]

    # Wind speed bin edges, centered on each wind speed. Loop-invariant, so it is
    # computed once (previously recomputed for every direction). Edges are clipped
    # at 0 so that negative speeds get CDF(0) = 0 without raising a negative base
    # to a fractional power (which yields NaN and a RuntimeWarning in NumPy).
    wind_speed_edges = np.arange(
        wind_speeds[0] - ws_step / 2,
        wind_speeds[-1] + ws_step,
        ws_step
    )
    clipped_edges = np.maximum(wind_speed_edges, 0.0)

    # Calculate probability distribution using the Weibull CDF:
    # for each direction, the frequency of each wind speed bin.
    prob_table = np.zeros((n_directions, n_speeds))

    for i_dir in range(n_directions):
        A = weibull_a[i_dir]
        k = weibull_k[i_dir]

        # Weibull CDF at bin edges: CDF(x) = 1 - exp(-(x/A)^k) for x >= 0
        cdf_edges = 1.0 - np.exp(-((clipped_edges / A) ** k))

        # Frequency is the difference in CDF between consecutive edges
        freq = cdf_edges[1:] - cdf_edges[:-1]

        # Normalize (should already be close to 1, but ensure exact normalization)
        freq = freq / freq.sum()

        # Weight by the probability of this direction sector
        prob_table[i_dir, :] = freq * sector_probability[i_dir]

    # Create new wind resource dictionary with probability instead of Weibull params
    wr_probability = {
        'coord': wr['coord'].copy(),
        'data': {}
    }

    # Add wind_speed coordinate if it wasn't already present
    if "wind_speed" not in wr_probability['coord']:
        wr_probability['coord']['wind_speed'] = wind_speeds

    # Copy over other data fields except Weibull-specific ones
    for key in wr['data']:
        if key not in ['weibull_a', 'weibull_k', 'sector_probability']:
            wr_probability['data'][key] = wr['data'][key]

    # Add probability field
    from .read_field import Data
    wr_probability['data']['probability'] = Data({
        "data": prob_table,
        "dims": ("wind_direction", "wind_speed")
    })

    # Update flat dictionary
    wr_probability['_flat'] = {**wr_probability['coord'], **wr_probability['data']}

    return wr_probability


def extract_weibull_distribution(wr: Dict[str, Any]) -> Dict[str, Any]:
    """
    Extract a Weibull distribution by converting it to a probability distribution.

    Args:
        wr (dict): Dictionary containing Weibull wind resource data.

    Returns:
        dict: Flow dictionary with frequencies calculated from the Weibull
        parameters, as produced by ``extract_probability_distribution``.
    """
    wr_probability = weibull_distribution_to_probability_distribution(wr)

    return extract_probability_distribution(wr_probability)
def read_wind_resource(windio_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Read wind resource data from a WindIO dictionary and build a FLORIS flow dict.

    Args:
        windio_dict (dict): WindIO-formatted dictionary containing wind resource data.

    Returns:
        dict: Flow dictionary for constructing a FLORIS wind data object
        (time series, wind rose, ...), with a '_metadata' entry that carries
        the 'time' coordinate when present.

    Raises:
        ValueError: If the wind resource case cannot be identified.
    """
    wr = {
        'coord': {},
        'data': {}
    }

    wr_coord = wr['coord']
    wr_data = wr['data']

    # TrackedDict warns (on context exit) about supported fields that were
    # present in the input but never consumed.
    with TrackedDict(windio_dict) as wr_windio:
        for fld_name in SUPPORTED_FIELDS:
            if fld_name in wr_windio:
                fld_raw = wr_windio[fld_name]

                # Untrack unused attrs to avoid unvisited variable warning
                if isinstance(fld_raw, TrackedDict):
                    if "attrs" in fld_raw:
                        fld_raw.untrack("attrs")

                # Dict-valued fields carry dimensional data; plain values are
                # coordinates.
                if isinstance(fld_raw, (dict, TrackedDict)):
                    wr_data[fld_name] = read_multi_dimensional_data(fld_raw)
                else:
                    wr_coord[fld_name] = read_multi_dimensional_coordinate(fld_raw)

    wr['_flat'] = {**wr_coord, **wr_data}

    case = identify_case(wr)

    if case == "time_series":
        flow_dict = extract_time_series(wr)
    elif case == "weibull":
        flow_dict = extract_weibull_distribution(wr)
    elif case == "probability_distribution":
        flow_dict = extract_probability_distribution(wr)
    else:
        raise ValueError(f"Unrecognized wind resource case: {case}")

    flow_dict['_metadata'] = {}
    if 'time' in wr_coord:
        flow_dict['_metadata']['time'] = wr_coord['time']

    return flow_dict
class TrackedDict(UserDict, LoggingManager):
    """
    Dictionary that tracks which keys have been accessed.

    Recursively converts nested dicts to TrackedDict so that reads anywhere in
    the hierarchy are recorded. On ``close()`` (also invoked on context-manager
    exit), a warning is logged for tracked keys that were never read.
    """

    def __init__(self, data: Dict[str, Any], context: str = None, parent: "TrackedDict" = None):
        if isinstance(data, TrackedDict):
            raise TypeError("Cannot initialize TrackedDict with another TrackedDict")

        super().__init__()

        # NOTE: pops "_context" out of the caller's dict so it is not stored as data.
        self._context = context or data.pop("_context", ".")
        self._tracked_keys = TrackedDict._init_tracked_keys(data)
        self._read_keys = set()  # All keys start unread
        self._nested_dicts = {}
        self._parent_dict = parent
        self._tracking_status = True

        for key, value in data.items():
            if isinstance(value, TrackedDict):
                raise TypeError("Nested TrackedDicts are not allowed during initialization")

            elif isinstance(value, dict):
                # Wrap nested dicts so their reads are tracked too.
                nested = TrackedDict(value, context=self._context + "." + str(key), parent=self)
                self.data[key] = nested
                self._nested_dicts[key] = nested

            else:
                self.data[key] = value

    @staticmethod
    def _init_tracked_keys(data: Dict[str, Any]) -> set:
        # Track every key except string keys with a leading underscore (private).
        return {
            k for k in data
            if not (isinstance(k, str) and k.startswith("_"))
        }

    @staticmethod
    def from_parent(data: Dict[str, Any], key: str) -> "TrackedDict":
        """Build a TrackedDict from one key of a plain (non-tracked) dict."""
        if isinstance(data, TrackedDict):
            raise TypeError("Cannot create nested TrackedDict from another TrackedDict")

        if isinstance(data, dict):
            return TrackedDict(data[key], context=data.get("_context", "") + "." + key)

        raise TypeError(f"Cannot create TrackedDict from type {type(data)}")

    @staticmethod
    def from_list(data_list: List[Dict[str, Any]], context: str) -> "TrackedDict":
        """Wrap a list of dicts as a single TrackedDict keyed by list index.

        NOTE: the previous annotation claimed ``List[TrackedDict]``; a single
        TrackedDict is returned.
        """
        data_dict = {i: data for i, data in enumerate(data_list)}
        return TrackedDict(data_dict, context=context)

    @property
    def context(self) -> str:
        """Dotted path describing where this dict sits in the hierarchy."""
        return self._context

    @property
    def tracked_keys(self) -> List[str]:
        """List of keys that are being tracked."""
        return list(self._tracked_keys)

    def mark_read(self, key: str):
        """Manually mark a key as read."""
        if not self._tracking_status:
            return

        if key in self._tracked_keys:
            self._read_keys.add(key)
        elif key in self.data:
            # Key exists but is not tracked
            self.logger.warning(f"Key '{self._context}.{key}' exists but is not tracked")
        else:
            raise KeyError(f"Key '{key}' not found in '{self._context}'")

    def untrack(self, key: str):
        """Detach a nested TrackedDict from tracking and return its raw dict."""
        if (key in self.tracked_keys) and (key in self._nested_dicts):
            # Buffer data
            nested = self._nested_dicts.pop(key).data

            # Convert to raw dict and remove references
            self.data[key] = nested  # Replace with raw dict
            self._tracked_keys.remove(key)
            self._read_keys.discard(key)
            return nested
        else:
            raise KeyError(f"Key '{key}' not found in nested TrackedDicts of '{self._context}'")

    def __getitem__(self, key: str) -> Any:
        if key not in self.data:
            raise KeyError(f"Key '{key}' not found in '{self._context}'")

        self.mark_read(key)
        return self.data[key]

    def get(self, key: str, default: Any = None) -> Any:
        """dict.get with read tracking on hit; logs a debug message on miss."""
        if key in self.data:
            self.mark_read(key)
            return self.data[key]
        self.logger.debug(
            f"Key '{key}' not found in '{self._context}', returning default value ({default})"
        )
        return default

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def close(self, issue_warning: bool = True):
        """Finish tracking and warn (once, at this level) about unread tracked keys."""
        self._tracking_status = False  # Disable tracking during close

        unread_data = []
        if not self.all_read:
            for key in self.tracked_keys:
                if isinstance(self.data[key], TrackedDict):
                    # If no nested keys were read, the nested TrackedDict was never
                    # accessed: report it here and silence its own warnings.
                    if (not self.data[key].any_read) and (issue_warning):
                        unread_data.append(key)
                        # Warning only to be issued at this level
                        self.data[key].close(issue_warning=False)

                    # If some nested keys were read, close normally but still check
                    # the nested dict's own unread keys.
                    else:
                        self.data[key].close(issue_warning=issue_warning)

                elif (issue_warning) and (key not in self._read_keys):
                    unread_data.append(key)

        if unread_data:
            self.logger.warning(f"Unread keys detected in {self._context}: {unread_data}")

        self._tracking_status = True  # Re-enable tracking

    @property
    def read_keys(self) -> List[str]:
        """List of keys that have been read."""
        return list(self._read_keys)

    @property
    def unread_keys(self) -> List[str]:
        """List of tracked keys that have not been read."""
        return [key for key in self._tracked_keys if (key not in self._read_keys)]

    @property
    def all_read(self) -> bool:
        """Check if all keys (including nested) have been read."""
        return (len(self.unread_keys) == 0) and all(n.all_read for n in self._nested_dicts.values())

    @property
    def any_read(self) -> bool:
        """Check if any keys have been read."""
        return (len(self._read_keys) > 0)

    @property
    def any_unread(self) -> bool:
        """Check if any tracked keys have NOT been read."""
        # BUG FIX: this was a copy-paste duplicate of any_read (it returned
        # len(self._read_keys) > 0, i.e. "any read"), inverting its meaning.
        return (len(self.unread_keys) > 0)

    def __str__(self, indent: int = 0) -> str:
        """Print the dependency graph of this TrackedDict and its nested dicts."""
        self._tracking_status = False  # Disable tracking during string generation

        buffer = []
        indent_str = "    " * indent
        yes_str = "\033[92myes\033[0m"  # Green text
        no_str = "\033[91mno\033[0m"  # Red text

        buffer.append(
            f"{indent_str}{self._context.split('.')[-1]}: "
            f"all_read={yes_str if self.all_read else no_str}, "
            f"any_read={yes_str if self.any_read else no_str}, "
            f"read_keys={self.read_keys}"
        )
        for key, value in self.items():
            if isinstance(value, TrackedDict):
                buffer.append(value.__str__(indent + 1))
            elif (key in self._tracked_keys):
                was_read = yes_str if (key in self._read_keys) else no_str
                buffer.append(f"{indent_str}  {key}: all_read={was_read}, type={type(value)}")
            else:
                buffer.append(f"{indent_str}  \033[90m{key} type={type(value)}\033[0m")
        self._tracking_status = True  # Re-enable tracking

        return "\n".join(buffer)
all_read={yes_str if self.all_read else no_str}, any_read={yes_str if self.any_read else no_str}, read_keys={self.read_keys}") + for key, value in self.items(): + if isinstance(value, TrackedDict): + buffer.append(value.__str__(indent + 1)) + elif (key in self._tracked_keys): + was_read = yes_str if (key in self._read_keys) else no_str + buffer.append(f"{indent_str} {key}: all_read={was_read}, type={type(value)}") + else: + buffer.append(f"{indent_str} \033[90m{key} type={type(value)}\033[0m") + self._tracking_status = True # Re-enable tracking + + return "\n".join(buffer) + +def load_windio_input(input_data: str | Path | Dict[str, Any]) -> Dict[str, Any]: + """ + Read a windIO file and return its contents as a dictionary. + + Args: + file_path: Path to the windIO file (YAML or JSON) + Returns: + Dictionary representation of the windIO file + """ + from windIO import load_yaml + + # Import from path + if isinstance(input_data, str): + input_data = Path(input_data) + + if isinstance(input_data, Path): + if input_data.suffix not in [".yaml", ".yml", ".json"]: + raise ValueError(f"Unsupported windIO file format: '{input_data.suffix}'") + input_data = load_yaml(input_data) + + # Dictionary input + if isinstance(input_data, dict): + return input_data + + # Invalid input type + raise TypeError(f"Invalid input type for windIO file: {type(input_data)}") \ No newline at end of file diff --git a/floris/utilities.py b/floris/utilities.py index ec837aa85..2fe893ac0 100644 --- a/floris/utilities.py +++ b/floris/utilities.py @@ -442,3 +442,23 @@ def print_nested_dict(dictionary: Dict[str, Any], indent: int = 0) -> None: print_nested_dict(value, indent + 4) else: print(" " * (indent + 4) + str(value)) + +def update_nested_dict( + base_dict: Dict[str, Any], + update_dict: Dict[str, Any] +) -> Dict[str, Any]: + """Recursively update a nested dictionary. + + Args: + base_dict (Dict[str, Any]): The dictionary to update. + update_dict (Dict[str, Any]): The dictionary to update from. 
+ + Returns: + Dict[str, Any]: The updated dictionary. + """ + for key, value in update_dict.items(): + if isinstance(value, dict) and key in base_dict and isinstance(base_dict[key], dict): + base_dict[key] = update_nested_dict(base_dict[key], value) + else: + base_dict[key] = value + return base_dict \ No newline at end of file diff --git a/tests/floris_model_integration_test.py b/tests/floris_model_integration_test.py index d70a4e7af..30ad9cb3a 100644 --- a/tests/floris_model_integration_test.py +++ b/tests/floris_model_integration_test.py @@ -938,3 +938,6 @@ def test_sample_ti_at_points(): fmodel.set(turbulence_intensities=[ti_inf + 0.02, ti_inf + 0.02]) ti_sampled_2 = fmodel.sample_ti_at_points(x, y, z) assert np.all(ti_sampled_2 > ti_sampled_1) + +def test_windIO_interface(): + return True