Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions .github/workflows/Publish_FAD.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
name: Build and upload to PyPI

# Build on every branch push, tag push, and pull request change:
#on: [push, pull_request]
# Alternatively, to publish when a (published) GitHub Release is created, use the following:
on:
  push:
  pull_request:
  release:
    types:
      - published

jobs:
  build_and_upload_pypi:
    runs-on: ubuntu-latest
    permissions:
      id-token: write  # IMPORTANT: this permission is mandatory for trusted publishing
    # upload to PyPI on every tag starting with 'v'
    #if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    # alternatively, to publish when a GitHub Release is created, use the following rule:
    if: github.event_name == 'release' && github.event.action == 'published'
    steps:
      - name: checkout repository
        uses: actions/checkout@v5

      - name: Set up Python
        uses: actions/setup-python@v6
        id: cp
        with:
          python-version: "3.12"
          update-environment: true

      - name: Install pypa/build
        run: |
          python -m pip install build

      - name: Build a binary wheel and a source tarball
        run: |
          python -m build .

      - name: Upload to PyPI
        uses: pypa/gh-action-pypi-publish@release/v1
        # with:
        #   user: __token__
        #   password: ${{ secrets.pypi_password }}
        # To test: repository_url: https://test.pypi.org/legacy/
6 changes: 3 additions & 3 deletions fad/design/LinearSystem.py
Original file line number Diff line number Diff line change
Expand Up @@ -358,7 +358,7 @@ def __init__(self, coords, intraMat, nPtfm, interMats=[],
# values for ten__w, kl__w, and kt__kl for each mooring object assuming a continuous catenary line


def preprocess(self, plots=0, display=0):
def preprocess(self, plots=0, display=0, rcond=0.0001):
'''Initializes things...

Does all the things that can be done once the lineDesign characteristics are set (f/l/k and f/w)
Expand Down Expand Up @@ -410,10 +410,10 @@ def preprocess(self, plots=0, display=0):
'''based on structure and tension matrices, calculates self.Knobs_k, which is used by c_to_k when optimizing stiffness.'''

# Null space of Structure Matrix
N1 = scipy.linalg.null_space(self.StructureMatrix)#, rcond = 0.0001)
N1 = scipy.linalg.null_space(self.StructureMatrix, rcond=rcond)

# null space of N1 augmented with tension matrix
N2 = scipy.linalg.null_space(np.hstack([N1, -self.TensionMatrix])) #, rcond = 0.0001)
N2 = scipy.linalg.null_space(np.hstack([N1, -self.TensionMatrix]), rcond=rcond)
#N2 = scipy.linalg.null_space(np.append(N1, -self.TensionMatrix,1))#, rcond = 0.0001)

# nullspace matrix containing basis vectors of valid line weight solutions for equilibrium given line groupings
Expand Down
116 changes: 113 additions & 3 deletions fad/geography.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,8 @@ def getLeaseCoords(lease_name):
raise ValueError(f"The lease area name '{lease_area}' is not supported yet")

# extract the longitude and latitude coordinates of the lease area
#area_longs, area_lats = lease_area.geometry.unary_union.exterior.coords.xy
area_longs, area_lats = lease_area.geometry.union_all().exterior.coords.xy

area_longs, area_lats = lease_area.geometry.unary_union.exterior.coords.xy
#area_longs, area_lats = lease_area.geometry.union_all().exterior.coords.xy

# calculate the centroid of the lease area
centroid = ( lease_area.geometry.centroid.values.x[0], lease_area.geometry.centroid.values.y[0] )
Expand Down Expand Up @@ -346,6 +345,14 @@ def writeBathymetryFile(moorpy_bathymetry_filename, bathXs, bathYs, bath_depths,
else:
f.write(f'{bath_depths[iy,id]:8.3f} ')
f.write('\n')
if soil: # add placeholder soil properties - these will need to be adjusted with future changes
f.write('--- SOIL TYPES ---\n')
f.write('Class\t\tGamma\tSu0\tk\talpha\tphi\tUCS\tEm\n')
f.write('(name)\t\t(kN/m^3)\t(kPa)\t(kPa/m)\t(-)\t(deg)\t(MPa)\t(MPa)\n')
f.write('mud\t\t4.7\t2.39\t1.41\t0.7\t-\t-\t-\n')
f.write('mud_firm\t4.7\t23.94\t2.67\t0.7\t-\t-\t-\n')
f.write('hard\t\t-\t-\t-\t-\t-\t7\t50\n')
f.write('------------------\n')
#for i, y in enumerate(grid_y): # alternative writing version
#row = [y] + list(grid_depth[i, :])
#f.write(" ".join(map(str, row)) + "\n")
Expand Down Expand Up @@ -602,6 +609,109 @@ def getSoilGrid(centroid, latlong_crs, custom_crs, soil_file, nrows=100, ncols=1
return xs, ys, soil_grid


def loadSpeciesData(species_filename, centroid, latlong_crs, custom_crs, grid_x=None, grid_y=None, grid_depth=None):
    '''Load species data from CSV file with latitude/longitude coordinates and map to grid

    Parameters
    ----------
    species_filename : str
        Path to CSV file containing species data.
        Required to be in the format from https://www.ncei.noaa.gov/maps/deep-sea-corals-portal/?page=Page&views=Basic%2CSummary
    centroid : tuple
        Project centroid (lon, lat) for coordinate conversion
    latlong_crs : pyproj.CRS
        Latitude/longitude coordinate reference system
    custom_crs : str or pyproj.CRS
        Target coordinate reference system for conversion
    grid_x : array, optional
        X-coordinates of grid lines [m]. If None, grid-bounds filtering is skipped.
    grid_y : array, optional
        Y-coordinates of grid lines [m]. If None, grid-bounds filtering is skipped.
    grid_depth : array, optional
        Depth grid. Currently unused; reserved for future cross-referencing of
        observations against the bathymetry grid.

    Returns
    -------
    species_data : pandas.DataFrame or None
        Loaded species data with columns added: x_local, y_local,
        accuracy_m, obs_date. Returns None if no valid records remain
        after cleaning/filtering.

    Raises
    ------
    ValueError
        If any required column is missing from the CSV file.
    '''

    required_columns = ['VernacularNameCategory', 'latitude (degrees_north)',
                        'longitude (degrees_east)', 'DepthInMeters (m)', 'ObservationDate', 'LocationAccuracy (m)']

    # Load CSV data
    species_data = pd.read_csv(species_filename)

    # Check that required columns exist
    missing_columns = [col for col in required_columns if col not in species_data.columns]
    if missing_columns:
        raise ValueError(f"Missing required columns in CSV file: {missing_columns}")

    # Remove any rows with missing coordinates or species data
    initial_count = len(species_data)
    species_data = species_data.dropna(subset=['latitude (degrees_north)',
                                               'longitude (degrees_east)',
                                               'VernacularNameCategory'])
    final_count = len(species_data)

    if final_count < initial_count:
        print(f"Removed {initial_count - final_count} rows with missing coordinate or species data")

    if final_count == 0:
        print("Warning: No valid species data found after removing missing data")
        return None

    # Convert lat/lon to local coordinates relative to the project centroid
    xs, ys = convertLatLong2Meters(
        longs=species_data['longitude (degrees_east)'].values,
        lats=species_data['latitude (degrees_north)'].values,
        centroid=centroid,
        latlong_crs=latlong_crs,
        target_crs=custom_crs
    )

    # Store local coordinates in the dataframe
    species_data = species_data.copy()  # Avoid SettingWithCopyWarning
    species_data['x_local'] = xs
    species_data['y_local'] = ys

    # Remove species data that falls outside the grid bounds (if grid provided)
    if grid_x is not None and grid_y is not None:
        x_min, x_max = grid_x[0], grid_x[-1]
        y_min, y_max = grid_y[0], grid_y[-1]
        before_trim = len(species_data)
        species_data = species_data[
            (species_data['x_local'] >= x_min) & (species_data['x_local'] <= x_max) &
            (species_data['y_local'] >= y_min) & (species_data['y_local'] <= y_max)
        ].copy()
        after_trim = len(species_data)
        if after_trim < before_trim:
            print(f"Removed {before_trim - after_trim} records outside grid bounds")

    if len(species_data) == 0:
        print("Warning: No species data remaining after grid-bounds filtering")
        return None

    # Parse LocationAccuracy into numeric meters (e.g., '>1000m' -> 1000);
    # unparseable or missing values become NaN rather than raising
    def parse_accuracy(val):
        if pd.isna(val):
            return np.nan
        s = str(val).strip().lower().replace('>', '').replace('m', '').strip()
        try:
            return float(s)
        except ValueError:
            return np.nan

    species_data['accuracy_m'] = species_data['LocationAccuracy (m)'].apply(parse_accuracy)

    # Parse ObservationDate to datetime; invalid dates become NaT (errors='coerce')
    species_data['obs_date'] = pd.to_datetime(
        species_data['ObservationDate'], errors='coerce')

    print(f"Successfully loaded {len(species_data)} species records")
    print(f"Unique species categories: {species_data['VernacularNameCategory'].nunique()}")

    return species_data


if __name__ == '__main__':
Expand Down
Loading
Loading