import sys
from optparse import OptionParser
from datetime import datetime

import netCDF4


def mark_domain_boundaries_dirichlet():
    """
    Mark all boundary cells in a domain as Dirichlet velocity boundaries.

    A cell is treated as a boundary cell when any of its ``cellsOnCell``
    neighbor entries is 0 (MPAS uses index 0 to flag a missing neighbor).
    For every such cell, ``dirichletVelocityMask`` is set to 1 at all
    vertical levels for the requested time level.  The file named by
    ``--file`` is modified in place and its ``history`` attribute is
    updated with the invoking command line.
    """

    print("== Gathering information. (Invoke with --help for more details. "
          "All arguments are optional)\n")
    parser = OptionParser()
    parser.description = __doc__
    parser.add_option(
        "-f", "--file", dest="inputFile",
        help="name of file to be modified.",
        default="landice_grid.nc", metavar="FILENAME")
    parser.add_option(
        "-t", "--time", dest="time",
        help="time level to modify",
        # metavar was "FILENAME" (copy-paste from --file); fixed so --help
        # renders "-t TIME" instead of "-t FILENAME".
        default=0, type="int", metavar="TIME")
    # Append the default value to each option's help text unless optparse's
    # NO_DEFAULT sentinel ("NO", "DEFAULT") indicates there is none.
    for option in parser.option_list:
        if option.default != ("NO", "DEFAULT"):
            option.help += (" " if option.help else "") + "[default: %default]"
    options, args = parser.parse_args()

    print(" Input file: {}".format(options.inputFile))
    print(" Time level: {}".format(options.time))

    f = netCDF4.Dataset(options.inputFile, 'r+')
    nCells = len(f.dimensions['nCells'])
    mask = f.variables['dirichletVelocityMask'][options.time, :, :]
    cONc = f.variables['cellsOnCell'][:]
    nEdgesOnCell = f.variables['nEdgesOnCell'][:]

    mask[:] = 0
    for i in range(nCells):
        nE = nEdgesOnCell[i]
        # Only the first nE entries of cellsOnCell are valid; a value of 0
        # among them means "no neighbor", i.e. this cell touches the domain
        # boundary.
        if min(cONc[i, :nE]) == 0:
            mask[i, :] = 1
    f.variables['dirichletVelocityMask'][options.time, :, :] = mask[:]

    # Update history attribute of netCDF file
    thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \
        " ".join(sys.argv[:])
    if hasattr(f, 'history'):
        newhist = '\n'.join([thiscommand, getattr(f, 'history')])
    else:
        newhist = thiscommand
    setattr(f, 'history', newhist)

    f.close()

    print('\nMarking boundary cells completed.')
import sys
import numpy
from netCDF4 import Dataset
from optparse import OptionParser
from datetime import datetime

from mpas_tools.cime.constants import constants


def create_from_generic_mpas_grid():
    """
    Script to create a grid with land ice variables from an MPAS grid.

    Currently variable attributes are not copied.
    This script could be modified to copy them (looping over dir(var), skipping
    over variable function names "assignValue", "getValue", "typecode").
    """

    # earth radius, if needed
    sphere_radius = constants['SHR_CONST_REARTH']

    print("** Gathering information. (Invoke with --help for more details. "
          "All arguments are optional)")
    parser = OptionParser()
    parser.add_option(
        "-i", "--in", dest="fileinName",
        help="input filename. Defaults to 'grid.nc'",
        metavar="FILENAME")
    parser.add_option(
        "-o", "--out", dest="fileoutName",
        help="output filename. Defaults to 'landice_grid.nc'",
        metavar="FILENAME")
    parser.add_option(
        "-l", "--level", dest="levels",
        help="Number of vertical levels to use in the output file. "
             "Defaults to the number in the input file",
        # metavar was "FILENAME" (copy-paste error); fixed for --help output.
        metavar="LEVELS")
    parser.add_option(
        "-v", "--vert", dest="vertMethod",
        help="Method of vertical layer spacing: uniform, glimmer. "
             "Glimmer spacing follows Eq. 35 of Rutt, I. C., M. Hagdorn, "
             "N. R. J. Hulton, and A. J. Payne (2009), The Glimmer community "
             "ice sheet model, J. Geophys. Res., 114, F02004, "
             "doi:10.1029/2008JF001015",
        # metavar was "FILENAME" (copy-paste error); fixed for --help output.
        default='glimmer', metavar="METHOD")
    parser.add_option(
        "--beta", dest="beta", action="store_true",
        help="DEPRECATED")
    parser.add_option(
        "--effecpress", dest="effecpress", action="store_true",
        help="DEPRECATED")
    parser.add_option(
        "--diri", dest="dirichlet", action="store_true",
        help="Use this flag to include the fields 'dirichletVelocityMask', "
             "'uReconstructX', 'uReconstructY' needed for specifying Dirichlet "
             "velocity boundary conditions in the resulting file.")
    parser.add_option(
        "--thermal", dest="thermal", action="store_true",
        help="Use this flag to include the fields 'temperature', "
             "'surfaceAirTemperature', 'basalHeatFlux' needed for specifying "
             "thermal initial conditions in the resulting file.")
    parser.add_option(
        "--hydro", dest="hydro", action="store_true",
        help="Use this flag to include the fields 'waterThickness', "
             "'tillWaterThickness', 'basalMeltInput', 'externalWaterInput', "
             "'frictionAngle', 'waterPressure', 'waterFluxMask' needed for "
             "specifying hydro initial conditions in the resulting file.")
    parser.add_option(
        "--obs", dest="obs", action="store_true",
        help="Use this flag to include the observational fields "
             "observedSurfaceVelocityX, observedSurfaceVelocityY, "
             "observedSurfaceVelocityUncertainty, observedThicknessTendency, "
             "observedThicknessTendencyUncertainty, thicknessUncertainty "
             "needed doing optimizations constrained by obs velocities.")
    options, args = parser.parse_args()

    if not options.fileinName:
        print("No input filename specified, so using 'grid.nc'.")
        options.fileinName = 'grid.nc'
    else:
        print("Input file is: {}".format(options.fileinName))
    if not options.fileoutName:
        print("No output filename specified, so using 'landice_grid.nc'.")
        options.fileoutName = 'landice_grid.nc'
    # make a space in stdout before further output
    print('')

    # Get the input file
    filein = Dataset(options.fileinName, 'r')

    # Define the new file to be output
    fileout = Dataset(options.fileoutName, "w", format=filein.file_format)

    # ============================================
    # Copy over all of the netcdf global attributes
    # ============================================
    # Do this first as doing it last is slow for big files since adding
    # attributes forces the contents to get reorganized.
    print("---- Copying global attributes from input file to output file ----")
    for name in filein.ncattrs():
        # sphere radius needs to be set to that of the earth if on a sphere
        if name == 'sphere_radius' and \
                getattr(filein, 'on_a_sphere') == "YES ":
            setattr(fileout, 'sphere_radius', sphere_radius)
            print(f'Set global attribute sphere_radius = {sphere_radius}')
        elif name == 'history':
            # Update history attribute of netCDF file
            newhist = '\n'.join([getattr(filein, 'history'),
                                 ' '.join(sys.argv[:])])
            setattr(fileout, 'history', newhist)
        else:
            # Otherwise simply copy the attr
            setattr(fileout, name, getattr(filein, name))
            print(f'Copied global attribute {name} = {getattr(filein, name)}')

    # Update history attribute of netCDF file if we didn't above.
    # Join argv into a single string; previously the raw sys.argv list was
    # stored, which writes a string-array attribute instead of one string
    # (inconsistent with every other history update in this module).
    if not hasattr(fileout, 'history'):
        setattr(fileout, 'history', ' '.join(sys.argv[:]))

    fileout.sync()
    # make a space in stdout before further output
    print('')

    # ============================================
    # Copy over all the dimensions to the new file
    # ============================================
    # Note: looping over dimensions seems to result in them being written in
    #       seemingly random order. don't think this matters but it is not
    #       aesthetically pleasing. It may be better to list them explicitly
    #       as I do for the grid variables, but this way ensures they all get
    #       included and is easier.
    # Note: The UNLIMITED time dimension will return a dimension value of None
    #       with Scientific.IO. This is what is supposed to happen. See below
    #       for how to deal with assigning values to a variable with a
    #       unlimited dimension. Special handling is needed with the netCDF
    #       module.
    print("---- Copying dimensions from input file to output file ----")
    for dim in filein.dimensions.keys():
        if dim == 'nTracers':
            pass  # Do nothing - we don't want this dimension
        elif (dim == 'nVertInterfaces'):
            pass  # Do nothing - this dimension will be handled below
        else:  # Copy over all other dimensions
            if dim == 'Time':
                # netCDF4 won't properly get this with the command below (you
                # need to use the isunlimited method)
                dimvalue = None
            elif (dim == 'nVertLevels'):
                if options.levels is None:
                    # If nVertLevels is in the input file, and a value for it
                    # was not specified on the command line, then use the value
                    # from the file (do nothing here)
                    print(f"Using nVertLevels from the input file: "
                          f"{len(filein.dimensions[dim])}")
                    dimvalue = len(filein.dimensions[dim])
                else:
                    # if nVertLevels is in the input file, but a value WAS
                    # specified on the command line, then use the command line
                    # value
                    print(f"Using nVertLevels specified on the command line: "
                          f"{int(options.levels)}")
                    dimvalue = int(options.levels)
            else:
                dimvalue = len(filein.dimensions[dim])
            fileout.createDimension(dim, dimvalue)
    # There may be input files that do not have nVertLevels specified, in
    # which case it has not been added to the output file yet. Treat those
    # here.
    if 'nVertLevels' not in fileout.dimensions:
        if options.levels is None:
            print("nVertLevels not in input file and not specified. Using "
                  "default value of 10.")
            fileout.createDimension('nVertLevels', 10)
        else:
            print(f"Using nVertLevels specified on the command line: "
                  f"{int(options.levels)}")
            fileout.createDimension('nVertLevels', int(options.levels))
    # Also create the nVertInterfaces dimension, even if none of the variables
    # require it.
    # nVertInterfaces = nVertLevels + 1
    fileout.createDimension('nVertInterfaces',
                            len(fileout.dimensions['nVertLevels']) + 1)
    print(f'Added new dimension nVertInterfaces to output file with value of '
          f'{len(fileout.dimensions["nVertInterfaces"])}')

    fileout.sync()
    # include an extra blank line here
    print('Finished creating dimensions in output file.\n')

    # ============================================
    # Copy over all of the required grid variables to the new file
    # ============================================
    print("Beginning to copy mesh variables to output file.")
    vars2copy = ['latCell', 'lonCell', 'xCell', 'yCell', 'zCell',
                 'indexToCellID', 'latEdge', 'lonEdge', 'xEdge', 'yEdge',
                 'zEdge', 'indexToEdgeID', 'latVertex', 'lonVertex', 'xVertex',
                 'yVertex', 'zVertex', 'indexToVertexID', 'cellsOnEdge',
                 'nEdgesOnCell', 'nEdgesOnEdge', 'edgesOnCell', 'edgesOnEdge',
                 'weightsOnEdge', 'dvEdge', 'dcEdge', 'angleEdge', 'areaCell',
                 'areaTriangle', 'cellsOnCell', 'verticesOnCell',
                 'verticesOnEdge', 'edgesOnVertex', 'cellsOnVertex',
                 'kiteAreasOnVertex']
    # Add these optional fields if they exist in the input file
    for optionalVar in ['meshDensity', 'gridSpacing', 'cellQuality',
                        'triangleQuality', 'triangleAngleQuality',
                        'obtuseTriangle']:
        if optionalVar in filein.variables:
            vars2copy.append(optionalVar)

    # Print a crude text progress scale: one "- " per variable to copy,
    # matched below by one "* " per variable copied.
    for varname in vars2copy:
        print("- ", end='')
    print("|")
    for varname in vars2copy:
        thevar = filein.variables[varname]
        datatype = thevar.dtype
        newVar = fileout.createVariable(varname, datatype, thevar.dimensions)
        if filein.on_a_sphere == "YES ":
            # Rescale coordinates/lengths linearly and areas quadratically
            # from the input sphere radius to the Earth radius.
            if varname in ('xCell', 'yCell', 'zCell', 'xEdge', 'yEdge',
                           'zEdge', 'xVertex', 'yVertex', 'zVertex', 'dvEdge',
                           'dcEdge'):
                newVar[:] = thevar[:] * sphere_radius / filein.sphere_radius
            elif varname in ('areaCell', 'areaTriangle', 'kiteAreasOnVertex'):
                newVar[:] = (
                    thevar[:] * (sphere_radius / filein.sphere_radius)**2)
            else:
                newVar[:] = thevar[:]
        else:
            # not on a sphere
            newVar[:] = thevar[:]
        del newVar, thevar
        sys.stdout.write("* ")
        sys.stdout.flush()
    fileout.sync()
    print("|")
    print("Finished copying mesh variables to output file.\n")

    # ============================================
    # Create the land ice variables (all the shallow water vars in the input
    # file can be ignored)
    # ============================================
    nVertLevels = len(fileout.dimensions['nVertLevels'])
    # Get the datatype for double precision float
    datatype = filein.variables['xCell'].dtype
    # Get the datatype for integers
    datatypeInt = filein.variables['indexToCellID'].dtype
    # Note: it may be necessary to make sure the Time dimension has size 1,
    #       rather than the 0 it defaults to. For now, letting it be 0 which
    #       seems to be fine.

    # layerThicknessFractions
    layerThicknessFractions = fileout.createVariable('layerThicknessFractions',
                                                     datatype,
                                                     ('nVertLevels', ))
    layerThicknessFractionsData = numpy.zeros(layerThicknessFractions.shape)
    # Assign default values to layerThicknessFractions. By default they will
    # be uniform fractions. Users can modify them in a subsequent step, but
    # doing this here ensures the most likely values are already assigned.
    # (Useful for e.g. setting up Greenland where the state variables are
    # copied over but the grid variables are not modified.)
    if options.vertMethod == 'uniform':
        layerThicknessFractionsData[:] = 1.0 / nVertLevels
    elif options.vertMethod == 'glimmer':
        # Glimmer spacing, Eq. 35 of Rutt et al. (2009): finer layers near
        # the surface. The "(k + 1.0 - 1.0)" term is the 0-based index k
        # written to mirror the paper's 1-based formula.
        nInterfaces = nVertLevels + 1
        layerInterfaces = numpy.zeros((nInterfaces,))
        for k in range(nInterfaces):
            layerInterfaces[k] = 4.0 / 3.0 * (
                1.0 - ((k + 1.0 - 1.0) / (nInterfaces - 1.0) + 1.0)**-2)
        for k in range(nVertLevels):
            layerThicknessFractionsData[k] = (
                layerInterfaces[k+1] - layerInterfaces[k])
        print(f"Setting layerThicknessFractions to: "
              f"{layerThicknessFractionsData}")
    else:
        raise ValueError(f'Unknown method for vertical spacing method '
                         f'(--vert): {options.vertMethod}')

    # explictly specify layer fractions
    # layerThicknessFractionsData[:] = [0.1663,0.1516,0.1368,0.1221,0.1074,0.0926,0.0779,0.0632,0.0484,0.0337]

    layerThicknessFractions[:] = layerThicknessFractionsData[:]

    # With Scientific.IO.netCDF, entries are appended along the unlimited
    # dimension one at a time by assigning to a slice. Therefore we need to
    # assign to time level 0, and what we need to assign is a zeros array that
    # is the shape of the new variable, exluding the time dimension!
    newvar = fileout.createVariable('thickness', datatype, ('Time', 'nCells'))
    newvar[0, :] = numpy.zeros(newvar.shape[1:])
    # These landice variables are stored in the mesh currently, and therefore
    # do not have a time dimension. It may make sense to eventually move them
    # to state.
    newvar = fileout.createVariable('bedTopography', datatype,
                                    ('Time', 'nCells'))
    newvar[:] = numpy.zeros(newvar.shape)
    newvar = fileout.createVariable('sfcMassBal', datatype,
                                    ('Time', 'nCells'))
    newvar[:] = numpy.zeros(newvar.shape)
    newvar = fileout.createVariable('floatingBasalMassBal', datatype,
                                    ('Time', 'nCells'))
    newvar[:] = numpy.zeros(newvar.shape)
    # Message previously also listed 'temperature', which is only created
    # under --thermal below; list only the variables actually added here.
    print('Added default variables: thickness, bedTopography, '
          'sfcMassBal, floatingBasalMassBal')

    newvar = fileout.createVariable('beta', datatype,
                                    ('Time', 'nCells'))
    # Give a default beta that won't have much sliding.
    newvar[:] = 1.0e8
    print('Added variable: beta')

    newvar = fileout.createVariable('muFriction', datatype,
                                    ('Time', 'nCells'))
    # Give a default mu that won't have much sliding.
    newvar[:] = 1.0e8
    print('Added variable: muFriction')

    newvar = fileout.createVariable('effectivePressure', datatype,
                                    ('Time', 'nCells'))
    # Give a default effective pressure of 1.0 so that, for the linear sliding
    # law, beta = mu*effecpress = mu.
    newvar[:] = 1.0
    print('Added variable: effectivePressure')

    newvar = fileout.createVariable('stiffnessFactor', datatype,
                                    ('Time', 'nCells'))
    # Give default value
    newvar[:] = 1.0
    print('Added variable: stiffnessFactor')

    newvar = fileout.createVariable('eigencalvingParameter', datatype,
                                    ('Time', 'nCells'))
    # Give default value for eigencalvingParameter
    newvar[:] = 3.14e16
    print('Added variable: eigencalvingParameter')

    newvar = fileout.createVariable('groundedVonMisesThresholdStress',
                                    datatype, ('Time', 'nCells'))
    # Give default value
    newvar[:] = 1.0e6
    print('Added variable: groundedVonMisesThresholdStress')

    newvar = fileout.createVariable('floatingVonMisesThresholdStress',
                                    datatype, ('Time', 'nCells'))
    # Give default value
    newvar[:] = 1.0e6
    print('Added variable: floatingVonMisesThresholdStress')

    newvar = fileout.createVariable('iceMask', datatype,
                                    ('Time', 'nCells'))
    newvar[:] = 0
    print('Added variable: iceMask')

    if options.dirichlet:
        newvar = fileout.createVariable('dirichletVelocityMask', datatypeInt,
                                        ('Time', 'nCells', 'nVertInterfaces'))
        # default: no Dirichlet b.c.
        newvar[:] = 0
        newvar = fileout.createVariable('uReconstructX', datatype,
                                        ('Time', 'nCells', 'nVertInterfaces',))
        newvar[:] = 0.0
        newvar = fileout.createVariable('uReconstructY', datatype,
                                        ('Time', 'nCells', 'nVertInterfaces',))
        newvar[:] = 0.0
        print('Added optional dirichlet variables: dirichletVelocityMask, '
              'uReconstructX, uReconstructY')

    if options.thermal:
        newvar = fileout.createVariable('temperature', datatype,
                                        ('Time', 'nCells', 'nVertLevels'))
        # Give default value for temperate ice
        newvar[:] = 273.15
        newvar = fileout.createVariable('surfaceAirTemperature', datatype,
                                        ('Time', 'nCells'))
        # Give default value for temperate ice
        newvar[:] = 273.15
        newvar = fileout.createVariable('basalHeatFlux', datatype,
                                        ('Time', 'nCells'))
        # Default to none (W/m2)
        newvar[:] = 0.0
        print('Added optional thermal variables: temperature, '
              'surfaceAirTemperature, basalHeatFlux')

    if options.hydro:
        newvar = fileout.createVariable('waterThickness', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('tillWaterThickness', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('basalMeltInput', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('externalWaterInput', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('frictionAngle', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('waterPressure', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('waterFluxMask', 'i',
                                        ('Time', 'nEdges'))
        newvar[:] = 0.0
        # Message previously listed a nonexistent 'meltInput'; name the
        # variables that were actually created.
        print('Added optional hydro variables: waterThickness, '
              'tillWaterThickness, basalMeltInput, externalWaterInput, '
              'frictionAngle, waterPressure, waterFluxMask')

    if options.obs:
        newvar = fileout.createVariable('observedSurfaceVelocityX', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('observedSurfaceVelocityY', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('observedSurfaceVelocityUncertainty',
                                        datatype, ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('observedThicknessTendency', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('observedThicknessTendencyUncertainty',
                                        datatype, ('Time', 'nCells'))
        newvar[:] = 0.0
        newvar = fileout.createVariable('thicknessUncertainty', datatype,
                                        ('Time', 'nCells'))
        newvar[:] = 0.0
        print('Added optional velocity optimization variables: '
              'observedSurfaceVelocityX, observedSurfaceVelocityY, '
              'observedSurfaceVelocityUncertainty, observedThicknessTendency, '
              'observedThicknessTendencyUncertainty, thicknessUncertainty')

    # Update history attribute of netCDF file
    thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \
        " ".join(sys.argv[:])
    if hasattr(fileout, 'history'):
        newhist = '\n'.join([thiscommand, getattr(fileout, 'history')])
    else:
        newhist = thiscommand
    setattr(fileout, 'history', newhist)

    print("Completed creating land ice variables in new file. Now syncing to "
          "file.")
    fileout.sync()

    filein.close()
    fileout.close()

    print('\n** Successfully created {}.**'.format(options.fileoutName))
+ """ + + print("** Gathering information.") + parser = OptionParser() + parser.add_option( + "-f", "--file", dest="file", + help="grid file to modify; default: landice_grid.nc", + metavar="FILE") + parser.add_option( + "-m", "--method", dest="method", + help="method to use for marking cells to cull. Supported methods: " + "'noIce', 'numCells', 'distance', 'radius', 'edgeFraction'", + metavar="METHOD") + parser.add_option( + "-n", "--numCells", dest="numCells", default=5, + help="number of cells to keep beyond ice extent", + metavar="NUM") + parser.add_option( + "-d", "--distance", dest="distance", default=50, + help="numeric value to use for the various methods: distance " + "method->distance (km), radius method->radius (km), edgeFraction " + "method->fraction of width or height", + metavar="DIST") + parser.add_option( + "-p", "--plot", dest="makePlot", + help="Include to have the script generate a plot of the resulting " + "mask, default=false", + default=False, action="store_true") + options, args = parser.parse_args() + + if not options.file: + print("No grid filename provided. Using landice_grid.nc.") + options.file = "landice_grid.nc" + + if not options.method: + raise ValueError("No method selected for choosing cells to mark for " + "culling") + else: + maskmethod = options.method + + f = NetCDFFile(options.file, 'r+') + + xCell = f.variables['xCell'][:] + yCell = f.variables['yCell'][:] + nCells = len(f.dimensions['nCells']) + + # -- Get needed fields from the file -- + + # Initialize to cull no cells + cullCell = np.zeros((nCells, ), dtype=np.int8) + + thicknessMissing = True + try: + thickness = f.variables['thickness'][0, :] + print('Using thickness field at time 0') + thicknessMissing = False + except KeyError: + print("The field 'thickness' is not available. 
Some culling methods " + "will not work.") + + # ===== Various methods for defining the mask ==== + + # ========= + # only keep cells with ice + if maskmethod == 'noIce': + print("Method: remove cells without ice") + if thicknessMissing: + raise ValueError("Unable to perform 'numCells' method because " + "thickness field was missing.") + + cullCell[thickness == 0.0] = 1 + + # ========= + # add a buffer of X cells around the ice + elif maskmethod == 'numCells': + print("Method: remove cells beyond a certain number of cells from " + "existing ice") + + if thicknessMissing: + raise ValueError("Unable to perform 'numCells' method because " + "thickness field was missing.") + + buffersize = int(options.numCells) # number of cells to expand + print("Using a buffer of {} cells".format(buffersize)) + + keepCellMask = np.copy(cullCell[:]) + keepCellMask[:] = 0 + cellsOnCell = f.variables['cellsOnCell'][:] + nEdgesOnCell = f.variables['nEdgesOnCell'][:] + + # mark the cells with ice first + keepCellMask[thickness > 0.0] = 1 + print('Num of cells with ice: {}'.format(sum(keepCellMask))) + + for i in range(buffersize): + print(f'Starting buffer loop {i + 1}') + # make a copy to edit that can be edited without changing the + # original + keepCellMaskNew = np.copy(keepCellMask) + ind = np.nonzero(keepCellMask == 0)[0] + for i in range(len(ind)): + iCell = ind[i] + neighbors = cellsOnCell[iCell, :nEdgesOnCell[iCell]] - 1 + keep = False + for n in neighbors: + if n >= 0: + if keepCellMask[n] == 1: + keepCellMaskNew[iCell] = 1 + # after we've looped over all cells assign the new mask to the variable + # we need (either for another loop around the domain or to write out) + keepCellMask = np.copy(keepCellMaskNew) + print(f'Num of cells to keep: {keepCellMask.sum()}') + + # Now convert the keepCellMask to the cullMask + # Flip the mask for which ones to cull + cullCell[:] = np.absolute(keepCellMask[:]-1) + + # ========= + # remove cells beyond a certain distance of ice extent + elif 
maskmethod == 'distance': + + print("Method: remove cells beyond a certain distance from existing " + "ice") + + if thicknessMissing: + raise ValueError("Unable to perform 'numCells' method because " + "thickness field was missing.") + + dist = float(options.distance) + print(f"Using a buffer distance of {dist} km") + # convert to m + dist = dist * 1000.0 + + keepCellMask = np.copy(cullCell[:]) + keepCellMask[:] = 0 + cellsOnCell = f.variables['cellsOnCell'][:] + nEdgesOnCell = f.variables['nEdgesOnCell'][:] + xCell = f.variables['xCell'][:] + yCell = f.variables['yCell'][:] + + # mark the cells with ice first + keepCellMask[thickness > 0.0] = 1 + print('Num of cells with ice: {}'.format(sum(keepCellMask))) + + # find list of margin cells + iceCells = np.nonzero(keepCellMask == 1)[0] + marginMask = np.zeros((nCells, ), dtype=np.int8) + for i in range(len(iceCells)): + iCell = iceCells[i] + # the -1 converts from the fortran indexing in the variable to + # python indexing + for neighbor in cellsOnCell[iCell, :nEdgesOnCell[iCell]] - 1: + if thickness[neighbor] == 0.0: + marginMask[iCell] = 1 + continue # stop searching neighbors + + # loop over margin cells + marginCells = np.nonzero(marginMask == 1)[0] + for i in range(len(marginCells)): + iCell = marginCells[i] + # for each margin cell, find all cells within specified distance + ind = np.nonzero(((xCell - xCell[iCell])**2 + (yCell - yCell[iCell])**2)**0.5 < dist)[0] + keepCellMask[ind] = 1 + + print(f'Num of cells to keep: {keepCellMask.sum()}') + + # Now convert the keepCellMask to the cullMask + # Flip the mask for which ones to cull + cullCell[:] = np.absolute(keepCellMask[:] - 1) + + # ========= + # cut out beyond some radius (good for the dome) + elif maskmethod == 'radius': + dist = float(options.distance) + print(f"Method: remove cells beyond a radius of {dist} km from center " + f"of mesh") + xc = (xCell.max() - xCell.min()) / 2.0 + xCell.min() + yc = (yCell.max() - yCell.min()) / 2.0 + yCell.min() + ind = 
np.nonzero(((xCell[:] - xc)**2 + (yCell[:] - yc)**2)**0.5 > dist*1000.0) + cullCell[ind] = 1 + + # ========= + # cut off some fraction of the height/width on all 4 sides - useful for + # cleaning up a mesh from periodic_general + elif maskmethod == 'edgeFraction': + frac = float(options.distance) + print("Method: remove a fraction from all 4 edges of {}".format(frac)) + if frac >= 0.5: + raise ValueError("fraction cannot be >=0.5.") + if frac < 0.0: + raise ValueError("fraction cannot be <0.") + + cullCell[:] = 0 + width = xCell.max() - xCell.min() + height = yCell.max() - yCell.min() + ind = np.nonzero(xCell[:] < (xCell.min() + width * frac)) + cullCell[ind] = 1 + ind = np.nonzero(xCell[:] > (xCell.max() - width * frac)) + cullCell[ind] = 1 + ind = np.nonzero(yCell[:] < (yCell.min() + height * frac)) + cullCell[ind] = 1 + ind = np.nonzero(yCell[:] > (yCell.max() - height * frac)) + cullCell[ind] = 1 + + # ========= + else: + raise ValueError("no valid culling method selected.") + # ========= + + print(f'Num of cells to cull: {sum(cullCell[:])}') + + # ========= + # Try to add the new variable + if 'cullCell' not in f.variables: + # Get the datatype for integer + datatype = f.variables['indexToCellID'].dtype + f.createVariable('cullCell', datatype, ('nCells',)) + f.variables['cullCell'][:] = cullCell + + # Update history attribute of netCDF file + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \ + " ".join(sys.argv[:]) + if hasattr(f, 'history'): + newhist = '\n'.join([thiscommand, getattr(f, 'history')]) + else: + newhist = thiscommand + setattr(f, 'history', newhist) + + # --- make a plot only if requested --- + if options.makePlot: + fig = plt.figure(1, facecolor='w') + ax = fig.add_subplot(111, aspect='equal') + plt.scatter(xCell[:], yCell[:], 50, cullCell[:], edgecolors='none') #, vmin=minval, vmax=maxval) + plt.colorbar() + plt.draw() + plt.show() + + f.close() + print("cullMask generation complete.") diff --git 
a/conda_package/mpas_tools/landice/interpolate.py b/conda_package/mpas_tools/landice/interpolate.py new file mode 100755 index 000000000..ed464e6d7 --- /dev/null +++ b/conda_package/mpas_tools/landice/interpolate.py @@ -0,0 +1,1301 @@ +import sys +import numpy as np +import netCDF4 +import argparse +import math +from collections import OrderedDict +import scipy.spatial +import scipy.sparse +import time +from datetime import datetime + + +def interpolate_to_mpasli_grid(): + """ + Interpolate fields from an input file to a pre-existing MPAS-LI grid. + + The input file can either be CISM format or MPASLI format. + + For CISM input files, three interpolation methods are supported: + * a built-in bilinear interpolation method + * a built-in barycentric interpolation method (nearest neighbor is used + for extrapolation regions) + * using weights generated by ESMF + + For MPAS input files only barycentric interpolation is supported. + + The ESMF interpolation method can be used to interpolate from history + files of other E3SM components by using the ESMF weight mapping files used + in the E3SM coupler. You will likely need to add your own variable + mappings in the dictionary for the 'other' filetype at the end of this + script. + + Multiple time levels can be interpolated using the --timestart and + --timeend options. Default is to copy the first time level from the source + file to the first time level of the destination file. If multiple time + levels are used, they are translated directly from the source to the + destination. + + NOTE: There is no processing of actual time stamps! + + NOTE: xtime is NOT copied and must be copied manually, if desired! 
+ """ + + # ---------------------------- + # ---------------------------- + # Define needed functions + # ---------------------------- + # ---------------------------- + + def _esmf_interp(sourceField): + # Interpolates from the sourceField to the destinationField using ESMF + # weights + destinationField = np.zeros(xCell.shape) # fields on cells only + try: + # Convert the source field into the SciPy Compressed Sparse Row + # matrix format. This needs some reshaping to get the matching + # dimensions + source_csr = scipy.sparse.csr_matrix( + sourceField.flatten()[:, np.newaxis]) + # Use SciPy CSR dot product - much faster than iterating over + # elements of the full matrix + destinationField = weights_csr.dot(source_csr).toarray().squeeze() + # For conserve remapping, need to normalize by destination area + # fraction. It should be safe to do this for other methods + ind = np.where(dst_frac > 0.0)[0] + destinationField[ind] /= dst_frac[ind] + except: + print('error in ESMF_interp') + return destinationField + + # ---------------------------- + + def _bilinear_interp(Value, gridType): + # Calculate bilinear interpolation of Value field from x, y to new + # ValueCell field (return value) at xCell, yCell + # This assumes that x, y, Value are regular CISM style grids and xCell, + # yCell, ValueCell are 1-D unstructured MPAS style grids + + ValueCell = np.zeros(xCell.shape) + + if gridType == 'x0': + x = x0 + y = y0 + elif gridType == 'x1': + x = x1 + y = y1 + else: + raise ValueError('unknown CISM grid type specified.') + dx = x[1] - x[0] + dy = y[1] - y[0] + for i in range(len(xCell)): + # Calculate the CISM grid cell indices (these are the lower index) + xgrid = int(math.floor((xCell[i]-x[0]) / dx)) + if xgrid >= len(x) - 1: + xgrid = len(x) - 2 + elif xgrid < 0: + xgrid = 0 + ygrid = int(math.floor((yCell[i]-y[0]) / dy)) + if ygrid >= len(y) - 1: + ygrid = len(y) - 2 + elif ygrid < 0: + ygrid = 0 + # print(xgrid, ygrid, i) + ValueCell[i] = ( + Value[ygrid, xgrid] * 
(x[xgrid+1] - xCell[i]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + + Value[ygrid+1, xgrid] * (x[xgrid+1] - xCell[i]) * (yCell[i] - y[ygrid]) / (dx * dy) + + Value[ygrid, xgrid+1] * (xCell[i] - x[xgrid]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + + Value[ygrid+1, xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy)) + return ValueCell + + # ---------------------------- + + def _delaunay_interp_weights(xy, uv, d=2): + """ + xy = input x,y coords + uv = output (MPSALI) x,y coords + """ + + # print("scipy version=", scipy.version.full_version) + if xy.shape[0] > 2**24-1: + print("WARNING: The source file contains more than 2^24-1 " + "(16,777,215) points due to a limitation in older versions " + "of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). " + "Delaunay creation may fail if Qhull being linked by " + "scipy.spatial is older than v2015.0.1 2015/8/31.") + + tri = scipy.spatial.Delaunay(xy) + print(" Delaunay triangulation complete.") + simplex = tri.find_simplex(uv) + print(" find_simplex complete.") + vertices = np.take(tri.simplices, simplex, axis=0) + print(" identified vertices.") + temp = np.take(tri.transform, simplex, axis=0) + print(" np.take complete.") + delta = uv - temp[:, d] + bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) + print(" calculating bary complete.") + wts = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) + + # Now figure out if there is any extrapolation. + # Find indices to points of output file that are outside of convex + # hull of input points + outsideInd = np.nonzero(tri.find_simplex(uv) < 0) + outsideCoords = uv[outsideInd] + # print(outsideInd) + nExtrap = len(outsideInd[0]) + if nExtrap > 0: + print(f" Found {nExtrap} points requiring extrapolation. 
" + f"Using nearest neighbor extrapolation for those.") + + # Now find nearest neighbor for each outside point + # Use KDTree of input points + tree = scipy.spatial.cKDTree(xy) + + return vertices, wts, outsideInd, tree + + # ---------------------------- + + def _nn_interp_weights(xy, uv, d=2): + """ + xy = input x,y coords + uv = output (MPSALI) x,y coords + Note: could separate out building tree and interpolation for + efficiency if many fields need to be processed + """ + tree = scipy.spatial.cKDTree(xy) + # k is the number of nearest neighbors. + dist, idx = tree.query(uv, k=1) + # # 2d cism fields need to be flattened. (Note the indices were + # # flattened during init, so this just matches that operation for the + # # field data itself.) 1d mpas fields do not, but the operation won't + # # do anything because they are already flat. + # outfield = values.flatten()[idx] + return idx + + # ---------------------------- + + def _delaunay_interpolate(values, gridType): + if gridType == 'x0': + vtx = vtx0 + wts = wts0 + tree = treex0 + outsideInd = outsideIndx0 + elif gridType == 'x1': + vtx = vtx1 + wts = wts1 + outsideInd = outsideIndx1 + tree = treex1 + elif gridType == 'cell': + vtx = vtCell + wts = wtsCell + outsideInd = outsideIndcell + tree = treecell + else: + raise ValueError('unknown input file grid type specified.') + + outfield = np.einsum('nj,nj->n', np.take(values, vtx), wts) + + # Now apply nearest neighbor to points outside convex hull + # We could have this enabled/disabled with a command line option, but + # for now it will always be done. + # Note: the barycentric interp applied above could be restricted to the + # points inside the convex hull instead of being applied to ALL + # points as is currently implemented. However it is assumed that + # "redoing" the outside points has a small performance cost + # because there generally should be few such points and the + # implementation is much simpler this way. 
+ outsideCoord = mpasXY[outsideInd, :] + if len(outsideInd) > 0: + # k is the number of nearest neighbors. Could crank this up to 2 + # (and then average them) with some fiddling, but keeping it simple + # for now. + dist, idx = tree.query(outsideCoord, k=1) + + # 2d cism fields need to be flattened. (Note the indices were + # flattened during init, so this just matches that operation for + # the field data itself.) 1d mpas fields do not, but the operation + # won't do anything because they are already flat. + outfield[outsideInd] = values.flatten()[idx] + return outfield + + # ---------------------------- + + def _interpolate_field(MPASfieldName): + + if fieldInfo[MPASfieldName]['gridType'] == 'x0' and \ + args.interpType == 'e': + raise ValueError("This CISM field is on the staggered grid, and " + "currently this script does not support a second " + "ESMF weight file for the staggered grid.") + + InputFieldName = fieldInfo[MPASfieldName]['InputName'] + if filetype == 'cism': + if 'time' in inputFile.variables[InputFieldName].dimensions: + InputField = inputFile.variables[InputFieldName][timelev, :, :] + else: + if timelev > 0: + raise ValueError( + "--timestart and/or --timeend were specified but the " + "required time dimension of 'time' was not found in " + "the source file.") + InputField = inputFile.variables[InputFieldName][:, :] + elif filetype == 'mpas': + if 'Time' in inputFile.variables[InputFieldName].dimensions: + InputField = inputFile.variables[InputFieldName][timelev, :] + else: + if timelev > 0: + raise ValueError( + "--timestart and/or --timeend were specified but the " + "required time dimension of 'Time' was not found in " + "the source file.") + InputField = inputFile.variables[InputFieldName][:] + elif filetype == 'other': + if 'time' in inputFile.variables[InputFieldName].dimensions: + InputField = inputFile.variables[InputFieldName][timelev, :, :] + else: + if timelev > 0: + raise ValueError( + "--timestart and/or --timeend were specified 
but the " + "required time dimension of 'time' was not found in " + "the source file.") + InputField = inputFile.variables[InputFieldName][:, :] + + print(f' Input field {InputFieldName} min/max: {InputField.min()} ' + f'{InputField.max()}') + + # Call the appropriate routine for actually doing the interpolation + if args.interpType == 'b': + print(f" ...Interpolating to {MPASfieldName} using built-in " + f"bilinear method...") + MPASfield = _bilinear_interp( + InputField, fieldInfo[MPASfieldName]['gridType']) + elif args.interpType == 'd': + print(f" ...Interpolating to {MPASfieldName} using barycentric " + f"method...") + MPASfield = _delaunay_interpolate( + InputField, fieldInfo[MPASfieldName]['gridType']) + elif args.interpType == 'n': + print(f" ...Interpolating to {MPASfieldName} using nearest " + f"neighbor method...") + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + # 2d cism fields need to be flattened. (Note the indices were + # flattened during init, so this just matches that operation + # for the field data itself.) 1d mpas fields do not, but the + # operation won't do anything because they are already flat. + MPASfield = InputField.flatten()[nn_idx_x0] + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + # 2d cism fields need to be flattened. (Note the indices were + # flattened during init, so this just matches that operation + # for the field data itself.) 1d mpas fields do not, but the + # operation won't do anything because they are already flat. + MPASfield = InputField.flatten()[nn_idx_x1] + elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + # 2d cism fields need to be flattened. (Note the indices were + # flattened during init, so this just matches that operation + # for the field data itself.) 1d mpas fields do not, but the + # operation won't do anything because they are already flat. 
+ MPASfield = InputField.flatten()[nn_idx_cell] + elif args.interpType == 'e': + print(f" ...Interpolating to {MPASfieldName} using ESMF-weights " + f"method...") + MPASfield = _esmf_interp(InputField) + else: + raise ValueError('Unknown interpolation method specified') + + print(f' interpolated MPAS {MPASfieldName} min/max: ' + f'{MPASfield.min()} {MPASfield.max()}') + + if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: + MPASfield *= fieldInfo[MPASfieldName]['scalefactor'] + print(f' scaled MPAS {MPASfieldName} min/max: ' + f'{MPASfield.min()} {MPASfield.max()}') + if fieldInfo[MPASfieldName]['offset'] != 0.0: + MPASfield += fieldInfo[MPASfieldName]['offset'] + print(f' offset MPAS {MPASfieldName} min/max: ' + f'{MPASfield.min()} {MPASfield.max()}') + + return MPASfield + + # ---------------------------- + + def _interpolate_field_with_layers(MPASfieldName): + + if fieldInfo[MPASfieldName]['gridType'] == 'x0' and \ + args.interpType == 'e': + raise ValueError("This CISM field is on the staggered grid, and " + "currently this script does not support a second " + "ESMF weight file for the staggered grid.") + + InputFieldName = fieldInfo[MPASfieldName]['InputName'] + if filetype == 'cism': + if 'time' in inputFile.variables[InputFieldName].dimensions: + InputField = \ + inputFile.variables[InputFieldName][timelev, :, :, :] + else: + if timelev > 0: + raise ValueError( + "--timestart and/or --timeend were specified but the " + "required time dimension of 'time' was not found in " + "the source file.") + InputField = inputFile.variables[InputFieldName][:, :, :] + # vertical index is the first (since we've eliminated time already) + inputVerticalDimSize = InputField.shape[0] + # second dimension is the vertical one - get the name of it + layerFieldName = inputFile.variables[InputFieldName].dimensions[1] + input_layers = inputFile.variables[layerFieldName][:] + elif filetype == 'mpas': + if 'Time' in inputFile.variables[InputFieldName].dimensions: + InputField = 
inputFile.variables[InputFieldName][timelev, :, :] + else: + if timelev > 0: + raise ValueError( + "--timestart and/or --timeend were ""specified but " + "the required time dimension of 'Time' was not found " + "in the source file.") + InputField = inputFile.variables[InputFieldName][:, :] + # vertical index is the second (since we've eliminated time + # already) + inputVerticalDimSize = InputField.shape[1] + layerThicknessFractions = \ + inputFile.variables['layerThicknessFractions'][:] + # build MPAS sigma levels at center of each layer + input_layers = np.zeros((inputVerticalDimSize,)) + if inputVerticalDimSize == len(layerThicknessFractions): + print(" Using layer centers for the vertical coordinate of " + "this field.") + input_layers[0] = layerThicknessFractions[0] * 0.5 + for k in range(1, inputVerticalDimSize): + input_layers[k] = ( + input_layers[k-1] + + 0.5 * layerThicknessFractions[k-1] + + 0.5 * layerThicknessFractions[k]) + layerFieldName = \ + '(sigma levels calculated from layerThicknessFractions)' + elif inputVerticalDimSize == len(layerThicknessFractions)+1: + print(" Using layer interfaces for the vertical coordinate " + "of this field.") + input_layers[0] = 0.0 + for k in range(1, inputVerticalDimSize): + input_layers[k] = ( + input_layers[k-1] + layerThicknessFractions[k-1]) + else: + raise ValueError("\nUnknown vertical dimension for this " + "variable source file.") + else: + raise ValueError("Fields with vertical layers can only be " + "interpolated using the barycentric or bilinear " + "methods.") + + # create array for interpolated source field at all layers + # make it the size of the CISM vertical layers, but the MPAS horizontal + # locations + mpas_grid_input_layers = np.zeros((inputVerticalDimSize, nCells)) + + for z in range(inputVerticalDimSize): + if filetype == 'cism': + print( + f' Input layer {z}, layer {InputFieldName} min/max: ' + f'{InputField[z, :, :].min()} {InputField[z, :, :].max()}') + elif filetype == 'mpas': + print( + 
f' Input layer {z}, layer {InputFieldName} min/max: ' + f'{InputField[z, :, :].min()} {InputField[z, :, :].max()}') + # Call the appropriate routine for actually doing the + # interpolation + if args.interpType == 'b': + print(f" ...Layer {z}, Interpolating this layer to MPAS " + f"grid using built-in bilinear method...") + mpas_grid_input_layers[z, :] = _bilinear_interp( + InputField[z, :, :], + fieldInfo[MPASfieldName]['gridType']) + elif args.interpType == 'd': + print(f" ...Layer {z}, Interpolating this layer to MPAS " + f"grid using built-in barycentric method...") + if filetype == 'cism': + mpas_grid_input_layers[z, :] = _delaunay_interpolate( + InputField[z, :, :], + fieldInfo[MPASfieldName]['gridType']) + elif filetype == 'mpas': + mpas_grid_input_layers[z, :] = _delaunay_interpolate( + InputField[:, z], + fieldInfo[MPASfieldName]['gridType']) + elif args.interpType == 'n': + print(f" ...Layer {z}, Interpolating this layer to MPAS " + f"grid using nearest neighbor method...") + if fieldInfo[MPASfieldName]['gridType'] == 'x0': + # 2d cism fields need to be flattened. (Note the + # indices were flattened during init, so this just + # matches that operation for the field data itself.) + # 1d mpas fields do not, but the operation won't do + # anything because they are already flat. + mpas_grid_input_layers[z, :] = \ + InputField[z, :, :].flatten()[nn_idx_x0] + elif fieldInfo[MPASfieldName]['gridType'] == 'x1': + # 2d cism fields need to be flattened. (Note the + # indices were flattened during init, so this just + # matches that operation for the field data itself.) + # 1d mpas fields do not, but the operation won't do + # anything because they are already flat. + mpas_grid_input_layers[z, :] = \ + InputField[z, :, :].flatten()[nn_idx_x1] + elif fieldInfo[MPASfieldName]['gridType'] == 'cell': + # 2d cism fields need to be flattened. (Note the + # indices were flattened during init, so this just + # matches that operation for the field data itself.) 
+ # 1d mpas fields do not, but the operation won't do + # anything because they are already flat. + mpas_grid_input_layers[z, :] = \ + InputField[:, z].flatten()[nn_idx_cell] + elif args.interpType == 'e': + print(f" ...Layer {z}, Interpolating this layer to MPAS " + f"grid using ESMF-weights method...") + if filetype == 'cism': + mpas_grid_input_layers[z, :] = \ + _esmf_interp(InputField[z, :, :]) + elif filetype == 'mpas': + mpas_grid_input_layers[z, :] = \ + _esmf_interp(InputField[:, z]) + else: + raise ValueError('Unknown interpolation method specified') + print(f' interpolated MPAS {MPASfieldName}, layer {z} min/max: ' + f'{mpas_grid_input_layers[z, :].min()} ' + f'{mpas_grid_input_layers[z, :].max()}') + + if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: + mpas_grid_input_layers *= fieldInfo[MPASfieldName]['scalefactor'] + print(f' scaled MPAS {MPASfieldName} on CISM vertical layers, ' + f'min/max: {mpas_grid_input_layers.min()} ' + f'{mpas_grid_input_layers.max()}') + if fieldInfo[MPASfieldName]['offset'] != 0.0: + mpas_grid_input_layers += fieldInfo[MPASfieldName]['offset'] + print(f' offset MPAS {MPASfieldName} on CISM vertical layers, ' + f'min/max: {mpas_grid_input_layers.min()} ' + f'{mpas_grid_input_layers.max()}') + + # ------------ + # Now interpolate vertically + print(f" Input layer field " + f"{inputFile.variables[InputFieldName].dimensions[1]} has " + f"layers: {input_layers}") + if 'nVertLevels' in MPASfile.variables[MPASfieldName].dimensions: + print(f" MPAS layer centers are: {mpasLayerCenters}") + destVertCoord = mpasLayerCenters + elif 'nVertInterfaces' in MPASfile.variables[MPASfieldName].dimensions: + print(f" MPAS layer interfaces are: {mpasLayerInterfaces}") + destVertCoord = mpasLayerInterfaces + else: + raise ValueError("Unknown vertical dimension for this variable " + "destination file.") + + if input_layers.min() > destVertCoord.min(): + # This fix ensures that interpolation is done when + # input_layers.min is very slightly 
greater than destVertCoord.min + if input_layers.min() - 1.0e-6 < destVertCoord.min(): + print(f'input_layers.min = {input_layers.min():.16f}') + print(f'destVertCoord.min = {destVertCoord.min():.16f}') + input_layers[0] = input_layers[0] - 1.0e-6 + print(f'New input_layers.min = {input_layers.min():.16f}') + else: + print("WARNING: input_layers.min() > destVertCoord.min() " + "Values at the first level of input_layers will be used " + "for all MPAS layers in this region!") + if input_layers.max() < destVertCoord.max(): + # This fix ensures that interpolation is done when + # input_layers.max is very slightly smaller than destVertCoord.max + if input_layers.max() + 1.0e-6 > destVertCoord.min(): + print(f'input_layers.max = {input_layers.max():.16f}') + print(f'destVertCoord.max = {destVertCoord.max():.16f}') + input_layers[inputVerticalDimSize-1] = \ + input_layers[inputVerticalDimSize-1] + 1.0e-6 + print(f'New input_layers.max = {input_layers.max():.16f}') + print(f'input_layers = {input_layers}') + else: + print("WARNING: input_layers.max() < destVertCoord.max() " + "Values at the last level of input_layers will be used " + "for all MPAS layers in this region!") + MPASfield = _vertical_interp_mpas_grid(mpas_grid_input_layers, + destVertCoord, input_layers) + print(f' MPAS {MPASfieldName} on MPAS vertical layers, min/max of ' + f'all layers: {MPASfield.min()}, {MPASfield.max()}') + + del mpas_grid_input_layers + + return MPASfield + + # ---------------------------- + + def _vertical_interp_mpas_grid(mpas_grid_input_layers, destVertCoord, + input_layers): + destinationField = np.zeros((nCells, len(destVertCoord))) + for i in range(nCells): + destinationField[i, :] = np.interp(destVertCoord, input_layers, + mpas_grid_input_layers[:, i]) + return destinationField + + # ---------------------------- + # ---------------------------- + + print("== Gathering information. (Invoke with --help for more details. 
" + "All arguments are optional)\n") + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.description = __doc__ + parser.add_argument( + "-s", "--source", dest="inputFile", + help="name of source (input) file. Can be either CISM format or " + "MPASLI format.", + default="cism.nc", metavar="FILENAME") + parser.add_argument( + "-d", "--destination", dest="mpasFile", + help="name of destination file on which to interpolate fields. This " + "needs to be MPASLI format with desired fields already existing.", + default="landice_grid.nc", metavar="FILENAME") + parser.add_argument( + "-m", "--method", dest="interpType", + help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF, " + "n=nearest neighbor", + default="b", metavar="METHOD") + parser.add_argument( + "-w", "--weight", dest="weightFile", + help="ESMF weight file to input. Only used by ESMF interpolation " + "method", + metavar="FILENAME") + parser.add_argument( + "-t", "--thickness-only", dest="thicknessOnly", action="store_true", + default=False, + help="Only interpolate thickness and ignore all other variables " + "(useful for setting up a cullMask)") + parser.add_argument( + "--timestart", dest="timestart", type=int, default=0, + help="time level in input file to start from (0-based)") + parser.add_argument( + "--timeend", dest="timeend", type=int, default=0, + help="time level in input file to end with (0-based, inclusive)") + parser.add_argument( + "-v", "--variables", dest="vars", nargs='*', type=str, default="all", + help="Variables in the destination mesh for which interpolation should " + "be attempted. Interpolation will only actually occur if the " + "requested field 1) is in the dictionary of supported fields at " + "the end of this script and 2) exists in both the source and " + "destination files. 'all' can be used to attempt to interpolate " + "all fields present in the destination mesh. 
Provide a " + "space-delimited list.") + args = parser.parse_args() + + print(f" Source file: {args.inputFile}") + print(f" Destination MPASLI file to be modified: {args.mpasFile}") + + print(f" Interpolation method to be used: {args.interpType}") + print(" (b=bilinear, d=barycentric, e=esmf)") + + if args.weightFile and args.interpType == 'e': + print(f" Interpolation will be performed using ESMF-weights method, " + f"where possible, using weights file: {args.weightFile}") + # ---------------------------- + # Get weights from file + wfile = netCDF4.Dataset(args.weightFile, 'r') + S = wfile.variables['S'][:] + col = wfile.variables['col'][:] + row = wfile.variables['row'][:] + n_a = len(wfile.dimensions['n_a']) + n_b = len(wfile.dimensions['n_b']) + dst_frac = wfile.variables['frac_b'][:] + wfile.close() + # ---------------------------- + + # convert to SciPy Compressed Sparse Row (CSR) matrix format + weights_csr = scipy.sparse.coo_array((S, (row - 1, col - 1)), + shape=(n_b, n_a)).tocsr() + + # make a space in stdout before further output + print('') + + print("==================") + print('Gathering coordinate information from input and output files.') + + # Open the output file, get needed dimensions & variables + try: + MPASfile = netCDF4.Dataset(args.mpasFile, 'r+') + MPASfile.set_auto_mask(False) + try: + nVertLevels = len(MPASfile.dimensions['nVertLevels']) + except: + print('Output file is missing the dimension nVertLevels. Might ' + 'not be a problem.') + try: + nVertInterfaces = len(MPASfile.dimensions['nVertInterfaces']) + except: + print('Output file is missing the dimension nVertInterfaces. 
' + 'Might not be a problem.') + + try: + # 1d vertical fields - layer centers + layerThicknessFractions = \ + MPASfile.variables['layerThicknessFractions'][:] + # build up sigma levels + mpasLayerCenters = np.zeros((nVertLevels,)) + mpasLayerCenters[0] = 0.5 * layerThicknessFractions[0] + for k in range(nVertLevels)[1:]: # skip the first level + mpasLayerCenters[k] = ( + mpasLayerCenters[k-1] + + 0.5 * layerThicknessFractions[k-1] + + 0.5 * layerThicknessFractions[k]) + print(f" Using MPAS layer centers at sigma levels: " + f"{mpasLayerCenters}") + except: + print('Trouble calculating mpas layer centers. Might not be a ' + 'problem.') + + try: + # 1d vertical field - layer interfaces + layerThicknessFractions = \ + MPASfile.variables['layerThicknessFractions'][:] + # build up sigma levels + mpasLayerInterfaces = np.zeros((nVertInterfaces,)) + mpasLayerInterfaces[0] = 0.0 + for k in range(1, nVertInterfaces): # skip the first level + mpasLayerInterfaces[k] = ( + mpasLayerInterfaces[k-1] + layerThicknessFractions[k-1]) + print(f" Using MPAS layer interfaces at sigma levels: " + f"{mpasLayerInterfaces}") + except: + print('Trouble calculating mpas layer interfaces. 
Might not be a ' + 'problem.') + + # '2d' spatial fields on cell centers + xCell = MPASfile.variables['xCell'][:] + # print('xCell min/max:', xCell.min(), xCell.max() + yCell = MPASfile.variables['yCell'][:] + # print('yCell min/max:', yCell.min(), yCell.max() + nCells = len(MPASfile.dimensions['nCells']) + + except: + raise ValueError('The output grid file specified is either missing ' + 'or lacking needed dimensions/variables.') + print("==================\n") + + # Open the input file, get needed dimensions + inputFile = netCDF4.Dataset(args.inputFile, 'r') + inputFile.set_auto_mask(False) + + # Figure out if this is CISM or MPAS + if 'x1' in inputFile.variables: + filetype = 'cism' + print("Source file appears to be in CISM format.") + elif 'xCell' in inputFile.variables: + filetype = 'mpas' + print("Source file appears to be in MPAS format.") + else: + filetype = 'other' + print("Source file appears to be in a non-standard format.") + if not args.interpType == 'e': + raise ValueError("Source file does not appear to be a CISM file " + "or an MPAS file. The ESMF interpolation method " + "is the only supported method for files with a " + "non-standard format.") + + if filetype == 'cism': + # Get the CISM vertical dimensions if they exist + try: + level = len(inputFile.dimensions['level']) + except: + print(' Input file is missing the dimension level. Might not be ' + 'a problem.') + + try: + stagwbndlevel = len(inputFile.dimensions['stagwbndlevel']) + except: + print(' Input file is missing the dimension stagwbndlevel. ' + 'Might not be a problem.') + + # Get CISM location variables if they exist + try: + x1 = inputFile.variables['x1'][:] + dx1 = x1[1] - x1[0] + # print('x1 min/max/dx:', x1.min(), x1.max(), dx1 + y1 = inputFile.variables['y1'][:] + dy1 = y1[1] - y1[0] + # print('y1 min/max/dx:', y1.min(), y1.max(), dy1 + + # This was for some shifted CISM grid but should not be used in + # general. 
+ # #x1 = x1 - (x1.max()-x1.min())/2.0 + # #y1 = y1 - (y1.max()-y1.min())/2.0 + except: + print(' Input file is missing x1 and/or y1. Might not be a ' + 'problem.') + + try: + x0 = inputFile.variables['x0'][:] + # print('x0 min/max:', x0.min(), x0.max() + y0 = inputFile.variables['y0'][:] + # print('y0 min/max:', y0.min(), y0.max() + + # #x0 = x0 - (x0.max()-x0.min())/2.0 + # #y0 = y0 - (y0.max()-y0.min())/2.0 + + except: + print(' Input file is missing x0 and/or y0. Might not be a ' + 'problem.') + + # Check the overlap of the grids + print('==================') + print('CISM Input File extents:') + print(' x1 min, max: {} {}'.format(x1.min(), x1.max())) + print(' y1 min, max: {} {}'.format(y1.min(), y1.max())) + print('MPAS File extents:') + print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) + print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) + print('==================') + + elif filetype == 'mpas': + + # try: + # nVertInterfaces = len(inputFile.dimensions['nVertInterfaces']) + # except: + # print(' Input file is missing the dimension nVertInterfaces. ' + # 'Might not be a problem.' 
+ + # Get MPAS location variables if they exist + try: + inputxCell = inputFile.variables['xCell'][:] + inputyCell = inputFile.variables['yCell'][:] + except: + raise ValueError("Input file is missing xCell and/or yCell") + + # Check the overlap of the grids + print('==================') + print('Input MPAS File extents:') + print(f' xCell min, max: {inputxCell.min()} {inputxCell.max()}') + print(f' yCell min, max: {inputyCell.min()} {inputyCell.max()}') + print('Output MPAS File extents:') + print(f' xCell min, max: {xCell.min()} {xCell.max()}') + print(f' yCell min, max: {yCell.min()} {yCell.max()}') + print('==================') + + if filetype == 'mpas' and args.interpType == 'b': + raise ValueError("Bilinear interpolation not supported for input " + "files of MPAS format.") + + # ---------------------------- + # Setup Delaunay/barycentric interpolation weights if needed + if args.interpType == 'd': + mpasXY = np.vstack((xCell[:], yCell[:])).transpose() + + if filetype == 'cism': + [Yi, Xi] = np.meshgrid(x1[:], y1[:]) + cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) + cismXY1[:, 0] = Yi.flatten() + cismXY1[:, 1] = Xi.flatten() + + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') + start = time.perf_counter() + vtx1, wts1, outsideIndx1, treex1 = \ + _delaunay_interp_weights(cismXY1, mpasXY) + if len(outsideIndx1) > 0: + outsideIndx1 = outsideIndx1[0] # get the list itself + end = time.perf_counter() + print(f'done in {end - start}') + + if 'x0' in inputFile.variables and not args.thicknessOnly: + # Need to setup separate weights for this grid + [Yi, Xi] = np.meshgrid(x0[:], y0[:]) + cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1], 2]) + cismXY0[:, 0] = Yi.flatten() + cismXY0[:, 1] = Xi.flatten() + + print('Building interpolation weights: CISM x0/y0 -> MPAS') + start = time.perf_counter() + vtx0, wts0, outsideIndx0, treex0 = \ + _delaunay_interp_weights(cismXY0, mpasXY) + if len(outsideIndx0) > 0: + outsideIndx0 = outsideIndx0[0] # get the list itself + 
end = time.perf_counter() + print(f'done in {end - start}') + + elif filetype == 'mpas': + inputmpasXY = np.vstack((inputxCell[:], inputyCell[:])).transpose() + print('Building interpolation weights: MPAS in -> MPAS out') + start = time.perf_counter() + vtCell, wtsCell, outsideIndcell, treecell = \ + _delaunay_interp_weights(inputmpasXY, mpasXY) + end = time.perf_counter() + print(f'done in {end - start}') + + # ---------------------------- + # Setup NN interpolation weights if needed + if args.interpType == 'n': + mpasXY = np.vstack((xCell[:], yCell[:])).transpose() + + if filetype == 'cism': + [Yi, Xi] = np.meshgrid(x1[:], y1[:]) + cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1], 2]) + cismXY1[:, 0] = Yi.flatten() + cismXY1[:, 1] = Xi.flatten() + + print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') + start = time.perf_counter() + nn_idx_x1 = _nn_interp_weights(cismXY1, mpasXY) + end = time.perf_counter() + print('done in {}'.format(end-start)) + + if 'x0' in inputFile.variables and not args.thicknessOnly: + # Need to setup separate weights for this grid + [Yi, Xi] = np.meshgrid(x0[:], y0[:]) + cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1], 2]) + cismXY0[:, 0] = Yi.flatten() + cismXY0[:, 1] = Xi.flatten() + + print('Building interpolation weights: CISM x0/y0 -> MPAS') + start = time.perf_counter() + nn_idx_x0 = _nn_interp_weights(cismXY0, mpasXY) + end = time.perf_counter() + print('done in {}'.format(end-start)) + + elif filetype == 'mpas': + inputmpasXY = np.vstack((inputxCell[:], inputyCell[:])).transpose() + print('Building interpolation weights: MPAS in -> MPAS out') + start = time.perf_counter() + nn_idx_cell = _nn_interp_weights(inputmpasXY, mpasXY) + end = time.perf_counter() + print('done in {}'.format(end-start)) + + # ---------------------------- + # Map Input-Output field names - add new fields here as needed + + fieldInfo = OrderedDict() + # ----------------- + if filetype == 'cism': + + fieldInfo['thickness'] = { + 'InputName': 'thk', + 
'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['iceMask'] = { + 'InputName': 'iceMask', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + if not args.thicknessOnly: + fieldInfo['bedTopography'] = { + 'InputName': 'topg', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + # Assuming mm/yr w.e. units for smb + fieldInfo['sfcMassBal'] = { + 'InputName': 'smb', + 'scalefactor': 1.0/(3600.0*24.0*365.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + # Assuming mm/yr w.e. units for smb + fieldInfo['sfcMassBalUncertainty'] = { + 'InputName': 'smb_std', + 'scalefactor': 1.0/(3600.0*24.0*365.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + # Assuming default CISM density + fieldInfo['floatingBasalMassBal'] = { + 'InputName': 'subm', + 'scalefactor': 910.0/(3600.0*24.0*365.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + # # pick one or the other: + # fieldInfo['temperature'] = { + # 'InputName': 'temp', + # 'scalefactor': 1.0, + # 'offset': 273.15, + # 'gridType': 'x1', + # 'vertDim': True} + fieldInfo['temperature'] = { + 'InputName': 'tempstag', + 'scalefactor': 1.0, + 'offset': 273.15, + 'gridType': 'x1', + 'vertDim': True} + fieldInfo['basalHeatFlux'] = { + 'InputName': 'bheatflx', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['surfaceAirTemperature'] = { + 'InputName': 'artm', + 'scalefactor': 1.0, + 'offset': 273.15, + 'gridType': 'x1', + 'vertDim': False} + # needs different mapping file... + fieldInfo['beta'] = { + 'InputName': 'beta', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x0', + 'vertDim': False} + # # needs different mapping file... 
+ # fieldInfo['observedSpeed'] = { + # 'InputName': 'balvel', + # 'scalefactor': 1.0/(365.0*24.0*3600.0), + # 'offset': 0.0, + # 'gridType': 'x0', + # 'vertDim': False} + # fields for observed surface speed and associated error, observed + # thickness change + fieldInfo['observedSurfaceVelocityX'] = { + 'InputName': 'vx', + 'scalefactor': 1.0/(365.0*24.0*3600.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['observedSurfaceVelocityY'] = { + 'InputName': 'vy', + 'scalefactor': 1.0/(365.0*24.0*3600.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['observedSurfaceVelocityUncertainty'] = { + 'InputName': 'vErr', + 'scalefactor': 1.0/(365.0*24.0*3600.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['observedThicknessTendency'] = { + 'InputName': 'dHdt', + 'scalefactor': 1.0/(365.0*24.0*3600.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['observedThicknessTendencyUncertainty'] = { + 'InputName': 'dHdtErr', + 'scalefactor': 1.0/(365.0*24.0*3600.0), + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['thicknessUncertainty'] = { + 'InputName': 'topgerr', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + + fieldInfo['ismip6shelfMelt_basin'] = { + 'InputName': 'ismip6shelfMelt_basin', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + fieldInfo['ismip6shelfMelt_deltaT'] = { + 'InputName': 'ismip6shelfMelt_deltaT', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'x1', + 'vertDim': False} + + # ----------------- + elif filetype == 'mpas': + + fieldInfo['thickness'] = { + 'InputName': 'thickness', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + if not args.thicknessOnly: + fieldInfo['bedTopography'] = { + 'InputName': 'bedTopography', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['sfcMassBal'] = { + 'InputName': 
'sfcMassBal', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['floatingBasalMassBal'] = { + 'InputName': 'floatingBasalMassBal', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['temperature'] = { + 'InputName': 'temperature', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': True} + fieldInfo['basalHeatFlux'] = { + 'InputName': 'basalHeatFlux', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['surfaceAirTemperature'] = { + 'InputName': 'surfaceAirTemperature', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['beta'] = { + 'InputName': 'beta', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['muFriction'] = { + 'InputName': 'muFriction', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['eigencalvingParameter'] = { + 'InputName': 'eigencalvingParameter', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + # obs fields + fieldInfo['observedSurfaceVelocityX'] = { + 'InputName': 'observedSurfaceVelocityX', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['observedSurfaceVelocityY'] = { + 'InputName': 'observedSurfaceVelocityY', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['observedSurfaceVelocityUncertainty'] = { + 'InputName': 'observedSurfaceVelocityUncertainty', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['observedThicknessTendency'] = { + 'InputName': 'observedThicknessTendency', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['observedThicknessTendencyUncertainty'] = { + 'InputName': 'observedThicknessTendencyUncertainty', + 'scalefactor': 1.0, + 'offset': 0.0, + 
'gridType': 'cell', + 'vertDim': False} + fieldInfo['thicknessUncertainty'] = { + 'InputName': 'thicknessUncertainty', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['basalFrictionFlux'] = { + 'InputName': 'basalFrictionFlux', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['uReconstructX'] = { + 'InputName': 'uReconstructX', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': True} + fieldInfo['uReconstructY'] = { + 'InputName': 'uReconstructY', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': True} + + fieldInfo['ismip6shelfMelt_basin'] = { + 'InputName': 'ismip6shelfMelt_basin', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['ismip6shelfMelt_deltaT'] = { + 'InputName': 'ismip6shelfMelt_deltaT', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + + fieldInfo['stiffnessFactor'] = { + 'InputName': 'stiffnessFactor', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['effectivePressure'] = { + 'InputName': 'effectivePressure', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + fieldInfo['iceMask'] = { + 'InputName': 'iceMask', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + + # Used by Trevor + # fieldInfo['sfcMassBalUncertainty'] = { + # 'InputName': 'smb_std_vector', + # 'scalefactor': 910.0/(3600.0*24.0*365.0)/1000.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['ismip6Runoff'] = { + # 'InputName': 'runoff_vector', + # 'scalefactor': 1.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['ismip6_2dThermalForcing'] = { + # 'InputName': 'thermal_forcing_vector', + # 'scalefactor': 1.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['ismip6aST'] = { + # 
'InputName': 'aST_vector', + # 'scalefactor': 1.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['ismip6aSMB'] = { + # 'InputName': 'aSMB_vector', + # 'scalefactor': 1.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['ismip6refST'] = { + # 'InputName': 'ST_vector', + # 'scalefactor': 1.0, + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + # fieldInfo['externalWaterInput'] = { + # 'InputName': 'externalWaterInput_vector', + # 'scalefactor': 1.0/(3600.0*24.0*365.0), + # 'offset': 0.0, + # 'gridType': 'cell', + # 'vertDim': False} + + # ----------------- + elif filetype == 'other': + # These are variable mappings for the ESMF method. Update as needed + # for specific applications. + + # This example can be used with an ELM history file and an EMSF mapping + # file. + fieldInfo['surfaceAirTemperature'] = { + 'InputName': 'TBOT', + 'scalefactor': 1.0, + 'offset': 0.0, + 'gridType': 'cell', + 'vertDim': False} + + else: + raise ValueError("Unknown file type.") + + # ---------------------------- + + # ---------------------------- + # try each field. If it exists in the input file, it will be copied. If + # not, it will be skipped. + interpolated_vars = [] + for MPASfieldName in fieldInfo: + if 'all' not in args.vars and MPASfieldName not in args.vars: + continue + + print('\n## {} ##'.format(MPASfieldName)) + + if MPASfieldName not in MPASfile.variables: + print(f" Warning: Field '{MPASfieldName}' is not in the " + f"destination file. Skipping.") + # skip the rest of this iteration of the for loop over variables + continue + + if fieldInfo[MPASfieldName]['InputName'] not in inputFile.variables: + print(f" Warning: Field '{fieldInfo[MPASfieldName]['InputName']}'" + f" is not in the source file. 
Skipping.") + # skip the rest of this iteration of the for loop over variables + continue + + for timelev in range(args.timestart, args.timeend+1): + # Note: the interpolate functions called below access timelev as a + # global variable + # + # assuming the time level of the output file should match that of + # the input file + timelevout = timelev + print(f" ---- Interpolating time level {timelev} ----") + start = time.perf_counter() + if fieldInfo[MPASfieldName]['vertDim']: + MPASfield = _interpolate_field_with_layers(MPASfieldName) + else: + MPASfield = _interpolate_field(MPASfieldName) + end = time.perf_counter() + print(f' interpolation done in {end - start}') + + # Don't allow negative thickness. + if MPASfieldName == 'thickness' and MPASfield.min() < 0.0: + MPASfield[MPASfield < 0.0] = 0.0 + print(f' removed negative thickness, new min/max: ' + f'{MPASfield.min()} {MPASfield.max()}') + + # basalHeatFlux must be non-negative + if MPASfieldName == 'basalHeatFlux': + assert np.nanmin(MPASfield) >= 0.0, \ + 'basalHeatFlux contains negative values! This is likely ' \ + 'due to the conventions used in the input file, rather ' \ + 'than bad data. Ensure non-negative values before ' \ + 'interpolating.' + + # Now insert the MPAS field into the file. + if 'Time' in MPASfile.variables[MPASfieldName].dimensions: + # Time will always be leftmost index + MPASfile.variables[MPASfieldName][timelevout, :] = MPASfield + else: + MPASfile.variables[MPASfieldName][:] = MPASfield + + # update the file now in case we get an error later + MPASfile.sync() + interpolated_vars.append(MPASfieldName) + + if args.timeend > args.timestart: + print("\n\nMultiple time levels have been copied, but xtime has not. 
" + "Be sure to manually copy or assign xtime values in the " + "destination file if needed.") + + print("\nFields successfully interpolated: " + ",".join(interpolated_vars)) + + # Update history attribute of netCDF file + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \ + " ".join(sys.argv[:]) # .join("Variables interpolated: {}".format(interpolated_vars)) + thiscommand = thiscommand+"; Variables successfully interpolated: " + \ + ",".join(interpolated_vars) + if hasattr(MPASfile, 'history'): + newhist = '\n'.join([thiscommand, getattr(MPASfile, 'history')]) + else: + newhist = thiscommand + setattr(MPASfile, 'history', newhist) + + inputFile.close() + MPASfile.close() + + print('\nInterpolation completed.') diff --git a/conda_package/mpas_tools/landice/projections.py b/conda_package/mpas_tools/landice/projections.py new file mode 100644 index 000000000..dafd670b4 --- /dev/null +++ b/conda_package/mpas_tools/landice/projections.py @@ -0,0 +1,38 @@ +# ======== DEFINE PROJECTIONS ============= +# Create empty dictionary to store projection definitions: +projections = dict() +# add more as needed: + +# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C +# geoid. Datum is actually EIGEN-GL04C but that is not an option in Proj. +# Therefore using EGM08 which should be within ~1m everywhere (and 10-20 +# cm in most places) +# NOTE!!!!!! egm08_25.gtx can be downloaded from: +# http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx and the +# path in the projection specification line should point to it! 
# ======== DEFINE PROJECTIONS =============
# Registry of Proj4 projection strings, keyed by short dataset names.
# Add more entries as needed.
projections = dict()

# CISM's Greenland projection, with the vertical datum shift dropped.
# The full definition used the EIGEN-GL04C geoid (approximated by EGM08
# via '+geoidgrids=./egm08_25.gtx'); ignoring the vertical datum is a
# very small error for horizontal-only positions.
projections['gis-bamber'] = \
    '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 ' \
    '+x_0=800000.0 +y_0=3400000.0 +ellps=WGS84'

# GIMP projection: also polar stereographic but with a different
# standard parallel and using the WGS84 ellipsoid.
projections['gis-gimp'] = \
    '+proj=stere +lat_ts=70.0 +lat_0=90 +lon_0=315.0 +k_0=1.0 +x_0=0.0 ' \
    '+y_0=0.0 +ellps=WGS84'

# BEDMAP2 projection
# Note: BEDMAP2 elevations use EIGEN-GL04C geoid
projections['ais-bedmap2'] = \
    '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 +x_0=0.0 ' \
    '+y_0=0.0 +ellps=WGS84'

# Standard Lat/Long
projections['latlon'] = '+proj=longlat +ellps=WGS84'
" + "All arguments are optional)\n") + parser = OptionParser() + parser.description = __doc__ + parser.add_option( + "-f", + "--file", + dest="inputFile", + help="Name of file to be processed.", + default="grid.nc", + metavar="FILENAME") + for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" + options, args = parser.parse_args() + + print(" File to be modified: " + options.inputFile) + + # Open file and get needed fields. + inputFile = netCDF4.Dataset(options.inputFile, 'r+') + nCells = len(inputFile.dimensions['nCells']) + cellsOnCell = inputFile.variables['cellsOnCell'][:] + + # Add the horn cells to existing mask if it exists + if 'cullCell' in inputFile.variables: + cullCell = inputFile.variables['cullCell'][:] + else: # otherwise make a new mask initialized empty + cullCell = np.zeros((nCells,)) # local variable + + nHorns = 0 + for i in range(nCells): + # NOTE: Can change this threshold, if needed for a particular use case. 
+ if (cellsOnCell[i, :] > 0).sum() <= 2: + cullCell[i] = 1 + nHorns += 1 + + # Write out the new field + if 'cullCell' in inputFile.variables: + cullCellVar = inputFile.variables['cullCell'] + else: + cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) + cullCellVar[:] = cullCell + + # Update history attribute of netCDF file + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + \ + ": " + " ".join(sys.argv[:]) + if hasattr(inputFile, 'history'): + newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) + else: + newhist = thiscommand + setattr(inputFile, 'history', newhist) + + inputFile.close() + + print(f'\n{nHorns} "horn" locations have been marked in the field ' + f'cullCell.') + print("Remember to use MpasCellCuller.x to actually remove them!") diff --git a/conda_package/mpas_tools/mesh/set_lat_lon.py b/conda_package/mpas_tools/mesh/set_lat_lon.py new file mode 100755 index 000000000..7636e0873 --- /dev/null +++ b/conda_package/mpas_tools/mesh/set_lat_lon.py @@ -0,0 +1,113 @@ +import sys +import numpy as np +import netCDF4 +from pyproj import Transformer, CRS +from optparse import OptionParser +from datetime import datetime + +from mpas_tools.landice.projections import projections + + +def main(): + """ + Take MPAS planar grid and populate the lat/lon fields based on a specified + projection. + """ + + print("== Gathering information. (Invoke with --help for more details. " + "All arguments are optional)") + parser = OptionParser() + parser.description = "This script populates the MPAS lat and lon fields " \ + "based on the projection specified by the -p option." + parser.add_option( + "-f", "--file", dest="fileInName", + help="MPAS land ice file name.", default="landice_grid.nc", + metavar="FILENAME") + parser.add_option( + "-p", "--proj", dest="projection", + help=f"projection used for the data. 
Valid options are: " + f"{list(projections.keys())}", + metavar="PROJ") + for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" + options, args = parser.parse_args() + + if not options.projection: + raise ValueError( + f'Data projection required with -p or --proj command line ' + f'argument. Valid options are: {list(projections.keys())}') + + if not options.fileInName: + print("No filename specified, so using 'landice_grid.nc'.") + options.fileInName = 'landice_grid.nc' + # make a space in stdout before further output + print('') + + # ================================================= + + print(f"Using {options.projection} projection, defined as: " + f"{projections[options.projection]}") + + # get needed fields + f = netCDF4.Dataset(options.fileInName, 'r+') + xCell = f.variables['xCell'] + yCell = f.variables['yCell'] + xVertex = f.variables['xVertex'] + yVertex = f.variables['yVertex'] + xEdge = f.variables['xEdge'] + yEdge = f.variables['yEdge'] + + latCellVar = f.variables['latCell'] + lonCellVar = f.variables['lonCell'] + latVertexVar = f.variables['latVertex'] + lonVertexVar = f.variables['lonVertex'] + latEdgeVar = f.variables['latEdge'] + lonEdgeVar = f.variables['lonEdge'] + + print("Input file xCell min/max values:", xCell[:].min(), xCell[:].max()) + print("Input file yCell min/max values:", yCell[:].min(), yCell[:].max()) + + # make a CRS (coordinate reference system) for projections from Proj + # string: + crs_in = CRS.from_proj4(projections[options.projection]) + crs_out = CRS.from_proj4(projections['latlon']) + + # define a transformer + t = Transformer.from_crs(crs_in, crs_out) + + # populate x,y fields + # MPAS uses lat/lon in radians, so have pyproj return fields in radians. 
+ lonCell, latCell = t.transform(xCell[:], yCell[:], radians=True) + lonVertex, latVertex = t.transform(xVertex[:], yVertex[:], radians=True) + lonEdge, latEdge = t.transform(xEdge[:], yEdge[:], radians=True) + + # change the longitude convention to use positive values [0 2pi] + lonCell = np.mod(lonCell, 2.0*np.pi) + lonVertex = np.mod(lonVertex, 2.0*np.pi) + lonEdge = np.mod(lonEdge, 2.0*np.pi) + + print(f"Calculated latCell min/max values (radians): " + f"{latCell.min()}, {latCell.max()}") + print(f"Calculated lonCell min/max values (radians): " + f"{lonCell.min()}, {lonCell.max()}") + + latCellVar[:] = latCell + lonCellVar[:] = lonCell + latVertexVar[:] = latVertex + lonVertexVar[:] = lonVertex + latEdgeVar[:] = latEdge + lonEdgeVar[:] = lonEdge + + # Update history attribute of netCDF file + thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + \ + " ".join(sys.argv[:]) + if hasattr(f, 'history'): + newhist = '\n'.join([thiscommand, getattr(f, 'history')]) + else: + newhist = thiscommand + setattr(f, 'history', newhist) + + f.close() + + print("Lat/lon calculations completed. File has been written.") diff --git a/conda_package/mpas_tools/scrip/from_mpas.py b/conda_package/mpas_tools/scrip/from_mpas.py index eca415d32..f24236f7c 100755 --- a/conda_package/mpas_tools/scrip/from_mpas.py +++ b/conda_package/mpas_tools/scrip/from_mpas.py @@ -1,13 +1,13 @@ # Create a SCRIP file from an MPAS mesh. 
# See for details: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_5_2_0rp1/ESMF_refdoc/node3.html#SECTION03024000000000000000 -import sys import netCDF4 import numpy as np from optparse import OptionParser from mpas_tools.cime.constants import constants + def scrip_from_mpas(mpasFile, scripFile, useLandIceMask=False): """ Create a SCRIP file from an MPAS mesh @@ -49,15 +49,15 @@ def scrip_from_mpas(mpasFile, scripFile, useLandIceMask=False): # check the longitude convention to use positive values [0 2pi] if np.any(np.logical_or(lonCell < 0, lonCell > 2.0 * np.pi)): - raise ValueError("lonCell is not in the desired range (0, 2pi)") + raise ValueError("lonCell is not in the desired range (0, 2pi)") if np.any(np.logical_or(lonVertex < 0, lonVertex > 2.0 * np.pi)): - raise ValueError("lonVertex is not in the desired range (0, 2pi)") + raise ValueError("lonVertex is not in the desired range (0, 2pi)") if sphereRadius <= 0: - sphereRadius = constants['SHR_CONST_REARTH'] - print(f" -- WARNING: sphereRadius<0 so setting sphereRadius = " - f"{constants['SHR_CONST_REARTH']}") + sphereRadius = constants['SHR_CONST_REARTH'] + print(f" -- WARNING: sphereRadius<0 so setting sphereRadius = " + f"{constants['SHR_CONST_REARTH']}") if on_a_sphere == "NO": print(" -- WARNING: 'on_a_sphere' attribute is 'NO', which means that " @@ -144,8 +144,8 @@ def scrip_from_mpas(mpasFile, scripFile, useLandIceMask=False): def main(): - print("== Gathering information. (Invoke with --help for more details. All" - " arguments are optional)") + print("== Gathering information. (Invoke with --help for more details. " + "All arguments are optional)") parser = OptionParser() parser.description = "This script takes an MPAS grid file and generates " \ "a SCRIP grid file." @@ -165,11 +165,11 @@ def main(): options, args = parser.parse_args() if not options.mpasFile: - sys.exit('Error: MPAS input grid file is required. 
Specify with -m ' - 'command line argument.') + raise ValueError('MPAS input grid file is required. Specify with -m ' + 'command line argument.') if not options.scripFile: - sys.exit('Error: SCRIP output grid file is required. Specify with -s ' - 'command line argument.') + raise ValueError('SCRIP output grid file is required. Specify with ' + '-s command line argument.') if not options.landiceMasks: options.landiceMasks = False diff --git a/conda_package/mpas_tools/scrip/from_planar.py b/conda_package/mpas_tools/scrip/from_planar.py new file mode 100644 index 000000000..5d62a4717 --- /dev/null +++ b/conda_package/mpas_tools/scrip/from_planar.py @@ -0,0 +1,195 @@ +# Create a SCRIP file from a planar rectanfular mesh. +# See for details: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_5_2_0rp1/ESMF_refdoc/node3.html#SECTION03024000000000000000 + +import netCDF4 +import numpy as np +from optparse import OptionParser +import matplotlib.pyplot as plt +from pyproj import Transformer, CRS + +from mpas_tools.landice.projections import projections + + +def main(): + """ + Create a SCRIP file from a planar rectanfular mesh + """ + + print("== Gathering information. (Invoke with --help for more details. " + "All arguments are optional)") + parser = OptionParser() + parser.description = \ + "This script takes an MPAS grid file and generates a SCRIP grid file." + parser.add_option( + "-i", "--input", dest="inputFile", + help="input grid file name used as input.", default="input.nc", + metavar="FILENAME") + parser.add_option( + "-s", "--scrip", dest="scripFile", + help="SCRIP grid file to output.", default="scrip.nc", + metavar="FILENAME") + parser.add_option( + "-p", "--proj", dest="projection", + help=f"projection used by the input data file. 
Valid options are: " + f"{projections.keys()}", + metavar="PROJ") + parser.add_option( + "-r", "--rank", dest="gridRank", + help="desired rank of the output SCRIP grid data") + parser.add_option( + "--plot", dest="plot", action="store_true", + help="if this flag is used, destination grid points are plotted") + + for option in parser.option_list: + if option.default != ("NO", "DEFAULT"): + option.help += (" " if option.help else "") + "[default: %default]" + options, args = parser.parse_args() + + if not options.inputFile: + raise ValueError('Data input grid file is required. Specify with -c ' + 'command line argument.') + if not options.scripFile: + raise ValueError('SCRIP output grid file is required. Specify with ' + '-s command line argument.') + if not options.projection: + raise ValueError(f'data projection required with -p or --proj command ' + f'line argument. Valid options are: ' + f'{projections.keys()}') + if not options.gridRank: + raise ValueError('desired rank of SCRIP output grid data is required. 
' + 'Valid options are 1 (for unstructured grid) or 2') + # make a space in stdout before further output + print('') + + # =================================== + + fin = netCDF4.Dataset(options.inputFile, 'r') + # This will clobber existing files + fout = netCDF4.Dataset(options.scripFile, 'w') + + # Get info from input file + x = fin.variables['x'][:] + y = fin.variables['y'][:] + nx = x.size + ny = y.size + dx = x[1] - x[0] + dy = y[1] - y[0] + + # Write to output file + # Dimensions + fout.createDimension("grid_size", nx * ny) + fout.createDimension("grid_corners", 4) + + if int(options.gridRank) == 1: + print('grid rank is 1') + fout.createDimension("grid_rank", 1) + elif int(options.gridRank) == 2: + print('grid rank is 2') + fout.createDimension("grid_rank", 2) + else: + raise ValueError(f'grid rank value is invalid: valid options are ' + f'1 or 2 but {options.gridRank} was given.') + + # Variables + grid_center_lat = fout.createVariable('grid_center_lat', 'f8', + ('grid_size',)) + grid_center_lat.units = 'degrees' + grid_center_lon = fout.createVariable('grid_center_lon', 'f8', + ('grid_size',)) + grid_center_lon.units = 'degrees' + grid_corner_lat = fout.createVariable('grid_corner_lat', 'f8', + ('grid_size', 'grid_corners')) + grid_corner_lat.units = 'degrees' + grid_corner_lon = fout.createVariable('grid_corner_lon', 'f8', + ('grid_size', 'grid_corners')) + grid_corner_lon.units = 'degrees' + grid_imask = fout.createVariable('grid_imask', 'i4', + ('grid_size',)) + grid_imask.units = 'unitless' + grid_dims = fout.createVariable('grid_dims', 'i4', + ('grid_rank',)) + + # Create matrices of x,y + print('Building matrix version of x, y locations.') + xmatrix, ymatrix = np.meshgrid(x, y) + # get a copy of x that is on the staggered grid and includes both bounding + # edges + xc = np.append(x[:] - dx/2.0, x[-1] + dx / 2.0) + # get a copy of y that is on the staggered grid and includes both bounding + # edges + yc = np.append(y[:] - dy/2.0, y[-1] + dy / 2.0) + 
xcmatrix, ycmatrix = np.meshgrid(xc, yc) + + # Unproject to lat/long for grid centers and grid corners + print('Unprojecting.') + + xmatrix_flat = xmatrix.flatten(order='C') # Flatten using C indexing + ymatrix_flat = ymatrix.flatten(order='C') + + # make a CRS (coordinate reference system) for projections from the Proj + # string: + crs_in = CRS.from_proj4(projections[options.projection]) + crs_out = CRS.from_proj4(projections['latlon']) + + # building a transformer + t = Transformer.from_crs(crs_in, crs_out) + + # transform the original grid into the lat-lon grid + grid_center_lon[:], grid_center_lat[:] = t.transform(xmatrix_flat, + ymatrix_flat) + + # Now fill in the corners in the right locations + stag_lon, stag_lat = t.transform(xcmatrix, ycmatrix) + print('Filling in corners of each cell.') + # It is WAYYY faster to fill in the array entry-by-entry in memory than to + # disk. + grid_corner_lon_local = np.zeros((nx * ny, 4)) + grid_corner_lat_local = np.zeros((nx * ny, 4)) + + jj = np.arange(ny) + ii = np.arange(nx) + i_ind, j_ind = np.meshgrid(ii, jj) + cell_ind = j_ind * nx + i_ind + grid_corner_lon_local[cell_ind, 0] = stag_lon[j_ind, i_ind] + grid_corner_lon_local[cell_ind, 1] = stag_lon[j_ind, i_ind + 1] + grid_corner_lon_local[cell_ind, 2] = stag_lon[j_ind + 1, i_ind + 1] + grid_corner_lon_local[cell_ind, 3] = stag_lon[j_ind + 1, i_ind] + grid_corner_lat_local[cell_ind, 0] = stag_lat[j_ind, i_ind] + grid_corner_lat_local[cell_ind, 1] = stag_lat[j_ind, i_ind + 1] + grid_corner_lat_local[cell_ind, 2] = stag_lat[j_ind + 1, i_ind + 1] + grid_corner_lat_local[cell_ind, 3] = stag_lat[j_ind + 1, i_ind] + + grid_corner_lon[:] = grid_corner_lon_local[:] + grid_corner_lat[:] = grid_corner_lat_local[:] + + # For now, assume we don't want to mask anything out - but eventually may + # want to exclude certain cells from the input mesh during interpolation + grid_imask[:] = 1 + + # set the grid dimension based on the grid rank + if int(options.gridRank) == 1: + 
grid_dims[:] = (nx * ny) + elif int(options.gridRank) == 2: + grid_dims[:] = [nx, ny] + + if options.plot: + print("plotting is on") + # plot some stuff + # plot a single point + i = -1 + plt.figure(1) + plt.plot(grid_center_lon[i], grid_center_lat[i], 'o') + plt.plot(grid_corner_lon[i, 0], grid_corner_lat[i, 0], 'kx') + plt.plot(grid_corner_lon[i, 1], grid_corner_lat[i, 1], 'bx') + plt.plot(grid_corner_lon[i, 2], grid_corner_lat[i, 2], 'cx') + plt.plot(grid_corner_lon[i, 3], grid_corner_lat[i, 3], 'gx') + + # plot all points + plt.figure(2) + plt.plot(grid_center_lon[:], grid_center_lat[:], 'bo') + plt.plot(grid_corner_lon[:], grid_corner_lat[:], 'g.') + plt.show() + + fin.close() + fout.close() + print('scrip file generation complete') diff --git a/conda_package/recipe/meta.yaml b/conda_package/recipe/meta.yaml index e80c2bbdd..f36a1d561 100644 --- a/conda_package/recipe/meta.yaml +++ b/conda_package/recipe/meta.yaml @@ -86,17 +86,17 @@ test: - planar_hex --nx=30 --ny=20 --dc=1000. --npx --npy --outFileName='nonperiodic_mesh_30x20_1km.nc' - MpasCellCuller.x nonperiodic_mesh_30x20_1km.nc culled_nonperiodic_mesh_30x20_1km.nc - python -m pytest conda_package/mpas_tools/tests - - mark_horns_for_culling.py --help - - set_lat_lon_fields_in_planar_grid.py --help - - create_SCRIP_file_from_MPAS_mesh.py --help - - create_SCRIP_file_from_planar_rectangular_grid.py --help + - mark_horns_for_culling --help + - set_lat_lon_fields_in_planar_grid --help + - create_scrip_file_from_mpas_mesh --help + - create_scrip_file_from_planar_rectangular_grid --help - prepare_seaice_partitions --help - create_seaice_partitions --help - fix_regrid_output.exe - - create_landice_grid_from_generic_MPAS_grid.py --help - - define_cullMask.py --help - - interpolate_to_mpasli_grid.py --help - - mark_domain_boundaries_dirichlet.py --help + - create_landice_grid_from_generic_mpas_grid --help + - define_landice_cull_mask --help + - interpolate_to_mpasli_grid --help + - mark_domain_boundaries_dirichlet 
--help - add_critical_land_blockages_to_mask --help - add_land_locked_cells_to_mask --help - widen_transect_edge_masks --help diff --git a/conda_package/setup.py b/conda_package/setup.py index b00b2dfb1..65cfc74ee 100755 --- a/conda_package/setup.py +++ b/conda_package/setup.py @@ -31,14 +31,6 @@ version = re.search(r'{}\s*=\s*[(]([^)]*)[)]'.format('__version_info__'), init_file).group(1).replace(', ', '.') -os.chdir(here) - -for path in ['landice', 'mesh_tools']: - destPath = './{}'.format(path) - if os.path.exists(destPath): - shutil.rmtree(destPath) - shutil.copytree('../{}'.format(path), destPath) - setup(name='mpas_tools', version=version, description='A set of tools for creating and manipulating meshes for the' @@ -64,44 +56,45 @@ ], packages=find_packages(include=['mpas_tools', 'mpas_tools.*']), package_data={'mpas_tools.viz': ['SciVisColorColormaps/*.xml']}, - scripts=['mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py', - 'mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py', - 'mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py', - 'mesh_tools/create_SCRIP_files/create_SCRIP_file_from_planar_rectangular_grid.py', - 'landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py', - 'landice/mesh_tools_li/define_cullMask.py', - 'landice/mesh_tools_li/interpolate_to_mpasli_grid.py', - 'landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py'], install_requires=install_requires, - entry_points={'console_scripts': - ['planar_hex = mpas_tools.planar_hex:main', - 'translate_planar_grid = mpas_tools.translate:main', - 'merge_grids = mpas_tools.merge_grids:main', - 'split_grids = mpas_tools.split_grids:main', - 'inject_bathymetry = mpas_tools.ocean.inject_bathymetry:main', - 'inject_preserve_floodplain = mpas_tools.ocean.inject_preserve_floodplain:main', - 'ocean_add_depth = mpas_tools.ocean.depth:main_add_depth', - 'ocean_add_zmid = mpas_tools.ocean.depth:main_add_zmid', - 'ocean_write_time_varying_zmid = 
mpas_tools.ocean.depth:main_write_time_varying_zmid', - 'plot_ocean_transects = mpas_tools.ocean.viz.transects:main', - 'mpas_to_triangle = mpas_tools.mesh.creation.mpas_to_triangle:main', - 'triangle_to_netcdf = mpas_tools.mesh.creation.triangle_to_netcdf:main', - 'jigsaw_to_netcdf = mpas_tools.mesh.creation.jigsaw_to_netcdf:main', - 'sort_mesh = mpas_tools.mesh.creation.sort_mesh:main', - 'scrip_from_mpas = mpas_tools.scrip.from_mpas:main', - 'compute_mpas_region_masks = mpas_tools.mesh.mask:entry_point_compute_mpas_region_masks', - 'compute_mpas_transect_masks = mpas_tools.mesh.mask:entry_point_compute_mpas_transect_masks', - 'compute_mpas_flood_fill_mask = mpas_tools.mesh.mask:entry_point_compute_mpas_flood_fill_mask', - 'compute_lon_lat_region_masks = mpas_tools.mesh.mask:entry_point_compute_lon_lat_region_masks', - 'compute_projection_region_masks = mpas_tools.mesh.mask:entry_point_compute_projection_grid_region_masks', - 'prepare_seaice_partitions = mpas_tools.seaice.partition:prepare_partitions', - 'create_seaice_partitions = mpas_tools.seaice.partition:create_partitions', - 'simple_seaice_partitions = mpas_tools.seaice.partition:simple_partitions', - 'vector_reconstruct = mpas_tools.vector.reconstruct:main', - 'add_critical_land_blockages_to_mask = mpas_tools.ocean.coastline_alteration:main_add_critical_land_blockages', - 'add_land_locked_cells_to_mask = mpas_tools.ocean.coastline_alteration:main_add_land_locked_cells_to_mask', - 'widen_transect_edge_masks = mpas_tools.ocean.coastline_alteration:main_widen_transect_edge_masks', - 'moc_southern_boundary_extractor = mpas_tools.ocean.moc:moc_southern_boundary_extractor', - 'paraview_vtk_field_extractor = mpas_tools.viz.paraview_extractor:main', + entry_points={ + 'console_scripts': [ + 'planar_hex = mpas_tools.planar_hex:main', + 'translate_planar_grid = mpas_tools.translate:main', + 'merge_grids = mpas_tools.merge_grids:main', + 'split_grids = mpas_tools.split_grids:main', + 'inject_bathymetry = 
mpas_tools.ocean.inject_bathymetry:main', + 'inject_preserve_floodplain = mpas_tools.ocean.inject_preserve_floodplain:main', + 'ocean_add_depth = mpas_tools.ocean.depth:main_add_depth', + 'ocean_add_zmid = mpas_tools.ocean.depth:main_add_zmid', + 'ocean_write_time_varying_zmid = mpas_tools.ocean.depth:main_write_time_varying_zmid', + 'plot_ocean_transects = mpas_tools.ocean.viz.transects:main', + 'mpas_to_triangle = mpas_tools.mesh.creation.mpas_to_triangle:main', + 'triangle_to_netcdf = mpas_tools.mesh.creation.triangle_to_netcdf:main', + 'jigsaw_to_netcdf = mpas_tools.mesh.creation.jigsaw_to_netcdf:main', + 'sort_mesh = mpas_tools.mesh.creation.sort_mesh:main', + 'scrip_from_mpas = mpas_tools.scrip.from_mpas:main', + 'compute_mpas_region_masks = mpas_tools.mesh.mask:entry_point_compute_mpas_region_masks', + 'compute_mpas_transect_masks = mpas_tools.mesh.mask:entry_point_compute_mpas_transect_masks', + 'compute_mpas_flood_fill_mask = mpas_tools.mesh.mask:entry_point_compute_mpas_flood_fill_mask', + 'compute_lon_lat_region_masks = mpas_tools.mesh.mask:entry_point_compute_lon_lat_region_masks', + 'compute_projection_region_masks = mpas_tools.mesh.mask:entry_point_compute_projection_grid_region_masks', + 'prepare_seaice_partitions = mpas_tools.seaice.partition:prepare_partitions', + 'create_seaice_partitions = mpas_tools.seaice.partition:create_partitions', + 'simple_seaice_partitions = mpas_tools.seaice.partition:simple_partitions', + 'vector_reconstruct = mpas_tools.vector.reconstruct:main', + 'mark_horns_for_culling = mpas_tools.mesh.mark_horns_for_culling:main', + 'set_lat_lon_fields_in_planar_grid = mpas_tools.mesh.set_lat_lon:main', + 'create_scrip_file_from_mpas_mesh = mpas_tools.scrip.from_mpas:main', + 'create_scrip_file_from_planar_rectangular_grid = mpas_tools.scrip.from_planar:main', + 'create_landice_grid_from_generic_mpas_grid = mpas_tools.landice.create:create_from_generic_mpas_grid', + 'define_landice_cull_mask = 
mpas_tools.landice.cull:define_cull_mask', + 'interpolate_to_mpasli_grid = mpas_tools.landice.interpolate:interpolate_to_mpasli_grid', + 'mark_domain_boundaries_dirichlet = mpas_tools.landice.boundary:mark_domain_boundaries_dirichlet', + 'add_critical_land_blockages_to_mask = mpas_tools.ocean.coastline_alteration:main_add_critical_land_blockages', + 'add_land_locked_cells_to_mask = mpas_tools.ocean.coastline_alteration:main_add_land_locked_cells_to_mask', + 'widen_transect_edge_masks = mpas_tools.ocean.coastline_alteration:main_widen_transect_edge_masks', + 'moc_southern_boundary_extractor = mpas_tools.ocean.moc:moc_southern_boundary_extractor', + 'paraview_vtk_field_extractor = mpas_tools.viz.paraview_extractor:main', ] }) diff --git a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py b/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py deleted file mode 100755 index b85a31226..000000000 --- a/landice/mesh_tools_li/create_landice_grid_from_generic_MPAS_grid.py +++ /dev/null @@ -1,298 +0,0 @@ -#!/usr/bin/env python -""" -Script to create a grid with land ice variables from an MPAS grid. -Currently variable attributes are not copied. -This script could be modified to copy them (looping over dir(var), skipping over variable function names "assignValue", "getValue", "typecode"). -""" - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import sys, numpy -from netCDF4 import Dataset -from optparse import OptionParser -from datetime import datetime - - -sphere_radius = 6.37122e6 # earth radius, if needed - -print("** Gathering information. (Invoke with --help for more details. All arguments are optional)") -parser = OptionParser() -parser.add_option("-i", "--in", dest="fileinName", help="input filename. Defaults to 'grid.nc'", metavar="FILENAME") -parser.add_option("-o", "--out", dest="fileoutName", help="output filename. 
Defaults to 'landice_grid.nc'", metavar="FILENAME") -parser.add_option("-l", "--level", dest="levels", help="Number of vertical levels to use in the output file. Defaults to the number in the input file", metavar="FILENAME") -parser.add_option("-v", "--vert", dest="vertMethod", help="Method of vertical layer spacing: uniform, glimmer. Glimmer spacing follows Eq. 35 of Rutt, I. C., M. Hagdorn, N. R. J. Hulton, and A. J. Payne (2009), The Glimmer community ice sheet model, J. Geophys. Res., 114, F02004, doi:10.1029/2008JF001015", default='glimmer', metavar="FILENAME") -parser.add_option("--beta", dest="beta", action="store_true", help="DEPRECATED") -parser.add_option("--effecpress", dest="effecpress", action="store_true", help="DEPRECATED") -parser.add_option("--diri", dest="dirichlet", action="store_true", help="Use this flag to include the fields 'dirichletVelocityMask', 'uReconstructX', 'uReconstructY' needed for specifying Dirichlet velocity boundary conditions in the resulting file.") -parser.add_option("--thermal", dest="thermal", action="store_true", help="Use this flag to include the fields 'temperature', 'surfaceAirTemperature', 'basalHeatFlux' needed for specifying thermal initial conditions in the resulting file.") -parser.add_option("--hydro", dest="hydro", action="store_true", help="Use this flag to include the fields 'waterThickness', 'tillWaterThickness', 'basalMeltInput', 'externalWaterInput', 'frictionAngle', 'waterPressure', 'waterFluxMask' needed for specifying hydro initial conditions in the resulting file.") -parser.add_option("--obs", dest="obs", action="store_true", help="Use this flag to include the observational fields observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty needed doing optimizations constrained by obs velocities.") -options, args = parser.parse_args() - -if not options.fileinName: - print("No input filename 
specified, so using 'grid.nc'.") - options.fileinName = 'grid.nc' -else: - print("Input file is: {}".format(options.fileinName)) -if not options.fileoutName: - print("No output filename specified, so using 'landice_grid.nc'.") - options.fileoutName = 'landice_grid.nc' -print('') # make a space in stdout before further output - -# Get the input file -filein = Dataset(options.fileinName,'r') - -# Define the new file to be output -fileout = Dataset(options.fileoutName,"w",format=filein.file_format) - - -# ============================================ -# Copy over all of the netcdf global attributes -# ============================================ -# Do this first as doing it last is slow for big files since adding -# attributes forces the contents to get reorganized. -print("---- Copying global attributes from input file to output file ----") -for name in filein.ncattrs(): - # sphere radius needs to be set to that of the earth if on a sphere - if name == 'sphere_radius' and getattr(filein, 'on_a_sphere') == "YES ": - setattr(fileout, 'sphere_radius', sphere_radius) - print('Set global attribute sphere_radius = {}'.format(sphere_radius)) - elif name =='history': - # Update history attribute of netCDF file - newhist = '\n'.join([getattr(filein, 'history'), ' '.join(sys.argv[:]) ] ) - setattr(fileout, 'history', newhist ) - else: - # Otherwise simply copy the attr - setattr(fileout, name, getattr(filein, name) ) - print('Copied global attribute {} = {}'.format(name, getattr(filein, name))) - -# Update history attribute of netCDF file if we didn't above -if not hasattr(fileout, 'history'): - setattr(fileout, 'history', sys.argv[:] ) - -fileout.sync() -print('') # make a space in stdout before further output - - -# ============================================ -# Copy over all the dimensions to the new file -# ============================================ -# Note: looping over dimensions seems to result in them being written in seemingly random order. 
-# I don't think this matters but it is not aesthetically pleasing. -# It may be better to list them explicitly as I do for the grid variables, -# but this way ensures they all get included and is easier. -# Note: The UNLIMITED time dimension will return a dimension value of None with Scientific.IO. This is what is supposed to happen. See below for how to deal with assigning values to a variable with a unlimited dimension. Special handling is needed with the netCDF module. -print("---- Copying dimensions from input file to output file ----") -for dim in filein.dimensions.keys(): - if dim == 'nTracers': - pass # Do nothing - we don't want this dimension - elif (dim == 'nVertInterfaces'): - pass # Do nothing - this dimension will be handled below - else: # Copy over all other dimensions - if dim == 'Time': - dimvalue = None # netCDF4 won't properly get this with the command below (you need to use the isunlimited method) - elif (dim == 'nVertLevels'): - if options.levels is None: - # If nVertLevels is in the input file, and a value for it was not - # specified on the command line, then use the value from the file (do nothing here) - print("Using nVertLevels from the intput file: {}".format(len(filein.dimensions[dim]))) - dimvalue = len(filein.dimensions[dim]) - else: - # if nVertLevels is in the input file, but a value WAS specified - # on the command line, then use the command line value - print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) - dimvalue = int(options.levels) - else: - dimvalue = len(filein.dimensions[dim]) - fileout.createDimension(dim, dimvalue) -# There may be input files that do not have nVertLevels specified, in which case -# it has not been added to the output file yet. Treat those here. -if 'nVertLevels' not in fileout.dimensions: - if options.levels is None: - print("nVertLevels not in input file and not specified. 
Using default value of 10.") - fileout.createDimension('nVertLevels', 10) - else: - print("Using nVertLevels specified on the command line: {}".format(int(options.levels))) - fileout.createDimension('nVertLevels', int(options.levels)) -# Also create the nVertInterfaces dimension, even if none of the variables require it. -fileout.createDimension('nVertInterfaces', len(fileout.dimensions['nVertLevels']) + 1) # nVertInterfaces = nVertLevels + 1 -print('Added new dimension nVertInterfaces to output file with value of {}.'.format(len(fileout.dimensions['nVertInterfaces']))) - -fileout.sync() -print('Finished creating dimensions in output file.\n') # include an extra blank line here - -# ============================================ -# Copy over all of the required grid variables to the new file -# ============================================ -print("Beginning to copy mesh variables to output file.") -vars2copy = ['latCell', 'lonCell', 'xCell', 'yCell', 'zCell', 'indexToCellID', 'latEdge', 'lonEdge', 'xEdge', 'yEdge', 'zEdge', 'indexToEdgeID', 'latVertex', 'lonVertex', 'xVertex', 'yVertex', 'zVertex', 'indexToVertexID', 'cellsOnEdge', 'nEdgesOnCell', 'nEdgesOnEdge', 'edgesOnCell', 'edgesOnEdge', 'weightsOnEdge', 'dvEdge', 'dcEdge', 'angleEdge', 'areaCell', 'areaTriangle', 'cellsOnCell', 'verticesOnCell', 'verticesOnEdge', 'edgesOnVertex', 'cellsOnVertex', 'kiteAreasOnVertex'] -# Add these optional fields if they exist in the input file -for optionalVar in ['meshDensity', 'gridSpacing', 'cellQuality', 'triangleQuality', 'triangleAngleQuality', 'obtuseTriangle']: - if optionalVar in filein.variables: - vars2copy.append(optionalVar) - -for varname in vars2copy: - print("- ", end='') -print("|") -for varname in vars2copy: - thevar = filein.variables[varname] - datatype = thevar.dtype - newVar = fileout.createVariable(varname, datatype, thevar.dimensions) - if filein.on_a_sphere == "YES ": - if varname in ('xCell', 'yCell', 'zCell', 'xEdge', 'yEdge', 'zEdge', 'xVertex', 
'yVertex', 'zVertex', 'dvEdge', 'dcEdge'): - newVar[:] = thevar[:] * sphere_radius / filein.sphere_radius - elif varname in ('areaCell', 'areaTriangle', 'kiteAreasOnVertex'): - newVar[:] = thevar[:] * (sphere_radius / filein.sphere_radius)**2 - else: - newVar[:] = thevar[:] - else: # not on a sphere - newVar[:] = thevar[:] - del newVar, thevar - sys.stdout.write("* "); sys.stdout.flush() -fileout.sync() -print("|") -print("Finished copying mesh variables to output file.\n") - -# ============================================ -# Create the land ice variables (all the shallow water vars in the input file can be ignored) -# ============================================ -nVertLevels = len(fileout.dimensions['nVertLevels']) -datatype = filein.variables['xCell'].dtype # Get the datatype for double precision float -datatypeInt = filein.variables['indexToCellID'].dtype # Get the datatype for integers -# Note: it may be necessary to make sure the Time dimension has size 1, rather than the 0 it defaults to. For now, letting it be 0 which seems to be fine. - -# layerThicknessFractions -layerThicknessFractions = fileout.createVariable('layerThicknessFractions', datatype, ('nVertLevels', )) -layerThicknessFractionsData = numpy.zeros(layerThicknessFractions.shape) -# Assign default values to layerThicknessFractions. By default they will be uniform fractions. Users can modify them in a subsequent step, but doing this here ensures the most likely values are already assigned. (Useful for e.g. setting up Greenland where the state variables are copied over but the grid variables are not modified.) 
-if options.vertMethod == 'uniform': - layerThicknessFractionsData[:] = 1.0 / nVertLevels -elif options.vertMethod == 'glimmer': - nInterfaces = nVertLevels + 1 - layerInterfaces = numpy.zeros((nInterfaces,)) - for k in range(nInterfaces): - layerInterfaces[k] = 4.0/3.0 * (1.0 - ((k+1.0-1.0)/(nInterfaces-1.0) + 1.0)**-2) - for k in range(nVertLevels): - layerThicknessFractionsData[k] = layerInterfaces[k+1] - layerInterfaces[k] - print("Setting layerThicknessFractions to: {}".format(layerThicknessFractionsData)) -else: - sys.exit('Unknown method for vertical spacing method (--vert): '+options.vertMethod) - -# explictly specify layer fractions -#layerThicknessFractionsData[:] = [0.1663,0.1516,0.1368,0.1221,0.1074,0.0926,0.0779,0.0632,0.0484,0.0337] - -layerThicknessFractions[:] = layerThicknessFractionsData[:] - - -# With Scientific.IO.netCDF, entries are appended along the unlimited dimension one at a time by assigning to a slice. -# Therefore we need to assign to time level 0, and what we need to assign is a zeros array that is the shape of the new variable, exluding the time dimension! -newvar = fileout.createVariable('thickness', datatype, ('Time', 'nCells')) -newvar[0,:] = numpy.zeros( newvar.shape[1:] ) -# These landice variables are stored in the mesh currently, and therefore do not have a time dimension. -# It may make sense to eventually move them to state. -newvar = fileout.createVariable('bedTopography', datatype, ('Time', 'nCells')) -newvar[:] = numpy.zeros(newvar.shape) -newvar = fileout.createVariable('sfcMassBal', datatype, ('Time', 'nCells')) -newvar[:] = numpy.zeros(newvar.shape) -newvar = fileout.createVariable('floatingBasalMassBal', datatype, ('Time', 'nCells')) -newvar[:] = numpy.zeros(newvar.shape) -print('Added default variables: thickness, temperature, bedTopography, sfcMassBal, floatingBasalMassBal') - -newvar = fileout.createVariable('beta', datatype, ('Time', 'nCells')) -newvar[:] = 1.0e8 # Give a default beta that won't have much sliding. 
-print('Added variable: beta') - -newvar = fileout.createVariable('muFriction', datatype, ('Time', 'nCells')) -newvar[:] = 1.0e8 # Give a default mu that won't have much sliding. -print('Added variable: muFriction') - -newvar = fileout.createVariable('effectivePressure', datatype, ('Time', 'nCells')) -newvar[:] = 1.0 # Give a default effective pressure of 1.0 so that, for the linear sliding law, beta = mu*effecpress = mu. -print('Added variable: effectivePressure') - -newvar = fileout.createVariable('stiffnessFactor', datatype, ('Time', 'nCells')) -newvar[:] = 1.0 # Give default value -print('Added variable: stiffnessFactor') - -newvar = fileout.createVariable('eigencalvingParameter', datatype, ('Time', 'nCells')) -newvar[:] = 3.14e16 # Give default value for eigencalvingParameter -print('Added variable: eigencalvingParameter') - -newvar = fileout.createVariable('groundedVonMisesThresholdStress', datatype, ('Time', 'nCells')) -newvar[:] = 1.0e6 # Give default value -print('Added variable: groundedVonMisesThresholdStress') - -newvar = fileout.createVariable('floatingVonMisesThresholdStress', datatype, ('Time', 'nCells')) -newvar[:] = 1.0e6 # Give default value -print('Added variable: floatingVonMisesThresholdStress') - -newvar = fileout.createVariable('iceMask', datatype, ('Time', 'nCells')) -newvar[:] = 0 -print('Added variable: iceMask') - -if options.dirichlet: - newvar = fileout.createVariable('dirichletVelocityMask', datatypeInt, ('Time', 'nCells', 'nVertInterfaces')) - newvar[:] = 0 # default: no Dirichlet b.c. 
- newvar = fileout.createVariable('uReconstructX', datatype, ('Time', 'nCells', 'nVertInterfaces',)) - newvar[:] = 0.0 - newvar = fileout.createVariable('uReconstructY', datatype, ('Time', 'nCells', 'nVertInterfaces',)) - newvar[:] = 0.0 - print('Added optional dirichlet variables: dirichletVelocityMask, uReconstructX, uReconstructY') - -if options.thermal: - newvar = fileout.createVariable('temperature', datatype, ('Time', 'nCells', 'nVertLevels')) - newvar[:] = 273.15 # Give default value for temperate ice - newvar = fileout.createVariable('surfaceAirTemperature', datatype, ('Time', 'nCells')) - newvar[:] = 273.15 # Give default value for temperate ice - newvar = fileout.createVariable('basalHeatFlux', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 # Default to none (W/m2) - print('Added optional thermal variables: temperature, surfaceAirTemperature, basalHeatFlux') - -if options.hydro: - newvar = fileout.createVariable('waterThickness', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('tillWaterThickness', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('basalMeltInput', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('externalWaterInput', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('frictionAngle', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('waterPressure', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('waterFluxMask', 'i', ('Time', 'nEdges')) - newvar[:] = 0.0 - print('Added optional hydro variables: waterThickness, tillWaterThickness, meltInput, frictionAngle, waterPressure, waterFluxMask') - -if options.obs: - newvar = fileout.createVariable('observedSurfaceVelocityX', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('observedSurfaceVelocityY', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = 
fileout.createVariable('observedSurfaceVelocityUncertainty', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('observedThicknessTendency', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('observedThicknessTendencyUncertainty', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - newvar = fileout.createVariable('thicknessUncertainty', datatype, ('Time', 'nCells')) - newvar[:] = 0.0 - print('Added optional velocity optimization variables: observedSurfaceVelocityX, observedSurfaceVelocityY, observedSurfaceVelocityUncertainty, observedThicknessTendency, observedThicknessTendencyUncertainty, thicknessUncertainty') - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -if hasattr(fileout, 'history'): - newhist = '\n'.join([thiscommand, getattr(fileout, 'history')]) -else: - newhist = thiscommand -setattr(fileout, 'history', newhist ) - -print("Completed creating land ice variables in new file. Now syncing to file.") -fileout.sync() - -filein.close() -fileout.close() - -print('\n** Successfully created {}.**'.format(options.fileoutName)) diff --git a/landice/mesh_tools_li/define_cullMask.py b/landice/mesh_tools_li/define_cullMask.py deleted file mode 100755 index bea6c5c86..000000000 --- a/landice/mesh_tools_li/define_cullMask.py +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/env python -""" -Script for adding a field named cullMask to an MPAS land ice grid for use with the MpasCellCuller tool that actually culls the unwanted cells. 
-Matt Hoffman, February 28, 2013 -""" - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import sys -import numpy as np -from optparse import OptionParser -from netCDF4 import Dataset as NetCDFFile -from datetime import datetime - - -print("** Gathering information.") -parser = OptionParser() -parser.add_option("-f", "--file", dest="file", help="grid file to modify; default: landice_grid.nc", metavar="FILE") -parser.add_option("-m", "--method", dest="method", help="method to use for marking cells to cull. Supported methods: 'noIce', 'numCells', 'distance', 'radius', 'edgeFraction'", metavar="METHOD") -parser.add_option("-n", "--numCells", dest="numCells", default=5, help="number of cells to keep beyond ice extent", metavar="NUM") -parser.add_option("-d", "--distance", dest="distance", default=50, help="numeric value to use for the various methods: distance method->distance (km), radius method->radius (km), edgeFraction method->fraction of width or height", metavar="DIST") -parser.add_option("-p", "--plot", dest="makePlot", help="Include to have the script generate a plot of the resulting mask, default=false", default=False, action="store_true") -options, args = parser.parse_args() - -if not options.file: - print("No grid filename provided. 
Using landice_grid.nc.") - options.file = "landice_grid.nc" - -if not options.method: - sys.exit("ERROR: No method selected for choosing cells to mark for culling") -else: - maskmethod = options.method - -try: - f = NetCDFFile(options.file,'r+') -except: - sys.exit('Error loading grid file.') - - -xCell = f.variables['xCell'][:] -yCell = f.variables['yCell'][:] -nCells = len(f.dimensions['nCells']) - - -# -- Get needed fields from the file -- - -cullCell = np.zeros((nCells, ), dtype=np.int8) # Initialize to cull no cells - - -thicknessMissing = True -try: - thickness = f.variables['thickness'][0,:] - print('Using thickness field at time 0') - thicknessMissing = False -except: - print("The field 'thickness' is not available. Some culling methods will not work.") - - -# ===== Various methods for defining the mask ==== - -# ========= -# only keep cells with ice -if maskmethod == 'noIce': - print("Method: remove cells without ice") - if thicknessMissing: - sys.exit("Unable to perform 'numCells' method because thickness field was missing.") - - cullCell[thickness == 0.0] = 1 - -# ========= -# add a buffer of X cells around the ice -elif maskmethod == 'numCells': - print("Method: remove cells beyond a certain number of cells from existing ice") - - if thicknessMissing: - sys.exit("Unable to perform 'numCells' method because thickness field was missing.") - - buffersize=int(options.numCells) # number of cells to expand - print("Using a buffer of {} cells".format(buffersize)) - - keepCellMask = np.copy(cullCell[:]) - keepCellMask[:] = 0 - cellsOnCell = f.variables['cellsOnCell'][:] - nEdgesOnCell = f.variables['nEdgesOnCell'][:] - - # mark the cells with ice first - keepCellMask[thickness > 0.0] = 1 - print('Num of cells with ice: {}'.format(sum(keepCellMask))) - - for i in range(buffersize): - print('Starting buffer loop {}'.format(i+1)) - keepCellMaskNew = np.copy(keepCellMask) # make a copy to edit that can be edited without changing the original - ind = 
np.nonzero(keepCellMask == 0)[0] - for i in range(len(ind)): - iCell = ind[i] - neighbors = cellsOnCell[iCell,:nEdgesOnCell[iCell]]-1 - keep=False - for n in neighbors: - if n >= 0: - if keepCellMask[n] == 1: - keepCellMaskNew[iCell] = 1 - keepCellMask = np.copy(keepCellMaskNew) # after we've looped over all cells assign the new mask to the variable we need (either for another loop around the domain or to write out) - print(f'Num of cells to keep: {keepCellMask.sum()}') - - # Now convert the keepCellMask to the cullMask - cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull - -# ========= -# remove cells beyond a certain distance of ice extent -elif maskmethod == 'distance': - - print("Method: remove cells beyond a certain distance from existing ice") - - if thicknessMissing: - sys.exit("Unable to perform 'numCells' method because thickness field was missing.") - - dist=float(options.distance) - print("Using a buffer distance of {} km".format(dist)) - dist = dist * 1000.0 # convert to m - - keepCellMask = np.copy(cullCell[:]) - keepCellMask[:] = 0 - cellsOnCell = f.variables['cellsOnCell'][:] - nEdgesOnCell = f.variables['nEdgesOnCell'][:] - xCell = f.variables['xCell'][:] - yCell = f.variables['yCell'][:] - - # mark the cells with ice first - keepCellMask[thickness > 0.0] = 1 - print('Num of cells with ice: {}'.format(sum(keepCellMask))) - - # find list of margin cells - iceCells = np.nonzero(keepCellMask == 1)[0] - marginMask = np.zeros((nCells, ), dtype=np.int8) - for i in range(len(iceCells)): - iCell = iceCells[i] - for neighbor in cellsOnCell[iCell,:nEdgesOnCell[iCell]]-1: # the -1 converts from the fortran indexing in the variable to python indexing - if thickness[neighbor] == 0.0: - marginMask[iCell] = 1 - continue # stop searching neighbors - - # loop over margin cells - marginCells = np.nonzero(marginMask == 1)[0] - for i in range(len(marginCells)): - iCell = marginCells[i] - # for each margin cell, find all cells within 
specified distance - ind = np.nonzero(((xCell-xCell[iCell])**2 + (yCell-yCell[iCell])**2)**0.5 < dist)[0] - keepCellMask[ind] = 1 - - print(f'Num of cells to keep: {keepCellMask.sum()}') - - # Now convert the keepCellMask to the cullMask - cullCell[:] = np.absolute(keepCellMask[:]-1) # Flip the mask for which ones to cull - - -# ========= -# cut out beyond some radius (good for the dome) -elif maskmethod == 'radius': - dist=float(options.distance) - print("Method: remove cells beyond a radius of {} km from center of mesh".format(dist)) - xc = (xCell.max()-xCell.min())/2.0 + xCell.min() - yc = (yCell.max()-yCell.min())/2.0 + yCell.min() - ind = np.nonzero( ( (xCell[:]-xc)**2 + (yCell[:]-yc)**2)**0.5 > dist*1000.0 ) - cullCell[ind] = 1 - -# ========= -# cut off some fraction of the height/width on all 4 sides - useful for cleaning up a mesh from periodic_general -elif maskmethod == 'edgeFraction': - frac=float(options.distance) - print("Method: remove a fraction from all 4 edges of {}".format(frac)) - if frac>=0.5: - sys.exit("ERROR: fraction cannot be >=0.5.") - if frac<0.0: - sys.exit("ERROR: fraction cannot be <0.") - - cullCell[:] = 0 - width = xCell.max()-xCell.min() - height = yCell.max()-yCell.min() - ind = np.nonzero( xCell[:] < (xCell.min() + width*frac) ) - cullCell[ind] = 1 - ind = np.nonzero( xCell[:] > (xCell.max() - width*frac) ) - cullCell[ind] = 1 - ind = np.nonzero( yCell[:] < (yCell.min() + height*frac) ) - cullCell[ind] = 1 - ind = np.nonzero( yCell[:] > (yCell.max() - height*frac) ) - cullCell[ind] = 1 - -# ========= -else: - sys.exit("ERROR: no valid culling method selected.") -# ========= - - -print('Num of cells to cull: {}'.format(sum(cullCell[:]))) - -# ========= -# Try to add the new variable -if 'cullCell' not in f.variables: - datatype = f.variables['indexToCellID'].dtype # Get the datatype for integer - f.createVariable('cullCell', datatype, ('nCells',)) -f.variables['cullCell'][:] = cullCell - -# Update history attribute of netCDF file 
-thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -if hasattr(f, 'history'): - newhist = '\n'.join([thiscommand, getattr(f, 'history')]) -else: - newhist = thiscommand -setattr(f, 'history', newhist ) - - - -# --- make a plot only if requested --- -if options.makePlot: - import matplotlib.pyplot as plt - fig = plt.figure(1, facecolor='w') - ax = fig.add_subplot(111, aspect='equal') - plt.scatter(xCell[:], yCell[:], 50, cullCell[:], edgecolors='none') #, vmin=minval, vmax=maxval) - plt.colorbar() - plt.draw() - plt.show() - -f.close() -print("cullMask generation complete.") - - diff --git a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py b/landice/mesh_tools_li/interpolate_to_mpasli_grid.py deleted file mode 100755 index 2b83a86b9..000000000 --- a/landice/mesh_tools_li/interpolate_to_mpasli_grid.py +++ /dev/null @@ -1,807 +0,0 @@ -#!/usr/bin/env python -''' -Interpolate fields from an input file to a pre-existing MPAS-LI grid. - -The input file can either be CISM format or MPASLI format. - -For CISM input files, three interpolation methods are supported: -* a built-in bilinear interpolation method -* a built-in barycentric interpolation method (nearest neighbor is used for extrapolation regions) -* using weights generated by ESMF - -For MPAS input files only barycentric interpolation is supported. - -The ESMF interpolation method can be used to interpolate from history files of other E3SM components -by using the ESMF weight mapping files used in the E3SM coupler. You will likely need to add your -own variable mappings in the dictionary for the 'other' filetype at the end of this script. - -Multiple time levels can be interpolated using the --timestart and --timeend options. -Default is to copy the first time level from the source file to the first time level of -the destination file. If multiple time levels are used, they are translated directly -from the source to the destination. 
NOTE: There is no processing of actual time stamps! -NOTE: xtime is NOT copied and must be copied manually, if desired! -''' - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import sys -import numpy as np -import netCDF4 -import argparse -import math -from collections import OrderedDict -import scipy.spatial -import scipy.sparse -import time -from datetime import datetime - - -print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") -parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) -parser.description = __doc__ -parser.add_argument("-s", "--source", dest="inputFile", help="name of source (input) file. Can be either CISM format or MPASLI format.", default="cism.nc", metavar="FILENAME") -parser.add_argument("-d", "--destination", dest="mpasFile", help="name of destination file on which to interpolate fields. This needs to be MPASLI format with desired fields already existing.", default="landice_grid.nc", metavar="FILENAME") -parser.add_argument("-m", "--method", dest="interpType", help="interpolation method to use. b=bilinear, d=barycentric, e=ESMF, n=nearest neighbor", default="b", metavar="METHOD") -parser.add_argument("-w", "--weight", dest="weightFile", help="ESMF weight file to input. 
Only used by ESMF interpolation method", metavar="FILENAME") -parser.add_argument("-t", "--thickness-only", dest="thicknessOnly", action="store_true", default=False, help="Only interpolate thickness and ignore all other variables (useful for setting up a cullMask)") -parser.add_argument("--timestart", dest="timestart", type=int, default=0, help="time level in input file to start from (0-based)") -parser.add_argument("--timeend", dest="timeend", type=int, default=0, help="time level in input file to end with (0-based, inclusive)") -parser.add_argument("-v", "--variables", dest="vars", nargs='*', type=str, default="all", help="Variables in the destination mesh for which interpolation should be attempted. Interpolation will only actually occur if the requested field 1) is in the dictionary of supported fields at the end of this script and 2) exists in both the source and destination files. 'all' can be used to attempt to interpolate all fields present in the destination mesh. Provide a space-delimited list.") -args = parser.parse_args() - - -print(" Source file: {}".format(args.inputFile)) -print(" Destination MPASLI file to be modified: {}".format(args.mpasFile)) - -print(" Interpolation method to be used: {}".format(args.interpType)) -print(" (b=bilinear, d=barycentric, e=esmf)") - -if args.weightFile and args.interpType == 'e': - print(" Interpolation will be performed using ESMF-weights method, where possible, using weights file: {}".format(args.weightFile)) - #---------------------------- - # Get weights from file - wfile = netCDF4.Dataset(args.weightFile, 'r') - S = wfile.variables['S'][:] - col = wfile.variables['col'][:] - row = wfile.variables['row'][:] - n_a = len(wfile.dimensions['n_a']) - n_b = len(wfile.dimensions['n_b']) - dst_frac = wfile.variables['frac_b'][:] - wfile.close() - #---------------------------- - - # convert to SciPy Compressed Sparse Row (CSR) matrix format - weights_csr = scipy.sparse.coo_array((S, (row - 1, col - 1)), shape=(n_b, 
n_a)).tocsr() - -print('') # make a space in stdout before further output - - - -#---------------------------- -#---------------------------- -# Define needed functions -#---------------------------- -#---------------------------- - -def ESMF_interp(sourceField): - # Interpolates from the sourceField to the destinationField using ESMF weights - destinationField = np.zeros(xCell.shape) # fields on cells only - try: - # Convert the source field into the SciPy Compressed Sparse Row matrix format - # This needs some reshaping to get the matching dimensions - source_csr = scipy.sparse.csr_matrix(sourceField.flatten()[:, np.newaxis]) - # Use SciPy CSR dot product - much faster than iterating over elements of the full matrix - destinationField = weights_csr.dot(source_csr).toarray().squeeze() - # For conserve remapping, need to normalize by destination area fraction - # It should be safe to do this for other methods - ind = np.where(dst_frac > 0.0)[0] - destinationField[ind] /= dst_frac[ind] - except: - print('error in ESMF_interp') - return destinationField - -#---------------------------- - -def BilinearInterp(Value, gridType): - # Calculate bilinear interpolation of Value field from x, y to new ValueCell field (return value) at xCell, yCell - # This assumes that x, y, Value are regular CISM style grids and xCell, yCell, ValueCell are 1-D unstructured MPAS style grids - - ValueCell = np.zeros(xCell.shape) - - if gridType == 'x0': - x = x0; y = y0 - elif gridType == 'x1': - x = x1; y = y1 - else: - sys.exit('Error: unknown CISM grid type specified.') - dx = x[1] - x[0] - dy = y[1] - y[0] - for i in range(len(xCell)): - # Calculate the CISM grid cell indices (these are the lower index) - xgrid = int(math.floor( (xCell[i]-x[0]) / dx )) - if xgrid >= len(x) - 1: - xgrid = len(x) - 2 - elif xgrid < 0: - xgrid = 0 - ygrid = int(math.floor( (yCell[i]-y[0]) / dy )) - if ygrid >= len(y) - 1: - ygrid = len(y) - 2 - elif ygrid < 0: - ygrid = 0 - #print(xgrid, ygrid, i) - 
ValueCell[i] = Value[ygrid,xgrid] * (x[xgrid+1] - xCell[i]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ - Value[ygrid+1,xgrid] * (x[xgrid+1] - xCell[i]) * (yCell[i] - y[ygrid]) / (dx * dy) + \ - Value[ygrid,xgrid+1] * (xCell[i] - x[xgrid]) * (y[ygrid+1] - yCell[i]) / (dx * dy) + \ - Value[ygrid+1,xgrid+1] * (xCell[i] - x[xgrid]) * (yCell[i] - y[ygrid]) / (dx * dy) - return ValueCell - -#---------------------------- - -def delaunay_interp_weights(xy, uv, d=2): - ''' - xy = input x,y coords - uv = output (MPSALI) x,y coords - ''' - - #print("scipy version=", scipy.version.full_version) - if xy.shape[0] > 2**24-1: - print("WARNING: The source file contains more than 2^24-1 (16,777,215) points due to a limitation in older versions of Qhull (see: https://mail.scipy.org/pipermail/scipy-user/2015-June/036598.html). Delaunay creation may fail if Qhull being linked by scipy.spatial is older than v2015.0.1 2015/8/31.") - - tri = scipy.spatial.Delaunay(xy) - print(" Delaunay triangulation complete.") - simplex = tri.find_simplex(uv) - print(" find_simplex complete.") - vertices = np.take(tri.simplices, simplex, axis=0) - print(" identified vertices.") - temp = np.take(tri.transform, simplex, axis=0) - print(" np.take complete.") - delta = uv - temp[:, d] - bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta) - print(" calculating bary complete.") - wts = np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True))) - - # Now figure out if there is any extrapolation. - # Find indices to points of output file that are outside of convex hull of input points - outsideInd = np.nonzero(tri.find_simplex(uv)<0) - outsideCoords = uv[outsideInd] - #print(outsideInd) - nExtrap = len(outsideInd[0]) - if nExtrap > 0: - print(" Found {} points requiring extrapolation. 
Using nearest neighbor extrapolation for those.".format(nExtrap)) - - # Now find nearest neighbor for each outside point - # Use KDTree of input points - tree = scipy.spatial.cKDTree(xy) - - return vertices, wts, outsideInd, tree - -#---------------------------- - -def nn_interp_weights(xy, uv, d=2): - ''' - xy = input x,y coords - uv = output (MPSALI) x,y coords - Note: could separate out building tree and interpolation for efficiency if many fields need to be processed - ''' - tree = scipy.spatial.cKDTree(xy) - dist,idx = tree.query(uv, k=1) # k is the number of nearest neighbors. -# outfield = values.flatten()[idx] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - return idx -#---------------------------- - -def delaunay_interpolate(values, gridType): - if gridType == 'x0': - vtx = vtx0; wts = wts0 - tree = treex0 - outsideInd = outsideIndx0 - elif gridType == 'x1': - vtx = vtx1; wts = wts1 - outsideInd = outsideIndx1 - tree = treex1 - elif gridType == 'cell': - vtx = vtCell; wts = wtsCell - outsideInd = outsideIndcell - tree = treecell - else: - sys.exit('Error: unknown input file grid type specified.') - - outfield = np.einsum('nj,nj->n', np.take(values, vtx), wts) - - # Now apply nearest neighbor to points outside convex hull - # We could have this enabled/disabled with a command line option, but for now it will always be done. - # Note: the barycentric interp applied above could be restricted to the points inside the convex hull - # instead of being applied to ALL points as is currently implemented. However it is assumed that - # "redoing" the outside points has a small performance cost because there generally should be few such points - # and the implementation is much simpler this way. 
- outsideCoord = mpasXY[outsideInd,:] - if len(outsideInd) > 0: - dist,idx = tree.query(outsideCoord, k=1) # k is the number of nearest neighbors. Could crank this up to 2 (and then average them) with some fiddling, but keeping it simple for now. - outfield[outsideInd] = values.flatten()[idx] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - - return outfield - -#---------------------------- - -def interpolate_field(MPASfieldName): - - if fieldInfo[MPASfieldName]['gridType'] == 'x0' and args.interpType == 'e': - assert "This CISM field is on the staggered grid, and currently this script does not support a second ESMF weight file for the staggered grid." - - InputFieldName = fieldInfo[MPASfieldName]['InputName'] - if filetype=='cism': - if 'time' in inputFile.variables[InputFieldName].dimensions: - InputField = inputFile.variables[InputFieldName][timelev,:,:] - else: - if timelev>0: - sys.exit("Error: --timestart and/or --timeend were specified but the required time dimension of 'time' was not found in the source file.") - InputField = inputFile.variables[InputFieldName][:,:] - elif filetype=='mpas': - if 'Time' in inputFile.variables[InputFieldName].dimensions: - InputField = inputFile.variables[InputFieldName][timelev,:] - else: - if timelev>0: - sys.exit("Error: --timestart and/or --timeend were specified but the required time dimension of 'Time' was not found in the source file.") - InputField = inputFile.variables[InputFieldName][:] - elif filetype=='other': - if 'time' in inputFile.variables[InputFieldName].dimensions: - InputField = inputFile.variables[InputFieldName][timelev,:,:] - else: - if timelev>0: - sys.exit("Error: --timestart and/or --timeend were specified but the required time dimension of 'time' was not found in the source file.") - InputField = 
inputFile.variables[InputFieldName][:,:] - - print(' Input field {} min/max: {} {}'.format(InputFieldName, InputField.min(), InputField.max())) - - # Call the appropriate routine for actually doing the interpolation - if args.interpType == 'b': - print(" ...Interpolating to {} using built-in bilinear method...".format(MPASfieldName)) - MPASfield = BilinearInterp(InputField, fieldInfo[MPASfieldName]['gridType']) - elif args.interpType == 'd': - print(" ...Interpolating to {} using barycentric method...".format(MPASfieldName)) - MPASfield = delaunay_interpolate(InputField, fieldInfo[MPASfieldName]['gridType']) - elif args.interpType == 'n': - print(" ...Interpolating to {} using nearest neighbor method...".format(MPASfieldName)) - if fieldInfo[MPASfieldName]['gridType'] == 'x0': - MPASfield = InputField.flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - elif fieldInfo[MPASfieldName]['gridType'] == 'x1': - MPASfield = InputField.flatten()[nn_idx_x1] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - elif fieldInfo[MPASfieldName]['gridType'] == 'cell': - MPASfield = InputField.flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. 
- elif args.interpType == 'e': - print(" ...Interpolating to {} using ESMF-weights method...".format(MPASfieldName)) - MPASfield = ESMF_interp(InputField) - else: - sys.exit('ERROR: Unknown interpolation method specified') - - print(' interpolated MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) - - if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: - MPASfield *= fieldInfo[MPASfieldName]['scalefactor'] - print(' scaled MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) - if fieldInfo[MPASfieldName]['offset'] != 0.0: - MPASfield += fieldInfo[MPASfieldName]['offset'] - print(' offset MPAS {} min/max: {} {}'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) - - - return MPASfield - - del InputField, MPASfield - -#---------------------------- - -def interpolate_field_with_layers(MPASfieldName): - - if fieldInfo[MPASfieldName]['gridType'] == 'x0' and args.interpType == 'e': - assert "This CISM field is on the staggered grid, and currently this script does not support a second ESMF weight file for the staggered grid." 
- - InputFieldName = fieldInfo[MPASfieldName]['InputName'] - if filetype=='cism': - if 'time' in inputFile.variables[InputFieldName].dimensions: - InputField = inputFile.variables[InputFieldName][timelev,:,:,:] - else: - if timelev>0: - sys.exit("Error: --timestart and/or --timeend were specified but the required time dimension of 'time' was not found in the source file.") - InputField = inputFile.variables[InputFieldName][:,:,:] - inputVerticalDimSize = InputField.shape[0] # vertical index is the first (since we've eliminated time already) - layerFieldName = inputFile.variables[InputFieldName].dimensions[1] # second dimension is the vertical one - get the name of it - input_layers = inputFile.variables[layerFieldName][:] - elif filetype=='mpas': - if 'Time' in inputFile.variables[InputFieldName].dimensions: - InputField = inputFile.variables[InputFieldName][timelev,:,:] - else: - if timelev>0: - sys.exit("Error: --timestart and/or --timeend were specified but the required time dimension of 'Time' was not found in the source file.") - InputField = inputFile.variables[InputFieldName][:,:] - inputVerticalDimSize = InputField.shape[1] # vertical index is the second (since we've eliminated time already) - layerThicknessFractions = inputFile.variables['layerThicknessFractions'][:] - # build MPAS sigma levels at center of each layer - input_layers = np.zeros( (inputVerticalDimSize,) ) - if inputVerticalDimSize == len(layerThicknessFractions): - print(" Using layer centers for the vertical coordinate of this field.") - input_layers[0] = layerThicknessFractions[0] * 0.5 - for k in range(1,inputVerticalDimSize): - input_layers[k] = input_layers[k-1] + 0.5 * layerThicknessFractions[k-1] + 0.5 * layerThicknessFractions[k] - layerFieldName = '(sigma levels calculated from layerThicknessFractions)' - elif inputVerticalDimSize == len(layerThicknessFractions)+1: - print(" Using layer interfaces for the vertical coordinate of this field.") - input_layers[0] = 0.0 - for k in 
range(1,inputVerticalDimSize): - input_layers[k] = input_layers[k-1] + layerThicknessFractions[k-1] - else: - sys.exit("\nUnknown vertical dimension for this variable source file.") - else: - sys.exit("ERROR: Fields with vertical layers can only be interpolated using the barycentric or bilinear methods.") - - # create array for interpolated source field at all layers - mpas_grid_input_layers = np.zeros( (inputVerticalDimSize, nCells) ) # make it the size of the CISM vertical layers, but the MPAS horizontal locations - - for z in range(inputVerticalDimSize): - if filetype=='cism': - print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[z,:,:].min(), InputField[z,:,:].max())) - elif filetype=='mpas': - print(' Input layer {}, layer {} min/max: {} {}'.format(z, InputFieldName, InputField[:,z].min(), InputField[:,z].max())) - # Call the appropriate routine for actually doing the interpolation - if args.interpType == 'b': - print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in bilinear method...".format(z)) - mpas_grid_input_layers[z,:] = BilinearInterp(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) - elif args.interpType == 'd': - print(" ...Layer {}, Interpolating this layer to MPAS grid using built-in barycentric method...".format(z)) - if filetype=='cism': - mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[z,:,:], fieldInfo[MPASfieldName]['gridType']) - elif filetype=='mpas': - mpas_grid_input_layers[z,:] = delaunay_interpolate(InputField[:,z], fieldInfo[MPASfieldName]['gridType']) - elif args.interpType == 'n': - print(" ...Layer {}, Interpolating this layer to MPAS grid using nearest neighbor method...".format(z)) - if fieldInfo[MPASfieldName]['gridType'] == 'x0': - mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x0] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 
1d mpas fields do not, but the operation won't do anything because they are already flat. - elif fieldInfo[MPASfieldName]['gridType'] == 'x1': - mpas_grid_input_layers[z,:] = InputField[z,:,:].flatten()[nn_idx_x1] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - elif fieldInfo[MPASfieldName]['gridType'] == 'cell': - mpas_grid_input_layers[z,:] = InputField[:,z].flatten()[nn_idx_cell] # 2d cism fields need to be flattened. (Note the indices were flattened during init, so this just matches that operation for the field data itself.) 1d mpas fields do not, but the operation won't do anything because they are already flat. - elif args.interpType == 'e': - print(" ...Layer{}, Interpolating this layer to MPAS grid using ESMF-weights method...".format(z)) - if filetype=='cism': - mpas_grid_input_layers[z,:] = ESMF_interp(InputField[z,:,:]) - elif filetype=='mpas': - mpas_grid_input_layers[z,:] = ESMF_interp(InputField[:,z]) - else: - sys.exit('ERROR: Unknown interpolation method specified') - print(' interpolated MPAS {}, layer {} min/max {} {}: '.format(MPASfieldName, z, mpas_grid_input_layers[z,:].min(), mpas_grid_input_layers[z,:].max())) - - if fieldInfo[MPASfieldName]['scalefactor'] != 1.0: - mpas_grid_input_layers *= fieldInfo[MPASfieldName]['scalefactor'] - print(' scaled MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) - if fieldInfo[MPASfieldName]['offset'] != 0.0: - mpas_grid_input_layers += fieldInfo[MPASfieldName]['offset'] - print(' offset MPAS {} on CISM vertical layers, min/max: {} {}'.format(MPASfieldName, mpas_grid_input_layers.min(), mpas_grid_input_layers.max())) - - # ------------ - # Now interpolate vertically - print(" Input layer field {} has layers: 
{}".format(inputFile.variables[InputFieldName].dimensions[1], input_layers)) - if 'nVertLevels' in MPASfile.variables[MPASfieldName].dimensions: - print(" MPAS layer centers are: {}".format(mpasLayerCenters)) - destVertCoord = mpasLayerCenters - elif 'nVertInterfaces' in MPASfile.variables[MPASfieldName].dimensions: - print(" MPAS layer interfaces are: {}".format(mpasLayerInterfaces)) - destVertCoord = mpasLayerInterfaces - else: - sys.exit("Unknown vertical dimension for this variable destination file.") - - - if input_layers.min() > destVertCoord.min(): - # This fix ensures that interpolation is done when input_layers.min is very slightly greater than destVertCoord.min - if input_layers.min() - 1.0e-6 < destVertCoord.min(): - print('input_layers.min = {0:.16f}'.format(input_layers.min())) - print('destVertCoord.min = {0:.16f}'.format(destVertCoord.min())) - input_layers[0] = input_layers[0] - 1.0e-6 - print('New input_layers.min = {0:.16f}'.format(input_layers.min())) - else: - print("WARNING: input_layers.min() > destVertCoord.min() Values at the first level of input_layers will be used for all MPAS layers in this region!") - if input_layers.max() < destVertCoord.max(): - # This fix ensures that interpolation is done when input_layers.max is very slightly smaller than destVertCoord.max - if input_layers.max() + 1.0e-6 > destVertCoord.min(): - print('input_layers.max = {0:.16f}'.format(input_layers.max())) - print('destVertCoord.max = {0:.16f}'.format(destVertCoord.max())) - input_layers[inputVerticalDimSize-1] = input_layers[inputVerticalDimSize-1] + 1.0e-6 - print('New input_layers.max = {0:.16f}'.format(input_layers.max())) - print('input_layers = {}'.format(input_layers)) - else: - print("WARNING: input_layers.max() < destVertCoord.max() Values at the last level of input_layers will be used for all MPAS layers in this region!") - MPASfield = vertical_interp_MPAS_grid(mpas_grid_input_layers, destVertCoord, input_layers) - print(' MPAS {} on MPAS vertical 
layers, min/max of all layers:'.format(MPASfieldName, MPASfield.min(), MPASfield.max())) - - del mpas_grid_input_layers - - return MPASfield - - -#---------------------------- - -def vertical_interp_MPAS_grid(mpas_grid_input_layers, destVertCoord, input_layers): - destinationField = np.zeros((nCells, len(destVertCoord))) - for i in range(nCells): - destinationField[i,:] = np.interp(destVertCoord, input_layers, mpas_grid_input_layers[:,i]) - return destinationField - - -#---------------------------- -#---------------------------- - - - - -print("==================") -print('Gathering coordinate information from input and output files.') - - -# Open the output file, get needed dimensions & variables -try: - MPASfile = netCDF4.Dataset(args.mpasFile,'r+') - MPASfile.set_auto_mask(False) - try: - nVertLevels = len(MPASfile.dimensions['nVertLevels']) - except: - print('Output file is missing the dimension nVertLevels. Might not be a problem.') - try: - nVertInterfaces = len(MPASfile.dimensions['nVertInterfaces']) - except: - print('Output file is missing the dimension nVertInterfaces. Might not be a problem.') - - try: - # 1d vertical fields - layer centers - layerThicknessFractions = MPASfile.variables['layerThicknessFractions'][:] - # build up sigma levels - mpasLayerCenters = np.zeros( (nVertLevels,) ) - mpasLayerCenters[0] = 0.5 * layerThicknessFractions[0] - for k in range(nVertLevels)[1:]: # skip the first level - mpasLayerCenters[k] = mpasLayerCenters[k-1] + 0.5 * layerThicknessFractions[k-1] + 0.5 * layerThicknessFractions[k] - print(" Using MPAS layer centers at sigma levels: {}".format(mpasLayerCenters)) - except: - print('Trouble calculating mpas layer centers. 
Might not be a problem.') - - try: - # 1d vertical field - layer interfaces - layerThicknessFractions = MPASfile.variables['layerThicknessFractions'][:] - # build up sigma levels - mpasLayerInterfaces = np.zeros( (nVertInterfaces,) ) - mpasLayerInterfaces[0] = 0.0 - for k in range(1, nVertInterfaces): # skip the first level - mpasLayerInterfaces[k] = mpasLayerInterfaces[k-1] + layerThicknessFractions[k-1] - print(" Using MPAS layer interfaces at sigma levels: {}".format(mpasLayerInterfaces)) - except: - print('Trouble calculating mpas layer interfaces. Might not be a problem.') - - - # '2d' spatial fields on cell centers - xCell = MPASfile.variables['xCell'][:] - #print('xCell min/max:', xCell.min(), xCell.max() - yCell = MPASfile.variables['yCell'][:] - #print('yCell min/max:', yCell.min(), yCell.max() - nCells = len(MPASfile.dimensions['nCells']) - -except: - sys.exit('Error: The output grid file specified is either missing or lacking needed dimensions/variables.') -print("==================\n") - - - -# Open the input file, get needed dimensions -inputFile = netCDF4.Dataset(args.inputFile,'r') -inputFile.set_auto_mask(False) - -# Figure out if this is CISM or MPAS -if 'x1' in inputFile.variables: - filetype='cism' - print("Source file appears to be in CISM format.") -elif 'xCell' in inputFile.variables: - filetype='mpas' - print("Source file appears to be in MPAS format.") -else: - filetype='other' - print("Source file appears to be in a non-standard format.") - if not args.interpType == 'e': - sys.exit("ERROR: Source file does not appear to be a CISM file or an MPAS file. The ESMF interpolation method is the only supported method for files with a non-standard format.") - -if filetype=='cism': - # Get the CISM vertical dimensions if they exist - try: - level = len(inputFile.dimensions['level']) - except: - print(' Input file is missing the dimension level. 
Might not be a problem.') - - try: - stagwbndlevel = len(inputFile.dimensions['stagwbndlevel']) - except: - print(' Input file is missing the dimension stagwbndlevel. Might not be a problem.') - - # Get CISM location variables if they exist - try: - x1 = inputFile.variables['x1'][:] - dx1 = x1[1] - x1[0] - #print('x1 min/max/dx:', x1.min(), x1.max(), dx1 - y1 = inputFile.variables['y1'][:] - dy1 = y1[1] - y1[0] - #print('y1 min/max/dx:', y1.min(), y1.max(), dy1 - - ##x1 = x1 - (x1.max()-x1.min())/2.0 # This was for some shifted CISM grid but should not be used in general. - ##y1 = y1 - (y1.max()-y1.min())/2.0 - except: - print(' Input file is missing x1 and/or y1. Might not be a problem.') - - try: - x0 = inputFile.variables['x0'][:] - #print('x0 min/max:', x0.min(), x0.max() - y0 = inputFile.variables['y0'][:] - #print('y0 min/max:', y0.min(), y0.max() - - ##x0 = x0 - (x0.max()-x0.min())/2.0 - ##y0 = y0 - (y0.max()-y0.min())/2.0 - - except: - print(' Input file is missing x0 and/or y0. Might not be a problem.') - - # Check the overlap of the grids - print('==================') - print('CISM Input File extents:') - print(' x1 min, max: {} {}'.format(x1.min(), x1.max())) - print(' y1 min, max: {} {}'.format(y1.min(), y1.max())) - print('MPAS File extents:') - print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) - print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) - print('==================') - - -elif filetype == 'mpas': - - #try: - # nVertInterfaces = len(inputFile.dimensions['nVertInterfaces']) - #except: - # print(' Input file is missing the dimension nVertInterfaces. Might not be a problem.' 
- - # Get MPAS location variables if they exist - try: - inputxCell = inputFile.variables['xCell'][:] - inputyCell = inputFile.variables['yCell'][:] - except: - sys.exit("ERROR: Input file is missing xCell and/or yCell") - - - # Check the overlap of the grids - print('==================') - print('Input MPAS File extents:') - print(' xCell min, max: {} {}'.format(inputxCell.min(), inputxCell.max())) - print(' yCell min, max: {} {}'.format(inputyCell.min(), inputyCell.max())) - print('Output MPAS File extents:') - print(' xCell min, max: {} {}'.format(xCell.min(), xCell.max())) - print(' yCell min, max: {} {}'.format(yCell.min(), yCell.max())) - print('==================') - - -if filetype=='mpas' and args.interpType == 'b': - sys.exit("ERROR: Bilinear interpolation not supported for input files of MPAS format.") - -#---------------------------- -# Setup Delaunay/barycentric interpolation weights if needed -if args.interpType == 'd': - mpasXY = np.vstack((xCell[:], yCell[:])).transpose() - - if filetype=='cism': - [Yi,Xi] = np.meshgrid(x1[:], y1[:]) - cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) - cismXY1[:,0] = Yi.flatten() - cismXY1[:,1] = Xi.flatten() - - print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') - start = time.perf_counter() - vtx1, wts1, outsideIndx1, treex1 = delaunay_interp_weights(cismXY1, mpasXY) - if len(outsideIndx1) > 0: - outsideIndx1 = outsideIndx1[0] # get the list itself - end = time.perf_counter(); print('done in {}'.format(end-start)) - - if 'x0' in inputFile.variables and not args.thicknessOnly: - # Need to setup separate weights for this grid - [Yi,Xi] = np.meshgrid(x0[:], y0[:]) - cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) - cismXY0[:,0] = Yi.flatten() - cismXY0[:,1] = Xi.flatten() - - print('Building interpolation weights: CISM x0/y0 -> MPAS') - start = time.perf_counter() - vtx0, wts0, outsideIndx0, treex0 = delaunay_interp_weights(cismXY0, mpasXY) - if len(outsideIndx0) > 0: - outsideIndx0 = outsideIndx0[0] # get 
the list itself - end = time.perf_counter(); print('done in {}'.format(end-start)) - - elif filetype=='mpas': - inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() - print('Building interpolation weights: MPAS in -> MPAS out') - start = time.perf_counter() - vtCell, wtsCell, outsideIndcell, treecell = delaunay_interp_weights(inputmpasXY, mpasXY) - end = time.perf_counter(); print('done in {}'.format(end-start)) - -#---------------------------- -# Setup NN interpolation weights if needed -if args.interpType == 'n': - mpasXY = np.vstack((xCell[:], yCell[:])).transpose() - - if filetype=='cism': - [Yi,Xi] = np.meshgrid(x1[:], y1[:]) - cismXY1 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) - cismXY1[:,0] = Yi.flatten() - cismXY1[:,1] = Xi.flatten() - - print('\nBuilding interpolation weights: CISM x1/y1 -> MPAS') - start = time.perf_counter() - nn_idx_x1 = nn_interp_weights(cismXY1, mpasXY) - end = time.perf_counter(); print('done in {}'.format(end-start)) - - if 'x0' in inputFile.variables and not args.thicknessOnly: - # Need to setup separate weights for this grid - [Yi,Xi] = np.meshgrid(x0[:], y0[:]) - cismXY0 = np.zeros([Xi.shape[0]*Xi.shape[1],2]) - cismXY0[:,0] = Yi.flatten() - cismXY0[:,1] = Xi.flatten() - - print('Building interpolation weights: CISM x0/y0 -> MPAS') - start = time.perf_counter() - nn_idx_x0 = nn_interp_weights(cismXY0, mpasXY) - end = time.perf_counter(); print('done in {}'.format(end-start)) - - elif filetype=='mpas': - inputmpasXY= np.vstack((inputxCell[:], inputyCell[:])).transpose() - print('Building interpolation weights: MPAS in -> MPAS out') - start = time.perf_counter() - nn_idx_cell = nn_interp_weights(inputmpasXY, mpasXY) - end = time.perf_counter(); print('done in {}'.format(end-start)) - - -#---------------------------- -# Map Input-Output field names - add new fields here as needed - -fieldInfo = OrderedDict() -# ----------------- -if filetype=='cism': - - fieldInfo['thickness'] = {'InputName':'thk', 'scalefactor':1.0, 
'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['iceMask'] = {'InputName':'iceMask', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - if not args.thicknessOnly: - fieldInfo['bedTopography'] = {'InputName':'topg', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['sfcMassBal'] = {'InputName':'smb', 'scalefactor':1.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming mm/yr w.e. units for smb - fieldInfo['sfcMassBalUncertainty'] = {'InputName':'smb_std', 'scalefactor':1.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming mm/yr w.e. units for smb - fieldInfo['floatingBasalMassBal'] = {'InputName':'subm', 'scalefactor':910.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} # Assuming default CISM density - #fieldInfo['temperature'] = {'InputName':'temp', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} - fieldInfo['temperature'] = {'InputName':'tempstag', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':True} # pick one or the other - fieldInfo['basalHeatFlux'] = {'InputName':'bheatflx', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['surfaceAirTemperature'] = {'InputName':'artm', 'scalefactor':1.0, 'offset':273.15, 'gridType':'x1', 'vertDim':False} - fieldInfo['beta'] = {'InputName':'beta', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x0', 'vertDim':False} # needs different mapping file... - #fieldInfo['observedSpeed'] = {'InputName':'balvel', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x0', 'vertDim':False} # needs different mapping file... 
- # fields for observed surface speed and associated error, observed thickness change - fieldInfo['observedSurfaceVelocityX'] = {'InputName':'vx', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedSurfaceVelocityY'] = {'InputName':'vy', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'vErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedThicknessTendency'] = {'InputName':'dHdt', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['observedThicknessTendencyUncertainty'] = {'InputName':'dHdtErr', 'scalefactor':1.0/(365.0*24.0*3600.0), 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['thicknessUncertainty'] = {'InputName':'topgerr', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - - fieldInfo['ismip6shelfMelt_basin'] = {'InputName':'ismip6shelfMelt_basin', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - fieldInfo['ismip6shelfMelt_deltaT'] = {'InputName':'ismip6shelfMelt_deltaT', 'scalefactor':1.0, 'offset':0.0, 'gridType':'x1', 'vertDim':False} - -# ----------------- -elif filetype=='mpas': - - fieldInfo['thickness'] = {'InputName':'thickness', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - if not args.thicknessOnly: - fieldInfo['bedTopography'] = {'InputName':'bedTopography', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['sfcMassBal'] = {'InputName':'sfcMassBal', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['floatingBasalMassBal'] = {'InputName':'floatingBasalMassBal', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['temperature'] = {'InputName':'temperature', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 
'vertDim':True} - fieldInfo['basalHeatFlux'] = {'InputName':'basalHeatFlux', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['surfaceAirTemperature'] = {'InputName':'surfaceAirTemperature', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['beta'] = {'InputName':'beta', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['muFriction'] = {'InputName':'muFriction', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['eigencalvingParameter'] = {'InputName':'eigencalvingParameter', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - # obs fields - fieldInfo['observedSurfaceVelocityX'] = {'InputName':'observedSurfaceVelocityX', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['observedSurfaceVelocityY'] = {'InputName':'observedSurfaceVelocityY', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['observedSurfaceVelocityUncertainty'] = {'InputName':'observedSurfaceVelocityUncertainty', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['observedThicknessTendency'] = {'InputName':'observedThicknessTendency', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['observedThicknessTendencyUncertainty'] = {'InputName':'observedThicknessTendencyUncertainty', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['thicknessUncertainty'] = {'InputName':'thicknessUncertainty', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['basalFrictionFlux'] = {'InputName':'basalFrictionFlux', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['uReconstructX'] = {'InputName':'uReconstructX', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':True} - fieldInfo['uReconstructY'] = {'InputName':'uReconstructY', 'scalefactor':1.0, 'offset':0.0, 
'gridType':'cell', 'vertDim':True} - - fieldInfo['ismip6shelfMelt_basin'] = {'InputName':'ismip6shelfMelt_basin', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['ismip6shelfMelt_deltaT'] = {'InputName':'ismip6shelfMelt_deltaT', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - - fieldInfo['stiffnessFactor'] = {'InputName':'stiffnessFactor', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['effectivePressure'] = {'InputName':'effectivePressure', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - fieldInfo['iceMask'] = {'InputName':'iceMask', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} - -# Used by Trevor -# fieldInfo['sfcMassBalUncertainty'] = {'InputName':'smb_std_vector', 'scalefactor':910.0/(3600.0*24.0*365.0)/1000.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['ismip6Runoff'] = {'InputName':'runoff_vector', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['ismip6_2dThermalForcing'] = {'InputName':'thermal_forcing_vector', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['ismip6aST'] = {'InputName':'aST_vector', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['ismip6aSMB'] = {'InputName':'aSMB_vector', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['ismip6refST'] = {'InputName':'ST_vector', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} -# fieldInfo['externalWaterInput'] = {'InputName':'externalWaterInput_vector', 'scalefactor':1.0/(3600.0*24.0*365.0), 'offset':0.0, 'gridType':'cell', 'vertDim':False} - -# ----------------- -elif filetype=='other': - # These are variable mappings for the ESMF method. Update as needed for specific applications. 
- fieldInfo['surfaceAirTemperature'] = {'InputName':'TBOT', 'scalefactor':1.0, 'offset':0.0, 'gridType':'cell', 'vertDim':False} # This example can be used with an ELM history file and an EMSF mapping file. - -else: - sys.exit("ERROR: Unknown file type.") - -#---------------------------- - - -#---------------------------- -# try each field. If it exists in the input file, it will be copied. If not, it will be skipped. -interpolated_vars = [] -for MPASfieldName in fieldInfo: - if not 'all' in args.vars and not MPASfieldName in args.vars: - continue - - print('\n## {} ##'.format(MPASfieldName)) - - if not MPASfieldName in MPASfile.variables: - print(" Warning: Field '{}' is not in the destination file. Skipping.".format(MPASfieldName)) - continue # skip the rest of this iteration of the for loop over variables - - if not fieldInfo[MPASfieldName]['InputName'] in inputFile.variables: - print(" Warning: Field '{}' is not in the source file. Skipping.".format(fieldInfo[MPASfieldName]['InputName'])) - continue # skip the rest of this iteration of the for loop over variables - - for timelev in range(args.timestart, args.timeend+1): - # Note; the interpolate functions called below access timelev as a global variable - timelevout = timelev # assuming the time level of the output file should match that of the input file - print(" ---- Interpolating time level {} ----".format(timelev)) - start = time.perf_counter() - if fieldInfo[MPASfieldName]['vertDim']: - MPASfield = interpolate_field_with_layers(MPASfieldName) - else: - MPASfield = interpolate_field(MPASfieldName) - end = time.perf_counter(); print(' interpolation done in {}'.format(end-start)) - - # Don't allow negative thickness. 
- if MPASfieldName == 'thickness' and MPASfield.min() < 0.0: - MPASfield[MPASfield < 0.0] = 0.0 - print(' removed negative thickness, new min/max: {} {}'.format(MPASfield.min(), MPASfield.max())) - - # basalHeatFlux must be non-negative - if MPASfieldName == 'basalHeatFlux': - assert np.nanmin(MPASfield) >= 0.0, 'basalHeatFlux contains negative values! This is likely due to the ' \ - 'conventions used in the input file, rather than bad data. Ensure ' \ - 'non-negative values before interpolating.' - - # Now insert the MPAS field into the file. - if 'Time' in MPASfile.variables[MPASfieldName].dimensions: - MPASfile.variables[MPASfieldName][timelevout,:] = MPASfield # Time will always be leftmost index - else: - MPASfile.variables[MPASfieldName][:] = MPASfield - - MPASfile.sync() # update the file now in case we get an error later - interpolated_vars.append(MPASfieldName) - -if args.timeend > args.timestart: - print("\n\nMultiple time levels have been copied, but xtime has not. Be sure to manually copy or assign xtime values in the destination file if needed.") - -print("\nFields successfully interpolated: " + ",".join(interpolated_vars)) - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:])#.join("Variables interpolated: {}".format(interpolated_vars)) -thiscommand = thiscommand+"; Variables successfully interpolated: " + ",".join(interpolated_vars) -if hasattr(MPASfile, 'history'): - newhist = '\n'.join([thiscommand, getattr(MPASfile, 'history')]) -else: - newhist = thiscommand -setattr(MPASfile, 'history', newhist ) - -inputFile.close() -MPASfile.close() - -print('\nInterpolation completed.') diff --git a/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py b/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py deleted file mode 100755 index bd826aabd..000000000 --- a/landice/mesh_tools_li/mark_domain_boundaries_dirichlet.py +++ /dev/null @@ -1,53 +0,0 @@ -#!/usr/bin/env 
python -''' -This script marks all of the boundary cells in a domain as Dirichlet velocity boundaries. -''' - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import netCDF4 -import numpy as np -from optparse import OptionParser -from datetime import datetime -import sys - -print("== Gathering information. (Invoke with --help for more details. All arguments are optional)\n") -parser = OptionParser() -parser.description = __doc__ -parser.add_option("-f", "--file", dest="inputFile", help="name of file to be modified.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-t", "--time", dest="time", help="time level to modify", default=0, type="int", metavar="FILENAME") -for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" -options, args = parser.parse_args() - - -print(" Input file: {}".format(options.inputFile)) -print(" Time level: {}".format(options.time)) - -f=netCDF4.Dataset(options.inputFile, 'r+') -nCells = len(f.dimensions['nCells']) -mask = f.variables['dirichletVelocityMask'][options.time, :, :] -cONc = f.variables['cellsOnCell'][:] -nEdgesOnCell = f.variables['nEdgesOnCell'][:] - -mask[:] = 0 -for i in range(nCells): - nE = nEdgesOnCell[i] - if min(cONc[i, :nE]) == 0: - mask[i,:] = 1 -f.variables['dirichletVelocityMask'][options.time, :, :] = mask[:] - - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -if hasattr(f, 'history'): - newhist = '\n'.join([thiscommand, getattr(f, 'history')]) -else: - newhist = thiscommand -setattr(f, 'history', newhist ) - -f.close() - -print('\nMarking boundary cells completed.') diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py deleted file mode 100755 index 2f3b68c73..000000000 --- 
a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_MPAS_mesh.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python -# Create a SCRIP file from an MPAS mesh. -# See for details: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_5_2_0rp1/ESMF_refdoc/node3.html#SECTION03024000000000000000 - -from mpas_tools.scrip.from_mpas import main - - -if __name__ == '__main__': - main() - diff --git a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_planar_rectangular_grid.py b/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_planar_rectangular_grid.py deleted file mode 100755 index 8d42597c5..000000000 --- a/mesh_tools/create_SCRIP_files/create_SCRIP_file_from_planar_rectangular_grid.py +++ /dev/null @@ -1,175 +0,0 @@ -#!/usr/bin/env python -# Create a SCRIP file from a planar rectanfular mesh. -# See for details: http://www.earthsystemmodeling.org/esmf_releases/public/ESMF_5_2_0rp1/ESMF_refdoc/node3.html#SECTION03024000000000000000 - -import sys -import netCDF4 -import numpy as np -from optparse import OptionParser -import matplotlib.pyplot as plt -from pyproj import Transformer, transform, CRS - -# ======== DEFINE PROJECTIONS ============= -# Create empty dictionary to store projection definitions: -projections = dict() -# add more as needed: - -# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. -# datum is actually EIGEN-GL04C but that is not an option in Proj. Therefore using EGM08 which should be within ~1m everywhere (and 10-20 cm in most places) -# NOTE!!!!!! egm08_25.gtx can be downloaded from: http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx and the path in the projection specification line should point to it! 
-#projections['gis-bamber'] = '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +ellps=WGS84 +geoidgrids=./egm08_25.gtx' -projections['gis-bamber'] = '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +ellps=WGS84' # This version ignores the vertical datum shift, which should be a very small error for horizontal-only positions - -# GIMP projection: This is also polar stereographic but with different standard parallel and using the WGS84 ellipsoid. -projections['gis-gimp'] = '+proj=stere +lat_ts=70.0 +lat_0=90 +lon_0=315.0 +k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84' - -# BEDMAP2 projection -projections['ais-bedmap2'] = '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84' # Note: BEDMAP2 elevations use EIGEN-GL04C geoid - -# Standard Lat/Long -projections['latlon'] = '+proj=longlat +ellps=WGS84' - - - -print ("== Gathering information. (Invoke with --help for more details. All arguments are optional)") -parser = OptionParser() -parser.description = "This script takes an MPAS grid file and generates a SCRIP grid file." -parser.add_option("-i", "--input", dest="inputFile", help="input grid file name used as input.", default="input.nc", metavar="FILENAME") -parser.add_option("-s", "--scrip", dest="scripFile", help="SCRIP grid file to output.", default="scrip.nc", metavar="FILENAME") -parser.add_option("-p", "--proj", dest="projection", help="projection used by the input data file. 
Valid options are:" + str(projections.keys()), metavar="PROJ") -parser.add_option("-r", "--rank", dest="gridRank", help="desired rank of the output SCRIP grid data") -parser.add_option("--plot", dest="plot", action="store_true", help="if this flag is used, destination grid points are plotted") - -for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" -options, args = parser.parse_args() - -if not options.inputFile: - sys.exit('Error: Data input grid file is required. Specify with -c command line argument.') -if not options.scripFile: - sys.exit('Error: SCRIP output grid file is required. Specify with -s command line argument.') -if not options.projection: - sys.exit('Error: data projection required with -p or --proj command line argument. Valid options are: ' + str(projections.keys())) -if not options.gridRank: - sys.exit('Error: desired rank of SCRIP output grid data is required. Valid options are 1 (for unstructured grid) or 2') -print ('') # make a space in stdout before further output - -# =================================== - -fin = netCDF4.Dataset(options.inputFile, 'r') -fout = netCDF4.Dataset(options.scripFile, 'w') # This will clobber existing files - -# Get info from input file -x = fin.variables['x'][:] -y = fin.variables['y'][:] -nx = x.size -ny = y.size -dx = x[1] - x[0] -dy = y[1] - y[0] - -# Write to output file -# Dimensions -fout.createDimension("grid_size", nx * ny) -fout.createDimension("grid_corners", 4 ) - -if int(options.gridRank) == 1: - print('grid rank is 1') - fout.createDimension("grid_rank", 1) -elif int(options.gridRank) == 2: - print('grid rank is 2') - fout.createDimension("grid_rank", 2) -else: - raise ValueError(f'grid rank value is invalid: valid options are 1 or 2 but {options.gridRank} was given.') - -# Variables -grid_center_lat = fout.createVariable('grid_center_lat', 'f8', ('grid_size',)) -grid_center_lat.units = 'degrees' 
-grid_center_lon = fout.createVariable('grid_center_lon', 'f8', ('grid_size',)) -grid_center_lon.units = 'degrees' -grid_corner_lat = fout.createVariable('grid_corner_lat', 'f8', ('grid_size', 'grid_corners')) -grid_corner_lat.units = 'degrees' -grid_corner_lon = fout.createVariable('grid_corner_lon', 'f8', ('grid_size', 'grid_corners')) -grid_corner_lon.units = 'degrees' -grid_imask = fout.createVariable('grid_imask', 'i4', ('grid_size',)) -grid_imask.units = 'unitless' -grid_dims = fout.createVariable('grid_dims', 'i4', ('grid_rank',)) - - -# Create matrices of x,y -print ('Building matrix version of x, y locations.') -xmatrix, ymatrix = np.meshgrid(x, y) -xc = np.append(x[:] - dx/2.0, x[-1] + dx/2.0 ) # get a copy of x that is on the staggered grid and includes both bounding edges -yc = np.append(y[:] - dy/2.0, y[-1] + dy/2.0 ) # get a copy of y that is on the staggered grid and includes both bounding edges -xcmatrix, ycmatrix = np.meshgrid(xc, yc) - -# Unproject to lat/long for grid centers and grid corners -print ('Unprojecting.') - -xmatrix_flat = xmatrix.flatten(order='C') # Flatten using C indexing -ymatrix_flat = ymatrix.flatten(order='C') - -# make a CRS (coordinate reference system) for projections from the Proj string: -crs_in = CRS.from_proj4(projections[options.projection]) -crs_out = CRS.from_proj4(projections['latlon']) - -# building a transformer -t = Transformer.from_crs(crs_in,crs_out) - -# transform the original grid into the lat-lon grid -grid_center_lon[:], grid_center_lat[:] = t.transform(xmatrix_flat, ymatrix_flat) - - -# Now fill in the corners in the right locations -stag_lon, stag_lat = t.transform(xcmatrix, ycmatrix) -print ('Filling in corners of each cell.') -grid_corner_lon_local = np.zeros( (nx * ny, 4) ) # It is WAYYY faster to fill in the array entry-by-entry in memory than to disk. 
-grid_corner_lat_local = np.zeros( (nx * ny, 4) ) - -jj = np.arange(ny) -ii = np.arange(nx) -i_ind, j_ind = np.meshgrid(ii, jj) -cell_ind = j_ind * nx + i_ind -grid_corner_lon_local[cell_ind, 0] = stag_lon[j_ind, i_ind] -grid_corner_lon_local[cell_ind, 1] = stag_lon[j_ind, i_ind + 1] -grid_corner_lon_local[cell_ind, 2] = stag_lon[j_ind + 1, i_ind + 1] -grid_corner_lon_local[cell_ind, 3] = stag_lon[j_ind + 1, i_ind] -grid_corner_lat_local[cell_ind, 0] = stag_lat[j_ind, i_ind] -grid_corner_lat_local[cell_ind, 1] = stag_lat[j_ind, i_ind + 1] -grid_corner_lat_local[cell_ind, 2] = stag_lat[j_ind + 1, i_ind + 1] -grid_corner_lat_local[cell_ind, 3] = stag_lat[j_ind + 1, i_ind] - -grid_corner_lon[:] = grid_corner_lon_local[:] -grid_corner_lat[:] = grid_corner_lat_local[:] - -grid_imask[:] = 1 # For now, assume we don't want to mask anything out - but eventually may want to exclude certain cells from the input mesh during interpolation - -# set the grid dimension based on the grid rank -if int(options.gridRank) == 1: - grid_dims[:] = (nx * ny) -elif int(options.gridRank) == 2: - grid_dims[:] = [nx , ny] - - -if options.plot: - print("plotting is on") - # plot some stuff - #plot a single point - i=-1 - plt.figure(1) - plt.plot(grid_center_lon[i], grid_center_lat[i], 'o') - plt.plot(grid_corner_lon[i, 0], grid_corner_lat[i, 0], 'kx') - plt.plot(grid_corner_lon[i, 1], grid_corner_lat[i, 1], 'bx') - plt.plot(grid_corner_lon[i, 2], grid_corner_lat[i, 2], 'cx') - plt.plot(grid_corner_lon[i, 3], grid_corner_lat[i, 3], 'gx') - - #plot all points - plt.figure(2) - plt.plot(grid_center_lon[:], grid_center_lat[:], 'bo') - plt.plot(grid_corner_lon[:], grid_corner_lat[:], 'g.') - plt.show() - - -fin.close() -fout.close() -print('scrip file generation complete') diff --git a/mesh_tools/create_SCRIP_files/mpas_tools b/mesh_tools/create_SCRIP_files/mpas_tools deleted file mode 120000 index 627733f3b..000000000 --- a/mesh_tools/create_SCRIP_files/mpas_tools +++ /dev/null @@ -1 +0,0 @@ 
-../../conda_package/mpas_tools/ \ No newline at end of file diff --git a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py b/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py deleted file mode 100755 index 27b9c8715..000000000 --- a/mesh_tools/mesh_conversion_tools/mark_horns_for_culling.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python -''' -This script identifies "horns" on a mesh (cells with two or fewer neighbors), -and marks them for culling. In some cores/configurations, these weakly -connected cells can be dynamically inactive, and, therefore, undesirable to -keep in a mesh. - -The method used will work on both planar and spherical meshes. -It adds the new masked cell to an existing 'cullCell' field if it exists, -otherwise it creates a new field. -''' - -from __future__ import absolute_import, division, print_function, \ - unicode_literals - -import sys -import numpy as np -import netCDF4 -from optparse import OptionParser -from datetime import datetime - - -print("== Gathering information. (Invoke with --help for more details. All " - "arguments are optional)\n") -parser = OptionParser() -parser.description = __doc__ -parser.add_option( - "-f", - "--file", - dest="inputFile", - help="Name of file to be processed.", - default="grid.nc", - metavar="FILENAME") -for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" -options, args = parser.parse_args() - -print(" File to be modified: " + options.inputFile) - - -# Open file and get needed fields. 
-inputFile = netCDF4.Dataset(options.inputFile, 'r+') -nCells = len(inputFile.dimensions['nCells']) -cellsOnCell = inputFile.variables['cellsOnCell'][:] - -# Add the horn cells to existing mask if it exists -if 'cullCell' in inputFile.variables: - cullCell = inputFile.variables['cullCell'][:] -else: # otherwise make a new mask initialized empty - cullCell = np.zeros((nCells,)) # local variable - -nHorns = 0 -for i in range(nCells): - # NOTE: Can change this threshold, if needed for a particular use case. - if (cellsOnCell[i, :] > 0).sum() <= 2: - cullCell[i] = 1 - nHorns += 1 - -# Write out the new field -if 'cullCell' in inputFile.variables: - cullCellVar = inputFile.variables['cullCell'] -else: - cullCellVar = inputFile.createVariable('cullCell', 'i', ('nCells',)) -cullCellVar[:] = cullCell - - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + \ - ": " + " ".join(sys.argv[:]) -if hasattr(inputFile, 'history'): - newhist = '\n'.join([thiscommand, getattr(inputFile, 'history')]) -else: - newhist = thiscommand -setattr(inputFile, 'history', newhist) - -inputFile.close() - -print('\n{} "horn" locations have been marked in the field cullCell.'.format( - nHorns)) -print("Remember to use MpasCellCuller.x to actually remove them!") diff --git a/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py b/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py deleted file mode 100755 index c6d9065f7..000000000 --- a/mesh_tools/planar_grid_transformations/set_lat_lon_fields_in_planar_grid.py +++ /dev/null @@ -1,126 +0,0 @@ -#!/usr/bin/env python -''' -Take MPAS planar grid and populate the lat/lon fields based on a specified projection. 
-''' - -from __future__ import absolute_import, division, print_function, unicode_literals - -import sys -import numpy as np -import netCDF4 -from pyproj import Transformer, transform, CRS -from optparse import OptionParser -from datetime import datetime - - -# ======== DEFINE PROJECTIONS ============= -# Create empty dictionary to store projection definitions: -projections = dict() -# add more as needed: - -# CISM's projection is as follows, with the vertical datum as EIGEN-GL04C geoid. -# datum is actually EIGEN-GL04C but that is not an option in Proj. Therefore using EGM08 which should be within ~1m everywhere (and 10-20 cm in most places) -# NOTE!!!!!! egm08_25.gtx can be downloaded from: http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx and the path in the projection specification line should point to it! -#projections['gis-bamber'] = '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +geoidgrids=./egm08_25.gtx' -projections['gis-bamber'] = '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=800000.0 +y_0=3400000.0 +ellps=WGS84' # This version ignores the vertical datum shift, which should be a very small error for horizontal-only positions -projections['gis-bamber-shift'] = '+proj=stere +lat_ts=71.0 +lat_0=90 +lon_0=321.0 +k_0=1.0 +x_0=1300000.0 +y_0=3500000.0 +ellps=WGS84' # This version ignores the vertical datum shift, which should be a very small error for horizontal-only positions - -# GIMP projection: This is also polar stereographic but with different standard parallel and using the WGS84 ellipsoid. -projections['gis-gimp'] = '+proj=stere +lat_ts=70.0 +lat_0=90 +lon_0=315.0 +k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84' - -# BEDMAP2 projection -projections['ais-bedmap2'] = '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=WGS84' # Note: BEDMAP2 elevations use EIGEN-GL04C geoid - -# BEDMAP2 projection of sphere. 
This projection must be used to adjust MALI mesh when performing -# coupled MALI-SeaLevelModel simulations. Otherwise, ice mass won't be conserved between the MALI planar mesh and -# the spherical sea-level model grid during the post-processing (output analysis) step. -projections['ais-bedmap2-sphere'] = '+proj=stere +lat_ts=-71.0 +lat_0=-90 +lon_0=0.0 +k_0=1.0 +x_0=0.0 +y_0=0.0 +ellps=sphere' - -# Standard Lat/Long -projections['latlon'] = '+proj=longlat +ellps=WGS84' -# =================================== - - - -print("== Gathering information. (Invoke with --help for more details. All arguments are optional)") -parser = OptionParser() -parser.description = "This script populates the MPAS lat and lon fields based on the projection specified by the -p option." -parser.add_option("-f", "--file", dest="fileInName", help="MPAS land ice file name.", default="landice_grid.nc", metavar="FILENAME") -parser.add_option("-p", "--proj", dest="projection", help="projection used for the data. Valid options are: \n" + str(list(projections.keys())), metavar="PROJ") -for option in parser.option_list: - if option.default != ("NO", "DEFAULT"): - option.help += (" " if option.help else "") + "[default: %default]" -options, args = parser.parse_args() - -if not options.projection: - sys.exit('Error: data projection required with -p or --proj command line argument. 
Valid options are: ' + str(list(projections.keys()))) - -if not options.fileInName: - print("No filename specified, so using 'landice_grid.nc'.") - options.fileInName = 'landice_grid.nc' -print('') # make a space in stdout before further output - - -# ================================================= - -print("Using {} projection, defined as: {}".format(options.projection, projections[options.projection])) - -# get needed fields -f = netCDF4.Dataset(options.fileInName, 'r+') -xCell = f.variables['xCell'] -yCell = f.variables['yCell'] -xVertex = f.variables['xVertex'] -yVertex = f.variables['yVertex'] -xEdge = f.variables['xEdge'] -yEdge = f.variables['yEdge'] - -latCellVar = f.variables['latCell'] -lonCellVar = f.variables['lonCell'] -latVertexVar = f.variables['latVertex'] -lonVertexVar = f.variables['lonVertex'] -latEdgeVar = f.variables['latEdge'] -lonEdgeVar = f.variables['lonEdge'] - -print("Input file xCell min/max values:", xCell[:].min(), xCell[:].max()) -print("Input file yCell min/max values:", yCell[:].min(), yCell[:].max()) - -# make a CRS (coordinate reference system) for projections from Proj string: -crs_in = CRS.from_proj4(projections[options.projection]) -crs_out = CRS.from_proj4(projections['latlon']) - -# define a transformer -t = Transformer.from_crs(crs_in,crs_out) - -# populate x,y fields -# MPAS uses lat/lon in radians, so have pyproj return fields in radians. 
-lonCell, latCell = t.transform(xCell[:], yCell[:], radians=True) -lonVertex, latVertex = t.transform(xVertex[:], yVertex[:], radians=True) -lonEdge, latEdge = t.transform(xEdge[:], yEdge[:], radians=True) - -# change the longitude convention to use positive values [0 2pi] -lonCell = np.mod(lonCell, 2.0*np.pi) -lonVertex = np.mod(lonVertex, 2.0*np.pi) -lonEdge = np.mod(lonEdge, 2.0*np.pi) - -print("Calculated latCell min/max values (radians):", latCell.min(), latCell.max()) -print("Calculated lonCell min/max values (radians):", lonCell.min(), lonCell.max()) - -latCellVar[:] = latCell -lonCellVar[:] = lonCell -latVertexVar[:] = latVertex -lonVertexVar[:] = lonVertex -latEdgeVar[:] = latEdge -lonEdgeVar[:] = lonEdge - -# Update history attribute of netCDF file -thiscommand = datetime.now().strftime("%a %b %d %H:%M:%S %Y") + ": " + " ".join(sys.argv[:]) -if hasattr(f, 'history'): - newhist = '\n'.join([thiscommand, getattr(f, 'history')]) -else: - newhist = thiscommand -setattr(f, 'history', newhist ) - - -f.close() - -print("Lat/lon calculations completed. File has been written.")