From d0653e31e2d1caf635d485e6627f0017e4cc875c Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 22 Dec 2022 15:37:06 -0800 Subject: [PATCH] feat: add functions to read and interpolate from all constituents to address #91 to do: need to add tests that outputs are as expected to do: need to see if these are actual optimizations test: switch interpolation test to soft tabs --- doc/source/api_reference/constituents.rst | 15 + doc/source/api_reference/io/ATLAS.rst | 4 + doc/source/api_reference/io/FES.rst | 4 + doc/source/api_reference/io/GOT.rst | 4 + doc/source/api_reference/io/OTIS.rst | 4 + doc/source/index.rst | 1 + pyTMD/__init__.py | 1 + pyTMD/compute_tide_corrections.py | 2 + pyTMD/constituents.py | 57 +++ pyTMD/io/ATLAS.py | 313 ++++++++++++-- pyTMD/io/FES.py | 268 ++++++++++-- pyTMD/io/GOT.py | 235 ++++++++++- pyTMD/io/OTIS.py | 477 +++++++++++++++++++--- test/test_interpolate.py | 258 ++++++------ 14 files changed, 1397 insertions(+), 246 deletions(-) create mode 100644 doc/source/api_reference/constituents.rst create mode 100644 pyTMD/constituents.py diff --git a/doc/source/api_reference/constituents.rst b/doc/source/api_reference/constituents.rst new file mode 100644 index 00000000..09a79678 --- /dev/null +++ b/doc/source/api_reference/constituents.rst @@ -0,0 +1,15 @@ +============ +constituents +============ + +Basic tide model constituent class + +`Source code`__ + +.. __: https://github.com/tsutterley/pyTMD/blob/main/pyTMD/constituents.py + +General Attributes and Methods +============================== + +.. autoclass:: pyTMD.constituents + :members: diff --git a/doc/source/api_reference/io/ATLAS.rst b/doc/source/api_reference/io/ATLAS.rst index a472218c..2a99a1fa 100644 --- a/doc/source/api_reference/io/ATLAS.rst +++ b/doc/source/api_reference/io/ATLAS.rst @@ -20,6 +20,10 @@ Calling Sequence .. autofunction:: pyTMD.io.ATLAS.extract_constants +.. autofunction:: pyTMD.io.ATLAS.read_constants + +.. autofunction:: pyTMD.io.ATLAS.interpolate_constants + .. autofunction:: pyTMD.io.ATLAS.read_netcdf_grid .. autofunction:: pyTMD.io.ATLAS.read_netcdf_elevation diff --git a/doc/source/api_reference/io/FES.rst b/doc/source/api_reference/io/FES.rst index 733a5263..4b9a5f39 100644 --- a/doc/source/api_reference/io/FES.rst +++ b/doc/source/api_reference/io/FES.rst @@ -27,6 +27,10 @@ Calling Sequence .. autofunction:: pyTMD.io.FES.extract_constants +.. autofunction:: pyTMD.io.FES.read_constants + +.. autofunction:: pyTMD.io.FES.interpolate_constants + .. autofunction:: pyTMD.io.FES.read_ascii_file .. autofunction:: pyTMD.io.FES.read_netcdf_file diff --git a/doc/source/api_reference/io/GOT.rst b/doc/source/api_reference/io/GOT.rst index 0842008f..8cc8f621 100644 --- a/doc/source/api_reference/io/GOT.rst +++ b/doc/source/api_reference/io/GOT.rst @@ -20,6 +20,10 @@ Calling Sequence .. autofunction:: pyTMD.io.GOT.extract_constants +.. autofunction:: pyTMD.io.GOT.read_constants + +.. autofunction:: pyTMD.io.GOT.interpolate_constants + .. autofunction:: pyTMD.io.GOT.read_ascii_file .. autofunction:: pyTMD.io.GOT.extend_array diff --git a/doc/source/api_reference/io/OTIS.rst b/doc/source/api_reference/io/OTIS.rst index 312ef20d..7d1a7a09 100644 --- a/doc/source/api_reference/io/OTIS.rst +++ b/doc/source/api_reference/io/OTIS.rst @@ -25,6 +25,10 @@ Calling Sequence .. autofunction:: pyTMD.io.OTIS.extract_constants +.. autofunction:: pyTMD.io.OTIS.read_constants + +.. autofunction:: pyTMD.io.OTIS.interpolate_constants + .. autofunction:: pyTMD.io.OTIS.read_otis_grid .. 
autofunction:: pyTMD.io.OTIS.read_atlas_grid diff --git a/doc/source/index.rst b/doc/source/index.rst index 11b23408..1d4b97e1 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -33,6 +33,7 @@ conventions for calculating radial pole tide displacements. api_reference/check_tide_points.rst api_reference/compute_tide_corrections.rst api_reference/constants.rst + api_reference/constituents.rst api_reference/convert_ll_xy.rst api_reference/eop.rst api_reference/io/ATLAS.rst diff --git a/pyTMD/__init__.py b/pyTMD/__init__.py index 8271b049..ead1c452 100644 --- a/pyTMD/__init__.py +++ b/pyTMD/__init__.py @@ -24,6 +24,7 @@ from pyTMD.check_tide_points import check_tide_points from pyTMD.compute_tide_corrections import compute_tide_corrections from pyTMD.constants import constants, _ellipsoids +from pyTMD.constituents import constituents from pyTMD.convert_ll_xy import convert_ll_xy from pyTMD.load_constituent import load_constituent from pyTMD.load_nodal_corrections import load_nodal_corrections diff --git a/pyTMD/compute_tide_corrections.py b/pyTMD/compute_tide_corrections.py index ab02535e..80521543 100644 --- a/pyTMD/compute_tide_corrections.py +++ b/pyTMD/compute_tide_corrections.py @@ -292,12 +292,14 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE) + # use delta time at 2000.0 to match TMD outputs deltat = np.zeros_like(t) elif (model.format == 'netcdf'): amp,ph,D,c = pyTMD.io.ATLAS.extract_constants(lon, lat, model.grid_file, model.model_file, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) + # use delta time at 2000.0 to match TMD outputs deltat = np.zeros_like(t) elif (model.format == 'GOT'): amp,ph,c = pyTMD.io.GOT.extract_constants(lon, lat, model.model_file, diff --git a/pyTMD/constituents.py b/pyTMD/constituents.py new file mode 100644 index 00000000..e807d536 --- /dev/null +++ b/pyTMD/constituents.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python +u""" +constituents.py +Written by Tyler Sutterley (12/2022) +Basic tide model constituent class + +UPDATE HISTORY: + Written 12/2022 +""" + +class constituents: + """ + Class for tide model constituents + + Attributes + ---------- + fields: list + list of tide model constituents + """ + def __init__(self, **kwargs): + # set initial attributes + self.fields = [] + # set optional fields + for key, val in kwargs.items(): + setattr(self, key, val) + + def append(self, field, constituent): + """ + Append tide model constituents + + Parameters + ---------- + field: str + Tide model constituent name + constituent: float + Tide model constituent (complex form) + """ + # append field + self.fields.append(field) + setattr(self, field, constituent) + return self + + def get(self, field): + """ + Get model constituent + + Parameters + ---------- + field: str + Tide model constituent name + + Returns + ------- + constituent: float + Tide model constituent (complex form) + """ + return getattr(self, field) diff --git a/pyTMD/io/ATLAS.py b/pyTMD/io/ATLAS.py index dbe1265e..dce2b904 100644 --- a/pyTMD/io/ATLAS.py +++ b/pyTMD/io/ATLAS.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 12/2022: refactor tide read programs under io + new functions to read and interpolate from constituents class Updated 11/2022: place some imports within try/except statements use f-strings for formatting verbose or ascii 
output Updated 07/2022: fix setting of masked array data to NaN @@ -92,6 +93,7 @@ import warnings import numpy as np import scipy.interpolate +import pyTMD.constituents from pyTMD.bilinear_interp import bilinear_interp from pyTMD.nearest_extrap import nearest_extrap @@ -105,7 +107,7 @@ # ignore warnings warnings.filterwarnings("ignore") -# PURPOSE: extract tidal harmonic constants from tide models at coordinates +# PURPOSE: extract harmonic constants from tide models at coordinates def extract_constants(ilon, ilat, grid_file=None, model_files=None, @@ -161,7 +163,7 @@ def extract_constants(ilon, ilat, D: float bathymetry of tide model constituents: list - list of model constituents + Tide model constituent names """ # set default keyword arguments kwargs.setdefault('type', 'z') @@ -210,12 +212,9 @@ def extract_constants(ilon, ilat, # grid step size of tide model dlon = lon[1] - lon[0] - dlat = lat[1] - lat[0] # replace original values with extend arrays/matrices lon = extend_array(lon, dlon) bathymetry = extend_matrix(bathymetry) - # create masks - bathymetry.mask = (bathymetry.data == 0) # number of points npts = len(ilon) @@ -241,10 +240,11 @@ def extract_constants(ilon, ilat, else: # use scipy regular grid to interpolate values for a given method r1 = scipy.interpolate.RegularGridInterpolator((lat, lon), - bathymetry.data, method=kwargs['method'], bounds_error=False) + bathymetry.data.T, method=kwargs['method'], + bounds_error=False) r2 = scipy.interpolate.RegularGridInterpolator((lat, lon), - bathymetry.mask, method=kwargs['method'], bounds_error=False, - fill_value=1) + bathymetry.mask.T, method=kwargs['method'], + bounds_error=False, fill_value=1) D.data[:] = r1.__call__(np.c_[ilat, ilon]) D.mask[:] = np.ceil(r2.__call__(np.c_[ilat, ilon])).astype(bool) @@ -271,14 +271,12 @@ def extract_constants(ilon, ilat, raise FileNotFoundError(os.path.expanduser(model_file)) if (kwargs['type'] == 'z'): # read constituent from elevation file - z,con = read_netcdf_elevation(model_file, + z, cons = read_netcdf_elevation(model_file, compressed=kwargs['compressed']) # append constituent to list - constituents.append(con) + constituents.append(cons) # replace original values with extend matrices z = extend_matrix(z) - # update constituent mask with bathymetry mask - z.mask[:] |= bathymetry.mask[:] # interpolate amplitude and phase of the constituent z1 = np.ma.zeros((npts), dtype=z.dtype) z1.mask = np.zeros((npts), dtype=bool) @@ -325,26 +323,25 @@ def extract_constants(ilon, ilat, ph.mask[:,i] = np.copy(z1.mask) elif kwargs['type'] in ('U','u','V','v'): # read constituent from transport file - tr,con = read_netcdf_transport(model_file, kwargs['type'], + tr, cons = read_netcdf_transport(model_file, kwargs['type'], compressed=kwargs['compressed']) # append constituent to list - constituents.append(con) + constituents.append(cons) # replace original values with extend matrices tr = extend_matrix(tr) - # update constituent mask with bathymetry mask - tr.mask[:] |= bathymetry.mask[:] # interpolate amplitude and phase of the constituent tr1 = np.ma.zeros((npts), dtype=tr.dtype) tr1.mask = np.zeros((npts), dtype=bool) if (kwargs['method'] == 'bilinear'): # replace invalid values with nan tr.data[tr.mask] = np.nan - tr1.data[:]=bilinear_interp(lon, lat, tr, ilon, ilat, + tr1.data[:] = bilinear_interp(lon, lat, tr, ilon, ilat, dtype=tr.dtype) # mask invalid values tr1.mask[:] |= np.copy(D.mask) tr1.data[tr1.mask] = tr1.fill_value elif (kwargs['method'] == 'spline'): + # use scipy splines to interpolate values 
f1 = scipy.interpolate.RectBivariateSpline(lon, lat, tr.data.real.T, kx=1, ky=1) f2 = scipy.interpolate.RectBivariateSpline(lon, lat, @@ -353,7 +350,7 @@ def extract_constants(ilon, ilat, tr1.data.imag = f2.ev(ilon,ilat) # mask invalid values tr1.mask[:] |= np.copy(D.mask) - tr1.data[tr1.mask] = z1.fill_value + tr1.data[tr1.mask] = tr1.fill_value else: # use scipy regular grid to interpolate values r1 = scipy.interpolate.RegularGridInterpolator((lat, lon), @@ -387,10 +384,276 @@ def extract_constants(ilon, ilat, # return the interpolated values return (amplitude, phase, D, constituents) -# PURPOSE: wrapper function to extend an array +# PURPOSE: read harmonic constants from tide models +def read_constants(grid_file=None, model_files=None, **kwargs): + """ + Reads files for ATLAS netCDF4 tidal models + + Parameters + ---------- + grid_file: str or NoneType, default None + grid file for model + model_files: list or NoneType, default None + list of model files for each constituent + type: str, default 'z' + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'U'``: horizontal depth-averaged transport + - ``'v'``: vertical transport velocities + - ``'V'``: vertical depth-averaged transport + compressed: bool, default False + Input files are gzip compressed + + Returns + ------- + constituents: obj + complex form of tide model constituents + """ + # set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('compressed', True) + + # raise warning if model files are entered as a string + if isinstance(model_files,str): + warnings.warn("Tide model is entered as a string") + model_files = [model_files] + + # check that grid file is accessible + if not os.access(os.path.expanduser(grid_file), os.F_OK): + raise FileNotFoundError(os.path.expanduser(grid_file)) + + # read the tide grid file for bathymetry and spatial coordinates + lon, lat, bathymetry = read_netcdf_grid(grid_file, kwargs['type'], + compressed=kwargs['compressed']) + + # grid step size of tide model + dlon = lon[1] - lon[0] + # replace original values with extend arrays/matrices + lon = extend_array(lon, dlon) + bathymetry = extend_matrix(bathymetry) + # save output constituents + constituents = pyTMD.constituents( + longitude=lon, + latitude=lat, + bathymetry=bathymetry.data, + mask=bathymetry.mask + ) + + # read each model constituent + for i,model_file in enumerate(model_files): + # check that model file is accessible + if not os.access(os.path.expanduser(model_file), os.F_OK): + raise FileNotFoundError(os.path.expanduser(model_file)) + if (kwargs['type'] == 'z'): + # read constituent from elevation file + hc, cons = read_netcdf_elevation(model_file, + compressed=kwargs['compressed']) + elif kwargs['type'] in ('U','u','V','v'): + # read constituent from transport file + hc, cons = read_netcdf_transport(model_file, kwargs['type'], + compressed=kwargs['compressed']) + # replace original values with extend matrices + hc = extend_matrix(hc) + # append extended constituent + constituents.append(cons, hc) + + # return the complex form of the model constituents + return constituents + +# PURPOSE: interpolate constants from tide models to input coordinates +def interpolate_constants(ilon, ilat, constituents, **kwargs): + """ + Interpolate constants from ATLAS tidal models to input coordinates + + Makes initial calculations to run the tide program + + Parameters + ---------- + ilon: float + longitude to interpolate + ilat: float + latitude to interpolate + constituents: obj 
+ Tide model constituents (complex form) + type: str, default 'z' + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'U'``: horizontal depth-averaged transport + - ``'v'``: vertical transport velocities + - ``'V'``: vertical depth-averaged transport + method: str, default 'spline' + Interpolation method + + - ``'bilinear'``: quick bilinear interpolation + - ``'spline'``: scipy bivariate spline interpolation + - ``'linear'``, ``'nearest'``: scipy regular grid interpolations + extrapolate: bool, default False + Extrapolate model using nearest-neighbors + cutoff: float, default 10.0 + Extrapolation cutoff in kilometers + + Set to np.inf to extrapolate for all points + scale: float, default 1.0 + Scaling factor for converting to output units + + Returns + ------- + amplitude: float + amplitudes of tidal constituents + phase: float + phases of tidal constituents + D: float + bathymetry of tide model + """ + # set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('scale', 1.0) + # verify that constituents are valid class instance + assert isinstance(constituents, pyTMD.constituents) + # extract model coordinates + lon = np.copy(constituents.longitude) + lat = np.copy(constituents.latitude) + + # adjust dimensions of input coordinates to be iterable + ilon = np.atleast_1d(ilon) + ilat = np.atleast_1d(ilat) + # adjust longitudinal convention of input latitude and longitude + # to fit tide model convention + if (np.min(ilon) < 0.0) & (np.max(lon) > 180.0): + # input points convention (-180:180) + # tide model convention (0:360) + ilon[ilon < 0.0] += 360.0 + elif (np.max(ilon) > 180.0) & (np.min(lon) < 0.0): + # input points convention (0:360) + # tide model convention (-180:180) + ilon[ilon > 180.0] -= 360.0 + + # number of points + npts = len(ilon) + # create masked array of model bathymetry + bathymetry = np.ma.array(constituents.bathymetry, + mask=constituents.mask, fill_value=0.0) + # interpolate bathymetry and mask to output points + D = np.ma.zeros((npts)) + D.mask = np.zeros((npts), dtype=bool) + if (kwargs['method'] == 'bilinear'): + # replace invalid values with nan + bathymetry.data[bathymetry.mask] = np.nan + # use quick bilinear to interpolate values + D.data[:] = bilinear_interp(lon, lat, bathymetry, ilon, ilat) + # replace nan values with fill_value + D.mask[:] = np.isnan(D.data) + D.data[D.mask] = D.fill_value + elif (kwargs['method'] == 'spline'): + # use scipy bivariate splines to interpolate values + f1 = scipy.interpolate.RectBivariateSpline(lon, lat, + bathymetry.data.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(lon, lat, + bathymetry.mask.T, kx=1, ky=1) + D.data[:] = f1.ev(ilon,ilat) + D.mask[:] = np.ceil(f2.ev(ilon,ilat)).astype(bool) + else: + # use scipy regular grid to interpolate values for a given method + r1 = scipy.interpolate.RegularGridInterpolator((lat, lon), + bathymetry.data, method=kwargs['method'], + bounds_error=False) + r2 = scipy.interpolate.RegularGridInterpolator((lat, lon), + bathymetry.mask, method=kwargs['method'], + bounds_error=False, fill_value=1) + D.data[:] = r1.__call__(np.c_[ilat, ilon]) + D.mask[:] = np.ceil(r2.__call__(np.c_[ilat, ilon])).astype(bool) + + # u and v are velocities in cm/s + if kwargs['type'] in ('v','u'): + unit_conv = (D.data/100.0) + # h is elevation values in m + # U and V are transports in m^2/s + elif kwargs['type'] in 
('z','V','U'): + unit_conv = 1.0 + + # number of constituents + nc = len(constituents.fields) + # amplitude and phase + ampl = np.ma.zeros((npts, nc)) + ampl.mask = np.zeros((npts, nc), dtype=bool) + ph = np.ma.zeros((npts, nc)) + ph.mask = np.zeros((npts, nc), dtype=bool) + # default complex fill value + fill_value = np.ma.default_fill_value(np.dtype(complex)) + # read and interpolate each constituent + for i, c in enumerate(constituents.fields): + # get model constituent + hc = constituents.get(c) + # interpolate amplitude and phase of the constituent + hci = np.ma.zeros((npts), dtype=hc.dtype) + hci.mask = np.zeros((npts), dtype=bool) + if (kwargs['method'] == 'bilinear'): + # replace invalid values with nan + hc.data[bathymetry.mask] = np.nan + hci.data[:] = bilinear_interp(lon, lat, hc, ilon, ilat, + dtype=hc.dtype) + # mask invalid values + hci.mask[:] |= np.copy(D.mask) + hci.data[hci.mask] = hci.fill_value + elif (kwargs['method'] == 'spline'): + # replace invalid values with fill value + hc.data[bathymetry.mask] = fill_value + # use scipy splines to interpolate values + f1 = scipy.interpolate.RectBivariateSpline(lon, lat, + hc.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(lon, lat, + hc.imag.T, kx=1, ky=1) + hci.data.real = f1.ev(ilon,ilat) + hci.data.imag = f2.ev(ilon,ilat) + # mask invalid values + hci.mask[:] |= np.copy(D.mask) + hci.data[hci.mask] = hci.fill_value + else: + # replace invalid values with fill value + hc.data[bathymetry.mask] = fill_value + # use scipy regular grid to interpolate values + r1 = scipy.interpolate.RegularGridInterpolator((lat, lon), + hc, method=kwargs['method'], bounds_error=False, + fill_value=hci.fill_value) + hci.data[:] = r1.__call__(np.c_[ilat, ilon]) + # mask invalid values + hci.mask[:] |= np.copy(D.mask) + hci.data[hci.mask] = hci.fill_value + # extrapolate data using nearest-neighbors + if kwargs['extrapolate'] and np.any(hci.mask): + # find invalid data points + inv, = np.nonzero(hci.mask) + # replace invalid values with nan + hc[constituents.mask] = np.nan + # extrapolate points within cutoff of valid model points + hci[inv] = nearest_extrap(lon, lat, hc, ilon[inv], ilat[inv], + dtype=hc.dtype, cutoff=kwargs['cutoff']) + # convert units + # amplitude and phase of the constituent + ampl.data[:,i] = np.abs(hci.data)/unit_conv + ampl.mask[:,i] = np.copy(hci.mask) + ph.data[:,i] = np.arctan2(-np.imag(hci.data), np.real(hci.data)) + ph.mask[:,i] = np.copy(hci.mask) + + # convert amplitude from input units to meters + amplitude = ampl*kwargs['scale'] + # convert phase to degrees + phase = ph*180.0/np.pi + phase[phase < 0] += 360.0 + # return the interpolated values + return (amplitude, phase, D) + +# PURPOSE: Extend a longitude array def extend_array(input_array, step_size): """ - Wrapper function to extend an array + Extends a longitude array Parameters ---------- @@ -412,10 +675,10 @@ def extend_array(input_array, step_size): temp[-1] = input_array[-1] + step_size return temp -# PURPOSE: wrapper function to extend a matrix +# PURPOSE: Extend a global matrix def extend_matrix(input_matrix): """ - Wrapper function to extend a matrix + Extends a global matrix Parameters ---------- @@ -427,7 +690,7 @@ def extend_matrix(input_matrix): temp: float extended matrix """ - ny,nx = np.shape(input_matrix) + ny, nx = np.shape(input_matrix) temp = np.ma.zeros((ny,nx+2), dtype=input_matrix.dtype) temp[:,0] = input_matrix[:,-1] temp[:,1:-1] = input_matrix[:,:] @@ -471,9 +734,9 @@ def read_netcdf_grid(input_file, variable, **kwargs): if 
kwargs['compressed']: # read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file), 'rb') - fileID=netCDF4.Dataset(uuid.uuid4().hex, 'r', memory=f.read()) + fileID = netCDF4.Dataset(uuid.uuid4().hex, 'r', memory=f.read()) else: - fileID=netCDF4.Dataset(os.path.expanduser(input_file), 'r') + fileID = netCDF4.Dataset(os.path.expanduser(input_file), 'r') # variable dimensions nx = fileID.dimensions['nx'].size ny = fileID.dimensions['ny'].size diff --git a/pyTMD/io/FES.py b/pyTMD/io/FES.py index 98b777f9..ededba97 100644 --- a/pyTMD/io/FES.py +++ b/pyTMD/io/FES.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 12/2022: refactor tide read programs under io + new functions to read and interpolate from constituents class Updated 11/2022: place some imports within try/except statements use f-strings for formatting verbose or ascii output Updated 05/2022: reformat arguments to extract_FES_constants definition @@ -89,6 +90,7 @@ import warnings import numpy as np import scipy.interpolate +import pyTMD.constituents from pyTMD.bilinear_interp import bilinear_interp from pyTMD.nearest_extrap import nearest_extrap @@ -102,10 +104,10 @@ # ignore warnings warnings.filterwarnings("ignore") -# PURPOSE: extract tidal harmonic constants from tide models at coordinates +# PURPOSE: extract harmonic constants from tide models at coordinates def extract_constants(ilon, ilat, model_files=None, **kwargs): """ - Reads files for an ascii or netCDF4 tidal model + Reads files for a FES ascii or netCDF4 tidal model Makes initial calculations to run the tide program @@ -230,7 +232,8 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): hci.mask[:] |= np.isnan(hci.data) hci.data[hci.mask] = hci.fill_value elif (kwargs['method'] == 'spline'): - # interpolate complex form of the constituent with scipy + # interpolate complex form of the constituent + # use scipy splines to interpolate values f1=scipy.interpolate.RectBivariateSpline(lon, lat, hc.data.real.T, kx=1, ky=1) f2=scipy.interpolate.RectBivariateSpline(lon, lat, @@ -243,7 +246,8 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): # replace invalid values with fill_value hci.data[hci.mask] = hci.fill_value else: - # use scipy regular grid to interpolate values for a given method + # interpolate complex form of the constituent + # use scipy regular grid to interpolate values r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), hc.data, method=kwargs['method'], bounds_error=False, fill_value=hci.fill_value) @@ -278,7 +282,219 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): amplitude.data[amplitude.mask] = amplitude.fill_value phase.data[phase.mask] = phase.fill_value # return the interpolated values - return (amplitude,phase) + return (amplitude, phase) + +# PURPOSE: read harmonic constants from tide models +def read_constants(model_files=None, **kwargs): + """ + Reads files for a FES ascii or netCDF4 tidal model + + Parameters + ---------- + model_files: list or NoneType, default None + list of model files for each constituent + type: str, default 'z' + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'v'``: vertical transport velocities + version: str or NoneType, default None + Model version to read + + - ``'FES1999'`` + - ``'FES2004'`` + - ``'FES2012'`` + - ``'FES2014'`` + - ``'EOT20'`` + compressed: bool, default False + Input files are gzip compressed + + Returns + ------- + constituents: obj + complex form of tide model constituents + """ + # set default keyword 
arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('version', None) + kwargs.setdefault('compressed', False) + + # raise warning if model files are entered as a string + if isinstance(model_files,str): + warnings.warn("Tide model is entered as a string") + model_files = [model_files] + + # save output constituents + constituents = pyTMD.constituents() + # read each model constituent + for i,fi in enumerate(model_files): + # check that model file is accessible + if not os.access(os.path.expanduser(fi), os.F_OK): + raise FileNotFoundError(os.path.expanduser(fi)) + # read constituent from elevation file + if kwargs['version'] in ('FES1999','FES2004'): + # FES ascii constituent files + hc,lon,lat = read_ascii_file(os.path.expanduser(fi), **kwargs) + elif kwargs['version'] in ('FES2012','FES2014','EOT20'): + # FES netCDF4 constituent files + hc,lon,lat = read_netcdf_file(os.path.expanduser(fi), **kwargs) + # append extended constituent + constituents.append(i, hc) + # set model coordinates + setattr(constituents, 'longitude', lon) + setattr(constituents, 'latitude', lat) + + # return the complex form of the model constituents + return constituents + +# PURPOSE: interpolate constants from tide models to input coordinates +def interpolate_constants(ilon, ilat, constituents, **kwargs): + """ + Interpolate constants from FES tidal models to input coordinates + + Makes initial calculations to run the tide program + + Parameters + ---------- + ilon: float + longitude to interpolate + ilat: float + latitude to interpolate + constituents: obj + Tide model constituents (complex form) + method: str, default 'spline' + Interpolation method + + - ``'bilinear'``: quick bilinear interpolation + - ``'spline'``: scipy bivariate spline interpolation + - ``'linear'``, ``'nearest'``: scipy regular grid interpolations + extrapolate: bool, default False + Extrapolate model using nearest-neighbors + cutoff: float, default 10.0 + Extrapolation cutoff in kilometers + + Set to np.inf to extrapolate for all points + scale: float, default 1.0 + Scaling factor for converting to output units + + Returns + ------- + amplitude: float + amplitudes of tidal constituents + phase: float + phases of tidal constituents + """ + # set default keyword arguments + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('scale', 1.0) + # verify that constituents are valid class instance + assert isinstance(constituents, pyTMD.constituents) + # extract model coordinates + lon = np.copy(constituents.longitude) + lat = np.copy(constituents.latitude) + + # adjust dimensions of input coordinates to be iterable + ilon = np.atleast_1d(ilon) + ilat = np.atleast_1d(ilat) + # adjust longitudinal convention of input latitude and longitude + # to fit tide model convention + if (np.min(ilon) < 0.0) & (np.max(lon) > 180.0): + # input points convention (-180:180) + # tide model convention (0:360) + ilon[ilon<0.0] += 360.0 + elif (np.max(ilon) > 180.0) & (np.min(lon) < 0.0): + # input points convention (0:360) + # tide model convention (-180:180) + ilon[ilon>180.0] -= 360.0 + # number of points + npts = len(ilon) + # number of constituents + nc = len(constituents.fields) + + # amplitude and phase + amplitude = np.ma.zeros((npts,nc)) + amplitude.mask = np.zeros((npts,nc), dtype=bool) + ph = np.ma.zeros((npts,nc)) + ph.mask = np.zeros((npts,nc), dtype=bool) + # default complex fill value + fill_value = np.ma.default_fill_value(np.dtype(complex)) + # read and 
interpolate each constituent + for i, c in enumerate(constituents.fields): + # get model constituent + hc = constituents.get(c) + # interpolated complex form of constituent oscillation + hci = np.ma.zeros((npts), dtype=hc.dtype, fill_value=hc.fill_value) + hci.mask = np.zeros((npts),dtype=bool) + # interpolate amplitude and phase of the constituent + if (kwargs['method'] == 'bilinear'): + # replace invalid values with nan + hc.data[hc.mask] = np.nan + # use quick bilinear to interpolate values + hci.data[:] = bilinear_interp(lon, lat, hc, ilon, ilat, + dtype=hc.dtype) + # replace nan values with fill_value + hci.mask[:] |= np.isnan(hci.data) + hci.data[hci.mask] = hci.fill_value + elif (kwargs['method'] == 'spline'): + # replace invalid values with fill value + hc.data[hc.mask] = fill_value + # interpolate complex form of the constituent + # use scipy splines to interpolate values + f1=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.real.T, kx=1, ky=1) + f2=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.imag.T, kx=1, ky=1) + f3=scipy.interpolate.RectBivariateSpline(lon, lat, + constituents.mask.T, kx=1, ky=1) + hci.data.real[:] = f1.ev(ilon,ilat) + hci.data.imag[:] = f2.ev(ilon,ilat) + hci.mask[:] = f3.ev(ilon,ilat).astype(bool) + # replace invalid values with fill_value + hci.data[hci.mask] = hci.fill_value + else: + # replace invalid values with fill value + hc.data[hc.mask] = fill_value + # interpolate complex form of the constituent + # use scipy regular grid to interpolate values + r1 = scipy.interpolate.RegularGridInterpolator((lon, lat), + hc.data, method=kwargs['method'], bounds_error=False, + fill_value=hci.fill_value) + r2 = scipy.interpolate.RegularGridInterpolator((lon, lat), + constituents.mask, method=kwargs['method'], bounds_error=False, + fill_value=1) + hci.data[:] = r1.__call__(np.c_[ilat,ilon]) + hci.mask[:] = np.ceil(r2.__call__(np.c_[ilat,ilon])).astype(bool) + # replace invalid values with fill_value + hci.mask[:] |= (hci.data == hci.fill_value) + hci.data[hci.mask] = hci.fill_value + # extrapolate data using nearest-neighbors + if kwargs['extrapolate'] and np.any(hci.mask): + # find invalid data points + inv, = np.nonzero(hci.mask) + # replace invalid values with nan + hc.data[hc.mask] = np.nan + # extrapolate points within cutoff of valid model points + hci[inv] = nearest_extrap(lon, lat, hc, + ilon[inv], ilat[inv], dtype=hc.dtype, + cutoff=kwargs['cutoff']) + # convert amplitude from input units to meters + amplitude.data[:,i] = np.abs(hci.data)*kwargs['scale'] + amplitude.mask[:,i] = np.copy(hci.mask) + # phase of the constituent in radians + ph.data[:,i] = np.arctan2(-np.imag(hci.data),np.real(hci.data)) + ph.mask[:,i] = np.copy(hci.mask) + + # convert phase to degrees + phase = ph*180.0/np.pi + phase.data[phase.data < 0] += 360.0 + # replace data for invalid mask values + amplitude.data[amplitude.mask] = amplitude.fill_value + phase.data[phase.mask] = phase.fill_value + # return the interpolated values + return (amplitude, phase) # PURPOSE: read FES ascii tide model grid files def read_ascii_file(input_file, **kwargs): @@ -312,21 +528,21 @@ def read_ascii_file(input_file, **kwargs): file_contents = f.read().splitlines() # parse header text # longitude range (lonmin, lonmax) - lonmin,lonmax = np.array(file_contents[0].split(), dtype=np.float64) + lonmin, lonmax = np.array(file_contents[0].split(), dtype=np.float64) # latitude range (latmin, latmax) - latmin,latmax = np.array(file_contents[1].split(), dtype=np.float64) + latmin, latmax = 
np.array(file_contents[1].split(), dtype=np.float64) # grid step size (dlon, dlat) - dlon,dlat = np.array(file_contents[2].split(), dtype=np.float64) + dlon, dlat = np.array(file_contents[2].split(), dtype=np.float64) # grid dimensions (nlon, nlat) - nlon,nlat = np.array(file_contents[3].split(), dtype=int) + nlon, nlat = np.array(file_contents[3].split(), dtype=int) # mask fill value masked_values = file_contents[4].split() fill_value = np.float64(masked_values[0]) # create output variables lat = np.linspace(latmin, latmax, nlat) lon = np.linspace(lonmin,lonmax,nlon) - amp = np.ma.zeros((nlat,nlon),fill_value=fill_value,dtype=np.float32) - ph = np.ma.zeros((nlat,nlon),fill_value=fill_value,dtype=np.float32) + amp = np.ma.zeros((nlat,nlon), fill_value=fill_value, dtype=np.float32) + ph = np.ma.zeros((nlat,nlon), fill_value=fill_value, dtype=np.float32) # create masks for output variables (0=valid) amp.mask = np.zeros((nlat,nlon),dtype=bool) ph.mask = np.zeros((nlat,nlon),dtype=bool) @@ -336,21 +552,25 @@ def read_ascii_file(input_file, **kwargs): for i in range(nlat): for j in range(nlon//30): j1 = j*30 - amp.data[i,j1:j1+30]=np.array(file_contents[i1].split(),dtype='f') - ph.data[i,j1:j1+30]=np.array(file_contents[i1+1].split(),dtype='f') + amplitude_data = file_contents[i1].split() + amp.data[i,j1:j1+30] = np.array(amplitude_data, dtype=np.float32) + phase_data = file_contents[i1+1].split() + ph.data[i,j1:j1+30] = np.array(phase_data, dtype=np.float32) i1 += 2 # add last tidal variables j1 = (j+1)*30 j2 = nlon % 30 - amp.data[i,j1:j1+j2] = np.array(file_contents[i1].split(),dtype='f') - ph.data[i,j1:j1+j2] = np.array(file_contents[i1+1].split(),dtype='f') + amplitude_data = file_contents[i1].split() + amp.data[i,j1:j1+j2] = np.array(amplitude_data, dtype=np.float32) + phase_data = file_contents[i1+1].split() + ph.data[i,j1:j1+j2] = np.array(phase_data, dtype=np.float32) i1 += 2 # calculate complex form of constituent oscillation - hc = amp*np.exp(-1j*ph*np.pi/180.0) - # set masks - hc.mask = (amp.data == amp.fill_value) | (ph.data == ph.fill_value) + mask = (amp.data == amp.fill_value) | (ph.data == ph.fill_value) + hc = np.ma.array(amp*np.exp(-1j*ph*np.pi/180.0), mask=mask, + fill_value=np.ma.default_fill_value(np.dtype(complex))) # return output variables - return (hc,lon,lat) + return (hc, lon, lat) # PURPOSE: read FES netCDF4 tide model files def read_netcdf_file(input_file, **kwargs): @@ -389,7 +609,7 @@ def read_netcdf_file(input_file, **kwargs): if kwargs['compressed']: # read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file),'rb') - fileID = netCDF4.Dataset(uuid.uuid4().hex,'r',memory=f.read()) + fileID = netCDF4.Dataset(uuid.uuid4().hex, 'r', memory=f.read()) else: fileID = netCDF4.Dataset(os.path.expanduser(input_file), 'r') # variable dimensions for each model @@ -413,10 +633,10 @@ def read_netcdf_file(input_file, **kwargs): fileID.close() f.close() if kwargs['compressed'] else None # calculate complex form of constituent oscillation - hc = amp*np.exp(-1j*ph*np.pi/180.0) - # set masks - hc.mask = (amp.data == amp.fill_value) | \ + mask = (amp.data == amp.fill_value) | \ (ph.data == ph.fill_value) | \ np.isnan(amp.data) | np.isnan(ph.data) + hc = np.ma.array(amp*np.exp(-1j*ph*np.pi/180.0), mask=mask, + fill_value=np.ma.default_fill_value(np.dtype(complex))) # return output variables - return (hc,lon,lat) + return (hc, lon, lat) \ No newline at end of file diff --git a/pyTMD/io/GOT.py b/pyTMD/io/GOT.py index e5520315..8d7b6dea 100644 --- a/pyTMD/io/GOT.py +++ 
b/pyTMD/io/GOT.py @@ -42,6 +42,7 @@ UPDATE HISTORY: Updated 12/2022: refactor tide read programs under io + new functions to read and interpolate from constituents class Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: reformat arguments to extract_GOT_constants definition changed keyword arguments to camel case @@ -81,10 +82,11 @@ import warnings import numpy as np import scipy.interpolate +import pyTMD.constituents from pyTMD.bilinear_interp import bilinear_interp from pyTMD.nearest_extrap import nearest_extrap -# PURPOSE: extract tidal harmonic constants out of GOT model at coordinates +# PURPOSE: extract harmonic constants from tide models at coordinates def extract_constants(ilon, ilat, model_files=None, **kwargs): """ Reads files for Richard Ray's Global Ocean Tide (GOT) models @@ -156,6 +158,7 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): npts = len(ilon) # number of constituents nc = len(model_files) + # list of constituents constituents = [] # amplitude and phase @@ -203,7 +206,8 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): hci.mask[:] |= np.isnan(hci.data) hci.data[hci.mask] = hci.fill_value elif (kwargs['method'] == 'spline'): - # interpolate complex form of the constituent with scipy + # interpolate complex form of the constituent + # use scipy splines to interpolate values f1=scipy.interpolate.RectBivariateSpline(lon, lat, hc.data.real.T, kx=1, ky=1) f2=scipy.interpolate.RectBivariateSpline(lon, lat, @@ -216,7 +220,8 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): # replace invalid values with fill_value hci.data[hci.mask] = hci.fill_value else: - # use scipy regular grid to interpolate values for a given method + # interpolate complex form of the constituent + # use scipy regular grid to interpolate values r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), hc.data, method=kwargs['method'], bounds_error=False, fill_value=hci.fill_value) @@ -253,10 +258,218 @@ def extract_constants(ilon, ilat, model_files=None, **kwargs): # return the interpolated values return (amplitude, phase, constituents) -# PURPOSE: wrapper function to extend an array +# PURPOSE: read harmonic constants from tide models +def read_constants(model_files=None, **kwargs): + """ + Reads files for Richard Ray's Global Ocean Tide (GOT) models + + Parameters + ---------- + model_files: list or NoneType, default None + list of model files for each constituent + compressed: bool, default False + Input files are gzip compressed + + Returns + ------- + constituents: obj + complex form of tide model constituents + """ + # set default keyword arguments + kwargs.setdefault('compressed', False) + + # raise warning if model files are entered as a string + if isinstance(model_files,str): + warnings.warn("Tide model is entered as a string") + model_files = [model_files] + + # save output constituents + constituents = pyTMD.constituents() + # read each model constituent + for i, model_file in enumerate(model_files): + # check that model file is accessible + if not os.access(os.path.expanduser(model_file), os.F_OK): + raise FileNotFoundError(os.path.expanduser(model_file)) + # read constituent from elevation file + hc,lon,lat,cons = read_ascii_file(os.path.expanduser(model_file), + compressed=kwargs['compressed']) + # grid step size of tide model + dlon = np.abs(lon[1] - lon[0]) + # replace original values with extend matrices + lon = extend_array(lon, dlon) + hc = extend_matrix(hc) + # append extended constituent + 
constituents.append(cons, hc) + # set model coordinates + setattr(constituents, 'longitude', lon) + setattr(constituents, 'latitude', lat) + + # return the complex form of the model constituents + return constituents + +# PURPOSE: interpolate constants from tide models to input coordinates +def interpolate_constants(ilon, ilat, constituents, **kwargs): + """ + Interpolate constants from GOT tidal models to input coordinates + + Makes initial calculations to run the tide program + + Parameters + ---------- + ilon: float + longitude to interpolate + ilat: float + latitude to interpolate + constituents: obj + Tide model constituents (complex form) + method: str, default 'spline' + Interpolation method + + - ``'bilinear'``: quick bilinear interpolation + - ``'spline'``: scipy bivariate spline interpolation + - ``'linear'``, ``'nearest'``: scipy regular grid interpolations + extrapolate: bool, default False + Extrapolate model using nearest-neighbors + cutoff: float, default 10.0 + Extrapolation cutoff in kilometers + + Set to np.inf to extrapolate for all points + scale: float, default 1.0 + Scaling factor for converting to output units + + Returns + ------- + amplitude: float + amplitudes of tidal constituents + phase: float + phases of tidal constituents + """ + # set default keyword arguments + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('scale', 1.0) + + # verify that constituents are valid class instance + assert isinstance(constituents, pyTMD.constituents) + # extract model coordinates + lon = np.copy(constituents.longitude) + lat = np.copy(constituents.latitude) + + # adjust dimensions of input coordinates to be iterable + ilon = np.atleast_1d(ilon) + ilat = np.atleast_1d(ilat) + # adjust longitudinal convention of input latitude and longitude + # to fit tide model convention + if (np.min(ilon) < 0.0) & (np.max(lon) > 180.0): + # input points convention (-180:180) + # tide model convention (0:360) + ilon[ilon<0.0] += 360.0 + elif (np.max(ilon) > 180.0) & (np.min(lon) < 0.0): + # input points convention (0:360) + # tide model convention (-180:180) + ilon[ilon>180.0] -= 360.0 + # number of points + npts = len(ilon) + # number of constituents + nc = len(constituents.fields) + + # adjust longitudinal convention of input latitude and longitude + # to fit tide model convention + if (np.min(ilon) < 0.0) & (np.max(lon) > 180.0): + # input points convention (-180:180) + # tide model convention (0:360) + ilon[ilon<0.0] += 360.0 + elif (np.max(ilon) > 180.0) & (np.min(lon) < 0.0): + # input points convention (0:360) + # tide model convention (-180:180) + ilon[ilon>180.0] -= 360.0 + + # amplitude and phase + amplitude = np.ma.zeros((npts,nc)) + amplitude.mask = np.zeros((npts,nc),dtype=bool) + ph = np.ma.zeros((npts,nc)) + ph.mask = np.zeros((npts,nc),dtype=bool) + # default complex fill value + fill_value = np.ma.default_fill_value(np.dtype(complex)) + # read and interpolate each constituent + for i, c in enumerate(constituents.fields): + # get model constituent + hc = constituents.get(c) + # interpolated complex form of constituent oscillation + hci = np.ma.zeros((npts), dtype=hc.dtype, fill_value=hc.fill_value) + hci.mask = np.zeros((npts),dtype=bool) + # interpolate amplitude and phase of the constituent + if (kwargs['method'] == 'bilinear'): + # replace invalid values with nan + hc[hc.mask] = np.nan + # use quick bilinear to interpolate values + hci.data[:] = bilinear_interp(lon, lat, hc, ilon, ilat, + 
dtype=hc.dtype) + # replace nan values with fill_value + hci.mask[:] |= np.isnan(hci.data) + hci.data[hci.mask] = hci.fill_value + elif (kwargs['method'] == 'spline'): + # replace invalid values with fill value + hc[hc.mask] = fill_value + # interpolate complex form of the constituent + # use scipy splines to interpolate values + f1=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.real.T, kx=1, ky=1) + f2=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.imag.T, kx=1, ky=1) + f3=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.mask.T, kx=1, ky=1) + hci.data.real[:] = f1.ev(ilon,ilat) + hci.data.imag[:] = f2.ev(ilon,ilat) + hci.mask[:] = f3.ev(ilon,ilat).astype(bool) + # replace invalid values with fill_value + hci.data[hci.mask] = hci.fill_value + else: + # replace invalid values with fill value + hc[hc.mask] = fill_value + # interpolate complex form of the constituent + # use scipy regular grid to interpolate values + r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), + hc.data, method=kwargs['method'], bounds_error=False, + fill_value=hci.fill_value) + r2 = scipy.interpolate.RegularGridInterpolator((lat,lon), + hc.mask, method=kwargs['method'], bounds_error=False, + fill_value=1) + hci.data[:] = r1.__call__(np.c_[ilat,ilon]) + hci.mask[:] = np.ceil(r2.__call__(np.c_[ilat,ilon])).astype(bool) + # replace invalid values with fill_value + hci.mask[:] |= (hci.data == hci.fill_value) + hci.data[hci.mask] = hci.fill_value + # extrapolate data using nearest-neighbors + if kwargs['extrapolate'] and np.any(hci.mask): + # find invalid data points + inv, = np.nonzero(hci.mask) + # replace invalid values with nan + hc[hc.mask] = np.nan + # extrapolate points within cutoff of valid model points + hci[inv] = nearest_extrap(lon, lat, hc, ilon[inv], ilat[inv], + dtype=hc.dtype, cutoff=kwargs['cutoff']) + # convert amplitude from input units to meters + amplitude.data[:,i] = np.abs(hci.data)*kwargs['scale'] + amplitude.mask[:,i] = np.copy(hci.mask) + # phase of the constituent in radians + ph.data[:,i] = np.arctan2(-np.imag(hci.data),np.real(hci.data)) + ph.mask[:,i] = np.copy(hci.mask) + + # convert phase to degrees + phase = ph*180.0/np.pi + phase.data[phase.data < 0] += 360.0 + # replace data for invalid mask values + amplitude.data[amplitude.mask] = amplitude.fill_value + phase.data[phase.mask] = phase.fill_value + # return the interpolated values + return (amplitude, phase) + +# PURPOSE: Extend a longitude array def extend_array(input_array, step_size): """ - Wrapper function to extend an array + Extends a longitude array Parameters ---------- @@ -271,7 +484,7 @@ def extend_array(input_array, step_size): extended array """ n = len(input_array) - temp = np.zeros((n+3),dtype=input_array.dtype) + temp = np.zeros((n+3), dtype=input_array.dtype) # extended array [x-1,x0,...,xN,xN+1,xN+2] temp[0] = input_array[0] - step_size temp[1:-2] = input_array[:] @@ -279,10 +492,10 @@ def extend_array(input_array, step_size): temp[-1] = input_array[-1] + 2.0*step_size return temp -# PURPOSE: wrapper function to extend a matrix +# PURPOSE: Extend a global matrix def extend_matrix(input_matrix): """ - Wrapper function to extend a matrix + Extends a global matrix Parameters ---------- @@ -294,8 +507,8 @@ def extend_matrix(input_matrix): temp: float extended matrix """ - ny,nx = np.shape(input_matrix) - temp = np.ma.zeros((ny,nx+3),dtype=input_matrix.dtype) + ny, nx = np.shape(input_matrix) + temp = np.ma.zeros((ny,nx+3), dtype=input_matrix.dtype) temp[:,0] = input_matrix[:,-1] temp[:,1:-2] = 
input_matrix[:,:] temp[:,-2] = input_matrix[:,0] @@ -378,4 +591,4 @@ def read_ascii_file(input_file, **kwargs): # set masks hc.mask = (amp.data == amp.fill_value) | (ph.data == ph.fill_value) # return output variables - return (hc,lon,lat,cons) + return (hc, lon, lat, cons) diff --git a/pyTMD/io/OTIS.py b/pyTMD/io/OTIS.py index 5187fa6e..0ef8bb4c 100644 --- a/pyTMD/io/OTIS.py +++ b/pyTMD/io/OTIS.py @@ -61,6 +61,7 @@ UPDATE HISTORY: Updated 12/2022: refactor tide read programs under io + new functions to read and interpolate from constituents class Updated 11/2022: place some imports within try/except statements fix variable reads for ATLAS compact data formats use f-strings for formatting verbose or ascii output @@ -110,6 +111,7 @@ import warnings import numpy as np import scipy.interpolate +import pyTMD.constituents from pyTMD.convert_ll_xy import convert_ll_xy from pyTMD.bilinear_interp import bilinear_interp from pyTMD.nearest_extrap import nearest_extrap @@ -124,14 +126,14 @@ # ignore warnings warnings.filterwarnings("ignore") -# PURPOSE: extract tidal harmonic constants from tide models at coordinates +# PURPOSE: extract harmonic constants from tide models at coordinates def extract_constants(ilon, ilat, grid_file=None, model_file=None, EPSG=None, **kwargs): """ - Reads files for an OTIS-formatted tidal model + Reads files from tide models in OTIS and ATLAS-compact formats Makes initial calculations to run the tide program @@ -258,7 +260,7 @@ def extract_constants(ilon, ilat, invalid = (x < xi.min()) | (x > xi.max()) | (y < yi.min()) | (y > yi.max()) # masks zero values - hz = np.ma.array(hz,mask=(hz==0)) + hz = np.ma.array(hz, mask=(hz==0)) if (kwargs['type'] != 'z'): # replace original values with extend matrices if global_grid: @@ -267,8 +269,8 @@ def extract_constants(ilon, ilat, mu = extend_matrix(mu) mv = extend_matrix(mv) # masks zero values - hu = np.ma.array(hu,mask=(hu==0)) - hv = np.ma.array(hv,mask=(hv==0)) + hu = np.ma.array(hu, mask=(hu==0)) + hv = np.ma.array(hv, mask=(hv==0)) # interpolate depth and mask to output points if (kwargs['method'] == 'bilinear'): @@ -541,12 +543,373 @@ def extract_constants(ilon, ilat, amplitude.data[amplitude.mask] = amplitude.fill_value phase.data[phase.mask] = phase.fill_value # return the interpolated values - return (amplitude,phase,D,constituents) + return (amplitude, phase, D, constituents) -# PURPOSE: wrapper function to extend an array +# PURPOSE: read harmonic constants from tide models +def read_constants(grid_file=None, model_file=None, EPSG=None, **kwargs): + """ + Reads files from tide models in OTIS and ATLAS-compact formats + + Parameters + ---------- + grid_file: str or NoneType, default None + grid file for model + model_file: str, list or NoneType, default None + model file containing each constituent + EPSG: str or NoneType, default None, + projection of tide model data + type: str, default 'z' + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'U'``: horizontal depth-averaged transport + - ``'v'``: vertical transport velocities + - ``'V'``: vertical depth-averaged transport + grid: str, default 'OTIS' + Tide model file type to read + + - ``'ATLAS'``: reading a global solution with localized solutions + - ``'ESR'``: combined global or local netCDF4 solution + - ``'OTIS'``: combined global or local solution + apply_flexure: bool, default False + Apply ice flexure scaling factor to height constituents + + Returns + ------- + constituents: obj + complex form of tide model 
constituents + """ + # set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('grid', 'OTIS') + kwargs.setdefault('apply_flexure', False) + + # check that grid file is accessible + if not os.access(os.path.expanduser(grid_file), os.F_OK): + raise FileNotFoundError(os.path.expanduser(grid_file)) + # read the OTIS-format tide grid file + if (kwargs['grid'] == 'ATLAS'): + # if reading a global solution with localized solutions + x0,y0,hz0,mz0,iob,dt,pmask,local = read_atlas_grid(grid_file) + xi,yi,hz = combine_atlas_model(x0,y0,hz0,pmask,local,variable='depth') + mz = create_atlas_mask(x0,y0,mz0,local,variable='depth') + elif (kwargs['grid'] == 'ESR'): + # if reading a single ESR netCDF4 solution + xi,yi,hz,mz,sf = read_netcdf_grid(grid_file) + else: + # if reading a single OTIS solution + xi,yi,hz,mz,iob,dt = read_otis_grid(grid_file) + # invert tide mask to be True for invalid points + mz = np.logical_not(mz).astype(mz.dtype) + # grid step size of tide model + dx = xi[1] - xi[0] + dy = yi[1] - yi[0] + + # create current masks and bathymetry estimates + if (kwargs['type'] != 'z'): + mz,mu,mv = Muv(hz) + hu,hv = Huv(hz) + # invert current masks to be True for invalid points + mu = np.logical_not(mu).astype(mu.dtype) + mv = np.logical_not(mv).astype(mv.dtype) + + # if global: extend limits + global_grid = False + # replace original values with extend arrays/matrices + if ((xi[-1] - xi[0]) == (360.0 - dx)) & (EPSG == '4326'): + xi = extend_array(xi, dx) + hz = extend_matrix(hz) + mz = extend_matrix(mz) + # set global grid flag + global_grid = True + + # masks zero values + hz = np.ma.array(hz, mask=(hz==0)) + if (kwargs['type'] != 'z'): + # replace original values with extend matrices + if global_grid: + hu = extend_matrix(hu) + hv = extend_matrix(hv) + mu = extend_matrix(mu) + mv = extend_matrix(mv) + # masks zero values + hu = np.ma.array(hu, mask=(hu==0)) + hv = np.ma.array(hv, mask=(hv==0)) + + # read each constituent + if isinstance(model_file, list): + cons = [read_constituents(m)[0].pop() for m in model_file] + else: + cons,_ = read_constituents(model_file, grid=kwargs['grid']) + # save output constituents + constituents = pyTMD.constituents() + + # for each model constituent + for i,c in enumerate(cons): + if (kwargs['type'] == 'z'): + # read constituent from elevation file + if (kwargs['grid'] == 'ATLAS'): + z0,zlocal = read_atlas_elevation(model_file, i, c) + xi,yi,z = combine_atlas_model(x0, y0, z0, pmask, zlocal, + variable='z') + elif (kwargs['grid'] == 'ESR'): + z = read_netcdf_file(model_file, i, variable='z') + # apply flexure scaling + if kwargs['apply_flexure']: + z *= sf + elif isinstance(model_file,list): + z = read_otis_elevation(model_file[i], 0) + else: + z = read_otis_elevation(model_file, i) + # replace original values with extend matrices + if global_grid: + z = extend_matrix(z) + # copy mask to elevation + z.mask |= mz.astype(bool) + # set model coordinates + setattr(constituents, 'x', xi) + setattr(constituents, 'y', yi) + # set model bathymetry and mask + setattr(constituents, 'bathymetry', hz) + setattr(constituents, 'mask', mz) + + elif kwargs['type'] in ('U','u'): + # read constituent from transport file + if (kwargs['grid'] == 'ATLAS'): + u0,v0,uvlocal = read_atlas_transport(model_file, i, c) + xi,yi,u = combine_atlas_model(x0, y0, u0, pmask, uvlocal, + variable='u') + elif (kwargs['grid'] == 'ESR'): + u = read_netcdf_file(model_file, i, variable='u') + elif isinstance(model_file,list): + u,v = read_otis_transport(model_file[i], 0) + 
else: + u,v = read_otis_transport(model_file, i) + # replace original values with extend matrices + if global_grid: + u = extend_matrix(u) + # copy mask to u transports + u.mask |= mu.astype(bool) + # x coordinates for u transports + xu = xi - dx/2.0 + # set model coordinates + setattr(constituents, 'x', xu) + setattr(constituents, 'y', yi) + # set model bathymetry and mask + setattr(constituents, 'bathymetry', hu) + setattr(constituents, 'mask', mu) + + elif kwargs['type'] in ('V','v'): + # read constituent from transport file + if (kwargs['grid'] == 'ATLAS'): + u0,v0,uvlocal = read_atlas_transport(model_file, i, c) + xi,yi,v = combine_atlas_model(x0, y0, v0, pmask, uvlocal, + variable='v') + elif (kwargs['grid'] == 'ESR'): + v = read_netcdf_file(model_file, i, type='v') + elif isinstance(model_file,list): + u,v = read_otis_transport(model_file[i], 0) + else: + u,v = read_otis_transport(model_file, i) + # replace original values with extend matrices + if global_grid: + v = extend_matrix(v) + # copy mask to v transports + v.mask |= mv.astype(bool) + # y coordinates for v transports + yv = yi - dy/2.0 + # set model coordinates + setattr(constituents, 'x', xi) + setattr(constituents, 'y', yv) + # set model bathymetry and mask + setattr(constituents, 'bathymetry', hv) + setattr(constituents, 'mask', mv) + + # return the complex form of the model constituents + return constituents + +# PURPOSE: interpolate constants from tide models to input coordinates +def interpolate_constants(ilon, ilat, constituents, + EPSG=None, + **kwargs): + """ + Interpolate constants from OTIS/ATLAS-compact tidal models to input + coordinates + + Makes initial calculations to run the tide program + + Parameters + ---------- + ilon: float + longitude to interpolate + ilat: float + latitude to interpolate + constituents: obj + Tide model constituents (complex form) + EPSG: str or NoneType, default None, + projection of tide model data + type: str, default 'z' + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'U'``: horizontal depth-averaged transport + - ``'v'``: vertical transport velocities + - ``'V'``: vertical depth-averaged transport + method: str, default 'spline' + Interpolation method + + - ``'bilinear'``: quick bilinear interpolation + - ``'spline'``: scipy bivariate spline interpolation + - ``'linear'``, ``'nearest'``: scipy regular grid interpolations + extrapolate: bool, default False + Extrapolate model using nearest-neighbors + cutoff: float, default 10.0 + Extrapolation cutoff in kilometers + + Set to np.inf to extrapolate for all points + + Returns + ------- + amplitude: float + amplitudes of tidal constituents + phase: float + phases of tidal constituents + D: float + bathymetry of tide model + """ + # set default keyword arguments + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + # verify that constituents are valid class instance + assert isinstance(constituents, pyTMD.constituents) + # extract model coordinates + xi = np.copy(constituents.x) + yi = np.copy(constituents.y) + + # adjust dimensions of input coordinates to be iterable + # run wrapper function to convert coordinate systems of input lat/lon + x,y = convert_ll_xy(np.atleast_1d(ilon), np.atleast_1d(ilat), EPSG, 'F') + # adjust longitudinal convention of input latitude and longitude + # to fit tide model convention + if (np.min(x) < np.min(xi)) & (EPSG == '4326'): + x[x < 0] += 360.0 + if (np.max(x) > np.max(xi)) & (EPSG == 
'4326'): + x[x > 180] -= 360.0 + # determine if any input points are outside of the model bounds + invalid = (x < xi.min()) | (x > xi.max()) | (y < yi.min()) | (y > yi.max()) + + # interpolate depth and mask to output points + if (kwargs['method'] == 'bilinear'): + # use quick bilinear to interpolate values + D = bilinear_interp(xi, yi, constituents.bathymetry, x, y) + mask = bilinear_interp(xi, yi, constituents.mask, x, y) + mask = np.ceil(mask).astype(constituents.mask.dtype) + elif (kwargs['method'] == 'spline'): + # use scipy bivariate splines to interpolate values + f1 = scipy.interpolate.RectBivariateSpline(xi, yi, + constituents.bathymetry.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(xi, yi, + constituents.mask.T, kx=1, ky=1) + D = f1.ev(x,y) + mask = np.ceil(f2.ev(x,y)).astype(constituents.mask.dtype) + else: + # use scipy regular grid to interpolate values for a given method + r1 = scipy.interpolate.RegularGridInterpolator((yi,xi), + constituents.bathymetry.T, method=kwargs['method'], + bounds_error=False) + r2 = scipy.interpolate.RegularGridInterpolator((yi,xi), + constituents.mask.T, method=kwargs['method'], + bounds_error=False, fill_value=0) + D = r1.__call__(np.c_[y,x]) + mask = np.ceil(r2.__call__(np.c_[y,x])).astype(constituents.mask.dtype) + + # u and v: velocities in cm/s + if kwargs['type'] in ('v','u'): + unit_conv = (D/100.0) + # h is elevation values in m + # U and V are transports in m^2/s + elif kwargs['type'] in ('z','V','U'): + unit_conv = 1.0 + + # number of constituents + nc = len(constituents.fields) + # number of output data points + npts = len(D) + amplitude = np.ma.zeros((npts,nc)) + amplitude.mask = np.zeros((npts,nc), dtype=bool) + ph = np.ma.zeros((npts,nc)) + ph.mask = np.zeros((npts,nc), dtype=bool) + for i,c in enumerate(constituents): + # get model constituent + hc = constituents.get(c) + # interpolate amplitude and phase of the constituent + hci = np.ma.zeros((npts), dtype=hc.dtype) + if (kwargs['method'] == 'bilinear'): + # replace zero values with nan + hc.data[(hc.data == 0) | hc.mask] = np.nan + # use quick bilinear to interpolate values + hci.data[:] = bilinear_interp(xi, yi, hc, x, y, + dtype=np.longcomplex) + # replace nan values with fill_value + hci.mask = (np.isnan(hci.data) | (mask.astype(bool))) + hci.data[hci.mask] = hci.fill_value + elif (kwargs['method'] == 'spline'): + f1 = scipy.interpolate.RectBivariateSpline(xi, yi, + hc.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(xi, yi, + hc.imag.T, kx=1, ky=1) + hci.data.real = f1.ev(x,y) + hci.data.imag = f2.ev(x,y) + # replace zero values with fill_value + hci.mask = (mask.astype(bool)) + hci.data[hci.mask] = hci.fill_value + else: + # use scipy regular grid to interpolate values + r1 = scipy.interpolate.RegularGridInterpolator((yi,xi), hc, + method=kwargs['method'], + bounds_error=False, + fill_value=hci.fill_value) + hci.data[:] = r1.__call__(np.c_[y,x]) + # replace invalid values with fill_value + hci.mask = (hci.data == hci.fill_value) | (mask.astype(bool)) + hci.data[hci.mask] = hci.fill_value + # extrapolate data using nearest-neighbors + if kwargs['extrapolate'] and np.any(hci.mask): + # find invalid data points + inv, = np.nonzero(hci.mask) + # replace zero values with nan + hc[(hc==0) | hc.mask] = np.nan + # extrapolate points within cutoff of valid model points + hci[inv] = nearest_extrap(xi, yi, hc, x[inv], y[inv], + dtype=np.longcomplex, + cutoff=kwargs['cutoff'], + EPSG=EPSG) + # convert units + # amplitude and phase of the constituent + 
amplitude.data[:,i] = np.abs(hci.data)/unit_conv + amplitude.mask[:,i] = np.copy(hci.mask) + ph.data[:,i] = np.arctan2(-np.imag(hci), np.real(hci)) + ph.mask[:,i] = np.copy(hci.mask) + # update mask to invalidate points outside model domain + ph.mask[:,i] |= invalid + amplitude.mask[:,i] |= invalid + + # convert phase to degrees + phase = ph*180.0/np.pi + phase.data[phase.data < 0] += 360.0 + # replace data for invalid mask values + amplitude.data[amplitude.mask] = amplitude.fill_value + phase.data[phase.mask] = phase.fill_value + # return the interpolated values + return (amplitude, phase, D) + +# PURPOSE: Extend a longitude array def extend_array(input_array, step_size): """ - Wrapper function to extend an array + Extends a longitude array Parameters ---------- @@ -568,10 +931,10 @@ def extend_array(input_array, step_size): temp[-1] = input_array[-1] + step_size return temp -# PURPOSE: wrapper function to extend a matrix +# PURPOSE: Extend a global matrix def extend_matrix(input_matrix): """ - Wrapper function to extend a matrix + Extends a global matrix Parameters ---------- @@ -583,8 +946,8 @@ def extend_matrix(input_matrix): temp: float extended matrix """ - ny,nx = np.shape(input_matrix) - temp = np.ma.zeros((ny,nx+2), dtype=input_matrix.dtype) + ny, nx = np.shape(input_matrix) + temp = np.ma.zeros((ny, nx+2), dtype=input_matrix.dtype) temp[:,0] = input_matrix[:,-1] temp[:,1:-1] = input_matrix[:,:] temp[:,-1] = input_matrix[:,0] @@ -641,17 +1004,17 @@ def read_otis_grid(input_file): iob = [] else: fid.seek(8,1) - iob=np.fromfile(fid, dtype=np.dtype('>i4'), count=2*nob).reshape(nob,2) + iob=np.fromfile(fid, dtype=np.dtype('>i4'), count=2*nob).reshape(nob, 2) fid.seek(8,1) # read hz matrix - hz = np.fromfile(fid, dtype=np.dtype('>f4'), count=nx*ny).reshape(ny,nx) + hz = np.fromfile(fid, dtype=np.dtype('>f4'), count=nx*ny).reshape(ny, nx) fid.seek(8,1) # read mz matrix - mz = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny,nx) + mz = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny, nx) # close the file fid.close() # return values - return (x,y,hz,mz,iob,dt) + return (x, y, hz, mz, iob, dt) # PURPOSE: read tide grid file with localized solutions def read_atlas_grid(input_file): @@ -710,16 +1073,16 @@ def read_atlas_grid(input_file): iob = [] else: fid.seek(8,1) - iob=np.fromfile(fid, dtype=np.dtype('>i4'), count=2*nob).reshape(nob,2) + iob=np.fromfile(fid, dtype=np.dtype('>i4'), count=2*nob).reshape(nob, 2) fid.seek(8,1) # read hz matrix - hz = np.fromfile(fid, dtype=np.dtype('>f4'), count=nx*ny).reshape(ny,nx) + hz = np.fromfile(fid, dtype=np.dtype('>f4'), count=nx*ny).reshape(ny, nx) fid.seek(8,1) # read mz matrix - mz = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny,nx) + mz = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny, nx) fid.seek(8,1) # read pmask matrix - pmask = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny,nx) + pmask = np.fromfile(fid, dtype=np.dtype('>i4'), count=nx*ny).reshape(ny, nx) fid.seek(4,1) # read local models nmod = 0 @@ -752,7 +1115,7 @@ def read_atlas_grid(input_file): # close the file fid.close() # return values - return (x,y,hz,mz,iob,dt,pmask,local) + return (x, y, hz, mz, iob, dt, pmask, local) # PURPOSE: read grid file def read_netcdf_grid(input_file): @@ -795,7 +1158,7 @@ def read_netcdf_grid(input_file): # close the grid file fileID.close() # return values - return (x,y,hz,mz,sf) + return (x, y, hz, mz, sf) # PURPOSE: read list of constituents from an elevation or 
transport file def read_constituents(input_file, grid='OTIS'): @@ -837,7 +1200,7 @@ def read_constituents(input_file, grid='OTIS'): fid.seek(16,1) constituents = [c.decode("utf8").rstrip() for c in fid.read(nc*4).split()] fid.close() - return (constituents,nc) + return (constituents, nc) # PURPOSE: read elevation file to extract real and imaginary components for # constituent @@ -868,8 +1231,8 @@ def read_otis_elevation(input_file,ic): nskip = ic*(nx*ny*8+8) + 8 + ll - 28 fid.seek(nskip,1) # real and imaginary components of elevation - h = np.ma.zeros((ny,nx), dtype=np.complex64) - h.mask = np.zeros((ny,nx), dtype=bool) + h = np.ma.zeros((ny, nx), dtype=np.complex64) + h.mask = np.zeros((ny, nx), dtype=bool) for i in range(ny): temp = np.fromfile(fid, dtype=np.dtype('>f4'), count=2*nx) h.data.real[i,:] = temp[0:2*nx-1:2] @@ -921,8 +1284,8 @@ def read_atlas_elevation(input_file, ic, constituent): nskip = 8 + nc*4 + ic*(nx*ny*8 + 8) fid.seek(nskip,1) # real and imaginary components of elevation - h = np.ma.zeros((ny,nx), dtype=np.complex64) - h.mask = np.zeros((ny,nx), dtype=bool) + h = np.ma.zeros((ny, nx), dtype=np.complex64) + h.mask = np.zeros((ny, nx), dtype=bool) for i in range(ny): temp = np.fromfile(fid, dtype=np.dtype('>f4'), count=2*nx) h.data.real[i,:] = temp[0:2*nx-1:2] @@ -978,7 +1341,7 @@ def read_atlas_elevation(input_file, ic, constituent): # close the file fid.close() # return the elevation - return (h,local) + return (h, local) # PURPOSE: read transport file to extract real and imaginary components for # constituent @@ -1011,10 +1374,10 @@ def read_otis_transport(input_file,ic): nskip = ic*(nx*ny*16+8) + 8 + ll - 28 fid.seek(nskip,1) # real and imaginary components of transport - u = np.ma.zeros((ny,nx), dtype=np.complex64) - u.mask = np.zeros((ny,nx), dtype=bool) - v = np.ma.zeros((ny,nx), dtype=np.complex64) - v.mask = np.zeros((ny,nx), dtype=bool) + u = np.ma.zeros((ny, nx), dtype=np.complex64) + u.mask = np.zeros((ny, nx), dtype=bool) + v = np.ma.zeros((ny, nx), dtype=np.complex64) + v.mask = np.zeros((ny, nx), dtype=bool) for i in range(ny): temp = np.fromfile(fid, dtype=np.dtype('>f4'), count=4*nx) u.data.real[i,:] = temp[0:4*nx-3:4] @@ -1030,7 +1393,7 @@ def read_otis_transport(input_file,ic): # close the file fid.close() # return the transport components - return (u,v) + return (u, v) # PURPOSE: read transport file with localized solutions to extract real and # imaginary components for constituent @@ -1073,10 +1436,10 @@ def read_atlas_transport(input_file, ic, constituent): nskip = 8 + nc*4 + ic*(nx*ny*16 + 8) fid.seek(nskip,1) # real and imaginary components of transport - u = np.ma.zeros((ny,nx), dtype=np.complex64) - u.mask = np.zeros((ny,nx), dtype=bool) - v = np.ma.zeros((ny,nx), dtype=np.complex64) - v.mask = np.zeros((ny,nx), dtype=bool) + u = np.ma.zeros((ny, nx), dtype=np.complex64) + u.mask = np.zeros((ny, nx), dtype=bool) + v = np.ma.zeros((ny, nx), dtype=np.complex64) + v.mask = np.zeros((ny, nx), dtype=bool) for i in range(ny): temp = np.fromfile(fid, dtype=np.dtype('>f4'), count=4*nx) u.data.real[i,:] = temp[0:4*nx-3:4] @@ -1146,7 +1509,7 @@ def read_atlas_transport(input_file, ic, constituent): # close the file fid.close() # return the transport components - return (u,v,local) + return (u, v, local) # PURPOSE: create a 2 arc-minute grid mask from mz and depth variables def create_atlas_mask(xi, yi, mz, local, variable=None): @@ -1192,9 +1555,9 @@ def create_atlas_mask(xi, yi, mz, local, variable=None): m30 = np.ma.zeros((len(y30),len(x30)), 
dtype=np.int8,fill_value=0) m30.data[:,:] = mz[IY.astype(np.int32), IX.astype(np.int32)] # iterate over localized solutions to fill in high-resolution coastlines - for key,val in local.items(): + for key, val in local.items(): # shape of local variable - ny,nx = np.shape(val[variable]) + ny, nx = np.shape(val[variable]) # correct limits for local grid lon0 = np.floor(val['lon'][0]/d30)*d30 lat0 = np.floor(val['lat'][0]/d30)*d30 @@ -1258,7 +1621,7 @@ def interpolate_atlas_model(xi, yi, zi, spacing=1.0/30.0): f = scipy.interpolate.RectBivariateSpline(xi, yi, zi.T, kx=1,ky=1) zs.data[:,:] = f(xs,ys).T # return resampled solution and coordinates - return (xs,ys,zs) + return (xs, ys, zs) # PURPOSE: combines global and local atlas solutions def combine_atlas_model(xi, yi, zi, pmask, local, variable=None): @@ -1298,11 +1661,11 @@ def combine_atlas_model(xi, yi, zi, pmask, local, variable=None): # create 2 arc-minute grid dimensions d30 = 1.0/30.0 # interpolate global solution to 2 arc-minute solution - x30,y30,z30 = interpolate_atlas_model(xi, yi, zi, spacing=d30) + x30, y30, z30 = interpolate_atlas_model(xi, yi, zi, spacing=d30) # iterate over localized solutions for key,val in local.items(): # shape of local variable - ny,nx = np.shape(val[variable]) + ny, nx = np.shape(val[variable]) # correct limits for local grid lon0 = np.floor(val['lon'][0]/d30)*d30 lat0 = np.floor(val['lat'][0]/d30)*d30 @@ -1321,7 +1684,7 @@ def combine_atlas_model(xi, yi, zi, pmask, local, variable=None): # fill global mask with regional solution z30.data[jj,ii] = val[variable][validy,validx] # return 2 arc-minute solution and coordinates - return (x30,y30,z30) + return (x30, y30, z30) # PURPOSE: read netCDF4 file to extract real and imaginary components for # constituent @@ -1355,8 +1718,8 @@ def read_netcdf_file(input_file, ic, variable=None): nx = fileID.dimensions['x'].size ny = fileID.dimensions['y'].size # real and imaginary components of tidal constituent - hc = np.ma.zeros((ny,nx), dtype=np.complex64) - hc.mask = np.zeros((ny,nx), dtype=bool) + hc = np.ma.zeros((ny, nx), dtype=np.complex64) + hc.mask = np.zeros((ny, nx), dtype=bool) # extract constituent and flip y orientation if (variable == 'z'): hc.data.real[:,:] = fileID.variables['hRe'][ic,::-1,:] @@ -1397,7 +1760,7 @@ def output_otis_grid(FILE, xlim, ylim, hz, mz, iob, dt): # open this way for files fid = open(os.path.expanduser(FILE), 'wb') nob = len(iob) - ny,nx = np.shape(hz) + ny, nx = np.shape(hz) reclen = 32 fid.write(struct.pack('>i',reclen)) fid.write(struct.pack('>i',nx)) @@ -1449,7 +1812,7 @@ def output_otis_elevation(FILE, h, xlim, ylim, constituents): tidal constituent IDs """ fid = open(os.path.expanduser(FILE), 'wb') - ny,nx,nc = np.shape(h) + ny, nx, nc = np.shape(h) # length of header: allow for 4 character >i c_id strings header_length = 4*(7 + nc) fid.write(struct.pack('>i',header_length)) @@ -1495,7 +1858,7 @@ def output_otis_transport(FILE, u, v, xlim, ylim, constituents): tidal constituent IDs """ fid = open(os.path.expanduser(FILE), 'wb') - ny,nx,nc = np.shape(u) + ny, nx, nc = np.shape(u) # length of header: allow for 4 character >i c_id strings header_length = 4*(7 + nc) fid.write(struct.pack('>i',header_length)) @@ -1528,7 +1891,7 @@ def Muv(hz): """ Construct masks for zeta, u and v nodes on a C-grid """ - ny,nx = np.shape(hz) + ny, nx = np.shape(hz) mz = (hz > 0).astype(int) # x-indices indx = np.zeros((nx), dtype=int) @@ -1539,23 +1902,23 @@ def Muv(hz): indy[:-1] = np.arange(1,ny) indy[-1] = 0 # calculate mu and mv - mu = 
np.zeros((ny,nx), dtype=int) - mv = np.zeros((ny,nx), dtype=int) + mu = np.zeros((ny, nx), dtype=int) + mv = np.zeros((ny, nx), dtype=int) mu[indy,:] = mz*mz[indy,:] mv[:,indx] = mz*mz[:,indx] - return (mu,mv,mz) + return (mu, mv, mz) # PURPOSE: Interpolate bathymetry to zeta, u and v nodes on a C-grid def Huv(hz): """ Interpolate bathymetry to zeta, u and v nodes on a C-grid """ - ny,nx = np.shape(hz) - mu,mv,mz = Muv(hz) + ny, nx = np.shape(hz) + mu, mv, mz = Muv(hz) # x-indices indx = np.zeros((nx), dtype=int) indx[0] = nx-1 - indx[1:] = np.arange(1,nx) + indx[1:] = np.arange(1, nx) # y-indices indy = np.zeros((ny), dtype=int) indy[0] = ny-1 @@ -1563,4 +1926,4 @@ def Huv(hz): # calculate hu and hv hu = mu*(hz + hz[indy,:])/2.0 hv = mv*(hz + hz[:,indx])/2.0 - return (hu,hv) + return (hu, hv) diff --git a/test/test_interpolate.py b/test/test_interpolate.py index f9d147f0..0bbf681e 100644 --- a/test/test_interpolate.py +++ b/test/test_interpolate.py @@ -5,7 +5,7 @@ UPDATE HISTORY: Updated 11/2022: use f-strings for formatting verbose or ascii output - Written 03/2021 + Written 03/2021 """ import os import inspect @@ -26,151 +26,151 @@ # https://github.com/gradywright/spherepts @pytest.fixture(scope="module", autouse=True) def download_nodes(N=324): - matfile = f'md{N:05d}.mat' - HOST = ['https://github.com','gradywright','spherepts','raw', - 'master','nodes','max_determinant',matfile] - pyTMD.utilities.from_http(HOST, - local=os.path.join(filepath,matfile), - verbose=True) - yield - # remove the node file - os.remove(os.path.join(filepath,matfile)) + matfile = f'md{N:05d}.mat' + HOST = ['https://github.com','gradywright','spherepts','raw', + 'master','nodes','max_determinant',matfile] + pyTMD.utilities.from_http(HOST, + local=os.path.join(filepath,matfile), + verbose=True) + yield + # remove the node file + os.remove(os.path.join(filepath,matfile)) # Franke's 3D evaluation function def franke_3d(x,y,z): - F1 = 0.75*np.exp(-((9.*x-2.)**2 + (9.*y-2.)**2 + (9.0*z-2.)**2)/4.) - F2 = 0.75*np.exp(-((9.*x+1.)**2/49. + (9.*y+1.)/10. + (9.0*z+1.)/10.)) - F3 = 0.5*np.exp(-((9.*x-7.)**2 + (9.*y-3.)**2 + (9.*z-5)**2)/4.) - F4 = 0.2*np.exp(-((9.*x-4.)**2 + (9.*y-7.)**2 + (9.*z-5.)**2)) - F = F1 + F2 + F3 - F4 - return F + F1 = 0.75*np.exp(-((9.*x-2.)**2 + (9.*y-2.)**2 + (9.0*z-2.)**2)/4.) + F2 = 0.75*np.exp(-((9.*x+1.)**2/49. + (9.*y+1.)/10. + (9.0*z+1.)/10.)) + F3 = 0.5*np.exp(-((9.*x-7.)**2 + (9.*y-3.)**2 + (9.*z-5)**2)/4.) 
+ F4 = 0.2*np.exp(-((9.*x-4.)**2 + (9.*y-7.)**2 + (9.*z-5.)**2)) + F = F1 + F2 + F3 - F4 + return F # use max determinant nodes from spherepts def test_cartesian(N=324): - # read the node file - matfile = f'md{N:05d}.mat' - xd = scipy.io.loadmat(os.path.join(filepath,matfile)) - x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) - # convert from cartesian to sphere - lon,lat,r = pyTMD.spatial.to_sphere(x,y,z) - X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,a_axis=r,flat=0.0) - # verify that coordinates are within tolerance - assert np.all(np.isclose(x,X)) - assert np.all(np.isclose(y,Y)) - assert np.all(np.isclose(z,Z)) + # read the node file + matfile = f'md{N:05d}.mat' + xd = scipy.io.loadmat(os.path.join(filepath,matfile)) + x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) + # convert from cartesian to sphere + lon,lat,r = pyTMD.spatial.to_sphere(x,y,z) + X,Y,Z = pyTMD.spatial.to_cartesian(lon,lat,a_axis=r,flat=0.0) + # verify that coordinates are within tolerance + assert np.all(np.isclose(x,X)) + assert np.all(np.isclose(y,Y)) + assert np.all(np.isclose(z,Z)) # use max determinant nodes from spherepts def test_geodetic(N=324): - # read the node file - matfile = f'md{N:05d}.mat' - xd = scipy.io.loadmat(os.path.join(filepath,matfile)) - # convert from cartesian to sphere - ln,lt,_ = pyTMD.spatial.to_sphere(xd['x'][:,0], - xd['x'][:,1],xd['x'][:,2]) - # convert from sphere to cartesian - X,Y,Z = pyTMD.spatial.to_cartesian(ln,lt) - # convert from cartesian to geodetic - lon = np.zeros((N)) - lat = np.zeros((N)) - h = np.zeros((N)) - for i in range(N): - lon[i],lat[i],h[i] = pyTMD.spatial.to_geodetic(X[i],Y[i],Z[i]) - # fix coordinates to be 0:360 - lon[lon < 0] += 360.0 - # verify that coordinates are within tolerance - assert np.all(np.isclose(ln,lon)) - assert np.all(np.isclose(lt,lat)) + # read the node file + matfile = f'md{N:05d}.mat' + xd = scipy.io.loadmat(os.path.join(filepath,matfile)) + # convert from cartesian to sphere + ln,lt,_ = pyTMD.spatial.to_sphere(xd['x'][:,0], + xd['x'][:,1],xd['x'][:,2]) + # convert from sphere to cartesian + X,Y,Z = pyTMD.spatial.to_cartesian(ln,lt) + # convert from cartesian to geodetic + lon = np.zeros((N)) + lat = np.zeros((N)) + h = np.zeros((N)) + for i in range(N): + lon[i],lat[i],h[i] = pyTMD.spatial.to_geodetic(X[i],Y[i],Z[i]) + # fix coordinates to be 0:360 + lon[lon < 0] += 360.0 + # verify that coordinates are within tolerance + assert np.all(np.isclose(ln,lon)) + assert np.all(np.isclose(lt,lat)) # parameterize interpolation method @pytest.mark.parametrize("METHOD", ['spline','linear','bilinear']) # PURPOSE: test interpolation routines over a sphere def test_interpolate(METHOD, N=324): - # read the node file - matfile = f'md{N:05d}.mat' - xd = scipy.io.loadmat(os.path.join(filepath,matfile)) - x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) - # convert from cartesian to sphere - lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) - # compute functional values at nodes - val = franke_3d(x,y,z) - # calculate output points (standard lat/lon grid) - dlon,dlat = (1.0,1.0) - LON = np.arange(0,360+dlon,dlon) - LAT = np.arange(-90,90+dlat,dlat) - ny,nx = (len(LAT),len(LON)) - gridlon,gridlat = np.meshgrid(LON,LAT) - X,Y,Z = pyTMD.spatial.to_cartesian(gridlon,gridlat, - a_axis=1.0,flat=0.0) - # calculate functional values at output points - FI = np.ma.zeros((ny,nx)) - FI.data[:] = franke_3d(X,Y,Z) - FI.mask = np.zeros((ny,nx),dtype=bool) - # use interpolation routines to get values - if (METHOD == 'bilinear'): - # use quick bilinear to interpolate values - 
test = pyTMD.bilinear_interp(LON,LAT,FI,lon,lat) - elif (METHOD == 'spline'): - # use scipy bivariate splines to interpolate values - f1 = scipy.interpolate.RectBivariateSpline(LON,LAT, - FI.T,kx=1,ky=1) - test = f1.ev(lon,lat) - else: - # use scipy regular grid to interpolate values - r1 = scipy.interpolate.RegularGridInterpolator((LAT,LON),FI, - method=METHOD,bounds_error=False) - test = r1.__call__(np.c_[lat,lon]) - # verify that coordinates are within tolerance - eps = np.finfo(np.float16).eps - assert np.all(np.isclose(val,test,atol=eps)) + # read the node file + matfile = f'md{N:05d}.mat' + xd = scipy.io.loadmat(os.path.join(filepath,matfile)) + x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) + # convert from cartesian to sphere + lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) + # compute functional values at nodes + val = franke_3d(x,y,z) + # calculate output points (standard lat/lon grid) + dlon,dlat = (1.0,1.0) + LON = np.arange(0,360+dlon,dlon) + LAT = np.arange(-90,90+dlat,dlat) + ny,nx = (len(LAT),len(LON)) + gridlon,gridlat = np.meshgrid(LON,LAT) + X,Y,Z = pyTMD.spatial.to_cartesian(gridlon,gridlat, + a_axis=1.0,flat=0.0) + # calculate functional values at output points + FI = np.ma.zeros((ny,nx)) + FI.data[:] = franke_3d(X,Y,Z) + FI.mask = np.zeros((ny,nx),dtype=bool) + # use interpolation routines to get values + if (METHOD == 'bilinear'): + # use quick bilinear to interpolate values + test = pyTMD.bilinear_interp(LON,LAT,FI,lon,lat) + elif (METHOD == 'spline'): + # use scipy bivariate splines to interpolate values + f1 = scipy.interpolate.RectBivariateSpline(LON,LAT, + FI.T,kx=1,ky=1) + test = f1.ev(lon,lat) + else: + # use scipy regular grid to interpolate values + r1 = scipy.interpolate.RegularGridInterpolator((LAT,LON),FI, + method=METHOD,bounds_error=False) + test = r1.__call__(np.c_[lat,lon]) + # verify that coordinates are within tolerance + eps = np.finfo(np.float16).eps + assert np.all(np.isclose(val,test,atol=eps)) # PURPOSE: test extrapolation over a sphere def test_extrapolate(N=324): - # read the node file - matfile = f'md{N:05d}.mat' - xd = scipy.io.loadmat(os.path.join(filepath,matfile)) - x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) - # convert from cartesian to sphere - lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) - # compute functional values at nodes - val = franke_3d(x,y,z) - # calculate output points (standard lat/lon grid) - dlon,dlat = (1.0,1.0) - LON = np.arange(0,360+dlon,dlon) - LAT = np.arange(90,-90-dlat,-dlat) - ny,nx = (len(LAT),len(LON)) - gridlon,gridlat = np.meshgrid(LON,LAT) - X,Y,Z = pyTMD.spatial.to_cartesian(gridlon,gridlat, - a_axis=1.0,flat=0.0) - # calculate functional values at output points - FI = np.ma.zeros((ny,nx)) - FI.data[:] = franke_3d(X,Y,Z) - FI.mask = np.zeros((ny,nx),dtype=bool) - # use nearest neighbors extrapolation to points - test = pyTMD.nearest_extrap(LON,LAT,FI,lon,lat,EPSG='4326') - # verify that coordinates are within tolerance - assert np.all(np.isclose(val,test,atol=0.1)) + # read the node file + matfile = f'md{N:05d}.mat' + xd = scipy.io.loadmat(os.path.join(filepath,matfile)) + x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) + # convert from cartesian to sphere + lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) + # compute functional values at nodes + val = franke_3d(x,y,z) + # calculate output points (standard lat/lon grid) + dlon,dlat = (1.0,1.0) + LON = np.arange(0,360+dlon,dlon) + LAT = np.arange(90,-90-dlat,-dlat) + ny,nx = (len(LAT),len(LON)) + gridlon,gridlat = np.meshgrid(LON,LAT) + X,Y,Z = 
pyTMD.spatial.to_cartesian(gridlon,gridlat, + a_axis=1.0,flat=0.0) + # calculate functional values at output points + FI = np.ma.zeros((ny,nx)) + FI.data[:] = franke_3d(X,Y,Z) + FI.mask = np.zeros((ny,nx),dtype=bool) + # use nearest neighbors extrapolation to points + test = pyTMD.nearest_extrap(LON,LAT,FI,lon,lat,EPSG='4326') + # verify that coordinates are within tolerance + assert np.all(np.isclose(val,test,atol=0.1)) # PURPOSE: test that extrapolation will not occur if invalid def test_extrapolation_checks(N=324): - # read the node file - matfile = f'md{N:05d}.mat' - xd = scipy.io.loadmat(os.path.join(filepath,matfile)) - x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) - # convert from cartesian to sphere - lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) - # calculate output points (standard lat/lon grid) - dlon,dlat = (1.0,1.0) - LON = np.arange(0,360+dlon,dlon) - LAT = np.arange(90,-90-dlat,-dlat) - ny,nx = (len(LAT),len(LON)) - # calculate functional values at output points - FI = np.ma.zeros((ny,nx)) - FI.mask = np.ones((ny,nx),dtype=bool) - # use nearest neighbors extrapolation to points - # in case where there are no valid grid points - test = pyTMD.nearest_extrap(LON,LAT,FI,lon,lat,EPSG='4326') - assert(np.all(test.mask)) - # use nearest neighbors extrapolation - # in case where there are no points to be extrapolated - test = pyTMD.nearest_extrap(LON,LAT,FI,[],[]) - assert np.logical_not(test) + # read the node file + matfile = f'md{N:05d}.mat' + xd = scipy.io.loadmat(os.path.join(filepath,matfile)) + x,y,z = (xd['x'][:,0],xd['x'][:,1],xd['x'][:,2]) + # convert from cartesian to sphere + lon,lat,_ = pyTMD.spatial.to_sphere(x,y,z) + # calculate output points (standard lat/lon grid) + dlon,dlat = (1.0,1.0) + LON = np.arange(0,360+dlon,dlon) + LAT = np.arange(90,-90-dlat,-dlat) + ny,nx = (len(LAT),len(LON)) + # calculate functional values at output points + FI = np.ma.zeros((ny,nx)) + FI.mask = np.ones((ny,nx),dtype=bool) + # use nearest neighbors extrapolation to points + # in case where there are no valid grid points + test = pyTMD.nearest_extrap(LON,LAT,FI,lon,lat,EPSG='4326') + assert(np.all(test.mask)) + # use nearest neighbors extrapolation + # in case where there are no points to be extrapolated + test = pyTMD.nearest_extrap(LON,LAT,FI,[],[]) + assert np.logical_not(test)
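
Illustrative usage sketch (not part of the patch): the intent of the new read_constants/interpolate_constants pair added above is to read the complex constituents once and then interpolate amplitude and phase to many sets of points. The snippet below is a minimal, self-contained sketch of the interpolation step only, using a synthetic constituents object in place of read_constants output; the model values, point coordinates, and the single 'm2' constituent are made up for illustration, and argument names follow the functions in this patch.

import numpy as np
import pyTMD
from pyTMD.io import OTIS

# synthetic stand-in for the output of OTIS.read_constants
con = pyTMD.constituents()
con.x = np.arange(0.0, 360.0, 1.0)            # model longitudes (degrees)
con.y = np.arange(-90.0, 91.0, 1.0)           # model latitudes (degrees)
ny, nx = (len(con.y), len(con.x))
con.bathymetry = np.full((ny, nx), 1000.0)    # flat 1000 m bathymetry
con.mask = np.zeros((ny, nx), dtype=int)      # 0 = valid water points
# single synthetic constituent: 1 m amplitude, 30 degree phase everywhere
hc = np.ma.ones((ny, nx), dtype=np.complex64)*np.exp(-1j*np.radians(30.0))
hc.mask = np.zeros((ny, nx), dtype=bool)
con.append('m2', hc)

# interpolate amplitude and phase to scattered points
lon = np.array([10.0, 200.0])
lat = np.array([-60.0, 45.0])
amp, ph, D = OTIS.interpolate_constants(lon, lat, con,
    EPSG='4326', type='z', method='spline')
# amp should come back as ~1.0 m and ph as ~30.0 degrees at both points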
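A further note on conventions (also illustrative, not part of the patch): the phase returned by interpolate_constants is computed as arctan2(-Im, Re) and converted to degrees, so complex harmonic constants can be rebuilt from the returned amplitude and phase by inverting that decomposition:

import numpy as np

# example amplitude (m) and phase (degrees), as returned by interpolate_constants
amp = np.array([1.0])
ph = np.array([30.0])
# invert phase = arctan2(-Im(hc), Re(hc)): hc = amp*exp(-1j*phase)
hc = amp*np.exp(-1j*ph*np.pi/180.0)
# hc.real = amp*cos(ph) and hc.imag = -amp*sin(ph)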