diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..5f19f6c
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+SPHINXPROJ = harmonica
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/doc/README.txt b/doc/README.txt
new file mode 100644
index 0000000..2c6cbb3
--- /dev/null
+++ b/doc/README.txt
@@ -0,0 +1,12 @@
+To build the documentation for harmonica, execute the following command in a console:
+
+make <format>
+
+The format is typically "html"; to view all available formats, execute the make command with no arguments.
+
+After executing the make command, the output files will be in the build directory. "index.html"
+(or "index" with the extension of another format) is the root of the documentation pages.
+
+Edit the *.rst reStructuredText files in the source directory to change the layout/content/format
+of the pages. Edit the conf.py file in the source directory to change the settings used by Sphinx
+to generate the documentation.
\ No newline at end of file
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000..096841d
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,36 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+set SPHINXPROJ=harmonica
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
+
+:end
+popd
diff --git a/doc/source/Documentation.rst b/doc/source/Documentation.rst
new file mode 100644
index 0000000..380d2da
--- /dev/null
+++ b/doc/source/Documentation.rst
@@ -0,0 +1,10 @@
+Documentation
+=====================================
+
+.. toctree::
+
+   tidal_database
+   tidal_constituents
+   adcirc_database
+   leprovost_database
+   resource
\ No newline at end of file
diff --git a/doc/source/Installation.rst b/doc/source/Installation.rst
new file mode 100644
index 0000000..1953b32
--- /dev/null
+++ b/doc/source/Installation.rst
@@ -0,0 +1,6 @@
+.. _Installation:
+
+Installation
+=====================================
+
+TODO: Provide steps for installing the harmonica package.
\ No newline at end of file
diff --git a/doc/source/Tutorials.rst b/doc/source/Tutorials.rst
new file mode 100644
index 0000000..db4c028
--- /dev/null
+++ b/doc/source/Tutorials.rst
@@ -0,0 +1,6 @@
+Tutorials
+=====================================
+
+..
toctree:: + + simple_python \ No newline at end of file diff --git a/doc/source/adcirc_database.rst b/doc/source/adcirc_database.rst new file mode 100644 index 0000000..22f784e --- /dev/null +++ b/doc/source/adcirc_database.rst @@ -0,0 +1,13 @@ +harmonica.adcirc_database Module +===================================== + +This module is used to extract amplitude and phase tidal harmonics from the +ADCIRC tidal database. NOAA constituent speeds are also provided (https://tidesandcurrents.noaa.gov). +This module can also extract the frequency, earth tidal reduction factor, amplitude, nodal factor, +and equilibrium argument for specified constituents at a specified time. ADCIRC provides two +tidal databases, one for the Northwest Atlantic and one for Northeast Pacific. Specify +the appropriate location on construction. + +.. automodule:: harmonica.adcirc_database + :members: + :noindex: \ No newline at end of file diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000..7e6f457 --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +import sys +# sys.path.insert(0, os.path.abspath('.')) +sys.path.append("C:\\Users\\aclark\\AppData\\Local\\conda\\conda\\envs\\harmonica\\Lib\\site-packages") + + +# -- Project information ----------------------------------------------------- + +project = 'harmonica' +copyright = '2018, Kevin Winters' +author = 'Kevin Winters' + +# The short X.Y version +version = '' +# The full version, including alpha/beta/rc tags +release = '0.1' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.intersphinx', + 'sphinx.ext.ifconfig', + 'sphinx.ext.napoleon' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'harmonicadoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'harmonica.tex', 'harmonica Documentation', + 'Kevin Winters', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'harmonica', 'harmonica Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'harmonica', 'harmonica Documentation', + author, 'harmonica', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for intersphinx extension --------------------------------------- + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} \ No newline at end of file diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 0000000..2581326 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,28 @@ +.. harmonica documentation master file, created by + sphinx-quickstart on Thu Sep 20 17:01:36 2018. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to harmonica's documentation! +===================================== + +harmonica is an API and CLI to get amplitude, phase, and speed of tidal harmonics +from various tidal models. 
harmonica currently supports TPXO (versions 7.2, 8, and 9),
+ADCIRC, and LeProvost. It also provides functionality to build water surface time series
+utilizing pytides reconstruction and access to pytides deconstruction.
+
+.. toctree::
+   :maxdepth: 1
+
+   Installation
+   Documentation
+   Tutorials
+
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/doc/source/leprovost_database.rst b/doc/source/leprovost_database.rst
new file mode 100644
index 0000000..84b25e7
--- /dev/null
+++ b/doc/source/leprovost_database.rst
@@ -0,0 +1,11 @@
+harmonica.leprovost_database Module
+=====================================
+
+This module is used to extract amplitude and phase tidal harmonics from the
+LeProvost tidal database. NOAA constituent speeds are also provided (https://tidesandcurrents.noaa.gov).
+This module can also provide the frequency, earth tidal reduction factor, amplitude,
+nodal factor, and equilibrium argument for specified constituents at a specified time.
+
+.. automodule:: harmonica.leprovost_database
+   :members:
+   :noindex:
\ No newline at end of file
diff --git a/doc/source/resource.rst b/doc/source/resource.rst
new file mode 100644
index 0000000..29cad96
--- /dev/null
+++ b/doc/source/resource.rst
@@ -0,0 +1,6 @@
+harmonica.resource Module
+=====================================
+
+.. automodule:: harmonica.resource
+   :members:
+   :noindex:
\ No newline at end of file
diff --git a/doc/source/simple_python.rst b/doc/source/simple_python.rst
new file mode 100644
index 0000000..4a71c27
--- /dev/null
+++ b/doc/source/simple_python.rst
@@ -0,0 +1,72 @@
+Using the Python API
+=====================================
+
+This tutorial demonstrates the use of the harmonica Python API. Amplitude, phase,
+and speed tidal harmonics are extracted for a small set of points from the TPXO8, ADCIRC,
+and LeProvost tidal databases. The tutorial also provides an example of obtaining the
+amplitude, frequency, speed, earth tidal reduction factor, equilibrium argument, and
+nodal factor of a constituent at a specified time.
+
+To complete this tutorial you must have the harmonica package installed. See :ref:`Installation` for
+instructions on installing the harmonica package to a Python environment.
+
+We will be using the script located at :file:`{harmonica root}/tutorials/python_api/test.py`.
+The code is also provided below.
+
+.. literalinclude:: ../../tutorials/python_api/test.py
+   :linenos:
+
+Executing :file:`test.py` will fetch the required tidal resources from the Internet if
+they do not already exist in the harmonica data directory. Edit the config variables in
+:file:`__init__.py` to change the data directory or specify a preexisting data directory.
+
+After executing the script, output from the tidal harmonic extraction can be viewed in
+:file:`tidal_test.out` located in the Python API tutorial directory.
+
+The following lines set up the point locations where tidal harmonic components will be
+extracted. Locations should be specified as tuples of latitude and longitude degrees.
+
+.. literalinclude:: ../../tutorials/python_api/test.py
+   :lines: 7-12
+   :lineno-match:
+
+In addition to the points of interest, create a list of constituents to query for.
+
+.. literalinclude:: ../../tutorials/python_api/test.py
+   :lines: 14-14
+   :lineno-match:
+
+Next, construct the tidal constituent extractor interface. This example starts out by
+using the 'leprovost' model. 'tpxo9' is the default model used when none is specified.
+
+..
literalinclude:: ../../tutorials/python_api/test.py + :lines: 15-15 + :lineno-match: + +The next section of code demonstrates using the interface to get amplitudes, frequencies, +speeds, earth tidal reduction factors, equilibrium arguments, and nodal factors for +specified constituents at a specified time. The tidal model specified at construction has +no effect on this functionality. + +.. literalinclude:: ../../tutorials/python_api/test.py + :lines: 17-18 + :lineno-match: + +The last block uses the ADCIRC, LeProvost, and TPXO interfaces to extract tidal harmonic +constituents for a list of locations and constituents. The optional 'model' argument can +be specified to switch between tidal models. Note that when getting the TPXO components, +we pass in a third positional argument (kwarg is 'positive_ph'). If True, all output +phases will be positive. + +.. literalinclude:: ../../tutorials/python_api/test.py + :lines: 25-42 + :lineno-match: + +The LeProvost model is freely distributed. FES2014 is an updated version of the model that +significantly increases the grid resolution and number of supported constituents. The +data files for FES2014 cannot be openly distributed, but local copies of the files +can be used if they exist. See :file:`resource.py` for the expected filenames. + +.. literalinclude:: ../../tutorials/python_api/test.py + :lines: 44-48 + :lineno-match: \ No newline at end of file diff --git a/doc/source/tidal_constituents.rst b/doc/source/tidal_constituents.rst new file mode 100644 index 0000000..d9259dc --- /dev/null +++ b/doc/source/tidal_constituents.rst @@ -0,0 +1,6 @@ +harmonica.tidal_constituents Module +===================================== + +.. automodule:: harmonica.tidal_constituents + :members: + :noindex: \ No newline at end of file diff --git a/doc/source/tidal_database.rst b/doc/source/tidal_database.rst new file mode 100644 index 0000000..29b9b64 --- /dev/null +++ b/doc/source/tidal_database.rst @@ -0,0 +1,6 @@ +harmonica.tidal_database Module +===================================== + +.. automodule:: harmonica.tidal_database + :members: + :noindex: \ No newline at end of file diff --git a/harmonica/adcirc_database.py b/harmonica/adcirc_database.py new file mode 100644 index 0000000..955defe --- /dev/null +++ b/harmonica/adcirc_database.py @@ -0,0 +1,139 @@ +#! python3 + +import math + +import numpy +import pandas as pd +import xmsinterp_py + +from .resource import ResourceManager +from .tidal_database import convert_coords, get_complex_components, NOAA_SPEEDS, TidalDB + + +DEFAULT_ADCIRC_RESOURCE = 'adcirc2015' + + +class AdcircDB(TidalDB): + """The class for extracting tidal data, specifically amplitude and phases, from an ADCIRC database. + + """ + def __init__(self, model=DEFAULT_ADCIRC_RESOURCE): + """Constructor for the ADCIRC tidal database extractor. + + Args: + model (:obj:`str`, optional): Name of the ADCIRC tidal database version. Currently defaults to the only + supported release, 'adcirc2015'. Expand resource.adcirc_models for future versions. + + """ + model = model.lower() + if model not in ResourceManager.ADCIRC_MODELS: + raise ValueError("\'{}\' is not a supported ADCIRC model. Must be one of: {}.".format( + model, ", ".join(ResourceManager.ADCIRC_MODELS).strip() + )) + super().__init__(model) + + def get_components(self, locs, cons=None, positive_ph=False): + """Get the amplitude, phase, and speed for the given constituents at the given points. 
+ + Args: + locs (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested points. + cons (:obj:`list` of :obj:`str`, optional): List of the constituent names to get amplitude and phase for. If + not supplied, all valid constituents will be extracted. + positive_ph (bool, optional): Indicate if the returned phase should be all positive [0 360] (True) or + [-180 180] (False, the default). + + Returns: + (:obj:`list` of :obj:`pandas.DataFrame`): A list of dataframes of constituent information including + amplitude (meters), phase (degrees) and speed (degrees/hour, UTC/GMT). The list is parallel with locs, + where each element in the return list is the constituent data for the corresponding element in locs. + Note that function uses fluent interface pattern. + + """ + # pre-allocate the return value + if not cons: + cons = self.resources.available_constituents() # Get all constituents by default + else: + cons = [con.upper() for con in cons] + + # Make sure point locations are valid lat/lon + locs = convert_coords(locs) + if not locs: + return self # ERROR: Not in latitude/longitude + + self.data = [pd.DataFrame(columns=['amplitude', 'phase', 'speed']) for _ in range(len(locs))] + + # Step 1: read the file and get geometry: + con_dsets = self.resources.get_datasets(cons)[0] + tri_search = xmsinterp_py.geometry.GmTriSearch() + con_x = con_dsets.x[0].values + con_y = con_dsets.y[0].values + + mesh_pts = [(float(con_x[idx]), float(con_y[idx]), 0.0) for idx in range(len(con_x))] + tri_list = con_dsets.element.values.flatten().tolist() + tri_search.tris_to_search(mesh_pts, tri_list) + + points_and_weights = [] + for i, pt in enumerate(locs): + pt_flip = (pt[1], pt[0]) + tri_idx = tri_search.tri_containing_pt(pt_flip) + if tri_idx != -1: + pt_1 = int(tri_list[tri_idx]) + pt_2 = int(tri_list[tri_idx+1]) + pt_3 = int(tri_list[tri_idx+2]) + x1, y1, z1 = mesh_pts[pt_1] + x2, y2, z2 = mesh_pts[pt_2] + x3, y3, z3 = mesh_pts[pt_3] + x = pt_flip[0] + y = pt_flip[1] + # Compute barocentric area weights + ta = abs((x2 * y3 - x3 * y2) - (x1 * y3 - x3 * y1) + (x1 * y2 - x2 * y1)) + w1 = ((x - x3) * (y2 - y3) + (x2 - x3) * (y3 - y)) / ta + w2 = ((x - x1) * (y3 - y1) - (y - y1) * (x3 - x1)) / ta + w3 = ((y - y1) * (x2 - x1) - (x - x1) * (y2 - y1)) / ta + points_and_weights.append((i, (pt_1, pt_2, pt_3), (w1, w2, w3))) + else: # Outside domain, return NaN for all constituents + for con in cons: + self.data[i].loc[con] = [numpy.nan, numpy.nan, numpy.nan] + + for con in cons: + con_amp_name = con + "_amplitude" + con_pha_name = con + "_phase" + con_amp = con_dsets[con_amp_name][0] + con_pha = con_dsets[con_pha_name][0] + for i, pts, weights in points_and_weights: + amps = [float(con_amp[pts[0]]), float(con_amp[pts[1]]), float(con_amp[pts[2]])] + phases = [ + math.radians(float(con_pha[pts[0]])), + math.radians(float(con_pha[pts[1]])), + math.radians(float(con_pha[pts[2]])), + ] + + # Get the real and imaginary components from the amplitude and phases in the file. It + # would be better if these values were stored in the file like TPXO. 
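+                # Interpolating the real/imaginary components (rather than the phase angles themselves)
+                # avoids wrap-around artifacts near the 0/360 degree boundary; the barycentric weights are
+                # applied to the complex values and the amplitude/phase are recovered afterwards.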
+ complex_components = get_complex_components(amps, phases) + + # Perform area weighted interpolation + ctr = ( + complex_components[0][0] * weights[0] + + complex_components[1][0] * weights[1] + + complex_components[2][0] * weights[2] + ) + cti = ( + complex_components[0][1] * weights[0] + + complex_components[1][1] * weights[1] + + complex_components[2][1] * weights[2] + ) + new_amp = math.sqrt(ctr * ctr + cti * cti) + + # Compute interpolated phase + if new_amp == 0.0: + new_phase = 0.0 + else: + new_phase = math.degrees(math.acos(ctr / new_amp)) + if cti < 0.0: + new_phase = 360.0 - new_phase + speed = NOAA_SPEEDS[con][0] if con in NOAA_SPEEDS else numpy.nan + self.data[i].loc[con] = [new_amp, new_phase, speed] + + return self diff --git a/harmonica/cli/main.py b/harmonica/cli/main.py index 9cba0f1..8ab94b2 100644 --- a/harmonica/cli/main.py +++ b/harmonica/cli/main.py @@ -34,4 +34,4 @@ def main(): if __name__ == '__main__': - sys.exit(main()) \ No newline at end of file + sys.exit(main()) diff --git a/harmonica/cli/main_constituents.py b/harmonica/cli/main_constituents.py index 45510c6..194db5e 100644 --- a/harmonica/cli/main_constituents.py +++ b/harmonica/cli/main_constituents.py @@ -37,9 +37,10 @@ def parse_args(args): def execute(args): - cons = Constituents().get_components([args.lat, args.lon], model=args.model, cons=args.cons, - positive_ph=args.positive_phase) - out = cons.data.to_csv(args.output, sep='\t', header=True, index=True, index_label='constituent') + cons = Constituents(model=args.model).get_components( + [(args.lat, args.lon)], cons=args.cons, positive_ph=args.positive_phase + ) + out = cons.data[0].to_csv(args.output, sep='\t', header=True, index=True, index_label='constituent') if args.output is None: print(out) print("\nComplete.\n") diff --git a/harmonica/cli/main_deconstruct.py b/harmonica/cli/main_deconstruct.py index d1820b3..f98cc11 100644 --- a/harmonica/cli/main_deconstruct.py +++ b/harmonica/cli/main_deconstruct.py @@ -5,12 +5,14 @@ import argparse import numpy as np import pandas as pd +import sys DESCR = 'Deconstruct the signal into its tidal constituents.' 
EXAMPLE = """ Example: - harmonica deconstruct 38.375789 -74.943915 -C M2 K1 -M tpxo8 + harmonica deconstruct CO-OPS__8760922__wl.csv --columns "Date Time" "Water Level" \ + --datetime_format '%Y-%m-%d %H:%M' -C M2 S2 N2 K1 """ def config_parser(p, sub=False): diff --git a/harmonica/cli/main_download.py b/harmonica/cli/main_download.py index 8808196..6b45575 100644 --- a/harmonica/cli/main_download.py +++ b/harmonica/cli/main_download.py @@ -9,6 +9,7 @@ harmonica download tpxo8 """ + def config_parser(p, sub=False): # Subparser info if sub: @@ -51,4 +52,4 @@ def main(args=None): except RuntimeError as e: print(str(e)) sys.exit(1) - return \ No newline at end of file + return diff --git a/harmonica/cli/main_reconstruct.py b/harmonica/cli/main_reconstruct.py index e8c0ca8..2fdcfa8 100644 --- a/harmonica/cli/main_reconstruct.py +++ b/harmonica/cli/main_reconstruct.py @@ -1,4 +1,3 @@ -from ..tidal_constituents import Constituents from ..harmonica import Tide from .common import add_common_args, add_loc_model_args, add_const_out_args from pytides.tide import Tide as pyTide @@ -15,6 +14,7 @@ harmonica reconstruct 38.375789 -74.943915 """ + def validate_date(value): try: # return date.fromisoformat(value) # python 3.7 @@ -72,7 +72,7 @@ def parse_args(args): def execute(args): times = pyTide._times(datetime.fromordinal(args.start_date.toordinal()), np.arange(args.length * 24., dtype=float)) - tide = Tide().reconstruct_tide(loc=[args.lat, args.lon], times=times, model=args.model, cons=args.cons, + tide = Tide(model=args.model).reconstruct_tide(loc=[args.lat, args.lon], times=times, cons=args.cons, positive_ph=args.positive_phase) out = tide.data.to_csv(args.output, sep='\t', header=True, index=False) if args.output is None: @@ -88,4 +88,4 @@ def main(args=None): except RuntimeError as e: print(str(e)) sys.exit(1) - return \ No newline at end of file + return diff --git a/harmonica/cli/main_resources.py b/harmonica/cli/main_resources.py index e5afcea..ee4b845 100644 --- a/harmonica/cli/main_resources.py +++ b/harmonica/cli/main_resources.py @@ -61,4 +61,4 @@ def main(args=None): except RuntimeError as e: print(str(e)) sys.exit(1) - return \ No newline at end of file + return diff --git a/harmonica/examples/getting_started.ipynb b/harmonica/examples/getting_started.ipynb new file mode 100644 index 0000000..4aaf752 --- /dev/null +++ b/harmonica/examples/getting_started.ipynb @@ -0,0 +1,288 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from datetime import datetime\n", + "import numpy as np\n", + "\n", + "from pytides.tide import Tide as pyTide\n", + "import harmonica\n", + "from harmonica.harmonica import Tide" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tidal Reconstruction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Tide.reconstruct_tide?" 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Required Input" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# location of interest\n", + "location = (38.375789, -74.943915)\n", + "# datetime object of time zero\n", + "time_zero = datetime.now()\n", + "# array of hour offsets from time zero (MUST BE IN HOURS)\n", + "hours_offset_from_zero = np.arange(0, 1000, 1, dtype=float)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Optional Input" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# set the model name\n", + "model_name = 'tpxo9'\n", + "# requested constituent(s) \n", + "constituents = None\n", + "# should phase be all positive [0 360]?\n", + "positive_phase = True\n", + "# output file\n", + "outfile = None" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Process input data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# convert the numpy array of offset time to datetime objects\n", + "times = pyTide._times(time_zero, hours_offset_from_zero)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Reconstruct tides" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# reconstruct the tide\n", + "tide = Tide().reconstruct_tide(loc=location, times=times, model=model_name, cons=constituents, \n", + " positive_ph=positive_phase)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### View/Save output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# if output file requested\n", + "if outfile is not None:\n", + " # write to file\n", + " tide.data.to_csv(outfile, sep='\\t', header=True, index=False)\n", + " \n", + "# display the dataframe\n", + "display(tide.data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "---\n", + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tidal Deconstruction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Tide.deconstruct_tide?" 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Required Input" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# datetime object of time zero\n", + "time_zero = datetime.now()\n", + "# array of hour offsets from time zero (MUST BE IN HOURS)\n", + "hours_offset_from_zero = np.arange(0, 1000, 1, dtype=float)\n", + "# array of water levels\n", + "water_level = np.cos(hours_offset_from_zero)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Optional Input" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# list of requested constituents\n", + "requested_cons = ['M2', 'S2','N2','K1','M4','O1']\n", + "# number of required periods for inclusion of consituents\n", + "periods = 4\n", + "# should phase be all positive [0 360]?\n", + "positive_ph = False\n", + "# output file\n", + "decomp_out = None" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Process input data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# convert the numpy array of offset time to datetime objects\n", + "times = pyTide._times(time_zero, hours_offset_from_zero)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Deconstruct tides" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "constituents = Tide().deconstruct_tide(water_level, times, cons=requested_cons, n_period=periods, positive_ph=positive_ph)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "constituents.constituents.data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# if output file requested\n", + "if decomp_out is not None:\n", + " # write to file\n", + " constituents.constituents.data.to_csv(decomp_out, sep='\\t', header=True, index=False)\n", + " \n", + "# display the dataframe\n", + "display(constituents.constituents.data)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/harmonica/harmonica.py b/harmonica/harmonica.py index f4dcc38..e050227 100644 --- a/harmonica/harmonica.py +++ b/harmonica/harmonica.py @@ -1,4 +1,5 @@ from .tidal_constituents import Constituents +from .tidal_database import NOAA_SPEEDS from .resource import ResourceManager from pytides.astro import astro from pytides.tide import Tide as pyTide @@ -6,7 +7,6 @@ from datetime import datetime import numpy as np import pandas as pd -import sys class Tide: @@ -25,15 +25,13 @@ class Tide: 'MU2': 'mu2', } - def __init__(self): + def __init__(self, model=ResourceManager.DEFAULT_RESOURCE): # tide dataframe: # date_times (year, month, day, hour, minute, second; UTC/GMT) self.data = pd.DataFrame(columns=['datetimes', 'water_level']) - self.constituents = Constituents() + self.constituents = Constituents(model=model) - - def reconstruct_tide(self, loc, times, model=ResourceManager.DEFAULT_RESOURCE, - cons=[], 
positive_ph=False, offset=None): + def reconstruct_tide(self, loc, times, model=None, cons=[], positive_ph=False, offset=None): """Rescontruct a tide signal water levels at the given location and times Args: @@ -48,15 +46,15 @@ def reconstruct_tide(self, loc, times, model=ResourceManager.DEFAULT_RESOURCE, """ # get constituent information - self.constituents.get_components(loc, model, cons, positive_ph) + self.constituents.get_components([loc], cons, positive_ph, model=model) - ncons = len(self.constituents.data) + (1 if offset is not None else 0) + ncons = len(self.constituents.data[0]) + (1 if offset is not None else 0) tide_model = np.zeros(ncons, dtype=pyTide.dtype) # load specified model constituent components into pytides model object - for i, key in enumerate(self.constituents.data.index.values): + for i, key in enumerate(self.constituents.data[0].index.values): tide_model[i]['constituent'] = eval('pycons._{}'.format(self.PYTIDES_CON_MAPPER.get(key, key))) - tide_model[i]['amplitude'] = self.constituents.data.loc[key].amplitude - tide_model[i]['phase'] = self.constituents.data.loc[key].phase + tide_model[i]['amplitude'] = self.constituents.data[0].loc[key].amplitude + tide_model[i]['phase'] = self.constituents.data[0].loc[key].phase # if an offset is provided then add as spoofed constituent Z0 if offset is not None: tide_model[-1]['constituent'] = pycons._Z0 @@ -69,7 +67,6 @@ def reconstruct_tide(self, loc, times, model=ResourceManager.DEFAULT_RESOURCE, return self - def deconstruct_tide(self, water_level, times, cons=[], n_period=6, positive_ph=False): """Method to use pytides to deconstruct the tides and reorganize results back into the class structure. @@ -90,12 +87,11 @@ def deconstruct_tide(self, water_level, times, cons=[], n_period=6, positive_ph= cons = pycons.noaa else: cons = [eval('pycons._{}'.format(self.PYTIDES_CON_MAPPER.get(c, c))) for c in cons - if c in self.constituents.NOAA_SPEEDS] + if c in NOAA_SPEEDS] self.model_to_dataframe(pyTide.decompose(water_level, times, constituents=cons, n_period=n_period), times[0], positive_ph=positive_ph) return self - def model_to_dataframe(self, tide, t0=datetime.now(), positive_ph=False): """Method to reorganize data from the pytides tide model format into the native dataframe format. @@ -118,8 +114,8 @@ def extractor(c): cons = np.asarray(np.vectorize(extractor)(tide.model[tide.model['constituent'] != pycons._Z0])).T # convert into dataframe df = pd.DataFrame(cons[:,1:], index=cons[:,0], columns=['amplitude', 'phase', 'speed'], dtype=float) - self.constituents.data = pd.concat([self.constituents.data, df], axis=0, join='inner') + self.constituents.data[0] = pd.concat([self.constituents.data[0], df], axis=0, join='inner') # convert phase if necessary if not positive_ph: - self.constituents.data['phase'] = np.where(self.constituents.data['phase'] > 180., - self.constituents.data['phase'] - 360., self.constituents.data['phase']) \ No newline at end of file + self.constituents.data[0]['phase'] = np.where(self.constituents.data[0]['phase'] > 180., + self.constituents.data[0]['phase'] - 360., self.constituents.data[0]['phase']) \ No newline at end of file diff --git a/harmonica/leprovost_database.py b/harmonica/leprovost_database.py new file mode 100644 index 0000000..0e97ef6 --- /dev/null +++ b/harmonica/leprovost_database.py @@ -0,0 +1,182 @@ +"""LeProvost tidal database extractor + +This module contains the tidal database extractor for the LeProvost tidal database. 
+ +""" + +import math +import numpy +import os + +import pandas as pd + +from .resource import ResourceManager +from .tidal_database import convert_coords, get_complex_components, NOAA_SPEEDS, TidalDB + + +DEFAULT_LEPROVOST_RESOURCE = 'leprovost' + + +class LeProvostDB(TidalDB): + """Extractor class for the LeProvost tidal database. + + """ + def __init__(self, model=DEFAULT_LEPROVOST_RESOURCE): + """Constructor for the LeProvost tidal database extractor. + + + Args: + model (:obj:`str`, optional): Name of the LeProvost tidal database version. Defaults to the freely + distributed but outdated 'leprovost' version. See resource.py for additional supported models. + + """ + model = model.lower() # Be case-insensitive + if model not in ResourceManager.LEPROVOST_MODELS: # Check for valid LeProvost model + raise ValueError("\'{}\' is not a supported LeProvost model. Must be one of: {}.".format( + model, ", ".join(ResourceManager.LEPROVOST_MODELS).strip() + )) + super().__init__(model) + + def get_components(self, locs, cons=None, positive_ph=False): + """Get the amplitude, phase, and speed of specified constituents at specified point locations. + + Args: + locs (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested points. + cons (:obj:`list` of :obj:`str`, optional): List of the constituent names to get amplitude and phase for. If + not supplied, all valid constituents will be extracted. + positive_ph (bool, optional): Indicate if the returned phase should be all positive [0 360] (True) or + [-180 180] (False, the default). + + Returns: + :obj:`list` of :obj:`pandas.DataFrame`: A list of dataframes of constituent information including + amplitude (meters), phase (degrees) and speed (degrees/hour, UTC/GMT). The list is parallel with locs, + where each element in the return list is the constituent data for the corresponding element in locs. + Empty list on error. Note that function uses fluent interface pattern. + + """ + # If no constituents specified, extract all valid constituents. + if not cons: + cons = self.resources.available_constituents() + else: # Be case-insensitive + cons = [con.upper() for con in cons] + + # Make sure point locations are valid lat/lon + locs = convert_coords(locs, self.model == "fes2014") + if not locs: + return self # ERROR: Not in latitude/longitude + + self.data = [pd.DataFrame(columns=['amplitude', 'phase', 'speed']) for _ in range(len(locs))] + dataset_atts = self.resources.model_atts.dataset_attributes() + + n_lat = dataset_atts['num_lats'] + n_lon = dataset_atts['num_lons'] + lat_min = -90.0 + lon_min = dataset_atts['min_lon'] + d_lat = 180.0 / (n_lat - 1) + d_lon = 360.0 / n_lon + + for file_idx, d in enumerate(self.resources.get_datasets(cons)): + if self.model == 'leprovost': # All constituents in one file with constituent name dataset. + nc_names = [x.strip().upper() for x in d.spectrum.values[0]] + else: # FES2014 has seperate files for each constituent with no constituent name dataset. + # TODO: Probably need to find a better way to get the constituent name. _file_obj is undocumented, so + # TODO: there is no guarantee this functionality will be maintained. 
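+                # The constituent name is recovered from each NetCDF file name (e.g. 'm2.nc' -> 'M2')
+                # because the per-constituent FES2014 files do not include a constituent-name variable.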
+ nc_names = [os.path.splitext(os.path.basename(dset.ds.filepath()))[0].upper() for + dset in d._file_obj.file_objs] + for con in set(cons) & set(nc_names): + # Extract components for each point for this constituent + con_idx = nc_names.index(con) + for i, pt in enumerate(locs): + y_lat, x_lon = pt # lat,lon not x,y + xlo = int((x_lon - lon_min) / d_lon) + 1 + xlonlo = lon_min + (xlo - 1) * d_lon + xhi = xlo + 1 + if xlo == n_lon: + xhi = 1 + ylo = int((y_lat - lat_min) / d_lon) + 1 + ylatlo = lat_min + (ylo - 1) * d_lat + yhi = ylo + 1 + xlo -= 1 + xhi -= 1 + ylo -= 1 + yhi -= 1 + skip = False + + # Make sure lat/lon coordinate is in the domain. + if (xlo > n_lon or xhi > n_lon or yhi > n_lat or ylo > n_lat or xlo < 0 or xhi < 0 or yhi < 0 + or ylo < 0): + skip = True + else: # Make sure we have at least one neighbor with an active amplitude value. + if self.model == 'leprovost': # All constituents in single file + amp_dset = d.amplitude[0] + phase_dset = d.phase[0] + else: # FES 2014 format - file per constituent + amp_dset = d.amplitude + phase_dset = d.phase + + # Read potential contributing amplitudes from the file. + xlo_yhi_amp = amp_dset[con_idx][yhi][xlo] + xlo_ylo_amp = amp_dset[con_idx][ylo][xlo] + xhi_yhi_amp = amp_dset[con_idx][yhi][xhi] + xhi_ylo_amp = amp_dset[con_idx][ylo][xhi] + if (numpy.isnan(xlo_yhi_amp) and numpy.isnan(xhi_yhi_amp) and + numpy.isnan(xlo_ylo_amp) and numpy.isnan(xhi_ylo_amp)): + skip = True + else: # Make sure we have at least one neighbor with an active phase value. + # Read potential contributing phases from the file. + xlo_yhi_phase = math.radians(phase_dset[con_idx][yhi][xlo]) + xlo_ylo_phase = math.radians(phase_dset[con_idx][ylo][xlo]) + xhi_yhi_phase = math.radians(phase_dset[con_idx][yhi][xhi]) + xhi_ylo_phase = math.radians(phase_dset[con_idx][ylo][xhi]) + if (numpy.isnan(xlo_yhi_phase) and numpy.isnan(xhi_yhi_phase) and + numpy.isnan(xlo_ylo_phase) and numpy.isnan(xhi_ylo_phase)): + skip = True + + if skip: + self.data[i].loc[con] = [numpy.nan, numpy.nan, numpy.nan] + else: + xratio = (x_lon - xlonlo) / d_lon + yratio = (y_lat - ylatlo) / d_lat + + # Get the real and imaginary components from the amplitude and phases in the file. It + # would be better if these values were stored in the file like TPXO. + complex_comps = get_complex_components( + amps=[xlo_yhi_amp, xhi_yhi_amp, xlo_ylo_amp, xhi_ylo_amp], + phases=[xlo_yhi_phase, xhi_yhi_phase, xlo_ylo_phase, xhi_ylo_phase], + ) + + # Perform bi-linear interpolation from the four cell corners to the target point. 
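+                        # Corners with a NaN amplitude or phase (inactive/dry cells) are excluded from the
+                        # weighted sums, and denom renormalizes the result over the remaining valid corners.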
+ xcos = 0.0 + xsin = 0.0 + denom = 0.0 + if not numpy.isnan(xlo_yhi_amp) and not numpy.isnan(xlo_yhi_phase): + xcos = xcos + complex_comps[0][0] * (1.0 - xratio) * yratio + xsin = xsin + complex_comps[0][1] * (1.0 - xratio) * yratio + denom = denom + (1.0 - xratio) * yratio + if not numpy.isnan(xhi_yhi_amp) and not numpy.isnan(xhi_yhi_phase): + xcos = xcos + complex_comps[1][0] * xratio * yratio + xsin = xsin + complex_comps[1][1] * xratio * yratio + denom = denom + xratio * yratio + if not numpy.isnan(xlo_ylo_amp) and not numpy.isnan(xlo_ylo_phase): + xcos = xcos + complex_comps[2][0] * (1.0 - xratio) * (1 - yratio) + xsin = xsin + complex_comps[2][1] * (1.0 - xratio) * (1 - yratio) + denom = denom + (1.0 - xratio) * (1.0 - yratio) + if not numpy.isnan(xhi_ylo_amp) and not numpy.isnan(xhi_ylo_phase): + xcos = xcos + complex_comps[3][0] * (1.0 - yratio) * xratio + xsin = xsin + complex_comps[3][1] * (1.0 - yratio) * xratio + denom = denom + (1.0 - yratio) * xratio + xcos = xcos / denom + xsin = xsin / denom + amp = math.sqrt(xcos * xcos + xsin * xsin) + + # Compute interpolated phase + phase = math.degrees(math.acos(xcos / amp)) + amp /= 100.0 + if xsin < 0.0: + phase = 360.0 - phase + phase += (360. if positive_ph and phase < 0 else 0) + speed = NOAA_SPEEDS[con][0] if con in NOAA_SPEEDS else numpy.nan + self.data[i].loc[con] = [amp, phase, speed] + + return self diff --git a/harmonica/resource.py b/harmonica/resource.py index 8620f58..4f9d504 100644 --- a/harmonica/resource.py +++ b/harmonica/resource.py @@ -1,93 +1,359 @@ -from harmonica import config -from urllib.request import urlopen -import os.path -import string +from abc import ABCMeta, abstractmethod +import os +import shutil +import urllib.request +from zipfile import ZipFile + import xarray as xr +from harmonica import config + + +class Resources(object): + """Abstract base class for model resources + + """ + def __init__(self): + """Base constructor + + """ + pass + + __metaclass__ = ABCMeta + + @abstractmethod + def resource_attributes(self): + """Get the resource attributes of a model (e.g. web url, compression type) + + Returns: + dict: Dictionary of model resource attributes + + """ + return {} + + @abstractmethod + def dataset_attributes(self): + """Get the dataset attributes of a model (e.g. 
unit multiplier, grid dimensions) + + Returns: + dict: Dictionary of model dataset attributes + + """ + return {} + + @abstractmethod + def available_constituents(self): + """Get all the available constituents of a model + + Returns: + list: List of all the available constituents + + """ + return [] + + @abstractmethod + def constituent_groups(self): + """Get all the available constituents of a model grouped by compatible file types + + Returns: + list(list): 2-D list of available constituents, where the first dimension groups compatible files + + """ + return [] + + @abstractmethod + def constituent_resource(self, con): + """Get the resource name of a constituent + + Returns: + str: Name of the constituent's resource + + """ + return None + + +class Tpxo7Resources(Resources): + """TPXO7 resources + + """ + TPXO7_CONS = {'K1', 'K2', 'M2', 'M4', 'MF', 'MM', 'MN4', 'MS4', 'N2', 'O1', 'P1', 'Q1', 'S2'} + DEFAULT_RESOURCE_FILE = 'DATA/h_tpxo7.2.nc' + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': 'ftp://ftp.oce.orst.edu/dist/tides/Global/tpxo7.2_netcdf.tar.Z', + 'archive': 'gz', # gzip compression + } + + def dataset_attributes(self): + return { + 'units_multiplier': 1.0, # meter + } + + def available_constituents(self): + return self.TPXO7_CONS + + def constituent_groups(self): + return [self.available_constituents()] + + def constituent_resource(self, con): + if con.upper() in self.TPXO7_CONS: + return self.DEFAULT_RESOURCE_FILE + else: + return None + + +class Tpxo8Resources(Resources): + """TPXO8 resources + + """ + TPXO8_CONS = [ + { # 1/30 degree + 'K1': 'hf.k1_tpxo8_atlas_30c_v1.nc', + 'K2': 'hf.k2_tpxo8_atlas_30c_v1.nc', + 'M2': 'hf.m2_tpxo8_atlas_30c_v1.nc', + 'M4': 'hf.m4_tpxo8_atlas_30c_v1.nc', + 'N2': 'hf.n2_tpxo8_atlas_30c_v1.nc', + 'O1': 'hf.o1_tpxo8_atlas_30c_v1.nc', + 'P1': 'hf.p1_tpxo8_atlas_30c_v1.nc', + 'Q1': 'hf.q1_tpxo8_atlas_30c_v1.nc', + 'S2': 'hf.s2_tpxo8_atlas_30c_v1.nc', + }, + { # 1/6 degree + 'MF': 'hf.mf_tpxo8_atlas_6.nc', + 'MM': 'hf.mm_tpxo8_atlas_6.nc', + 'MN4': 'hf.mn4_tpxo8_atlas_6.nc', + 'MS4': 'hf.ms4_tpxo8_atlas_6.nc', + }, + ] + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': "ftp://ftp.oce.orst.edu/dist/tides/TPXO8_atlas_30_v1_nc/", + 'archive': None, + } + + def dataset_attributes(self): + return { + 'units_multiplier': 0.001, # mm to meter + } + + def available_constituents(self): + # get keys from const groups as list of lists and flatten + return [c for sl in [grp.keys() for grp in self.TPXO8_CONS] for c in sl] + + def constituent_groups(self): + return [self.TPXO8_CONS[0], self.TPXO8_CONS[1]] + + def constituent_resource(self, con): + con = con.upper() + for group in self.TPXO8_CONS: + if con in group: + return group[con] + return None + + +class Tpxo9Resources(Resources): + """TPXO9 resources + + """ + TPXO9_CONS = {'2N2', 'K1', 'K2', 'M2', 'M4', 'MF', 'MM', 'MN4', 'MS4', 'N2', 'O1', 'P1', 'Q1', 'S1', 'S2'} + DEFAULT_RESOURCE_FILE = 'tpxo9_netcdf/h_tpxo9.v1.nc' + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': "ftp://ftp.oce.orst.edu/dist/tides/Global/tpxo9_netcdf.tar.gz", + 'archive': 'gz', + } + + def dataset_attributes(self): + return { + 'units_multiplier': 1.0, # meter + } + + def available_constituents(self): + return self.TPXO9_CONS + + def constituent_groups(self): + return [self.available_constituents()] + + def constituent_resource(self, con): + if con.upper() in self.TPXO9_CONS: + return 
self.DEFAULT_RESOURCE_FILE + else: + return None + + +class LeProvostResources(Resources): + """LeProvost resources + + """ + LEPROVOST_CONS = {'K1', 'K2', 'M2', 'N2', 'O1', 'P1', 'Q1', 'S2', 'NU2', 'MU2', '2N2', 'T2', 'L2'} + DEFAULT_RESOURCE_FILE = 'leprovost_tidal_db.nc' + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': 'http://sms.aquaveo.com/leprovost_tidal_db.zip', + 'archive': 'zip', # zip compression + } + + def dataset_attributes(self): + return { + 'units_multiplier': 1.0, # meter + 'num_lats': 361, + 'num_lons': 720, + 'min_lon': -180.0, + } + + def available_constituents(self): + return self.LEPROVOST_CONS + + def constituent_groups(self): + return [self.available_constituents()] + + def constituent_resource(self, con): + if con.upper() in self.LEPROVOST_CONS: + return self.DEFAULT_RESOURCE_FILE + else: + return None + + +class FES2014Resources(Resources): + """FES2014 resources + + """ + FES2014_CONS = { + '2N2': '2n2.nc', + 'EPS2': 'eps2.nc', + 'J1': 'j1.nc', + 'K1': 'k1.nc', + 'K2': 'k2.nc', + 'L2': 'l2.nc', + 'LA2': 'la2.nc', + 'M2': 'm2.nc', + 'M3': 'm3.nc', + 'M4': 'm4.nc', + 'M6': 'm6.nc', + 'M8': 'm8.nc', + 'MF': 'mf.nc', + 'MKS2': 'mks2.nc', + 'MM': 'mm.nc', + 'MN4': 'mn4.nc', + 'MS4': 'ms4.nc', + 'MSF': 'msf.nc', + 'MSQM': 'msqm.nc', + 'MTM': 'mtm.nc', + 'MU2': 'mu2.nc', + 'N2': 'n2.nc', + 'N4': 'n4.nc', + 'NU2': 'nu2.nc', + 'O1': 'o1.nc', + 'P1': 'p1.nc', + 'Q1': 'q1.nc', + 'R2': 'r2.nc', + 'S1': 's1.nc', + 'S2': 's2.nc', + 'S4': 's4.nc', + 'SA': 'sa.nc', + 'SSA': 'ssa.nc', + 'T2': 't2.nc', + } + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': None, # Resources must already exist. Licensing restrictions prevent hosting files. + 'archive': None, + } + + def dataset_attributes(self): + return { + 'units_multiplier': 1.0, # meter + 'num_lats': 2881, + 'num_lons': 5760, + 'min_lon': 0.0, + } + + def available_constituents(self): + return self.FES2014_CONS.keys() + + def constituent_groups(self): + return [self.available_constituents()] + + def constituent_resource(self, con): + con = con.upper() + if con in self.FES2014_CONS: + return self.FES2014_CONS[con] + else: + return None + + +class Adcirc2015Resources(Resources): + """ADCIRC (v2015) resources + + """ + ADCIRC_CONS = { + 'M2', 'S2', 'N2', 'K1', 'M4', 'O1', 'M6', 'Q1', 'K2', 'L2', '2N2', 'R2', 'T2', 'LAMBDA2', 'MU2', + 'NU2', 'J1', 'M1', 'OO1', 'P1', '2Q1', 'RHO1', 'M8', 'S4', 'S6', 'M3', 'S1', 'MK3', '2MK3', 'MN4', + 'MS4', '2SM2', 'MF', 'MSF', 'MM', 'SA', 'SSA' + } + DEFAULT_RESOURCE_FILE = 'all_adcirc.nc' + + def __init__(self): + super().__init__() + + def resource_attributes(self): + return { + 'url': 'http://sms.aquaveo.com/', + 'archive': None, # Uncompressed NetCDF file + } + + def dataset_attributes(self): + return { + 'units_multiplier': 1.0, # meter + } + + def available_constituents(self): + return self.ADCIRC_CONS + + def constituent_groups(self): + return [self.available_constituents()] + + def constituent_resource(self, con): + if con.upper() in self.ADCIRC_CONS: + return self.DEFAULT_RESOURCE_FILE + else: + return None + + class ResourceManager(object): """Harmonica resource manager to retrieve and access tide models""" - # Dictionay of model information RESOURCES = { - 'tpxo9': { - 'resource_atts': { - 'url': "ftp://ftp.oce.orst.edu/dist/tides/Global/tpxo9_netcdf.tar.gz", - 'archive': 'gz', - }, - 'dataset_atts': { - 'units_multiplier': 1., # meters - }, - 'consts': [{ # grouped by dimensionally 
compatiable files - '2N2': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'K1': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'K2': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'M2': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'M4': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'MF': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'MM': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'MN4': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'MS4': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'N2': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'O1': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'P1': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'Q1': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'S1': 'tpxo9_netcdf/h_tpxo9.v1.nc', - 'S2': 'tpxo9_netcdf/h_tpxo9.v1.nc', - },], - }, - 'tpxo8': { - 'resource_atts': { - 'url': "ftp://ftp.oce.orst.edu/dist/tides/TPXO8_atlas_30_v1_nc/", - 'archive': None, - }, - 'dataset_atts': { - 'units_multiplier': 0.001, # mm to meter - }, - 'consts': [ # grouped by dimensionally compatiable files - { # 1/30 degree - 'K1': 'hf.k1_tpxo8_atlas_30c_v1.nc', - 'K2': 'hf.k2_tpxo8_atlas_30c_v1.nc', - 'M2': 'hf.m2_tpxo8_atlas_30c_v1.nc', - 'M4': 'hf.m4_tpxo8_atlas_30c_v1.nc', - 'N2': 'hf.n2_tpxo8_atlas_30c_v1.nc', - 'O1': 'hf.o1_tpxo8_atlas_30c_v1.nc', - 'P1': 'hf.p1_tpxo8_atlas_30c_v1.nc', - 'Q1': 'hf.q1_tpxo8_atlas_30c_v1.nc', - 'S2': 'hf.s2_tpxo8_atlas_30c_v1.nc', - }, - { # 1/6 degree - 'MF': 'hf.mf_tpxo8_atlas_6.nc', - 'MM': 'hf.mm_tpxo8_atlas_6.nc', - 'MN4': 'hf.mn4_tpxo8_atlas_6.nc', - 'MS4': 'hf.ms4_tpxo8_atlas_6.nc', - }, - ], - }, - 'tpxo7': { - 'resource_atts': { - 'url': "ftp://ftp.oce.orst.edu/dist/tides/Global/tpxo7.2_netcdf.tar.Z", - 'archive': 'gz', # gzip compression - }, - 'dataset_atts': { - 'units_multiplier': 1., # meter - }, - 'consts': [{ # grouped by dimensionally compatiable files - 'K1': 'DATA/h_tpxo7.2.nc', - 'K2': 'DATA/h_tpxo7.2.nc', - 'M2': 'DATA/h_tpxo7.2.nc', - 'M4': 'DATA/h_tpxo7.2.nc', - 'MF': 'DATA/h_tpxo7.2.nc', - 'MM': 'DATA/h_tpxo7.2.nc', - 'MN4': 'DATA/h_tpxo7.2.nc', - 'MS4': 'DATA/h_tpxo7.2.nc', - 'N2': 'DATA/h_tpxo7.2.nc', - 'O1': 'DATA/h_tpxo7.2.nc', - 'P1': 'DATA/h_tpxo7.2.nc', - 'Q1': 'DATA/h_tpxo7.2.nc', - 'S2': 'DATA/h_tpxo7.2.nc', - },], - }, + 'tpxo7': Tpxo7Resources(), + 'tpxo8': Tpxo8Resources(), + 'tpxo9': Tpxo9Resources(), + 'leprovost': LeProvostResources(), + 'fes2014': FES2014Resources(), + 'adcirc2015': Adcirc2015Resources(), } + TPXO_MODELS = {'tpxo7', 'tpxo8', 'tpxo9'} + LEPROVOST_MODELS = {'fes2014', 'leprovost'} + ADCIRC_MODELS = {'adcirc2015'} DEFAULT_RESOURCE = 'tpxo9' def __init__(self, model=DEFAULT_RESOURCE): @@ -97,62 +363,78 @@ def __init__(self, model=DEFAULT_RESOURCE): self.model_atts = self.RESOURCES[self.model] self.datasets = [] - def __del__(self): for d in self.datasets: d.close() - def available_constituents(self): - # get keys from const groups as list of lists and flatten - return [c for sl in [grp.keys() for grp in self.model_atts['consts']] for c in sl] - + return self.model_atts.available_constituents() def get_units_multiplier(self): - return self.model_atts['dataset_atts']['units_multiplier'] - + return self.model_atts.dataset_attributes()['units_multiplier'] def download(self, resource, destination_dir): """Download a specified model resource.""" if not os.path.isdir(destination_dir): os.makedirs(destination_dir) - rsrc_atts = self.model_atts['resource_atts'] + rsrc_atts = self.model_atts.resource_attributes() url = rsrc_atts['url'] + # Check if we can download resources for this model. 
+ if url is None: + raise ValueError("Automatic fetching of resources is not available for the {} model.".format(self.model)) + if rsrc_atts['archive'] is None: url = "".join((url, resource)) print('Downloading resource: {}'.format(url)) - response = urlopen(url) path = os.path.join(destination_dir, resource) - if rsrc_atts['archive'] is not None: - import tarfile - - try: - tar = tarfile.open(mode='r:{}'.format(rsrc_atts['archive']), fileobj=response) - except IOError as e: - print(str(e)) + with urllib.request.urlopen(url) as response: + if rsrc_atts['archive'] is not None: + if rsrc_atts['archive'] == 'gz': + import tarfile + try: + tar = tarfile.open(mode='r:{}'.format(rsrc_atts['archive']), fileobj=response) + except IOError as e: + print(str(e)) + else: + rsrcs = set( + self.model_atts.constituent_resource(con) for con in + self.model_atts.available_constituents() + ) + tar.extractall(path=destination_dir, members=[m for m in tar.getmembers() if m.name in rsrcs]) + tar.close() + elif rsrc_atts['archive'] == 'zip': # Unzip .zip files + zip_file = os.path.join(destination_dir, os.path.basename(resource) + '.zip') + with open(zip_file, 'wb') as out_file: + shutil.copyfileobj(response, out_file) + # Unzip the files + print("Unzipping files to: {}".format(destination_dir)) + with ZipFile(zip_file, 'r') as unzipper: + # Extract all the files in the archive + unzipper.extractall(path=destination_dir) + print("Deleting zip file: {}".format(zip_file)) + os.remove(zip_file) # delete the zip file else: - rsrcs = set(c for sl in [x.values() for x in self.model_atts['consts']] for c in sl) - tar.extractall(path = destination_dir, members = [m for m in tar.getmembers() if m.name in rsrcs]) - tar.close() - else: - with open(path, 'wb') as f: - f.write(response.read()) + with open(path, 'wb') as f: + f.write(response.read()) return path - - def download_model(self): + def download_model(self, resource_dir=None): """Download all of the model's resources for later use.""" - resources = set(r for sl in [grp.values() for grp in self.model_atts['consts']] for r in sl) - resource_dir = os.path.join(config['data_dir'], self.model) + resources = set( + self.model_atts.constituent_resource(con) for con in + self.model_atts.available_constituents() + ) + if not resource_dir: + resource_dir = os.path.join(config['data_dir'], self.model) for r in resources: path = os.path.join(resource_dir, r) if not os.path.exists(path): self.download(r, resource_dir) - + return resource_dir def remove_model(self): """Remove all of the model's resources.""" @@ -162,19 +444,18 @@ def remove_model(self): shutil.rmtree(resource_dir, ignore_errors=True) - def get_datasets(self, constituents): """Returns a list of xarray datasets.""" available = self.available_constituents() if any(const not in available for const in constituents): raise ValueError('Constituent not recognized.') - # handle compatiable files together + # handle compatible files together self.datasets = [] - for const_group in self.model_atts['consts']: - rsrcs = set(const_group[const] for const in set(constituents) & set(const_group)) + for const_group in self.model_atts.constituent_groups(): + rsrcs = set(self.model_atts.constituent_resource(const) for const in set(constituents) & set(const_group)) paths = set() - if (config['pre_existing_data_dir']): + if config['pre_existing_data_dir']: missing = set() for r in rsrcs: path = os.path.join(config['pre_existing_data_dir'], self.model, r) @@ -194,4 +475,4 @@ def get_datasets(self, constituents): if paths: 
self.datasets.append(xr.open_mfdataset(paths, engine='netcdf4', concat_dim='nc')) - return self.datasets \ No newline at end of file + return self.datasets diff --git a/harmonica/tidal_constituents.py b/harmonica/tidal_constituents.py index f052e29..cf3c73e 100644 --- a/harmonica/tidal_constituents.py +++ b/harmonica/tidal_constituents.py @@ -1,141 +1,98 @@ +from .adcirc_database import AdcircDB +from .leprovost_database import LeProvostDB from .resource import ResourceManager -from bisect import bisect -import os -import numpy as np -import pandas as pd -import sys +from .tpxo_database import TpxoDB class Constituents: - """Harmonica tidal constituents.""" - - # Dictionary of NOAA constituent speed constants (deg/hr) - # Source: https://tidesandcurrents.noaa.gov - # The speed is the rate change in the phase of a constituent, and is equal to 360 degrees divided by the - # constituent period expressed in hours - NOAA_SPEEDS = { - 'OO1': 16.139101, - '2Q1': 12.854286, - '2MK3': 42.92714, - '2N2': 27.895355, - '2SM2': 31.015896, - 'K1': 15.041069, - 'K2': 30.082138, - 'J1': 15.5854435, - 'L2': 29.528479, - 'LAM2': 29.455626, - 'M1': 14.496694, - 'M2': 28.984104, - 'M3': 43.47616, - 'M4': 57.96821, - 'M6': 86.95232, - 'M8': 115.93642, - 'MF': 1.0980331, - 'MK3': 44.025173, - 'MM': 0.5443747, - 'MN4': 57.423832, - 'MS4': 58.984104, - 'MSF': 1.0158958, - 'MU2': 27.968208, - 'N2': 28.43973, - 'NU2': 28.512583, - 'O1': 13.943035, - 'P1': 14.958931, - 'Q1': 13.398661, - 'R2': 30.041067, - 'RHO': 13.471515, - 'S1': 15.0, - 'S2': 30.0, - 'S4': 60.0, - 'S6': 90.0, - 'SA': 0.0410686, - 'SSA': 0.0821373, - 'T2': 29.958933, - } - - def __init__(self): - # constituent information dataframe: - # amplitude (meters) - # phase (degrees) - # speed (degrees/hour, UTC/GMT) - self.data = pd.DataFrame(columns=['amplitude', 'phase', 'speed']) - - - def get_components(self, loc, model=ResourceManager.DEFAULT_RESOURCE, cons=[], positive_ph=False): - """Query the a tide model database and return amplitude, phase and speed for a location. - - Currently written to query tpxo7, tpxo8, and tpxo9 tide models. + """Class for extracting tidal constituent data + + Attributes: + _current_model (:obj:`tidal_database.TidalDB`): The tidal model currently being used for extraction + + """ + def __init__(self, model=ResourceManager.DEFAULT_RESOURCE): + """Constructor tidal constituent extractor interface. + + Use this class as opposed to the lower level implementations Args: - loc (tuple(float, float)): latitude [-90, 90] and longitude [-180 180] or [0 360] of the requested point. - model (str, optional): Model name, defaults to 'tpxo8'. - cons (list(str), optional): List of constituents requested, defaults to all constituents if None or empty. + model (:obj:`str`, optional): Name of the tidal model. See resource.py for supported models. + + """ + self._current_model = None + self.change_model(model) + + @property + def data(self): + """:obj:`Pandas.DataFrame` Access the underlying Pandas data frame of this constituent object.""" + if self._current_model: + return self._current_model.data + return [] + + @data.setter + def data(self, value): + if self._current_model: + self._current_model.data = value + + def change_model(self, new_model): + new_model = new_model.lower() + if self._current_model and self._current_model.model == new_model: + return # Already have the correct impl and resources for this model, nothing to do. 
+ + if new_model in ResourceManager.TPXO_MODELS: # Switch to a TPXO model + self._current_model = TpxoDB(new_model) + elif new_model in ResourceManager.LEPROVOST_MODELS: + self._current_model = LeProvostDB(new_model) + elif new_model in ResourceManager.ADCIRC_MODELS: + self._current_model = AdcircDB() + else: + supported_models = ( + ", ".join(ResourceManager.TPXO_MODELS) + ", " + + ", ".join(ResourceManager.LEPROVOST_MODELS) + ", " + + ", ".join(ResourceManager.ADCIRC_MODELS) + ) + raise ValueError("Model not supported: \'{}\'. Must be one of: {}.".format( + new_model, supported_models.strip() + )) + + def get_components(self, locs, cons=None, positive_ph=False, model=None): + """Get the amplitude, phase, and speed of specified constituents at specified point locations. + + Args: + locs (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested points. + cons (:obj:`list` of :obj:`str`, optional): List of the constituent names to get amplitude and phase for. If + not supplied, all valid constituents will be extracted. positive_ph (bool, optional): Indicate if the returned phase should be all positive [0 360] (True) or [-180 180] (False, the default). + model (:obj:`str`, optional): Name of the tidal model to use to query for the data. If not provided, current + model will be used. If a model other than the current is provided, current model is switched. Returns: - A dataframe of constituent information including amplitude (meters), phase (degrees) and - speed (degrees/hour, UTC/GMT) - + :obj:`list` of :obj:`pandas.DataFrame`: A list of dataframes of constituent + information including amplitude (meters), phase (degrees) and speed (degrees/hour, UTC/GMT). The list is + parallel with locs, where each element in the return list is the constituent data for the corresponding + element in locs. Empty list on error. Note that function uses fluent interface pattern. + """ + if model and model.lower() != self._current_model.model: + self.change_model(model.lower()) + return self._current_model.get_components(locs, cons, positive_ph) - # ensure lower case - model = model.lower() - if model == 'tpxo7_2': - model = 'tpxo7' - - lat, lon = loc - # check the phase of the longitude - if lon < 0: - lon = lon + 360. - - resources = ResourceManager(model=model) - # if no constituents were requested, return all available - if cons is None or not len(cons): - cons = resources.available_constituents() - # open the netcdf database(s) - for d in resources.get_datasets(cons): - # remove unnecessary data array dimensions if present (e.g.
tpxo7.2) - if 'nx' in d.lat_z.dims: - d['lat_z'] = d.lat_z.sel(nx=0, drop=True) - if 'ny' in d.lon_z.dims: - d['lon_z'] = d.lon_z.sel(ny=0, drop=True) - # get the dataset constituent name array from data cube - nc_names = [x.tostring().decode('utf-8').strip().upper() for x in d.con.values] - for c in set(cons) & set(nc_names): - # get constituent and bounding indices within the data cube - idx = {'con': nc_names.index(c)} - idx['top'] = bisect(d.lat_z[idx['con']], lat) - idx['right'] = bisect(d.lon_z[idx['con']], lon) - idx['bottom'] = idx['top'] - 1 - idx['left'] = idx['right'] - 1 - # get distance from the bottom left to the requested point - dx = (lon - d.lon_z.values[idx['con'], idx['left']]) / \ - (d.lon_z.values[idx['con'], idx['right']] - d.lon_z.values[idx['con'], idx['left']]) - dy = (lat - d.lat_z.values[idx['con'], idx['bottom']]) / \ - (d.lat_z.values[idx['con'], idx['top']] - d.lat_z.values[idx['con'], idx['bottom']]) - # calculate weights for bilinear spline - weights = np.array([ - (1. - dx) * (1. - dy), # w00 :: bottom left - (1. - dx) * dy, # w01 :: bottom right - dx * (1. - dy), # w10 :: top left - dx * dy # w11 :: top right - ]).reshape((2,2)) - weights = weights / weights.sum() - # devise the slice to subset surrounding values - query = np.s_[idx['con'], idx['left']:idx['right']+1, idx['bottom']:idx['top']+1] - # calculate the weighted tide from real and imaginary components - h = np.complex((d.hRe.values[query] * weights).sum(), -(d.hIm.values[query] * weights).sum()) - # get the phase and amplitude - ph = np.angle(h, deg=True) - # place info into data table - self.data.loc[c] = [ - # amplitude - np.absolute(h) * resources.get_units_multiplier(), - # phase - ph + (360. if positive_ph and ph < 0 else 0), - # speed - self.NOAA_SPEEDS[c] - ] - - return self \ No newline at end of file + def get_nodal_factor(self, names, timestamp, timestamp_middle=None): + """Get the nodal factor for specified constituents at a specified time. + + Args: + names (:obj:`list` of :obj:`str`): Names of the constituents to get nodal factors for + timestamp (:obj:`datetime.datetime`): Stat date and time to extract constituent arguments at + timestamp_middle (:obj:`datetime.datetime`, optional): Date and time to consider as the middle of the + series. By default, just uses the start day with half the hours. + + Returns: + :obj:`pandas.DataFrame`: Constituent data frames. Each row contains frequency, earth tidal reduction factor, + amplitude, nodal factor, and equilibrium argument for one of the specified constituents. Rows labeled by + constituent name. + + """ + return self._current_model.get_nodal_factor(names, timestamp, timestamp_middle) diff --git a/harmonica/tidal_database.py b/harmonica/tidal_database.py new file mode 100644 index 0000000..94d59a1 --- /dev/null +++ b/harmonica/tidal_database.py @@ -0,0 +1,418 @@ +#! 
python3 + +from abc import ABCMeta, abstractmethod +from datetime import datetime +import math + +import numpy +import pandas as pd +from pytides.astro import astro + +from .resource import ResourceManager + +NCNST = 37 + +# Dictionary of NOAA constituent speed constants (deg/hr) +# Source: https://tidesandcurrents.noaa.gov +# The speed is the rate change in the phase of a constituent, and is equal to 360 degrees divided by the +# constituent period expressed in hours +# name: (speed, amplitude, frequency, ETRF) +NOAA_SPEEDS = { + 'OO1': (16.139101, 0.0, 0.00007824457305, 0.069), + '2Q1': (12.854286, 0.0, 0.000062319338107, 0.069), + '2MK3': (42.92714, 0.0, 0.000208116646659, 0.069), + '2N2': (27.895355, 0.0, 0.000135240496464, 0.069), + '2SM2': (31.015896, 0.0, 0.000150369306157, 0.069), + 'K1': (15.041069, 0.141565, 0.000072921158358, 0.736), + 'K2': (30.082138, 0.030704, 0.000145842317201, 0.693), + 'J1': (15.5854435, 0.0, 0.00007556036138, 0.069), + 'L2': (29.528479, 0.0, 0.000143158105531, 0.069), + 'LAM2': (29.455626, 0.0, 0.000142804901311, 0.069), + 'M1': (14.496694, 0.0, 0.000070281955336, 0.069), + 'M2': (28.984104, 0.242334, 0.000140518902509, 0.693), + 'M3': (43.47616, 0.0, 0.000210778353763, 0.069), + 'M4': (57.96821, 0.0, 0.000281037805017, 0.069), + 'M6': (86.95232, 0.0, 0.000421556708011, 0.069), + 'M8': (115.93642, 0.0, 0.000562075610519, 0.069), + 'MF': (1.0980331, 0.0, 0.000005323414692, 0.069), + 'MK3': (44.025173, 0.0, 0.000213440061351, 0.069), + 'MM': (0.5443747, 0.0, 0.000002639203022, 0.069), + 'MN4': (57.423832, 0.0, 0.000278398601995, 0.069), + 'MS4': (58.984104, 0.0, 0.000285963006842, 0.069), + 'MSF': (1.0158958, 0.0, 0.000004925201824, 0.069), + 'MU2': (27.968208, 0.0, 0.000135593700684, 0.069), + 'N2': (28.43973, 0.046398, 0.000137879699487, 0.693), + 'NU2': (28.512583, 0.0, 0.000138232903707, 0.069), + 'O1': (13.943035, 0.100514, 0.000067597744151, 0.695), + 'P1': (14.958931, 0.046834, 0.000072522945975, 0.706), + 'Q1': (13.398661, 0.019256, 0.000064958541129, 0.695), + 'R2': (30.041067, 0.0, 0.000145643201313, 0.069), + 'RHO': (13.471515, 0.0, 0.000065311745349, 0.069), + 'S1': (15.0, 0.0, 0.000072722052166, 0.069), + 'S2': (30.0, 0.112841, 0.000145444104333, 0.693), + 'S4': (60.0, 0.0, 0.000290888208666, 0.069), + 'S6': (90.0, 0.0, 0.000436332312999, 0.069), + 'SA': (0.0410686, 0.0, 0.000000199106191, 0.069), + 'SSA': (0.0821373, 0.0, 0.000000398212868, 0.069), + 'T2': (29.958933, 0.0, 0.000145245007353, 0.069), +} + + +def get_complex_components(amps, phases): + """Get the real and imaginary components of amplitudes and phases. + + Args: + amps (:obj:`list` of :obj:`float`): List of constituent amplitudes + phases (:obj:`list` of :obj:`float`): List of constituent phases in radians + + Returns: + :obj:`list` of :obj:`tuple` of :obj:`float`: The list of the complex components, + e.g. [[real1, imag1], [real2, imag2]] + + """ + components = [[0.0, 0.0] for _ in range(len(amps))] + for idx, (amp, phase) in enumerate(zip(amps, phases)): + components[idx][0] = amp * math.cos(phase) + components[idx][1] = amp * math.sin(phase) + return components + + +def convert_coords(coords, zero_to_360=False): + """Convert latitude coordinates to [-180, 180] or [0, 360]. + + Args: + coords (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested point. + zero_to_360 (:obj:`bool`, optional) If True, coordinates will be converted to the [0, 360] range. 
If False, + coordinates will be converted to the [-180, 180] range. + + Returns: + :obj:`list` of :obj:`tuple` of :obj:`float`: The list of converted coordinates. None if a coordinate out of + range was encountered + + """ + # Make sure point locations are valid lat/lon + for idx, pt in enumerate(coords): + y_lat = pt[0] + x_lon = pt[1] + if x_lon < 0.0: + x_lon += 360.0 + if not zero_to_360 and x_lon > 180.0: + x_lon -= 360.0 + if y_lat > 90.0 or y_lat < -90.0 or ( # Invalid latitude + not zero_to_360 and (x_lon > 180.0 or x_lon < -180.0)) or ( # Invalid [-180, 180] + zero_to_360 and (x_lon > 360.0 or x_lon < 0.0)): # Invalid [0, 360] + # ERROR: Not in latitude/longitude + return None + else: + coords[idx] = (y_lat, x_lon) + return coords + + +class OrbitVariables(object): + """Container for variables used in astronomical equations. + + Attributes: + astro (:obj:`pytides.astro.astro`): Orbit variables obtained from pytides. + grterm (:obj:`dict` of :obj:`float`): Dictionary of equilibrium arguments where the key is constituent name + nodfac (:obj:`dict` of :obj:`float`): Dictionary of nodal factors where the key is constituent name + + """ + def __init__(self): + """Construct the container + + """ + self.astro = {} + self.grterm = {con: 0.0 for con in NOAA_SPEEDS} + self.nodfac = {con: 0.0 for con in NOAA_SPEEDS} + + +class TidalDB(object): + """The base class for extracting tidal data from a database. + + Attributes: + orbit (:obj:`OrbitVariables`): The orbit variables. + data (:obj:`list` of :obj:`pandas.DataFrame`): List of the constituent component DataFrames with one + per point location requested from get_components(). Intended return value of get_components(). + resources (:obj:`harmonica.resource.ResourceManager`): Manages fetching of tidal data + + """ + """(:obj:`list` of :obj:`float`): The starting days of the months (non-leap year).""" + day_t = [0.0, 31.0, 59.0, 90.0, 120.0, 151.0, 181.0, 212.0, 243.0, 273.0, 304.0, 334.0] + + def __init__(self, model): + """Base class constructor for the tidal extractors + + Args: + model (str): The name of the model. See resource.py for supported models. + + """ + self.orbit = OrbitVariables() + # constituent information dataframe: + # amplitude (meters) + # phase (degrees) + # speed (degrees/hour, UTC/GMT) + self.data = [] + self.model = model + self.resources = ResourceManager(self.model) + + __metaclass__ = ABCMeta + + @abstractmethod + def get_components(self, locs, cons, positive_ph): + """Abstract method to get amplitude, phase, and speed of specified constituents at specified point locations. + + Args: + locs (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested points. + cons (:obj:`list` of :obj:`str`, optional): List of the constituent names to get amplitude and phase for. If + not supplied, all valid constituents will be extracted. + positive_ph (bool, optional): Indicate if the returned phase should be all positive [0 360] (True) or + [-180 180] (False, the default). + + Returns: + :obj:`list` of :obj:`pandas.DataFrame`: Implementations should return a list of dataframes of constituent + information including amplitude (meters), phase (degrees) and speed (degrees/hour, UTC/GMT). The list is + parallel with locs, where each element in the return list is the constituent data for the corresponding + element in locs. Empty list on error. Note that function uses fluent interface pattern. 
+ + """ + pass + + def have_constituent(self, name): + """Determine if a constituent is valid for this tidal extractor. + + Args: + name (str): The name of the constituent. + + Returns: + bool: True if the constituent is valid, False otherwise + + """ + return name.upper() in self.resources.available_constituents() + + def get_nodal_factor(self, names, timestamp, timestamp_middle): + """Get the nodal factor for specified constituents at a specified time. + + Args: + names (:obj:`list` of :obj:`str`): Names of the constituents to get nodal factors for + timestamp (:obj:`datetime.datetime`): Start date and time to extract constituent arguments at + timestamp_middle (:obj:`datetime.datetime`): Date and time to consider as the middle of the series. + + Returns: + :obj:`pandas.DataFrame`: Constituent data frames. Each row contains frequency, earth tidal reduction factor, + amplitude, nodal factor, and equilibrium argument for one of the specified constituents. Rows labeled by + constituent name. + + """ + con_data = pd.DataFrame(columns=['amplitude', 'frequency', 'speed', 'earth_tide_reduction_factor', + 'equilibrium_argument', 'nodal_factor']) + if not timestamp_middle: + float_hours = timestamp.hour / 2.0 + hour = int(float_hours) + float_minutes = (float_hours - hour) * 60.0 + minute = int(float_minutes) + second = int((float_minutes - minute) * 60.0) + timestamp_middle = datetime(timestamp.year, timestamp.month, timestamp.day, hour, minute, second) + self.get_eq_args(timestamp, timestamp_middle) + for idx, name in enumerate(names): + name = name.upper() + if name not in NOAA_SPEEDS: + con_data.loc[name] = [numpy.nan, numpy.nan, numpy.nan, numpy.nan, numpy.nan, numpy.nan] + else: + equilibrium_arg = 0.0 + nodal_factor = self.orbit.nodfac[name] + if nodal_factor != 0.0: + equilibrium_arg = self.orbit.grterm[name] + con_data.loc[name] = [ + NOAA_SPEEDS[name][1], NOAA_SPEEDS[name][2], NOAA_SPEEDS[name][0], NOAA_SPEEDS[name][3], + equilibrium_arg, nodal_factor + ] + return con_data + + def get_eq_args(self, timestamp, timestamp_middle): + """Get equilibrium arguments at a starting time. + + Args: + timestamp (:obj:`datetime.datetime`): Date and time to extract constituent arguments at + timestamp_middle (:obj:`datetime.datetime`): Date and time to consider as the middle of the series + + """ + self.nfacs(timestamp_middle) + self.gterms(timestamp, timestamp_middle) + + @staticmethod + def angle(a_number): + """Converts the angle to be within 0-360. + + Args: + a_number (float): The angle to convert. + + Returns: + The angle converted to be within 0-360. + + """ + ret_val = a_number + while ret_val < 0.0: + ret_val += 360.0 + while ret_val > 360.0: + ret_val -= 360.0 + return ret_val + + def set_orbit(self, timestamp): + """Determination of primary and secondary orbital functions. + + Args: + timestamp (:obj:`datetime.datetime`): Date and time to extract constituent arguments at. + + """ + self.orbit.astro = astro(timestamp) + + def nfacs(self, timestamp): + """Calculates node factors for constituent tidal signal. + + Args: + timestamp (:obj:`datetime.datetime`): Date and time to extract constituent arguments at + + Returns: + The same values as found in table 14 of Schureman. 
+ + """ + self.set_orbit(timestamp) + fi = math.radians(self.orbit.astro['i'].value) + nu = math.radians(self.orbit.astro['nu'].value) + sini = math.sin(fi) + sini2 = math.sin(fi / 2.0) + sin2i = math.sin(2.0 * fi) + cosi2 = math.cos(fi / 2.0) + # EQUATION 197, SCHUREMAN + # qainv = math.sqrt(2.310+1.435*math.cos(2.0*pc)) + # EQUATION 213, SCHUREMAN + # rainv = math.sqrt(1.0-12.0*math.pow(tani2, 2)*math.cos(2.0*pc)+36.0*math.pow(tani2, 4)) + # VARIABLE NAMES REFER TO EQUATION NUMBERS IN SCHUREMAN + eq73 = (2.0 / 3.0 - math.pow(sini, 2)) / 0.5021 + eq74 = math.pow(sini, 2.0) / 0.1578 + eq75 = sini * math.pow(cosi2, 2.0) / 0.37988 + eq76 = math.sin(2 * fi) / 0.7214 + eq77 = sini * math.pow(sini2, 2.0) / 0.0164 + eq78 = math.pow(cosi2, 4.0) / 0.91544 + eq149 = math.pow(cosi2, 6.0) / 0.8758 + # eq207 = eq75 * qainv + # eq215 = eq78 * rainv + eq227 = math.sqrt(0.8965 * math.pow(sin2i, 2.0) + 0.6001 * sin2i * math.cos(nu) + 0.1006) + eq235 = 0.001 + math.sqrt(19.0444 * math.pow(sini, 4.0) + 2.7702 * pow(sini, 2.0) * math.cos(2.0 * nu) + 0.0981) + # NODE FACTORS FOR 37 CONSTITUENTS: + self.orbit.nodfac['M2'] = eq78 + self.orbit.nodfac['S2'] = 1.0 + self.orbit.nodfac['N2'] = eq78 + self.orbit.nodfac['K1'] = eq227 + self.orbit.nodfac['M4'] = math.pow(self.orbit.nodfac['M2'], 2.0) + self.orbit.nodfac['O1'] = eq75 + self.orbit.nodfac['M6'] = math.pow(self.orbit.nodfac['M2'], 3.0) + self.orbit.nodfac['MK3'] = self.orbit.nodfac['M2'] * self.orbit.nodfac['K1'] + self.orbit.nodfac['S4'] = 1.0 + self.orbit.nodfac['MN4'] = math.pow(self.orbit.nodfac['M2'], 2.0) + self.orbit.nodfac['NU2'] = eq78 + self.orbit.nodfac['S6'] = 1.0 + self.orbit.nodfac['MU2'] = eq78 + self.orbit.nodfac['2N2'] = eq78 + self.orbit.nodfac['OO1'] = eq77 + self.orbit.nodfac['LAM2'] = eq78 + self.orbit.nodfac['S1'] = 1.0 + # EQUATION 207 NOT PRODUCING CORRECT ANSWER FOR M1 + # SET NODE FACTOR FOR M1 = 0 UNTIL CAN FURTHER RESEARCH + self.orbit.nodfac['M1'] = 0.0 + self.orbit.nodfac['J1'] = eq76 + self.orbit.nodfac['MM'] = eq73 + self.orbit.nodfac['SSA'] = 1.0 + self.orbit.nodfac['SA'] = 1.0 + self.orbit.nodfac['MSF'] = eq78 + self.orbit.nodfac['MF'] = eq74 + self.orbit.nodfac['RHO'] = eq75 + self.orbit.nodfac['Q1'] = eq75 + self.orbit.nodfac['T2'] = 1.0 + self.orbit.nodfac['R2'] = 1.0 + self.orbit.nodfac['2Q1'] = eq75 + self.orbit.nodfac['P1'] = 1.0 + self.orbit.nodfac['2SM2'] = eq78 + self.orbit.nodfac['M3'] = eq149 + # EQUATION 215 NOT PRODUCING CORRECT ANSWER FOR L2 + # SET NODE FACTOR FOR L2 = 0 UNTIL CAN FURTHER RESEARCH + self.orbit.nodfac['L2'] = 0.0 + self.orbit.nodfac['2MK3'] = math.pow(self.orbit.nodfac['M2'], 2.0) * self.orbit.nodfac['K1'] + self.orbit.nodfac['K2'] = eq235 + self.orbit.nodfac['M8'] = math.pow(self.orbit.nodfac['M2'], 4.0) + self.orbit.nodfac['MS4'] = eq78 + + def gterms(self, timestamp, timestamp_middle): + """Determines the Greenwich equilibrium terms. + + Args: + timestamp (:obj:`datetime.datetime`): Start date and time to extract constituent arguments at + timestamp_middle (:obj:`datetime.datetime`): Date and time to consider as the middle of the series + Returns: + The same values as found in table 15 of Schureman. 
+ + """ + # OBTAINING ORBITAL VALUES AT BEGINNING OF SERIES FOR V0 + self.set_orbit(timestamp) + s = self.orbit.astro['s'].value + p = self.orbit.astro['p'].value + h = self.orbit.astro['h'].value + p1 = self.orbit.astro['pp'].value + t = self.angle(180.0 + timestamp.hour * (360.0 / 24.0)) + + # OBTAINING ORBITAL VALUES AT MIDDLE OF SERIES FOR U + self.set_orbit(timestamp_middle) + nu = self.orbit.astro['nu'].value + xi = self.orbit.astro['xi'].value + nup = self.orbit.astro['nup'].value + nup2 = self.orbit.astro['nupp'].value + + # SUMMING TERMS TO OBTAIN EQUILIBRIUM ARGUMENTS + self.orbit.grterm['M2'] = 2.0 * (t - s + h) + 2.0 * (xi - nu) + self.orbit.grterm['S2'] = 2.0 * t + self.orbit.grterm['N2'] = 2.0 * (t + h) - 3.0 * s + p + 2.0 * (xi - nu) + self.orbit.grterm['K1'] = t + h - 90.0 - nup + self.orbit.grterm['M4'] = 4.0 * (t - s + h) + 4.0 * (xi - nu) + self.orbit.grterm['O1'] = t - 2.0 * s + h + 90.0 + 2.0 * xi - nu + self.orbit.grterm['M6'] = 6.0 * (t - s + h) + 6.0 * (xi - nu) + self.orbit.grterm['MK3'] = 3.0 * (t + h) - 2.0 * s - 90.0 + 2.0 * (xi - nu) - nup + self.orbit.grterm['S4'] = 4.0 * t + self.orbit.grterm['MN4'] = 4.0 * (t + h) - 5.0 * s + p + 4.0 * (xi - nu) + self.orbit.grterm['NU2'] = 2.0 * t - 3.0 * s + 4.0 * h - p + 2.0 * (xi - nu) + self.orbit.grterm['S6'] = 6.0 * t + self.orbit.grterm['MU2'] = 2.0 * (t + 2.0 * (h - s)) + 2.0 * (xi - nu) + self.orbit.grterm['2N2'] = 2.0 * (t - 2.0 * s + h + p) + 2.0 * (xi - nu) + self.orbit.grterm['OO1'] = t + 2.0 * s + h - 90.0 - 2.0 * xi - nu + self.orbit.grterm['LAM2'] = 2.0 * t - s + p + 180.0 + 2.0 * (xi - nu) + self.orbit.grterm['S1'] = t + fi = math.radians(self.orbit.astro['i'].value) + pc = math.radians(self.orbit.astro['P'].value) + top = (5.0 * math.cos(fi) - 1.0) * math.sin(pc) + bottom = (7.0 * math.cos(fi) + 1.0) * math.cos(pc) + q = math.degrees(math.atan2(top, bottom)) + self.orbit.grterm['M1'] = t - s + h - 90.0 + xi - nu + q + self.orbit.grterm['J1'] = t + s + h - p - 90.0 - nu + self.orbit.grterm['MM'] = s - p + self.orbit.grterm['SSA'] = 2.0 * h + self.orbit.grterm['SA'] = h + self.orbit.grterm['MSF'] = 2.0 * (s - h) + self.orbit.grterm['MF'] = 2.0 * s - 2.0 * xi + self.orbit.grterm['RHO'] = t + 3.0 * (h - s) - p + 90.0 + 2.0 * xi - nu + self.orbit.grterm['Q1'] = t - 3.0 * s + h + p + 90.0 + 2.0 * xi - nu + self.orbit.grterm['T2'] = 2.0 * t - h + p1 + self.orbit.grterm['R2'] = 2.0 * t + h - p1 + 180.0 + self.orbit.grterm['2Q1'] = t - 4.0 * s + h + 2.0 * p + 90.0 + 2.0 * xi - nu + self.orbit.grterm['P1'] = t - h + 90.0 + self.orbit.grterm['2SM2'] = 2.0 * (t + s - h) + 2.0 * (nu - xi) + self.orbit.grterm['M3'] = 3.0 * (t - s + h) + 3.0 * (xi - nu) + r = math.sin(2.0 * pc) / ((1.0 / 6.0) * math.pow((1.0 / math.tan(0.5 * fi)), 2) - math.cos(2.0 * pc)) + r = math.degrees(math.atan(r)) + self.orbit.grterm['L2'] = 2.0 * (t + h) - s - p + 180.0 + 2.0 * (xi - nu) - r + self.orbit.grterm['2MK3'] = 3.0 * (t + h) - 4.0 * s + 90.0 + 4.0 * (xi - nu) + nup + self.orbit.grterm['K2'] = 2.0 * (t + h) - 2.0 * nup2 + self.orbit.grterm['M8'] = 8.0 * (t - s + h) + 8.0 * (xi - nu) + self.orbit.grterm['MS4'] = 2.0 * (2.0 * t - s + h) + 2.0 * (xi - nu) + for con, value in self.orbit.grterm.items(): + self.orbit.grterm[con] = self.angle(value) diff --git a/harmonica/tpxo_database.py b/harmonica/tpxo_database.py new file mode 100644 index 0000000..7cc8064 --- /dev/null +++ b/harmonica/tpxo_database.py @@ -0,0 +1,103 @@ +from bisect import bisect +import numpy as np +import pandas as pd + +from .resource import ResourceManager +from 
.tidal_database import NOAA_SPEEDS, TidalDB + + +DEFAULT_TPXO_RESOURCE = 'tpxo9' + + +class TpxoDB(TidalDB): + """Harmonica tidal constituents.""" + + def __init__(self, model=DEFAULT_TPXO_RESOURCE): + """Constructor for the TPXO tidal extractor. + + Args: + model (:obj:`str`, optional): The name of the TPXO model. See resource.py for supported models. + + """ + model = model.lower() # Be case-insensitive + if model not in ResourceManager.TPXO_MODELS: # Check for valid TPXO model + raise ValueError("\'{}\' is not a supported TPXO model. Must be one of: {}.".format( + model, ", ".join(ResourceManager.TPXO_MODELS).strip() + )) + super().__init__(model) + + def get_components(self, locs, cons=None, positive_ph=False): + """Get the amplitude, phase, and speed of specified constituents at specified point locations. + + Args: + locs (:obj:`list` of :obj:`tuple` of :obj:`float`): latitude [-90, 90] and longitude [-180 180] or [0 360] + of the requested points. + cons (:obj:`list` of :obj:`str`, optional): List of the constituent names to get amplitude and phase for. If + not supplied, all valid constituents will be extracted. + positive_ph (bool, optional): Indicate if the returned phase should be all positive [0 360] (True) or + [-180 180] (False, the default). + + Returns: + :obj:`list` of :obj:`pandas.DataFrame`: A list of dataframes of constituent information including + amplitude (meters), phase (degrees) and speed (degrees/hour, UTC/GMT). The list is parallel with locs, + where each element in the return list is the constituent data for the corresponding element in locs. + Empty list on error. Note that function uses fluent interface pattern. + + """ + self.data = [pd.DataFrame(columns=['amplitude', 'phase', 'speed']) for _ in range(len(locs))] + + # if no constituents were requested, return all available + if cons is None or not len(cons): + cons = self.resources.available_constituents() + # open the netcdf database(s) + for d in self.resources.get_datasets(cons): + # remove unnecessary data array dimensions if present (e.g. tpxo7.2) + if 'nx' in d.lat_z.dims: + d['lat_z'] = d.lat_z.sel(nx=0, drop=True) + if 'ny' in d.lon_z.dims: + d['lon_z'] = d.lon_z.sel(ny=0, drop=True) + # get the dataset constituent name array from data cube + nc_names = [x.tostring().decode('utf-8').strip().upper() for x in d.con.values] + for c in set(cons) & set(nc_names): + for i, loc in enumerate(locs): + lat, lon = loc + # check the phase of the longitude + if lon < 0: + lon = lon + 360. + + # get constituent and bounding indices within the data cube + idx = {'con': nc_names.index(c)} + idx['top'] = bisect(d.lat_z[idx['con']], lat) + idx['right'] = bisect(d.lon_z[idx['con']], lon) + idx['bottom'] = idx['top'] - 1 + idx['left'] = idx['right'] - 1 + # get distance from the bottom left to the requested point + dx = (lon - d.lon_z.values[idx['con'], idx['left']]) / \ + (d.lon_z.values[idx['con'], idx['right']] - d.lon_z.values[idx['con'], idx['left']]) + dy = (lat - d.lat_z.values[idx['con'], idx['bottom']]) / \ + (d.lat_z.values[idx['con'], idx['top']] - d.lat_z.values[idx['con'], idx['bottom']]) + # calculate weights for bilinear spline + weights = np.array([ + (1. - dx) * (1. - dy), # w00 :: bottom left + (1. - dx) * dy, # w01 :: bottom right + dx * (1. 
- dy), # w10 :: top left + dx * dy # w11 :: top right + ]).reshape((2, 2)) + weights = weights / weights.sum() + # devise the slice to subset surrounding values + query = np.s_[idx['con'], idx['left']:idx['right']+1, idx['bottom']:idx['top']+1] + # calculate the weighted tide from real and imaginary components + h = np.complex((d.hRe.values[query] * weights).sum(), -(d.hIm.values[query] * weights).sum()) + # get the phase and amplitude + ph = np.angle(h, deg=True) + # place info into data table + self.data[i].loc[c] = [ + # amplitude + np.absolute(h) * self.resources.get_units_multiplier(), + # phase + ph + (360. if positive_ph and ph < 0 else 0), + # speed + NOAA_SPEEDS[c][0] + ] + + return self diff --git a/tutorials/python_api/expected_tidal_test.out b/tutorials/python_api/expected_tidal_test.out new file mode 100644 index 0000000..d7f0add --- /dev/null +++ b/tutorials/python_api/expected_tidal_test.out @@ -0,0 +1,100 @@ +Nodal factor: + amplitude frequency speed earth_tide_reduction_factor equilibrium_argument nodal_factor +M2 0.242334 0.000141 28.984104 0.693 345.201515 1.087974 +S2 0.112841 0.000145 30.000000 0.693 90.000000 1.000000 +N2 0.046398 0.000138 28.439730 0.693 77.612890 1.087974 +K1 0.141565 0.000073 15.041069 0.736 105.776383 0.483807 + +LeProvost components: + amplitude phase speed +K1 0.080152 171.446949 15.041069 +M2 0.589858 353.748502 28.984104 +N2 0.136323 337.950345 28.439730 +S2 0.083580 20.950538 30.000000 + + amplitude phase speed +K1 0.1194 193.8 15.041069 +M2 1.1736 113.8 28.984104 +N2 0.1628 50.8 28.439730 +S2 0.1318 101.1 30.000000 + + amplitude phase speed +K1 NaN NaN NaN +M2 NaN NaN NaN +N2 NaN NaN NaN +S2 NaN NaN NaN + + amplitude phase speed +K1 0.428306 233.851748 15.041069 +M2 0.770437 224.199371 28.984104 +N2 0.159378 202.381338 28.439730 +S2 0.209673 249.043022 30.000000 + + amplitude phase speed +K1 0.441807 238.246609 15.041069 +M2 0.858488 232.029922 28.984104 +N2 0.175845 210.476586 28.439730 +S2 0.242000 258.787649 30.000000 + +ADCIRC components: + amplitude phase speed +K1 0.099715 173.295539 15.041069 +M2 0.554417 352.729485 28.984104 +N2 0.126147 335.014198 28.439730 +S2 0.106317 16.530658 30.000000 + + amplitude phase speed +K1 0.115789 195.361646 15.041069 +M2 1.190749 111.097782 28.984104 +N2 0.254679 75.426588 28.439730 +S2 0.170824 143.134712 30.000000 + + amplitude phase speed +K1 0.141171 189.620866 15.041069 +M2 4.255815 105.440818 28.984104 +N2 0.781585 74.002776 28.439730 +S2 0.631516 145.428306 30.000000 + + amplitude phase speed +K1 0.443556 234.182261 15.041069 +M2 0.842798 221.629047 28.984104 +N2 0.172478 195.837684 28.439730 +S2 0.226702 245.824817 30.000000 + + amplitude phase speed +K1 0.456021 238.806713 15.041069 +M2 0.944207 230.498719 28.984104 +N2 0.192711 205.061278 28.439730 +S2 0.264533 256.892259 30.000000 + +TPX0 components: + amplitude phase speed +K1 0.092135 176.901563 15.041069 +M2 0.560701 352.265551 28.984104 +N2 0.139185 346.750468 28.439730 +S2 0.106576 18.615143 30.000000 + + amplitude phase speed +K1 0.130943 197.327927 15.041069 +M2 1.150805 107.257205 28.984104 +N2 0.261782 76.929914 28.439730 +S2 0.185605 141.870518 30.000000 + + amplitude phase speed +K1 0.155800 192.680364 15.041069 +M2 4.128097 104.709481 28.984104 +N2 0.851786 79.022472 28.439730 +S2 0.648562 145.516010 30.000000 + + amplitude phase speed +K1 0.412604 232.880147 15.041069 +M2 0.809024 222.339913 28.984104 +N2 0.169892 196.376201 28.439730 +S2 0.219652 246.510256 30.000000 + + amplitude phase speed +K1 0.428817 239.531238 15.041069 
+M2 0.910663 234.117743 28.984104 +N2 0.188528 206.717292 28.439730 +S2 0.253107 260.181916 30.000000 + diff --git a/tutorials/python_api/test.py b/tutorials/python_api/test.py new file mode 100644 index 0000000..4166474 --- /dev/null +++ b/tutorials/python_api/test.py @@ -0,0 +1,50 @@ +import datetime +import os + +from harmonica.tidal_constituents import Constituents + +if __name__ == "__main__": + # Need to be in (lat, lon), not (x, y) + all_points = [(39.74, -74.07), + (42.32, -70.0), + (45.44, -65.0), + (43.63, -124.55), + (46.18, -124.38)] + + cons = ['M2', 'S2', 'N2', 'K1'] + constituents = Constituents('leprovost') + + # Get astronomical nodal factor data (not dependent on the tidal model) + nodal_factors = constituents.get_nodal_factor(cons, datetime.datetime(2018, 8, 30, 15)) + + f = open(os.path.join(os.getcwd(), "tidal_test.out"), "w") + f.write("Nodal factor:\n") + f.write(nodal_factors.to_string() + "\n\n") + f.flush() + + # Get tidal harmonic components for a list of points using the ADCIRC, + # LeProvost, and TPXO databases. + f.write("LeProvost components:\n") + f.flush() + leprovost_comps = constituents.get_components(all_points, cons) + for pt in leprovost_comps.data: + f.write(pt.sort_index().to_string() + "\n\n") + + ad_atlantic_comps = constituents.get_components(all_points, cons, model='adcirc2015') + f.write("ADCIRC components:\n") + for pt in ad_atlantic_comps.data: + f.write(pt.sort_index().to_string() + "\n\n") + + f.write("TPX0 components:\n") + f.flush() + tpxo_comps = constituents.get_components(all_points, cons, True, 'tpxo8') + for pt in tpxo_comps.data: + f.write(pt.sort_index().to_string() + "\n\n") + + # f.write("FES2014 components:\n") + # f.flush() + # fes2014_comps = constituents.get_components(all_points, cons, model='fes2014') + # for pt in fes2014_comps.data: + # f.write(pt.sort_index().to_string() + "\n\n") + + f.close() \ No newline at end of file
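The quantities the tutorial writes out (amplitude, phase, and speed from get_components(), plus nodal factor and equilibrium argument from get_nodal_factor()) are usually combined into a water-surface elevation with the standard harmonic sum eta(t) = sum over constituents of f_i * A_i * cos(a_i * t + (V0 + u)_i - kappa_i). Below is a minimal sketch of that synthesis, assuming the DataFrame column names shown above; it is not part of harmonica, and sign and phase conventions should be checked against the target application.

import math

import pandas as pd


def synthesize_elevation(components, nodal_factors, hours_since_start):
    """Sum harmonic constituents into a water-surface elevation (meters).

    components    -- one point's DataFrame from Constituents.get_components():
                     'amplitude' (m), 'phase' (deg), 'speed' (deg/hr)
    nodal_factors -- DataFrame from Constituents.get_nodal_factor():
                     'nodal_factor' and 'equilibrium_argument' (deg)
    """
    eta = 0.0
    for name, row in components.iterrows():
        if name not in nodal_factors.index or pd.isna(row['amplitude']):
            continue  # skip constituents with no data at this point
        f = nodal_factors.loc[name, 'nodal_factor']
        v0u = nodal_factors.loc[name, 'equilibrium_argument']
        # speed is degrees/hour and phases are degrees, so convert to radians once
        arg = math.radians(row['speed'] * hours_since_start + v0u - row['phase'])
        eta += f * row['amplitude'] * math.cos(arg)
    return eta

With the variables from test.py, synthesize_elevation(leprovost_comps.data[0], nodal_factors, 6.0) would estimate the elevation at the first point six hours after the start time passed to get_nodal_factor().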
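For readers skimming TpxoDB.get_components() above, the core numerical step is ordinary bilinear interpolation of the complex tide (hRe, -hIm) to the requested point, followed by conversion to amplitude and phase. The standalone sketch below restates that step outside of xarray; the function name, argument layout, and grid orientation are illustrative assumptions rather than harmonica API.

from bisect import bisect

import numpy as np


def interp_constituent(grid_lats, grid_lons, h_re, h_im, lat, lon):
    """Bilinearly interpolate one complex constituent to (lat, lon).

    grid_lats, grid_lons -- 1-D ascending coordinate arrays (assumed layout)
    h_re, h_im           -- 2-D arrays indexed [lon, lat], TPXO-style
    Returns (amplitude, phase_degrees).
    """
    right = bisect(grid_lons, lon)   # first grid node east of the point
    top = bisect(grid_lats, lat)     # first grid node north of the point
    left, bottom = right - 1, top - 1
    dx = (lon - grid_lons[left]) / (grid_lons[right] - grid_lons[left])
    dy = (lat - grid_lats[bottom]) / (grid_lats[top] - grid_lats[bottom])
    # 2x2 weight matrix laid out to match the [lon, lat] sub-block taken below
    weights = np.array([
        (1. - dx) * (1. - dy),  # left/bottom node
        (1. - dx) * dy,         # left/top node
        dx * (1. - dy),         # right/bottom node
        dx * dy,                # right/top node
    ]).reshape((2, 2))
    weights = weights / weights.sum()
    block = np.s_[left:right + 1, bottom:top + 1]
    # TPXO stores the imaginary part with the opposite sign convention
    h = complex((h_re[block] * weights).sum(), -(h_im[block] * weights).sum())
    return np.absolute(h), np.angle(h, deg=True)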
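The resource layer changes above also make download_model() accept an optional resource_dir and return the directory it populated. Here is a short sketch of pre-fetching a model's data so later extractions can run offline; the module path and the destination directory are assumptions for illustration.

from harmonica.resource import ResourceManager

# Pre-fetch everything the TPXO9 model needs (assumed writable cache directory).
manager = ResourceManager(model='tpxo9')
print(manager.available_constituents())      # constituents this model can supply
cache_dir = manager.download_model(resource_dir='./harmonica_cache')
print('Model resources cached under:', cache_dir)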