From 902d203e81fc3f4b000fe5e124e928919ee7a9d0 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:01:44 -0600
Subject: [PATCH 01/14] Updated tide module and Jupyter notebook
---
tide_predictions/Tide_Module_Examples.ipynb | 497 ++++++++++++++
tide_predictions/tide.py | 696 +++++++++++++++++++-
2 files changed, 1179 insertions(+), 14 deletions(-)
create mode 100644 tide_predictions/Tide_Module_Examples.ipynb
diff --git a/tide_predictions/Tide_Module_Examples.ipynb b/tide_predictions/Tide_Module_Examples.ipynb
new file mode 100644
index 0000000..7583e88
--- /dev/null
+++ b/tide_predictions/Tide_Module_Examples.ipynb
@@ -0,0 +1,497 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "070f5263",
+ "metadata": {
+ "scrolled": false
+ },
+ "source": [
+ "## Tide Prediction Module Functions"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "progressive-revolution",
+ "metadata": {},
+ "source": [
+ " - retrieve_constituents - retrieves harmonic constituents from NOAA gauge station\n",
+ " - retrieve_water_levels() - retrieves observed water levels from NOAA's API\n",
+ " - retrieve_predicted_tide() - retrieves predicted tide from NOAA's API\n",
+ " - datetimes - prepares a collection of datetimes from beginning to end dates\n",
+ " \n",
+ " - predict_tide() - predicts tide for desired NOAA station given station ID, start date and end date for prediction \n",
+ " - datum_value - retrieves datum value for desired datum reference, hardcoded for MTL\n",
+ " - detide() - detides observed water levels\n",
+ " - surge() - predicts surge at NOAA gauge station provided station ID, start date, end date, and landfall date, best for Clawpack Simulation!"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "valued-rocket",
+ "metadata": {},
+ "source": [
+ "# Example of Tide Prediction For One Date Instance\n",
+ "\n",
+ "- In this example, method used to predict tide is adapated from Pytides\n",
+ "- This implementation will only work for known NOAA gauge stations\n",
+ "- Harmonic Constituents data is scraped from NOAA. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "f1656e3a",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import matplotlib.pyplot as plt\n",
+ "import datetime\n",
+ "import tide "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1cd0bf32",
+ "metadata": {},
+ "source": [
+ "### **** Station Information ****"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "33307057",
+ "metadata": {},
+ "source": [
+ "Locate NOAA station ID. NOAA gauge stations home: https://tidesandcurrents.noaa.gov/
\n",
+ "Fill in station ID and date for tide prediction!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "3c76d8a9",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Station Information\n",
+ "stationID = '8518750'\n",
+ "\n",
+ "#Date of prediction (YEAR, MTH, DAY, HR)\n",
+ "prediction_date = datetime.datetime(2017,10,5,0)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "444bf7f2",
+ "metadata": {},
+ "source": [
+ "### Tide Prediction"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "67132515",
+ "metadata": {},
+ "source": [
+ "Prediction of tide at specified location (station ID) and specified time (GMT) implemented below by calling predict_tide( ) method with the following arguments: stationID, beg_prediction_date, end_prediction_date.
\n",
+ "\n",
+ "To predict tide at an instant, set beg_prediction_date and end_prediction_date in predict_tide( ) method to the same date!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "fa3eebea",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#NOAA Data Scraping Implementation \n",
+ "height = tide.predict_tide(stationID, prediction_date, prediction_date) # in meters\n",
+ "print(height[0], \"meters\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "e775a8ff",
+ "metadata": {},
+ "source": [
+ "*******************************************************************************************************************"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d4be333b",
+ "metadata": {},
+ "source": [
+ "# Example of Tide Prediction In A Date Interval "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "f64a6167",
+ "metadata": {},
+ "source": [
+ "### Station Information "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b2b2c5c8",
+ "metadata": {},
+ "source": [
+ "Fill in station ID, a beginning date and an end date for tide prediction below"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6af4f3b1",
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [],
+ "source": [
+ "#Station Information\n",
+ "stationID = '8518750'\n",
+ "\n",
+ "#Beginning and End Dates \n",
+ "beg_date = datetime.datetime(2017,10,1,0,0)\n",
+ "end_date = datetime.datetime(2017,10,5,0,0)\n",
+ "\n",
+ "#Predict tide with arguments set as: (stationID, beg_prediction_date, end_prediction_date)\n",
+ "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5fdc6fa2",
+ "metadata": {},
+ "source": [
+ "### Tide Predictions\n",
+ "Plot results in a time series plot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b07d1fa6",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Method datetimes() makes a range of datetimes given arguments: (beg_prediction_date, end_prediction_date)\n",
+ "times = tide.datetimes(beg_date, end_date)\n",
+ "\n",
+ "plt.figure(figsize=(20,10))\n",
+ "plt.plot(times, predicted_tide, \"-\", label=\"Tide Prediction\")\n",
+ "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
+ "plt.ylabel('Meters'), plt.margins(x=0), plt.legend(loc = 'best')\n",
+ "plt.title('My Prediction for Station {}'.format(stationID))\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "6e68cdb9",
+ "metadata": {},
+ "source": [
+ "*******************************************************************************************************************"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "1d65a5a1",
+ "metadata": {},
+ "source": [
+ "# Example Comparing NOAA vs Our Tide Prediction In A Date Interval "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "bb42cecd",
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [],
+ "source": [
+ "#Station Information\n",
+ "stationID = '8518750'\n",
+ "\n",
+ "#Beginning and End Dates \n",
+ "beg_date = datetime.datetime(2017,10,1,0,0)\n",
+ "end_date = datetime.datetime(2017,10,2,0,0)\n",
+ "\n",
+ "#Predict Tide \n",
+ "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "0bcba7b5",
+ "metadata": {},
+ "source": [
+ "- Calling function retrieve_water_levels( ) with arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's datetimes and observed water level data for the specified NOAA station in the date interval provided\n",
+ "- The function retrieve_predicted_tide( ) arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's predicted tide data for the specified NOAA station. \n",
+ "- Data is scraped in Metric units, GMT timezone, MSL datum and 6 min interval. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "558239c0",
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [],
+ "source": [
+ "times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
+ "NOAA_predicted_tide = tide.retrieve_predicted_tide(stationID, beg_date, end_date)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c91f7d0d",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Plot Comparisons\n",
+ "plt.figure(figsize=(20,10))\n",
+ "plt.plot(times, predicted_tide, \"-\", label=\"Our Tide Prediction\")\n",
+ "plt.plot(times, NOAA_predicted_tide, \"-\", label=\"NOAA Tide Prediction\")\n",
+ "plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Water Level Observation\")\n",
+ "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
+ "plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
+ "plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "edabf72c",
+ "metadata": {},
+ "source": [
+ "*******************************************************************************************************************"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "25f9fca1",
+ "metadata": {},
+ "source": [
+ "# Example Detiding and Capturing A Surge for a Gauge Station "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "ff60fcac",
+ "metadata": {},
+ "source": [
+ "- Calling predict_tide( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) will output predicted tide at the specified location and time interval\n",
+ "- Calling retrieve_water_levels( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) with output datetimes and observed water levels at the specified location and time interval\n",
+ "- Calling detide( ) method with arguments set as: (NOAA observed water level, predicted tide) will output detided water level. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "2e12ce1c",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Station Information\n",
+ "stationID = '8518750'\n",
+ "\n",
+ "#Beginning and End Dates \n",
+ "beg_date = datetime.datetime(2017,10,1,0,0)\n",
+ "end_date = datetime.datetime(2017,10,5,0,0)\n",
+ "\n",
+ "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)\n",
+ "times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
+ "\n",
+ "surge = tide.detide(NOAA_observed_water_lvl, predicted_tide)\n",
+ "\n",
+ "#Plot Comparisons\n",
+ "plt.figure(figsize=(20,10))\n",
+ "plt.plot(times, surge, \"-\", label=\"Our Surge Prediction\")\n",
+ "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
+ "plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
+ "plt.title('Detided Water Level for Station {}'.format(stationID))\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "5361dc29",
+ "metadata": {},
+ "source": [
+ "*******************************************************************************************************************"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "48fd6775",
+ "metadata": {},
+ "source": [
+ "# Example for Clawpack Storm Surge Implementation"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "44b5db31",
+ "metadata": {},
+ "source": [
+ "- Uncomment line below to utilize Clawpack's pytides module located under geoclaw\n",
+ "- Code below works best if placed in gauge_afteraxes( ) in setplot.py\n",
+ "- Calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) will output observed surge from NOAA."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "15e31fba",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import clawpack.geoclaw.tide as tide\n",
+ "import datetime"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "1483072e",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Station Information\n",
+ "stationID = '8518750'\n",
+ "\n",
+ "#Beginning, End, Landfall Dates \n",
+ "beg_date = datetime.datetime(2017,10,1,0,0)\n",
+ "end_date = datetime.datetime(2017,10,5,0,0)\n",
+ "landfall_date = datetime.datetime(2017,10,3,12,0)\n",
+ "\n",
+ "# Surge Prediction\n",
+ "times, surge = tide.surge(stationID, beg_date, end_date, landfall_date)\n",
+ "plt.plot(times, surge, color=\"b\", label=\"Our Prediction\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "d364a6c2",
+ "metadata": {},
+ "source": [
+ "*******************************************************************************************************************"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b5b0d89f",
+ "metadata": {},
+ "source": [
+ "# Example Iterating Through A Library Of Stations And Date Intervals"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "b3cdcd65",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import clawpack.geoclaw.tide as tide\n",
+ "from datetime import datetime"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "d26bd732",
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [],
+ "source": [
+ "\n",
+ "station_dict = {'8518750':(datetime(2017,10,1,0), datetime(2017,10,5,0), datetime(2017,10,3,0)),\n",
+ " '8540433':(datetime(2017,9,1,0), datetime(2017,9,10,12), datetime(2017,9,5,6)),\n",
+ " '8531680':(datetime(2020,10,1,0), datetime(2020,10,10,0), datetime(2020,10,6,0)),\n",
+ " '8722956':(datetime(2019,8,1,0), datetime(2019,8,12,0), datetime(2019,8,6,12)),\n",
+ " '8658120':(datetime(2018,11,1,0), datetime(2018,11,8,0), datetime(2018,11,3,18)),\n",
+ " '8516945':(datetime(2013,1,1,0), datetime(2013,1,6,0), datetime(2013,1,3,12))}\n",
+ "\n",
+ "for (key, value) in station_dict.items():\n",
+ " stationID = key\n",
+ " beg_date = value[0]\n",
+ " end_date = value[1]\n",
+ " landfall_date = value[2]\n",
+ " \n",
+ " #NOAA Data Scraping Implementation\n",
+ " predicted_tide = tide.predict_tide(stationID, beg_date, end_date) \n",
+ " times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
+ " NOAA_predicted_tide = tide.retrieve_predicted_tide(stationID, beg_date, end_date)\n",
+ " \n",
+ " #Detide Water Level\n",
+ " surge = tide.detide(NOAA_observed_water_lvl, predicted_tide)\n",
+ " NOAA_surge = tide.detide(NOAA_observed_water_lvl, NOAA_predicted_tide)\n",
+ " \n",
+ " #Plot Comparisons\n",
+ " plt.figure(figsize=(20,10))\n",
+ " plt.plot(times, predicted_tide, \"-\", label=\"Our Tide Prediction\")\n",
+ " plt.plot(times, NOAA_predicted_tide, \"-\", label=\"NOAA Tide Prediction\")\n",
+ " plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Water Level Observation\")\n",
+ " plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
+ " plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
+ " plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ " plt.show()\n",
+ " \n",
+ " #Detided Water Level Comparison\n",
+ " plt.figure(figsize=(20,10))\n",
+ " plt.plot(times, surge, \"-\", label=\"Our Detided Prediction\")\n",
+ " plt.plot(times, NOAA_surge, \"-\", label=\"NOAA's Detided Prediction\")\n",
+ " plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
+ " plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
+ " plt.title('Detided Water Level Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ " plt.show()\n",
+ " \n",
+ " \n",
+ " #### Clawpack Implementation (in setplot.py) ####\n",
+ " times, surge = tide.surge(stationID, beg_date, end_date, landfall_date)\n",
+ " plt.plot(times, surge, color=\"b\", label=\"Our Surge Prediction\")\n",
+ " \n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "dd4972d2",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.9.10"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/tide_predictions/tide.py b/tide_predictions/tide.py
index 7e4e110..02f4d7c 100644
--- a/tide_predictions/tide.py
+++ b/tide_predictions/tide.py
@@ -1,19 +1,482 @@
+#!/usr/bin/env python
+
+"""
+GeoClaw util Module `$CLAW/geoclaw/src/python/geoclaw/tide.py`
+
+Module provides provides tide prediction functions.
+
+:Functions:
+
+ - retrieve_constituents - retrieves harmonic constituents from NOAA gauge station
+ - retrieve_water_levels - retrieves observed water levels from NOAA's API
+ - retrieve_predicted_tide - retrieves predicted tide from NOAA's API
+ - datum_value - retrieves datum value for desired datum reference
+ - predict_tide - predicts tide with Pytides
+ - datetimes - prepares a collection of datetimes from beginning to end dates
+ - detide - detides observed water levels
+ - surge - predicts surge at NOAA gauge station
+"""
+
from collections.abc import Iterable
-from collections import OrderedDict
+from collections import OrderedDict, namedtuple
+from scipy.optimize import leastsq, fsolve
from itertools import takewhile, count
+from datetime import datetime, timedelta
+from functools import reduce
+
try:
- from itertools import izip, ifilter
+ from itertools import izip, ifilter
except ImportError: #Python3
- izip = zip
- ifilter = filter
-from datetime import datetime, timedelta
-import numpy as np
-from scipy.optimize import leastsq, fsolve
-from astro import astro
-import constituent
+ izip = zip
+ ifilter = filter
+
+try:
+ import requests
+ import json
+ import string
+ import lxml.html as lh
+ import pandas as pd
+ import operator as op
+ import numpy as np
+except ImportError as e:
+ print(e)
+
d2r, r2d = np.pi/180.0, 180.0/np.pi
+
+######################## Tide Prediction Functions ########################
+
+def retrieve_constituents(stationID):
+    """Scrape the harmonic constituent table for a NOAA gauge station.
+
+    :Input:
+     - stationID (str) - NOAA station identifier
+    :Output:
+     - dict mapping each table column title to its list of values
+       (includes 'Amplitude' and 'Phase' columns used by predict_tide)
+    """
+    #Form URL of the station's harmonic constituents page
+    url = 'https://tidesandcurrents.noaa.gov/harcon.html?unit=0&timezone=0&id={}'.format(stationID)
+
+    #Request URL and collect the HTML table: first row holds the titles,
+    #remaining rows hold one value per column
+    page = requests.get(url)
+    doc = lh.fromstring(page.content)
+    tr_elements = doc.xpath('//tr')
+    col = [(t.text_content(), []) for t in tr_elements[0]]
+    for row in tr_elements[1:]:
+        for i, cell in enumerate(row.iterchildren()):
+            col[i][1].append(cell.text_content())
+
+    component_dict = {title: column for (title, column) in col}
+    #Uncomment to export the table to csv:
+    #pd.DataFrame(component_dict).to_csv('{}_constituents.csv'.format(stationID), index=False)
+
+    return component_dict
+
+
+def retrieve_water_levels(stationID, beg_date, end_date):
+    """Retrieve observed water levels from NOAA's API.
+
+    Data is requested in metric units, GMT timezone, MTL datum.
+
+    :Input:
+     - stationID (str), beg_date (datetime), end_date (datetime)
+    :Output:
+     - (list of datetimes, list of observed heights in meters)
+    """
+    #NOAA api with the requested date interval
+    api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
+          '&end_date={}&'.format(beg_date.strftime("%Y%m%d %H:%M"), end_date.strftime("%Y%m%d %H:%M"))
+
+    #NOAA observed water level product
+    obs_url = 'station={}&product=water_level&datum=MTL&units=metric&time_zone=gmt'\
+              '&application=ports_screen&format=json'.format(stationID)
+
+    obs_data_page = requests.get(api + obs_url)
+    obs_data = obs_data_page.json()['data']
+    obs_heights = [float(d['v']) for d in obs_data]
+    NOAA_times = [datetime.strptime(d['t'], '%Y-%m-%d %H:%M') for d in obs_data]
+
+    #Uncomment to export the data to csv:
+    #pd.DataFrame({'Datetimes': NOAA_times, 'Observed Heights': obs_heights}).to_csv(
+    #    '{}_NOAA_water_levels.csv'.format(stationID), index=False)
+
+    return NOAA_times, obs_heights
+
+
+def retrieve_predicted_tide(stationID, beg_date, end_date):
+    """Retrieve NOAA's own predicted tide from its API.
+
+    Data is requested in metric units, GMT timezone, MTL datum.
+
+    :Input:
+     - stationID (str), beg_date (datetime), end_date (datetime)
+    :Output:
+     - list of predicted heights in meters
+    """
+    #NOAA api with the requested date interval
+    api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
+          '&end_date={}&'.format(beg_date.strftime("%Y%m%d %H:%M"), end_date.strftime("%Y%m%d %H:%M"))
+
+    #NOAA predictions product
+    pred_url = 'station={}&product=predictions&datum=MTL&units=metric&time_zone=gmt'\
+               '&application=ports_screen&format=json'.format(stationID)
+
+    pred_data_page = requests.get(api + pred_url)
+    pred_data = pred_data_page.json()['predictions']
+
+    return [float(d['v']) for d in pred_data]
+
+
+def datum_value(stationID, datum):
+    """Retrieve the value of a datum reference for a station.
+
+    :Input:
+     - stationID (str), datum (str) - datum name as listed by NOAA (e.g. 'MTL', 'MSL')
+    :Output:
+     - float datum value in meters
+    :Raises:
+     - ValueError if the station does not list the requested datum
+    """
+    #Scrapes the station's datums product (metric, GMT)
+    datum_url = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?&station={}&product=datums'\
+                '&units=metric&time_zone=gmt&application=ports_screen&format=json'.format(stationID)
+    page_data = requests.get(datum_url)
+    data = page_data.json()['datums']
+    #Do not shadow this function's name; fail clearly if the datum is absent
+    values = [d['v'] for d in data if d['n'] == datum]
+    if not values:
+        raise ValueError('Datum {} not found for station {}'.format(datum, stationID))
+    return float(values[0])
+
+
+def predict_tide(stationID, beg_date, end_date):
+    """Predict the tide at a NOAA gauge station with Pytides.
+
+    Builds a harmonic model from the station's NOAA constituents, offset to
+    the MTL datum, and evaluates it at 6-minute (0.1 hr) intervals.
+
+    :Input:
+     - stationID (str), beg_date (datetime), end_date (datetime)
+    :Output:
+     - list of predicted heights (meters) from beg_date through end_date
+    """
+    #These are the NOAA constituents, in the order presented on NOAA's website.
+    constituents = [c for c in noaa if c != _Z0]
+    noaa_values = retrieve_constituents(stationID)
+    noaa_amplitudes = [float(amplitude) for amplitude in noaa_values['Amplitude']]
+    noaa_phases = [float(phase) for phase in noaa_values['Phase']]
+
+    #We can add a constant offset - set to MTL
+    MTL = datum_value(stationID, 'MTL')
+    MSL = datum_value(stationID, 'MSL')
+    offset = MSL - MTL
+    constituents.append(_Z0)
+    noaa_phases.append(0)
+    noaa_amplitudes.append(offset)
+
+    #Build the model
+    assert(len(constituents) == len(noaa_phases) == len(noaa_amplitudes))
+    model = np.zeros(len(constituents), dtype = Tide.dtype)
+    model['constituent'] = constituents
+    model['amplitude'] = noaa_amplitudes
+    model['phase'] = noaa_phases
+    tide = Tide(model = model, radians = False)
+
+    #Time Calculations: 6-minute (0.1 hr) steps from beg_date through end_date
+    delta = (end_date - beg_date)/timedelta(hours=1) + .1
+    times = Tide._times(beg_date, np.arange(0, delta, .1))
+
+    #Height Calculations, flattened into a single list of floats
+    heights_arrays = [tide.at([t]) for t in times]
+    return [val for sublist in heights_arrays for val in sublist]
+
+
+def datetimes(beg_date, end_date):
+    """Return datetimes at 6-minute (0.1 hr) intervals from beg_date through end_date.
+
+    These are the times at which predict_tide evaluates heights.
+    """
+    delta = (end_date - beg_date)/timedelta(hours=1) + .1
+    #Bug fix: the range must start at beg_date; the original started the
+    #range at end_date, offsetting every returned time by the whole interval
+    #(predict_tide correctly starts its identical range at the begin date).
+    return Tide._times(beg_date, np.arange(0, delta, .1))
+
+
+def detide(observed, predicted):
+    """Detide observed water levels.
+
+    :Input:
+     - observed, predicted - sequences of heights (meters)
+    :Output:
+     - list of observed minus predicted values (the surge signal)
+
+    Pairs are matched elementwise; zip stops at the shorter sequence, so a
+    trailing data gap in either series no longer raises an IndexError.
+    """
+    # NOAA observed water level - predicted tide
+    return [obs - pred for obs, pred in zip(observed, predicted)]
+
+
+def surge(stationID, beg_date, end_date, landfall_date):
+    """Compute the surge (observed minus predicted tide) at a NOAA gauge station.
+
+    :Input:
+     - stationID (str)
+     - beg_date, end_date (datetime) - interval of the prediction
+     - landfall_date (datetime) - reference time for the returned time axis
+    :Output:
+     - (times in days relative to landfall_date, surge heights in meters)
+    """
+    predicted_tide = predict_tide(stationID, beg_date, end_date)
+    NOAA_times, NOAA_observed_water_level = retrieve_water_levels(stationID, beg_date, end_date)
+
+    #Avoid shadowing this function's own name with the local result
+    surge_values = detide(NOAA_observed_water_level, predicted_tide)
+    times = [(time - landfall_date)/timedelta(days=1) for time in NOAA_times]
+
+    return times, surge_values
+
+
+
+######################## Nodal Corrections ########################
+
+def f_unity(a):
+ return 1.0
+
+#Schureman equations 73, 65
+def f_Mm(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = (2/3.0 - np.sin(omega)**2)*(1 - 3/2.0 * np.sin(i)**2)
+ return (2/3.0 - np.sin(I)**2) / mean
+
+#Schureman equations 74, 66
+def f_Mf(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = np.sin(omega)**2 * np.cos(0.5*i)**4
+ return np.sin(I)**2 / mean
+
+#Schureman equations 75, 67
+def f_O1(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = np.sin(omega) * np.cos(0.5*omega)**2 * np.cos(0.5*i)**4
+ return (np.sin(I) * np.cos(0.5*I)**2) / mean
+
+#Schureman equations 76, 68
+def f_J1(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
+ return np.sin(2*I) / mean
+
+#Schureman equations 77, 69
+def f_OO1(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = np.sin(omega) * np.sin(0.5*omega)**2 * np.cos(0.5*i)**4
+ return np.sin(I) * np.sin(0.5*I)**2 / mean
+
+#Schureman equations 78, 70
+def f_M2(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ mean = np.cos(0.5*omega)**4 * np.cos(0.5*i)**4
+ return np.cos(0.5*I)**4 / mean
+
+#Schureman equations 227, 226, 68
+#Should probably eventually include the derivations of the magic numbers (0.5023 etc).
+def f_K1(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ nu = d2r*a['nu'].value
+ sin2Icosnu_mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
+ mean = 0.5023*sin2Icosnu_mean + 0.1681
+ return (0.2523*np.sin(2*I)**2 + 0.1689*np.sin(2*I)*np.cos(nu)+0.0283)**(0.5) / mean
+
+#Schureman equations 215, 213, 204
+#It can be (and has been) confirmed that the exponent for R_a reads 1/2 via Schureman Table 7
+def f_L2(a):
+ P = d2r*a['P'].value
+ I = d2r*a['I'].value
+ R_a_inv = (1 - 12*np.tan(0.5*I)**2 * np.cos(2*P)+36*np.tan(0.5*I)**4)**(0.5)
+ return f_M2(a) * R_a_inv
+
+#Schureman equations 235, 234, 71
+#Again, magic numbers
+def f_K2(a):
+ omega = d2r*a['omega'].value
+ i = d2r*a['i'].value
+ I = d2r*a['I'].value
+ nu = d2r*a['nu'].value
+ sinsqIcos2nu_mean = np.sin(omega)**2 * (1-3/2.0 * np.sin(i)**2)
+ mean = 0.5023*sinsqIcos2nu_mean + 0.0365
+ return (0.2533*np.sin(I)**4 + 0.0367*np.sin(I)**2 *np.cos(2*nu)+0.0013)**(0.5) / mean
+
+#Schureman equations 206, 207, 195
+def f_M1(a):
+ P = d2r*a['P'].value
+ I = d2r*a['I'].value
+ Q_a_inv = (0.25 + 1.5*np.cos(I)*np.cos(2*P)*np.cos(0.5*I)**(-0.5) + 2.25*np.cos(I)**2 * np.cos(0.5*I)**(-4))**(0.5)
+ return f_O1(a) * Q_a_inv
+
+#See e.g. Schureman equation 149
+def f_Modd(a, n):
+ return f_M2(a) ** (n / 2.0)
+
+#Node factors u, see Table 2 of Schureman.
+
+def u_zero(a):
+ return 0.0
+
+def u_Mf(a):
+ return -2.0 * a['xi'].value
+
+def u_O1(a):
+ return 2.0 * a['xi'].value - a['nu'].value
+
+def u_J1(a):
+ return -a['nu'].value
+
+def u_OO1(a):
+ return -2.0 * a['xi'].value - a['nu'].value
+
+def u_M2(a):
+ return 2.0 * a['xi'].value - 2.0 * a['nu'].value
+
+def u_K1(a):
+ return -a['nup'].value
+
+#Schureman 214
+def u_L2(a):
+ I = d2r*a['I'].value
+ P = d2r*a['P'].value
+ R = r2d*np.arctan(np.sin(2*P)/(1/6.0 * np.tan(0.5*I) **(-2) -np.cos(2*P)))
+ return 2.0 * a['xi'].value - 2.0 * a['nu'].value - R
+
+def u_K2(a):
+ return -2.0 * a['nupp'].value
+
+#Schureman 202
+def u_M1(a):
+ I = d2r*a['I'].value
+ P = d2r*a['P'].value
+ Q = r2d*np.arctan((5*np.cos(I)-1)/(7*np.cos(I)+1)*np.tan(P))
+ return a['xi'].value - a['nu'].value + Q
+
+def u_Modd(a, n):
+ return n/2.0 * u_M2(a)
+
+
+
+######################## Constituents ########################
+
+
+class BaseConstituent(object):
+ xdo_int = {
+ 'A': 1, 'B': 2, 'C': 3, 'D': 4, 'E': 5, 'F': 6, 'G': 7, 'H': 8, 'I': 9,
+ 'J': 10, 'K': 11, 'L': 12, 'M': 13, 'N': 14, 'O': 15, 'P': 16, 'Q': 17,
+ 'R': -8, 'S': -7, 'T': -6, 'U': -5, 'V': -4, 'W': -3, 'X': -2, 'Y': -1,
+ 'Z': 0
+ }
+
+ int_xdo = {v:k for k, v in xdo_int.items()}
+
+ def __init__(self, name, xdo='', coefficients=[], u=u_zero, f=f_unity):
+ if xdo == '':
+ self.coefficients = np.array(coefficients)
+ else:
+ self.coefficients = np.array(self.xdo_to_coefficients(xdo))
+ self.name = name
+ self.u = u
+ self.f = f
+
+ def xdo_to_coefficients(self, xdo):
+ return [self.xdo_int[l.upper()] for l in xdo if l in string.ascii_letters]
+
+ def coefficients_to_xdo(self, coefficients):
+ return ''.join([self.int_xdo[c] for c in coefficients])
+
+ def V(self, astro):
+ return np.dot(self.coefficients, self.astro_values(astro))
+
+ def xdo(self):
+ return self.coefficients_to_xdo(self.coefficients)
+
+ def speed(self, a):
+ return np.dot(self.coefficients, self.astro_speeds(a))
+
+ def astro_xdo(self, a):
+ return [a['T+h-s'], a['s'], a['h'], a['p'], a['N'], a['pp'], a['90']]
+
+ def astro_speeds(self, a):
+ return np.array([each.speed for each in self.astro_xdo(a)])
+
+ def astro_values(self, a):
+ return np.array([each.value for each in self.astro_xdo(a)])
+
+ #Consider two out of phase constituents which travel at the same speed to
+ #be identical
+ def __eq__(self, c):
+ return np.all(self.coefficients[:-1] == c.coefficients[:-1])
+
+ def __hash__(self):
+ return hash(tuple(self.coefficients[:-1]))
+
+class CompoundConstituent(BaseConstituent):
+
+ def __init__(self, members = [], **kwargs):
+ self.members = members
+
+ if 'u' not in kwargs:
+ kwargs['u'] = self.u
+ if 'f' not in kwargs:
+ kwargs['f'] = self.f
+
+ super(CompoundConstituent,self).__init__(**kwargs)
+
+ self.coefficients = reduce(op.add,[c.coefficients * n for (c,n) in members])
+
+ def speed(self, a):
+ return reduce(op.add, [n * c.speed(a) for (c,n) in self.members])
+
+ def V(self, a):
+ return reduce(op.add, [n * c.V(a) for (c,n) in self.members])
+
+ def u(self, a):
+ return reduce(op.add, [n * c.u(a) for (c,n) in self.members])
+
+ def f(self, a):
+ return reduce(op.mul, [c.f(a) ** abs(n) for (c,n) in self.members])
+
+###### Base Constituents
+#Long Term
+_Z0 = BaseConstituent(name = 'Z0', xdo = 'Z ZZZ ZZZ', u = u_zero, f = f_unity)
+_Sa = BaseConstituent(name = 'Sa', xdo = 'Z ZAZ ZZZ', u = u_zero, f = f_unity)
+_Ssa = BaseConstituent(name = 'Ssa', xdo = 'Z ZBZ ZZZ', u = u_zero, f = f_unity)
+_Mm = BaseConstituent(name = 'Mm', xdo = 'Z AZY ZZZ', u = u_zero, f = f_Mm)
+_Mf = BaseConstituent(name = 'Mf', xdo = 'Z BZZ ZZZ', u = u_Mf, f = f_Mf)
+
+#Diurnals
+_Q1 = BaseConstituent(name = 'Q1', xdo = 'A XZA ZZA', u = u_O1, f = f_O1)
+_O1 = BaseConstituent(name = 'O1', xdo = 'A YZZ ZZA', u = u_O1, f = f_O1)
+_K1 = BaseConstituent(name = 'K1', xdo = 'A AZZ ZZY', u = u_K1, f = f_K1)
+_J1 = BaseConstituent(name = 'J1', xdo = 'A BZY ZZY', u = u_J1, f = f_J1)
+
+#M1 is a tricky business for reasons of convention, rather than theory. The
+#reasons for this are best summarised by Schureman paragraphs 126, 127 and in
+#the comments found in congen_input.txt of xtides, so I won't go over all this
+#again here.
+
+_M1 = BaseConstituent(name = 'M1', xdo = 'A ZZZ ZZA', u = u_M1, f = f_M1)
+_P1 = BaseConstituent(name = 'P1', xdo = 'A AXZ ZZA', u = u_zero, f = f_unity)
+_S1 = BaseConstituent(name = 'S1', xdo = 'A AYZ ZZZ', u = u_zero, f = f_unity)
+_OO1 = BaseConstituent(name = 'OO1', xdo = 'A CZZ ZZY', u = u_OO1, f = f_OO1)
+
+#Semi-Diurnals
+_2N2 = BaseConstituent(name = '2N2', xdo = 'B XZB ZZZ', u = u_M2, f = f_M2)
+_N2 = BaseConstituent(name = 'N2', xdo = 'B YZA ZZZ', u = u_M2, f = f_M2)
+_nu2 = BaseConstituent(name = 'nu2', xdo = 'B YBY ZZZ', u = u_M2, f = f_M2)
+_M2 = BaseConstituent(name = 'M2', xdo = 'B ZZZ ZZZ', u = u_M2, f = f_M2)
+_lambda2 = BaseConstituent(name = 'lambda2', xdo = 'B AXA ZZB', u = u_M2, f = f_M2)
+_L2 = BaseConstituent(name = 'L2', xdo = 'B AZY ZZB', u = u_L2, f = f_L2)
+_T2 = BaseConstituent(name = 'T2', xdo = 'B BWZ ZAZ', u = u_zero, f = f_unity)
+_S2 = BaseConstituent(name = 'S2', xdo = 'B BXZ ZZZ', u = u_zero, f = f_unity)
+_R2 = BaseConstituent(name = 'R2', xdo = 'B BYZ ZYB', u = u_zero, f = f_unity)
+_K2 = BaseConstituent(name = 'K2', xdo = 'B BZZ ZZZ', u = u_K2, f = f_K2)
+
+#Third-Diurnals
+_M3 = BaseConstituent(name = 'M3', xdo = 'C ZZZ ZZZ', u = lambda a: u_Modd(a,3), f = lambda a: f_Modd(a,3))
+
+###### Compound Constituents
+#Long Term
+_MSF = CompoundConstituent(name = 'MSF', members = [(_S2, 1), (_M2, -1)])
+
+#Diurnal
+_2Q1 = CompoundConstituent(name = '2Q1', members = [(_N2, 1), (_J1, -1)])
+_rho1 = CompoundConstituent(name = 'rho1', members = [(_nu2, 1), (_K1, -1)])
+
+#Semi-Diurnal
+
+_mu2 = CompoundConstituent(name = 'mu2', members = [(_M2, 2), (_S2, -1)]) #2MS2
+_2SM2 = CompoundConstituent(name = '2SM2', members = [(_S2, 2), (_M2, -1)])
+
+#Third-Diurnal
+_2MK3 = CompoundConstituent(name = '2MK3', members = [(_M2, 1), (_O1, 1)])
+_MK3 = CompoundConstituent(name = 'MK3', members = [(_M2, 1), (_K1, 1)])
+
+#Quarter-Diurnal
+_MN4 = CompoundConstituent(name = 'MN4', members = [(_M2, 1), (_N2, 1)])
+_M4 = CompoundConstituent(name = 'M4', members = [(_M2, 2)])
+_MS4 = CompoundConstituent(name = 'MS4', members = [(_M2, 1), (_S2, 1)])
+_S4 = CompoundConstituent(name = 'S4', members = [(_S2, 2)])
+
+#Sixth-Diurnal
+_M6 = CompoundConstituent(name = 'M6', members = [(_M2, 3)])
+_S6 = CompoundConstituent(name = 'S6', members = [(_S2, 3)])
+
+#Eighth-Diurnals
+_M8 = CompoundConstituent(name = 'M8', members = [(_M2, 4)])
+
+
+noaa = [
+ _M2, _S2, _N2, _K1, _M4, _O1, _M6, _MK3, _S4, _MN4, _nu2, _S6, _mu2,
+ _2N2, _OO1, _lambda2, _S1, _M1, _J1, _Mm, _Ssa, _Sa, _MSF, _Mf,
+ _rho1, _Q1, _T2, _R2, _2Q1, _P1, _2SM2, _M3, _L2, _2MK3, _K2,
+ _M8, _MS4
+]
+
+
+####################### Tide ########################
+
+
class Tide(object):
dtype = np.dtype([
('constituent', object),
@@ -139,7 +602,7 @@ def form_number(self):
"""
k1, o1, m2, s2 = (
np.extract(self.model['constituent'] == c, self.model['amplitude'])
- for c in [constituent._K1, constituent._O1, constituent._M2, constituent._S2]
+ for c in [_K1, _O1, _M2, _S2]
)
return (k1+o1)/(m2+s2)
@@ -274,7 +737,7 @@ def decompose(
t = None,
t0 = None,
interval = None,
- constituents = constituent.noaa,
+ constituents = noaa,
initial = None,
n_period = 2,
callback = None,
@@ -288,7 +751,7 @@ def decompose(
t -- ndarray of tidal observation times
t0 -- datetime representing the time at which heights[0] was recorded
interval -- hourly interval between readings
- constituents -- list of constituents to use in the fit (default: constituent.noaa)
+ constituents -- list of constituents to use in the fit (default: noaa)
initial -- optional Tide instance to use as first guess for least squares solver
n_period -- only include constituents which complete at least this many periods (default: 2)
callback -- optional function to be called at each iteration of the solver
@@ -318,7 +781,7 @@ def decompose(
#No need for least squares to find the mean water level constituent z0,
#work relative to mean
- constituents = [c for c in constituents if not c == constituent._Z0]
+ constituents = [c for c in constituents if not c == _Z0]
z0 = np.mean(heights)
heights = heights - z0
@@ -396,7 +859,7 @@ def D_residual(hp):
lsq = leastsq(residual, initial, Dfun=D_residual, col_deriv=True, ftol=1e-7)
model = np.zeros(1+n, dtype=cls.dtype)
- model[0] = (constituent._Z0, z0, 0)
+ model[0] = (_Z0, z0, 0)
model[1:]['constituent'] = constituents[:]
model[1:]['amplitude'] = lsq[0][:n]
model[1:]['phase'] = lsq[0][n:]
@@ -404,3 +867,208 @@ def D_residual(hp):
if full_output:
return cls(model = model, radians = True), lsq
return cls(model = model, radians = True)
+
+
+################# Astronomical Values #######################
+
+
#Convert a sexagesimal angle into decimal degrees
def s2d(degrees, arcmins = 0, arcsecs = 0, mas = 0, muas = 0):
    """Return the angle in decimal degrees.

    degrees/arcmins/arcsecs are the usual sexagesimal components;
    mas and muas are milli- and micro-arcseconds respectively.
    """
    arcsec_total = arcsecs + mas / 1e3 + muas / 1e6
    return degrees + arcmins / 60.0 + arcsec_total / 3600.0
+
#Evaluate a polynomial at argument
def polynomial(coefficients, argument):
    """Evaluate sum(c_i * argument**i); coefficients are lowest order first."""
    total = 0
    for power, coeff in enumerate(coefficients):
        total += coeff * (argument ** power)
    return total
+
#Evaluate the first derivative of a polynomial at argument
def d_polynomial(coefficients, argument):
    """Evaluate d/dx of sum(c_i * x**i) at x = argument.

    Coefficients are lowest order first. The constant term (i == 0) is
    skipped explicitly: the original summed 0 * argument**(-1) for it,
    which raised ZeroDivisionError whenever argument == 0. For any
    nonzero argument the result is unchanged (the skipped term was 0).
    """
    return sum(i * c * (argument ** (i - 1))
               for i, c in enumerate(coefficients) if i > 0)
+
#Meeus formula 11.1
def T(t):
    """Julian centuries elapsed since the J2000.0 epoch at datetime t."""
    J2000 = 2451545.0
    days_per_julian_century = 36525
    return (JD(t) - J2000) / days_per_julian_century

#Meeus formula 7.1
def JD(t):
    """Julian Date of datetime t (Gregorian calendar), including the
    fractional day contributed by the time-of-day fields."""
    year, month = t.year, t.month
    day = (
        t.day
        + t.hour / 24.0
        + t.minute / (24.0*60.0)
        + t.second / (24.0*60.0*60.0)
        + t.microsecond / (24.0 * 60.0 * 60.0 * 1e6)
    )
    #January and February count as months 13 and 14 of the previous year.
    if month <= 2:
        year -= 1
        month += 12
    century = np.floor(year / 100.0)
    #Gregorian leap-year correction (term B of Meeus formula 7.1).
    gregorian_b = 2 - century + np.floor(century / 4.0)
    return (np.floor(365.25 * (year + 4716))
            + np.floor(30.6001 * (month + 1))
            + day + gregorian_b - 1524.5)
+
#Meeus formula 21.3
#Polynomial coefficients (decimal degrees) for the obliquity of the
#ecliptic; as written, the polynomial argument is Meeus's U.
terrestrial_obliquity_coefficients = (
    s2d(23,26,21.448),
    -s2d(0,0,4680.93),
    -s2d(0,0,1.55),
    s2d(0,0,1999.25),
    -s2d(0,0,51.38),
    -s2d(0,0,249.67),
    -s2d(0,0,39.05),
    s2d(0,0,7.12),
    s2d(0,0,27.87),
    s2d(0,0,5.79),
    s2d(0,0,2.45)
)

#Adjust these coefficients for parameter T rather than U
#(the (1e-2)**i rescale implies U = T/100, i.e. U is in units of 10,000 years).
terrestrial_obliquity_coefficients = [
    c * (1e-2) ** i for i,c in enumerate(terrestrial_obliquity_coefficients)
]
+
#Not entirely sure about this interpretation, but this is the difference
#between Meeus formulae 24.2 and 24.3 and seems to work
solar_perigee_coefficients = (
    280.46645 - 357.52910,
    36000.76932 - 35999.05030,
    0.0003032 + 0.0001559,
    0.00000048
)

#Meeus formula 24.2
solar_longitude_coefficients = (
    280.46645,
    36000.76983,
    0.0003032
)

#This value is taken from JPL Horizon and is essentially constant
lunar_inclination_coefficients = (
    5.145,
)

#Meeus formula 45.1
#BUGFIX: the original tuple was missing the comma after 1/538841.0, which
#silently merged the cubic and quartic coefficients into a single term
#(1/538841.0 - 1/65194000.0) and dropped the T**4 term of the lunar mean
#longitude polynomial entirely. Meeus 45.1 lists five coefficients, matching
#the node and perigee polynomials below.
lunar_longitude_coefficients = (
    218.3164591,
    481267.88134236,
    -0.0013268,
    1/538841.0,
    -1/65194000.0
)

#Meeus formula 45.7
lunar_node_coefficients = (
    125.0445550,
    -1934.1361849,
    0.0020762,
    1/467410.0,
    -1/60616000.0
)

#Meeus, unnumbered formula directly preceded by 45.7
lunar_perigee_coefficients = (
    83.3532430,
    4069.0137111,
    -0.0103238,
    -1/80053.0,
    1/18999000.0
)
+
#Now follow some useful auxiliary values, we won't need their speed.
#See notes on Table 6 in Schureman for I, nu, xi, nu', 2nu''
def _I(N, i, omega):
    """Schureman's angle I (degrees); inputs N, i, omega are in degrees."""
    N_r, i_r, omega_r = d2r * N, d2r*i, d2r*omega
    cos_I = (np.cos(i_r)*np.cos(omega_r)
             - np.sin(i_r)*np.sin(omega_r)*np.cos(N_r))
    return r2d*np.arccos(cos_I)
+
def _xi(N, i, omega):
    """Schureman's angle xi (degrees); inputs N, i, omega are in degrees.

    See the note on Table 6 in Schureman. Returns minus the sum of the two
    half-angle arctangent terms.
    """
    N, i, omega = d2r * N, d2r*i, d2r*omega
    e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
    e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
    e1, e2 = np.arctan(e1), np.arctan(e2)
    e1, e2 = e1 - 0.5*N, e2 - 0.5*N
    return -(e1 + e2)*r2d
+
def _nu(N, i, omega):
    """Schureman's angle nu (degrees); inputs N, i, omega are in degrees.

    Uses the same intermediate angles as _xi but returns their difference
    rather than the negated sum.
    """
    N, i, omega = d2r * N, d2r*i, d2r*omega
    e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
    e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
    e1, e2 = np.arctan(e1), np.arctan(e2)
    e1, e2 = e1 - 0.5*N, e2 - 0.5*N
    return (e1 - e2)*r2d
+
#Schureman equation 224
#Can we be more precise than B "the solar coefficient" = 0.1681?
def _nup(N, i, omega):
    """Schureman's nu' (degrees); inputs N, i, omega are in degrees."""
    I_r = d2r * _I(N, i, omega)
    nu_r = d2r * _nu(N, i, omega)
    numerator = np.sin(2*I_r)*np.sin(nu_r)
    denominator = np.sin(2*I_r)*np.cos(nu_r)+0.3347
    return r2d * np.arctan(numerator/denominator)
+
#Schureman equation 232
def _nupp(N, i, omega):
    """Schureman's 2nu'' over two, i.e. nu'' (degrees); inputs in degrees."""
    I_r = d2r * _I(N, i, omega)
    nu_r = d2r * _nu(N, i, omega)
    numerator = np.sin(I_r)**2*np.sin(2*nu_r)
    denominator = np.sin(I_r)**2*np.cos(2*nu_r)+0.0727
    #arctan gives 2*nu''; halve it before converting back to degrees.
    return r2d * 0.5 * np.arctan(numerator/denominator)
+
AstronomicalParameter = namedtuple('AstronomicalParameter', ['value', 'speed'])
#value: angle in degrees reduced mod 360; speed: degrees per hour, or None
#for quantities whose rate of change is never used.

def astro(t):
    """Compute the astronomical arguments needed for harmonic tide analysis.

    t -- datetime at which to evaluate (presumably UTC/GMT -- confirm
         against callers)

    Returns a dict of AstronomicalParameter tuples: the Meeus polynomial
    values s, h, p, N, pp, omega, i and the constant '90'; Schureman's
    auxiliary angles I, xi, nu, nup, nupp and P (speed None); and the
    composite 'T+h-s' used as a spanning value for equilibrium arguments.
    """
    a = {}
    #We can use polynomial fits from Meeus to obtain good approximations to
    #some astronomical values (and therefore speeds).
    polynomials = {
        's': lunar_longitude_coefficients,
        'h': solar_longitude_coefficients,
        'p': lunar_perigee_coefficients,
        'N': lunar_node_coefficients,
        'pp': solar_perigee_coefficients,
        '90': (90.0,),
        'omega': terrestrial_obliquity_coefficients,
        'i': lunar_inclination_coefficients
    }
    #Polynomials are in T, that is Julian Centuries; we want our speeds to be
    #in the more convenient unit of degrees per hour.
    dT_dHour = 1 / (24 * 365.25 * 100)
    for name, coefficients in polynomials.items():
        a[name] = AstronomicalParameter(
            np.mod(polynomial(coefficients, T(t)), 360.0),
            d_polynomial(coefficients, T(t)) * dT_dHour
        )

    #Some other parameters defined by Schureman which are dependent on the
    #parameters N, i, omega for use in node factor calculations. We don't need
    #their speeds.
    args = list(each.value for each in [a['N'], a['i'], a['omega']])
    for name, function in {
        'I': _I,
        'xi': _xi,
        'nu': _nu,
        'nup': _nup,
        'nupp': _nupp
    }.items():
        a[name] = AstronomicalParameter(np.mod(function(*args), 360.0), None)

    #We don't work directly with the T (hours) parameter, instead our spanning
    #set for equilibrium arguments #is given by T+h-s, s, h, p, N, pp, 90.
    #This is in line with convention.
    #The fractional day is turned into an angle (360 degrees per day,
    #hence a speed of 15 degrees per hour).
    hour = AstronomicalParameter((JD(t) - np.floor(JD(t))) * 360.0, 15.0)
    a['T+h-s'] = AstronomicalParameter(
        hour.value + a['h'].value - a['s'].value,
        hour.speed + a['h'].speed - a['s'].speed
    )
    #It is convenient to calculate Schureman's P here since several node
    #factors need it, although it could be argued that these
    #(along with I, xi, nu etc) belong somewhere else.
    a['P'] = AstronomicalParameter(
        np.mod(a['p'].value -a['xi'].value,360.0),
        None
    )
    return a
+
+
From 4489e7238f47f57dd626acc2d0ce50509e110dbd Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:02:14 -0600
Subject: [PATCH 02/14] Delete Boundary_Conditions.ipynb
---
tide_predictions/Boundary_Conditions.ipynb | 468 ---------------------
1 file changed, 468 deletions(-)
delete mode 100644 tide_predictions/Boundary_Conditions.ipynb
diff --git a/tide_predictions/Boundary_Conditions.ipynb b/tide_predictions/Boundary_Conditions.ipynb
deleted file mode 100644
index b34cef6..0000000
--- a/tide_predictions/Boundary_Conditions.ipynb
+++ /dev/null
@@ -1,468 +0,0 @@
-{
- "cells": [
- {
- "cell_type": "markdown",
- "id": "070f5263",
- "metadata": {
- "scrolled": false
- },
- "source": [
- "# Example of Tide Prediction For One Date Instance\n",
- "\n",
- "- In this example, method used to predict tide is adapated from Pytides\n",
- "- This implementation will only work for known NOAA gauge stations\n",
- "- Harmonic Constituents data is scraped from NOAA. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "f1656e3a",
- "metadata": {},
- "outputs": [],
- "source": [
- "import matplotlib.pyplot as plt\n",
- "import datetime\n",
- "import noaa_scraper as noaa "
- ]
- },
- {
- "cell_type": "markdown",
- "id": "1cd0bf32",
- "metadata": {},
- "source": [
- "### **** Station Information ****"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "33307057",
- "metadata": {},
- "source": [
- "Locate NOAA station ID. NOAA gauge stations home: https://tidesandcurrents.noaa.gov/
\n",
- "Fill in station ID and date for tide prediction!"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "3c76d8a9",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Station Information\n",
- "stationID = '8518750'\n",
- "\n",
- "#Date of prediction (YEAR, MTH, DAY, HR)\n",
- "prediction_date = datetime.datetime(2017,10,5,0)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "444bf7f2",
- "metadata": {},
- "source": [
- "### Tide Prediction"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "67132515",
- "metadata": {},
- "source": [
- "Prediction of tide at specified location (station ID) and specified time (GMT) implemented below by calling predict_tide( ) method with the following arguments: stationID, beg_prediction_date, end_prediction_date.
\n",
- "\n",
- "To predict tide at an instant, set beg_prediction_date and end_prediction_date in predict_tide( ) method to the same date!"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "fa3eebea",
- "metadata": {},
- "outputs": [],
- "source": [
- "#NOAA Data Scraping Implementation \n",
- "height = noaa.predict_tide(stationID, prediction_date, prediction_date)\n",
- "print(height[0])"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "e775a8ff",
- "metadata": {},
- "source": [
- "*******************************************************************************************************************"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "d4be333b",
- "metadata": {},
- "source": [
- "# Example of Tide Prediction In A Date Interval "
- ]
- },
- {
- "cell_type": "markdown",
- "id": "f64a6167",
- "metadata": {},
- "source": [
- "### Station Information "
- ]
- },
- {
- "cell_type": "markdown",
- "id": "b2b2c5c8",
- "metadata": {},
- "source": [
- "Fill in station ID, a beginning date and an end date for tide prediction below"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "6af4f3b1",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Station Information\n",
- "stationID = '8518750'\n",
- "\n",
- "#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
- "\n",
- "#Predict tide with arguments set as: (stationID, beg_prediction_date, end_prediction_date)\n",
- "predicted_tide = noaa.predict_tide(stationID, beg_date, end_date)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "5fdc6fa2",
- "metadata": {},
- "source": [
- "### Tide Predictions\n",
- "Plot results in a time series plot"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b07d1fa6",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Method datetimes() makes a range of datetimes given arguments: (beg_prediction_date, end_prediction_date)\n",
- "times = noaa.datetimes(beg_date, end_date)\n",
- "\n",
- "plt.figure(figsize=(20,10))\n",
- "plt.plot(times, predicted_tide, \"-\", label=\"My Prediction\")\n",
- "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
- "plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('My Prediction for Station {}'.format(stationID))\n",
- "plt.show()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "6e68cdb9",
- "metadata": {},
- "source": [
- "*******************************************************************************************************************"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "1d65a5a1",
- "metadata": {},
- "source": [
- "# Example Comparing NOAA vs Our Tide Prediction In A Date Interval "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "bb42cecd",
- "metadata": {
- "scrolled": false
- },
- "outputs": [],
- "source": [
- "#Station Information\n",
- "stationID = '8518750'\n",
- "\n",
- "#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,2,0,0)\n",
- "\n",
- "#Predict Tide \n",
- "predicted_tide = noaa.predict_tide(stationID, beg_date, end_date)"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "0bcba7b5",
- "metadata": {},
- "source": [
- "- Calling function retrieve_water_levels( ) with arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's datetimes and observed water level data for the specified NOAA station in the date interval provided\n",
- "- The function retrieve_predicted_tide( ) arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's predicted tide data for the specified NOAA station. \n",
- "- Data is scraped in Metric units, GMT timezone, MSL datum and 6 min interval. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "558239c0",
- "metadata": {
- "scrolled": false
- },
- "outputs": [],
- "source": [
- "times, NOAA_observed_water_lvl = noaa.retrieve_water_levels(stationID, beg_date, end_date)\n",
- "NOAA_predicted_tide = noaa.retrieve_predicted_tide(stationID, beg_date, end_date)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "c91f7d0d",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Plot Comparisons\n",
- "plt.figure(figsize=(20,10))\n",
- "plt.plot(times, predicted_tide, \"-\", label=\"My Prediction\")\n",
- "plt.plot(times, NOAA_predicted_tide, \"-\", label=\"NOAA Prediction\")\n",
- "plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Observation\")\n",
- "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
- "plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
- "plt.show()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "edabf72c",
- "metadata": {},
- "source": [
- "*******************************************************************************************************************"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "25f9fca1",
- "metadata": {},
- "source": [
- "# Example Detiding and Capturing A Surge for a Gauge Station "
- ]
- },
- {
- "cell_type": "markdown",
- "id": "ff60fcac",
- "metadata": {},
- "source": [
- "- Calling predict_tide( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) will output predicted tide at the specified location and time interval\n",
- "- Calling retrieve_water_levels( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) with output datetimes and observed water levels at the specified location and time interval\n",
- "- Calling detide( ) method with arguments set as: (NOAA observed water level, predicted tide) will output detided water level. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2e12ce1c",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Station Information\n",
- "stationID = '8518750'\n",
- "\n",
- "#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
- "\n",
- "predicted_tide = noaa.predict_tide(stationID, beg_date, end_date)\n",
- "times, NOAA_observed_water_lvl = noaa.retrieve_water_levels(stationID, beg_date, end_date)\n",
- "\n",
- "surge = noaa.detide(NOAA_observed_water_lvl, predicted_tide)\n",
- "\n",
- "#Plot Comparisons\n",
- "plt.figure(figsize=(20,10))\n",
- "plt.plot(times, surge, \"-\", label=\"My Prediction\")\n",
- "plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
- "plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('Detided Water Level for Station {}'.format(stationID))\n",
- "plt.show()"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "5361dc29",
- "metadata": {},
- "source": [
- "*******************************************************************************************************************"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "48fd6775",
- "metadata": {},
- "source": [
- "# Example for Clawpack Storm Surge Implementation"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "44b5db31",
- "metadata": {},
- "source": [
- "- Uncomment line below to utilize Clawpack's pytides module located under geoclaw\n",
- "- Code below works best if placed in gauge_afteraxes( ) in setplot.py\n",
- "- Calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) will output observed surge from NOAA."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "15e31fba",
- "metadata": {},
- "outputs": [],
- "source": [
- "#import clawpack.geoclaw.pytides.noaa_scraper as noaa"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "1483072e",
- "metadata": {},
- "outputs": [],
- "source": [
- "#Station Information\n",
- "stationID = '8518750'\n",
- "\n",
- "#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
- "landfall_date = datetime.datetime(2017,10,3,12,0)\n",
- "\n",
- "times, surge = noaa.surge(stationID, beg_date, end_date, landfall_date)\n",
- "plt.plot(times, surge, color=\"b\", label=\"My Prediction\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "d364a6c2",
- "metadata": {},
- "source": [
- "*******************************************************************************************************************"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "b5b0d89f",
- "metadata": {},
- "source": [
- "# Example Iterating Through A Library Of Stations And Date Intervals"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b3cdcd65",
- "metadata": {},
- "outputs": [],
- "source": [
- "#import clawpack.geoclaw.pytides.noaa_scraper as noaa\n",
- "from datetime import datetime"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "d26bd732",
- "metadata": {
- "scrolled": false
- },
- "outputs": [],
- "source": [
- "\n",
- "station_dict = {'8518750':(datetime(2017,10,1,0), datetime(2017,10,5,0), datetime(2017,10,3,0)),\n",
- " '8540433':(datetime(2017,9,1,0), datetime(2017,9,10,12), datetime(2017,9,5,6)),\n",
- " '8531680':(datetime(2020,10,1,0), datetime(2020,10,10,0), datetime(2020,10,6,0)),\n",
- " '8722956':(datetime(2019,8,1,0), datetime(2019,8,12,0), datetime(2019,8,6,12)),\n",
- " '8658120':(datetime(2018,11,1,0), datetime(2018,11,8,0), datetime(2018,11,3,18)),\n",
- " '8516945':(datetime(2013,1,1,0), datetime(2013,1,6,0), datetime(2013,1,3,12))}\n",
- "\n",
- "for (key, value) in station_dict.items():\n",
- " stationID = key\n",
- " beg_date = value[0]\n",
- " end_date = value[1]\n",
- " landfall_date = value[2]\n",
- " \n",
- " #NOAA Data Scraping Implementation\n",
- " predicted_tide = noaa.predict_tide(stationID, beg_date, end_date) \n",
- " times, NOAA_observed_water_lvl = noaa.retrieve_water_levels(stationID, beg_date, end_date)\n",
- " NOAA_predicted_tide = noaa.retrieve_predicted_tide(stationID, beg_date, end_date)\n",
- " \n",
- " #Detide Water Level\n",
- " surge = noaa.detide(NOAA_observed_water_lvl, predicted_tide)\n",
- " NOAA_surge = noaa.detide(NOAA_observed_water_lvl, NOAA_predicted_tide)\n",
- " \n",
- " #Plot Comparisons\n",
- " plt.figure(figsize=(20,10))\n",
- " plt.plot(times, predicted_tide, \"-\", label=\"My Prediction\")\n",
- " plt.plot(times, NOAA_predicted_tide, \"-\", label=\"NOAA Prediction\")\n",
- " plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Observation\")\n",
- " plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
- " plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- " plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
- " plt.show()\n",
- " \n",
- " #Detided Water Level Comparison\n",
- " plt.figure(figsize=(20,10))\n",
- " plt.plot(times, surge, \"-\", label=\"Our Detided Prediction\")\n",
- " plt.plot(times, NOAA_surge, \"-\", label=\"NOAA's Detided Prediction\")\n",
- " plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
- " plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- " plt.title('Detided Water Level Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
- " plt.show()\n",
- " \n",
- " #Clawpack Implementation\n",
- " times, surge = noaa.surge(stationID, beg_date, end_date, landfall_date)\n",
- " plt.plot(times, surge, color=\"b\", label=\"My Prediction\")\n",
- " \n",
- " "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "dd4972d2",
- "metadata": {},
- "outputs": [],
- "source": []
- }
- ],
- "metadata": {
- "kernelspec": {
- "display_name": "Python 3",
- "language": "python",
- "name": "python3"
- },
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.9.6"
- }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
From fe41ac100088151bb538a0501116c660a30f35f4 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:02:22 -0600
Subject: [PATCH 03/14] Delete astro.py
---
tide_predictions/astro.py | 209 --------------------------------------
1 file changed, 209 deletions(-)
delete mode 100644 tide_predictions/astro.py
diff --git a/tide_predictions/astro.py b/tide_predictions/astro.py
deleted file mode 100644
index a15764d..0000000
--- a/tide_predictions/astro.py
+++ /dev/null
@@ -1,209 +0,0 @@
-from collections import namedtuple
-import numpy as np
-d2r, r2d = np.pi/180.0, 180.0/np.pi
-
-# Most of this is based around Meeus's Astronomical Algorithms, since it
-# presents reasonably good approximations of all the quantities we require in a
-# clear fashion. Reluctant to go all out and use VSOP87 unless it can be shown
-# to make a significant difference to the resulting accuracy of harmonic
-# analysis.
-
-#Convert a sexagesimal angle into decimal degrees
-def s2d(degrees, arcmins = 0, arcsecs = 0, mas = 0, muas = 0):
- return (
- degrees
- + (arcmins / 60.0)
- + (arcsecs / (60.0*60.0))
- + (mas / (60.0*60.0*1e3))
- + (muas / (60.0*60.0*1e6))
- )
-
-#Evaluate a polynomial at argument
-def polynomial(coefficients, argument):
- return sum([c * (argument ** i) for i,c in enumerate(coefficients)])
-
-#Evaluate the first derivative of a polynomial at argument
-def d_polynomial(coefficients, argument):
- return sum([c * i * (argument ** (i-1)) for i,c in enumerate(coefficients)])
-
-#Meeus formula 11.1
-def T(t):
- return (JD(t) - 2451545.0)/36525
-
-#Meeus formula 7.1
-def JD(t):
- Y, M = t.year, t.month
- D = (
- t.day
- + t.hour / (24.0)
- + t.minute / (24.0*60.0)
- + t.second / (24.0*60.0*60.0)
- + t.microsecond / (24.0 * 60.0 * 60.0 * 1e6)
- )
- if M <= 2:
- Y = Y - 1
- M = M + 12
- A = np.floor(Y / 100.0)
- B = 2 - A + np.floor(A / 4.0)
- return np.floor(365.25*(Y+4716)) + np.floor(30.6001*(M+1)) + D + B - 1524.5
-
-#Meeus formula 21.3
-terrestrial_obliquity_coefficients = (
- s2d(23,26,21.448),
- -s2d(0,0,4680.93),
- -s2d(0,0,1.55),
- s2d(0,0,1999.25),
- -s2d(0,0,51.38),
- -s2d(0,0,249.67),
- -s2d(0,0,39.05),
- s2d(0,0,7.12),
- s2d(0,0,27.87),
- s2d(0,0,5.79),
- s2d(0,0,2.45)
-)
-
-#Adjust these coefficients for parameter T rather than U
-terrestrial_obliquity_coefficients = [
- c * (1e-2) ** i for i,c in enumerate(terrestrial_obliquity_coefficients)
-]
-
-#Not entirely sure about this interpretation, but this is the difference
-#between Meeus formulae 24.2 and 24.3 and seems to work
-solar_perigee_coefficients = (
- 280.46645 - 357.52910,
- 36000.76932 - 35999.05030,
- 0.0003032 + 0.0001559,
- 0.00000048
-)
-
-#Meeus formula 24.2
-solar_longitude_coefficients = (
- 280.46645,
- 36000.76983,
- 0.0003032
-)
-
-#This value is taken from JPL Horizon and is essentially constant
-lunar_inclination_coefficients = (
- 5.145,
-)
-
-#Meeus formula 45.1
-lunar_longitude_coefficients = (
- 218.3164591,
- 481267.88134236,
- -0.0013268,
- 1/538841.0
- -1/65194000.0
-)
-
-#Meeus formula 45.7
-lunar_node_coefficients = (
- 125.0445550,
- -1934.1361849,
- 0.0020762,
- 1/467410.0,
- -1/60616000.0
-)
-
-#Meeus, unnumbered formula directly preceded by 45.7
-lunar_perigee_coefficients = (
- 83.3532430,
- 4069.0137111,
- -0.0103238,
- -1/80053.0,
- 1/18999000.0
-)
-
-#Now follow some useful auxiliary values, we won't need their speed.
-#See notes on Table 6 in Schureman for I, nu, xi, nu', 2nu''
-def _I(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- cosI = np.cos(i)*np.cos(omega)-np.sin(i)*np.sin(omega)*np.cos(N)
- return r2d*np.arccos(cosI)
-
-def _xi(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
- e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
- e1, e2 = np.arctan(e1), np.arctan(e2)
- e1, e2 = e1 - 0.5*N, e2 - 0.5*N
- return -(e1 + e2)*r2d
-
-def _nu(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
- e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
- e1, e2 = np.arctan(e1), np.arctan(e2)
- e1, e2 = e1 - 0.5*N, e2 - 0.5*N
- return (e1 - e2)*r2d
-
-#Schureman equation 224
-#Can we be more precise than B "the solar coefficient" = 0.1681?
-def _nup(N, i, omega):
- I = d2r * _I(N, i, omega)
- nu = d2r * _nu(N, i, omega)
- return r2d * np.arctan(np.sin(2*I)*np.sin(nu)/(np.sin(2*I)*np.cos(nu)+0.3347))
-
-#Schureman equation 232
-def _nupp(N, i, omega):
- I = d2r * _I(N, i, omega)
- nu = d2r * _nu(N, i, omega)
- tan2nupp = (np.sin(I)**2*np.sin(2*nu))/(np.sin(I)**2*np.cos(2*nu)+0.0727)
- return r2d * 0.5 * np.arctan(tan2nupp)
-
-AstronomicalParameter = namedtuple('AstronomicalParameter', ['value', 'speed'])
-
-def astro(t):
- a = {}
- #We can use polynomial fits from Meeus to obtain good approximations to
- #some astronomical values (and therefore speeds).
- polynomials = {
- 's': lunar_longitude_coefficients,
- 'h': solar_longitude_coefficients,
- 'p': lunar_perigee_coefficients,
- 'N': lunar_node_coefficients,
- 'pp': solar_perigee_coefficients,
- '90': (90.0,),
- 'omega': terrestrial_obliquity_coefficients,
- 'i': lunar_inclination_coefficients
- }
- #Polynomials are in T, that is Julian Centuries; we want our speeds to be
- #in the more convenient unit of degrees per hour.
- dT_dHour = 1 / (24 * 365.25 * 100)
- for name, coefficients in polynomials.items():
- a[name] = AstronomicalParameter(
- np.mod(polynomial(coefficients, T(t)), 360.0),
- d_polynomial(coefficients, T(t)) * dT_dHour
- )
-
- #Some other parameters defined by Schureman which are dependent on the
- #parameters N, i, omega for use in node factor calculations. We don't need
- #their speeds.
- args = list(each.value for each in [a['N'], a['i'], a['omega']])
- for name, function in {
- 'I': _I,
- 'xi': _xi,
- 'nu': _nu,
- 'nup': _nup,
- 'nupp': _nupp
- }.items():
- a[name] = AstronomicalParameter(np.mod(function(*args), 360.0), None)
-
- #We don't work directly with the T (hours) parameter, instead our spanning
- #set for equilibrium arguments #is given by T+h-s, s, h, p, N, pp, 90.
- #This is in line with convention.
- hour = AstronomicalParameter((JD(t) - np.floor(JD(t))) * 360.0, 15.0)
- a['T+h-s'] = AstronomicalParameter(
- hour.value + a['h'].value - a['s'].value,
- hour.speed + a['h'].speed - a['s'].speed
- )
- #It is convenient to calculate Schureman's P here since several node
- #factors need it, although it could be argued that these
- #(along with I, xi, nu etc) belong somewhere else.
- a['P'] = AstronomicalParameter(
- np.mod(a['p'].value -a['xi'].value,360.0),
- None
- )
- return a
-
From 7a867daf5ba755d5e659dd3d1b65938aa747c2e5 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:02:30 -0600
Subject: [PATCH 04/14] Delete nodal_corrections.py
---
tide_predictions/nodal_corrections.py | 141 --------------------------
1 file changed, 141 deletions(-)
delete mode 100644 tide_predictions/nodal_corrections.py
diff --git a/tide_predictions/nodal_corrections.py b/tide_predictions/nodal_corrections.py
deleted file mode 100644
index 170f0d6..0000000
--- a/tide_predictions/nodal_corrections.py
+++ /dev/null
@@ -1,141 +0,0 @@
-
-import numpy as np
-d2r, r2d = np.pi/180.0, 180.0/np.pi
-
-#The following functions take a dictionary of astronomical values (in degrees)
-#and return dimensionless scale factors for constituent amplitudes.
-
-def f_unity(a):
- return 1.0
-
-#Schureman equations 73, 65
-def f_Mm(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = (2/3.0 - np.sin(omega)**2)*(1 - 3/2.0 * np.sin(i)**2)
- return (2/3.0 - np.sin(I)**2) / mean
-
-#Schureman equations 74, 66
-def f_Mf(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = np.sin(omega)**2 * np.cos(0.5*i)**4
- return np.sin(I)**2 / mean
-
-#Schureman equations 75, 67
-def f_O1(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = np.sin(omega) * np.cos(0.5*omega)**2 * np.cos(0.5*i)**4
- return (np.sin(I) * np.cos(0.5*I)**2) / mean
-
-#Schureman equations 76, 68
-def f_J1(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
- return np.sin(2*I) / mean
-
-#Schureman equations 77, 69
-def f_OO1(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = np.sin(omega) * np.sin(0.5*omega)**2 * np.cos(0.5*i)**4
- return np.sin(I) * np.sin(0.5*I)**2 / mean
-
-#Schureman equations 78, 70
-def f_M2(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- mean = np.cos(0.5*omega)**4 * np.cos(0.5*i)**4
- return np.cos(0.5*I)**4 / mean
-
-#Schureman equations 227, 226, 68
-#Should probably eventually include the derivations of the magic numbers (0.5023 etc).
-def f_K1(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- nu = d2r*a['nu'].value
- sin2Icosnu_mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
- mean = 0.5023*sin2Icosnu_mean + 0.1681
- return (0.2523*np.sin(2*I)**2 + 0.1689*np.sin(2*I)*np.cos(nu)+0.0283)**(0.5) / mean
-
-#Schureman equations 215, 213, 204
-#It can be (and has been) confirmed that the exponent for R_a reads 1/2 via Schureman Table 7
-def f_L2(a):
- P = d2r*a['P'].value
- I = d2r*a['I'].value
- R_a_inv = (1 - 12*np.tan(0.5*I)**2 * np.cos(2*P)+36*np.tan(0.5*I)**4)**(0.5)
- return f_M2(a) * R_a_inv
-
-#Schureman equations 235, 234, 71
-#Again, magic numbers
-def f_K2(a):
- omega = d2r*a['omega'].value
- i = d2r*a['i'].value
- I = d2r*a['I'].value
- nu = d2r*a['nu'].value
- sinsqIcos2nu_mean = np.sin(omega)**2 * (1-3/2.0 * np.sin(i)**2)
- mean = 0.5023*sinsqIcos2nu_mean + 0.0365
- return (0.2533*np.sin(I)**4 + 0.0367*np.sin(I)**2 *np.cos(2*nu)+0.0013)**(0.5) / mean
-
-#Schureman equations 206, 207, 195
-def f_M1(a):
- P = d2r*a['P'].value
- I = d2r*a['I'].value
- Q_a_inv = (0.25 + 1.5*np.cos(I)*np.cos(2*P)*np.cos(0.5*I)**(-0.5) + 2.25*np.cos(I)**2 * np.cos(0.5*I)**(-4))**(0.5)
- return f_O1(a) * Q_a_inv
-
-#See e.g. Schureman equation 149
-def f_Modd(a, n):
- return f_M2(a) ** (n / 2.0)
-
-#Node factors u, see Table 2 of Schureman.
-
-def u_zero(a):
- return 0.0
-
-def u_Mf(a):
- return -2.0 * a['xi'].value
-
-def u_O1(a):
- return 2.0 * a['xi'].value - a['nu'].value
-
-def u_J1(a):
- return -a['nu'].value
-
-def u_OO1(a):
- return -2.0 * a['xi'].value - a['nu'].value
-
-def u_M2(a):
- return 2.0 * a['xi'].value - 2.0 * a['nu'].value
-
-def u_K1(a):
- return -a['nup'].value
-
-#Schureman 214
-def u_L2(a):
- I = d2r*a['I'].value
- P = d2r*a['P'].value
- R = r2d*np.arctan(np.sin(2*P)/(1/6.0 * np.tan(0.5*I) **(-2) -np.cos(2*P)))
- return 2.0 * a['xi'].value - 2.0 * a['nu'].value - R
-
-def u_K2(a):
- return -2.0 * a['nupp'].value
-
-#Schureman 202
-def u_M1(a):
- I = d2r*a['I'].value
- P = d2r*a['P'].value
- Q = r2d*np.arctan((5*np.cos(I)-1)/(7*np.cos(I)+1)*np.tan(P))
- return a['xi'].value - a['nu'].value + Q
-
-def u_Modd(a, n):
- return n/2.0 * u_M2(a)
From 30a32f4fb1b69d045832d6fd3bebb7407cf8a20d Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:02:36 -0600
Subject: [PATCH 05/14] Delete noaa_scraper.py
---
tide_predictions/noaa_scraper.py | 138 -------------------------------
1 file changed, 138 deletions(-)
delete mode 100644 tide_predictions/noaa_scraper.py
diff --git a/tide_predictions/noaa_scraper.py b/tide_predictions/noaa_scraper.py
deleted file mode 100644
index 8f5fea0..0000000
--- a/tide_predictions/noaa_scraper.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import datetime
-import requests
-import lxml.html as lh
-import pandas as pd
-import json
-
-from tide import Tide
-import constituent as cons
-import numpy as np
-
-def retrieve_constituents(stationID):
- #Forms URL
- url = 'https://tidesandcurrents.noaa.gov/harcon.html?unit=0&timezone=0&id={}'.format(stationID)
-
- #Requests URL
- page = requests.get(url)
- doc = lh.fromstring(page.content)
- tr_elements = doc.xpath('//tr')
- col = [((t.text_content(),[])) for t in tr_elements[0]]
- for j in range(1, len(tr_elements)):
- T, i = tr_elements[j], 0
- for t in T.iterchildren():
- col[i][1].append(t.text_content())
- i+=1
-
- #Appends data to csv file
- component_dict = {title:column for (title,column) in col}
- component_array = pd.DataFrame(component_dict)
- component_array.to_csv('{}_constituents.csv'.format(stationID), index=False)
-
- return component_dict
-
-
-def retrieve_water_levels(*args):
- #NOAA api
- api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
- '&end_date={}&'.format(args[1].strftime("%Y%m%d %H:%M"), args[2].strftime("%Y%m%d %H:%M"))
-
- #NOAA observed data
- obs_url = 'station={}&product=water_level&datum=MTL&units=metric&time_zone=gmt'\
- '&application=ports_screen&format=json'.format(args[0])
-
- obs_data_page = requests.get(api + obs_url)
- obs_data = obs_data_page.json()['data']
- obs_heights = [float(d['v']) for d in obs_data]
-
- NOAA_times = [datetime.datetime.strptime(d['t'], '%Y-%m-%d %H:%M') for d in obs_data]
-
- component_dict = {'Datetimes': NOAA_times, 'Observed Heights': obs_heights}
- component_array = pd.DataFrame(component_dict)
- component_array.to_csv('{}_NOAA_water_levels.csv'.format(args[0]), index=False)
-
- return NOAA_times, obs_heights
-
-
-def retrieve_predicted_tide(*args):
- #NOAA api
- api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
- '&end_date={}&'.format(args[1].strftime("%Y%m%d %H:%M"), args[2].strftime("%Y%m%d %H:%M"))
-
- #NOAA predicted data
- pred_url = 'station={}&product=predictions&datum=MTL&units=metric&time_zone=gmt'\
- '&application=ports_screen&format=json'.format(args[0])
-
- pred_data_page = requests.get(api + pred_url)
- pred_data = pred_data_page.json()['predictions']
- pred_heights = [float(d['v']) for d in pred_data]
-
- return pred_heights
-
-
-def datum_value(stationID, datum):
- #Scrapes MTL/MSL Datum Value
- datum_url = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?&station={}&product=datums'\
- '&units=metric&time_zone=gmt&application=ports_screen&format=json'.format(stationID)
- page_data = requests.get(datum_url)
- data = page_data.json()['datums']
- datum_value = [d['v'] for d in data if d['n'] == datum]
-
- return float(datum_value[0])
-
-
-def predict_tide(*args):
- #These are the NOAA constituents, in the order presented on their website.
- constituents = [c for c in cons.noaa if c != cons._Z0]
- noaa_values = retrieve_constituents(args[0])
- noaa_amplitudes = [float(amplitude) for amplitude in noaa_values['Amplitude']]
- noaa_phases = [float(phases) for phases in noaa_values['Phase']]
-
- #We can add a constant offset (e.g. for a different datum, we will use relative to MLLW):
- MTL = datum_value(args[0], 'MTL')
- MSL = datum_value(args[0], 'MSL')
- offset = MSL - MTL
- constituents.append(cons._Z0)
- noaa_phases.append(0)
- noaa_amplitudes.append(offset)
-
- #Build the model
- assert(len(constituents) == len(noaa_phases) == len(noaa_amplitudes))
- model = np.zeros(len(constituents), dtype = Tide.dtype)
- model['constituent'] = constituents
- model['amplitude'] = noaa_amplitudes
- model['phase'] = noaa_phases
- tide = Tide(model = model, radians = False)
-
- #Time Calculations
- delta = (args[2]-args[1])/datetime.timedelta(hours=1) + .1
- times = Tide._times(args[1], np.arange(0, delta, .1))
-
- #Height Calculations
- heights_arrays = [tide.at([times[i]]) for i in range(len(times))]
- pytide_heights = [val for sublist in heights_arrays for val in sublist]
-
- return pytide_heights
-
-
-def datetimes(*args):
- #Time Calculations
- delta = (args[1]-args[0])/datetime.timedelta(hours=1) + .1
- times = Tide._times(args[1], np.arange(0, delta, .1))
-
- return times
-
-
-def detide(*args):
- # NOAA observed water level - predicted tide
- return [(args[0][i] - args[1][i]) for i in range(len(args[0]))]
-
-
-def surge(*args):
- #Surge Implementation
- predicted_tide = predict_tide(args[0], args[1], args[2])
- NOAA_times, NOAA_observed_water_level = retrieve_water_levels(args[0], args[1], args[2])
- surge = detide(NOAA_observed_water_level, predicted_tide)
- times = [(time-args[3])/datetime.timedelta(days=1) for time in NOAA_times]
-
- return times, surge
-
From 44e18f6b5ef08baa51da387482d270279235e9a2 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:02:42 -0600
Subject: [PATCH 06/14] Delete constituent.py
---
tide_predictions/constituent.py | 160 --------------------------------
1 file changed, 160 deletions(-)
delete mode 100644 tide_predictions/constituent.py
diff --git a/tide_predictions/constituent.py b/tide_predictions/constituent.py
deleted file mode 100644
index 9fe1fa2..0000000
--- a/tide_predictions/constituent.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import string
-import operator as op
-from functools import reduce
-import numpy as np
-import nodal_corrections as nc
-
-class BaseConstituent(object):
- xdo_int = {
- 'A': 1, 'B': 2, 'C': 3, 'D': 4, 'E': 5, 'F': 6, 'G': 7, 'H': 8, 'I': 9,
- 'J': 10, 'K': 11, 'L': 12, 'M': 13, 'N': 14, 'O': 15, 'P': 16, 'Q': 17,
- 'R': -8, 'S': -7, 'T': -6, 'U': -5, 'V': -4, 'W': -3, 'X': -2, 'Y': -1,
- 'Z': 0
- }
-
- int_xdo = {v:k for k, v in xdo_int.items()}
-
- def __init__(self, name, xdo='', coefficients=[], u=nc.u_zero, f=nc.f_unity):
- if xdo == '':
- self.coefficients = np.array(coefficients)
- else:
- self.coefficients = np.array(self.xdo_to_coefficients(xdo))
- self.name = name
- self.u = u
- self.f = f
-
- def xdo_to_coefficients(self, xdo):
- return [self.xdo_int[l.upper()] for l in xdo if l in string.ascii_letters]
-
- def coefficients_to_xdo(self, coefficients):
- return ''.join([self.int_xdo[c] for c in coefficients])
-
- def V(self, astro):
- return np.dot(self.coefficients, self.astro_values(astro))
-
- def xdo(self):
- return self.coefficients_to_xdo(self.coefficients)
-
- def speed(self, a):
- return np.dot(self.coefficients, self.astro_speeds(a))
-
- def astro_xdo(self, a):
- return [a['T+h-s'], a['s'], a['h'], a['p'], a['N'], a['pp'], a['90']]
-
- def astro_speeds(self, a):
- return np.array([each.speed for each in self.astro_xdo(a)])
-
- def astro_values(self, a):
- return np.array([each.value for each in self.astro_xdo(a)])
-
- #Consider two out of phase constituents which travel at the same speed to
- #be identical
- def __eq__(self, c):
- return np.all(self.coefficients[:-1] == c.coefficients[:-1])
-
- def __hash__(self):
- return hash(tuple(self.coefficients[:-1]))
-
-class CompoundConstituent(BaseConstituent):
-
- def __init__(self, members = [], **kwargs):
- self.members = members
-
- if 'u' not in kwargs:
- kwargs['u'] = self.u
- if 'f' not in kwargs:
- kwargs['f'] = self.f
-
- super(CompoundConstituent,self).__init__(**kwargs)
-
- self.coefficients = reduce(op.add,[c.coefficients * n for (c,n) in members])
-
- def speed(self, a):
- return reduce(op.add, [n * c.speed(a) for (c,n) in self.members])
-
- def V(self, a):
- return reduce(op.add, [n * c.V(a) for (c,n) in self.members])
-
- def u(self, a):
- return reduce(op.add, [n * c.u(a) for (c,n) in self.members])
-
- def f(self, a):
- return reduce(op.mul, [c.f(a) ** abs(n) for (c,n) in self.members])
-
-###### Base Constituents
-#Long Term
-_Z0 = BaseConstituent(name = 'Z0', xdo = 'Z ZZZ ZZZ', u = nc.u_zero, f = nc.f_unity)
-_Sa = BaseConstituent(name = 'Sa', xdo = 'Z ZAZ ZZZ', u = nc.u_zero, f = nc.f_unity)
-_Ssa = BaseConstituent(name = 'Ssa', xdo = 'Z ZBZ ZZZ', u = nc.u_zero, f = nc.f_unity)
-_Mm = BaseConstituent(name = 'Mm', xdo = 'Z AZY ZZZ', u = nc.u_zero, f = nc.f_Mm)
-_Mf = BaseConstituent(name = 'Mf', xdo = 'Z BZZ ZZZ', u = nc.u_Mf, f = nc.f_Mf)
-
-#Diurnals
-_Q1 = BaseConstituent(name = 'Q1', xdo = 'A XZA ZZA', u = nc.u_O1, f = nc.f_O1)
-_O1 = BaseConstituent(name = 'O1', xdo = 'A YZZ ZZA', u = nc.u_O1, f = nc.f_O1)
-_K1 = BaseConstituent(name = 'K1', xdo = 'A AZZ ZZY', u = nc.u_K1, f = nc.f_K1)
-_J1 = BaseConstituent(name = 'J1', xdo = 'A BZY ZZY', u = nc.u_J1, f = nc.f_J1)
-
-#M1 is a tricky business for reasons of convention, rather than theory. The
-#reasons for this are best summarised by Schureman paragraphs 126, 127 and in
-#the comments found in congen_input.txt of xtides, so I won't go over all this
-#again here.
-
-_M1 = BaseConstituent(name = 'M1', xdo = 'A ZZZ ZZA', u = nc.u_M1, f = nc.f_M1)
-_P1 = BaseConstituent(name = 'P1', xdo = 'A AXZ ZZA', u = nc.u_zero, f = nc.f_unity)
-_S1 = BaseConstituent(name = 'S1', xdo = 'A AYZ ZZZ', u = nc.u_zero, f = nc.f_unity)
-_OO1 = BaseConstituent(name = 'OO1', xdo = 'A CZZ ZZY', u = nc.u_OO1, f = nc.f_OO1)
-
-#Semi-Diurnals
-_2N2 = BaseConstituent(name = '2N2', xdo = 'B XZB ZZZ', u = nc.u_M2, f = nc.f_M2)
-_N2 = BaseConstituent(name = 'N2', xdo = 'B YZA ZZZ', u = nc.u_M2, f = nc.f_M2)
-_nu2 = BaseConstituent(name = 'nu2', xdo = 'B YBY ZZZ', u = nc.u_M2, f = nc.f_M2)
-_M2 = BaseConstituent(name = 'M2', xdo = 'B ZZZ ZZZ', u = nc.u_M2, f = nc.f_M2)
-_lambda2 = BaseConstituent(name = 'lambda2', xdo = 'B AXA ZZB', u = nc.u_M2, f = nc.f_M2)
-_L2 = BaseConstituent(name = 'L2', xdo = 'B AZY ZZB', u = nc.u_L2, f = nc.f_L2)
-_T2 = BaseConstituent(name = 'T2', xdo = 'B BWZ ZAZ', u = nc.u_zero, f = nc.f_unity)
-_S2 = BaseConstituent(name = 'S2', xdo = 'B BXZ ZZZ', u = nc.u_zero, f = nc.f_unity)
-_R2 = BaseConstituent(name = 'R2', xdo = 'B BYZ ZYB', u = nc.u_zero, f = nc.f_unity)
-_K2 = BaseConstituent(name = 'K2', xdo = 'B BZZ ZZZ', u = nc.u_K2, f = nc.f_K2)
-
-#Third-Diurnals
-_M3 = BaseConstituent(name = 'M3', xdo = 'C ZZZ ZZZ', u = lambda a: nc.u_Modd(a,3), f = lambda a: nc.f_Modd(a,3))
-
-###### Compound Constituents
-#Long Term
-_MSF = CompoundConstituent(name = 'MSF', members = [(_S2, 1), (_M2, -1)])
-
-#Diurnal
-_2Q1 = CompoundConstituent(name = '2Q1', members = [(_N2, 1), (_J1, -1)])
-_rho1 = CompoundConstituent(name = 'rho1', members = [(_nu2, 1), (_K1, -1)])
-
-#Semi-Diurnal
-
-_mu2 = CompoundConstituent(name = 'mu2', members = [(_M2, 2), (_S2, -1)]) #2MS2
-_2SM2 = CompoundConstituent(name = '2SM2', members = [(_S2, 2), (_M2, -1)])
-
-#Third-Diurnal
-_2MK3 = CompoundConstituent(name = '2MK3', members = [(_M2, 1), (_O1, 1)])
-_MK3 = CompoundConstituent(name = 'MK3', members = [(_M2, 1), (_K1, 1)])
-
-#Quarter-Diurnal
-_MN4 = CompoundConstituent(name = 'MN4', members = [(_M2, 1), (_N2, 1)])
-_M4 = CompoundConstituent(name = 'M4', members = [(_M2, 2)])
-_MS4 = CompoundConstituent(name = 'MS4', members = [(_M2, 1), (_S2, 1)])
-_S4 = CompoundConstituent(name = 'S4', members = [(_S2, 2)])
-
-#Sixth-Diurnal
-_M6 = CompoundConstituent(name = 'M6', members = [(_M2, 3)])
-_S6 = CompoundConstituent(name = 'S6', members = [(_S2, 3)])
-
-#Eighth-Diurnals
-_M8 = CompoundConstituent(name = 'M8', members = [(_M2, 4)])
-
-
-noaa = [
- _M2, _S2, _N2, _K1, _M4, _O1, _M6, _MK3, _S4, _MN4, _nu2, _S6, _mu2,
- _2N2, _OO1, _lambda2, _S1, _M1, _J1, _Mm, _Ssa, _Sa, _MSF, _Mf,
- _rho1, _Q1, _T2, _R2, _2Q1, _P1, _2SM2, _M3, _L2, _2MK3, _K2,
- _M8, _MS4
-]
-
From 21acb47e89e230a3975eb52f234f6bbf58b228c7 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:03:18 -0600
Subject: [PATCH 07/14] Update README.md
---
tide_predictions/README.md | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/tide_predictions/README.md b/tide_predictions/README.md
index 799f3c5..c11e843 100644
--- a/tide_predictions/README.md
+++ b/tide_predictions/README.md
@@ -11,9 +11,7 @@ Harmonic Constituents data is scraped from NOAA's gauge station harmonic constit
* Scipy
* Pandas
* Request
-* lxml
-
-To install dependencies, run pip install -r requirements.txt .
+* LXML
# Usage
Use [Boundary_Conditions.ipynb](Boundary_Conditions.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in gauge_afteraxes( ) in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
From 21802ccd00947cc39363b3e9e367413a5fc01db2 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:03:34 -0600
Subject: [PATCH 08/14] Delete requirements.txt
---
tide_predictions/requirements.txt | 7 -------
1 file changed, 7 deletions(-)
delete mode 100644 tide_predictions/requirements.txt
diff --git a/tide_predictions/requirements.txt b/tide_predictions/requirements.txt
deleted file mode 100644
index e5e854b..0000000
--- a/tide_predictions/requirements.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-###### Requirements ######
-numpy
-scipy
-matplotlib
-pandas
-requests
-lxml
\ No newline at end of file
From f9c3cc5b12e68c434deda497f1fcd6360058bd49 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Sun, 27 Feb 2022 22:05:12 -0600
Subject: [PATCH 09/14] Update README.md
---
tide_predictions/README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/tide_predictions/README.md b/tide_predictions/README.md
index c11e843..a185112 100644
--- a/tide_predictions/README.md
+++ b/tide_predictions/README.md
@@ -14,9 +14,9 @@ Harmonic Constituents data is scraped from NOAA's gauge station harmonic constit
* LXML
# Usage
-Use [Boundary_Conditions.ipynb](Boundary_Conditions.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in gauge_afteraxes( ) in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
+Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in gauge_afteraxes( ) in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
Modify NOAA station ID and date(s) of prediction (UTC) to make your own example.
# Acknowledgments
-This code was modified from [Pytides](https://github.com/sam-cox/pytides) to work with Python 3 and to more easily predict NOAA gauge station tides and storm surges with only calling surge( ) method with a stationID, a beginning date, an end date, and a landfall date in setplot.py to compare Clawpack storm surge ouput to NOAA's observed storm surge.
+This code was modified from [Pytides](https://github.com/sam-cox/pytides) to work with Python 3 and to more easily predict NOAA gauge station tide by calling predict_tide( ) method with a stationID, a beginning date and an end date, and storm surges with only calling surge( ) method with a stationID, a beginning date, an end date, and a landfall date in setplot.py to compare Clawpack storm surge output to NOAA's observed storm surge.
From 0f20b3c673fd8649a1952c8721afb6a61fc345a8 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Wed, 2 Mar 2022 15:45:24 -0600
Subject: [PATCH 10/14] Update README.md
---
tide_predictions/README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tide_predictions/README.md b/tide_predictions/README.md
index a185112..975bb69 100644
--- a/tide_predictions/README.md
+++ b/tide_predictions/README.md
@@ -14,7 +14,7 @@ Harmonic Constituents data is scraped from NOAA's gauge station harmonic constit
* LXML
# Usage
-Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in gauge_afteraxes( ) in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
+Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in '''gauge_afteraxes( )''' in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
Modify NOAA station ID and date(s) of prediction (UTC) to make your own example.
From 14be4f854c2cecb2882ffc18ff9368d1103d7e4d Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Wed, 2 Mar 2022 15:45:42 -0600
Subject: [PATCH 11/14] Update README.md
---
tide_predictions/README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tide_predictions/README.md b/tide_predictions/README.md
index 975bb69..510bf4d 100644
--- a/tide_predictions/README.md
+++ b/tide_predictions/README.md
@@ -14,7 +14,7 @@ Harmonic Constituents data is scraped from NOAA's gauge station harmonic constit
* LXML
# Usage
-Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in '''gauge_afteraxes( )''' in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
+Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in '''gauge_afteraxes( )''' in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
Modify NOAA station ID and date(s) of prediction (UTC) to make your own example.
From cd44dd3c67b71503d3156361bf6ac5dbdb770262 Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Wed, 2 Mar 2022 15:46:26 -0600
Subject: [PATCH 12/14] Update README.md
---
tide_predictions/README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tide_predictions/README.md b/tide_predictions/README.md
index 510bf4d..19886d2 100644
--- a/tide_predictions/README.md
+++ b/tide_predictions/README.md
@@ -14,7 +14,7 @@ Harmonic Constituents data is scraped from NOAA's gauge station harmonic constit
* LXML
# Usage
-Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in '''gauge_afteraxes( )''' in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
+Use [Tide_Module_Examples.ipynb](Tide_Module_Examples.ipynb) to modify tide prediction examples, develop your own tide predictions, and obtain code for Clawpack implementation if placed in ```gauge_afteraxes( )``` in setplot.py and calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) to obtain NOAA's observed storm surge.
Modify NOAA station ID and date(s) of prediction (UTC) to make your own example.
From 6084644b35f0fee0f93a7bf741f9528d5511adca Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Wed, 2 Mar 2022 15:58:20 -0600
Subject: [PATCH 13/14] Updated jupyter notebook
---
tide_predictions/Tide_Module_Examples.ipynb | 153 ++++++++++----------
1 file changed, 76 insertions(+), 77 deletions(-)
diff --git a/tide_predictions/Tide_Module_Examples.ipynb b/tide_predictions/Tide_Module_Examples.ipynb
index 7583e88..04bf986 100644
--- a/tide_predictions/Tide_Module_Examples.ipynb
+++ b/tide_predictions/Tide_Module_Examples.ipynb
@@ -12,23 +12,23 @@
},
{
"cell_type": "markdown",
- "id": "progressive-revolution",
+ "id": "proper-patch",
"metadata": {},
"source": [
- " - retrieve_constituents - retrieves harmonic constituents from NOAA gauge station\n",
- " - retrieve_water_levels() - retrieves observed water levels from NOAA's API\n",
- " - retrieve_predicted_tide() - retrieves predicted tide from NOAA's API\n",
- " - datetimes - prepares a collection of datetimes from beginning to end dates\n",
+ " - retrieve_constituents - retrieves harmonic constituents from NOAA gauge station\n",
+ " - fetch_noaa_tide_data() - retrieves datetimes, water levels and tide predictions at given NOAA tide station from NOAA's API\n",
" \n",
- " - predict_tide() - predicts tide for desired NOAA station given station ID, start date and end date for prediction \n",
- " - datum_value - retrieves datum value for desired datum reference, hardcoded for MTL\n",
- " - detide() - detides observed water levels\n",
- " - surge() - predicts surge at NOAA gauge station provided station ID, start date, end date, and landfall date, best for Clawpack Simulation!"
+ " - datetimes - prepares a collection of datetimes from beginning to end dates if needed\n",
+ " \n",
+ " - predict_tide() - predicts tide for desired NOAA station given station ID, start date and end date for prediction \n",
+ " - datum_value - retrieves datum value for desired datum reference, utilized by predict_tide to obtain MTL value\n",
+ " - detide() - detides observed water levels with predicted tide\n",
+ " - surge() - predicts surge at NOAA gauge station provided station ID, start date, end date, and landfall date, best for a Clawpack Simulation!"
]
},
{
"cell_type": "markdown",
- "id": "valued-rocket",
+ "id": "noticed-delhi",
"metadata": {},
"source": [
"# Example of Tide Prediction For One Date Instance\n",
@@ -47,7 +47,9 @@
"source": [
"import matplotlib.pyplot as plt\n",
"import datetime\n",
- "import tide "
+ "import clawpack.geoclaw.tide as tide\n",
+ "\n",
+ "#%env CLAW="
]
},
{
@@ -64,7 +66,7 @@
"metadata": {},
"source": [
"Locate NOAA station ID. NOAA gauge stations home: https://tidesandcurrents.noaa.gov/
\n",
- "Fill in station ID and date for tide prediction!"
+ "Fill in station ID, reference datum and date instance for tide prediction!"
]
},
{
@@ -75,10 +77,11 @@
"outputs": [],
"source": [
"#Station Information\n",
- "stationID = '8518750'\n",
+ "station_id = '8761724'\n",
+ "datum = 'MTL'\n",
"\n",
"#Date of prediction (YEAR, MTH, DAY, HR)\n",
- "prediction_date = datetime.datetime(2017,10,5,0)"
+ "prediction_date = datetime.datetime(2005, 8, 29, 11)"
]
},
{
@@ -94,9 +97,11 @@
"id": "67132515",
"metadata": {},
"source": [
- "Prediction of tide at specified location (station ID) and specified time (GMT) implemented below by calling predict_tide( ) method with the following arguments: stationID, beg_prediction_date, end_prediction_date.
\n",
+ "Prediction of tide at specified location (station ID) and specified time (GMT) implemented below by calling predict_tide() method with the following arguments: station_id, beg_prediction_date, end_prediction_date. Note: datum argument is optional\n",
+ "\n",
+ "
\n",
"\n",
- "To predict tide at an instant, set beg_prediction_date and end_prediction_date in predict_tide( ) method to the same date!"
+ "To predict tide at an instant, set beg_prediction_date and end_prediction_date in predict_tide() method to the same date!"
]
},
{
@@ -107,7 +112,8 @@
"outputs": [],
"source": [
"#NOAA Data Scraping Implementation \n",
- "height = tide.predict_tide(stationID, prediction_date, prediction_date) # in meters\n",
+ "height = tide.predict_tide(station_id, prediction_date, prediction_date, datum='MTL')\n",
+ "times = tide.datetimes(prediction_date, prediction_date) # in meters\n",
"print(height[0], \"meters\")"
]
},
@@ -153,14 +159,15 @@
"outputs": [],
"source": [
"#Station Information\n",
- "stationID = '8518750'\n",
+ "station_id = '8761724'\n",
+ "datum = 'MTL'\n",
"\n",
"#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
+ "beg_date = datetime.datetime(2005, 8, 26, hour=0)\n",
+ "end_date = datetime.datetime(2005, 8, 31, hour=0)\n",
"\n",
- "#Predict tide with arguments set as: (stationID, beg_prediction_date, end_prediction_date)\n",
- "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)"
+ "#Predict tide with arguments set as: (station_id, beg_prediction_date, end_prediction_date)\n",
+ "predicted_tide = tide.predict_tide(station_id, beg_date, end_date, datum='MTL')"
]
},
{
@@ -186,7 +193,7 @@
"plt.plot(times, predicted_tide, \"-\", label=\"Tide Prediction\")\n",
"plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
"plt.ylabel('Meters'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('My Prediction for Station {}'.format(stationID))\n",
+ "plt.title('My Prediction for Station {}'.format(station_id))\n",
"plt.show()"
]
},
@@ -216,14 +223,15 @@
"outputs": [],
"source": [
"#Station Information\n",
- "stationID = '8518750'\n",
+ "station_id = '8761724'\n",
+ "datum = 'MTL'\n",
"\n",
"#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,2,0,0)\n",
+ "beg_date = datetime.datetime(2005, 8, 26)\n",
+ "end_date = datetime.datetime(2005, 8, 31)\n",
"\n",
"#Predict Tide \n",
- "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)"
+ "predicted_tide = tide.predict_tide(station_id, beg_date, end_date, datum='MTL')"
]
},
{
@@ -231,9 +239,8 @@
"id": "0bcba7b5",
"metadata": {},
"source": [
- "- Calling function retrieve_water_levels( ) with arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's datetimes and observed water level data for the specified NOAA station in the date interval provided\n",
- "- The function retrieve_predicted_tide( ) arguments set as: (stationID, beg_prediction_date, end_prediction_date) retrieves and downloads NOAA's predicted tide data for the specified NOAA station. \n",
- "- Data is scraped in Metric units, GMT timezone, MSL datum and 6 min interval. "
+ "- Calling function fetch_noaa_tide_data() with arguments set as (station_id, beg_prediction_date, end_prediction_date) retrieves datetimes, water levels and tide predictions for the specified NOAA station in the date interval provided from NOAA's API\n",
+ "- Data is scraped in Metric units, GMT timezone, MTL datum and 6 min intervals. These arguments are optional in fetch_noaa_tide_data()."
]
},
{
@@ -245,8 +252,8 @@
},
"outputs": [],
"source": [
- "times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
- "NOAA_predicted_tide = tide.retrieve_predicted_tide(stationID, beg_date, end_date)"
+ "#Retrieve NOAA Tide Data\n",
+ "times, NOAA_observed_water_lvl, NOAA_predicted_tide = tide.fetch_noaa_tide_data(station_id, beg_date, end_date, datum='MTL')"
]
},
{
@@ -263,7 +270,7 @@
"plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Water Level Observation\")\n",
"plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
"plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ "plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(station_id))\n",
"plt.show()"
]
},
@@ -288,9 +295,9 @@
"id": "ff60fcac",
"metadata": {},
"source": [
- "- Calling predict_tide( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) will output predicted tide at the specified location and time interval\n",
- "- Calling retrieve_water_levels( ) method with arguments set as: (stationID, beg_prediction_date, end_prediction_date) with output datetimes and observed water levels at the specified location and time interval\n",
- "- Calling detide( ) method with arguments set as: (NOAA observed water level, predicted tide) will output detided water level. "
+ "- Calling predict_tide() method with arguments set as: (station_id, beg_prediction_date, end_prediction_date) outputs predicted tide\n",
+ "- Calling fetch_noaa_tide_data() with arguments set as (station_id, beg_prediction_date, end_prediction_date) retrieves datetimes, water levels and tide predictions from NOAA\n",
+ "- Calling detide() method with arguments set as: (NOAA observed water level, predicted tide) will output detided water level. "
]
},
{
@@ -301,14 +308,15 @@
"outputs": [],
"source": [
"#Station Information\n",
- "stationID = '8518750'\n",
+ "station_id = '8761724'\n",
+ "datum = 'MTL'\n",
"\n",
"#Beginning and End Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
+ "beg_date = datetime.datetime(2005, 8, 26)\n",
+ "end_date = datetime.datetime(2005, 8, 31)\n",
"\n",
- "predicted_tide = tide.predict_tide(stationID, beg_date, end_date)\n",
- "times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
+ "predicted_tide = tide.predict_tide(station_id, beg_date, end_date)\n",
+ "times, NOAA_observed_water_lvl, NOAA_predicted_tide = tide.fetch_noaa_tide_data(station_id, beg_date, end_date, datum='MTL')\n",
"\n",
"surge = tide.detide(NOAA_observed_water_lvl, predicted_tide)\n",
"\n",
@@ -317,7 +325,7 @@
"plt.plot(times, surge, \"-\", label=\"Our Surge Prediction\")\n",
"plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
"plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- "plt.title('Detided Water Level for Station {}'.format(stationID))\n",
+ "plt.title('Detided Water Level for Station {}'.format(station_id))\n",
"plt.show()"
]
},
@@ -342,9 +350,8 @@
"id": "44b5db31",
"metadata": {},
"source": [
- "- Uncomment line below to utilize Clawpack's pytides module located under geoclaw\n",
- "- Code below works best if placed in gauge_afteraxes( ) in setplot.py\n",
- "- Calling surge( ) method with arguments set as: (stationID, beginning_date, end_date, landfall_date) will output observed surge from NOAA."
+ "- Code below works best if placed in gauge_afteraxes( ) in setplot.py for a storm simulation.\n",
+ "- Calling surge() method with arguments set as: (station_id, beginning_date, end_date, landfall_date) will output storm surge from NOAA observed water levels and predicted tide."
]
},
{
@@ -366,15 +373,15 @@
"outputs": [],
"source": [
"#Station Information\n",
- "stationID = '8518750'\n",
+ "station_id = '8761724'\n",
"\n",
- "#Beginning, End, Landfall Dates \n",
- "beg_date = datetime.datetime(2017,10,1,0,0)\n",
- "end_date = datetime.datetime(2017,10,5,0,0)\n",
- "landfall_date = datetime.datetime(2017,10,3,12,0)\n",
+ "#Beginning, End, Landfall Dates\n",
+ "beg_date = datetime.datetime(2005, 8, 26)\n",
+ "end_date = datetime.datetime(2005, 8, 31)\n",
+ "landfall_date = datetime.datetime(2005, 8, 29, 11, 10)\n",
"\n",
"# Surge Prediction\n",
- "times, surge = tide.surge(stationID, beg_date, end_date, landfall_date)\n",
+ "times, surge = tide.surge(station_id, beg_date, end_date, landfall_date)\n",
"plt.plot(times, surge, color=\"b\", label=\"Our Prediction\")"
]
},
@@ -402,7 +409,7 @@
"outputs": [],
"source": [
"import clawpack.geoclaw.tide as tide\n",
- "from datetime import datetime"
+ "import datetime"
]
},
{
@@ -414,25 +421,25 @@
},
"outputs": [],
"source": [
- "\n",
- "station_dict = {'8518750':(datetime(2017,10,1,0), datetime(2017,10,5,0), datetime(2017,10,3,0)),\n",
- " '8540433':(datetime(2017,9,1,0), datetime(2017,9,10,12), datetime(2017,9,5,6)),\n",
- " '8531680':(datetime(2020,10,1,0), datetime(2020,10,10,0), datetime(2020,10,6,0)),\n",
- " '8722956':(datetime(2019,8,1,0), datetime(2019,8,12,0), datetime(2019,8,6,12)),\n",
- " '8658120':(datetime(2018,11,1,0), datetime(2018,11,8,0), datetime(2018,11,3,18)),\n",
- " '8516945':(datetime(2013,1,1,0), datetime(2013,1,6,0), datetime(2013,1,3,12))}\n",
+ "station_dict = {'8761724': ('Grand Isle, LA', (2005, 8, 26), (2005, 8, 31), (2005, 8, 29, 11, 10)), #katrina\n",
+ " '8760922': ('Pilots Station East, SW Pass, LA', (2005, 8, 26), (2005, 8, 31), (2005, 8, 29, 11)), #michael\n",
+ " '8658120': ('Wilmington, NC', (2016, 10, 6, 12), (2016, 10, 9, 12), (2016, 10, 8, 12)), #matthew\n",
+ " '8721604': ('Trident Pier, Port Canaveral, FL', (2019, 8, 24), (2019, 9, 9), (2019, 9, 4, 12)), #dorian\n",
+ " '8723970': ('Vaca Key, Florida Bay, FL', (2017, 9, 6, 13), (2017, 9, 12, 13), (2017, 9, 10, 13)) #irma\n",
+ " }\n",
"\n",
"for (key, value) in station_dict.items():\n",
- " stationID = key\n",
- " beg_date = value[0]\n",
- " end_date = value[1]\n",
- " landfall_date = value[2]\n",
+ " station_id = key\n",
+ " station_name = value[0]\n",
+ " beg_date = datetime.datetime(*value[1])\n",
+ " end_date = datetime.datetime(*value[2])\n",
+ " landfall_date = datetime.datetime(*value[3])\n",
" \n",
" #NOAA Data Scraping Implementation\n",
- " predicted_tide = tide.predict_tide(stationID, beg_date, end_date) \n",
- " times, NOAA_observed_water_lvl = tide.retrieve_water_levels(stationID, beg_date, end_date)\n",
- " NOAA_predicted_tide = tide.retrieve_predicted_tide(stationID, beg_date, end_date)\n",
+ " predicted_tide = tide.predict_tide(station_id, beg_date, end_date) \n",
" \n",
+ " times, NOAA_observed_water_lvl, NOAA_predicted_tide = tide.fetch_noaa_tide_data(station_id, beg_date, end_date, datum='MTL')\n",
+ "\n",
" #Detide Water Level\n",
" surge = tide.detide(NOAA_observed_water_lvl, predicted_tide)\n",
" NOAA_surge = tide.detide(NOAA_observed_water_lvl, NOAA_predicted_tide)\n",
@@ -444,7 +451,7 @@
" plt.plot(times, NOAA_observed_water_lvl, \"-\", label=\"NOAA Water Level Observation\")\n",
" plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
" plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- " plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ " plt.title('Comparison of Our Prediction vs NOAA prediction for Station {}, {}'.format(station_id, station_name))\n",
" plt.show()\n",
" \n",
" #Detided Water Level Comparison\n",
@@ -453,24 +460,16 @@
" plt.plot(times, NOAA_surge, \"-\", label=\"NOAA's Detided Prediction\")\n",
" plt.xlabel('Hours since ' + str(beg_date) + ' (GMT)')\n",
" plt.ylabel('Metres'), plt.margins(x=0), plt.legend(loc = 'best')\n",
- " plt.title('Detided Water Level Comparison of Our Prediction vs NOAA prediction for Station {}'.format(stationID))\n",
+ " plt.title('Detided Water Level Comparison of Our Prediction vs NOAA prediction for Station {}, {}'.format(station_id, station_name))\n",
" plt.show()\n",
" \n",
" \n",
" #### Clawpack Implementation (in setplot.py) ####\n",
- " times, surge = tide.surge(stationID, beg_date, end_date, landfall_date)\n",
+ " times, surge = tide.surge(station_id, beg_date, end_date, landfall_date)\n",
" plt.plot(times, surge, color=\"b\", label=\"Our Surge Prediction\")\n",
" \n",
" "
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "dd4972d2",
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
From ac99cdb8e1ada7bea68ee2c41b0a7344c6c7501e Mon Sep 17 00:00:00 2001
From: Jonathan Socoy <72121903+socoyjonathan@users.noreply.github.com>
Date: Wed, 2 Mar 2022 15:58:37 -0600
Subject: [PATCH 14/14] Delete tide.py
---
tide_predictions/tide.py | 1074 --------------------------------------
1 file changed, 1074 deletions(-)
delete mode 100644 tide_predictions/tide.py
diff --git a/tide_predictions/tide.py b/tide_predictions/tide.py
deleted file mode 100644
index 02f4d7c..0000000
--- a/tide_predictions/tide.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-#!/usr/bin/env python
-
-"""
-GeoClaw util Module `$CLAW/geoclaw/src/python/geoclaw/tide.py`
-
-Module provides tide prediction functions.
-
-:Functions:
-
- - retrieve_constituents - retrieves harmonic constituents from NOAA gauge station
- - retrieve_water_levels - retrieves observed water levels from NOAA's API
- - retrieve_predicted_tide - retrieves predicted tide from NOAA's API
- - datum_value - retrieves datum value for desired datum reference
- - predict_tide - predicts tide with Pytides
- - datetimes - prepares a collection of datetimes from beginning to end dates
- - detide - detides observed water levels
- - surge - predicts surge at NOAA gauge station
-"""
-
-from collections.abc import Iterable
-from collections import OrderedDict, namedtuple
-from scipy.optimize import leastsq, fsolve
-from itertools import takewhile, count
-from datetime import datetime, timedelta
-from functools import reduce
-
-try:
- from itertools import izip, ifilter
-except ImportError: #Python3
- izip = zip
- ifilter = filter
-
-try:
- import requests
- import json
- import string
- import lxml.html as lh
- import pandas as pd
- import operator as op
- import numpy as np
-except ImportError as e:
- print(e)
-
-
-d2r, r2d = np.pi/180.0, 180.0/np.pi
-
-
-######################## Tide Prediction Functions ########################
-
def retrieve_constituents(stationID):
    """Scrape the harmonic constituent table for a NOAA gauge station.

    Arguments:
      stationID -- NOAA gauge station ID (string)

    Returns a dict mapping each table column title (e.g. 'Name',
    'Amplitude', 'Phase') to its list of cell values (strings).
    """
    # NOAA publishes harmonic constituents only as an HTML page, so the
    # table is parsed row-by-row with lxml rather than fetched as JSON.
    url = 'https://tidesandcurrents.noaa.gov/harcon.html?unit=0&timezone=0&id={}'.format(stationID)

    page = requests.get(url)
    doc = lh.fromstring(page.content)
    rows = doc.xpath('//tr')

    # First row holds the column titles; the remaining rows hold values.
    columns = [(header.text_content(), []) for header in rows[0]]
    for row in rows[1:]:
        for (_, values), cell in zip(columns, row.iterchildren()):
            values.append(cell.text_content())

    return {title: values for (title, values) in columns}
-
-
def retrieve_water_levels(station_id, beg_date, end_date):
    """Retrieve observed water levels from NOAA's API.

    Arguments:
      station_id -- NOAA gauge station ID (string)
      beg_date, end_date -- datetimes bounding the request (GMT)

    Returns (times, heights): parallel lists of datetimes and observed
    water levels (metric units, MTL datum, GMT).
    """
    #NOAA api -- dates are formatted as the API's "yyyymmdd HH:MM"
    api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
        '&end_date={}&'.format(beg_date.strftime("%Y%m%d %H:%M"), end_date.strftime("%Y%m%d %H:%M"))

    #NOAA observed data
    obs_url = 'station={}&product=water_level&datum=MTL&units=metric&time_zone=gmt'\
        '&application=ports_screen&format=json'.format(station_id)

    obs_data_page = requests.get(api + obs_url)
    obs_data = obs_data_page.json()['data']

    obs_heights = [float(d['v']) for d in obs_data]
    NOAA_times = [datetime.strptime(d['t'], '%Y-%m-%d %H:%M') for d in obs_data]

    return NOAA_times, obs_heights
-
-
def retrieve_predicted_tide(station_id, beg_date, end_date):
    """Retrieve NOAA's own predicted tide from its API.

    Arguments:
      station_id -- NOAA gauge station ID (string)
      beg_date, end_date -- datetimes bounding the request (GMT)

    Returns a list of predicted heights (metric units, MTL datum).
    """
    #NOAA api -- dates are formatted as the API's "yyyymmdd HH:MM"
    api = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?begin_date={}'\
        '&end_date={}&'.format(beg_date.strftime("%Y%m%d %H:%M"), end_date.strftime("%Y%m%d %H:%M"))

    #NOAA predicted data
    pred_url = 'station={}&product=predictions&datum=MTL&units=metric&time_zone=gmt'\
        '&application=ports_screen&format=json'.format(station_id)

    pred_data_page = requests.get(api + pred_url)
    pred_data = pred_data_page.json()['predictions']

    return [float(d['v']) for d in pred_data]
-
-
def datum_value(stationID, datum):
    """Fetch the value of a named datum (e.g. 'MTL', 'MSL') for a station.

    Arguments:
      stationID -- NOAA gauge station ID (string)
      datum -- datum name as it appears in NOAA's datums product

    Returns the datum value as a float (metric units).
    """
    #Scrapes MTL/MSL Datum Value
    datum_url = 'https://api.tidesandcurrents.noaa.gov/api/prod/datagetter?&station={}&product=datums'\
        '&units=metric&time_zone=gmt&application=ports_screen&format=json'.format(stationID)
    response = requests.get(datum_url)
    entries = response.json()['datums']
    matches = [entry['v'] for entry in entries if entry['n'] == datum]
    return float(matches[0])
-
-
def predict_tide(station_id, beg_date, end_date):
    """Predict the tide at a NOAA station using the Pytides harmonic method.

    Arguments:
      station_id -- NOAA gauge station ID (string)
      beg_date, end_date -- datetimes bounding the prediction window

    Returns a list of predicted heights (metres, MTL datum) sampled every
    0.1 hour from beg_date through end_date.
    """
    #These are the NOAA constituents, in the order presented on NOAA's website.
    constituents = [c for c in noaa if c != _Z0]
    noaa_values = retrieve_constituents(station_id)
    noaa_amplitudes = [float(amplitude) for amplitude in noaa_values['Amplitude']]
    noaa_phases = [float(phase) for phase in noaa_values['Phase']]

    #Constant Z0 offset shifts the prediction from MSL to the MTL datum.
    MTL = datum_value(station_id, 'MTL')
    MSL = datum_value(station_id, 'MSL')
    offset = MSL - MTL
    constituents.append(_Z0)
    noaa_phases.append(0)
    noaa_amplitudes.append(offset)

    #Build the model
    assert(len(constituents) == len(noaa_phases) == len(noaa_amplitudes))
    model = np.zeros(len(constituents), dtype = Tide.dtype)
    model['constituent'] = constituents
    model['amplitude'] = noaa_amplitudes
    model['phase'] = noaa_phases
    tide = Tide(model = model, radians = False)

    #Sample every 0.1 h; the +.1 makes the arange inclusive of end_date.
    delta = (end_date - beg_date)/timedelta(hours=1) + .1
    times = Tide._times(beg_date, np.arange(0, delta, .1))

    #Evaluate one instant at a time so each point's equilibrium argument is
    #taken at that instant (preserves the original behaviour of Tide.at).
    return [height for t in times for height in tide.at([t])]
-
-
def datetimes(beg_date, end_date):
    """Build the 0.1-hour-spaced datetime grid used by predict_tide.

    Arguments:
      beg_date, end_date -- datetimes bounding the window

    Returns an ndarray of datetimes from beg_date through end_date.
    """
    delta = (end_date - beg_date)/timedelta(hours=1) + .1
    #Bug fix: the grid must start at beg_date; the original anchored it at
    #the END date, producing times entirely outside the requested window
    #(predict_tide anchors the same grid at the start date).
    return Tide._times(beg_date, np.arange(0, delta, .1))
-
-
def detide(observed, predicted):
    """Subtract the predicted tide from observed water levels.

    Arguments:
      observed -- sequence of observed water levels
      predicted -- sequence of predicted tide heights

    Returns a list of residuals (observed - predicted).  If the inputs
    differ in length the extra trailing values are ignored, instead of
    raising IndexError as the original index-based loop did.
    """
    return [obs - pred for obs, pred in zip(observed, predicted)]
-
-
def surge(station_id, beg_date, end_date, landfall_date):
    """Compute the storm surge (observed minus predicted tide) at a station.

    Arguments:
      station_id -- NOAA gauge station ID (string)
      beg_date, end_date -- datetimes bounding the window
      landfall_date -- datetime used as the time origin of the output

    Returns (times, surge): times in days relative to landfall_date, and
    the detided observed water levels (metres).
    """
    predicted_tide = predict_tide(station_id, beg_date, end_date)
    NOAA_times, NOAA_observed_water_level = retrieve_water_levels(station_id, beg_date, end_date)
    residuals = detide(NOAA_observed_water_level, predicted_tide)
    #Express times as fractional days before/after landfall for plotting.
    times = [(time - landfall_date)/timedelta(days=1) for time in NOAA_times]

    return times, residuals
-
-
-
-######################## Nodal Corrections ########################
-
# Node factors f (Schureman Table 2): slowly varying corrections that scale
# each constituent's amplitude over the 18.6-year lunar nodal cycle.  The
# argument `a` is the astronomical-state mapping passed in by Tide._prepare
# (built from astro(t)); each entry's .value is an angle in degrees,
# converted to radians here via d2r.

def f_unity(a):
    # Purely solar constituents need no nodal correction.
    return 1.0

#Schureman equations 73, 65
def f_Mm(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = (2/3.0 - np.sin(omega)**2)*(1 - 3/2.0 * np.sin(i)**2)
    return (2/3.0 - np.sin(I)**2) / mean

#Schureman equations 74, 66
def f_Mf(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = np.sin(omega)**2 * np.cos(0.5*i)**4
    return np.sin(I)**2 / mean

#Schureman equations 75, 67
def f_O1(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = np.sin(omega) * np.cos(0.5*omega)**2 * np.cos(0.5*i)**4
    return (np.sin(I) * np.cos(0.5*I)**2) / mean

#Schureman equations 76, 68
def f_J1(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
    return np.sin(2*I) / mean

#Schureman equations 77, 69
def f_OO1(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = np.sin(omega) * np.sin(0.5*omega)**2 * np.cos(0.5*i)**4
    return np.sin(I) * np.sin(0.5*I)**2 / mean

#Schureman equations 78, 70
def f_M2(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    mean = np.cos(0.5*omega)**4 * np.cos(0.5*i)**4
    return np.cos(0.5*I)**4 / mean

#Schureman equations 227, 226, 68
#Should probably eventually include the derivations of the magic numbers (0.5023 etc).
def f_K1(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    nu = d2r*a['nu'].value
    sin2Icosnu_mean = np.sin(2*omega) * (1-3/2.0 * np.sin(i)**2)
    mean = 0.5023*sin2Icosnu_mean + 0.1681
    return (0.2523*np.sin(2*I)**2 + 0.1689*np.sin(2*I)*np.cos(nu)+0.0283)**(0.5) / mean

#Schureman equations 215, 213, 204
#It can be (and has been) confirmed that the exponent for R_a reads 1/2 via Schureman Table 7
def f_L2(a):
    P = d2r*a['P'].value
    I = d2r*a['I'].value
    R_a_inv = (1 - 12*np.tan(0.5*I)**2 * np.cos(2*P)+36*np.tan(0.5*I)**4)**(0.5)
    return f_M2(a) * R_a_inv

#Schureman equations 235, 234, 71
#Again, magic numbers
def f_K2(a):
    omega = d2r*a['omega'].value
    i = d2r*a['i'].value
    I = d2r*a['I'].value
    nu = d2r*a['nu'].value
    sinsqIcos2nu_mean = np.sin(omega)**2 * (1-3/2.0 * np.sin(i)**2)
    mean = 0.5023*sinsqIcos2nu_mean + 0.0365
    return (0.2533*np.sin(I)**4 + 0.0367*np.sin(I)**2 *np.cos(2*nu)+0.0013)**(0.5) / mean

#Schureman equations 206, 207, 195
def f_M1(a):
    P = d2r*a['P'].value
    I = d2r*a['I'].value
    Q_a_inv = (0.25 + 1.5*np.cos(I)*np.cos(2*P)*np.cos(0.5*I)**(-0.5) + 2.25*np.cos(I)**2 * np.cos(0.5*I)**(-4))**(0.5)
    return f_O1(a) * Q_a_inv

#See e.g. Schureman equation 149
def f_Modd(a, n):
    # Overtides of M2 (e.g. M3): the M2 factor raised to n/2.
    return f_M2(a) ** (n / 2.0)
-
#Node factors u, see Table 2 of Schureman.
# These are the nodal corrections to each constituent's PHASE, returned in
# degrees.  As above, `a` is the astronomical-state mapping whose entries
# expose angles (degrees) through .value.

def u_zero(a):
    # No phase correction (solar constituents).
    return 0.0

def u_Mf(a):
    return -2.0 * a['xi'].value

def u_O1(a):
    return 2.0 * a['xi'].value - a['nu'].value

def u_J1(a):
    return -a['nu'].value

def u_OO1(a):
    return -2.0 * a['xi'].value - a['nu'].value

def u_M2(a):
    return 2.0 * a['xi'].value - 2.0 * a['nu'].value

def u_K1(a):
    return -a['nup'].value

#Schureman 214
def u_L2(a):
    I = d2r*a['I'].value
    P = d2r*a['P'].value
    R = r2d*np.arctan(np.sin(2*P)/(1/6.0 * np.tan(0.5*I) **(-2) -np.cos(2*P)))
    return 2.0 * a['xi'].value - 2.0 * a['nu'].value - R

def u_K2(a):
    return -2.0 * a['nupp'].value

#Schureman 202
def u_M1(a):
    I = d2r*a['I'].value
    P = d2r*a['P'].value
    Q = r2d*np.arctan((5*np.cos(I)-1)/(7*np.cos(I)+1)*np.tan(P))
    return a['xi'].value - a['nu'].value + Q

def u_Modd(a, n):
    # Overtides of M2: phase correction scales linearly with order n.
    return n/2.0 * u_M2(a)
-
-
-
-######################## Constituents ########################
-
-
class BaseConstituent(object):
    """One harmonic constituent, identified by its extended Doodson (XDO)
    coefficients over the seven astronomical arguments
    (T+h-s, s, h, p, N, pp, 90).
    """

    #Letter <-> signed-integer code used by extended Doodson notation.
    xdo_int = {
        'A': 1, 'B': 2, 'C': 3, 'D': 4, 'E': 5, 'F': 6, 'G': 7, 'H': 8, 'I': 9,
        'J': 10, 'K': 11, 'L': 12, 'M': 13, 'N': 14, 'O': 15, 'P': 16, 'Q': 17,
        'R': -8, 'S': -7, 'T': -6, 'U': -5, 'V': -4, 'W': -3, 'X': -2, 'Y': -1,
        'Z': 0
    }

    int_xdo = {v:k for k, v in xdo_int.items()}

    def __init__(self, name, xdo='', coefficients=(), u=u_zero, f=f_unity):
        """Build a constituent from an XDO string or explicit coefficients.

        Arguments:
        name -- constituent name, e.g. 'M2'
        xdo -- XDO letter string; when empty, `coefficients` is used instead
        coefficients -- explicit coefficient sequence.  Bug fix: the default
            was a shared mutable list ([]); an immutable tuple gives the
            identical np.array while avoiding cross-instance aliasing.
        u -- nodal phase-correction function (degrees)
        f -- nodal amplitude-correction function (dimensionless)
        """
        if xdo == '':
            self.coefficients = np.array(coefficients)
        else:
            self.coefficients = np.array(self.xdo_to_coefficients(xdo))
        self.name = name
        self.u = u
        self.f = f

    def xdo_to_coefficients(self, xdo):
        #Whitespace in the XDO string is ignored; letters map via xdo_int.
        return [self.xdo_int[l.upper()] for l in xdo if l in string.ascii_letters]

    def coefficients_to_xdo(self, coefficients):
        return ''.join([self.int_xdo[c] for c in coefficients])

    def V(self, astro):
        #Equilibrium argument (degrees) for the given astronomical state.
        return np.dot(self.coefficients, self.astro_values(astro))

    def xdo(self):
        return self.coefficients_to_xdo(self.coefficients)

    def speed(self, a):
        #Angular speed (degrees per hour).
        return np.dot(self.coefficients, self.astro_speeds(a))

    def astro_xdo(self, a):
        #The seven astronomical arguments, in XDO order.
        return [a['T+h-s'], a['s'], a['h'], a['p'], a['N'], a['pp'], a['90']]

    def astro_speeds(self, a):
        return np.array([each.speed for each in self.astro_xdo(a)])

    def astro_values(self, a):
        return np.array([each.value for each in self.astro_xdo(a)])

    #Consider two out of phase constituents which travel at the same speed to
    #be identical
    def __eq__(self, c):
        return np.all(self.coefficients[:-1] == c.coefficients[:-1])

    def __hash__(self):
        return hash(tuple(self.coefficients[:-1]))
-
class CompoundConstituent(BaseConstituent):
    """A constituent formed as an integer combination of base constituents
    (e.g. M4 = 2*M2).  Speed, equilibrium argument and the phase correction
    u combine linearly; the amplitude correction f combines multiplicatively.
    """

    def __init__(self, members = (), **kwargs):
        """Arguments:
        members -- sequence of (constituent, multiplier) pairs.  Bug fix:
            the default was a shared mutable list ([]); an empty tuple is
            behaviourally identical and safe to share.
        kwargs -- forwarded to BaseConstituent (name, etc.)
        """
        self.members = members

        #Unless overridden, use the corrections combined from the members.
        if 'u' not in kwargs:
            kwargs['u'] = self.u
        if 'f' not in kwargs:
            kwargs['f'] = self.f

        super(CompoundConstituent,self).__init__(**kwargs)

        self.coefficients = reduce(op.add,[c.coefficients * n for (c,n) in members])

    def speed(self, a):
        return reduce(op.add, [n * c.speed(a) for (c,n) in self.members])

    def V(self, a):
        return reduce(op.add, [n * c.V(a) for (c,n) in self.members])

    def u(self, a):
        return reduce(op.add, [n * c.u(a) for (c,n) in self.members])

    def f(self, a):
        #abs(n): a negative multiplier flips phase but not amplitude scaling.
        return reduce(op.mul, [c.f(a) ** abs(n) for (c,n) in self.members])
-
###### Base Constituents
# Each XDO string encodes the seven Doodson coefficients via
# BaseConstituent.xdo_int ('Z' = 0, 'A' = 1, 'Y' = -1, ...).
#Long Term
_Z0 = BaseConstituent(name = 'Z0', xdo = 'Z ZZZ ZZZ', u = u_zero, f = f_unity)
_Sa = BaseConstituent(name = 'Sa', xdo = 'Z ZAZ ZZZ', u = u_zero, f = f_unity)
_Ssa = BaseConstituent(name = 'Ssa', xdo = 'Z ZBZ ZZZ', u = u_zero, f = f_unity)
_Mm = BaseConstituent(name = 'Mm', xdo = 'Z AZY ZZZ', u = u_zero, f = f_Mm)
_Mf = BaseConstituent(name = 'Mf', xdo = 'Z BZZ ZZZ', u = u_Mf, f = f_Mf)

#Diurnals
_Q1 = BaseConstituent(name = 'Q1', xdo = 'A XZA ZZA', u = u_O1, f = f_O1)
_O1 = BaseConstituent(name = 'O1', xdo = 'A YZZ ZZA', u = u_O1, f = f_O1)
_K1 = BaseConstituent(name = 'K1', xdo = 'A AZZ ZZY', u = u_K1, f = f_K1)
_J1 = BaseConstituent(name = 'J1', xdo = 'A BZY ZZY', u = u_J1, f = f_J1)

#M1 is a tricky business for reasons of convention, rather than theory. The
#reasons for this are best summarised by Schureman paragraphs 126, 127 and in
#the comments found in congen_input.txt of xtides, so I won't go over all this
#again here.

_M1 = BaseConstituent(name = 'M1', xdo = 'A ZZZ ZZA', u = u_M1, f = f_M1)
_P1 = BaseConstituent(name = 'P1', xdo = 'A AXZ ZZA', u = u_zero, f = f_unity)
_S1 = BaseConstituent(name = 'S1', xdo = 'A AYZ ZZZ', u = u_zero, f = f_unity)
_OO1 = BaseConstituent(name = 'OO1', xdo = 'A CZZ ZZY', u = u_OO1, f = f_OO1)

#Semi-Diurnals
_2N2 = BaseConstituent(name = '2N2', xdo = 'B XZB ZZZ', u = u_M2, f = f_M2)
_N2 = BaseConstituent(name = 'N2', xdo = 'B YZA ZZZ', u = u_M2, f = f_M2)
_nu2 = BaseConstituent(name = 'nu2', xdo = 'B YBY ZZZ', u = u_M2, f = f_M2)
_M2 = BaseConstituent(name = 'M2', xdo = 'B ZZZ ZZZ', u = u_M2, f = f_M2)
_lambda2 = BaseConstituent(name = 'lambda2', xdo = 'B AXA ZZB', u = u_M2, f = f_M2)
_L2 = BaseConstituent(name = 'L2', xdo = 'B AZY ZZB', u = u_L2, f = f_L2)
_T2 = BaseConstituent(name = 'T2', xdo = 'B BWZ ZAZ', u = u_zero, f = f_unity)
_S2 = BaseConstituent(name = 'S2', xdo = 'B BXZ ZZZ', u = u_zero, f = f_unity)
_R2 = BaseConstituent(name = 'R2', xdo = 'B BYZ ZYB', u = u_zero, f = f_unity)
_K2 = BaseConstituent(name = 'K2', xdo = 'B BZZ ZZZ', u = u_K2, f = f_K2)

#Third-Diurnals
_M3 = BaseConstituent(name = 'M3', xdo = 'C ZZZ ZZZ', u = lambda a: u_Modd(a,3), f = lambda a: f_Modd(a,3))

###### Compound Constituents
# Built as integer combinations of the base constituents above.
#Long Term
_MSF = CompoundConstituent(name = 'MSF', members = [(_S2, 1), (_M2, -1)])

#Diurnal
_2Q1 = CompoundConstituent(name = '2Q1', members = [(_N2, 1), (_J1, -1)])
_rho1 = CompoundConstituent(name = 'rho1', members = [(_nu2, 1), (_K1, -1)])

#Semi-Diurnal

_mu2 = CompoundConstituent(name = 'mu2', members = [(_M2, 2), (_S2, -1)]) #2MS2
_2SM2 = CompoundConstituent(name = '2SM2', members = [(_S2, 2), (_M2, -1)])

#Third-Diurnal
_2MK3 = CompoundConstituent(name = '2MK3', members = [(_M2, 1), (_O1, 1)])
_MK3 = CompoundConstituent(name = 'MK3', members = [(_M2, 1), (_K1, 1)])

#Quarter-Diurnal
_MN4 = CompoundConstituent(name = 'MN4', members = [(_M2, 1), (_N2, 1)])
_M4 = CompoundConstituent(name = 'M4', members = [(_M2, 2)])
_MS4 = CompoundConstituent(name = 'MS4', members = [(_M2, 1), (_S2, 1)])
_S4 = CompoundConstituent(name = 'S4', members = [(_S2, 2)])

#Sixth-Diurnal
_M6 = CompoundConstituent(name = 'M6', members = [(_M2, 3)])
_S6 = CompoundConstituent(name = 'S6', members = [(_S2, 3)])

#Eighth-Diurnals
_M8 = CompoundConstituent(name = 'M8', members = [(_M2, 4)])


# Constituents in the order they appear on NOAA's harmonic constituent
# pages (consumed by predict_tide, which pairs them with scraped values).
noaa = [
    _M2, _S2, _N2, _K1, _M4, _O1, _M6, _MK3, _S4, _MN4, _nu2, _S6, _mu2,
    _2N2, _OO1, _lambda2, _S1, _M1, _J1, _Mm, _Ssa, _Sa, _MSF, _Mf,
    _rho1, _Q1, _T2, _R2, _2Q1, _P1, _2SM2, _M3, _L2, _2MK3, _K2,
    _M8, _MS4
]
-
-
-####################### Tide ########################
-
-
class Tide(object):
    """Harmonic tide model: a set of constituents with amplitudes and phases
    (degrees), evaluated or least-squares-fitted following Schureman.

    Fix in this revision: `np.float` (removed in NumPy 1.24) replaced with
    the builtin `float` in `_partition`.
    """

    dtype = np.dtype([
        ('constituent', object),
        ('amplitude', float),
        ('phase', float)])

    def __init__(
        self,
        constituents = None,
        amplitudes = None,
        phases = None,
        model = None,
        radians = False
    ):
        """
        Initialise a tidal model. Provide constituents, amplitudes and phases OR a model.
        Arguments:
        constituents -- list of constituents used in the model.
        amplitudes -- list of amplitudes corresponding to constituents
        phases -- list of phases corresponding to constituents
        model -- an ndarray of type Tide.dtype representing the constituents, amplitudes and phases.
        radians -- boolean representing whether phases are in radians (default False)
        """
        if None not in [constituents, amplitudes, phases]:
            if len(constituents) == len(amplitudes) == len(phases):
                model = np.zeros(len(phases), dtype=Tide.dtype)
                model['constituent'] = np.array(constituents)
                model['amplitude'] = np.array(amplitudes)
                model['phase'] = np.array(phases)
            else:
                raise ValueError("Constituents, amplitudes and phases should all be arrays of equal length.")
        elif model is not None:
            if not model.dtype == Tide.dtype:
                raise ValueError("Model must be a numpy array with dtype == Tide.dtype")
        else:
            raise ValueError("Must be initialised with constituents, amplitudes and phases; or a model.")
        if radians:
            model['phase'] = r2d*model['phase']
        self.model = model[:]
        self.normalize()

    def prepare(self, *args, **kwargs):
        return Tide._prepare(self.model['constituent'], *args, **kwargs)

    @staticmethod
    def _prepare(constituents, t0, t = None, radians = True):
        """
        Return constituent speed and equilibrium argument at a given time, and constituent node factors at given times.
        Arguments:
        constituents -- list of constituents to prepare
        t0 -- time at which to evaluate speed and equilibrium argument for each constituent
        t -- list of times at which to evaluate node factors for each constituent (default: t0)
        radians -- whether to return the angular arguments in radians or degrees (default: True)
        """
        #The equilibrium argument is constant and taken at the beginning of the
        #time series (t0). The speed of the equilibrium argument changes very
        #slowly, so again we take it to be constant over any length of data. The
        #node factors change more rapidly.
        if isinstance(t0, Iterable):
            t0 = t0[0]
        if t is None:
            t = [t0]
        if not isinstance(t, Iterable):
            t = [t]
        a0 = astro(t0)
        a = [astro(t_i) for t_i in t]

        #For convenience give u, V0 (but not speed!) in [0, 360)
        V0 = np.array([c.V(a0) for c in constituents])[:, np.newaxis]
        speed = np.array([c.speed(a0) for c in constituents])[:, np.newaxis]
        u = [np.mod(np.array([c.u(a_i) for c in constituents])[:, np.newaxis], 360.0)
             for a_i in a]
        f = [np.mod(np.array([c.f(a_i) for c in constituents])[:, np.newaxis], 360.0)
             for a_i in a]

        if radians:
            speed = d2r*speed
            V0 = d2r*V0
            u = [d2r*each for each in u]
        return speed, u, f, V0

    def at(self, t):
        """
        Return the modelled tidal height at given times.
        Arguments:
        t -- array of times at which to evaluate the tidal height
        """
        t0 = t[0]
        hours = self._hours(t0, t)
        partition = 240.0
        t = self._partition(hours, partition)
        times = self._times(t0, [(i + 0.5)*partition for i in range(len(t))])
        speed, u, f, V0 = self.prepare(t0, times, radians = True)
        H = self.model['amplitude'][:, np.newaxis]
        p = d2r*self.model['phase'][:, np.newaxis]

        return np.concatenate([
            Tide._tidal_series(t_i, H, p, speed, u_i, f_i, V0)
            for t_i, u_i, f_i in izip(t, u, f)
        ])

    def highs(self, *args):
        """
        Generator yielding only the high tides.
        Arguments:
        see Tide.extrema()
        """
        for t in ifilter(lambda e: e[2] == 'H', self.extrema(*args)):
            yield t

    def lows(self, *args):
        """
        Generator yielding only the low tides.
        Arguments:
        see Tide.extrema()
        """
        for t in ifilter(lambda e: e[2] == 'L', self.extrema(*args)):
            yield t

    def form_number(self):
        """
        Returns the model's form number, a helpful heuristic for classifying tides.
        """
        k1, o1, m2, s2 = (
            np.extract(self.model['constituent'] == c, self.model['amplitude'])
            for c in [_K1, _O1, _M2, _S2]
        )
        return (k1+o1)/(m2+s2)

    def classify(self):
        """
        Classify the tide according to its form number
        """
        form = self.form_number()
        if 0 <= form <= 0.25:
            return 'semidiurnal'
        elif 0.25 < form <= 1.5:
            return 'mixed (semidiurnal)'
        elif 1.5 < form <= 3.0:
            return 'mixed (diurnal)'
        else:
            return 'diurnal'

    def extrema(self, t0, t1 = None, partition = 2400.0):
        """
        A generator for high and low tides.
        Arguments:
        t0 -- time after which extrema are sought
        t1 -- optional time before which extrema are sought (if not given, the generator is infinite)
        partition -- number of hours for which we consider the node factors to be constant (default: 2400.0)
        """
        if t1:
            #yield from in python 3.4
            for e in takewhile(lambda t: t[0] < t1, self.extrema(t0)):
                yield e
        else:
            #We assume that extrema are separated by at least delta hours
            delta = np.amin([
                90.0 / c.speed(astro(t0)) for c in self.model['constituent']
                if not c.speed(astro(t0)) == 0
            ])
            #We search for stationary points from offset hours before t0 to
            #ensure we find any which might occur very soon after t0.
            offset = 24.0
            #Pairs of (start, end) times bounding successive partitions;
            #written as an explicit 2-tuple of generators for clarity.
            partitions = (
                (Tide._times(t0, i*partition) for i in count()),
                (Tide._times(t0, i*partition) for i in count(1)),
            )

            #We'll overestimate to be on the safe side;
            #values outside (start,end) won't get yielded.
            interval_count = int(np.ceil((partition + offset) / delta)) + 1
            amplitude = self.model['amplitude'][:, np.newaxis]
            phase = d2r*self.model['phase'][:, np.newaxis]

            for start, end in izip(*partitions):
                speed, [u], [f], V0 = self.prepare(start, Tide._times(start, 0.5*partition))
                #These derivatives don't include the time dependence of u or f,
                #but these change slowly.
                def d(t):
                    return np.sum(-speed*amplitude*f*np.sin(speed*t + (V0 + u) - phase), axis=0)
                def d2(t):
                    return np.sum(-speed**2.0 * amplitude*f*np.cos(speed*t + (V0 + u) - phase), axis=0)
                #We'll overestimate to be on the safe side;
                #values outside (start,end) won't get yielded.
                intervals = (
                    (delta * i - offset for i in range(interval_count)),
                    (delta*(i+1) - offset for i in range(interval_count)),
                )
                for a, b in izip(*intervals):
                    if d(a)*d(b) < 0:
                        extrema = fsolve(d, (a + b) / 2.0, fprime = d2)[0]
                        time = Tide._times(start, extrema)
                        [height] = self.at([time])
                        hilo = 'H' if d2(extrema) < 0 else 'L'
                        if start < time < end:
                            yield (time, height, hilo)

    @staticmethod
    def _hours(t0, t):
        """
        Return the hourly offset(s) of a (list of) time from a given time.
        Arguments:
        t0 -- time from which offsets are sought
        t -- times to find hourly offsets from t0.
        """
        if not isinstance(t, Iterable):
            return Tide._hours(t0, [t])[0]
        elif isinstance(t[0], datetime):
            return np.array([(ti-t0).total_seconds() / 3600.0 for ti in t])
        else:
            return t

    @staticmethod
    def _partition(hours, partition = 3600.0):
        """
        Partition a sorted list of numbers (or in this case hours).
        Arguments:
        hours -- sorted ndarray of hours.
        partition -- maximum partition length (default: 3600.0)
        """
        partition = float(partition)
        relative = hours - hours[0]
        #Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
        #the builtin float is the documented equivalent.
        total_partitions = np.ceil(relative[-1] / partition + 10*np.finfo(float).eps).astype('int')
        return [hours[np.floor(np.divide(relative, partition)) == i] for i in range(total_partitions)]

    @staticmethod
    def _times(t0, hours):
        """
        Return a (list of) datetime(s) given an initial time and an (list of) hourly offset(s).
        Arguments:
        t0 -- initial time
        hours -- hourly offsets from t0
        """
        if not isinstance(hours, Iterable):
            return Tide._times(t0, [hours])[0]
        elif not isinstance(hours[0], datetime):
            return np.array([t0 + timedelta(hours=h) for h in hours])
        else:
            return np.array(hours)

    @staticmethod
    def _tidal_series(t, amplitude, phase, speed, u, f, V0):
        #Sum of the harmonic series at hours t (Schureman's basic formula).
        return np.sum(amplitude*f*np.cos(speed*t + (V0 + u) - phase), axis=0)

    def normalize(self):
        """
        Adapt self.model so that amplitudes are positive and phases are in [0,360) as per convention
        """
        for i, (_, amplitude, phase) in enumerate(self.model):
            if amplitude < 0:
                self.model['amplitude'][i] = -amplitude
                self.model['phase'][i] = phase + 180.0
            self.model['phase'][i] = np.mod(self.model['phase'][i], 360.0)

    @classmethod
    def decompose(
        cls,
        heights,
        t = None,
        t0 = None,
        interval = None,
        constituents = noaa,
        initial = None,
        n_period = 2,
        callback = None,
        full_output = False
    ):
        """
        Return an instance of Tide which has been fitted to a series of tidal observations.
        Arguments:
        It is not necessary to provide t0 or interval if t is provided.
        heights -- ndarray of tidal observation heights
        t -- ndarray of tidal observation times
        t0 -- datetime representing the time at which heights[0] was recorded
        interval -- hourly interval between readings
        constituents -- list of constituents to use in the fit (default: noaa)
        initial -- optional Tide instance to use as first guess for least squares solver
        n_period -- only include constituents which complete at least this many periods (default: 2)
        callback -- optional function to be called at each iteration of the solver
        full_output -- whether to return the output of scipy's leastsq solver (default: False)
        """
        if t is not None:
            if isinstance(t[0], datetime):
                hours = Tide._hours(t[0], t)
                t0 = t[0]
            elif t0 is not None:
                hours = t
            else:
                raise ValueError("t can be an array of datetimes, or an array "
                                 "of hours since t0 in which case t0 must be "
                                 "specified.")
        elif None not in [t0, interval]:
            hours = np.arange(len(heights)) * interval
        else:
            raise ValueError("Must provide t(datetimes), or t(hours) and "
                             "t0(datetime), or interval(hours) and t0(datetime) "
                             "so that each height can be identified with an "
                             "instant in time.")

        #Remove duplicate constituents (those which travel at exactly the same
        #speed, irrespective of phase)
        constituents = list(OrderedDict.fromkeys(constituents))

        #No need for least squares to find the mean water level constituent z0,
        #work relative to mean
        constituents = [c for c in constituents if not c == _Z0]
        z0 = np.mean(heights)
        heights = heights - z0

        #Only analyse frequencies which complete at least n_period cycles over
        #the data period.
        constituents = [
            c for c in constituents
            if 360.0 * n_period < hours[-1] * c.speed(astro(t0))
        ]
        n = len(constituents)

        sort = np.argsort(hours)
        hours = hours[sort]
        heights = heights[sort]

        #We partition our time/height data into intervals over which we consider
        #the values of u and f to assume a constant value (that is, their true
        #value at the midpoint of the interval). Constituent
        #speeds change much more slowly than the node factors, so we will
        #consider these constant and equal to their speed at t0, regardless of
        #the length of the time series.

        partition = 240.0

        t = Tide._partition(hours, partition)
        times = Tide._times(t0, [(i + 0.5)*partition for i in range(len(t))])

        speed, u, f, V0 = Tide._prepare(constituents, t0, times, radians = True)

        #Residual to be minimised by variation of parameters (amplitudes, phases)
        def residual(hp):
            H, p = hp[:n, np.newaxis], hp[n:, np.newaxis]
            s = np.concatenate([
                Tide._tidal_series(t_i, H, p, speed, u_i, f_i, V0)
                for t_i, u_i, f_i in izip(t, u, f)
            ])
            res = heights - s
            if callback:
                callback(res)
            return res

        #Analytic Jacobian of the residual - this makes solving significantly
        #faster than just using gradient approximation, especially with many
        #measurements / constituents.
        def D_residual(hp):
            H, p = hp[:n, np.newaxis], hp[n:, np.newaxis]
            ds_dH = np.concatenate([
                f_i*np.cos(speed*t_i+u_i+V0-p)
                for t_i, u_i, f_i in izip(t, u, f)],
                axis = 1)

            ds_dp = np.concatenate([
                H*f_i*np.sin(speed*t_i+u_i+V0-p)
                for t_i, u_i, f_i in izip(t, u, f)],
                axis = 1)

            return np.append(-ds_dH, -ds_dp, axis=0)

        #Initial guess for solver, haven't done any analysis on this since the
        #solver seems to converge well regardless of the initial guess We do
        #however scale the initial amplitude guess with some measure of the
        #variation
        amplitudes = np.ones(n) * (np.sqrt(np.dot(heights, heights)) / len(heights))
        phases = np.ones(n)

        if initial:
            for (c0, amplitude, phase) in initial.model:
                for i, c in enumerate(constituents):
                    if c0 == c:
                        amplitudes[i] = amplitude
                        phases[i] = d2r*phase

        initial = np.append(amplitudes, phases)

        lsq = leastsq(residual, initial, Dfun=D_residual, col_deriv=True, ftol=1e-7)

        model = np.zeros(1+n, dtype=cls.dtype)
        model[0] = (_Z0, z0, 0)
        model[1:]['constituent'] = constituents[:]
        model[1:]['amplitude'] = lsq[0][:n]
        model[1:]['phase'] = lsq[0][n:]

        if full_output:
            return cls(model = model, radians = True), lsq
        return cls(model = model, radians = True)
-
-
-################# Astronomical Values #######################
-
-
-#Convert a sexagesimal angle into decimal degrees
-def s2d(degrees, arcmins = 0, arcsecs = 0, mas = 0, muas = 0):
- return (
- degrees
- + (arcmins / 60.0)
- + (arcsecs / (60.0*60.0))
- + (mas / (60.0*60.0*1e3))
- + (muas / (60.0*60.0*1e6))
- )
-
-#Evaluate a polynomial at argument
-def polynomial(coefficients, argument):
- return sum([c * (argument ** i) for i,c in enumerate(coefficients)])
-
-#Evaluate the first derivative of a polynomial at argument
-def d_polynomial(coefficients, argument):
- return sum([c * i * (argument ** (i-1)) for i,c in enumerate(coefficients)])
-
-#Meeus formula 11.1
-def T(t):
- return (JD(t) - 2451545.0)/36525
-
-#Meeus formula 7.1
-def JD(t):
- Y, M = t.year, t.month
- D = (
- t.day
- + t.hour / (24.0)
- + t.minute / (24.0*60.0)
- + t.second / (24.0*60.0*60.0)
- + t.microsecond / (24.0 * 60.0 * 60.0 * 1e6)
- )
- if M <= 2:
- Y = Y - 1
- M = M + 12
- A = np.floor(Y / 100.0)
- B = 2 - A + np.floor(A / 4.0)
- return np.floor(365.25*(Y+4716)) + np.floor(30.6001*(M+1)) + D + B - 1524.5
-
-#Meeus formula 21.3
-terrestrial_obliquity_coefficients = (
- s2d(23,26,21.448),
- -s2d(0,0,4680.93),
- -s2d(0,0,1.55),
- s2d(0,0,1999.25),
- -s2d(0,0,51.38),
- -s2d(0,0,249.67),
- -s2d(0,0,39.05),
- s2d(0,0,7.12),
- s2d(0,0,27.87),
- s2d(0,0,5.79),
- s2d(0,0,2.45)
-)
-
-#Adjust these coefficients for parameter T rather than U
-terrestrial_obliquity_coefficients = [
- c * (1e-2) ** i for i,c in enumerate(terrestrial_obliquity_coefficients)
-]
-
-#Not entirely sure about this interpretation, but this is the difference
-#between Meeus formulae 24.2 and 24.3 and seems to work
-solar_perigee_coefficients = (
- 280.46645 - 357.52910,
- 36000.76932 - 35999.05030,
- 0.0003032 + 0.0001559,
- 0.00000048
-)
-
-#Meeus formula 24.2
-solar_longitude_coefficients = (
- 280.46645,
- 36000.76983,
- 0.0003032
-)
-
-#This value is taken from JPL Horizon and is essentially constant
-lunar_inclination_coefficients = (
- 5.145,
-)
-
-#Meeus formula 45.1
-lunar_longitude_coefficients = (
- 218.3164591,
- 481267.88134236,
- -0.0013268,
- 1/538841.0
- -1/65194000.0
-)
-
-#Meeus formula 45.7
-lunar_node_coefficients = (
- 125.0445550,
- -1934.1361849,
- 0.0020762,
- 1/467410.0,
- -1/60616000.0
-)
-
-#Meeus, unnumbered formula directly preceded by 45.7
-lunar_perigee_coefficients = (
- 83.3532430,
- 4069.0137111,
- -0.0103238,
- -1/80053.0,
- 1/18999000.0
-)
-
-#Now follow some useful auxiliary values, we won't need their speed.
-#See notes on Table 6 in Schureman for I, nu, xi, nu', 2nu''
-def _I(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- cosI = np.cos(i)*np.cos(omega)-np.sin(i)*np.sin(omega)*np.cos(N)
- return r2d*np.arccos(cosI)
-
-def _xi(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
- e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
- e1, e2 = np.arctan(e1), np.arctan(e2)
- e1, e2 = e1 - 0.5*N, e2 - 0.5*N
- return -(e1 + e2)*r2d
-
-def _nu(N, i, omega):
- N, i, omega = d2r * N, d2r*i, d2r*omega
- e1 = np.cos(0.5*(omega-i))/np.cos(0.5*(omega+i)) * np.tan(0.5*N)
- e2 = np.sin(0.5*(omega-i))/np.sin(0.5*(omega+i)) * np.tan(0.5*N)
- e1, e2 = np.arctan(e1), np.arctan(e2)
- e1, e2 = e1 - 0.5*N, e2 - 0.5*N
- return (e1 - e2)*r2d
-
-#Schureman equation 224
-#Can we be more precise than B "the solar coefficient" = 0.1681?
-def _nup(N, i, omega):
- I = d2r * _I(N, i, omega)
- nu = d2r * _nu(N, i, omega)
- return r2d * np.arctan(np.sin(2*I)*np.sin(nu)/(np.sin(2*I)*np.cos(nu)+0.3347))
-
-#Schureman equation 232
-def _nupp(N, i, omega):
- I = d2r * _I(N, i, omega)
- nu = d2r * _nu(N, i, omega)
- tan2nupp = (np.sin(I)**2*np.sin(2*nu))/(np.sin(I)**2*np.cos(2*nu)+0.0727)
- return r2d * 0.5 * np.arctan(tan2nupp)
-
-AstronomicalParameter = namedtuple('AstronomicalParameter', ['value', 'speed'])
-
-def astro(t):
- a = {}
- #We can use polynomial fits from Meeus to obtain good approximations to
- #some astronomical values (and therefore speeds).
- polynomials = {
- 's': lunar_longitude_coefficients,
- 'h': solar_longitude_coefficients,
- 'p': lunar_perigee_coefficients,
- 'N': lunar_node_coefficients,
- 'pp': solar_perigee_coefficients,
- '90': (90.0,),
- 'omega': terrestrial_obliquity_coefficients,
- 'i': lunar_inclination_coefficients
- }
- #Polynomials are in T, that is Julian Centuries; we want our speeds to be
- #in the more convenient unit of degrees per hour.
- dT_dHour = 1 / (24 * 365.25 * 100)
- for name, coefficients in polynomials.items():
- a[name] = AstronomicalParameter(
- np.mod(polynomial(coefficients, T(t)), 360.0),
- d_polynomial(coefficients, T(t)) * dT_dHour
- )
-
- #Some other parameters defined by Schureman which are dependent on the
- #parameters N, i, omega for use in node factor calculations. We don't need
- #their speeds.
- args = list(each.value for each in [a['N'], a['i'], a['omega']])
- for name, function in {
- 'I': _I,
- 'xi': _xi,
- 'nu': _nu,
- 'nup': _nup,
- 'nupp': _nupp
- }.items():
- a[name] = AstronomicalParameter(np.mod(function(*args), 360.0), None)
-
- #We don't work directly with the T (hours) parameter, instead our spanning
- #set for equilibrium arguments #is given by T+h-s, s, h, p, N, pp, 90.
- #This is in line with convention.
- hour = AstronomicalParameter((JD(t) - np.floor(JD(t))) * 360.0, 15.0)
- a['T+h-s'] = AstronomicalParameter(
- hour.value + a['h'].value - a['s'].value,
- hour.speed + a['h'].speed - a['s'].speed
- )
- #It is convenient to calculate Schureman's P here since several node
- #factors need it, although it could be argued that these
- #(along with I, xi, nu etc) belong somewhere else.
- a['P'] = AstronomicalParameter(
- np.mod(a['p'].value -a['xi'].value,360.0),
- None
- )
- return a
-
-