Merged
50 commits
ab3564d
Working version of Gaussian Copula
ssolson Aug 11, 2021
2d9d3c2
Create dedicated function for iso probs and quantiles
ssolson Aug 11, 2021
26beca9
Contour plots work for single and multiple contours
ssolson Aug 16, 2021
1611328
Gumbel Copula.
ssolson Aug 17, 2021
be32f71
Clayton Copula
ssolson Aug 17, 2021
7dd64b9
Rosenblatt Copula
ssolson Aug 17, 2021
07f9ef6
Gauss and Gumbel general copula
ssolson Aug 18, 2021
93b5d8d
Add support for nonparametric gaussian, clayton, and gumbel copulas
ssolson Aug 27, 2021
b8474c7
Require statsmodel for nonparametric KDE copulas
ssolson Aug 27, 2021
cd35b99
Small changes
ssolson Sep 15, 2021
31737c2
Fix bug in KDE log transformation
ssolson Sep 23, 2021
3a5d404
Adding docstrings
ssolson Sep 29, 2021
79c3864
All doc strings updated
ssolson Sep 30, 2021
9f4911f
Add markers option to contours plotting
ssolson Oct 6, 2021
528fb20
Add Copula tests comparing to WDRT results
ssolson Oct 6, 2021
b7c77aa
Fix x1, x2 pts bug in KDE contour, clean up
ssolson Oct 6, 2021
d6981ff
Example showing the calculation of all copula methods and comparing to…
ssolson Oct 6, 2021
7b40fa1
Simplifications and language cleanup
ssolson Oct 6, 2021
983ecd2
Merge branch 'master' into wdrt
ssolson Oct 7, 2021
7207c81
Add statsmodel
ssolson Oct 7, 2021
336ac62
Add testing data files
ssolson Oct 7, 2021
5500528
Merge branch 'wdrt' of https://github.com/ssolson/MHKiT-Python into wdrt
ssolson Oct 7, 2021
33d3397
In plot environmental contours convert x1, x2 to values if a Series is …
ssolson Oct 11, 2021
df88198
Docstring typo corrections
ssolson Oct 11, 2021
fc1651b
Corrected notebook description typos.
ssolson Oct 11, 2021
0548242
Merge branch 'master' of https://github.com/MHKiT-Software/MHKiT-Pyth…
ssolson Nov 1, 2021
c221c5e
add environmental_contours to init
ssolson Nov 11, 2021
187334f
Remove WDRT functionality
ssolson Nov 18, 2021
3e8658d
All WDRT contour functionality can be called from one function
ssolson Nov 18, 2021
164a731
Rework tests for new contour functions setup and location
ssolson Nov 18, 2021
d8d878a
Move all contour example into this file
ssolson Nov 18, 2021
aa15dd8
Update to work with new structure.
ssolson Nov 18, 2021
9c9ae94
Remove all commented out wdrt functions from resource
ssolson Nov 30, 2021
b9a7d9f
No changes made. Reverting back.
ssolson Dec 5, 2021
a6946ab
Have PCA method use general fit. Adds PCAmethod to fit method but can…
ssolson Dec 5, 2021
039c0a5
Merge with latest origin commit
ssolson Dec 5, 2021
23ada68
Update discussion around the use of the copula method.
ssolson Dec 5, 2021
27bd9b3
Remove copula stand alone notebook
ssolson Dec 5, 2021
44c4f33
minor formatting changes
ssolson Dec 5, 2021
ba36282
Remove test bugs created from merge with origin
ssolson Dec 6, 2021
d474d6f
Import env contours into resource module and adjust test, examples, e…
ssolson Dec 6, 2021
e5cf2ac
Uncommented tests
ssolson Dec 6, 2021
8e38230
Remove reference to import the env contours module
ssolson Dec 6, 2021
2664262
fixing minor typo
rpauly18 Dec 8, 2021
408a04c
Move env contours to _file and adjust package to handle new structure
ssolson Dec 9, 2021
61aaf0a
Merge branch 'wdrt' of https://github.com/ssolson/MHKiT-Python into wdrt
ssolson Dec 9, 2021
549ff92
Minor formatting cleanup
ssolson Dec 14, 2021
515cd37
Clean up unused packages and variables
ssolson Dec 14, 2021
33324f1
sampling rate to averaging period
ssolson Jan 19, 2022
16b3dee
Update setup.py
ssolson Jan 31, 2022
194,476 changes: 194,476 additions & 0 deletions examples/data/wave/NDBC46050.csv

Large diffs are not rendered by default.

14,868 changes: 14,868 additions & 0 deletions examples/data/wave/WDRT_caluculated_countours.json

Large diffs are not rendered by default.

993 changes: 71 additions & 922 deletions examples/environmental_contours.ipynb

Large diffs are not rendered by default.

241 changes: 172 additions & 69 deletions mhkit/tests/test_wave.py
@@ -1,10 +1,11 @@
from os.path import abspath, dirname, join, isfile, normpath, relpath
from pandas.testing import assert_frame_equal
import xarray.testing as xrt
from numpy.testing import assert_allclose
from scipy.interpolate import interp1d
from random import seed, randint
import matplotlib.pylab as plt
from datetime import datetime
import xarray.testing as xrt
import mhkit.wave as wave
from io import StringIO
import pandas as pd
@@ -14,11 +15,11 @@
import netCDF4
import inspect
import pickle
import time
import json
import sys
import os
import time
from random import seed, randint


testdir = dirname(abspath(__file__))
datadir = normpath(join(testdir,relpath('../../examples/data/wave')))
@@ -431,29 +432,22 @@ def test_metrics(self):
expected = data['J'][j]
calculated = wave.resource.energy_flux(S,i)
error = np.abs(expected-calculated.values)/expected
self.assertLess(error, 0.1)

self.assertLess(error, 0.1)

# v
if file_i == 'CDiP':
# this should be updated to run on other datasets
expected = data['metrics']['v']
calculated = wave.resource.spectral_width(S,
frequency_bins=f_bins).iloc[0,0]
error = np.abs(expected-calculated)/expected


error = np.abs(expected-calculated)/expected
self.assertLess(error, 0.01)



if file_i == 'MC':
expected = data['metrics']['v']
# testing that default uniform frequency bin widths works
calculated = wave.resource.spectral_width(S).iloc[0,0]
error = np.abs(expected-calculated)/expected


self.assertLess(error, 0.01)


@@ -482,12 +476,23 @@ def setUpClass(self):

f_name= 'Hm0_Te_46022.json'
self.Hm0Te = pd.read_json(join(datadir,f_name))


with open(join(datadir, 'principal_component_analysis.pkl'), 'rb') as f:
self.pca = pickle.load(f)



file_loc=join(datadir, 'principal_component_analysis.pkl')
with open(file_loc, 'rb') as f:
self.pca = pickle.load(f)
f.close()

file_loc=join(datadir,'WDRT_caluculated_countours.json')
with open(file_loc) as f:
self.wdrt_copulas = json.load(f)
f.close()

ndbc_46050=pd.read_csv(join(datadir,'NDBC46050.csv'))
self.wdrt_Hm0 = ndbc_46050['Hm0']
self.wdrt_Te = ndbc_46050['Te']

self.wdrt_dt=3600
self.wdrt_period= 50

@classmethod
def tearDownClass(self):
@@ -497,58 +502,80 @@ def test_environmental_contour(self):

Hm0Te = self.Hm0Te
df = Hm0Te[Hm0Te['Hm0'] < 20]

Hm0 = df.Hm0.values
Te = df.Te.values

dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
time_R = 100

Hm0_contour, Te_contour = wave.resource.environmental_contour(Hm0, Te,
dt_ss, time_R)

expected_contours = pd.read_csv(join(datadir,'Hm0_Te_contours_46022.csv'))
assert_allclose(expected_contours.Hm0_contour.values, Hm0_contour, rtol=1e-3)

Hm0 = df.Hm0.values
Te = df.Te.values

dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
period = 100

copula = wave.resource.environmental_contours(Hm0,
Te, dt_ss, period, 'PCA')

Hm0_contour=copula['PCA_x1']
Te_contour=copula['PCA_x2']

file_loc=join(datadir,'Hm0_Te_contours_46022.csv')
expected_contours = pd.read_csv(file_loc)
assert_allclose(expected_contours.Hm0_contour.values,
Hm0_contour, rtol=1e-3)


def test__principal_component_analysis(self):
Hm0Te = self.Hm0Te
df = Hm0Te[Hm0Te['Hm0'] < 20]

Hm0 = df.Hm0.values
Te = df.Te.values
PCA = wave.resource._principal_component_analysis(Hm0,Te, bin_size=250)
PCA = (wave._environmental_contours
._principal_component_analysis(Hm0,Te, bin_size=250))

assert_allclose(PCA['principal_axes'], self.pca['principal_axes'])
assert_allclose(PCA['principal_axes'],
self.pca['principal_axes'])
self.assertAlmostEqual(PCA['shift'], self.pca['shift'])
self.assertAlmostEqual(PCA['x1_fit']['mu'], self.pca['x1_fit']['mu'])
self.assertAlmostEqual(PCA['mu_fit'].slope, self.pca['mu_fit'].slope)
self.assertAlmostEqual(PCA['mu_fit'].intercept, self.pca['mu_fit'].intercept)
assert_allclose(PCA['sigma_fit']['x'], self.pca['sigma_fit']['x'])
self.assertAlmostEqual(PCA['x1_fit']['mu'],
self.pca['x1_fit']['mu'])
self.assertAlmostEqual(PCA['mu_fit'].slope,
self.pca['mu_fit'].slope)
self.assertAlmostEqual(PCA['mu_fit'].intercept,
self.pca['mu_fit'].intercept)
assert_allclose(PCA['sigma_fit']['x'],
self.pca['sigma_fit']['x'])


def test_plot_environmental_contour(self):
filename = abspath(join(testdir, 'wave_plot_environmental_contour.png'))
file_loc= join(testdir, 'wave_plot_environmental_contour.png')
filename = abspath(file_loc)
if isfile(filename):
os.remove(filename)

Hm0Te = self.Hm0Te
df = Hm0Te[Hm0Te['Hm0'] < 20]

Hm0 = df.Hm0.values
Te = df.Te.values


Hm0 = df.Hm0.values
Te = df.Te.values

dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
time_R = 100

copulas = wave.resource.environmental_contours(Hm0, Te, dt_ss,
time_R, 'PCA')

Hm0_contour=copulas['PCA_x1']
Te_contour=copulas['PCA_x2']

dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
time_R = 100

Hm0_contour, Te_contour = wave.resource.environmental_contour(Hm0, Te,
dt_ss, time_R)

plt.figure()
wave.graphics.plot_environmental_contour(Te, Hm0,
Te_contour, Hm0_contour,
data_label='NDBC 46022',
contour_label='100-year Contour',
x_label = 'Te [s]',
y_label = 'Hm0 [m]')
(wave.graphics
.plot_environmental_contour(Te, Hm0,
Te_contour, Hm0_contour,
data_label='NDBC 46022',
contour_label='100-year Contour',
x_label = 'Te [s]',
y_label = 'Hm0 [m]')
)
plt.savefig(filename, format='png')
plt.close()

Expand All @@ -568,24 +595,93 @@ def test_plot_environmental_contour_multiyear(self):

dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds

time_R = np.array([100, 105, 110, 120, 150])

Hm0_contour, Te_contour = wave.resource.environmental_contour(Hm0, Te,
dt_ss, time_R)
time_R = [100, 105, 110, 120, 150]

Hm0s=[]
Tes=[]
for period in time_R:
copulas = (wave.resource
.environmental_contours(Hm0,Te,dt_ss,period,'PCA'))

Hm0s.append(copulas['PCA_x1'])
Tes.append(copulas['PCA_x2'])

contour_label = [f'{year}-year Contour' for year in time_R]
plt.figure()
wave.graphics.plot_environmental_contour(Te, Hm0,
Te_contour, Hm0_contour,
data_label='NDBC 46022',
contour_label=contour_label,
x_label = 'Te [s]',
y_label = 'Hm0 [m]')
(wave.graphics
.plot_environmental_contour(Te, Hm0,
Tes, Hm0s,
data_label='NDBC 46022',
contour_label=contour_label,
x_label = 'Te [s]',
y_label = 'Hm0 [m]')
)
plt.savefig(filename, format='png')
plt.close()

self.assertTrue(isfile(filename))


def test_standard_copulas(self):
copulas = (wave.resource
.environmental_contours(self.wdrt_Hm0, self.wdrt_Te,
self.wdrt_dt, self.wdrt_period,
method=['gaussian', 'gumbel', 'clayton'])
)

# WDRT slightly varies Rosenblatt copula parameters from
# the other copula default parameters
rosen =(wave.resource
.environmental_contours(self.wdrt_Hm0, self.wdrt_Te,
self.wdrt_dt, self.wdrt_period, method=['rosenblatt'],
min_bin_count=50, initial_bin_max_val=0.5,
bin_val_size=0.25))
copulas['rosenblatt_x1'] = rosen['rosenblatt_x1']
copulas['rosenblatt_x2'] = rosen['rosenblatt_x2']

methods=['gaussian', 'gumbel', 'clayton', 'rosenblatt']
close=[]
for method in methods:
close.append(np.allclose(copulas[f'{method}_x1'],
self.wdrt_copulas[f'{method}_x1']))
close.append(np.allclose(copulas[f'{method}_x2'],
self.wdrt_copulas[f'{method}_x2']))
self.assertTrue(all(close))

def test_nonparametric_copulas(self):
methods=['nonparametric_gaussian','nonparametric_clayton',
'nonparametric_gumbel']

np_copulas=wave.resource.environmental_contours(self.wdrt_Hm0,
self.wdrt_Te, self.wdrt_dt, self.wdrt_period, method=methods)

close=[]
for method in methods:
close.append(np.allclose(np_copulas[f'{method}_x1'],
self.wdrt_copulas[f'{method}_x1'], atol=0.13))
close.append(np.allclose(np_copulas[f'{method}_x2'],
self.wdrt_copulas[f'{method}_x2'], atol=0.13))
self.assertTrue(all(close))

def test_kde_copulas(self):
kde_copula = wave.resource.environmental_contours(self.wdrt_Hm0,
self.wdrt_Te, self.wdrt_dt, self.wdrt_period,
method=['bivariate_KDE'], bandwidth=[0.23, 0.23])
log_kde_copula = (wave.resource
.environmental_contours(self.wdrt_Hm0, self.wdrt_Te,
self.wdrt_dt, self.wdrt_period, method=['bivariate_KDE_log'], bandwidth=[0.02, 0.11])
)

close= [ np.allclose(kde_copula['bivariate_KDE_x1'],
self.wdrt_copulas['bivariate_KDE_x1']),
np.allclose(kde_copula['bivariate_KDE_x2'],
self.wdrt_copulas['bivariate_KDE_x2']),
np.allclose(log_kde_copula['bivariate_KDE_log_x1'],
self.wdrt_copulas['bivariate_KDE_log_x1']),
np.allclose(log_kde_copula['bivariate_KDE_log_x2'],
self.wdrt_copulas['bivariate_KDE_log_x2'])]
self.assertTrue(all(close))

class TestPerformance(unittest.TestCase):

@classmethod
@@ -918,30 +1014,36 @@ def setUpClass(self):
def tearDownClass(self):
pass

### WPTO hindcast data
# only run the test for one version of python so as to not spam the server
# yet keep coverage high on each test
## WPTO hindcast data
# only run the test for one version of python so as to not spam the server
# yet keep coverage high on each test
if float(sys.version[0:3]) == 3.7:
def test_multi_year(self):
data_type = '3-hour'
years = [1990,1992]
lat_lon = (44.624076,-124.280097)
parameters = 'significant_wave_height'
wave_multiyear, meta = wave.io.hindcast.request_wpto_point_data(data_type,parameters,lat_lon,years)
(wave_multiyear,
meta) = (wave.io.hindcast
.request_wpto_point_data(data_type,parameters,
lat_lon,years))
assert_frame_equal(self.my_swh,wave_multiyear)
assert_frame_equal(self.my_meta,meta)

elif float(sys.version[0:3]) == 3.8:
# wait five minutes to ensure the python 3.7 call is complete
#time.sleep(300)
time.sleep(300)
def test_multi_loc(self):
data_type = '3-hour'
years = [1995]
lat_lon = ((44.624076,-124.280097),(43.489171,-125.152137))
parameters = 'mean_absolute_period'
wave_multiloc, meta= wave.io.hindcast.request_wpto_point_data(data_type,
parameters,lat_lon,years)
dir_multiyear, meta_dir = wave.io.hindcast.request_wpto_directional_spectrum(lat_lon,year='1995')
wave_multiloc, meta=(wave.io.hindcast
.request_wpto_point_data(data_type,
parameters,lat_lon,years))
(dir_multiyear,
meta_dir)=(wave.io.hindcast
.request_wpto_directional_spectrum(lat_lon,year='1995'))
dir_multiyear = dir_multiyear.sel(time_index=slice(dir_multiyear.time_index[0],dir_multiyear.time_index[99]))
dir_multiyear = dir_multiyear.rename_vars({87:'87',58:'58'})

@@ -1216,6 +1318,7 @@ def test_plot_monthly_cumulative_distribution(self):
plt.close()

self.assertTrue(isfile(filename))



if __name__ == '__main__':
unittest.main()
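
For orientation, below is a minimal usage sketch of the consolidated environmental contour API exercised by the tests in this diff. It mirrors the calls in test_environmental_contour and test_standard_copulas; exact signatures, defaults, and return keys should be confirmed against the mhkit.wave.resource docstrings added in this PR.

import pandas as pd
import mhkit.wave as wave

# Sea state data with Hm0 (significant wave height) and Te (energy period),
# e.g. the NDBC 46050 file added under examples/data/wave/ in this PR.
ndbc_46050 = pd.read_csv('examples/data/wave/NDBC46050.csv')
Hm0 = ndbc_46050['Hm0']
Te = ndbc_46050['Te']

dt = 3600      # sea state duration in seconds
period = 50    # return period in years

# A single method returns a dict keyed '<method>_x1' / '<method>_x2'.
pca = wave.resource.environmental_contours(Hm0, Te, dt, period, 'PCA')
Hm0_contour = pca['PCA_x1']
Te_contour = pca['PCA_x2']

# Several copula methods can be requested in one call.
copulas = wave.resource.environmental_contours(
    Hm0, Te, dt, period, method=['gaussian', 'gumbel', 'clayton'])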