From 7d4adad4d78d10d217fb17ce15877bf193d9d040 Mon Sep 17 00:00:00 2001 From: ssolson Date: Thu, 23 Jun 2022 11:02:36 -0600 Subject: [PATCH 01/16] test_wave file to folder with individual feature files --- mhkit/tests/test_wave.py | 1361 ----------------- mhkit/tests/wave/test_WECSim.py | 73 + mhkit/tests/wave/test_WPTOhindcast.py | 135 ++ mhkit/tests/wave/test_cdip.py | 188 +++ mhkit/tests/wave/test_contours.py | 269 ++++ mhkit/tests/wave/test_ndbc.py | 180 +++ mhkit/tests/wave/test_performance.py | 127 ++ .../wave/test_plotResouceCharacterizations.py | 71 + mhkit/tests/wave/test_resource_metrics.py | 317 ++++ mhkit/tests/wave/test_resource_spectrum.py | 187 +++ mhkit/tests/wave/test_swan.py | 83 + 11 files changed, 1630 insertions(+), 1361 deletions(-) delete mode 100644 mhkit/tests/test_wave.py create mode 100644 mhkit/tests/wave/test_WECSim.py create mode 100644 mhkit/tests/wave/test_WPTOhindcast.py create mode 100644 mhkit/tests/wave/test_cdip.py create mode 100644 mhkit/tests/wave/test_contours.py create mode 100644 mhkit/tests/wave/test_ndbc.py create mode 100644 mhkit/tests/wave/test_performance.py create mode 100644 mhkit/tests/wave/test_plotResouceCharacterizations.py create mode 100644 mhkit/tests/wave/test_resource_metrics.py create mode 100644 mhkit/tests/wave/test_resource_spectrum.py create mode 100644 mhkit/tests/wave/test_swan.py diff --git a/mhkit/tests/test_wave.py b/mhkit/tests/test_wave.py deleted file mode 100644 index f247a63d9..000000000 --- a/mhkit/tests/test_wave.py +++ /dev/null @@ -1,1361 +0,0 @@ -from os.path import abspath, dirname, join, isfile, normpath, relpath -from pandas.testing import assert_frame_equal -from numpy.testing import assert_allclose -from scipy.interpolate import interp1d -from random import seed, randint -import matplotlib.pylab as plt -from datetime import datetime -import xarray.testing as xrt -import mhkit.wave as wave -from io import StringIO -import pandas as pd -import numpy as np -import contextlib -import unittest -import netCDF4 -import inspect -import pickle -import time -import json -import sys -import os - - -testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../examples/data/wave'))) - - -class TestResourceSpectrum(unittest.TestCase): - - @classmethod - def setUpClass(self): - omega = np.arange(0.1,3.5,0.01) - self.f = omega/(2*np.pi) - self.Hs = 2.5 - self.Tp = 8 - df = self.f[1] - self.f[0] - Trep = 1/df - self.t = np.arange(0, Trep, 0.05) - - @classmethod - def tearDownClass(self): - pass - - def test_pierson_moskowitz_spectrum(self): - S = wave.resource.pierson_moskowitz_spectrum(self.f,self.Tp,self.Hs) - Hm0 = wave.resource.significant_wave_height(S).iloc[0,0] - Tp0 = wave.resource.peak_period(S).iloc[0,0] - - errorHm0 = np.abs(self.Tp - Tp0)/self.Tp - errorTp0 = np.abs(self.Hs - Hm0)/self.Hs - - self.assertLess(errorHm0, 0.01) - self.assertLess(errorTp0, 0.01) - - def test_jonswap_spectrum(self): - S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) - Hm0 = wave.resource.significant_wave_height(S).iloc[0,0] - Tp0 = wave.resource.peak_period(S).iloc[0,0] - - errorHm0 = np.abs(self.Tp - Tp0)/self.Tp - errorTp0 = np.abs(self.Hs - Hm0)/self.Hs - - self.assertLess(errorHm0, 0.01) - self.assertLess(errorTp0, 0.01) - - def test_surface_elevation_phases_np_and_pd(self): - S0 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs) - S1 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs*1.1) - S = pd.concat([S0, S1], axis=1) - - phases_np = np.random.rand(S.shape[0], S.shape[1]) * 2 * 
np.pi - phases_pd = pd.DataFrame(phases_np, index=S.index, columns=S.columns) - - eta_np = wave.resource.surface_elevation(S, self.t, phases=phases_np, seed=1) - eta_pd = wave.resource.surface_elevation(S, self.t, phases=phases_pd, seed=1) - - assert_frame_equal(eta_np, eta_pd) - - def test_surface_elevation_frequency_bins_np_and_pd(self): - S0 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs) - S1 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs*1.1) - S = pd.concat([S0, S1], axis=1) - - eta0 = wave.resource.surface_elevation(S, self.t, seed=1) - - f_bins_np = np.array([np.diff(S.index)[0]]*len(S)) - f_bins_pd = pd.DataFrame(f_bins_np, index=S.index, columns=['df']) - - eta_np = wave.resource.surface_elevation(S, self.t, frequency_bins=f_bins_np, seed=1) - eta_pd = wave.resource.surface_elevation(S, self.t, frequency_bins=f_bins_pd, seed=1) - - assert_frame_equal(eta0, eta_np) - assert_frame_equal(eta_np, eta_pd) - - def test_surface_elevation_moments(self): - S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) - eta = wave.resource.surface_elevation(S, self.t, seed=1) - dt = self.t[1] - self.t[0] - Sn = wave.resource.elevation_spectrum(eta, 1/dt, len(eta.values), - detrend=False, window='boxcar', - noverlap=0) - - m0 = wave.resource.frequency_moment(S,0).m0.values[0] - m0n = wave.resource.frequency_moment(Sn,0).m0.values[0] - errorm0 = np.abs((m0 - m0n)/m0) - - self.assertLess(errorm0, 0.01) - - m1 = wave.resource.frequency_moment(S,1).m1.values[0] - m1n = wave.resource.frequency_moment(Sn,1).m1.values[0] - errorm1 = np.abs((m1 - m1n)/m1) - - self.assertLess(errorm1, 0.01) - - def test_surface_elevation_rmse(self): - S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) - eta = wave.resource.surface_elevation(S, self.t, seed=1) - dt = self.t[1] - self.t[0] - Sn = wave.resource.elevation_spectrum(eta, 1/dt, len(eta), - detrend=False, window='boxcar', - noverlap=0) - - fSn = interp1d(Sn.index.values, Sn.values, axis=0) - rmse = (S.values - fSn(S.index.values))**2 - rmse_sum = (np.sum(rmse)/len(rmse))**0.5 - - self.assertLess(rmse_sum, 0.02) - - def test_plot_spectrum(self): - filename = abspath(join(testdir, 'wave_plot_spectrum.png')) - if isfile(filename): - os.remove(filename) - - S = wave.resource.pierson_moskowitz_spectrum(self.f,self.Tp,self.Hs) - - plt.figure() - wave.graphics.plot_spectrum(S) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_chakrabarti(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti.png')) - if isfile(filename): - os.remove(filename) - - D = 5 - H = 10 - lambda_w = 200 - - wave.graphics.plot_chakrabarti(H, lambda_w, D) - plt.savefig(filename) - - def test_plot_chakrabarti_np(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti_np.png')) - if isfile(filename): - os.remove(filename) - - D = np.linspace(5, 15, 5) - H = 10 * np.ones_like(D) - lambda_w = 200 * np.ones_like(D) - - wave.graphics.plot_chakrabarti(H, lambda_w, D) - plt.savefig(filename) - - self.assertTrue(isfile(filename)) - - def test_plot_chakrabarti_pd(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti_pd.png')) - if isfile(filename): - os.remove(filename) - - D = np.linspace(5, 15, 5) - H = 10 * np.ones_like(D) - lambda_w = 200 * np.ones_like(D) - df = pd.DataFrame([H.flatten(),lambda_w.flatten(),D.flatten()], - index=['H','lambda_w','D']).transpose() - - wave.graphics.plot_chakrabarti(df.H, df.lambda_w, df.D) - plt.savefig(filename) - - self.assertTrue(isfile(filename)) - 
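The TestResourceSpectrum cases above all lean on one round trip: build a parametric spectrum, synthesize a surface-elevation series from it, and recover the spectrum (or its moments) from that series. A minimal sketch of that loop, outside the patch, using the same MHKiT calls and fixture values (Hs = 2.5 m, Tp = 8 s) the tests exercise:

import numpy as np
import mhkit.wave as wave

# Frequency grid and sea state matching the test fixture
f = np.arange(0.1, 3.5, 0.01) / (2 * np.pi)   # Hz
Hs, Tp = 2.5, 8

S = wave.resource.jonswap_spectrum(f, Tp, Hs)

# One repeat period of synthetic elevation, sampled every 0.05 s
t = np.arange(0, 1 / (f[1] - f[0]), 0.05)
eta = wave.resource.surface_elevation(S, t, seed=1)

# Re-estimate the spectrum from the time series
Sn = wave.resource.elevation_spectrum(eta, 1 / (t[1] - t[0]), len(eta),
                                      detrend=False, window='boxcar',
                                      noverlap=0)

# Zeroth moments (total variance) should agree to within ~1%
m0 = wave.resource.frequency_moment(S, 0).m0.values[0]
m0n = wave.resource.frequency_moment(Sn, 0).m0.values[0]
assert abs(m0 - m0n) / m0 < 0.01

test_surface_elevation_moments and test_surface_elevation_rmse below are exactly this check with different error metrics.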
-class TestResourceMetrics(unittest.TestCase): - - @classmethod - def setUpClass(self): - omega = np.arange(0.1,3.5,0.01) - self.f = omega/(2*np.pi) - self.Hs = 2.5 - self.Tp = 8 - - file_name = join(datadir, 'ValData1.json') - with open(file_name, "r") as read_file: - self.valdata1 = pd.DataFrame(json.load(read_file)) - - self.valdata2 = {} - - file_name = join(datadir, 'ValData2_MC.json') - with open(file_name, "r") as read_file: - data = json.load(read_file) - self.valdata2['MC'] = data - for i in data.keys(): - # Calculate elevation spectra - elevation = pd.DataFrame(data[i]['elevation']) - elevation.index = elevation.index.astype(float) - elevation.sort_index(inplace=True) - sample_rate = data[i]['sample_rate'] - NFFT = data[i]['NFFT'] - self.valdata2['MC'][i]['S'] = wave.resource.elevation_spectrum(elevation, - sample_rate, NFFT) - - file_name = join(datadir, 'ValData2_AH.json') - with open(file_name, "r") as read_file: - data = json.load(read_file) - self.valdata2['AH'] = data - for i in data.keys(): - # Calculate elevation spectra - elevation = pd.DataFrame(data[i]['elevation']) - elevation.index = elevation.index.astype(float) - elevation.sort_index(inplace=True) - sample_rate = data[i]['sample_rate'] - NFFT = data[i]['NFFT'] - self.valdata2['AH'][i]['S'] = wave.resource.elevation_spectrum(elevation, - sample_rate, NFFT) - - file_name = join(datadir, 'ValData2_CDiP.json') - with open(file_name, "r") as read_file: - data = json.load(read_file) - self.valdata2['CDiP'] = data - for i in data.keys(): - temp = pd.Series(data[i]['S']).to_frame('S') - temp.index = temp.index.astype(float) - self.valdata2['CDiP'][i]['S'] = temp - - - @classmethod - def tearDownClass(self): - pass - - def test_kfromw(self): - for i in self.valdata1.columns: - f = np.array(self.valdata1[i]['w'])/(2*np.pi) - h = self.valdata1[i]['h'] - rho = self.valdata1[i]['rho'] - - expected = self.valdata1[i]['k'] - k = wave.resource.wave_number(f, h, rho) - calculated = k.loc[:,'k'].values - error = ((expected-calculated)**2).sum() # SSE - - self.assertLess(error, 1e-6) - - def test_kfromw_one_freq(self): - g = 9.81 - f = 0.1 - h = 1e9 - w = np.pi*2*f # deep water dispersion - expected = w**2 / g - calculated = wave.resource.wave_number(f=f, h=h, g=g).values[0][0] - error = np.abs(expected-calculated) - self.assertLess(error, 1e-6) - - def test_wave_length(self): - k_list=[1,2,10,3] - l_expected = (2.*np.pi/np.array(k_list)).tolist() - - k_df = pd.DataFrame(k_list,index = [1,2,3,4]) - k_series= k_df[0] - k_array=np.array(k_list) - - for l in [k_list, k_df, k_series, k_array]: - l_calculated = wave.resource.wave_length(l) - self.assertListEqual(l_expected,l_calculated.tolist()) - - idx=0 - k_int = k_list[idx] - l_calculated = wave.resource.wave_length(k_int) - self.assertEqual(l_expected[idx],l_calculated) - - def test_depth_regime(self): - expected = [True,True,False,True] - l_list=[1,2,10,3] - l_df = pd.DataFrame(l_list,index = [1,2,3,4]) - l_series= l_df[0] - l_array=np.array(l_list) - h = 10 - for l in [l_list, l_df, l_series, l_array]: - calculated = wave.resource.depth_regime(l,h) - self.assertListEqual(expected,calculated.tolist()) - - idx=0 - l_int = l_list[idx] - calculated = wave.resource.depth_regime(l_int,h) - self.assertEqual(expected[idx],calculated) - - - def test_wave_celerity(self): - # Depth regime ratio - dr_ratio=2 - - # small change in f will give similar value cg - f=np.linspace(20.0001,20.0005,5) - - # Choose index to spike at. 
cg spike is inversly proportional to k - k_idx=2 - k_tmp=[1, 1, 0.5, 1, 1] - k = pd.DataFrame(k_tmp, index=f) - - # all shallow - cg_shallow1 = wave.resource.wave_celerity(k, h=0.0001,depth_check=True) - cg_shallow2 = wave.resource.wave_celerity(k, h=0.0001,depth_check=False) - self.assertTrue(all(cg_shallow1.squeeze().values == - cg_shallow2.squeeze().values)) - - - # all deep - cg = wave.resource.wave_celerity(k, h=1000,depth_check=True) - self.assertTrue(all(np.pi*f/k.squeeze().values == cg.squeeze().values)) - - def test_energy_flux_deep(self): - # Dependent on mhkit.resource.BS spectrum - S = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs) - Te = wave.resource.energy_period(S) - Hm0 = wave.resource.significant_wave_height(S) - rho=1025 - g=9.80665 - coeff = rho*(g**2)/(64*np.pi) - J = coeff*(Hm0.squeeze()**2)*Te.squeeze() - - h=-1 # not used when deep=True - J_calc = wave.resource.energy_flux(S, h, deep=True) - - self.assertTrue(J_calc.squeeze() == J) - - - def test_moments(self): - for file_i in self.valdata2.keys(): # for each file MC, AH, CDiP - datasets = self.valdata2[file_i] - for s in datasets.keys(): # for each set - data = datasets[s] - for m in data['m'].keys(): - expected = data['m'][m] - S = data['S'] - if s == 'CDiP1' or s == 'CDiP6': - f_bins=pd.Series(data['freqBinWidth']) - else: - f_bins = None - - calculated = wave.resource.frequency_moment(S, int(m) - ,frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - - self.assertLess(error, 0.01) - - - - def test_metrics(self): - for file_i in self.valdata2.keys(): # for each file MC, AH, CDiP - datasets = self.valdata2[file_i] - - for s in datasets.keys(): # for each set - - - data = datasets[s] - S = data['S'] - if file_i == 'CDiP': - f_bins=pd.Series(data['freqBinWidth']) - else: - f_bins = None - - # Hm0 - expected = data['metrics']['Hm0'] - calculated = wave.resource.significant_wave_height(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('Hm0', expected, calculated, error) - self.assertLess(error, 0.01) - - # Te - expected = data['metrics']['Te'] - calculated = wave.resource.energy_period(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('Te', expected, calculated, error) - self.assertLess(error, 0.01) - - # T0 - expected = data['metrics']['T0'] - calculated = wave.resource.average_zero_crossing_period(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('T0', expected, calculated, error) - self.assertLess(error, 0.01) - - # Tc - expected = data['metrics']['Tc'] - calculated = wave.resource.average_crest_period(S, - # Tc = Tavg**2 - frequency_bins=f_bins).iloc[0,0]**2 - error = np.abs(expected-calculated)/expected - #print('Tc', expected, calculated, error) - self.assertLess(error, 0.01) - - # Tm - expected = np.sqrt(data['metrics']['Tm']) - calculated = wave.resource.average_wave_period(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('Tm', expected, calculated, error) - self.assertLess(error, 0.01) - - # Tp - expected = data['metrics']['Tp'] - calculated = wave.resource.peak_period(S).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('Tp', expected, calculated, error) - self.assertLess(error, 0.001) - - # e - expected = data['metrics']['e'] - calculated = wave.resource.spectral_bandwidth(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - #print('e', expected, 
calculated, error) - self.assertLess(error, 0.001) - - # J - if file_i != 'CDiP': - for i,j in zip(data['h'],data['J']): - expected = data['J'][j] - calculated = wave.resource.energy_flux(S,i) - error = np.abs(expected-calculated.values)/expected - self.assertLess(error, 0.1) - - # v - if file_i == 'CDiP': - # this should be updated to run on other datasets - expected = data['metrics']['v'] - calculated = wave.resource.spectral_width(S, - frequency_bins=f_bins).iloc[0,0] - error = np.abs(expected-calculated)/expected - self.assertLess(error, 0.01) - - if file_i == 'MC': - expected = data['metrics']['v'] - # testing that default uniform frequency bin widths works - calculated = wave.resource.spectral_width(S).iloc[0,0] - error = np.abs(expected-calculated)/expected - self.assertLess(error, 0.01) - - - def test_plot_elevation_timeseries(self): - filename = abspath(join(testdir, 'wave_plot_elevation_timeseries.png')) - if isfile(filename): - os.remove(filename) - - data = self.valdata2['MC'] - temp = pd.DataFrame(data[list(data.keys())[0]]['elevation']) - temp.index = temp.index.astype(float) - temp.sort_index(inplace=True) - eta = temp.iloc[0:100,:] - - plt.figure() - wave.graphics.plot_elevation_timeseries(eta) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - -class TestContours(unittest.TestCase): - - @classmethod - def setUpClass(self): - - f_name= 'Hm0_Te_46022.json' - self.Hm0Te = pd.read_json(join(datadir,f_name)) - - file_loc=join(datadir, 'principal_component_analysis.pkl') - with open(file_loc, 'rb') as f: - self.pca = pickle.load(f) - f.close() - - file_loc=join(datadir,'WDRT_caluculated_countours.json') - with open(file_loc) as f: - self.wdrt_copulas = json.load(f) - f.close() - - ndbc_46050=pd.read_csv(join(datadir,'NDBC46050.csv')) - self.wdrt_Hm0 = ndbc_46050['Hm0'] - self.wdrt_Te = ndbc_46050['Te'] - - self.wdrt_dt=3600 - self.wdrt_period= 50 - - @classmethod - def tearDownClass(self): - pass - - def test_environmental_contour(self): - - Hm0Te = self.Hm0Te - df = Hm0Te[Hm0Te['Hm0'] < 20] - - Hm0 = df.Hm0.values - Te = df.Te.values - - dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds - period = 100 - - copula = wave.contours.environmental_contours(Hm0, - Te, dt_ss, period, 'PCA') - - Hm0_contour=copula['PCA_x1'] - Te_contour=copula['PCA_x2'] - - file_loc=join(datadir,'Hm0_Te_contours_46022.csv') - expected_contours = pd.read_csv(file_loc) - assert_allclose(expected_contours.Hm0_contour.values, - Hm0_contour, rtol=1e-3) - - def test__principal_component_analysis(self): - Hm0Te = self.Hm0Te - df = Hm0Te[Hm0Te['Hm0'] < 20] - - Hm0 = df.Hm0.values - Te = df.Te.values - PCA = (wave.contours - ._principal_component_analysis(Hm0,Te, bin_size=250)) - - assert_allclose(PCA['principal_axes'], - self.pca['principal_axes']) - self.assertAlmostEqual(PCA['shift'], self.pca['shift']) - self.assertAlmostEqual(PCA['x1_fit']['mu'], - self.pca['x1_fit']['mu']) - self.assertAlmostEqual(PCA['mu_fit'].slope, - self.pca['mu_fit'].slope) - self.assertAlmostEqual(PCA['mu_fit'].intercept, - self.pca['mu_fit'].intercept) - assert_allclose(PCA['sigma_fit']['x'], - self.pca['sigma_fit']['x']) - - def test_plot_environmental_contour(self): - file_loc= join(testdir, 'wave_plot_environmental_contour.png') - filename = abspath(file_loc) - if isfile(filename): - os.remove(filename) - - Hm0Te = self.Hm0Te - df = Hm0Te[Hm0Te['Hm0'] < 20] - - Hm0 = df.Hm0.values - Te = df.Te.values - - dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds - time_R = 100 - - copulas = 
wave.contours.environmental_contours(Hm0, Te, dt_ss, - time_R, 'PCA') - - Hm0_contour=copulas['PCA_x1'] - Te_contour=copulas['PCA_x2'] - - dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds - time_R = 100 - - plt.figure() - (wave.graphics - .plot_environmental_contour(Te, Hm0, - Te_contour, Hm0_contour, - data_label='NDBC 46022', - contour_label='100-year Contour', - x_label = 'Te [s]', - y_label = 'Hm0 [m]') - ) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_environmental_contour_multiyear(self): - filename = abspath(join(testdir, - 'wave_plot_environmental_contour_multiyear.png')) - if isfile(filename): - os.remove(filename) - - Hm0Te = self.Hm0Te - df = Hm0Te[Hm0Te['Hm0'] < 20] - - Hm0 = df.Hm0.values - Te = df.Te.values - - dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds - - time_R = [100, 105, 110, 120, 150] - - Hm0s=[] - Tes=[] - for period in time_R: - copulas = (wave.contours - .environmental_contours(Hm0,Te,dt_ss,period,'PCA')) - - Hm0s.append(copulas['PCA_x1']) - Tes.append(copulas['PCA_x2']) - - contour_label = [f'{year}-year Contour' for year in time_R] - plt.figure() - (wave.graphics - .plot_environmental_contour(Te, Hm0, - Tes, Hm0s, - data_label='NDBC 46022', - contour_label=contour_label, - x_label = 'Te [s]', - y_label = 'Hm0 [m]') - ) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_standard_copulas(self): - copulas = (wave.contours - .environmental_contours(self.wdrt_Hm0, self.wdrt_Te, - self.wdrt_dt, self.wdrt_period, - method=['gaussian', 'gumbel', 'clayton']) - ) - - # WDRT slightly vaires Rosenblatt copula parameters from - # the other copula default parameters - rosen = (wave.contours - .environmental_contours(self.wdrt_Hm0, self.wdrt_Te, - self.wdrt_dt, self.wdrt_period, method=['rosenblatt'], - min_bin_count=50, initial_bin_max_val=0.5, - bin_val_size=0.25)) - copulas['rosenblatt_x1'] = rosen['rosenblatt_x1'] - copulas['rosenblatt_x2'] = rosen['rosenblatt_x2'] - - methods=['gaussian', 'gumbel', 'clayton', 'rosenblatt'] - close=[] - for method in methods: - close.append(np.allclose(copulas[f'{method}_x1'], - self.wdrt_copulas[f'{method}_x1'])) - close.append(np.allclose(copulas[f'{method}_x2'], - self.wdrt_copulas[f'{method}_x2'])) - self.assertTrue(all(close)) - - def test_nonparametric_copulas(self): - methods=['nonparametric_gaussian','nonparametric_clayton', - 'nonparametric_gumbel'] - - np_copulas = wave.contours.environmental_contours(self.wdrt_Hm0, - self.wdrt_Te, self.wdrt_dt, self.wdrt_period, method=methods) - - close=[] - for method in methods: - close.append(np.allclose(np_copulas[f'{method}_x1'], - self.wdrt_copulas[f'{method}_x1'], atol=0.13)) - close.append(np.allclose(np_copulas[f'{method}_x2'], - self.wdrt_copulas[f'{method}_x2'], atol=0.13)) - self.assertTrue(all(close)) - - def test_kde_copulas(self): - kde_copula = wave.contours.environmental_contours(self.wdrt_Hm0, - self.wdrt_Te, self.wdrt_dt, self.wdrt_period, - method=['bivariate_KDE'], bandwidth=[0.23, 0.23]) - log_kde_copula = (wave.contours - .environmental_contours(self.wdrt_Hm0, self.wdrt_Te, - self.wdrt_dt, self.wdrt_period, method=['bivariate_KDE_log'], bandwidth=[0.02, 0.11]) - ) - - close= [ np.allclose(kde_copula['bivariate_KDE_x1'], - self.wdrt_copulas['bivariate_KDE_x1']), - np.allclose(kde_copula['bivariate_KDE_x2'], - self.wdrt_copulas['bivariate_KDE_x2']), - np.allclose(log_kde_copula['bivariate_KDE_log_x1'], - self.wdrt_copulas['bivariate_KDE_log_x1']), - 
np.allclose(log_kde_copula['bivariate_KDE_log_x2'], - self.wdrt_copulas['bivariate_KDE_log_x2'])] - self.assertTrue(all(close)) - - def test_samples_contours(self): - te_samples = np.array([10, 15, 20]) - hs_samples_0 = np.array([8.56637939, 9.27612515, 8.70427774]) - hs_contour = np.array(self.wdrt_copulas["gaussian_x1"]) - te_contour = np.array(self.wdrt_copulas["gaussian_x2"]) - hs_samples = wave.contours.samples_contour( - te_samples, te_contour, hs_contour) - assert_allclose(hs_samples, hs_samples_0) - - def test_samples_seastate(self): - hs_0 = np.array([5.91760129, 4.55185088, 1.41144991, 12.64443154, - 7.89753791, 0.93890797]) - te_0 = np.array([14.24199604, 8.25383556, 6.03901866, 16.9836369, - 9.51967777, 3.46969355]) - w_0 = np.array([2.18127398e-01, 2.18127398e-01, 2.18127398e-01, - 2.45437862e-07, 2.45437862e-07, 2.45437862e-07]) - - df = self.Hm0Te[self.Hm0Te['Hm0'] < 20] - dt_ss = (self.Hm0Te.index[2]-self.Hm0Te.index[1]).seconds - points_per_interval = 3 - return_periods = np.array([50, 100]) - np.random.seed(0) - hs, te, w = wave.contours.samples_full_seastate( - df.Hm0.values, df.Te.values, points_per_interval, return_periods, - dt_ss) - assert_allclose(hs, hs_0) - assert_allclose(te, te_0) - assert_allclose(w, w_0) - - -class TestPerformance(unittest.TestCase): - - @classmethod - def setUpClass(self): - np.random.seed(123) - Hm0 = np.random.rayleigh(4, 100000) - Te = np.random.normal(4.5, .8, 100000) - P = np.random.normal(200, 40, 100000) - J = np.random.normal(300, 10, 100000) - ndbc_data_file = join(datadir,'data.txt') - [raw_ndbc_data, meta] = wave.io.ndbc.read_file(ndbc_data_file) - self.S = raw_ndbc_data.T - - self.data = pd.DataFrame({'Hm0': Hm0, 'Te': Te, 'P': P,'J': J}) - self.Hm0_bins = np.arange(0,19,0.5) - self.Te_bins = np.arange(0,9,1) - self.expected_stats = ["mean","std","median","count","sum","min","max","freq"] - - @classmethod - def tearDownClass(self): - pass - - def test_capture_length(self): - L = wave.performance.capture_length(self.data['P'], self.data['J']) - L_stats = wave.performance.statistics(L) - - self.assertAlmostEqual(L_stats['mean'], 0.6676, 3) - - def test_capture_length_matrix(self): - L = wave.performance.capture_length(self.data['P'], self.data['J']) - LM = wave.performance.capture_length_matrix(self.data['Hm0'], self.data['Te'], - L, 'std', self.Hm0_bins, self.Te_bins) - - self.assertEqual(LM.shape, (38,9)) - self.assertEqual(LM.isna().sum().sum(), 131) - - def test_wave_energy_flux_matrix(self): - JM = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], - self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) - - self.assertEqual(JM.shape, (38,9)) - self.assertEqual(JM.isna().sum().sum(), 131) - - def test_power_matrix(self): - L = wave.performance.capture_length(self.data['P'], self.data['J']) - LM = wave.performance.capture_length_matrix(self.data['Hm0'], self.data['Te'], - L, 'mean', self.Hm0_bins, self.Te_bins) - JM = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], - self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) - PM = wave.performance.power_matrix(LM, JM) - - self.assertEqual(PM.shape, (38,9)) - self.assertEqual(PM.isna().sum().sum(), 131) - - def test_mean_annual_energy_production(self): - L = wave.performance.capture_length(self.data['P'], self.data['J']) - maep = wave.performance.mean_annual_energy_production_timeseries(L, self.data['J']) - - self.assertAlmostEqual(maep, 1754020.077, 2) - - - def test_plot_matrix(self): - filename = abspath(join(testdir, 
'wave_plot_matrix.png')) - if isfile(filename): - os.remove(filename) - - M = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], - self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) - - plt.figure() - wave.graphics.plot_matrix(M) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_powerperformance_workflow(self): - filename = abspath(join(testdir, 'Capture Length Matrix mean.png')) - if isfile(filename): - os.remove(filename) - P = pd.Series(np.random.normal(200, 40, 743),index = self.S.columns) - statistic = ['mean'] - savepath = testdir - show_values = True - h = 60 - expected = 401239.4822345051 - x = self.S.T - CM,MAEP = wave.performance.power_performance_workflow(self.S, h, - P, statistic, savepath=savepath, show_values=show_values) - - self.assertTrue(isfile(filename)) - self.assertEqual(list(CM.data_vars),self.expected_stats) - - error = (expected-MAEP)/expected # SSE - - self.assertLess(error, 1e-6) - -class TestIOndbc(unittest.TestCase): - - @classmethod - def setUpClass(self): - self.expected_columns_metRT = ['WDIR', 'WSPD', 'GST', 'WVHT', 'DPD', - 'APD', 'MWD', 'PRES', 'ATMP', 'WTMP', 'DEWP', 'VIS', 'PTDY', 'TIDE'] - self.expected_units_metRT = {'WDIR': 'degT', 'WSPD': 'm/s', 'GST': 'm/s', - 'WVHT': 'm', 'DPD': 'sec', 'APD': 'sec', 'MWD': 'degT', 'PRES': 'hPa', - 'ATMP': 'degC', 'WTMP': 'degC', 'DEWP': 'degC', 'VIS': 'nmi', - 'PTDY': 'hPa', 'TIDE': 'ft'} - - self.expected_columns_metH = ['WDIR', 'WSPD', 'GST', 'WVHT', 'DPD', - 'APD', 'MWD', 'PRES', 'ATMP', 'WTMP', 'DEWP', 'VIS', 'TIDE'] - self.expected_units_metH = {'WDIR': 'degT', 'WSPD': 'm/s', 'GST': 'm/s', - 'WVHT': 'm', 'DPD': 'sec', 'APD': 'sec', 'MWD': 'deg', 'PRES': 'hPa', - 'ATMP': 'degC', 'WTMP': 'degC', 'DEWP': 'degC', 'VIS': 'nmi', - 'TIDE': 'ft'} - self.filenames=['46042w1996.txt.gz', - '46029w1997.txt.gz', - '46029w1998.txt.gz'] - self.swden = pd.read_csv(join(datadir,self.filenames[0]), sep=r'\s+', - compression='gzip') - - @classmethod - def tearDownClass(self): - pass - - ### Realtime data - def test_ndbc_read_realtime_met(self): - data, units = wave.io.ndbc.read_file(join(datadir, '46097.txt')) - expected_index0 = datetime(2019,4,2,13,50) - self.assertSetEqual(set(data.columns), set(self.expected_columns_metRT)) - self.assertEqual(data.index[0], expected_index0) - self.assertEqual(data.shape, (6490, 14)) - self.assertEqual(units,self.expected_units_metRT) - - ### Historical data - def test_ndbnc_read_historical_met(self): - # QC'd monthly data, Aug 2019 - data, units = wave.io.ndbc.read_file(join(datadir, '46097h201908qc.txt')) - expected_index0 = datetime(2019,8,1,0,0) - self.assertSetEqual(set(data.columns), set(self.expected_columns_metH)) - self.assertEqual(data.index[0], expected_index0) - self.assertEqual(data.shape, (4464, 13)) - self.assertEqual(units,self.expected_units_metH) - - ### Spectral data - def test_ndbc_read_spectral(self): - data, units = wave.io.ndbc.read_file(join(datadir, 'data.txt')) - self.assertEqual(data.shape, (743, 47)) - self.assertEqual(units, None) - - ### Continuous wind data - def test_ndbc_read_cwind_no_units(self): - data, units = wave.io.ndbc.read_file(join(datadir, '42a01c2003.txt')) - self.assertEqual(data.shape, (4320, 5)) - self.assertEqual(units, None) - - def test_ndbc_read_cwind_units(self): - data, units = wave.io.ndbc.read_file(join(datadir, '46002c2016.txt')) - self.assertEqual(data.shape, (28468, 5)) - self.assertEqual(units, wave.io.ndbc.parameter_units('cwind')) - - def 
test_ndbc_available_data(self): - data=wave.io.ndbc.available_data('swden', buoy_number='46029') - cols = data.columns.tolist() - exp_cols = ['id', 'year', 'filename'] - self.assertEqual(cols, exp_cols) - - years = [int(year) for year in data.year.tolist()] - exp_years=[*range(1996,1996+len(years))] - self.assertEqual(years, exp_years) - self.assertEqual(data.shape, (len(data), 3)) - - def test__ndbc_parse_filenames(self): - filenames= pd.Series(self.filenames) - buoys = wave.io.ndbc._parse_filenames('swden', filenames) - years = buoys.year.tolist() - numbers = buoys.id.tolist() - fnames = buoys.filename.tolist() - - self.assertEqual(buoys.shape, (len(filenames),3)) - self.assertListEqual(years, ['1996','1997','1998']) - self.assertListEqual(numbers, ['46042','46029','46029']) - self.assertListEqual(fnames, self.filenames) - - def test_ndbc_request_data(self): - filenames= pd.Series(self.filenames[0]) - ndbc_data = wave.io.ndbc.request_data('swden', filenames) - self.assertTrue(self.swden.equals(ndbc_data['1996'])) - - def test_ndbc_request_data_from_dataframe(self): - filenames= pd.DataFrame(pd.Series(data=self.filenames[0])) - ndbc_data = wave.io.ndbc.request_data('swden', filenames) - assert_frame_equal(self.swden, ndbc_data['1996']) - - def test_ndbc_request_data_filenames_length(self): - with self.assertRaises(AssertionError): - wave.io.ndbc.request_data('swden', pd.Series(dtype=float)) - - def test_ndbc_to_datetime_index(self): - dt = wave.io.ndbc.to_datetime_index('swden', self.swden) - self.assertEqual(type(dt.index), pd.DatetimeIndex) - self.assertFalse({'YY','MM','DD','hh'}.issubset(dt.columns)) - - def test_ndbc_request_data_empty_file(self): - temp_stdout = StringIO() - # known empty file. If NDBC replaces, this test may fail. - filename = "42008h1984.txt.gz" - buoy_id='42008' - year = '1984' - with contextlib.redirect_stdout(temp_stdout): - wave.io.ndbc.request_data('stdmet', pd.Series(filename)) - output = temp_stdout.getvalue().strip() - msg = (f'The NDBC buoy {buoy_id} for year {year} with ' - f'filename {filename} is empty or missing ' - 'data. Please omit this file from your data ' - 'request in the future.') - self.assertEqual(output, msg) - - def test_ndbc_request_multiple_files_with_empty_file(self): - temp_stdout = StringIO() - # known empty file. If NDBC replaces, this test may fail. 
- empty_file = '42008h1984.txt.gz' - working_file = '46042h1996.txt.gz' - filenames = pd.Series([empty_file, working_file]) - with contextlib.redirect_stdout(temp_stdout): - ndbc_data =wave.io.ndbc.request_data('stdmet', filenames) - self.assertEqual(1, len(ndbc_data)) - - def test_ndbc_dates_to_datetime(self): - dt = wave.io.ndbc.dates_to_datetime('swden', self.swden) - self.assertEqual(datetime(1996, 1, 1, 1, 0), dt[1]) - - def test_date_string_to_datetime(self): - swden = self.swden.copy(deep=True) - swden['mm'] = np.zeros(len(swden)).astype(int).astype(str) - year_string='YY' - year_fmt='%y' - parse_columns = [year_string, 'MM', 'DD', 'hh', 'mm'] - df = wave.io.ndbc._date_string_to_datetime(swden, parse_columns, - year_fmt) - dt = df['date'] - self.assertEqual(datetime(1996, 1, 1, 1, 0), dt[1]) - - def test_parameter_units(self): - parameter='swden' - units = wave.io.ndbc.parameter_units(parameter) - self.assertEqual(units[parameter], '(m*m)/Hz') - -class TestWECSim(unittest.TestCase): - - @classmethod - def setUpClass(self): - pass - - @classmethod - def tearDownClass(self): - pass - - ### WEC-Sim data, mo mooring - def test_read_wecSim_no_mooring(self): - ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3_matlabWorkspace_structure.mat')) - self.assertEqual(ws_output['wave'].elevation.name,'elevation') - self.assertEqual(ws_output['bodies']['body1'].name,'float') - self.assertEqual(ws_output['ptos'].name,'PTO1') - self.assertEqual(ws_output['constraints'].name,'Constraint1') - self.assertEqual(len(ws_output['mooring']),0) - self.assertEqual(len(ws_output['moorDyn']),0) - self.assertEqual(len(ws_output['ptosim']),0) - - ### WEC-Sim data, with mooring - def test_read_wecSim_with_mooring(self): - ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3MooringMatrix_matlabWorkspace_structure.mat')) - self.assertEqual(ws_output['wave'].elevation.name,'elevation') - self.assertEqual(ws_output['bodies']['body1'].name,'float') - self.assertEqual(ws_output['ptos'].name,'PTO1') - self.assertEqual(ws_output['constraints'].name,'Constraint1') - self.assertEqual(len(ws_output['mooring']),40001) - self.assertEqual(len(ws_output['moorDyn']),0) - self.assertEqual(len(ws_output['ptosim']),0) - - ### WEC-Sim data, with moorDyn - def test_read_wecSim_with_moorDyn(self): - ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3MoorDyn_matlabWorkspace_structure.mat')) - self.assertEqual(ws_output['wave'].elevation.name,'elevation') - self.assertEqual(ws_output['bodies']['body1'].name,'float') - self.assertEqual(ws_output['ptos'].name,'PTO1') - self.assertEqual(ws_output['constraints'].name,'Constraint1') - self.assertEqual(len(ws_output['mooring']),40001) - self.assertEqual(len(ws_output['moorDyn']),7) - self.assertEqual(len(ws_output['ptosim']),0) - -class TestWPTOhindcast(unittest.TestCase): - - @classmethod - def setUpClass(self): - - self.my_swh = pd.read_csv(join(datadir,'hindcast/multi_year_hindcast.csv'),index_col = 'time_index', - names = ['time_index','significant_wave_height_0'],header = 0, - dtype = {'significant_wave_height_0':'float32'}) - self.my_swh.index = pd.to_datetime(self.my_swh.index) - - self.ml = pd.read_csv(join(datadir,'hindcast/single_year_hindcast_multiloc.csv'),index_col = 'time_index', - names = ['time_index','mean_absolute_period_0','mean_absolute_period_1'], - header = 0, dtype = {'mean_absolute_period_0':'float32', - 'mean_absolute_period_1':'float32'}) - self.ml.index = pd.to_datetime(self.ml.index) - - self.mp = 
pd.read_csv(join(datadir,'hindcast/multiparm.csv'),index_col = 'time_index', - names = ['time_index','energy_period_0','mean_zero-crossing_period_0'], - header = 0, dtype = {'energy_period_0':'float32', - 'mean_zero-crossing_period_0':'float32'}) - self.mp.index = pd.to_datetime(self.mp.index) - - self.ml_meta = pd.read_csv(join(datadir,'hindcast/multiloc_meta.csv'),index_col = 0, - names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' - ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' - ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) - - self.my_meta = pd.read_csv(join(datadir,'hindcast/multi_year_meta.csv'),index_col = 0, - names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' - ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' - ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) - - self.mp_meta = pd.read_csv(join(datadir,'hindcast/multiparm_meta.csv'),index_col = 0, - names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' - ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' - ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) - - my_dir = pd.read_csv(join(datadir,'hindcast/multi_year_dir.csv'),header = 0, - dtype={'87':'float32','58':'float32'}) - my_dir['time_index'] = pd.to_datetime(my_dir['time_index']) - my_dir = my_dir.set_index(['time_index','frequency','direction']) - self.my_dir = my_dir.to_xarray() - - self.my_dir_meta = pd.read_csv(join(datadir,'hindcast/multi_year_dir_meta.csv'), - names = ['water_depth','latitude','longitude','distance_to_shore','timezone' - ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' - ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) - - @classmethod - def tearDownClass(self): - pass - - ## WPTO hindcast data - # only run test for one version of python per to not spam the server - # yet keep coverage high on each test - if float(sys.version[0:3]) == 3.7: - def test_multi_year(self): - data_type = '3-hour' - years = [1990,1992] - lat_lon = (44.624076,-124.280097) - parameters = 'significant_wave_height' - (wave_multiyear, - meta) = (wave.io.hindcast - .request_wpto_point_data(data_type,parameters, - lat_lon,years)) - assert_frame_equal(self.my_swh,wave_multiyear) - assert_frame_equal(self.my_meta,meta) - - elif float(sys.version[0:3]) == 3.8: - # wait five minute to ensure python 3.7 call is complete - time.sleep(300) - def test_multi_loc(self): - data_type = '3-hour' - years = [1995] - lat_lon = ((44.624076,-124.280097),(43.489171,-125.152137)) - parameters = 'mean_absolute_period' - wave_multiloc, meta=(wave.io.hindcast - .request_wpto_point_data(data_type, - parameters,lat_lon,years)) - (dir_multiyear, - meta_dir)=(wave.io.hindcast - .request_wpto_directional_spectrum(lat_lon,year='1995')) - dir_multiyear = dir_multiyear.sel(time_index=slice(dir_multiyear.time_index[0],dir_multiyear.time_index[99])) - dir_multiyear = dir_multiyear.rename_vars({87:'87',58:'58'}) - - assert_frame_equal(self.ml,wave_multiloc) - assert_frame_equal(self.ml_meta,meta) - xrt.assert_allclose(self.my_dir,dir_multiyear) - assert_frame_equal(self.my_dir_meta,meta_dir) - - elif float(sys.version[0:3]) == 3.9: - # wait ten minutes to ensure python 3.7 and 3.8 call is complete - time.sleep(500) - - def test_multi_parm(self): - data_type = '1-hour' - years = [1996] - 
lat_lon = (44.624076,-124.280097) - parameters = ['energy_period','mean_zero-crossing_period'] - wave_multiparm, meta= wave.io.hindcast.request_wpto_point_data(data_type, - parameters,lat_lon,years) - - assert_frame_equal(self.mp,wave_multiparm) - assert_frame_equal(self.mp_meta,meta) - -class TestSWAN(unittest.TestCase): - - @classmethod - def setUpClass(self): - swan_datadir = join(datadir,'swan') - self.table_file = join(swan_datadir,'SWANOUT.DAT') - self.swan_block_mat_file = join(swan_datadir,'SWANOUT.MAT') - self.swan_block_txt_file = join(swan_datadir,'SWANOUTBlock.DAT') - self.expected_table = pd.read_csv(self.table_file, sep='\s+', comment='%', - names=['Xp', 'Yp', 'Hsig', 'Dir', 'RTpeak', 'TDir']) - - @classmethod - def tearDownClass(self): - pass - - def test_read_table(self): - swan_table, swan_meta = wave.io.swan.read_table(self.table_file) - assert_frame_equal(self.expected_table, swan_table) - - def test_read_block_mat(self): - swanBlockMat, metaDataMat = wave.io.swan.read_block(self.swan_block_mat_file ) - self.assertEqual(len(swanBlockMat), 4) - self.assertAlmostEqual(self.expected_table['Hsig'].sum(), - swanBlockMat['Hsig'].sum().sum(), places=1) - - def test_read_block_txt(self): - swanBlockTxt, metaData = wave.io.swan.read_block(self.swan_block_txt_file) - self.assertEqual(len(swanBlockTxt), 4) - sumSum = swanBlockTxt['Significant wave height'].sum().sum() - self.assertAlmostEqual(self.expected_table['Hsig'].sum(), - sumSum, places=-2) - - def test_block_to_table(self): - x=np.arange(5) - y=np.arange(5,10) - df = pd.DataFrame(np.random.rand(5,5), columns=x, index=y) - dff = wave.io.swan.block_to_table(df) - self.assertEqual(dff.shape, (len(x)*len(y), 3)) - self.assertTrue(all(dff.x.unique() == np.unique(x))) - - def test_dictionary_of_block_to_table(self): - x=np.arange(5) - y=np.arange(5,10) - df = pd.DataFrame(np.random.rand(5,5), columns=x, index=y) - keys = ['data1', 'data2'] - data = [df, df] - dict_of_dfs = dict(zip(keys,data)) - dff = wave.io.swan.dictionary_of_block_to_table(dict_of_dfs) - self.assertEqual(dff.shape, (len(x)*len(y), 2+len(keys))) - self.assertTrue(all(dff.x.unique() == np.unique(x))) - for key in keys: - self.assertTrue(key in dff.keys()) - -class TestIOcdip(unittest.TestCase): - - @classmethod - def setUpClass(self): - b067_1996='http://thredds.cdip.ucsd.edu/thredds/dodsC/cdip/' + \ - 'archive/067p1/067p1_d04.nc' - self.test_nc = netCDF4.Dataset(b067_1996) - - self.vars2D = [ 'waveEnergyDensity', 'waveMeanDirection', - 'waveA1Value', 'waveB1Value', 'waveA2Value', - 'waveB2Value', 'waveCheckFactor', 'waveSpread', - 'waveM2Value', 'waveN2Value'] - - @classmethod - def tearDownClass(self): - pass - - def test_validate_date(self): - date='2013-11-12' - start_date = wave.io.cdip._validate_date(date) - assert isinstance(start_date, datetime) - - date='11-12-2012' - self.assertRaises(ValueError, wave.io.cdip._validate_date, date) - - def test_request_netCDF_historic(self): - station_number='067' - nc = wave.io.cdip.request_netCDF(station_number, 'historic') - isinstance(nc, netCDF4.Dataset) - - def test_request_netCDF_realtime(self): - station_number='067' - nc = wave.io.cdip.request_netCDF(station_number, 'realtime') - isinstance(nc, netCDF4.Dataset) - - - def test_start_and_end_of_year(self): - year = 2020 - start_day, end_day = wave.io.cdip._start_and_end_of_year(year) - - assert isinstance(start_day, datetime) - assert isinstance(end_day, datetime) - - expected_start = datetime(year,1,1) - expected_end = datetime(year,12,31) - - 
self.assertEqual(start_day, expected_start) - self.assertEqual(end_day, expected_end) - - def test_dates_to_timestamp(self): - - start_date='1996-10-02' - end_date='1996-10-20' - - start_stamp, end_stamp = wave.io.cdip._dates_to_timestamp(self.test_nc, - start_date=start_date, end_date=end_date) - - start_dt = datetime.utcfromtimestamp(start_stamp) - end_dt = datetime.utcfromtimestamp(end_stamp) - - self.assertTrue(start_dt.strftime('%Y-%m-%d') == start_date) - self.assertTrue(end_dt.strftime('%Y-%m-%d') == end_date) - - def test_get_netcdf_variables_all2Dvars(self): - data = wave.io.cdip.get_netcdf_variables(self.test_nc, - all_2D_variables=True) - returned_keys = [key for key in data['data']['wave2D'].keys()] - self.assertTrue( returned_keys == self.vars2D) - - def test_get_netcdf_variables_params(self): - parameters =['waveHs', 'waveTp','notParam', 'waveMeanDirection'] - data = wave.io.cdip.get_netcdf_variables(self.test_nc, - parameters=parameters) - - returned_keys_1D = [key for key in data['data']['wave'].keys()] - returned_keys_2D = [key for key in data['data']['wave2D'].keys()] - returned_keys_metadata = [key for key in data['metadata']['wave']] - - self.assertTrue( returned_keys_1D == ['waveHs', 'waveTp']) - self.assertTrue( returned_keys_2D == ['waveMeanDirection']) - self.assertTrue( returned_keys_metadata == ['waveFrequency']) - - - def test_get_netcdf_variables_time_slice(self): - start_date='1996-10-01' - end_date='1996-10-31' - - data = wave.io.cdip.get_netcdf_variables(self.test_nc, - start_date=start_date, end_date=end_date, - parameters='waveHs') - - start_dt = datetime.strptime(start_date, '%Y-%m-%d') - end_dt = datetime.strptime(end_date, '%Y-%m-%d') - - self.assertTrue(data['data']['wave'].index[-1] < end_dt) - self.assertTrue(data['data']['wave'].index[0] > start_dt) - - - def test_request_parse_workflow_multiyear(self): - station_number = '067' - year1=2011 - year2=2013 - years = [year1, year2] - parameters =['waveHs', 'waveMeanDirection', 'waveA1Value'] - data = wave.io.cdip.request_parse_workflow(station_number=station_number, - years=years, parameters =parameters ) - - expected_index0 = datetime(year1,1,1) - expected_index_final = datetime(year2,12,31) - - wave1D = data['data']['wave'] - self.assertEqual(wave1D.index[0].floor('d').to_pydatetime(), expected_index0) - - self.assertEqual(wave1D.index[-1].floor('d').to_pydatetime(), expected_index_final) - - for key,wave2D in data['data']['wave2D'].items(): - self.assertEqual(wave2D.index[0].floor('d').to_pydatetime(), expected_index0) - self.assertEqual(wave2D.index[-1].floor('d').to_pydatetime(), expected_index_final) - - - def test_plot_boxplot(self): - filename = abspath(join(testdir, 'wave_plot_boxplot.png')) - if isfile(filename): - os.remove(filename) - - station_number = '067' - year = 2011 - data = wave.io.cdip.request_parse_workflow(station_number=station_number,years=year, - parameters =['waveHs'], - all_2D_variables=False) - - plt.figure() - wave.graphics.plot_boxplot(data['data']['wave']['waveHs']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - - def test_plot_compendium(self): - filename = abspath(join(testdir, 'wave_plot_boxplot.png')) - if isfile(filename): - os.remove(filename) - - station_number = '067' - year = 2011 - data = wave.io.cdip.request_parse_workflow(station_number=station_number,years=year, - parameters =['waveHs', 'waveTp', 'waveDp'], - all_2D_variables=False) - - plt.figure() - wave.graphics.plot_compendium(data['data']['wave']['waveHs'], - 
data['data']['wave']['waveTp'], data['data']['wave']['waveDp'] )
-        plt.savefig(filename, format='png')
-        plt.close()
-
-        self.assertTrue(isfile(filename))
-
-class TestPlotResouceCharacterizations(unittest.TestCase):
-
-    @classmethod
-    def setUpClass(self):
-        f_name= 'Hm0_Te_46022.json'
-        self.Hm0Te = pd.read_json(join(datadir,f_name))
-    @classmethod
-    def tearDownClass(self):
-        pass
-    def test_plot_avg_annual_energy_matrix(self):
-
-        filename = abspath(join(testdir, 'avg_annual_scatter_table.png'))
-        if isfile(filename):
-            os.remove(filename)
-
-        Hm0Te = self.Hm0Te
-        Hm0Te.drop(Hm0Te[Hm0Te.Hm0 > 20].index, inplace=True)
-        J = np.random.random(len(Hm0Te))*100
-
-        plt.figure()
-        fig = wave.graphics.plot_avg_annual_energy_matrix(Hm0Te.Hm0,
-            Hm0Te.Te, J, Hm0_bin_size=0.5, Te_bin_size=1)
-        plt.savefig(filename, format='png')
-        plt.close()
-
-        self.assertTrue(isfile(filename))
-
-    def test_plot_monthly_cumulative_distribution(self):
-
-        filename = abspath(join(testdir, 'monthly_cumulative_distribution.png'))
-        if isfile(filename):
-            os.remove(filename)
-
-        a = pd.date_range(start='1/1/2010', periods=10000, freq='h')
-        S = pd.Series(np.random.random(len(a)) , index=a)
-        ax=wave.graphics.monthly_cumulative_distribution(S)
-        plt.savefig(filename, format='png')
-        plt.close()
-
-        self.assertTrue(isfile(filename))
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/mhkit/tests/wave/test_WECSim.py b/mhkit/tests/wave/test_WECSim.py
new file mode 100644
index 000000000..8737279be
--- /dev/null
+++ b/mhkit/tests/wave/test_WECSim.py
@@ -0,0 +1,73 @@
+from os.path import abspath, dirname, join, isfile, normpath, relpath
+from pandas.testing import assert_frame_equal
+from numpy.testing import assert_allclose
+from scipy.interpolate import interp1d
+from random import seed, randint
+import matplotlib.pylab as plt
+from datetime import datetime
+import xarray.testing as xrt
+import mhkit.wave as wave
+from io import StringIO
+import pandas as pd
+import numpy as np
+import contextlib
+import unittest
+import netCDF4
+import inspect
+import pickle
+import time
+import json
+import sys
+import os
+
+
+testdir = dirname(abspath(__file__))
+datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+
+
+class TestWECSim(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(self):
+        pass
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    ### WEC-Sim data, no mooring
+    def test_read_wecSim_no_mooring(self):
+        ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3_matlabWorkspace_structure.mat'))
+        self.assertEqual(ws_output['wave'].elevation.name,'elevation')
+        self.assertEqual(ws_output['bodies']['body1'].name,'float')
+        self.assertEqual(ws_output['ptos'].name,'PTO1')
+        self.assertEqual(ws_output['constraints'].name,'Constraint1')
+        self.assertEqual(len(ws_output['mooring']),0)
+        self.assertEqual(len(ws_output['moorDyn']),0)
+        self.assertEqual(len(ws_output['ptosim']),0)
+
+    ### WEC-Sim data, with mooring
+    def test_read_wecSim_with_mooring(self):
+        ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3MooringMatrix_matlabWorkspace_structure.mat'))
+        self.assertEqual(ws_output['wave'].elevation.name,'elevation')
+        self.assertEqual(ws_output['bodies']['body1'].name,'float')
+        self.assertEqual(ws_output['ptos'].name,'PTO1')
+        self.assertEqual(ws_output['constraints'].name,'Constraint1')
+        self.assertEqual(len(ws_output['mooring']),40001)
+        self.assertEqual(len(ws_output['moorDyn']),0)
+        self.assertEqual(len(ws_output['ptosim']),0)
+
+    ### WEC-Sim data, with moorDyn
+    def 
test_read_wecSim_with_moorDyn(self): + ws_output = wave.io.wecsim.read_output(join(datadir, 'RM3MoorDyn_matlabWorkspace_structure.mat')) + self.assertEqual(ws_output['wave'].elevation.name,'elevation') + self.assertEqual(ws_output['bodies']['body1'].name,'float') + self.assertEqual(ws_output['ptos'].name,'PTO1') + self.assertEqual(ws_output['constraints'].name,'Constraint1') + self.assertEqual(len(ws_output['mooring']),40001) + self.assertEqual(len(ws_output['moorDyn']),7) + self.assertEqual(len(ws_output['ptosim']),0) + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_WPTOhindcast.py b/mhkit/tests/wave/test_WPTOhindcast.py new file mode 100644 index 000000000..662a529ea --- /dev/null +++ b/mhkit/tests/wave/test_WPTOhindcast.py @@ -0,0 +1,135 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestWPTOhindcast(unittest.TestCase): + + @classmethod + def setUpClass(self): + + self.my_swh = pd.read_csv(join(datadir,'hindcast/multi_year_hindcast.csv'),index_col = 'time_index', + names = ['time_index','significant_wave_height_0'],header = 0, + dtype = {'significant_wave_height_0':'float32'}) + self.my_swh.index = pd.to_datetime(self.my_swh.index) + + self.ml = pd.read_csv(join(datadir,'hindcast/single_year_hindcast_multiloc.csv'),index_col = 'time_index', + names = ['time_index','mean_absolute_period_0','mean_absolute_period_1'], + header = 0, dtype = {'mean_absolute_period_0':'float32', + 'mean_absolute_period_1':'float32'}) + self.ml.index = pd.to_datetime(self.ml.index) + + self.mp = pd.read_csv(join(datadir,'hindcast/multiparm.csv'),index_col = 'time_index', + names = ['time_index','energy_period_0','mean_zero-crossing_period_0'], + header = 0, dtype = {'energy_period_0':'float32', + 'mean_zero-crossing_period_0':'float32'}) + self.mp.index = pd.to_datetime(self.mp.index) + + self.ml_meta = pd.read_csv(join(datadir,'hindcast/multiloc_meta.csv'),index_col = 0, + names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' + ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' + ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) + + self.my_meta = pd.read_csv(join(datadir,'hindcast/multi_year_meta.csv'),index_col = 0, + names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' + ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' + ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) + + self.mp_meta = pd.read_csv(join(datadir,'hindcast/multiparm_meta.csv'),index_col = 0, + names = [None,'water_depth','latitude','longitude','distance_to_shore','timezone' + ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32' + ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'}) + + my_dir = pd.read_csv(join(datadir,'hindcast/multi_year_dir.csv'),header = 0, + 
dtype={'87':'float32','58':'float32'})
+        my_dir['time_index'] = pd.to_datetime(my_dir['time_index'])
+        my_dir = my_dir.set_index(['time_index','frequency','direction'])
+        self.my_dir = my_dir.to_xarray()
+
+        self.my_dir_meta = pd.read_csv(join(datadir,'hindcast/multi_year_dir_meta.csv'),
+        names = ['water_depth','latitude','longitude','distance_to_shore','timezone'
+        ,'jurisdiction'],header = 0, dtype = {'water_depth':'float32','latitude':'float32'
+        ,'longitude':'float32','distance_to_shore':'float32','timezone':'int16'})
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    ## WPTO hindcast data
+    # only run each test on one version of python so as not to spam the server
+    # yet keep coverage high on each test
+    if float(sys.version[0:3]) == 3.7:
+        def test_multi_year(self):
+            data_type = '3-hour'
+            years = [1990,1992]
+            lat_lon = (44.624076,-124.280097)
+            parameters = 'significant_wave_height'
+            (wave_multiyear,
+             meta) = (wave.io.hindcast
+                      .request_wpto_point_data(data_type,parameters,
+                                               lat_lon,years))
+            assert_frame_equal(self.my_swh,wave_multiyear)
+            assert_frame_equal(self.my_meta,meta)
+
+    elif float(sys.version[0:3]) == 3.8:
+        # wait five minutes to ensure the python 3.7 call is complete
+        time.sleep(300)
+        def test_multi_loc(self):
+            data_type = '3-hour'
+            years = [1995]
+            lat_lon = ((44.624076,-124.280097),(43.489171,-125.152137))
+            parameters = 'mean_absolute_period'
+            wave_multiloc, meta=(wave.io.hindcast
+                                 .request_wpto_point_data(data_type,
+                                                          parameters,lat_lon,years))
+            (dir_multiyear,
+             meta_dir)=(wave.io.hindcast
+                        .request_wpto_directional_spectrum(lat_lon,year='1995'))
+            dir_multiyear = dir_multiyear.sel(time_index=slice(dir_multiyear.time_index[0],dir_multiyear.time_index[99]))
+            dir_multiyear = dir_multiyear.rename_vars({87:'87',58:'58'})
+
+            assert_frame_equal(self.ml,wave_multiloc)
+            assert_frame_equal(self.ml_meta,meta)
+            xrt.assert_allclose(self.my_dir,dir_multiyear)
+            assert_frame_equal(self.my_dir_meta,meta_dir)
+
+    elif float(sys.version[0:3]) == 3.9:
+        # wait ten minutes to ensure the python 3.7 and 3.8 calls are complete
+        time.sleep(500)
+
+        def test_multi_parm(self):
+            data_type = '1-hour'
+            years = [1996]
+            lat_lon = (44.624076,-124.280097)
+            parameters = ['energy_period','mean_zero-crossing_period']
+            wave_multiparm, meta= wave.io.hindcast.request_wpto_point_data(data_type,
+                parameters,lat_lon,years)
+
+            assert_frame_equal(self.mp,wave_multiparm)
+            assert_frame_equal(self.mp_meta,meta)
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/mhkit/tests/wave/test_cdip.py b/mhkit/tests/wave/test_cdip.py
new file mode 100644
index 000000000..ebfe969c5
--- /dev/null
+++ b/mhkit/tests/wave/test_cdip.py
@@ -0,0 +1,188 @@
+from os.path import abspath, dirname, join, isfile, normpath, relpath
+from pandas.testing import assert_frame_equal
+from numpy.testing import assert_allclose
+from scipy.interpolate import interp1d
+from random import seed, randint
+import matplotlib.pylab as plt
+from datetime import datetime
+import xarray.testing as xrt
+import mhkit.wave as wave
+from io import StringIO
+import pandas as pd
+import numpy as np
+import contextlib
+import unittest
+import netCDF4
+import inspect
+import pickle
+import time
+import json
+import sys
+import os
+
+
+testdir = dirname(abspath(__file__))
+datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+
+
+class TestIOcdip(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(self):
+        b067_1996='http://thredds.cdip.ucsd.edu/thredds/dodsC/cdip/' + \
+            'archive/067p1/067p1_d04.nc'
+        
self.test_nc = netCDF4.Dataset(b067_1996) + + self.vars2D = [ 'waveEnergyDensity', 'waveMeanDirection', + 'waveA1Value', 'waveB1Value', 'waveA2Value', + 'waveB2Value', 'waveCheckFactor', 'waveSpread', + 'waveM2Value', 'waveN2Value'] + + @classmethod + def tearDownClass(self): + pass + + def test_validate_date(self): + date='2013-11-12' + start_date = wave.io.cdip._validate_date(date) + assert isinstance(start_date, datetime) + + date='11-12-2012' + self.assertRaises(ValueError, wave.io.cdip._validate_date, date) + + def test_request_netCDF_historic(self): + station_number='067' + nc = wave.io.cdip.request_netCDF(station_number, 'historic') + isinstance(nc, netCDF4.Dataset) + + def test_request_netCDF_realtime(self): + station_number='067' + nc = wave.io.cdip.request_netCDF(station_number, 'realtime') + isinstance(nc, netCDF4.Dataset) + + + def test_start_and_end_of_year(self): + year = 2020 + start_day, end_day = wave.io.cdip._start_and_end_of_year(year) + + assert isinstance(start_day, datetime) + assert isinstance(end_day, datetime) + + expected_start = datetime(year,1,1) + expected_end = datetime(year,12,31) + + self.assertEqual(start_day, expected_start) + self.assertEqual(end_day, expected_end) + + def test_dates_to_timestamp(self): + + start_date='1996-10-02' + end_date='1996-10-20' + + start_stamp, end_stamp = wave.io.cdip._dates_to_timestamp(self.test_nc, + start_date=start_date, end_date=end_date) + + start_dt = datetime.utcfromtimestamp(start_stamp) + end_dt = datetime.utcfromtimestamp(end_stamp) + + self.assertTrue(start_dt.strftime('%Y-%m-%d') == start_date) + self.assertTrue(end_dt.strftime('%Y-%m-%d') == end_date) + + def test_get_netcdf_variables_all2Dvars(self): + data = wave.io.cdip.get_netcdf_variables(self.test_nc, + all_2D_variables=True) + returned_keys = [key for key in data['data']['wave2D'].keys()] + self.assertTrue( returned_keys == self.vars2D) + + def test_get_netcdf_variables_params(self): + parameters =['waveHs', 'waveTp','notParam', 'waveMeanDirection'] + data = wave.io.cdip.get_netcdf_variables(self.test_nc, + parameters=parameters) + + returned_keys_1D = [key for key in data['data']['wave'].keys()] + returned_keys_2D = [key for key in data['data']['wave2D'].keys()] + returned_keys_metadata = [key for key in data['metadata']['wave']] + + self.assertTrue( returned_keys_1D == ['waveHs', 'waveTp']) + self.assertTrue( returned_keys_2D == ['waveMeanDirection']) + self.assertTrue( returned_keys_metadata == ['waveFrequency']) + + + def test_get_netcdf_variables_time_slice(self): + start_date='1996-10-01' + end_date='1996-10-31' + + data = wave.io.cdip.get_netcdf_variables(self.test_nc, + start_date=start_date, end_date=end_date, + parameters='waveHs') + + start_dt = datetime.strptime(start_date, '%Y-%m-%d') + end_dt = datetime.strptime(end_date, '%Y-%m-%d') + + self.assertTrue(data['data']['wave'].index[-1] < end_dt) + self.assertTrue(data['data']['wave'].index[0] > start_dt) + + + def test_request_parse_workflow_multiyear(self): + station_number = '067' + year1=2011 + year2=2013 + years = [year1, year2] + parameters =['waveHs', 'waveMeanDirection', 'waveA1Value'] + data = wave.io.cdip.request_parse_workflow(station_number=station_number, + years=years, parameters =parameters ) + + expected_index0 = datetime(year1,1,1) + expected_index_final = datetime(year2,12,31) + + wave1D = data['data']['wave'] + self.assertEqual(wave1D.index[0].floor('d').to_pydatetime(), expected_index0) + + self.assertEqual(wave1D.index[-1].floor('d').to_pydatetime(), expected_index_final) + 
+        for key, wave2D in data['data']['wave2D'].items():
+            self.assertEqual(wave2D.index[0].floor('d').to_pydatetime(),
+                             expected_index0)
+            self.assertEqual(wave2D.index[-1].floor('d').to_pydatetime(),
+                             expected_index_final)
+
+
+    def test_plot_boxplot(self):
+        filename = abspath(join(testdir, 'wave_plot_boxplot.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        station_number = '067'
+        year = 2011
+        data = wave.io.cdip.request_parse_workflow(station_number=station_number,
+            years=year, parameters=['waveHs'],
+            all_2D_variables=False)
+
+        plt.figure()
+        wave.graphics.plot_boxplot(data['data']['wave']['waveHs'])
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+
+    def test_plot_compendium(self):
+        filename = abspath(join(testdir, 'wave_plot_compendium.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        station_number = '067'
+        year = 2011
+        data = wave.io.cdip.request_parse_workflow(station_number=station_number,
+            years=year, parameters=['waveHs', 'waveTp', 'waveDp'],
+            all_2D_variables=False)
+
+        plt.figure()
+        wave.graphics.plot_compendium(data['data']['wave']['waveHs'],
+            data['data']['wave']['waveTp'], data['data']['wave']['waveDp'])
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/mhkit/tests/wave/test_contours.py b/mhkit/tests/wave/test_contours.py
new file mode 100644
index 000000000..89770270f
--- /dev/null
+++ b/mhkit/tests/wave/test_contours.py
@@ -0,0 +1,269 @@
+from os.path import abspath, dirname, join, isfile, normpath, relpath
+from pandas.testing import assert_frame_equal
+from numpy.testing import assert_allclose
+from scipy.interpolate import interp1d
+from random import seed, randint
+import matplotlib.pylab as plt
+from datetime import datetime
+import xarray.testing as xrt
+import mhkit.wave as wave
+from io import StringIO
+import pandas as pd
+import numpy as np
+import contextlib
+import unittest
+import netCDF4
+import inspect
+import pickle
+import time
+import json
+import sys
+import os
+
+
+testdir = dirname(abspath(__file__))
+datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+
+
+class TestContours(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(self):
+
+        f_name = 'Hm0_Te_46022.json'
+        self.Hm0Te = pd.read_json(join(datadir, f_name))
+
+        file_loc = join(datadir, 'principal_component_analysis.pkl')
+        with open(file_loc, 'rb') as f:
+            self.pca = pickle.load(f)
+
+        file_loc = join(datadir, 'WDRT_caluculated_countours.json')
+        with open(file_loc) as f:
+            self.wdrt_copulas = json.load(f)
+
+        ndbc_46050 = pd.read_csv(join(datadir, 'NDBC46050.csv'))
+        self.wdrt_Hm0 = ndbc_46050['Hm0']
+        self.wdrt_Te = ndbc_46050['Te']
+
+        self.wdrt_dt = 3600
+        self.wdrt_period = 50
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    def test_environmental_contour(self):
+
+        Hm0Te = self.Hm0Te
+        df = Hm0Te[Hm0Te['Hm0'] < 20]
+
+        Hm0 = df.Hm0.values
+        Te = df.Te.values
+
+        dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
+        period = 100
+
+        copula = wave.contours.environmental_contours(Hm0,
+            Te, dt_ss, period, 'PCA')
+
+        Hm0_contour = copula['PCA_x1']
+        Te_contour = copula['PCA_x2']
+
+        file_loc = join(datadir, 'Hm0_Te_contours_46022.csv')
+        expected_contours = pd.read_csv(file_loc)
+        assert_allclose(expected_contours.Hm0_contour.values,
+                        Hm0_contour, rtol=1e-3)
+
+    def test__principal_component_analysis(self):
+        Hm0Te = self.Hm0Te
+        df = Hm0Te[Hm0Te['Hm0'] < 20]
+
+        Hm0 = df.Hm0.values
+        Te = df.Te.values
+
+        PCA = (wave.contours
+               ._principal_component_analysis(Hm0, Te, bin_size=250))
+
+        assert_allclose(PCA['principal_axes'],
+                        self.pca['principal_axes'])
+        self.assertAlmostEqual(PCA['shift'], self.pca['shift'])
+        self.assertAlmostEqual(PCA['x1_fit']['mu'],
+                               self.pca['x1_fit']['mu'])
+        self.assertAlmostEqual(PCA['mu_fit'].slope,
+                               self.pca['mu_fit'].slope)
+        self.assertAlmostEqual(PCA['mu_fit'].intercept,
+                               self.pca['mu_fit'].intercept)
+        assert_allclose(PCA['sigma_fit']['x'],
+                        self.pca['sigma_fit']['x'])
+
+    def test_plot_environmental_contour(self):
+        file_loc = join(testdir, 'wave_plot_environmental_contour.png')
+        filename = abspath(file_loc)
+        if isfile(filename):
+            os.remove(filename)
+
+        Hm0Te = self.Hm0Te
+        df = Hm0Te[Hm0Te['Hm0'] < 20]
+
+        Hm0 = df.Hm0.values
+        Te = df.Te.values
+
+        dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
+        time_R = 100
+
+        copulas = wave.contours.environmental_contours(Hm0, Te, dt_ss,
+                                                       time_R, 'PCA')
+
+        Hm0_contour = copulas['PCA_x1']
+        Te_contour = copulas['PCA_x2']
+
+        plt.figure()
+        (wave.graphics
+         .plot_environmental_contour(Te, Hm0,
+                                     Te_contour, Hm0_contour,
+                                     data_label='NDBC 46022',
+                                     contour_label='100-year Contour',
+                                     x_label='Te [s]',
+                                     y_label='Hm0 [m]')
+         )
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+    def test_plot_environmental_contour_multiyear(self):
+        filename = abspath(join(testdir,
+            'wave_plot_environmental_contour_multiyear.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        Hm0Te = self.Hm0Te
+        df = Hm0Te[Hm0Te['Hm0'] < 20]
+
+        Hm0 = df.Hm0.values
+        Te = df.Te.values
+
+        dt_ss = (Hm0Te.index[2]-Hm0Te.index[1]).seconds
+
+        time_R = [100, 105, 110, 120, 150]
+
+        Hm0s = []
+        Tes = []
+        for period in time_R:
+            copulas = (wave.contours
+                       .environmental_contours(Hm0, Te, dt_ss, period, 'PCA'))
+
+            Hm0s.append(copulas['PCA_x1'])
+            Tes.append(copulas['PCA_x2'])
+
+        contour_label = [f'{year}-year Contour' for year in time_R]
+        plt.figure()
+        (wave.graphics
+         .plot_environmental_contour(Te, Hm0,
+                                     Tes, Hm0s,
+                                     data_label='NDBC 46022',
+                                     contour_label=contour_label,
+                                     x_label='Te [s]',
+                                     y_label='Hm0 [m]')
+         )
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+    def test_standard_copulas(self):
+        copulas = (wave.contours
+                   .environmental_contours(self.wdrt_Hm0, self.wdrt_Te,
+                       self.wdrt_dt, self.wdrt_period,
+                       method=['gaussian', 'gumbel', 'clayton'])
+                   )
+
+        # WDRT slightly varies the Rosenblatt copula parameters from
+        # the default parameters of the other copulas
+        rosen = (wave.contours
+                 .environmental_contours(self.wdrt_Hm0, self.wdrt_Te,
+                     self.wdrt_dt, self.wdrt_period, method=['rosenblatt'],
+                     min_bin_count=50, initial_bin_max_val=0.5,
+                     bin_val_size=0.25))
+        copulas['rosenblatt_x1'] = rosen['rosenblatt_x1']
+        copulas['rosenblatt_x2'] = rosen['rosenblatt_x2']
+
+        methods = ['gaussian', 'gumbel', 'clayton', 'rosenblatt']
+        close = []
+        for method in methods:
+            close.append(np.allclose(copulas[f'{method}_x1'],
+                                     self.wdrt_copulas[f'{method}_x1']))
+            close.append(np.allclose(copulas[f'{method}_x2'],
+                                     self.wdrt_copulas[f'{method}_x2']))
+        self.assertTrue(all(close))
+
+    def test_nonparametric_copulas(self):
+        methods = ['nonparametric_gaussian', 'nonparametric_clayton',
+                   'nonparametric_gumbel']
+
+        np_copulas = wave.contours.environmental_contours(self.wdrt_Hm0,
+            self.wdrt_Te, self.wdrt_dt, self.wdrt_period, method=methods)
+
+        close = []
+        for method in methods:
close.append(np.allclose(np_copulas[f'{method}_x1'], + self.wdrt_copulas[f'{method}_x1'], atol=0.13)) + close.append(np.allclose(np_copulas[f'{method}_x2'], + self.wdrt_copulas[f'{method}_x2'], atol=0.13)) + self.assertTrue(all(close)) + + def test_kde_copulas(self): + kde_copula = wave.contours.environmental_contours(self.wdrt_Hm0, + self.wdrt_Te, self.wdrt_dt, self.wdrt_period, + method=['bivariate_KDE'], bandwidth=[0.23, 0.23]) + log_kde_copula = (wave.contours + .environmental_contours(self.wdrt_Hm0, self.wdrt_Te, + self.wdrt_dt, self.wdrt_period, method=['bivariate_KDE_log'], bandwidth=[0.02, 0.11]) + ) + + close= [ np.allclose(kde_copula['bivariate_KDE_x1'], + self.wdrt_copulas['bivariate_KDE_x1']), + np.allclose(kde_copula['bivariate_KDE_x2'], + self.wdrt_copulas['bivariate_KDE_x2']), + np.allclose(log_kde_copula['bivariate_KDE_log_x1'], + self.wdrt_copulas['bivariate_KDE_log_x1']), + np.allclose(log_kde_copula['bivariate_KDE_log_x2'], + self.wdrt_copulas['bivariate_KDE_log_x2'])] + self.assertTrue(all(close)) + + def test_samples_contours(self): + te_samples = np.array([10, 15, 20]) + hs_samples_0 = np.array([8.56637939, 9.27612515, 8.70427774]) + hs_contour = np.array(self.wdrt_copulas["gaussian_x1"]) + te_contour = np.array(self.wdrt_copulas["gaussian_x2"]) + hs_samples = wave.contours.samples_contour( + te_samples, te_contour, hs_contour) + assert_allclose(hs_samples, hs_samples_0) + + def test_samples_seastate(self): + hs_0 = np.array([5.91760129, 4.55185088, 1.41144991, 12.64443154, + 7.89753791, 0.93890797]) + te_0 = np.array([14.24199604, 8.25383556, 6.03901866, 16.9836369, + 9.51967777, 3.46969355]) + w_0 = np.array([2.18127398e-01, 2.18127398e-01, 2.18127398e-01, + 2.45437862e-07, 2.45437862e-07, 2.45437862e-07]) + + df = self.Hm0Te[self.Hm0Te['Hm0'] < 20] + dt_ss = (self.Hm0Te.index[2]-self.Hm0Te.index[1]).seconds + points_per_interval = 3 + return_periods = np.array([50, 100]) + np.random.seed(0) + hs, te, w = wave.contours.samples_full_seastate( + df.Hm0.values, df.Te.values, points_per_interval, return_periods, + dt_ss) + assert_allclose(hs, hs_0) + assert_allclose(te, te_0) + assert_allclose(w, w_0) + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_ndbc.py b/mhkit/tests/wave/test_ndbc.py new file mode 100644 index 000000000..6b7483034 --- /dev/null +++ b/mhkit/tests/wave/test_ndbc.py @@ -0,0 +1,180 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestIOndbc(unittest.TestCase): + + @classmethod + def setUpClass(self): + self.expected_columns_metRT = ['WDIR', 'WSPD', 'GST', 'WVHT', 'DPD', + 'APD', 'MWD', 'PRES', 'ATMP', 'WTMP', 'DEWP', 'VIS', 'PTDY', 'TIDE'] + self.expected_units_metRT = {'WDIR': 'degT', 'WSPD': 'm/s', 'GST': 'm/s', + 'WVHT': 'm', 'DPD': 'sec', 'APD': 'sec', 'MWD': 'degT', 'PRES': 'hPa', + 'ATMP': 'degC', 'WTMP': 'degC', 'DEWP': 'degC', 'VIS': 'nmi', + 'PTDY': 'hPa', 'TIDE': 'ft'} + + 
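+        # Historical (QC'd) met files drop the realtime-only PTDY column
+        # and report MWD in 'deg' rather than 'degT'.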
+        self.expected_columns_metH = ['WDIR', 'WSPD', 'GST', 'WVHT', 'DPD',
+            'APD', 'MWD', 'PRES', 'ATMP', 'WTMP', 'DEWP', 'VIS', 'TIDE']
+        self.expected_units_metH = {'WDIR': 'degT', 'WSPD': 'm/s', 'GST': 'm/s',
+            'WVHT': 'm', 'DPD': 'sec', 'APD': 'sec', 'MWD': 'deg', 'PRES': 'hPa',
+            'ATMP': 'degC', 'WTMP': 'degC', 'DEWP': 'degC', 'VIS': 'nmi',
+            'TIDE': 'ft'}
+        self.filenames = ['46042w1996.txt.gz',
+                          '46029w1997.txt.gz',
+                          '46029w1998.txt.gz']
+        self.swden = pd.read_csv(join(datadir, self.filenames[0]), sep=r'\s+',
+                                 compression='gzip')
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    ### Realtime data
+    def test_ndbc_read_realtime_met(self):
+        data, units = wave.io.ndbc.read_file(join(datadir, '46097.txt'))
+        expected_index0 = datetime(2019, 4, 2, 13, 50)
+        self.assertSetEqual(set(data.columns), set(self.expected_columns_metRT))
+        self.assertEqual(data.index[0], expected_index0)
+        self.assertEqual(data.shape, (6490, 14))
+        self.assertEqual(units, self.expected_units_metRT)
+
+    ### Historical data
+    def test_ndbc_read_historical_met(self):
+        # QC'd monthly data, Aug 2019
+        data, units = wave.io.ndbc.read_file(join(datadir, '46097h201908qc.txt'))
+        expected_index0 = datetime(2019, 8, 1, 0, 0)
+        self.assertSetEqual(set(data.columns), set(self.expected_columns_metH))
+        self.assertEqual(data.index[0], expected_index0)
+        self.assertEqual(data.shape, (4464, 13))
+        self.assertEqual(units, self.expected_units_metH)
+
+    ### Spectral data
+    def test_ndbc_read_spectral(self):
+        data, units = wave.io.ndbc.read_file(join(datadir, 'data.txt'))
+        self.assertEqual(data.shape, (743, 47))
+        self.assertEqual(units, None)
+
+    ### Continuous wind data
+    def test_ndbc_read_cwind_no_units(self):
+        data, units = wave.io.ndbc.read_file(join(datadir, '42a01c2003.txt'))
+        self.assertEqual(data.shape, (4320, 5))
+        self.assertEqual(units, None)
+
+    def test_ndbc_read_cwind_units(self):
+        data, units = wave.io.ndbc.read_file(join(datadir, '46002c2016.txt'))
+        self.assertEqual(data.shape, (28468, 5))
+        self.assertEqual(units, wave.io.ndbc.parameter_units('cwind'))
+
+    def test_ndbc_available_data(self):
+        data = wave.io.ndbc.available_data('swden', buoy_number='46029')
+        cols = data.columns.tolist()
+        exp_cols = ['id', 'year', 'filename']
+        self.assertEqual(cols, exp_cols)
+
+        years = [int(year) for year in data.year.tolist()]
+        exp_years = [*range(1996, 1996+len(years))]
+        self.assertEqual(years, exp_years)
+        self.assertEqual(data.shape, (len(data), 3))
+
+    def test__ndbc_parse_filenames(self):
+        filenames = pd.Series(self.filenames)
+        buoys = wave.io.ndbc._parse_filenames('swden', filenames)
+        years = buoys.year.tolist()
+        numbers = buoys.id.tolist()
+        fnames = buoys.filename.tolist()
+
+        self.assertEqual(buoys.shape, (len(filenames), 3))
+        self.assertListEqual(years, ['1996', '1997', '1998'])
+        self.assertListEqual(numbers, ['46042', '46029', '46029'])
+        self.assertListEqual(fnames, self.filenames)
+
+    def test_ndbc_request_data(self):
+        filenames = pd.Series(self.filenames[0])
+        ndbc_data = wave.io.ndbc.request_data('swden', filenames)
+        self.assertTrue(self.swden.equals(ndbc_data['1996']))
+
+    def test_ndbc_request_data_from_dataframe(self):
+        filenames = pd.DataFrame(pd.Series(data=self.filenames[0]))
+        ndbc_data = wave.io.ndbc.request_data('swden', filenames)
+        assert_frame_equal(self.swden, ndbc_data['1996'])
+
+    def test_ndbc_request_data_filenames_length(self):
+        with self.assertRaises(AssertionError):
+            wave.io.ndbc.request_data('swden', pd.Series(dtype=float))
+
+    def test_ndbc_to_datetime_index(self):
+        dt = 
wave.io.ndbc.to_datetime_index('swden', self.swden) + self.assertEqual(type(dt.index), pd.DatetimeIndex) + self.assertFalse({'YY','MM','DD','hh'}.issubset(dt.columns)) + + def test_ndbc_request_data_empty_file(self): + temp_stdout = StringIO() + # known empty file. If NDBC replaces, this test may fail. + filename = "42008h1984.txt.gz" + buoy_id='42008' + year = '1984' + with contextlib.redirect_stdout(temp_stdout): + wave.io.ndbc.request_data('stdmet', pd.Series(filename)) + output = temp_stdout.getvalue().strip() + msg = (f'The NDBC buoy {buoy_id} for year {year} with ' + f'filename {filename} is empty or missing ' + 'data. Please omit this file from your data ' + 'request in the future.') + self.assertEqual(output, msg) + + def test_ndbc_request_multiple_files_with_empty_file(self): + temp_stdout = StringIO() + # known empty file. If NDBC replaces, this test may fail. + empty_file = '42008h1984.txt.gz' + working_file = '46042h1996.txt.gz' + filenames = pd.Series([empty_file, working_file]) + with contextlib.redirect_stdout(temp_stdout): + ndbc_data =wave.io.ndbc.request_data('stdmet', filenames) + self.assertEqual(1, len(ndbc_data)) + + def test_ndbc_dates_to_datetime(self): + dt = wave.io.ndbc.dates_to_datetime('swden', self.swden) + self.assertEqual(datetime(1996, 1, 1, 1, 0), dt[1]) + + def test_date_string_to_datetime(self): + swden = self.swden.copy(deep=True) + swden['mm'] = np.zeros(len(swden)).astype(int).astype(str) + year_string='YY' + year_fmt='%y' + parse_columns = [year_string, 'MM', 'DD', 'hh', 'mm'] + df = wave.io.ndbc._date_string_to_datetime(swden, parse_columns, + year_fmt) + dt = df['date'] + self.assertEqual(datetime(1996, 1, 1, 1, 0), dt[1]) + + def test_parameter_units(self): + parameter='swden' + units = wave.io.ndbc.parameter_units(parameter) + self.assertEqual(units[parameter], '(m*m)/Hz') + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_performance.py b/mhkit/tests/wave/test_performance.py new file mode 100644 index 000000000..0a30906ba --- /dev/null +++ b/mhkit/tests/wave/test_performance.py @@ -0,0 +1,127 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestPerformance(unittest.TestCase): + + @classmethod + def setUpClass(self): + np.random.seed(123) + Hm0 = np.random.rayleigh(4, 100000) + Te = np.random.normal(4.5, .8, 100000) + P = np.random.normal(200, 40, 100000) + J = np.random.normal(300, 10, 100000) + ndbc_data_file = join(datadir,'data.txt') + [raw_ndbc_data, meta] = wave.io.ndbc.read_file(ndbc_data_file) + self.S = raw_ndbc_data.T + + self.data = pd.DataFrame({'Hm0': Hm0, 'Te': Te, 'P': P,'J': J}) + self.Hm0_bins = np.arange(0,19,0.5) + self.Te_bins = np.arange(0,9,1) + self.expected_stats = ["mean","std","median","count","sum","min","max","freq"] + + @classmethod + def tearDownClass(self): + pass + + def test_capture_length(self): + L = wave.performance.capture_length(self.data['P'], 
self.data['J']) + L_stats = wave.performance.statistics(L) + + self.assertAlmostEqual(L_stats['mean'], 0.6676, 3) + + def test_capture_length_matrix(self): + L = wave.performance.capture_length(self.data['P'], self.data['J']) + LM = wave.performance.capture_length_matrix(self.data['Hm0'], self.data['Te'], + L, 'std', self.Hm0_bins, self.Te_bins) + + self.assertEqual(LM.shape, (38,9)) + self.assertEqual(LM.isna().sum().sum(), 131) + + def test_wave_energy_flux_matrix(self): + JM = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], + self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) + + self.assertEqual(JM.shape, (38,9)) + self.assertEqual(JM.isna().sum().sum(), 131) + + def test_power_matrix(self): + L = wave.performance.capture_length(self.data['P'], self.data['J']) + LM = wave.performance.capture_length_matrix(self.data['Hm0'], self.data['Te'], + L, 'mean', self.Hm0_bins, self.Te_bins) + JM = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], + self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) + PM = wave.performance.power_matrix(LM, JM) + + self.assertEqual(PM.shape, (38,9)) + self.assertEqual(PM.isna().sum().sum(), 131) + + def test_mean_annual_energy_production(self): + L = wave.performance.capture_length(self.data['P'], self.data['J']) + maep = wave.performance.mean_annual_energy_production_timeseries(L, self.data['J']) + + self.assertAlmostEqual(maep, 1754020.077, 2) + + + def test_plot_matrix(self): + filename = abspath(join(testdir, 'wave_plot_matrix.png')) + if isfile(filename): + os.remove(filename) + + M = wave.performance.wave_energy_flux_matrix(self.data['Hm0'], self.data['Te'], + self.data['J'], 'mean', self.Hm0_bins, self.Te_bins) + + plt.figure() + wave.graphics.plot_matrix(M) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_powerperformance_workflow(self): + filename = abspath(join(testdir, 'Capture Length Matrix mean.png')) + if isfile(filename): + os.remove(filename) + P = pd.Series(np.random.normal(200, 40, 743),index = self.S.columns) + statistic = ['mean'] + savepath = testdir + show_values = True + h = 60 + expected = 401239.4822345051 + x = self.S.T + CM,MAEP = wave.performance.power_performance_workflow(self.S, h, + P, statistic, savepath=savepath, show_values=show_values) + + self.assertTrue(isfile(filename)) + self.assertEqual(list(CM.data_vars),self.expected_stats) + + error = (expected-MAEP)/expected # SSE + + self.assertLess(error, 1e-6) + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_plotResouceCharacterizations.py b/mhkit/tests/wave/test_plotResouceCharacterizations.py new file mode 100644 index 000000000..9c175eda8 --- /dev/null +++ b/mhkit/tests/wave/test_plotResouceCharacterizations.py @@ -0,0 +1,71 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class 
TestPlotResouceCharacterizations(unittest.TestCase): + + @classmethod + def setUpClass(self): + f_name= 'Hm0_Te_46022.json' + self.Hm0Te = pd.read_json(join(datadir,f_name)) + @classmethod + def tearDownClass(self): + pass + def test_plot_avg_annual_energy_matrix(self): + + filename = abspath(join(testdir, 'avg_annual_scatter_table.png')) + if isfile(filename): + os.remove(filename) + + Hm0Te = self.Hm0Te + Hm0Te.drop(Hm0Te[Hm0Te.Hm0 > 20].index, inplace=True) + J = np.random.random(len(Hm0Te))*100 + + plt.figure() + fig = wave.graphics.plot_avg_annual_energy_matrix(Hm0Te.Hm0, + Hm0Te.Te, J, Hm0_bin_size=0.5, Te_bin_size=1) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_monthly_cumulative_distribution(self): + + filename = abspath(join(testdir, 'monthly_cumulative_distribution.png')) + if isfile(filename): + os.remove(filename) + + a = pd.date_range(start='1/1/2010', periods=10000, freq='h') + S = pd.Series(np.random.random(len(a)) , index=a) + ax=wave.graphics.monthly_cumulative_distribution(S) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_resource_metrics.py b/mhkit/tests/wave/test_resource_metrics.py new file mode 100644 index 000000000..996c3f115 --- /dev/null +++ b/mhkit/tests/wave/test_resource_metrics.py @@ -0,0 +1,317 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestResourceMetrics(unittest.TestCase): + + @classmethod + def setUpClass(self): + omega = np.arange(0.1,3.5,0.01) + self.f = omega/(2*np.pi) + self.Hs = 2.5 + self.Tp = 8 + + file_name = join(datadir, 'ValData1.json') + with open(file_name, "r") as read_file: + self.valdata1 = pd.DataFrame(json.load(read_file)) + + self.valdata2 = {} + + file_name = join(datadir, 'ValData2_MC.json') + with open(file_name, "r") as read_file: + data = json.load(read_file) + self.valdata2['MC'] = data + for i in data.keys(): + # Calculate elevation spectra + elevation = pd.DataFrame(data[i]['elevation']) + elevation.index = elevation.index.astype(float) + elevation.sort_index(inplace=True) + sample_rate = data[i]['sample_rate'] + NFFT = data[i]['NFFT'] + self.valdata2['MC'][i]['S'] = wave.resource.elevation_spectrum(elevation, + sample_rate, NFFT) + + file_name = join(datadir, 'ValData2_AH.json') + with open(file_name, "r") as read_file: + data = json.load(read_file) + self.valdata2['AH'] = data + for i in data.keys(): + # Calculate elevation spectra + elevation = pd.DataFrame(data[i]['elevation']) + elevation.index = elevation.index.astype(float) + elevation.sort_index(inplace=True) + sample_rate = data[i]['sample_rate'] + NFFT = data[i]['NFFT'] + self.valdata2['AH'][i]['S'] = wave.resource.elevation_spectrum(elevation, + sample_rate, NFFT) + + file_name = join(datadir, 'ValData2_CDiP.json') + with open(file_name, "r") as 
read_file: + data = json.load(read_file) + self.valdata2['CDiP'] = data + for i in data.keys(): + temp = pd.Series(data[i]['S']).to_frame('S') + temp.index = temp.index.astype(float) + self.valdata2['CDiP'][i]['S'] = temp + + + @classmethod + def tearDownClass(self): + pass + + def test_kfromw(self): + for i in self.valdata1.columns: + f = np.array(self.valdata1[i]['w'])/(2*np.pi) + h = self.valdata1[i]['h'] + rho = self.valdata1[i]['rho'] + + expected = self.valdata1[i]['k'] + k = wave.resource.wave_number(f, h, rho) + calculated = k.loc[:,'k'].values + error = ((expected-calculated)**2).sum() # SSE + + self.assertLess(error, 1e-6) + + def test_kfromw_one_freq(self): + g = 9.81 + f = 0.1 + h = 1e9 + w = np.pi*2*f # deep water dispersion + expected = w**2 / g + calculated = wave.resource.wave_number(f=f, h=h, g=g).values[0][0] + error = np.abs(expected-calculated) + self.assertLess(error, 1e-6) + + def test_wave_length(self): + k_list=[1,2,10,3] + l_expected = (2.*np.pi/np.array(k_list)).tolist() + + k_df = pd.DataFrame(k_list,index = [1,2,3,4]) + k_series= k_df[0] + k_array=np.array(k_list) + + for l in [k_list, k_df, k_series, k_array]: + l_calculated = wave.resource.wave_length(l) + self.assertListEqual(l_expected,l_calculated.tolist()) + + idx=0 + k_int = k_list[idx] + l_calculated = wave.resource.wave_length(k_int) + self.assertEqual(l_expected[idx],l_calculated) + + def test_depth_regime(self): + expected = [True,True,False,True] + l_list=[1,2,10,3] + l_df = pd.DataFrame(l_list,index = [1,2,3,4]) + l_series= l_df[0] + l_array=np.array(l_list) + h = 10 + for l in [l_list, l_df, l_series, l_array]: + calculated = wave.resource.depth_regime(l,h) + self.assertListEqual(expected,calculated.tolist()) + + idx=0 + l_int = l_list[idx] + calculated = wave.resource.depth_regime(l_int,h) + self.assertEqual(expected[idx],calculated) + + + def test_wave_celerity(self): + # Depth regime ratio + dr_ratio=2 + + # small change in f will give similar value cg + f=np.linspace(20.0001,20.0005,5) + + # Choose index to spike at. 
cg spike is inversely proportional to k
+        k_idx = 2
+        k_tmp = [1, 1, 0.5, 1, 1]
+        k = pd.DataFrame(k_tmp, index=f)
+
+        # all shallow
+        cg_shallow1 = wave.resource.wave_celerity(k, h=0.0001, depth_check=True)
+        cg_shallow2 = wave.resource.wave_celerity(k, h=0.0001, depth_check=False)
+        self.assertTrue(all(cg_shallow1.squeeze().values ==
+                            cg_shallow2.squeeze().values))
+
+        # all deep
+        cg = wave.resource.wave_celerity(k, h=1000, depth_check=True)
+        self.assertTrue(all(np.pi*f/k.squeeze().values == cg.squeeze().values))
+
+    def test_energy_flux_deep(self):
+        # Dependent on mhkit.resource.BS spectrum
+        S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs)
+        Te = wave.resource.energy_period(S)
+        Hm0 = wave.resource.significant_wave_height(S)
+        rho = 1025
+        g = 9.80665
+        coeff = rho*(g**2)/(64*np.pi)
+        J = coeff*(Hm0.squeeze()**2)*Te.squeeze()
+
+        h = -1  # not used when deep=True
+        J_calc = wave.resource.energy_flux(S, h, deep=True)
+
+        self.assertTrue(J_calc.squeeze() == J)
+
+
+    def test_moments(self):
+        for file_i in self.valdata2.keys():  # for each file MC, AH, CDiP
+            datasets = self.valdata2[file_i]
+            for s in datasets.keys():  # for each set
+                data = datasets[s]
+                for m in data['m'].keys():
+                    expected = data['m'][m]
+                    S = data['S']
+                    if s == 'CDiP1' or s == 'CDiP6':
+                        f_bins = pd.Series(data['freqBinWidth'])
+                    else:
+                        f_bins = None
+
+                    calculated = wave.resource.frequency_moment(S, int(m),
+                        frequency_bins=f_bins).iloc[0,0]
+                    error = np.abs(expected-calculated)/expected
+
+                    self.assertLess(error, 0.01)
+
+
+    def test_metrics(self):
+        for file_i in self.valdata2.keys():  # for each file MC, AH, CDiP
+            datasets = self.valdata2[file_i]
+
+            for s in datasets.keys():  # for each set
+                data = datasets[s]
+                S = data['S']
+                if file_i == 'CDiP':
+                    f_bins = pd.Series(data['freqBinWidth'])
+                else:
+                    f_bins = None
+
+                # Hm0
+                expected = data['metrics']['Hm0']
+                calculated = wave.resource.significant_wave_height(S,
+                    frequency_bins=f_bins).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('Hm0', expected, calculated, error)
+                self.assertLess(error, 0.01)
+
+                # Te
+                expected = data['metrics']['Te']
+                calculated = wave.resource.energy_period(S,
+                    frequency_bins=f_bins).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('Te', expected, calculated, error)
+                self.assertLess(error, 0.01)
+
+                # T0
+                expected = data['metrics']['T0']
+                calculated = wave.resource.average_zero_crossing_period(S,
+                    frequency_bins=f_bins).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('T0', expected, calculated, error)
+                self.assertLess(error, 0.01)
+
+                # Tc = Tavg**2
+                expected = data['metrics']['Tc']
+                calculated = wave.resource.average_crest_period(S,
+                    frequency_bins=f_bins).iloc[0,0]**2
+                error = np.abs(expected-calculated)/expected
+                #print('Tc', expected, calculated, error)
+                self.assertLess(error, 0.01)
+
+                # Tm
+                expected = np.sqrt(data['metrics']['Tm'])
+                calculated = wave.resource.average_wave_period(S,
+                    frequency_bins=f_bins).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('Tm', expected, calculated, error)
+                self.assertLess(error, 0.01)
+
+                # Tp
+                expected = data['metrics']['Tp']
+                calculated = wave.resource.peak_period(S).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('Tp', expected, calculated, error)
+                self.assertLess(error, 0.001)
+
+                # e
+                expected = data['metrics']['e']
+                calculated = wave.resource.spectral_bandwidth(S,
+                    frequency_bins=f_bins).iloc[0,0]
+                error = np.abs(expected-calculated)/expected
+                #print('e', expected, 
calculated, error) + self.assertLess(error, 0.001) + + # J + if file_i != 'CDiP': + for i,j in zip(data['h'],data['J']): + expected = data['J'][j] + calculated = wave.resource.energy_flux(S,i) + error = np.abs(expected-calculated.values)/expected + self.assertLess(error, 0.1) + + # v + if file_i == 'CDiP': + # this should be updated to run on other datasets + expected = data['metrics']['v'] + calculated = wave.resource.spectral_width(S, + frequency_bins=f_bins).iloc[0,0] + error = np.abs(expected-calculated)/expected + self.assertLess(error, 0.01) + + if file_i == 'MC': + expected = data['metrics']['v'] + # testing that default uniform frequency bin widths works + calculated = wave.resource.spectral_width(S).iloc[0,0] + error = np.abs(expected-calculated)/expected + self.assertLess(error, 0.01) + + + def test_plot_elevation_timeseries(self): + filename = abspath(join(testdir, 'wave_plot_elevation_timeseries.png')) + if isfile(filename): + os.remove(filename) + + data = self.valdata2['MC'] + temp = pd.DataFrame(data[list(data.keys())[0]]['elevation']) + temp.index = temp.index.astype(float) + temp.sort_index(inplace=True) + eta = temp.iloc[0:100,:] + + plt.figure() + wave.graphics.plot_elevation_timeseries(eta) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + +if __name__ == '__main__': + unittest.main() diff --git a/mhkit/tests/wave/test_resource_spectrum.py b/mhkit/tests/wave/test_resource_spectrum.py new file mode 100644 index 000000000..b74383d9b --- /dev/null +++ b/mhkit/tests/wave/test_resource_spectrum.py @@ -0,0 +1,187 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestResourceSpectrum(unittest.TestCase): + + @classmethod + def setUpClass(self): + omega = np.arange(0.1,3.5,0.01) + self.f = omega/(2*np.pi) + self.Hs = 2.5 + self.Tp = 8 + df = self.f[1] - self.f[0] + Trep = 1/df + self.t = np.arange(0, Trep, 0.05) + + @classmethod + def tearDownClass(self): + pass + + def test_pierson_moskowitz_spectrum(self): + S = wave.resource.pierson_moskowitz_spectrum(self.f,self.Tp,self.Hs) + Hm0 = wave.resource.significant_wave_height(S).iloc[0,0] + Tp0 = wave.resource.peak_period(S).iloc[0,0] + + errorHm0 = np.abs(self.Tp - Tp0)/self.Tp + errorTp0 = np.abs(self.Hs - Hm0)/self.Hs + + self.assertLess(errorHm0, 0.01) + self.assertLess(errorTp0, 0.01) + + def test_jonswap_spectrum(self): + S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) + Hm0 = wave.resource.significant_wave_height(S).iloc[0,0] + Tp0 = wave.resource.peak_period(S).iloc[0,0] + + errorHm0 = np.abs(self.Tp - Tp0)/self.Tp + errorTp0 = np.abs(self.Hs - Hm0)/self.Hs + + self.assertLess(errorHm0, 0.01) + self.assertLess(errorTp0, 0.01) + + def test_surface_elevation_phases_np_and_pd(self): + S0 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs) + S1 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs*1.1) + S = pd.concat([S0, S1], axis=1) + + 
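+        # One random phase realization, passed once as a raw ndarray and
+        # once as a DataFrame; surface_elevation should return identical
+        # frames for both input types.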
phases_np = np.random.rand(S.shape[0], S.shape[1]) * 2 * np.pi + phases_pd = pd.DataFrame(phases_np, index=S.index, columns=S.columns) + + eta_np = wave.resource.surface_elevation(S, self.t, phases=phases_np, seed=1) + eta_pd = wave.resource.surface_elevation(S, self.t, phases=phases_pd, seed=1) + + assert_frame_equal(eta_np, eta_pd) + + def test_surface_elevation_frequency_bins_np_and_pd(self): + S0 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs) + S1 = wave.resource.jonswap_spectrum(self.f,self.Tp,self.Hs*1.1) + S = pd.concat([S0, S1], axis=1) + + eta0 = wave.resource.surface_elevation(S, self.t, seed=1) + + f_bins_np = np.array([np.diff(S.index)[0]]*len(S)) + f_bins_pd = pd.DataFrame(f_bins_np, index=S.index, columns=['df']) + + eta_np = wave.resource.surface_elevation(S, self.t, frequency_bins=f_bins_np, seed=1) + eta_pd = wave.resource.surface_elevation(S, self.t, frequency_bins=f_bins_pd, seed=1) + + assert_frame_equal(eta0, eta_np) + assert_frame_equal(eta_np, eta_pd) + + def test_surface_elevation_moments(self): + S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) + eta = wave.resource.surface_elevation(S, self.t, seed=1) + dt = self.t[1] - self.t[0] + Sn = wave.resource.elevation_spectrum(eta, 1/dt, len(eta.values), + detrend=False, window='boxcar', + noverlap=0) + + m0 = wave.resource.frequency_moment(S,0).m0.values[0] + m0n = wave.resource.frequency_moment(Sn,0).m0.values[0] + errorm0 = np.abs((m0 - m0n)/m0) + + self.assertLess(errorm0, 0.01) + + m1 = wave.resource.frequency_moment(S,1).m1.values[0] + m1n = wave.resource.frequency_moment(Sn,1).m1.values[0] + errorm1 = np.abs((m1 - m1n)/m1) + + self.assertLess(errorm1, 0.01) + + def test_surface_elevation_rmse(self): + S = wave.resource.jonswap_spectrum(self.f, self.Tp, self.Hs) + eta = wave.resource.surface_elevation(S, self.t, seed=1) + dt = self.t[1] - self.t[0] + Sn = wave.resource.elevation_spectrum(eta, 1/dt, len(eta), + detrend=False, window='boxcar', + noverlap=0) + + fSn = interp1d(Sn.index.values, Sn.values, axis=0) + rmse = (S.values - fSn(S.index.values))**2 + rmse_sum = (np.sum(rmse)/len(rmse))**0.5 + + self.assertLess(rmse_sum, 0.02) + + def test_plot_spectrum(self): + filename = abspath(join(testdir, 'wave_plot_spectrum.png')) + if isfile(filename): + os.remove(filename) + + S = wave.resource.pierson_moskowitz_spectrum(self.f,self.Tp,self.Hs) + + plt.figure() + wave.graphics.plot_spectrum(S) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_chakrabarti(self): + filename = abspath(join(testdir, 'wave_plot_chakrabarti.png')) + if isfile(filename): + os.remove(filename) + + D = 5 + H = 10 + lambda_w = 200 + + wave.graphics.plot_chakrabarti(H, lambda_w, D) + plt.savefig(filename) + + def test_plot_chakrabarti_np(self): + filename = abspath(join(testdir, 'wave_plot_chakrabarti_np.png')) + if isfile(filename): + os.remove(filename) + + D = np.linspace(5, 15, 5) + H = 10 * np.ones_like(D) + lambda_w = 200 * np.ones_like(D) + + wave.graphics.plot_chakrabarti(H, lambda_w, D) + plt.savefig(filename) + + self.assertTrue(isfile(filename)) + + def test_plot_chakrabarti_pd(self): + filename = abspath(join(testdir, 'wave_plot_chakrabarti_pd.png')) + if isfile(filename): + os.remove(filename) + + D = np.linspace(5, 15, 5) + H = 10 * np.ones_like(D) + lambda_w = 200 * np.ones_like(D) + df = pd.DataFrame([H.flatten(),lambda_w.flatten(),D.flatten()], + index=['H','lambda_w','D']).transpose() + + wave.graphics.plot_chakrabarti(df.H, df.lambda_w, df.D) + 
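+        # pandas Series inputs (df.H, df.lambda_w, df.D) should render the
+        # same as the ndarray inputs in test_plot_chakrabarti_np above.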
plt.savefig(filename) + + self.assertTrue(isfile(filename)) + +if __name__ == '__main__': + unittest.main() + \ No newline at end of file diff --git a/mhkit/tests/wave/test_swan.py b/mhkit/tests/wave/test_swan.py new file mode 100644 index 000000000..96443c9a8 --- /dev/null +++ b/mhkit/tests/wave/test_swan.py @@ -0,0 +1,83 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +from pandas.testing import assert_frame_equal +from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint +import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO +import pandas as pd +import numpy as np +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os + + +testdir = dirname(abspath(__file__)) +datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) + + +class TestSWAN(unittest.TestCase): + + @classmethod + def setUpClass(self): + swan_datadir = join(datadir,'swan') + self.table_file = join(swan_datadir,'SWANOUT.DAT') + self.swan_block_mat_file = join(swan_datadir,'SWANOUT.MAT') + self.swan_block_txt_file = join(swan_datadir,'SWANOUTBlock.DAT') + self.expected_table = pd.read_csv(self.table_file, sep='\s+', comment='%', + names=['Xp', 'Yp', 'Hsig', 'Dir', 'RTpeak', 'TDir']) + + @classmethod + def tearDownClass(self): + pass + + def test_read_table(self): + swan_table, swan_meta = wave.io.swan.read_table(self.table_file) + assert_frame_equal(self.expected_table, swan_table) + + def test_read_block_mat(self): + swanBlockMat, metaDataMat = wave.io.swan.read_block(self.swan_block_mat_file ) + self.assertEqual(len(swanBlockMat), 4) + self.assertAlmostEqual(self.expected_table['Hsig'].sum(), + swanBlockMat['Hsig'].sum().sum(), places=1) + + def test_read_block_txt(self): + swanBlockTxt, metaData = wave.io.swan.read_block(self.swan_block_txt_file) + self.assertEqual(len(swanBlockTxt), 4) + sumSum = swanBlockTxt['Significant wave height'].sum().sum() + self.assertAlmostEqual(self.expected_table['Hsig'].sum(), + sumSum, places=-2) + + def test_block_to_table(self): + x=np.arange(5) + y=np.arange(5,10) + df = pd.DataFrame(np.random.rand(5,5), columns=x, index=y) + dff = wave.io.swan.block_to_table(df) + self.assertEqual(dff.shape, (len(x)*len(y), 3)) + self.assertTrue(all(dff.x.unique() == np.unique(x))) + + def test_dictionary_of_block_to_table(self): + x=np.arange(5) + y=np.arange(5,10) + df = pd.DataFrame(np.random.rand(5,5), columns=x, index=y) + keys = ['data1', 'data2'] + data = [df, df] + dict_of_dfs = dict(zip(keys,data)) + dff = wave.io.swan.dictionary_of_block_to_table(dict_of_dfs) + self.assertEqual(dff.shape, (len(x)*len(y), 2+len(keys))) + self.assertTrue(all(dff.x.unique() == np.unique(x))) + for key in keys: + self.assertTrue(key in dff.keys()) + + +if __name__ == '__main__': + unittest.main() From 777f5b8d43d09079b09052ec943f1f4bddacf440 Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 24 Jun 2022 10:49:40 -0600 Subject: [PATCH 02/16] Move io tests into io folder --- mhkit/tests/wave/{ => io}/test_cdip.py | 0 mhkit/tests/wave/{test_WPTOhindcast.py => io/test_hindcast.py} | 0 mhkit/tests/wave/{ => io}/test_ndbc.py | 0 mhkit/tests/wave/{ => io}/test_swan.py | 0 mhkit/tests/wave/{test_WECSim.py => io/test_wecsim.py} | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename mhkit/tests/wave/{ => io}/test_cdip.py (100%) rename 
mhkit/tests/wave/{test_WPTOhindcast.py => io/test_hindcast.py} (100%) rename mhkit/tests/wave/{ => io}/test_ndbc.py (100%) rename mhkit/tests/wave/{ => io}/test_swan.py (100%) rename mhkit/tests/wave/{test_WECSim.py => io/test_wecsim.py} (100%) diff --git a/mhkit/tests/wave/test_cdip.py b/mhkit/tests/wave/io/test_cdip.py similarity index 100% rename from mhkit/tests/wave/test_cdip.py rename to mhkit/tests/wave/io/test_cdip.py diff --git a/mhkit/tests/wave/test_WPTOhindcast.py b/mhkit/tests/wave/io/test_hindcast.py similarity index 100% rename from mhkit/tests/wave/test_WPTOhindcast.py rename to mhkit/tests/wave/io/test_hindcast.py diff --git a/mhkit/tests/wave/test_ndbc.py b/mhkit/tests/wave/io/test_ndbc.py similarity index 100% rename from mhkit/tests/wave/test_ndbc.py rename to mhkit/tests/wave/io/test_ndbc.py diff --git a/mhkit/tests/wave/test_swan.py b/mhkit/tests/wave/io/test_swan.py similarity index 100% rename from mhkit/tests/wave/test_swan.py rename to mhkit/tests/wave/io/test_swan.py diff --git a/mhkit/tests/wave/test_WECSim.py b/mhkit/tests/wave/io/test_wecsim.py similarity index 100% rename from mhkit/tests/wave/test_WECSim.py rename to mhkit/tests/wave/io/test_wecsim.py From 02e820eef041a4cd185f20d10cd15d62423480bf Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 24 Jun 2022 10:52:06 -0600 Subject: [PATCH 03/16] Send plots to a plots folder --- mhkit/tests/wave/test_contours.py | 7 +++++-- mhkit/tests/wave/test_performance.py | 9 ++++++--- mhkit/tests/wave/test_plotResouceCharacterizations.py | 7 +++++-- mhkit/tests/wave/test_resource_metrics.py | 5 ++++- mhkit/tests/wave/test_resource_spectrum.py | 11 +++++++---- 5 files changed, 27 insertions(+), 12 deletions(-) diff --git a/mhkit/tests/wave/test_contours.py b/mhkit/tests/wave/test_contours.py index 89770270f..fab2f828a 100644 --- a/mhkit/tests/wave/test_contours.py +++ b/mhkit/tests/wave/test_contours.py @@ -22,6 +22,9 @@ testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) @@ -98,7 +101,7 @@ def test__principal_component_analysis(self): self.pca['sigma_fit']['x']) def test_plot_environmental_contour(self): - file_loc= join(testdir, 'wave_plot_environmental_contour.png') + file_loc= join(plotdir, 'wave_plot_environmental_contour.png') filename = abspath(file_loc) if isfile(filename): os.remove(filename) @@ -136,7 +139,7 @@ def test_plot_environmental_contour(self): self.assertTrue(isfile(filename)) def test_plot_environmental_contour_multiyear(self): - filename = abspath(join(testdir, + filename = abspath(join(plotdir, 'wave_plot_environmental_contour_multiyear.png')) if isfile(filename): os.remove(filename) diff --git a/mhkit/tests/wave/test_performance.py b/mhkit/tests/wave/test_performance.py index 0a30906ba..f4bc2a566 100644 --- a/mhkit/tests/wave/test_performance.py +++ b/mhkit/tests/wave/test_performance.py @@ -22,6 +22,9 @@ testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) @@ -87,7 +90,7 @@ def test_mean_annual_energy_production(self): def test_plot_matrix(self): - filename = abspath(join(testdir, 'wave_plot_matrix.png')) + filename = abspath(join(plotdir, 'wave_plot_matrix.png')) if isfile(filename): os.remove(filename) @@ -102,12 +105,12 @@ def test_plot_matrix(self): self.assertTrue(isfile(filename)) def 
test_powerperformance_workflow(self): - filename = abspath(join(testdir, 'Capture Length Matrix mean.png')) + filename = abspath(join(plotdir, 'Capture Length Matrix mean.png')) if isfile(filename): os.remove(filename) P = pd.Series(np.random.normal(200, 40, 743),index = self.S.columns) statistic = ['mean'] - savepath = testdir + savepath = plotdir show_values = True h = 60 expected = 401239.4822345051 diff --git a/mhkit/tests/wave/test_plotResouceCharacterizations.py b/mhkit/tests/wave/test_plotResouceCharacterizations.py index 9c175eda8..f973f85d8 100644 --- a/mhkit/tests/wave/test_plotResouceCharacterizations.py +++ b/mhkit/tests/wave/test_plotResouceCharacterizations.py @@ -22,6 +22,9 @@ testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) @@ -36,7 +39,7 @@ def tearDownClass(self): pass def test_plot_avg_annual_energy_matrix(self): - filename = abspath(join(testdir, 'avg_annual_scatter_table.png')) + filename = abspath(join(plotdir, 'avg_annual_scatter_table.png')) if isfile(filename): os.remove(filename) @@ -54,7 +57,7 @@ def test_plot_avg_annual_energy_matrix(self): def test_plot_monthly_cumulative_distribution(self): - filename = abspath(join(testdir, 'monthly_cumulative_distribution.png')) + filename = abspath(join(plotdir, 'monthly_cumulative_distribution.png')) if isfile(filename): os.remove(filename) diff --git a/mhkit/tests/wave/test_resource_metrics.py b/mhkit/tests/wave/test_resource_metrics.py index 996c3f115..d01d6710c 100644 --- a/mhkit/tests/wave/test_resource_metrics.py +++ b/mhkit/tests/wave/test_resource_metrics.py @@ -22,6 +22,9 @@ testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) @@ -295,7 +298,7 @@ def test_metrics(self): def test_plot_elevation_timeseries(self): - filename = abspath(join(testdir, 'wave_plot_elevation_timeseries.png')) + filename = abspath(join(plotdir, 'wave_plot_elevation_timeseries.png')) if isfile(filename): os.remove(filename) diff --git a/mhkit/tests/wave/test_resource_spectrum.py b/mhkit/tests/wave/test_resource_spectrum.py index b74383d9b..3b96cb477 100644 --- a/mhkit/tests/wave/test_resource_spectrum.py +++ b/mhkit/tests/wave/test_resource_spectrum.py @@ -22,6 +22,9 @@ testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) @@ -127,7 +130,7 @@ def test_surface_elevation_rmse(self): self.assertLess(rmse_sum, 0.02) def test_plot_spectrum(self): - filename = abspath(join(testdir, 'wave_plot_spectrum.png')) + filename = abspath(join(plotdir, 'wave_plot_spectrum.png')) if isfile(filename): os.remove(filename) @@ -141,7 +144,7 @@ def test_plot_spectrum(self): self.assertTrue(isfile(filename)) def test_plot_chakrabarti(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti.png')) + filename = abspath(join(plotdir, 'wave_plot_chakrabarti.png')) if isfile(filename): os.remove(filename) @@ -153,7 +156,7 @@ def test_plot_chakrabarti(self): plt.savefig(filename) def test_plot_chakrabarti_np(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti_np.png')) + filename = abspath(join(plotdir, 'wave_plot_chakrabarti_np.png')) if isfile(filename): os.remove(filename) @@ -167,7 
+170,7 @@ def test_plot_chakrabarti_np(self): self.assertTrue(isfile(filename)) def test_plot_chakrabarti_pd(self): - filename = abspath(join(testdir, 'wave_plot_chakrabarti_pd.png')) + filename = abspath(join(plotdir, 'wave_plot_chakrabarti_pd.png')) if isfile(filename): os.remove(filename) From eb6568c1c0a9c6a6b5c7882069b8f7422894b0ab Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 24 Jun 2022 11:01:46 -0600 Subject: [PATCH 04/16] Move resource characterizations plot tests into resource metrics file --- .../wave/test_plotResouceCharacterizations.py | 74 ------------------- mhkit/tests/wave/test_resource_metrics.py | 41 ++++++++++ 2 files changed, 41 insertions(+), 74 deletions(-) delete mode 100644 mhkit/tests/wave/test_plotResouceCharacterizations.py diff --git a/mhkit/tests/wave/test_plotResouceCharacterizations.py b/mhkit/tests/wave/test_plotResouceCharacterizations.py deleted file mode 100644 index f973f85d8..000000000 --- a/mhkit/tests/wave/test_plotResouceCharacterizations.py +++ /dev/null @@ -1,74 +0,0 @@ -from os.path import abspath, dirname, join, isfile, normpath, relpath -from pandas.testing import assert_frame_equal -from numpy.testing import assert_allclose -from scipy.interpolate import interp1d -from random import seed, randint -import matplotlib.pylab as plt -from datetime import datetime -import xarray.testing as xrt -import mhkit.wave as wave -from io import StringIO -import pandas as pd -import numpy as np -import contextlib -import unittest -import netCDF4 -import inspect -import pickle -import time -import json -import sys -import os - - -testdir = dirname(abspath(__file__)) -plotdir = join(testdir, 'plots') -isdir = os.path.isdir(plotdir) -if not isdir: os.mkdir(plotdir) -datadir = normpath(join(testdir,relpath('../../../examples/data/wave'))) - - -class TestPlotResouceCharacterizations(unittest.TestCase): - - @classmethod - def setUpClass(self): - f_name= 'Hm0_Te_46022.json' - self.Hm0Te = pd.read_json(join(datadir,f_name)) - @classmethod - def tearDownClass(self): - pass - def test_plot_avg_annual_energy_matrix(self): - - filename = abspath(join(plotdir, 'avg_annual_scatter_table.png')) - if isfile(filename): - os.remove(filename) - - Hm0Te = self.Hm0Te - Hm0Te.drop(Hm0Te[Hm0Te.Hm0 > 20].index, inplace=True) - J = np.random.random(len(Hm0Te))*100 - - plt.figure() - fig = wave.graphics.plot_avg_annual_energy_matrix(Hm0Te.Hm0, - Hm0Te.Te, J, Hm0_bin_size=0.5, Te_bin_size=1) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_monthly_cumulative_distribution(self): - - filename = abspath(join(plotdir, 'monthly_cumulative_distribution.png')) - if isfile(filename): - os.remove(filename) - - a = pd.date_range(start='1/1/2010', periods=10000, freq='h') - S = pd.Series(np.random.random(len(a)) , index=a) - ax=wave.graphics.monthly_cumulative_distribution(S) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - -if __name__ == '__main__': - unittest.main() diff --git a/mhkit/tests/wave/test_resource_metrics.py b/mhkit/tests/wave/test_resource_metrics.py index d01d6710c..1bef43f12 100644 --- a/mhkit/tests/wave/test_resource_metrics.py +++ b/mhkit/tests/wave/test_resource_metrics.py @@ -315,6 +315,47 @@ def test_plot_elevation_timeseries(self): self.assertTrue(isfile(filename)) +class TestPlotResouceCharacterizations(unittest.TestCase): + @classmethod + def setUpClass(self): + f_name= 'Hm0_Te_46022.json' + self.Hm0Te = pd.read_json(join(datadir,f_name)) + @classmethod + def 
tearDownClass(self): + pass + def test_plot_avg_annual_energy_matrix(self): + + filename = abspath(join(plotdir, 'avg_annual_scatter_table.png')) + if isfile(filename): + os.remove(filename) + + Hm0Te = self.Hm0Te + Hm0Te.drop(Hm0Te[Hm0Te.Hm0 > 20].index, inplace=True) + J = np.random.random(len(Hm0Te))*100 + + plt.figure() + fig = wave.graphics.plot_avg_annual_energy_matrix(Hm0Te.Hm0, + Hm0Te.Te, J, Hm0_bin_size=0.5, Te_bin_size=1) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_monthly_cumulative_distribution(self): + + filename = abspath(join(plotdir, 'monthly_cumulative_distribution.png')) + if isfile(filename): + os.remove(filename) + + a = pd.date_range(start='1/1/2010', periods=10000, freq='h') + S = pd.Series(np.random.random(len(a)) , index=a) + ax=wave.graphics.monthly_cumulative_distribution(S) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + if __name__ == '__main__': unittest.main() From 150023027762dd246fdc22bae9a1380060bfd16a Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:42:59 -0600 Subject: [PATCH 05/16] move load tests to folder --- mhkit/tests/loads/__init__.py | 0 mhkit/tests/{ => loads}/test_loads.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 mhkit/tests/loads/__init__.py rename mhkit/tests/{ => loads}/test_loads.py (100%) diff --git a/mhkit/tests/loads/__init__.py b/mhkit/tests/loads/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/test_loads.py b/mhkit/tests/loads/test_loads.py similarity index 100% rename from mhkit/tests/test_loads.py rename to mhkit/tests/loads/test_loads.py From c091f7c9b9b182845c060328c7cbec78ab0cc5fa Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:43:54 -0600 Subject: [PATCH 06/16] move power tests to folder --- mhkit/tests/power/__init__.py | 0 mhkit/tests/{ => power}/test_power.py | 12 +++++++----- 2 files changed, 7 insertions(+), 5 deletions(-) create mode 100644 mhkit/tests/power/__init__.py rename mhkit/tests/{ => power}/test_power.py (98%) diff --git a/mhkit/tests/power/__init__.py b/mhkit/tests/power/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/test_power.py b/mhkit/tests/power/test_power.py similarity index 98% rename from mhkit/tests/test_power.py rename to mhkit/tests/power/test_power.py index 65a0d7846..eb2807932 100644 --- a/mhkit/tests/test_power.py +++ b/mhkit/tests/power/test_power.py @@ -1,12 +1,14 @@ -import unittest + from os.path import abspath, dirname, join, isfile, normpath, relpath -import os -import numpy as np -import pandas as pd import mhkit.power as power +import pandas as pd +import numpy as np +import unittest +import os + testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../examples/data/power'))) +datadir = normpath(join(testdir,relpath('../../../examples/data/power'))) class TestDevice(unittest.TestCase): From 6766eaf7e0e4f2b555b5aabf600282f090fdb1b1 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:44:23 -0600 Subject: [PATCH 07/16] move river tests to folder --- mhkit/tests/river/__init__.py | 0 mhkit/tests/river/test_io.py | 157 +++++++++++ mhkit/tests/river/test_performance.py | 80 ++++++ mhkit/tests/river/test_resource.py | 187 +++++++++++++ mhkit/tests/test_river.py | 372 -------------------------- 5 files changed, 424 insertions(+), 372 deletions(-) create mode 100644 mhkit/tests/river/__init__.py create mode 100644 
mhkit/tests/river/test_io.py create mode 100644 mhkit/tests/river/test_performance.py create mode 100644 mhkit/tests/river/test_resource.py delete mode 100644 mhkit/tests/test_river.py diff --git a/mhkit/tests/river/__init__.py b/mhkit/tests/river/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/river/test_io.py b/mhkit/tests/river/test_io.py new file mode 100644 index 000000000..805734b77 --- /dev/null +++ b/mhkit/tests/river/test_io.py @@ -0,0 +1,157 @@ + +from os.path import abspath, dirname, join, isfile, normpath, relpath +from numpy.testing import assert_array_almost_equal +from pandas.testing import assert_frame_equal +import scipy.interpolate as interp +import matplotlib.pylab as plt +import mhkit.river as river +import pandas as pd +import numpy as np +import unittest +import netCDF4 +import os + + +testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) +datadir = normpath(join(testdir,'..','..','..','examples','data','river')) + + +class TestIO(unittest.TestCase): + + @classmethod + def setUpClass(self): + d3ddatadir = normpath(join(datadir,'d3d')) + + filename= 'turbineTest_map.nc' + self.d3d_flume_data = netCDF4.Dataset(join(d3ddatadir,filename)) + + @classmethod + def tearDownClass(self): + pass + + def test_load_usgs_data_instantaneous(self): + file_name = join(datadir, 'USGS_08313000_Jan2019_instantaneous.json') + data = river.io.usgs.read_usgs_file(file_name) + + self.assertEqual(data.columns, ['Discharge, cubic feet per second']) + self.assertEqual(data.shape, (2972, 1)) # 4 data points are missing + + def test_load_usgs_data_daily(self): + file_name = join(datadir, 'USGS_08313000_Jan2019_daily.json') + data = river.io.usgs.read_usgs_file(file_name) + + expected_index = pd.date_range('2019-01-01', '2019-01-31', freq='D') + self.assertEqual(data.columns, ['Discharge, cubic feet per second']) + self.assertEqual((data.index == expected_index.tz_localize('UTC')).all(), True) + self.assertEqual(data.shape, (31, 1)) + + + def test_request_usgs_data_daily(self): + data=river.io.usgs.request_usgs_data(station="15515500", + parameter='00060', + start_date='2009-08-01', + end_date='2009-08-10', + data_type='Daily') + self.assertEqual(data.columns, ['Discharge, cubic feet per second']) + self.assertEqual(data.shape, (10, 1)) + + + def test_request_usgs_data_instant(self): + data=river.io.usgs.request_usgs_data(station="15515500", + parameter='00060', + start_date='2009-08-01', + end_date='2009-08-10', + data_type='Instantaneous') + self.assertEqual(data.columns, ['Discharge, cubic feet per second']) + # Every 15 minutes or 4 times per hour + self.assertEqual(data.shape, (10*24*4, 1)) + + + def test_layer_data(self): + data=self.d3d_flume_data + variable= 'ucx' + layer=2 + time_index= 3 + layer_data= river.io.d3d.get_layer_data(data, variable, layer, time_index) + layer_compare = 2 + time_index_compare= 4 + layer_data_expected= river.io.d3d.get_layer_data(data, + variable, layer_compare, + time_index_compare) + + assert_array_almost_equal(layer_data.x,layer_data_expected.x, decimal = 2) + assert_array_almost_equal(layer_data.y,layer_data_expected.y, decimal = 2) + assert_array_almost_equal(layer_data.v,layer_data_expected.v, decimal= 2) + + + def test_create_points(self): + x=np.linspace(1, 3, num= 3) + y=np.linspace(1, 3, num= 3) + z=1 + points= river.io.d3d.create_points(x,y,z) + + x=[1,2,3,1,2,3,1,2,3] + y=[1,1,1,2,2,2,3,3,3] + z=[1,1,1,1,1,1,1,1,1] + + points_array= 
np.array([ [x_i, y_i, z_i] for x_i, y_i, z_i in zip(x, y, z)])
+        points_expected= pd.DataFrame(points_array, columns=('x','y','z'))
+        assert_array_almost_equal(points, points_expected,decimal = 2)
+
+
+    def test_get_all_data_points(self):
+        data=self.d3d_flume_data
+        variable= 'ucx'
+        time_step= 3
+        output = river.io.d3d.get_all_data_points(data, variable, time_step)
+        size_output = np.size(output)
+        time_step_compare=4
+        output_expected= river.io.d3d.get_all_data_points(data, variable, time_step_compare)
+        size_output_expected= np.size(output_expected)
+        self.assertEqual(size_output, size_output_expected)
+
+
+    def test_unorm(self):
+        x=np.linspace(1, 3, num= 3)
+        y=np.linspace(1, 3, num= 3)
+        z=np.linspace(1, 3, num= 3)
+        unorm = river.io.d3d.unorm(x,y,z)
+        unorm_expected= [np.sqrt(1**2+1**2+1**2),np.sqrt(2**2+2**2+2**2), np.sqrt(3**2+3**2+3**2)]
+        assert_array_almost_equal(unorm, unorm_expected, decimal = 2)
+
+    def test_turbulent_intensity(self):
+        data=self.d3d_flume_data
+        time_step= -1
+        x_test=np.linspace(1, 17, num= 10)
+        y_test=np.linspace(3, 3, num= 10)
+        z_test=np.linspace(1, 1, num= 10)
+
+        test_points = np.array([ [x, y, z] for x, y, z in zip(x_test, y_test, z_test)])
+        points= pd.DataFrame(test_points, columns=['x','y','z'])
+
+        TI= river.io.d3d.turbulent_intensity(data, points, time_step)
+
+        TI_vars= ['turkin1', 'ucx', 'ucy', 'ucz']
+        TI_data_raw = {}
+        for var in TI_vars:
+            # get all data points for this variable
+            var_data_df = river.io.d3d.get_all_data_points(data, var,time_step)
+            TI_data_raw[var] = var_data_df
+        TI_data= points.copy(deep=True)
+
+        for var in TI_vars:
+            TI_data[var] = interp.griddata(TI_data_raw[var][['x','y','z']],
+                                           TI_data_raw[var][var], points[['x','y','z']])
+
+        u_mag=river.io.d3d.unorm(TI_data['ucx'],TI_data['ucy'], TI_data['ucz'])
+        turbulent_intensity_expected= np.sqrt(2/3*TI_data['turkin1'])/u_mag
+
+
+        assert_array_almost_equal(TI.turbulent_intensity, turbulent_intensity_expected, decimal = 2)
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/mhkit/tests/river/test_performance.py b/mhkit/tests/river/test_performance.py
new file mode 100644
index 000000000..807093bfc
--- /dev/null
+++ b/mhkit/tests/river/test_performance.py
@@ -0,0 +1,80 @@
+from os.path import abspath, dirname, join, isfile, normpath, relpath
+from numpy.testing import assert_array_almost_equal
+from pandas.testing import assert_frame_equal
+import scipy.interpolate as interp
+import matplotlib.pylab as plt
+import mhkit.river as river
+import pandas as pd
+import numpy as np
+import unittest
+import netCDF4
+import os
+
+
+testdir = dirname(abspath(__file__))
+plotdir = join(testdir, 'plots')
+isdir = os.path.isdir(plotdir)
+if not isdir: os.mkdir(plotdir)
+datadir = normpath(join(testdir,'..','..','..','examples','data','river'))
+
+
+class TestPerformance(unittest.TestCase):
+    @classmethod
+    def setUpClass(self):
+        self.diameter = 1
+        self.height = 2
+        self.width = 3
+        self.diameters = [1,2,3,4]
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    def test_circular(self):
+        eq, ca = river.performance.circular(self.diameter)
+        self.assertEqual(eq, self.diameter)
+        self.assertEqual(ca, 4*np.pi*self.diameter**2.)
+
+    def test_ducted(self):
+        eq, ca = river.performance.ducted(self.diameter)
+        self.assertEqual(eq, self.diameter)
+        self.assertEqual(ca, 4*np.pi*self.diameter**2.)
+
+    def test_rectangular(self):
+        eq, ca = river.performance.rectangular(self.height, self.width)
+        self.assertAlmostEqual(eq, 2.76, places=2)
+        self.assertAlmostEqual(ca, self.height*self.width, places=2)
+
+    def test_multiple_circular(self):
+        eq, ca = river.performance.multiple_circular(self.diameters)
+        self.assertAlmostEqual(eq, 5.48, places=2)
+        self.assertAlmostEqual(ca, 23.56, places=2)
+
+    def test_tip_speed_ratio(self):
+        rotor_speed = [15,16,17,18] # rotor speeds in rpm; divided by 60 below to get rev/s
+        rotor_diameter = 77 # rotor diameter of a GE 1.5 MW turbine
+        inflow_speed = [13,13,13,13] # array of inflow wind speeds (m/s)
+        TSR_answer = [4.7,5.0,5.3,5.6]
+
+        TSR = river.performance.tip_speed_ratio(np.asarray(rotor_speed)/60,rotor_diameter,inflow_speed)
+
+        for i,j in zip(TSR,TSR_answer):
+            self.assertAlmostEqual(i,j,delta=0.05)
+
+    def test_power_coefficient(self):
+        # data obtained from the power performance report of a wind turbine
+        inflow_speed = [4,6,8,10,12,14,16,18,20]
+        power_out = np.asarray([59,304,742,1200,1400,1482,1497,1497,1511])
+        capture_area = 4656.63
+        rho = 1.225
+        Cp_answer = [0.320,0.493,0.508,0.421,0.284,0.189,0.128,0.090,0.066]
+
+        Cp = river.performance.power_coefficient(power_out*1000,inflow_speed,capture_area,rho)
+
+        for i,j in zip(Cp,Cp_answer):
+            self.assertAlmostEqual(i,j,places=2)
+
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/mhkit/tests/river/test_resource.py b/mhkit/tests/river/test_resource.py
new file mode 100644
index 000000000..d26cd8a5e
--- /dev/null
+++ b/mhkit/tests/river/test_resource.py
@@ -0,0 +1,187 @@
+from os.path import abspath, dirname, join, isfile, normpath, relpath
+from numpy.testing import assert_array_almost_equal
+from pandas.testing import assert_frame_equal
+import scipy.interpolate as interp
+import matplotlib.pylab as plt
+import mhkit.river as river
+import pandas as pd
+import numpy as np
+import unittest
+import netCDF4
+import os
+
+
+testdir = dirname(abspath(__file__))
+plotdir = join(testdir, 'plots')
+isdir = os.path.isdir(plotdir)
+if not isdir: os.mkdir(plotdir)
+datadir = normpath(join(testdir,'..','..','..','examples','data','river'))
+
+
+class TestResource(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(self):
+        self.data = pd.read_csv(join(datadir, 'tanana_discharge_data.csv'), index_col=0,
+                                parse_dates=True)
+        self.data.columns = ['Q']
+
+        self.results = pd.read_csv(join(datadir, 'tanana_test_results.csv'), index_col=0,
+                                   parse_dates=True)
+
+    @classmethod
+    def tearDownClass(self):
+        pass
+
+    def test_Froude_number(self):
+        v = 2
+        h = 5
+        Fr = river.resource.Froude_number(v, h)
+        self.assertAlmostEqual(Fr, 0.286, places=3)
+
+    def test_exceedance_probability(self):
+        # Create arbitrary discharge between 0 and 8 (N=9)
+        Q = pd.Series(np.arange(9))
+        # Rank order for non-repeating elements simply adds 1 to each element
+        # if N=9, max F = 100((max(Q)+1)/10) = 90%
+        # if N=9, min F = 100((min(Q)+1)/10) = 10%
+        f = river.resource.exceedance_probability(Q)
+        self.assertEqual(f.min().values , 10. )
+        self.assertEqual(f.max().values , 90. )
+
+    def test_polynomial_fit(self):
+        # Calculate a first order polynomial on an x=y line
+        p, r2 = river.resource.polynomial_fit(np.arange(8), np.arange(8),1)
+        # intercept should be 0
+        self.assertAlmostEqual(p[0], 0.0, places=2 )
+        # slope should be 1
+        self.assertAlmostEqual(p[1], 1.0, places=2 )
+        # r-squared should be perfect
+        self.assertAlmostEqual(r2, 1.0, places=2 )
+
+
+    def test_discharge_to_velocity(self):
+        # Create arbitrary discharge between 0 and 8 (N=9)
+        Q = pd.Series(np.arange(9))
+        # Calculate a first order polynomial on a DV_Curve x=y line 10 times greater than the Q values
+        p, r2 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1)
+        # Because the polynomial fit is perfect, we expect V to equal 10*Q
+        V = river.resource.discharge_to_velocity(Q, p)
+        self.assertAlmostEqual(np.sum(10*Q - V['V']), 0.00, places=2 )
+
+    def test_velocity_to_power(self):
+        # Calculate a first order polynomial on a DV_Curve x=y line 10 times greater than the Q values
+        p, r2 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1)
+        # Because the polynomial fit is perfect, we expect V to equal 10*Q
+        V = river.resource.discharge_to_velocity(pd.Series(np.arange(9)), p)
+        # Calculate a first order polynomial on a VP_Curve x=y line 10 times greater than the V values
+        p2, r22 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1)
+        # Set cut in/out to exclude 1 bin on either end of V range
+        cut_in  = V['V'][1]
+        cut_out = V['V'].iloc[-2]
+        # Power should be 10x greater and exclude the ends of V
+        P = river.resource.velocity_to_power(V['V'], p2, cut_in, cut_out)
+        # Cut-in power is zero
+        self.assertAlmostEqual(P['P'][0], 0.00, places=2 )
+        # Cut-out power is zero
+        self.assertAlmostEqual(P['P'].iloc[-1], 0.00, places=2 )
+        # Middle values are 10x the velocity
+        self.assertAlmostEqual((P['P'][1:-1] - 10*V['V'][1:-1] ).sum(), 0.00, places=2 )
+
+
+    def test_energy_produced(self):
+        # If power is always X then energy produced will be X*seconds
+        X=1
+        seconds=1
+        P = pd.Series(X*np.ones(10) )
+        EP = river.resource.energy_produced(P, seconds)
+        self.assertAlmostEqual(EP, X*seconds, places=1 )
+        # For a normal distribution of power, EP = mean*seconds
+        mu=5
+        sigma=1
+        power_dist = pd.Series(np.random.normal(mu, sigma, 10000))
+        EP2 = river.resource.energy_produced(power_dist, seconds)
+        self.assertAlmostEqual(EP2, mu*seconds, places=1 )
+
+
+    def test_plot_flow_duration_curve(self):
+        filename = abspath(join(plotdir, 'river_plot_flow_duration_curve.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        f = river.resource.exceedance_probability(self.data.Q)
+        plt.figure()
+        river.graphics.plot_flow_duration_curve(self.data['Q'], f['F'])
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+    def test_plot_power_duration_curve(self):
+        filename = abspath(join(plotdir, 'river_plot_power_duration_curve.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        f = river.resource.exceedance_probability(self.data.Q)
+        plt.figure()
+        river.graphics.plot_flow_duration_curve(self.results['P_control'], f['F'])
+        plt.savefig(filename, format='png')
+        plt.close()
+
+        self.assertTrue(isfile(filename))
+
+    def test_plot_velocity_duration_curve(self):
+        filename = abspath(join(plotdir, 'river_plot_velocity_duration_curve.png'))
+        if isfile(filename):
+            os.remove(filename)
+
+        f = river.resource.exceedance_probability(self.data.Q)
+        plt.figure()
+        
river.graphics.plot_velocity_duration_curve(self.results['V_control'], f['F']) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_discharge_timeseries(self): + filename = abspath(join(plotdir, 'river_plot_discharge_timeseries.png')) + if isfile(filename): + os.remove(filename) + + plt.figure() + river.graphics.plot_discharge_timeseries(self.data['Q']) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_discharge_vs_velocity(self): + filename = abspath(join(plotdir, 'river_plot_discharge_vs_velocity.png')) + if isfile(filename): + os.remove(filename) + + plt.figure() + river.graphics.plot_discharge_vs_velocity(self.data['Q'], self.results['V_control']) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + def test_plot_velocity_vs_power(self): + filename = abspath(join(plotdir, 'river_plot_velocity_vs_power.png')) + if isfile(filename): + os.remove(filename) + + plt.figure() + river.graphics.plot_velocity_vs_power(self.results['V_control'], self.results['P_control']) + plt.savefig(filename, format='png') + plt.close() + + self.assertTrue(isfile(filename)) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/mhkit/tests/test_river.py b/mhkit/tests/test_river.py deleted file mode 100644 index 2a4c4549d..000000000 --- a/mhkit/tests/test_river.py +++ /dev/null @@ -1,372 +0,0 @@ -import unittest -from os.path import abspath, dirname, join, isfile, normpath, relpath -import os -import numpy as np -import pandas as pd -import matplotlib.pylab as plt -import mhkit.river as river -import netCDF4 -from numpy.testing import assert_array_almost_equal -from pandas.testing import assert_frame_equal -import scipy.interpolate as interp - - -testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,'..','..','examples','data','river')) - - -class TestPerformance(unittest.TestCase): - @classmethod - def setUpClass(self): - self.diameter = 1 - self.height = 2 - self.width = 3 - self.diameters = [1,2,3,4] - - @classmethod - def tearDownClass(self): - pass - - def test_circular(self): - eq, ca = river.performance.circular(self.diameter) - self.assertEqual(eq, self.diameter) - self.assertEqual(ca, 4*np.pi*self.diameter**2.) - - def test_ducted(self): - eq, ca =river.performance.ducted(self.diameter) - self.assertEqual(eq, self.diameter) - self.assertEqual(ca, 4*np.pi*self.diameter**2.) 
- - def test_rectangular(self): - eq, ca = river.performance.rectangular(self.height, self.width) - self.assertAlmostEqual(eq, 2.76, places=2) - self.assertAlmostEqual(ca, self.height*self.width, places=2) - - def test_multiple_circular(self): - eq, ca = river.performance.multiple_circular(self.diameters) - self.assertAlmostEqual(eq, 5.48, places=2) - self.assertAlmostEqual(ca, 23.56, places=2) - - def test_tip_speed_ratio(self): - rotor_speed = [15,16,17,18] # create array of rotor speeds - rotor_diameter = 77 # diameter of rotor for GE 1.5 - inflow_speed = [13,13,13,13] # array of wind speeds - TSR_answer = [4.7,5.0,5.3,5.6] - - TSR = river.performance.tip_speed_ratio(np.asarray(rotor_speed)/60,rotor_diameter,inflow_speed) - - for i,j in zip(TSR,TSR_answer): - self.assertAlmostEqual(i,j,delta=0.05) - - def test_power_coefficient(self): - # data obtained from power performance report of wind turbine - inflow_speed = [4,6,8,10,12,14,16,18,20] - power_out = np.asarray([59,304,742,1200,1400,1482,1497,1497,1511]) - capture_area = 4656.63 - rho = 1.225 - Cp_answer = [0.320,0.493,0.508,0.421,0.284,0.189,0.128,0.090,0.066] - - Cp = river.performance.power_coefficient(power_out*1000,inflow_speed,capture_area,rho) - - for i,j in zip(Cp,Cp_answer): - self.assertAlmostEqual(i,j,places=2) - -class TestResource(unittest.TestCase): - - @classmethod - def setUpClass(self): - self.data = pd.read_csv(join(datadir, 'tanana_discharge_data.csv'), index_col=0, - parse_dates=True) - self.data.columns = ['Q'] - - self.results = pd.read_csv(join(datadir, 'tanana_test_results.csv'), index_col=0, - parse_dates=True) - - @classmethod - def tearDownClass(self): - pass - - def test_Froude_number(self): - v = 2 - h = 5 - Fr = river.resource.Froude_number(v, h) - self.assertAlmostEqual(Fr, 0.286, places=3) - - def test_exceedance_probability(self): - # Create arbitrary discharge between 0 and 8(N=9) - Q = pd.Series(np.arange(9)) - # Rank order for non-repeating elements simply adds 1 to each element - #if N=9, max F = 100((max(Q)+1)/10) = 90% - #if N=9, min F = 100((min(Q)+1)/10) = 10% - f = river.resource.exceedance_probability(Q) - self.assertEqual(f.min().values , 10. ) - self.assertEqual(f.max().values , 90. 
) - - def test_polynomial_fit(self): - # Calculate a first order polynomial on an x=y line - p, r2 = river.resource.polynomial_fit(np.arange(8), np.arange(8),1) - # intercept should be 0 - self.assertAlmostEqual(p[0], 0.0, places=2 ) - # slope should be 1 - self.assertAlmostEqual(p[1], 1.0, places=2 ) - # r-squared should be perfect - self.assertAlmostEqual(r2, 1.0, places=2 ) - - - def test_discharge_to_velocity(self): - # Create arbitrary discharge between 0 and 8(N=9) - Q = pd.Series(np.arange(9)) - # Calculate a first order polynomial on an DV_Curve x=y line 10 times greater than the Q values - p, r2 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1) - # Becuase the polynomial line fits perfect we should expect the V to equal 10*Q - V = river.resource.discharge_to_velocity(Q, p) - self.assertAlmostEqual(np.sum(10*Q - V['V']), 0.00, places=2 ) - - def test_velocity_to_power(self): - # Calculate a first order polynomial on an DV_Curve x=y line 10 times greater than the Q values - p, r2 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1) - # Becuase the polynomial line fits perfect we should expect the V to equal 10*Q - V = river.resource.discharge_to_velocity(pd.Series(np.arange(9)), p) - # Calculate a first order polynomial on an VP_Curve x=y line 10 times greater than the V values - p2, r22 = river.resource.polynomial_fit(np.arange(9), 10*np.arange(9),1) - # Set cut in/out to exclude 1 bin on either end of V range - cut_in = V['V'][1] - cut_out = V['V'].iloc[-2] - # Power should be 10x greater and exclude the ends of V - P = river.resource.velocity_to_power(V['V'], p2, cut_in, cut_out) - #Cut in power zero - self.assertAlmostEqual(P['P'][0], 0.00, places=2 ) - #Cut out power zero - self.assertAlmostEqual(P['P'].iloc[-1], 0.00, places=2 ) - # Middle 10x greater than velocity - self.assertAlmostEqual((P['P'][1:-1] - 10*V['V'][1:-1] ).sum(), 0.00, places=2 ) - - - def test_energy_produced(self): - # If power is always X then energy produced with be x*seconds - X=1 - seconds=1 - P = pd.Series(X*np.ones(10) ) - EP = river.resource.energy_produced(P, seconds) - self.assertAlmostEqual(EP, X*seconds, places=1 ) - # for a normal distribution of Power EP = mean *seconds - mu=5 - sigma=1 - power_dist = pd.Series(np.random.normal(mu, sigma, 10000)) - EP2 = river.resource.energy_produced(power_dist, seconds) -# import ipdb; ipdb.set_trace() - self.assertAlmostEqual(EP2, mu*seconds, places=1 ) - - - def test_plot_flow_duration_curve(self): - filename = abspath(join(testdir, 'river_plot_flow_duration_curve.png')) - if isfile(filename): - os.remove(filename) - - f = river.resource.exceedance_probability(self.data.Q) - plt.figure() - river.graphics.plot_flow_duration_curve(self.data['Q'], f['F']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_power_duration_curve(self): - filename = abspath(join(testdir, 'river_plot_power_duration_curve.png')) - if isfile(filename): - os.remove(filename) - - f = river.resource.exceedance_probability(self.data.Q) - plt.figure() - river.graphics.plot_flow_duration_curve(self.results['P_control'], f['F']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_velocity_duration_curve(self): - filename = abspath(join(testdir, 'river_plot_velocity_duration_curve.png')) - if isfile(filename): - os.remove(filename) - - f = river.resource.exceedance_probability(self.data.Q) - plt.figure() - 
river.graphics.plot_velocity_duration_curve(self.results['V_control'], f['F']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_discharge_timeseries(self): - filename = abspath(join(testdir, 'river_plot_discharge_timeseries.png')) - if isfile(filename): - os.remove(filename) - - plt.figure() - river.graphics.plot_discharge_timeseries(self.data['Q']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_discharge_vs_velocity(self): - filename = abspath(join(testdir, 'river_plot_discharge_vs_velocity.png')) - if isfile(filename): - os.remove(filename) - - plt.figure() - river.graphics.plot_discharge_vs_velocity(self.data['Q'], self.results['V_control']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - def test_plot_velocity_vs_power(self): - filename = abspath(join(testdir, 'river_plot_velocity_vs_power.png')) - if isfile(filename): - os.remove(filename) - - plt.figure() - river.graphics.plot_velocity_vs_power(self.results['V_control'], self.results['P_control']) - plt.savefig(filename, format='png') - plt.close() - - self.assertTrue(isfile(filename)) - - -class TestIO(unittest.TestCase): - - @classmethod - def setUpClass(self): - d3ddatadir = normpath(join(datadir,'d3d')) - - filename= 'turbineTest_map.nc' - self.d3d_flume_data = netCDF4.Dataset(join(d3ddatadir,filename)) - - @classmethod - def tearDownClass(self): - pass - - def test_load_usgs_data_instantaneous(self): - file_name = join(datadir, 'USGS_08313000_Jan2019_instantaneous.json') - data = river.io.usgs.read_usgs_file(file_name) - - self.assertEqual(data.columns, ['Discharge, cubic feet per second']) - self.assertEqual(data.shape, (2972, 1)) # 4 data points are missing - - def test_load_usgs_data_daily(self): - file_name = join(datadir, 'USGS_08313000_Jan2019_daily.json') - data = river.io.usgs.read_usgs_file(file_name) - - expected_index = pd.date_range('2019-01-01', '2019-01-31', freq='D') - self.assertEqual(data.columns, ['Discharge, cubic feet per second']) - self.assertEqual((data.index == expected_index.tz_localize('UTC')).all(), True) - self.assertEqual(data.shape, (31, 1)) - - - def test_request_usgs_data_daily(self): - data=river.io.usgs.request_usgs_data(station="15515500", - parameter='00060', - start_date='2009-08-01', - end_date='2009-08-10', - data_type='Daily') - self.assertEqual(data.columns, ['Discharge, cubic feet per second']) - self.assertEqual(data.shape, (10, 1)) - - - def test_request_usgs_data_instant(self): - data=river.io.usgs.request_usgs_data(station="15515500", - parameter='00060', - start_date='2009-08-01', - end_date='2009-08-10', - data_type='Instantaneous') - self.assertEqual(data.columns, ['Discharge, cubic feet per second']) - # Every 15 minutes or 4 times per hour - self.assertEqual(data.shape, (10*24*4, 1)) - - - def test_layer_data(self): - data=self.d3d_flume_data - variable= 'ucx' - layer=2 - time_index= 3 - layer_data= river.io.d3d.get_layer_data(data, variable, layer, time_index) - layer_compare = 2 - time_index_compare= 4 - layer_data_expected= river.io.d3d.get_layer_data(data, - variable, layer_compare, - time_index_compare) - - assert_array_almost_equal(layer_data.x,layer_data_expected.x, decimal = 2) - assert_array_almost_equal(layer_data.y,layer_data_expected.y, decimal = 2) - assert_array_almost_equal(layer_data.v,layer_data_expected.v, decimal= 2) - - - def test_create_points(self): - x=np.linspace(1, 3, num= 3) - 
y=np.linspace(1, 3, num= 3) - z=1 - points= river.io.d3d.create_points(x,y,z) - - x=[1,2,3,1,2,3,1,2,3] - y=[1,1,1,2,2,2,3,3,3] - z=[1,1,1,1,1,1,1,1,1] - - points_array= np.array([ [x_i, y_i, z_i] for x_i, y_i, z_i in zip(x, y, z)]) - points_expected= pd.DataFrame(points_array, columns=('x','y','z')) - assert_array_almost_equal(points, points_expected,decimal = 2) - - - def test_get_all_data_points(self): - data=self.d3d_flume_data - variable= 'ucx' - time_step= 3 - output = river.io.d3d.get_all_data_points(data, variable, time_step) - size_output = np.size(output) - time_step_compair=4 - output_expected= river.io.d3d.get_all_data_points(data, variable, time_step_compair) - size_output_expected= np.size(output_expected) - self.assertEqual(size_output, size_output_expected) - - - def test_unorm(self): - x=np.linspace(1, 3, num= 3) - y=np.linspace(1, 3, num= 3) - z=np.linspace(1, 3, num= 3) - unorm = river.io.d3d.unorm(x,y,z) - unorm_expected= [np.sqrt(1**2+1**2+1**2),np.sqrt(2**2+2**2+2**2), np.sqrt(3**2+3**2+3**2)] - assert_array_almost_equal(unorm, unorm_expected, decimal = 2) - - def test_turbulent_intensity(self): - data=self.d3d_flume_data - time_step= -1 - x_test=np.linspace(1, 17, num= 10) - y_test=np.linspace(3, 3, num= 10) - z_test=np.linspace(1, 1, num= 10) - - test_points = np.array([ [x, y, z] for x, y, z in zip(x_test, y_test, z_test)]) - points= pd.DataFrame(test_points, columns=['x','y','z']) - - TI= river.io.d3d.turbulent_intensity(data, points, time_step) - - TI_vars= ['turkin1', 'ucx', 'ucy', 'ucz'] - TI_data_raw = {} - for var in TI_vars: - #get all data - var_data_df = river.io.d3d.get_all_data_points(data, var,time_step) - TI_data_raw[var] = var_data_df - TI_data= points.copy(deep=True) - - for var in TI_vars: - TI_data[var] = interp.griddata(TI_data_raw[var][['x','y','z']], - TI_data_raw[var][var], points[['x','y','z']]) - - u_mag=river.io.d3d.unorm(TI_data['ucx'],TI_data['ucy'], TI_data['ucz']) - turbulent_intensity_expected= np.sqrt(2/3*TI_data['turkin1'])/u_mag - - - assert_array_almost_equal(TI.turbulent_intensity, turbulent_intensity_expected, decimal = 2) - -if __name__ == '__main__': - unittest.main() - From dd34e28532a13c8009849bc24c13d8aaf510e43f Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:44:55 -0600 Subject: [PATCH 08/16] move tidal tests to folder --- mhkit/tests/tidal/__init__.py | 0 mhkit/tests/tidal/test_io.py | 43 +++++++++++++++++++ .../{test_tidal.py => tidal/test_resource.py} | 39 ++++------------- 3 files changed, 52 insertions(+), 30 deletions(-) create mode 100644 mhkit/tests/tidal/__init__.py create mode 100644 mhkit/tests/tidal/test_io.py rename mhkit/tests/{test_tidal.py => tidal/test_resource.py} (71%) diff --git a/mhkit/tests/tidal/__init__.py b/mhkit/tests/tidal/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/tidal/test_io.py b/mhkit/tests/tidal/test_io.py new file mode 100644 index 000000000..d0f4dd738 --- /dev/null +++ b/mhkit/tests/tidal/test_io.py @@ -0,0 +1,43 @@ +from os.path import abspath, dirname, join, isfile, normpath, relpath +import matplotlib.pylab as plt +import mhkit.tidal as tidal +import pandas as pd +import numpy as np +import unittest +import os + + +testdir = dirname(abspath(__file__)) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) +datadir = normpath(join(testdir,relpath('../../../examples/data/tidal'))) + + +class TestIO(unittest.TestCase): + + @classmethod + def setUpClass(self): + pass + + @classmethod + def 
tearDownClass(self): + pass + + def test_load_noaa_data(self): + file_name = join(datadir, 's08010.json') + data, metadata = tidal.io.noaa.read_noaa_json(file_name) + self.assertTrue(np.all(data.columns == ['s','d','b']) ) + self.assertEqual(data.shape, (18890, 3)) + + def test_request_noaa_data(self): + data, metadata = tidal.io.noaa.request_noaa_data(station='s08010', parameter='currents', + start_date='20180101', end_date='20180102', + proxy=None, write_json=None) + self.assertTrue(np.all(data.columns == ['s','d','b']) ) + self.assertEqual(data.shape, (92, 3)) + + +if __name__ == '__main__': + unittest.main() + diff --git a/mhkit/tests/test_tidal.py b/mhkit/tests/tidal/test_resource.py similarity index 71% rename from mhkit/tests/test_tidal.py rename to mhkit/tests/tidal/test_resource.py index df29c922e..a7adc996c 100644 --- a/mhkit/tests/test_tidal.py +++ b/mhkit/tests/tidal/test_resource.py @@ -7,33 +7,12 @@ import mhkit.tidal as tidal testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../examples/data/tidal'))) +plotdir = join(testdir, 'plots') +isdir = os.path.isdir(plotdir) +if not isdir: os.mkdir(plotdir) +datadir = normpath(join(testdir,relpath('../../../examples/data/tidal'))) -class TestIO(unittest.TestCase): - - @classmethod - def setUpClass(self): - pass - - @classmethod - def tearDownClass(self): - pass - - def test_load_noaa_data(self): - file_name = join(datadir, 's08010.json') - data, metadata = tidal.io.noaa.read_noaa_json(file_name) - self.assertTrue(np.all(data.columns == ['s','d','b']) ) - self.assertEqual(data.shape, (18890, 3)) - - def test_request_noaa_data(self): - data, metadata = tidal.io.noaa.request_noaa_data(station='s08010', parameter='currents', - start_date='20180101', end_date='20180102', - proxy=None, write_json=None) - self.assertTrue(np.all(data.columns == ['s','d','b']) ) - self.assertEqual(data.shape, (92, 3)) - - class TestResource(unittest.TestCase): @classmethod @@ -63,7 +42,7 @@ def test_principal_flow_directions(self): self.assertEqual(round(direction2,1),round(352.3,1)) def test_plot_current_timeseries(self): - filename = abspath(join(testdir, 'tidal_plot_current_timeseries.png')) + filename = abspath(join(plotdir, 'tidal_plot_current_timeseries.png')) if isfile(filename): os.remove(filename) @@ -75,7 +54,7 @@ def test_plot_current_timeseries(self): self.assertTrue(isfile(filename)) def test_plot_joint_probability_distribution(self): - filename = abspath(join(testdir, 'tidal_plot_joint_probability_distribution.png')) + filename = abspath(join(plotdir, 'tidal_plot_joint_probability_distribution.png')) if isfile(filename): os.remove(filename) @@ -87,7 +66,7 @@ def test_plot_joint_probability_distribution(self): self.assertTrue(isfile(filename)) def test_plot_rose(self): - filename = abspath(join(testdir, 'tidal_plot_rose.png')) + filename = abspath(join(plotdir, 'tidal_plot_rose.png')) if isfile(filename): os.remove(filename) @@ -99,7 +78,7 @@ def test_plot_rose(self): self.assertTrue(isfile(filename)) def test_tidal_phase_probability(self): - filename = abspath(join(testdir, 'tidal_plot_tidal_phase_probability.png')) + filename = abspath(join(plotdir, 'tidal_plot_tidal_phase_probability.png')) if isfile(filename): os.remove(filename) @@ -112,7 +91,7 @@ def test_tidal_phase_probability(self): self.assertTrue(isfile(filename)) def test_tidal_phase_exceedance(self): - filename = abspath(join(testdir, 'tidal_plot_tidal_phase_exceedance.png')) + filename = abspath(join(plotdir, 'tidal_plot_tidal_phase_exceedance.png')) 
if isfile(filename): os.remove(filename) From c5e170f419c5c19dd2f95a28475cc4afa45af74e Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:45:22 -0600 Subject: [PATCH 09/16] move utils tests to folder --- mhkit/tests/utils/__init__.py | 0 mhkit/tests/{ => utils}/test_utils.py | 13 +++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) create mode 100644 mhkit/tests/utils/__init__.py rename mhkit/tests/{ => utils}/test_utils.py (98%) diff --git a/mhkit/tests/utils/__init__.py b/mhkit/tests/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/test_utils.py b/mhkit/tests/utils/test_utils.py similarity index 98% rename from mhkit/tests/test_utils.py rename to mhkit/tests/utils/test_utils.py index 339b8c9c7..07e1ed029 100644 --- a/mhkit/tests/test_utils.py +++ b/mhkit/tests/utils/test_utils.py @@ -1,13 +1,14 @@ -import unittest -import numpy as np -import pandas as pd -import mhkit.utils as utils +from os.path import abspath, dirname, join, isfile, normpath, relpath from pandas.testing import assert_frame_equal +import mhkit.utils as utils +import pandas as pd +import numpy as np +import unittest import json -from os.path import abspath, dirname, join, isfile, normpath, relpath + testdir = dirname(abspath(__file__)) -loads_datadir = normpath(join(testdir,relpath('../../examples/data/loads'))) +loads_datadir = normpath(join(testdir,relpath('../../../examples/data/loads'))) class TestGenUtils(unittest.TestCase): From 794d5892eb5bca73fdb31a317b6da49d5f653277 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 28 Jun 2022 10:46:57 -0600 Subject: [PATCH 10/16] add inits so that tests pick up the tests in subfolders --- mhkit/tests/wave/__init__.py | 0 mhkit/tests/wave/io/__init__.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 mhkit/tests/wave/__init__.py create mode 100644 mhkit/tests/wave/io/__init__.py diff --git a/mhkit/tests/wave/__init__.py b/mhkit/tests/wave/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/mhkit/tests/wave/io/__init__.py b/mhkit/tests/wave/io/__init__.py new file mode 100644 index 000000000..e69de29bb From c5473fea7e2788fa8b39d3c8eece656e93cb2634 Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 8 Jul 2022 09:29:25 -0600 Subject: [PATCH 11/16] Require previous netCDF4 release. 
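
The latest netCDF4 release appears to break the test suite, so pin the
dependency to the previous release (1.5.8) in both requirements.txt and
setup.py until compatibility is restored. A minimal sketch of a sanity
check (hypothetical, not part of this patch) that a local environment
satisfies the pin before running the tests:

    # check_netcdf_pin.py -- hypothetical helper, not included in this patch
    import netCDF4
    from packaging.version import Version  # requires the 'packaging' package

    # The test suite currently assumes netCDF4 <= 1.5.8.
    installed = Version(netCDF4.__version__)
    assert installed <= Version("1.5.8"), (
        f"netCDF4 {installed} is newer than the pinned release 1.5.8")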
---
 requirements.txt | 2 +-
 setup.py         | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 619ed87f7..ca278fe2f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,7 +9,7 @@ lxml
 scikit-learn
 NREL-rex>=0.2.63
 six>=1.13.0
-netCDF4
+netCDF4<=1.5.8
 xarray
 statsmodels
 dolfyn>=1.0.0
diff --git a/setup.py b/setup.py
index 2352619f4..9ab24f9d3 100644
--- a/setup.py
+++ b/setup.py
@@ -28,7 +28,7 @@
     'scikit-learn',
     'NREL-rex>=0.2.63',
     'six>=1.13.0',
-    'netCDF4',
+    'netCDF4<=1.5.8',
     'xarray',
     'statsmodels',
     'pytz',

From fe8f4a053b18f453e76225b77a95170e50997c11 Mon Sep 17 00:00:00 2001
From: ssolson
Date: Fri, 8 Jul 2022 09:56:08 -0600
Subject: [PATCH 12/16] Fix data directory path

---
 mhkit/tests/wave/io/test_wecsim.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mhkit/tests/wave/io/test_wecsim.py b/mhkit/tests/wave/io/test_wecsim.py
index 8737279be..f7bd60570 100644
--- a/mhkit/tests/wave/io/test_wecsim.py
+++ b/mhkit/tests/wave/io/test_wecsim.py
@@ -22,7 +22,7 @@
 
 
 testdir = dirname(abspath(__file__))
-datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+datadir = normpath(join(testdir,relpath('../../../../examples/data/wave')))
 
 class TestWECSim(unittest.TestCase):
 
From 630ad4fec090a90afc68914f32c0f8da9bc3f900 Mon Sep 17 00:00:00 2001
From: ssolson
Date: Fri, 8 Jul 2022 10:14:20 -0600
Subject: [PATCH 13/16] Fix data directory path

---
 mhkit/tests/wave/io/test_cdip.py     | 2 +-
 mhkit/tests/wave/io/test_hindcast.py | 2 +-
 mhkit/tests/wave/io/test_ndbc.py     | 2 +-
 mhkit/tests/wave/io/test_swan.py     | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/mhkit/tests/wave/io/test_cdip.py b/mhkit/tests/wave/io/test_cdip.py
index ebfe969c5..b7267c8e0 100644
--- a/mhkit/tests/wave/io/test_cdip.py
+++ b/mhkit/tests/wave/io/test_cdip.py
@@ -22,7 +22,7 @@
 
 
 testdir = dirname(abspath(__file__))
-datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+datadir = normpath(join(testdir,relpath('../../../../examples/data/wave')))
 
 class TestIOcdip(unittest.TestCase):
 
diff --git a/mhkit/tests/wave/io/test_hindcast.py b/mhkit/tests/wave/io/test_hindcast.py
index 662a529ea..3b7a4743a 100644
--- a/mhkit/tests/wave/io/test_hindcast.py
+++ b/mhkit/tests/wave/io/test_hindcast.py
@@ -22,7 +22,7 @@
 
 
 testdir = dirname(abspath(__file__))
-datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+datadir = normpath(join(testdir,relpath('../../../../examples/data/wave')))
 
 class TestWPTOhindcast(unittest.TestCase):
 
diff --git a/mhkit/tests/wave/io/test_ndbc.py b/mhkit/tests/wave/io/test_ndbc.py
index 6b7483034..17dcf70f5 100644
--- a/mhkit/tests/wave/io/test_ndbc.py
+++ b/mhkit/tests/wave/io/test_ndbc.py
@@ -22,7 +22,7 @@
 
 
 testdir = dirname(abspath(__file__))
-datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+datadir = normpath(join(testdir,relpath('../../../../examples/data/wave')))
 
 class TestIOndbc(unittest.TestCase):
 
diff --git a/mhkit/tests/wave/io/test_swan.py b/mhkit/tests/wave/io/test_swan.py
index 96443c9a8..9bb5d45a2 100644
--- a/mhkit/tests/wave/io/test_swan.py
+++ b/mhkit/tests/wave/io/test_swan.py
@@ -22,7 +22,7 @@
 
 
 testdir = dirname(abspath(__file__))
-datadir = normpath(join(testdir,relpath('../../../examples/data/wave')))
+datadir = normpath(join(testdir,relpath('../../../../examples/data/wave')))
 
 class TestSWAN(unittest.TestCase):
 
From c26a750651323ca4dfff7e8d6a0180d909d3c3ac Mon Sep 17 00:00:00 2001
From: ssolson
Date: Fri, 8 Jul 2022 10:32:12 -0600
Subject: [PATCH 14/16] Fix load data directory path --- mhkit/tests/loads/test_loads.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mhkit/tests/loads/test_loads.py b/mhkit/tests/loads/test_loads.py index 633d6ca00..8c83d4283 100644 --- a/mhkit/tests/loads/test_loads.py +++ b/mhkit/tests/loads/test_loads.py @@ -13,7 +13,7 @@ import os testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../examples/data/loads'))) +datadir = normpath(join(testdir,relpath('../../../examples/data/loads'))) class TestLoads(unittest.TestCase): From 3bca5a0d9350b63efc9f163b89b8d626f2c3e334 Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 8 Jul 2022 11:20:32 -0600 Subject: [PATCH 15/16] Remove relpath call to see if it fixes Windows test suite issues --- mhkit/tests/wave/io/test_hindcast.py | 3 ++- mhkit/tests/wave/io/test_wecsim.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mhkit/tests/wave/io/test_hindcast.py b/mhkit/tests/wave/io/test_hindcast.py index 3b7a4743a..21ca0e85a 100644 --- a/mhkit/tests/wave/io/test_hindcast.py +++ b/mhkit/tests/wave/io/test_hindcast.py @@ -22,7 +22,8 @@ testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +# datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +datadir = normpath(join(testdir,'..','..','..','..','examples','data','wave')) class TestWPTOhindcast(unittest.TestCase): diff --git a/mhkit/tests/wave/io/test_wecsim.py b/mhkit/tests/wave/io/test_wecsim.py index f7bd60570..ebd5b2e5e 100644 --- a/mhkit/tests/wave/io/test_wecsim.py +++ b/mhkit/tests/wave/io/test_wecsim.py @@ -22,7 +22,7 @@ testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +datadir = normpath(join(testdir,'..','..','..','..','examples','data','wave')) class TestWECSim(unittest.TestCase): From a5ba538879e281bfc3f625c73faedfbe1ac5357a Mon Sep 17 00:00:00 2001 From: ssolson Date: Fri, 8 Jul 2022 12:32:31 -0600 Subject: [PATCH 16/16] Remove relpath from the rest of io tests in wave --- mhkit/tests/wave/io/test_cdip.py | 2 +- mhkit/tests/wave/io/test_ndbc.py | 2 +- mhkit/tests/wave/io/test_swan.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mhkit/tests/wave/io/test_cdip.py b/mhkit/tests/wave/io/test_cdip.py index b7267c8e0..f42227329 100644 --- a/mhkit/tests/wave/io/test_cdip.py +++ b/mhkit/tests/wave/io/test_cdip.py @@ -22,7 +22,7 @@ testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +datadir = normpath(join(testdir,'..','..','..','..','examples','data','wave')) class TestIOcdip(unittest.TestCase): diff --git a/mhkit/tests/wave/io/test_ndbc.py b/mhkit/tests/wave/io/test_ndbc.py index 17dcf70f5..27feb5aa6 100644 --- a/mhkit/tests/wave/io/test_ndbc.py +++ b/mhkit/tests/wave/io/test_ndbc.py @@ -22,7 +22,7 @@ testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +datadir = normpath(join(testdir,'..','..','..','..','examples','data','wave')) class TestIOndbc(unittest.TestCase): diff --git a/mhkit/tests/wave/io/test_swan.py b/mhkit/tests/wave/io/test_swan.py index 9bb5d45a2..c3e113d81 100644 --- a/mhkit/tests/wave/io/test_swan.py +++ b/mhkit/tests/wave/io/test_swan.py @@ -22,7 +22,7 @@ testdir = dirname(abspath(__file__)) -datadir = normpath(join(testdir,relpath('../../../../examples/data/wave'))) +datadir = 
normpath(join(testdir,'..','..','..','..','examples','data','wave')) class TestSWAN(unittest.TestCase):
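
For reference, the two datadir constructions that PATCH 15 and 16 swap between
are sketched below (a hypothetical illustration, not part of the patch series;
'wave' stands in for any of the affected test modules). The commits do not
identify the exact Windows failure; the suspect is the relpath() call, which
resolves its argument against the current working directory, while the
replacement builds the path from explicit components anchored at testdir and
so is independent of both the working directory and the platform's separator:

    # path_forms.py -- hypothetical illustration, not part of this patch series
    from os.path import abspath, dirname, join, normpath, relpath

    testdir = dirname(abspath(__file__))

    # Old form: relpath() normalizes '../../../..' relative to the current
    # working directory, so its behavior can vary with how the tests are run.
    datadir_old = normpath(join(testdir, relpath('../../../../examples/data/wave')))

    # New form: explicit components anchored at testdir; join() inserts the
    # correct separator on every platform and never consults the working directory.
    datadir_new = normpath(join(testdir, '..', '..', '..', '..', 'examples', 'data', 'wave'))

    print(datadir_old, datadir_new)  # both normalize to the same directory when relpath() succeeds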