From ae6b715313b8f1b89db56f4eb12d90ddae7015e0 Mon Sep 17 00:00:00 2001
From: ssolson
Date: Tue, 21 Nov 2023 07:48:06 -0700
Subject: [PATCH 01/15] assert to exception

---
 mhkit/river/io/d3d.py | 645 ++++++++++++++++++++++--------------------
 1 file changed, 344 insertions(+), 301 deletions(-)

diff --git a/mhkit/river/io/d3d.py b/mhkit/river/io/d3d.py
index d4db2e266..3354aef01 100644
--- a/mhkit/river/io/d3d.py
+++ b/mhkit/river/io/d3d.py
@@ -10,7 +10,7 @@ def get_all_time(data):
     '''
     Returns all of the time stamps from a D3D simulation passed to the
    function as a NetCDF object (data)
-    
+
     Parameters
     ----------
     data: NetCDF4 object
@@ -24,8 +24,9 @@ def get_all_time(data):
        the simulation started and that the data object contains a snapshot of
       simulation conditions at that time.
     '''
-    
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be a NetCDF4 object')
 
     seconds_run = np.ma.getdata(data.variables['time'][:], False)
 
@@ -57,7 +58,7 @@ def seconds_to_index(data, seconds_run):
     '''
     The function will return the nearest 'time_index' in the data if passed
    an integer number of 'seconds_run'
-    
+
     Parameters
     ----------
     data: NetCDF4 object
@@ -103,26 +104,34 @@ def _convert_time(data, time_index=None, seconds_run=None):
        and incrementing until the simulation is complete. The 'seconds_run' is
       the seconds corresponding to the 'time_index' increments.
     '''
-    
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
-    assert time_index or seconds_run, 'input of time_index or seconds_run needed'
-    assert not(time_index and seconds_run), f'only one time_index or seconds_run'
-    assert isinstance(time_index, (int, float)) or isinstance(seconds_run, (int,
-    float)),'time_index or seconds_run input must be a int or float'
-    
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be NetCDF4 object')
+
+    if not (time_index or seconds_run):
+        raise ValueError('Input of time_index or seconds_run needed')
+
+    if time_index and seconds_run:
+        raise ValueError(
+            'Only one of time_index or seconds_run should be provided')
+
+    if not (isinstance(time_index, (int, float)) or isinstance(seconds_run, (int, float))):
+        raise TypeError(
+            'time_index or seconds_run input must be an int or float')
+
     times = get_all_time(data)
-    
+
     if time_index:
-        QoI= times[time_index]
+        QoI = times[time_index]
     if seconds_run:
-        try:
-            idx=np.where(times == seconds_run)
-            QoI=idx[0][0]
-        except:
+        try:
+            idx = np.where(times == seconds_run)
+            QoI = idx[0][0]
+        except:
             idx = (np.abs(times - seconds_run)).argmin()
-            QoI= idx
-            warnings.warn( f'Warning: seconds_run not found. Closest time stamp'
-                          +'found {times[idx]}', stacklevel= 2)
+            QoI = idx
+            warnings.warn(f'Warning: seconds_run not found. Closest time stamp '
+                          + f'found {times[idx]}', stacklevel=2)
 
     return QoI
 
@@ -154,104 +163,118 @@ def get_layer_data(data, variable, layer_index=-1, time_index=-1):
        simulation has run. The waterdepth is measured from the water surface
       and the "waterlevel" is the water level difference in meters from the
       zero water level. 
'''
-    
-    assert isinstance(time_index, int), 'time_index must be an int'
-    assert isinstance(layer_index, int), 'layer_index must be an int'
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
-    assert variable in data.variables.keys(), 'variable not recognized'
+
+    if not isinstance(time_index, int):
+        raise TypeError('time_index must be an int')
+
+    if not isinstance(layer_index, int):
+        raise TypeError('layer_index must be an int')
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be NetCDF4 object')
+
+    if variable not in data.variables.keys():
+        raise ValueError('variable not recognized')
 
     coords = str(data.variables[variable].coordinates).split()
-    var=data.variables[variable][:]
-    max_time_index= data['time'].shape[0]-1 # to account for zero index
-    assert abs(time_index) <= max_time_index, (f'time_index must be less than'
-                                               +'the absolute value of the max time index {max_time_index}')
-    
-    x=np.ma.getdata(data.variables[coords[0]][:], False)
-    y=np.ma.getdata(data.variables[coords[1]][:], False)
-    
-    
-    if type(var[0][0]) == np.ma.core.MaskedArray:
-        max_layer= len(var[0][0])
-        
-        assert abs(layer_index) <= max_layer,( f'layer_index must be less than'
-                                              +'the max layer {max_layer}')
-        v= np.ma.getdata(var[time_index,:,layer_index], False)
-        dimensions= 3
-        
-    else:
-        assert type(var[0][0])== np.float64, 'data not recognized'
-        dimensions= 2
-        v= np.ma.getdata(var[time_index,:], False)
-        
-    #waterdepth
+    var = data.variables[variable][:]
+    max_time_index = data['time'].shape[0]-1  # to account for zero index
+    if abs(time_index) > max_time_index:
+        raise ValueError(
+            f'absolute value of time_index must be less than or equal to the max time index {max_time_index}')
+
+    x = np.ma.getdata(data.variables[coords[0]][:], False)
+    y = np.ma.getdata(data.variables[coords[1]][:], False)
+
+    if type(var[0][0]) == np.ma.core.MaskedArray:
+        max_layer = len(var[0][0])
+
+        if abs(layer_index) > max_layer:
+            raise ValueError(
+                f'absolute value of layer_index must be less than or equal to the max layer {max_layer}')
+
+        v = np.ma.getdata(var[time_index, :, layer_index], False)
+        dimensions = 3
+
+    else:
+        if type(var[0][0]) != np.float64:
+            raise TypeError('data not recognized')
+        dimensions = 2
+        v = np.ma.getdata(var[time_index, :], False)
+
+    # waterdepth
     if "mesh2d" in variable:
-        cords_to_layers= {'mesh2d_face_x mesh2d_face_y': {'name':'mesh2d_nLayers',
-                        'coords':data.variables['mesh2d_layer_sigma'][:]},
-                        'mesh2d_edge_x mesh2d_edge_y': {'name':'mesh2d_nInterfaces',
-                        'coords':data.variables['mesh2d_interface_sigma'][:]}}
-        bottom_depth=np.ma.getdata(data.variables['mesh2d_waterdepth'][time_index, :], False)
-        waterlevel= np.ma.getdata(data.variables['mesh2d_s1'][time_index, :], False)
+        cords_to_layers = {'mesh2d_face_x mesh2d_face_y': {'name': 'mesh2d_nLayers',
+                                                           'coords': data.variables['mesh2d_layer_sigma'][:]},
+                           'mesh2d_edge_x mesh2d_edge_y': {'name': 'mesh2d_nInterfaces',
+                                                           'coords': data.variables['mesh2d_interface_sigma'][:]}}
+        bottom_depth = np.ma.getdata(
+            data.variables['mesh2d_waterdepth'][time_index, :], False)
+        waterlevel = np.ma.getdata(
+            data.variables['mesh2d_s1'][time_index, :], False)
         coords = str(data.variables['waterdepth'].coordinates).split()
-        
+
     else:
-        cords_to_layers= {'FlowElem_xcc FlowElem_ycc':{'name':'laydim',
-                        'coords':data.variables['LayCoord_cc'][:]},
-                        'FlowLink_xu FlowLink_yu': {'name':'wdim',
-                        'coords':data.variables['LayCoord_w'][:]}}
-        bottom_depth=np.ma.getdata(data.variables['waterdepth'][time_index, :], False)
-        waterlevel= 
np.ma.getdata(data.variables['s1'][time_index, :], False) + cords_to_layers = {'FlowElem_xcc FlowElem_ycc': {'name': 'laydim', + 'coords': data.variables['LayCoord_cc'][:]}, + 'FlowLink_xu FlowLink_yu': {'name': 'wdim', + 'coords': data.variables['LayCoord_w'][:]}} + bottom_depth = np.ma.getdata( + data.variables['waterdepth'][time_index, :], False) + waterlevel = np.ma.getdata(data.variables['s1'][time_index, :], False) coords = str(data.variables['waterdepth'].coordinates).split() - - layer_dim = str(data.variables[variable].coordinates) - - cord_sys= cords_to_layers[layer_dim]['coords'] - layer_percentages= np.ma.getdata(cord_sys, False) #accumulative + + layer_dim = str(data.variables[variable].coordinates) + + cord_sys = cords_to_layers[layer_dim]['coords'] + layer_percentages = np.ma.getdata(cord_sys, False) # accumulative if layer_dim == 'FlowLink_xu FlowLink_yu': - #interpolate - x_laydim=np.ma.getdata(data.variables[coords[0]][:], False) - y_laydim=np.ma.getdata(data.variables[coords[1]][:], False) - points_laydim = np.array([ [x, y] for x, y in zip(x_laydim, y_laydim)]) - + # interpolate + x_laydim = np.ma.getdata(data.variables[coords[0]][:], False) + y_laydim = np.ma.getdata(data.variables[coords[1]][:], False) + points_laydim = np.array([[x, y] for x, y in zip(x_laydim, y_laydim)]) + coords_request = str(data.variables[variable].coordinates).split() - x_wdim=np.ma.getdata(data.variables[coords_request[0]][:], False) - y_wdim=np.ma.getdata(data.variables[coords_request[1]][:], False) - points_wdim=np.array([ [x, y] for x, y in zip(x_wdim, y_wdim)]) - + x_wdim = np.ma.getdata(data.variables[coords_request[0]][:], False) + y_wdim = np.ma.getdata(data.variables[coords_request[1]][:], False) + points_wdim = np.array([[x, y] for x, y in zip(x_wdim, y_wdim)]) + bottom_depth_wdim = interp.griddata(points_laydim, bottom_depth, points_wdim) - water_level_wdim= interp.griddata(points_laydim, waterlevel, - points_wdim) - - idx_bd= np.where(np.isnan(bottom_depth_wdim)) - - for i in idx_bd: - bottom_depth_wdim[i]= interp.griddata(points_laydim, bottom_depth, - points_wdim[i], method='nearest') - water_level_wdim[i]= interp.griddata(points_laydim, waterlevel, - points_wdim[i], method='nearest') - - - waterdepth=[] - - if dimensions== 2: - if layer_dim == 'FlowLink_xu FlowLink_yu': + water_level_wdim = interp.griddata(points_laydim, waterlevel, + points_wdim) + + idx_bd = np.where(np.isnan(bottom_depth_wdim)) + + for i in idx_bd: + bottom_depth_wdim[i] = interp.griddata(points_laydim, bottom_depth, + points_wdim[i], method='nearest') + water_level_wdim[i] = interp.griddata(points_laydim, waterlevel, + points_wdim[i], method='nearest') + + waterdepth = [] + + if dimensions == 2: + if layer_dim == 'FlowLink_xu FlowLink_yu': z = [bottom_depth_wdim] - waterlevel=water_level_wdim + waterlevel = water_level_wdim else: z = [bottom_depth] else: - if layer_dim == 'FlowLink_xu FlowLink_yu': + if layer_dim == 'FlowLink_xu FlowLink_yu': z = [bottom_depth_wdim*layer_percentages[layer_index]] - waterlevel=water_level_wdim + waterlevel = water_level_wdim else: z = [bottom_depth*layer_percentages[layer_index]] - waterdepth=np.append(waterdepth, z) + waterdepth = np.append(waterdepth, z) - time= np.ma.getdata(data.variables['time'][time_index], False)*np.ones(len(x)) + time = np.ma.getdata( + data.variables['time'][time_index], False)*np.ones(len(x)) - layer= np.array([ [x_i, y_i, d_i, w_i, v_i, t_i] for x_i, y_i, d_i, w_i, v_i, t_i in - zip(x, y, waterdepth, waterlevel, v, time)]) - layer_data = 
pd.DataFrame(layer, columns=['x', 'y', 'waterdepth','waterlevel', 'v', 'time'])
+    layer = np.array([[x_i, y_i, d_i, w_i, v_i, t_i] for x_i, y_i, d_i, w_i, v_i, t_i in
+                      zip(x, y, waterdepth, waterlevel, v, time)])
+    layer_data = pd.DataFrame(
+        layer, columns=['x', 'y', 'waterdepth', 'waterlevel', 'v', 'time'])
 
     return layer_data
 
@@ -262,7 +285,7 @@ def create_points(x, y, waterdepth):
     In any order the three inputs can consist of 3 points, 2 points and
    1 array, or 1 point and 2 arrays. The final output DataFrame will be the
    unique combinations of the 3 inputs.
-    
+
     Parameters
     ----------
     x: float, array or int
@@ -276,17 +299,17 @@ def create_points(x, y, waterdepth):
     -------
     points: DataFrame
        DataFrame with columns x, y and waterdepth points.
-    
+
     Example
     -------
     If the inputs are 2 arrays, [1,2] and [3,4,5], and 1 point, [6], the output
    will contain 6 array combinations of the 3 inputs as shown.
-    
+
     x=np.array([1,2])
     y=np.array([3,4,5])
     waterdepth= 6
    d3d.create_points(x,y,waterdepth)
-    
+
       x    y   waterdepth
    0  1.0  3.0  6.0
    1  2.0  3.0  6.0
    2  1.0  4.0  6.0
    3  2.0  4.0  6.0
    4  1.0  5.0  6.0
    5  2.0  5.0  6.0
     '''
-    
-    assert isinstance(x, (int, float, np.ndarray)), ('x must be a int, float'
-                                                     +' or array')
-    assert isinstance(y, (int, float, np.ndarray)), ('y must be a int, float'
-                                                     +' or array')
-    assert isinstance(waterdepth, (int, float, np.ndarray)), ('waterdepth must be a int, float'
-                                                     +' or array')
-    
-    directions = {0:{'name': 'x',
-                     'values': x},
-                  1:{'name': 'y',
-                     'values': y},
-                  2:{'name': 'waterdepth',
-                     'values': waterdepth}}
+
+    if not isinstance(x, (int, float, np.ndarray)):
+        raise TypeError('x must be an int, float, or array')
+
+    if not isinstance(y, (int, float, np.ndarray)):
+        raise TypeError('y must be an int, float, or array')
+
+    if not isinstance(waterdepth, (int, float, np.ndarray)):
+        raise TypeError('waterdepth must be an int, float, or array')
+
+    directions = {0: {'name': 'x',
+                      'values': x},
+                  1: {'name': 'y',
+                      'values': y},
+                  2: {'name': 'waterdepth',
+                      'values': waterdepth}}
 
     for i in directions:
         try:
-            N=len(directions[i]['values'])
+            N = len(directions[i]['values'])
         except:
-            directions[i]['values'] = np.array([directions[i]['values']])
-            N=len(directions[i]['values'])
-        if N == 1 :
-            directions[i]['type']= 'point'
-        elif N > 1 :
-            directions[i]['type']= 'array'
+            directions[i]['values'] = np.array([directions[i]['values']])
+            N = len(directions[i]['values'])
+        if N == 1:
+            directions[i]['type'] = 'point'
+        elif N > 1:
+            directions[i]['type'] = 'array'
         else:
             raise Exception(f'length of direction {directions[i]["name"]} was'
-                            +'neagative or zero')
-    
-    # Check how many times point is in "types"
-    types= [directions[i]['type'] for i in directions]
+                            + ' negative or zero')
+
+    # Check how many times point is in "types"
+    types = [directions[i]['type'] for i in directions]
     N_points = types.count('point')
 
     if N_points >= 2:
-        lens = np.array([len(directions[d]['values']) for d in directions])
+        lens = np.array([len(directions[d]['values']) for d in directions])
         max_len_idx = lens.argmax()
-        not_max_idxs= [i for i in directions.keys()]
-        
+        not_max_idxs = [i for i in directions.keys()]
+
         del not_max_idxs[max_len_idx]
-        
-        for not_max in not_max_idxs:
-            N= len(directions[max_len_idx]['values'])
-            vals =np.ones(N)*directions[not_max]['values']
+
+        for not_max in not_max_idxs:
+            N = len(directions[max_len_idx]['values'])
+            vals = np.ones(N)*directions[not_max]['values']
             directions[not_max]['values'] = np.array(vals)
-        
+
         x_new = directions[0]['values']
         y_new = 
directions[1]['values']
         depth_new = directions[2]['values']
-        
-        request= np.array([ [x_i, y_i, depth_i] for x_i, y_i, depth_i in zip(x_new,
-                                                          y_new, depth_new)])
-        points= pd.DataFrame(request, columns=[ 'x', 'y', 'waterdepth'])
-        
-    elif N_points == 1:
+
+        request = np.array([[x_i, y_i, depth_i] for x_i, y_i, depth_i in zip(x_new,
+                                                                             y_new, depth_new)])
+        points = pd.DataFrame(request, columns=['x', 'y', 'waterdepth'])
+
+    elif N_points == 1:
         # treat as plane
-        #find index of point
+        # find index of point
         idx_point = types.index('point')
-        max_idxs= [i for i in directions.keys()]
+        max_idxs = [i for i in directions.keys()]
         print(max_idxs)
         del max_idxs[idx_point]
-        #find vectors
+        # find vectors
         XX, YY = np.meshgrid(directions[max_idxs[0]]['values'],
-                             directions[max_idxs[1]]['values'] )
-        N_X=np.shape(XX)[1]
-        N_Y=np.shape(YY)[0]
-        ZZ= np.ones((N_Y,N_X))*directions[idx_point]['values']
-        
-        request= np.array([ [x_i, y_i, z_i] for x_i, y_i, z_i in zip(XX.ravel(),
-                                                   YY.ravel() , ZZ.ravel())])
-        columns=[ directions[max_idxs[0]]['name'],
-                 directions[max_idxs[1]]['name'], directions[idx_point]['name']]
-        
-        points= pd.DataFrame(request, columns=columns)
-    else:
-        raise Exception('Can provide at most two arrays')
-        
-    return points
-
-
-def variable_interpolation(data, variables, points='cells', edges= 'none'):
+                             directions[max_idxs[1]]['values'])
+        N_X = np.shape(XX)[1]
+        N_Y = np.shape(YY)[0]
+        ZZ = np.ones((N_Y, N_X))*directions[idx_point]['values']
+
+        request = np.array([[x_i, y_i, z_i] for x_i, y_i, z_i in zip(XX.ravel(),
+                                                                     YY.ravel(), ZZ.ravel())])
+        columns = [directions[max_idxs[0]]['name'],
+                   directions[max_idxs[1]]['name'], directions[idx_point]['name']]
+
+        points = pd.DataFrame(request, columns=columns)
+    else:
+        raise ValueError('Can provide at most two arrays')
+
+    return points
+
+
+def variable_interpolation(data, variables, points='cells', edges='none'):
     '''
     Interpolate multiple variables from the Delft3D data onto the same points.
 
@@ -395,53 +420,57 @@ def variable_interpolation(data, variables, points='cells', edges= 'none'):
     edges: string: 'nearest'
        If edges is set to 'nearest' the code will fill in nan values with
       nearest interpolation. Otherwise only linear interpolation will be used.
-    
+
     Returns
     -------
     transformed_data: DataFrame
        Variables on specified grid points saved under the input variable names
       and the x, y, and waterdepth coordinates of those points.
'''
-    
-    assert isinstance(points, (str, pd.DataFrame)),('points must be a string '
-                                                    +'or DataFrame')
-    if isinstance ( points, str):
-        assert any([points == 'cells', points=='faces']), ('points must be'
-                                                           +' cells or faces')
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be nerCDF4 object'
+
+    if not isinstance(points, (str, pd.DataFrame)):
+        raise TypeError('points must be a string or DataFrame')
+
+    if isinstance(points, str):
+        if not any([points == 'cells', points == 'faces']):
+            raise ValueError('points must be cells or faces')
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be NetCDF4 object')
 
     data_raw = {}
     for var in variables:
-        var_data_df = get_all_data_points(data, var,time_index=-1)
-        var_data_df=var_data_df.loc[:,~var_data_df.T.duplicated(keep='first')]
-        data_raw[var] = var_data_df
-    if type(points) == pd.DataFrame:
+        var_data_df = get_all_data_points(data, var, time_index=-1)
+        var_data_df = var_data_df.loc[:, ~
+                                      var_data_df.T.duplicated(keep='first')]
+        data_raw[var] = var_data_df
+    if type(points) == pd.DataFrame:
         print('points provided')
-    elif points=='faces':
-        points = data_raw['ucx'][['x','y','waterdepth']]
-    elif points=='cells':
-        points = data_raw['turkin1'][['x','y','waterdepth']]
-        
-    transformed_data= points.copy(deep=True)
-    
-    for var in variables :
-        transformed_data[var] = interp.griddata(data_raw[var][['x','y','waterdepth']],
-                                       data_raw[var][var], points[['x','y','waterdepth']])
-        if edges == 'nearest' :
-            idx= np.where(np.isnan(transformed_data[var]))
-       
+    elif points == 'faces':
+        points = data_raw['ucx'][['x', 'y', 'waterdepth']]
+    elif points == 'cells':
+        points = data_raw['turkin1'][['x', 'y', 'waterdepth']]
+
+    transformed_data = points.copy(deep=True)
+
+    for var in variables:
+        transformed_data[var] = interp.griddata(data_raw[var][['x', 'y', 'waterdepth']],
+                                                data_raw[var][var], points[['x', 'y', 'waterdepth']])
+        if edges == 'nearest':
+            idx = np.where(np.isnan(transformed_data[var]))
+
            if len(idx[0]):
-                for i in idx[0]:
-                    transformed_data[var][i]= (interp
-                                   .griddata(data_raw[var][['x','y','waterdepth']],
-                                   data_raw[var][var],
-                                   [points['x'][i],points['y'][i],
-                                   points['waterdepth'][i]], method='nearest'))
-    
+                for i in idx[0]:
+                    transformed_data[var][i] = (interp
+                                                .griddata(data_raw[var][['x', 'y', 'waterdepth']],
+                                                          data_raw[var][var],
+                                                          [points['x'][i], points['y'][i],
+                                                           points['waterdepth'][i]], method='nearest'))
+
     return transformed_data
 
 
-def get_all_data_points(data, variable, time_index=-1):
+def get_all_data_points(data, variable, time_index=-1):
     '''
     Get data points for a passed variable for all layers at a specified time from
    the Delft3D NetCDF4 object by iterating over the `get_layer_data` function.
 
@@ -457,75 +486,80 @@ def get_all_data_points(data, variable, time_index=-1):
     time_index: int
        An integer to pull the time step from the dataset. Default is last
       time step, found with the input -1.
-    
+
     Returns
     -------
     all_data: DataFrame
        DataFrame with columns x, y, waterdepth, waterlevel, variable, and time.
       The waterdepth is measured from the water surface and the "waterlevel" is
       the water level difference in meters from the zero water level. 
-    
-    '''
-    
-    assert isinstance(time_index, int), 'time_index must be a int'
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be NetCDF4 object'
-    assert variable in data.variables.keys(), 'variable not recognized'
+
+    '''
+
+    if not isinstance(time_index, int):
+        raise TypeError('time_index must be an int')
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be NetCDF4 object')
+
+    if variable not in data.variables.keys():
+        raise ValueError('variable not recognized')
 
     max_time_index = len(data.variables[variable][:])
-    assert abs(time_index) <= max_time_index, (f'time_index must be less than'
-                                               +'the max time index {max_time_index}')
+    if abs(time_index) > max_time_index:
+        raise ValueError(
+            f'absolute value of time_index must be less than or equal to the max time index {max_time_index}')
 
     if "mesh2d" in variable:
-        cords_to_layers= {'mesh2d_face_x mesh2d_face_y': {'name':'mesh2d_nLayers',
-                        'coords':data.variables['mesh2d_layer_sigma'][:]},
-                        'mesh2d_edge_x mesh2d_edge_y': {'name':'mesh2d_nInterfaces',
-                        'coords':data.variables['mesh2d_interface_sigma'][:]}}
+        cords_to_layers = {'mesh2d_face_x mesh2d_face_y': {'name': 'mesh2d_nLayers',
+                                                           'coords': data.variables['mesh2d_layer_sigma'][:]},
+                           'mesh2d_edge_x mesh2d_edge_y': {'name': 'mesh2d_nInterfaces',
+                                                           'coords': data.variables['mesh2d_interface_sigma'][:]}}
     else:
-        cords_to_layers= {'FlowElem_xcc FlowElem_ycc':{'name':'laydim',
-                        'coords':data.variables['LayCoord_cc'][:]},
-                        'FlowLink_xu FlowLink_yu': {'name':'wdim',
-                        'coords':data.variables['LayCoord_w'][:]}}
-
-    layer_dim = str(data.variables[variable].coordinates)
-    
-    try:
-        cord_sys= cords_to_layers[layer_dim]['coords']
-    except:
+        cords_to_layers = {'FlowElem_xcc FlowElem_ycc': {'name': 'laydim',
+                                                         'coords': data.variables['LayCoord_cc'][:]},
+                           'FlowLink_xu FlowLink_yu': {'name': 'wdim',
+                                                       'coords': data.variables['LayCoord_w'][:]}}
+
+    layer_dim = str(data.variables[variable].coordinates)
+
+    try:
+        cord_sys = cords_to_layers[layer_dim]['coords']
+    except:
         raise Exception('Coordinates not recognized.')
-    else:
-        layer_percentages= np.ma.getdata(cord_sys, False)
-    
-    x_all=[]
-    y_all=[]
-    depth_all=[]
-    water_level_all=[]
-    v_all=[]
-    time_all=[]
-    
+    else:
+        layer_percentages = np.ma.getdata(cord_sys, False)
+
+    x_all = []
+    y_all = []
+    depth_all = []
+    water_level_all = []
+    v_all = []
+    time_all = []
+
     layers = range(len(layer_percentages))
    for layer in layers:
-        layer_data= get_layer_data(data, variable, layer, time_index)
-        
-        x_all=np.append(x_all, layer_data.x)
-        y_all=np.append(y_all, layer_data.y)
-        depth_all=np.append(depth_all, layer_data.waterdepth)
-        water_level_all=np.append(water_level_all, layer_data.waterlevel)
-        v_all=np.append(v_all, layer_data.v)
-        time_all= np.append(time_all, layer_data.time)
-        
-    known_points = np.array([ [x, y, waterdepth, waterlevel, v, time]
-                   for x, y, waterdepth, waterlevel, v, time in zip(x_all, y_all,
-                   depth_all, water_level_all, v_all, time_all)])
-    
-    all_data= pd.DataFrame(known_points, columns=['x','y','waterdepth', 'waterlevel'
-                           ,f'{variable}', 'time'])
+        layer_data = get_layer_data(data, variable, layer, time_index)
 
-    return all_data
+        x_all = np.append(x_all, layer_data.x)
+        y_all = np.append(y_all, layer_data.y)
+        depth_all = np.append(depth_all, layer_data.waterdepth)
+        water_level_all = np.append(water_level_all, layer_data.waterlevel)
+        v_all = np.append(v_all, layer_data.v)
+        time_all = np.append(time_all, layer_data.time)
 
+    known_points = np.array([[x, y, waterdepth, waterlevel, v, time]
+                             for x, y, waterdepth, 
waterlevel, v, time in zip(x_all,
+                                                              depth_all, water_level_all, v_all, time_all)])
 
-def turbulent_intensity(data, points='cells', time_index= -1,
-                        intermediate_values = False ):
+    all_data = pd.DataFrame(known_points, columns=[
+        'x', 'y', 'waterdepth', 'waterlevel', f'{variable}', 'time'])
 
+    return all_data
+
+
+def turbulent_intensity(data, points='cells', time_index=-1,
+                        intermediate_values=False):
     '''
     Calculate the turbulent intensity percentage for a given data set for the
    specified points. Assumes variable names: ucx, ucy, ucz and turkin1.
 
@@ -548,7 +582,7 @@ def turbulent_intensity(data, points='cells', time_index= -1,
        If false the function will return position and turbulent intensity values.
       If true the function will return position (x, y, z) and the values needed
      to calculate turbulent intensity (ucx, ucy, ucz and turkin1) in a DataFrame.
     Default False.
-    
+
     Returns
     -------
     TI_data : DataFrame
        If intermediate_values is true all values are output.
       If intermediate_values is equal to false only turbulent_intensity and
       x, y, and z variables are output.
            x- position in the x direction
            y- position in the y direction
            waterdepth- position in the vertical direction
            turbulent_intensity- turbulent kinetic energy divided by inflow velocity
            turkin1- turbulent kinetic energy
            ucx- velocity in the x direction
            ucy- velocity in the y direction
            ucz- velocity in the vertical direction
     '''
-    
-    assert isinstance(points, (str, pd.DataFrame)),('points must a string or'
-                                                    +' DataFrame')
-    if isinstance ( points, str):
-        assert any([points == 'cells', points=='faces']), ('points must be cells'
-                                                           +' or faces')
-    assert isinstance(time_index, int), 'time_index must be a int'
-    max_time_index= data['time'].shape[0]-1 # to account for zero index
-    assert abs(time_index) <= max_time_index, (f'time_index must be less than'
-                                               +'the absolute value of the max time index {max_time_index}')
-    assert type(data)== netCDF4._netCDF4.Dataset, 'data must be nerCDF4 object'
-    assert 'turkin1' in data.variables.keys(), ('Varaiable turkin1 not'
-                                                +' present in Data')
-    assert 'ucx' in data.variables.keys(),'Varaiable ucx 1 not present in Data'
-    assert 'ucy' in data.variables.keys(),'Varaiable ucy 1 not present in Data'
-    assert 'ucz' in data.variables.keys(),'Varaiable ucz 1 not present in Data'
-
-    TI_vars= ['turkin1', 'ucx', 'ucy', 'ucz']
+
+    if not isinstance(points, (str, pd.DataFrame)):
+        raise TypeError('points must be a string or DataFrame')
+
+    if isinstance(points, str):
+        if not (points == 'cells' or points == 'faces'):
+            raise ValueError('points must be cells or faces')
+
+    if not isinstance(time_index, int):
+        raise TypeError('time_index must be an int')
+
+    max_time_index = data['time'].shape[0] - 1  # to account for zero index
+    if abs(time_index) > max_time_index:
+        raise ValueError(
+            f'absolute value of time_index must be less than or equal to the max time index {max_time_index}')
+
+    if not isinstance(data, netCDF4._netCDF4.Dataset):
+        raise TypeError('data must be NetCDF4 object')
+
+    for variable in ['turkin1', 'ucx', 'ucy', 'ucz']:
+        if variable not in data.variables.keys():
+            raise ValueError(f'Variable {variable} not present in Data')
+
+    TI_vars = ['turkin1', 'ucx', 'ucy', 'ucz']
     TI_data_raw = {}
     for var in TI_vars:
-        var_data_df = get_all_data_points(data, var ,time_index)
-        TI_data_raw[var] = var_data_df
-    if type(points) == pd.DataFrame:
+        var_data_df = get_all_data_points(data, var, time_index)
+        TI_data_raw[var] = var_data_df
+    if type(points) == pd.DataFrame:
         print('points provided')
-    elif points=='faces':
-        points = TI_data_raw['turkin1'].drop(['waterlevel','turkin1'],axis=1)
-    elif points=='cells':
-        points = TI_data_raw['ucx'].drop(['waterlevel','ucx'],axis=1)
-
+    elif points == 'faces':
+        points = TI_data_raw['turkin1'].drop(['waterlevel', 'turkin1'], axis=1)
+    elif points == 'cells':
+        points = 
TI_data_raw['ucx'].drop(['waterlevel', 'ucx'], axis=1) + TI_data = points.copy(deep=True) - for var in TI_vars: - TI_data[var] = interp.griddata(TI_data_raw[var][['x','y','waterdepth']], - TI_data_raw[var][var], points[['x','y','waterdepth']]) - idx= np.where(np.isnan(TI_data[var])) - + for var in TI_vars: + TI_data[var] = interp.griddata(TI_data_raw[var][['x', 'y', 'waterdepth']], + TI_data_raw[var][var], points[['x', 'y', 'waterdepth']]) + idx = np.where(np.isnan(TI_data[var])) + if len(idx[0]): - for i in idx[0]: - TI_data[var][i]= interp.griddata(TI_data_raw[var][['x','y','waterdepth']], - TI_data_raw[var][var], - [points['x'][i],points['y'][i], points['waterdepth'][i]], - method='nearest') - - u_mag=unorm(np.array(TI_data['ucx']),np.array(TI_data['ucy']), - np.array(TI_data['ucz'])) - - neg_index=np.where( TI_data['turkin1']<0) - zero_bool= np.isclose( TI_data['turkin1'][ TI_data['turkin1']<0].array, - np.zeros(len( TI_data['turkin1'][TI_data['turkin1']<0].array)), - atol=1.0e-4) - zero_ind= neg_index[0][zero_bool] - non_zero_ind= neg_index[0][~zero_bool] - TI_data.loc[zero_ind,'turkin1']=np.zeros(len(zero_ind)) - TI_data.loc[non_zero_ind,'turkin1']=[np.nan]*len(non_zero_ind) - - TI_data['turbulent_intensity']= np.sqrt(2/3*TI_data['turkin1'])/u_mag * 100 #% - + for i in idx[0]: + TI_data[var][i] = interp.griddata(TI_data_raw[var][['x', 'y', 'waterdepth']], + TI_data_raw[var][var], + [points['x'][i], points['y'] + [i], points['waterdepth'][i]], + method='nearest') + + u_mag = unorm(np.array(TI_data['ucx']), np.array(TI_data['ucy']), + np.array(TI_data['ucz'])) + + neg_index = np.where(TI_data['turkin1'] < 0) + zero_bool = np.isclose(TI_data['turkin1'][TI_data['turkin1'] < 0].array, + np.zeros( + len(TI_data['turkin1'][TI_data['turkin1'] < 0].array)), + atol=1.0e-4) + zero_ind = neg_index[0][zero_bool] + non_zero_ind = neg_index[0][~zero_bool] + TI_data.loc[zero_ind, 'turkin1'] = np.zeros(len(zero_ind)) + TI_data.loc[non_zero_ind, 'turkin1'] = [np.nan]*len(non_zero_ind) + + TI_data['turbulent_intensity'] = np.sqrt( + 2/3*TI_data['turkin1'])/u_mag * 100 # % + if intermediate_values == False: - TI_data= TI_data.drop(TI_vars, axis = 1) - + TI_data = TI_data.drop(TI_vars, axis=1) + return TI_data From 7614867df05d0ae3b04c4b04898ce10f33e6baaf Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 21 Nov 2023 09:08:09 -0700 Subject: [PATCH 02/15] Revert "Ensure interpolation values are within range when sampling contours (#278)" This reverts commit 2387a257b218709b6e82052d9d2fc864db5d4509. 
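
For context, the restored `samples_contour` logic splits the closed (Te, Hs) contour at its minimum and maximum energy periods, keeps the half with the larger wave heights, sorts that half by energy period, and interpolates Hs at the requested Te samples. A minimal sketch of that behavior with synthetic contour values (illustrative only, not data from the test suite):

    import numpy as np
    from scipy import interpolate as interp

    # Synthetic closed (Te, Hs) contour; values are illustrative only.
    t_contour = np.array([4.0, 8.0, 12.0, 16.0, 12.0, 8.0])
    hs_contour = np.array([1.0, 6.0, 9.0, 5.0, 2.0, 0.5])

    # Split the contour at the min/max energy period.
    amin, amax = np.argmin(t_contour), np.argmax(t_contour)
    lo, hi = min(amin, amax), max(amin, amax)
    w1 = hs_contour[lo:hi]
    w2 = np.concatenate((hs_contour[hi:], hs_contour[:lo]))

    # Keep the half of the contour with the larger wave heights.
    if np.max(w1) > np.max(w2):
        x, y = t_contour[lo:hi], hs_contour[lo:hi]
    else:
        x = np.concatenate((t_contour[hi:], t_contour[:lo]))
        y = np.concatenate((hs_contour[hi:], hs_contour[:lo]))

    # Sort by energy period and interpolate Hs at the sampled periods.
    order = np.argsort(x)
    si = interp.interp1d(x[order], y[order])
    print(si(np.array([6.0, 10.0])))  # -> [3.5, 7.5]

The range validation removed by this revert guarded that final `interp1d` call; without it, Te samples outside the kept half of the contour raise a ValueError inside `interp1d` (bounds_error defaults to True).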
--- mhkit/tests/wave/test_contours.py | 50 +++++++++++-------------------- mhkit/wave/contours.py | 43 +++++++------------------- 2 files changed, 27 insertions(+), 66 deletions(-) diff --git a/mhkit/tests/wave/test_contours.py b/mhkit/tests/wave/test_contours.py index a555fec28..7b870325b 100644 --- a/mhkit/tests/wave/test_contours.py +++ b/mhkit/tests/wave/test_contours.py @@ -1,15 +1,24 @@ from os.path import abspath, dirname, join, isfile, normpath, relpath -import unittest -import pickle -import json -import os - +from pandas.testing import assert_frame_equal from numpy.testing import assert_allclose +from scipy.interpolate import interp1d +from random import seed, randint import matplotlib.pylab as plt +from datetime import datetime +import xarray.testing as xrt +import mhkit.wave as wave +from io import StringIO import pandas as pd import numpy as np - -import mhkit.wave as wave +import contextlib +import unittest +import netCDF4 +import inspect +import pickle +import time +import json +import sys +import os testdir = dirname(abspath(__file__)) @@ -45,10 +54,6 @@ def setUpClass(self): self.wdrt_dt = 3600 self.wdrt_period = 50 - # `samples_contour`Example data - self.hs_contour = np.array([8.56637939, 9.27612515, 8.70427774]) - self.te_contour = np.array([10, 15, 20]) - @classmethod def tearDownClass(self): pass @@ -235,28 +240,7 @@ def test_kde_copulas(self): self.wdrt_copulas['bivariate_KDE_log_x2'])] self.assertTrue(all(close)) - def test_samples_contours_type_validation(self): - with self.assertRaises(TypeError): - wave.contours.samples_contour( - 'not an array', self.te_contour, self.hs_contour) - with self.assertRaises(TypeError): - wave.contours.samples_contour( - self.te_contour, 'not an array', self.hs_contour) - with self.assertRaises(TypeError): - wave.contours.samples_contour( - self.te_contour, self.hs_contour, 'not an array') - - def test_samples_contours_length_mismatch(self): - with self.assertRaises(ValueError): - wave.contours.samples_contour( - self.te_contour, self.hs_contour, np.array([1, 2])) - - def test_samples_contours_range_validation(self): - with self.assertRaises(ValueError): - wave.contours.samples_contour( - np.array([5, 25]), self.te_contour, self.hs_contour) - - def test_samples_contours_correct_interpolation(self): + def test_samples_contours(self): te_samples = np.array([10, 15, 20]) hs_samples_0 = np.array([8.56637939, 9.27612515, 8.70427774]) hs_contour = np.array(self.wdrt_copulas["gaussian_x1"]) diff --git a/mhkit/wave/contours.py b/mhkit/wave/contours.py index c4695e85f..3007448b8 100644 --- a/mhkit/wave/contours.py +++ b/mhkit/wave/contours.py @@ -1792,51 +1792,28 @@ def samples_contour(t_samples, t_contour, hs_contour): raise TypeError(f't_contour must be of type np.ndarray. Got: {type(t_contour)}') if not isinstance(hs_contour, np.ndarray): raise TypeError(f'hs_contour must be of type np.ndarray. 
Got: {type(hs_contour)}') - if len(t_contour) != len(hs_contour): - raise ValueError( - "t_contour and hs_contour must be of the same length.") - if np.any(t_samples < np.min(t_contour)) or np.any(t_samples > np.max(t_contour)): - raise ValueError( - "All t_samples must be within the range of t_contour.") - - - # Find minimum and maximum energy period values + # finds minimum and maximum energy period values amin = np.argmin(t_contour) amax = np.argmax(t_contour) aamin = np.min([amin, amax]) aamax = np.max([amin, amax]) - - # Separate points along the contour into upper & lower half + # finds points along the contour w1 = hs_contour[aamin:aamax] w2 = np.concatenate((hs_contour[aamax:], hs_contour[:aamin])) - - # Get samples min and max - t_min, t_max = np.min(t_samples), np.max(t_samples) - - # Choose the half of the contour with the largest wave height - if np.max(w1) > np.max(w2): - # Check if the max or min Tp values are within the contour half - include_aamax = t_max >= t_contour[aamax] or t_min <= t_contour[aamin] - # Set the x and y values for interpolation - x1 = t_contour[aamin:aamax + int(include_aamax)] - y1 = hs_contour[aamin:aamax + int(include_aamax)] + if (np.max(w1) > np.max(w2)): + x1 = t_contour[aamin:aamax] + y1 = hs_contour[aamin:aamax] else: - # Check if the max or min Tp values are within the contour half - include_aamin = t_max >= t_contour[aamin] or t_min <= t_contour[aamax] - # Set the x and y values for interpolation - x1 = np.concatenate( - (t_contour[aamax:], t_contour[:aamin + int(include_aamin)])) - y1 = np.concatenate( - (hs_contour[aamax:], hs_contour[:aamin + int(include_aamin)])) - - # Sort data based on the max and min Tp values + x1 = np.concatenate((t_contour[aamax:], t_contour[:aamin])) + y1 = np.concatenate((hs_contour[aamax:], hs_contour[:aamin])) + # sorts data based on the max and min energy period values ms = np.argsort(x1) x = x1[ms] y = y1[ms] - # Interpolation function + # interpolates the sorted data si = interp.interp1d(x, y) - # Interpolate Tp samples values to get Hs values + # finds the wave height based on the user specified energy period values hs_samples = si(t_samples) return hs_samples From 9597e9b9c89cba975a0c00b5241317cfd07a3aea Mon Sep 17 00:00:00 2001 From: ssolson Date: Mon, 29 Apr 2024 08:22:45 -0600 Subject: [PATCH 03/15] use conda-incubator/setup-miniconda --- .github/workflows/main.yml | 42 ++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8ad4e4428..f3b6c378f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -190,24 +190,25 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Setup Conda - uses: s-weigand/setup-conda@v1 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 with: - activate-conda: false - conda-channels: conda-forge + miniconda-version: 'latest' + auto-update-conda: true + activate-environment: TEST + python-version: ${{ matrix.python-version }} + environment-file: environment.yml + use-only-tar-bz2: true - - name: Python ${{ matrix.python-version }} + - name: Create and setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${PYTHON_VER} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config + conda install numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority pip 
install -e . --no-deps --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - source activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . @@ -220,13 +221,11 @@ jobs: - name: Run pytest shell: bash -l {0} run: | - source activate TEST coverage run --rcfile=.github/workflows/.coveragerc --source=./mhkit/ -m pytest -c .github/workflows/pytest.ini - name: Upload coverage data to coveralls.io shell: bash -l {0} run: | - source activate TEST coveralls --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -298,18 +297,19 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Setup Conda - uses: s-weigand/setup-conda@v1 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v3 with: - activate-conda: false - conda-channels: conda-forge + miniconda-version: 'latest' + auto-update-conda: true + activate-environment: TEST + python-version: ${{ matrix.python-version }} + use-only-tar-bz2: true # Optional: use if you want to only download .tar.bz2 packages - - name: Python ${{ matrix.python-version }} + - name: Setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${{ matrix.python-version }} numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority - source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config + conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --no-deps --force-reinstall - name: Download Wave Hindcast data from artifact @@ -334,15 +334,13 @@ jobs: - name: Install MHKiT and run pytest shell: bash -l {0} run: | - source activate TEST python -m pip install --upgrade pip wheel pip install coveralls . 
- coverage run --rcfile=.github/workflows/.coveragehindcastrc -m pytest -c .github/workflows/pytest-hindcast.ini + coverage run --rcfile=.github/workflows/.coveragehindcastrc -m pytest -c .github/workflows/pytest-hindcast.ini - name: Upload coverage data to coveralls.io shell: bash -l {0} run: | - source activate TEST coveralls --service=github env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 908dcc700a94b5f6afe349dcf1b68fd46969e3da Mon Sep 17 00:00:00 2001 From: ssolson Date: Mon, 29 Apr 2024 10:04:34 -0600 Subject: [PATCH 04/15] remove environment-file arg --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f3b6c378f..6bd64e7e5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -197,7 +197,6 @@ jobs: auto-update-conda: true activate-environment: TEST python-version: ${{ matrix.python-version }} - environment-file: environment.yml use-only-tar-bz2: true - name: Create and setup Conda environment From ecab2c3be582bb469a38e552f975a5e6c57b9ee3 Mon Sep 17 00:00:00 2001 From: ssolson Date: Mon, 29 Apr 2024 10:24:37 -0600 Subject: [PATCH 05/15] replace remaining `s-weigand/setup-conda@v1` --- .github/workflows/main.yml | 57 ++++++++++++++++++-------------------- 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6bd64e7e5..9cfa2822f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -57,30 +57,29 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Setup Conda - uses: s-weigand/setup-conda@v1 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v2 with: - activate-conda: false - conda-channels: conda-forge + miniconda-version: 'latest' + auto-update-conda: true + activate-environment: TEST + python-version: ${{ matrix.python-version }} + use-only-tar-bz2: true - - name: Python setup + - name: Setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --no-deps --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - source activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . 
- name: Prepare non-hindcast API data run: | - source activate TEST pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py @@ -101,30 +100,29 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Setup Conda - uses: s-weigand/setup-conda@v1 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v2 with: - activate-conda: false - conda-channels: conda-forge + miniconda-version: 'latest' + auto-update-conda: true + activate-environment: TEST + python-version: ${{ matrix.python-version }} + use-only-tar-bz2: true - - name: Python setup + - name: Setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --no-deps --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - source activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Prepare Wave Hindcast data run: | - source activate TEST pytest mhkit/tests/wave/io/hindcast/test_hindcast.py - name: Upload Wave Hindcast data as artifact @@ -143,30 +141,29 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Setup Conda - uses: s-weigand/setup-conda@v1 + - name: Setup Miniconda + uses: conda-incubator/setup-miniconda@v2 with: - activate-conda: false - conda-channels: conda-forge + miniconda-version: 'latest' + auto-update-conda: true + activate-environment: TEST + python-version: ${{ matrix.python-version }} + use-only-tar-bz2: true - - name: Python setup + - name: Setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - source activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --no-deps --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - source activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Prepare Wind Hindcast data run: | - source activate TEST pytest mhkit/tests/wave/io/hindcast/test_wind_toolkit.py - name: Upload Wind Hindcast data as artifact From bd4f299f06a932911a855ad64cc779780c569456 Mon Sep 17 00:00:00 2001 From: ssolson Date: Mon, 29 Apr 2024 11:00:34 -0600 Subject: [PATCH 06/15] python-version: ${{ env.PYTHON_VER }} --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9cfa2822f..03f00e9c4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -63,14 +63,14 @@ jobs: miniconda-version: 'latest' auto-update-conda: true activate-environment: TEST - python-version: ${{ matrix.python-version }} + python-version: ${{ env.PYTHON_VER }} use-only-tar-bz2: true - name: Setup Conda environment shell: bash -l {0} run: | conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority - pip install -e . --no-deps --force-reinstall + pip install -e . 
--force-reinstall - name: Install dependencies shell: bash -l {0} From 0623dd934c341bc15c2081ec48b968fad56a8e02 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 08:43:20 -0600 Subject: [PATCH 07/15] source activate TEST --- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 03f00e9c4..7d5f90dfc 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -75,11 +75,13 @@ jobs: - name: Install dependencies shell: bash -l {0} run: | + source activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Prepare non-hindcast API data run: | + source activate TEST pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py From cbbfb4e76ac6a7060160393c92654b3663cf39a0 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 08:49:28 -0600 Subject: [PATCH 08/15] more source activate TEST --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7d5f90dfc..912b5326d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -69,6 +69,7 @@ jobs: - name: Setup Conda environment shell: bash -l {0} run: | + source activate TEST conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --force-reinstall From 79d2cf2ff43588f39e8c3f483c27109fb53568e1 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 08:54:20 -0600 Subject: [PATCH 09/15] conda create --- .github/workflows/main.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 912b5326d..1aa3e3b6d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -62,15 +62,14 @@ jobs: with: miniconda-version: 'latest' auto-update-conda: true - activate-environment: TEST python-version: ${{ env.PYTHON_VER }} use-only-tar-bz2: true - name: Setup Conda environment shell: bash -l {0} run: | + conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority source activate TEST - conda install numpy cython pip pytest hdf5 libnetcdf cftime netcdf4 coverage --strict-channel-priority pip install -e . --force-reinstall - name: Install dependencies From ea3065a5b8aad10b290f6c27efb04fc736fe0c81 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 08:56:05 -0600 Subject: [PATCH 10/15] export PATH --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1aa3e3b6d..93a4d556f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -70,6 +70,7 @@ jobs: run: | conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority source activate TEST + export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" pip install -e . 
--force-reinstall - name: Install dependencies From 4a99b64a768a918bfb7aa2234ac7bb3589087b1c Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 08:58:45 -0600 Subject: [PATCH 11/15] conda activate test --- .github/workflows/main.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 93a4d556f..3d027a894 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -69,20 +69,20 @@ jobs: shell: bash -l {0} run: | conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - source activate TEST + conda activate TEST export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" pip install -e . --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - source activate TEST + conda activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Prepare non-hindcast API data run: | - source activate TEST + conda activate TEST pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py From 7a5f9ae49d54141558b5f21761fa4e6fe4e77f1e Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 09:34:19 -0600 Subject: [PATCH 12/15] back to default conda env --- .github/workflows/main.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3d027a894..141085269 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -63,26 +63,23 @@ jobs: miniconda-version: 'latest' auto-update-conda: true python-version: ${{ env.PYTHON_VER }} + activate-environment: TEST use-only-tar-bz2: true - name: Setup Conda environment shell: bash -l {0} run: | - conda create --name TEST python=${{ env.PYTHON_VER }} numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority - conda activate TEST - export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" + conda install numpy cython pip hdf5 libnetcdf cftime netcdf4 --strict-channel-priority pip install -e . --force-reinstall - name: Install dependencies shell: bash -l {0} run: | - conda activate TEST python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Prepare non-hindcast API data run: | - conda activate TEST pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py pytest mhkit/tests/wave/io/test_cdip.py From 249f24a62c5f53bcc59af3628149c50cf06e47f6 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 09:40:05 -0600 Subject: [PATCH 13/15] shell: bash -l {0} --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 141085269..f13d4c198 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -79,6 +79,7 @@ jobs: pip install coverage pytest coveralls . 
- name: Prepare non-hindcast API data + shell: bash -l {0} run: | pytest mhkit/tests/river/test_io_usgs.py pytest mhkit/tests/tidal/test_io.py From 401aa9523603ba61baa2277f561bf9724e088497 Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 10:15:47 -0600 Subject: [PATCH 14/15] conda-incubator/setup-miniconda v2 -> v3 --- .github/workflows/main.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f13d4c198..d6edf4235 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -58,12 +58,12 @@ jobs: uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniconda-version: 'latest' auto-update-conda: true python-version: ${{ env.PYTHON_VER }} - activate-environment: TEST + activate-environment: TESTconda use-only-tar-bz2: true - name: Setup Conda environment @@ -102,7 +102,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniconda-version: 'latest' auto-update-conda: true @@ -143,7 +143,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Miniconda - uses: conda-incubator/setup-miniconda@v2 + uses: conda-incubator/setup-miniconda@v3 with: miniconda-version: 'latest' auto-update-conda: true From e4012a85f9332a6a6068a86a2843189282ee671a Mon Sep 17 00:00:00 2001 From: ssolson Date: Tue, 30 Apr 2024 10:21:25 -0600 Subject: [PATCH 15/15] shell: bash -l {0} --- .github/workflows/main.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d6edf4235..57c3e03eb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -123,6 +123,7 @@ jobs: pip install coverage pytest coveralls . - name: Prepare Wave Hindcast data + shell: bash -l {0} run: | pytest mhkit/tests/wave/io/hindcast/test_hindcast.py @@ -164,6 +165,7 @@ jobs: pip install coverage pytest coveralls . - name: Prepare Wind Hindcast data + shell: bash -l {0} run: | pytest mhkit/tests/wave/io/hindcast/test_wind_toolkit.py @@ -257,11 +259,13 @@ jobs: path: ~/.cache/mhkit - name: Update and install packages + shell: bash -l {0} run: | python -m pip install --upgrade pip wheel pip install coverage pytest coveralls . - name: Run pytest + shell: bash -l {0} run: | coverage run --rcfile=.github/workflows/.coveragerc --source=./mhkit/ -m pytest -c .github/workflows/pytest.ini @@ -301,7 +305,7 @@ jobs: auto-update-conda: true activate-environment: TEST python-version: ${{ matrix.python-version }} - use-only-tar-bz2: true # Optional: use if you want to only download .tar.bz2 packages + use-only-tar-bz2: true - name: Setup Conda environment shell: bash -l {0}
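
A recurring fix across these workflow patches is adding `shell: bash -l {0}` so each `run:` step starts a login shell that re-activates the conda environment created by `conda-incubator/setup-miniconda`. A hypothetical sanity check (not part of this patch series) that a step could run to confirm the expected environment is active:

    # check_env.py -- hypothetical CI helper, not included in these patches.
    # Fails the step if it is not running inside the expected conda environment.
    import os
    import sys

    EXPECTED = "TEST"  # assumed name from the workflow's activate-environment
    active = os.environ.get("CONDA_DEFAULT_ENV")

    print(f"interpreter: {sys.executable}")
    print(f"active conda environment: {active}")

    if active != EXPECTED:
        raise SystemExit(f"expected conda env {EXPECTED!r}, got {active!r}")

Running `python check_env.py` inside a `bash -l {0}` step should print the TEST environment's interpreter path; in a non-login shell `CONDA_DEFAULT_ENV` is typically unset and the check fails, which is the failure mode these patches work around.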