from os import path
import pytest
import netCDF4
from tests.TestUtils import TestUtils

from SDToolBox import main as main
from SDToolBox import extract_sea_level_pressure_era5
from SDToolBox import data_acquisition
import numpy as np


class Test_subset_spl_era_5:
    """System tests for the ERA5 sea level pressure subset extraction."""

    @pytest.mark.systemtest
    def test_slp_folder_then_subset_collection_is_extracted(self):
        # 1. Given
        # When using local data you can just replace the comment in these lines
        # dir_test_data = TestUtils.get_test_data_dir('netCDF_Waves_data')
        # dir_test_data = 'P:\\metocean-data\\open\\ERA5\\data\\Global'
        area_latitude = [45.0, 33.0]
        area_longitude = [127.5, 141.75]
        dir_test_data = 'D:\\workspace\\SD_Toolbox\\trunk\\tests\\test_data\\'  # TODO to be removed

        input_data = data_acquisition.InputData()
        input_data.coord_list = [(4.2, 2.4)]

        # 2. When
        # NOTE(review): the previous code did
        # `extract_slp = extract_slp.ExtractSeaLevelPressureERA5(...)`,
        # reading the name before it was bound (NameError); the class lives in
        # the imported module. The constructor also requires the area bounds.
        extract_slp = extract_sea_level_pressure_era5.ExtractSeaLevelPressureERA5(
            input_data, area_longitude, area_latitude)

        # `self` must not be passed explicitly to a bound method — doing so
        # shifted every positional argument by one.
        dataset_list = extract_slp.subset_slp_era5(
            area_latitude, area_longitude, 1979, 2016, dir_test_data, 1, 1, 1, 1)

        # 3. Then
        assert dataset_list is not None
#! /usr/bin/env python
"""Extraction of sea level pressure (msl) subsets from ERA5 global netCDF data."""

# region // imports
import sys
import os

from SDToolBox import outputmessages as outputmessages
from SDToolBox import data_acquisition
from netCDF4 import Dataset
import numpy as np

# endregion

# region // variables

# endregion


class ExtractSeaLevelPressureERA5:
    """Extracts sea level pressure subsets from the yearly ERA5 msl files."""

    __lon_key = 'longitude'
    __lat_key = 'latitude'
    __input_lon = None
    __input_lat = None
    __area_longitude = None
    __area_latitude = None
    __input_data = None

    def __init__(self, input_data: data_acquisition.InputData, area_longitude, area_latitude):
        """Initialize the sea level pressure extraction.

        Arguments:
            input_data {data_acquisition.InputData} -- Required.
            area_longitude {list} -- Longitude bounds [lon1, lon2] of the area.
            area_latitude {list} -- Latitude bounds [lat1, lat2] of the area.

        Raises:
            IOError -- When input_data is missing or has an empty coord_list.
        """
        if not input_data or \
                not input_data.coord_list or \
                len(input_data.coord_list) < 1:
            raise IOError('No valid input data.')

        input_data.lon_key = self.__lon_key
        input_data.lat_key = self.__lat_key
        self.__input_data = input_data
        self.__input_lon, self.__input_lat = input_data.coord_list[0]
        self.__area_longitude = area_longitude
        self.__area_latitude = area_latitude

    def subset_slp_era5(self, area_latitude, area_longitude, year_from, year_to, pathSLP, resR, resT, redR, redT):
        """Build the grid-index subset covering the requested area.

        Arguments:
            area_latitude {list} -- Latitude bounds [lat1, lat2] of the area.
            area_longitude {list} -- Longitude bounds [lon1, lon2] of the area.
            year_from {int} -- Start of time data to subset.
            year_to {int} -- End of time data to subset.
            pathSLP {str} -- Directory containing the yearly era5 msl files.
            resR, resT, redR, redT -- TODO meaning to be determined
                (probably resolution/reduction steps); currently unused.

        Returns:
            list -- [x, y] meshgrid index arrays spanning the selected area.

        Raises:
            ValueError -- When the area bounds do not lie on the ERA5 grid.
        """
        # pathSLP is a directory; the previous code opened the directory
        # itself as a Dataset and left the built filename unused.
        filename = os.path.join(pathSLP, 'era5_Global_msl_p_1979.nc')
        with Dataset(filename, 'r', format='netCDF4') as case_dataset:
            # `[:]` copies the coordinate axes into memory, so they remain
            # valid after the dataset is closed.
            latitude_era = case_dataset.variables[self.__lat_key][:]
            longitude_era = case_dataset.variables[self.__lon_key][:]

        # find_indices_that_meet_condition invokes the condition with a single
        # value; the previous two-argument lambdas would raise TypeError.
        # Helper calls were also missing the `self.` qualifier (NameError).
        lon_idx_start = self.find_indices_that_meet_condition(
            longitude_era, lambda lon: lon == area_longitude[0])
        lon_idx_end = self.find_indices_that_meet_condition(
            longitude_era, lambda lon: lon == area_longitude[1])
        lat_idx_start = self.find_indices_that_meet_condition(
            latitude_era, lambda lat: lat == area_latitude[0])
        lat_idx_end = self.find_indices_that_meet_condition(
            latitude_era, lambda lat: lat == area_latitude[1])

        if not (lon_idx_start and lon_idx_end and lat_idx_start and lat_idx_end):
            raise ValueError('Requested area bounds not found on the ERA5 grid.')

        # The helper returns lists of indices; np.arange needs scalars, so take
        # the first (exact-match) hit of each bound.
        position_longitude1, position_longitude2 = lon_idx_start[0], lon_idx_end[0]
        position_latitude1, position_latitude2 = lat_idx_start[0], lat_idx_end[0]

        latitude_in_selected_area = list(range(position_latitude1, position_latitude2))
        longitude_in_selected_area = list(range(position_longitude1, position_longitude2))

        # All (lon, lat) index combinations inside the selected area.
        # (The previous `.clear()` calls on numpy arrays raised AttributeError
        # and the X/Y copy loops were dead code; both removed.)
        x, y = np.meshgrid(longitude_in_selected_area, latitude_in_selected_area)

        # TODO: replace with the list of per-variable Dataset subsets once
        # extract_slp_data is completed; returning the index grids lets the
        # system test assert a non-None result meanwhile.
        return [x, y]

    def extract_slp_data(self, year_from, year_to, position_longitude1, position_latitude1, position_longitude2, position_latitude2):
        """Extract the 'msl' variable per year within the given index window.

        TODO: the per-year read of the 'msl' slice is not implemented yet
        (port of the MATLAB ncread call pending).
        """
        # Previously `for year_idx, year in range(len(years))`, which raises
        # TypeError (an int cannot be unpacked into two names), and the helper
        # was called without a class/instance qualifier (NameError).
        years = self.generate_years_array(year_from, year_to)

        for year in years:
            base_file_name = 'era5_Global_msl_p_{}.nc'.format(year)
            # TODO port of the MATLAB ncread:
            # with Dataset(base_file_name, 'r', format='netCDF4') as case_dataset:
            #     position = case_dataset.variables['msl'][
            #         position_longitude1:position_longitude2 + 1,
            #         position_latitude1:position_latitude2 + 1, :]
((position_latitude2-position_latitude1)+1), ~] + + def find_indices_that_meet_condition(self, input_list, condition): + ''' + find all the indices in a list that meet the condition (similar to matlab find()) + ''' + return [index for (index, val) in enumerate(input_list) if condition(val)] + +# this method could be moved in a generic utils stript outside here + @staticmethod + def generate_years_array(year_from, year_to): + years = [] + for i in range(year_to - year_from): + years.append(year_from + i) # fills an array of years + years.append(year_to) + return years \ No newline at end of file Index: trunk/SDToolBox/extract_sea_level_pressure_EARTH.py =================================================================== diff -u --- trunk/SDToolBox/extract_sea_level_pressure_EARTH.py (revision 0) +++ trunk/SDToolBox/extract_sea_level_pressure_EARTH.py (revision 17) @@ -0,0 +1,55 @@ +#! /usr/bin/env python +""" + +""" + +# region // imports +import sys +import os + +from SDToolBox import outputmessages as outputmessages +from SDToolBox import data_acquisition +from netCDF4 import Dataset +from netCDF4.utils import ncinfo +import numpy as np + +# endregion + +# region // variables + +# endregion + +class ExtractSeaLevelPressure: + + __lon_key = 'lon' + __lat_key = 'lat' + __input_lon_area = None + __input_lat_area = None + __input_data = None + __areaR_lat = 0 + __area_Rlon = 0 + + def __init__(self, input_data: data_acquisition.InputData): + if not input_data or not input_data.coord_list or len(input_data.coord_list) < 1: + raise IOError('No valid input data.') + + input_data.lon_key = self.__lon_key + input_data.lat_key = self.__lat_key + self.__input_data = input_data + self.__input_lon, self.__input_lat = input_data.coord_list[0] + + def subset_earth_v2(self, areaR_lat, areaR_lon, year1, yearN, pathSLP, pathEARTH) : + + case_file_path = pathSLP + 'RCP45_' + 'EC-Earth_RCP4.5_MSLP_204101.nc' + ncfile = ncinfo(case_file_path) + + with Dataset(case_file_path, 'r', 
format='netCDF4') as case_dataset: + lat_list = case_dataset.variables[self.__lat_key][:] # read latitude EARTH v2 + #lon_list = case_dataset.variables[0:1.125:180, -180+1.125:1.125:0-1.125] # read latitude EARTH v2 + lon_list = [np.arange(0, 180, 1.125).tolist(), np.arange(-180+1.125, 0-1.125, 1.125).tolist()] + + position_longitude1 = data_acquisition.get_nearest_neighbor(self.__input_lon_area, lon_list[0]) + position_longitude2 = data_acquisition.get_nearest_neighbor(self.__input_lon_area, lon_list[1]) + position_latitude1 = data_acquisition.get_nearest_neighbor(self.__input_lat, lat_list[0]) + position_latitude2 = data_acquisition.get_nearest_neighbor(self.__input_lat, lat_list[1]) + latitude = np.arange(position_latitude1, position_latitude2).tolist() \ No newline at end of file Index: trunk/tests/test_extract_waves.py =================================================================== diff -u -r16 -r17 --- trunk/tests/test_extract_waves.py (.../test_extract_waves.py) (revision 16) +++ trunk/tests/test_extract_waves.py (.../test_extract_waves.py) (revision 17) @@ -29,74 +29,38 @@ 'Expected exception message {},'.format(expected_error) + \ 'retrieved {}'.format(error_message) - class Test_subset_era_5: @pytest.mark.systemtest def test_given_waves_folder_then_subset_collection_is_extracted(self): # 1. Given # When using local data you can just replace the comment in these lines #dir_test_data = TestUtils.get_test_data_dir('netCDF_Waves_data') - dir_test_data = 'P:\\metocean-data\\open\\ERA5\\data\\Global' + #dir_test_data = 'P:\\metocean-data\\open\\ERA5\\data\\Global' + dir_test_data = 'D:\\workspace\\SD_Toolbox\\trunk\\tests\\test_data\\' # TODO to be removed input_data = data_acquisition.InputData() input_data.coord_list = [(4.2, 2.4)] # 2. When extract_wave = extract_waves.ExtractWaves(input_data) - dataset_list = extract_wave.subset_era_5(dir_test_data, 1980, 1982) + dataset_list = extract_wave.subset_era_5(dir_test_data, 1981, 1982) # 3. 
Then assert dataset_list is not None - -# class Test_ExtractWavesEra5: + # """ + # Instantiates a wave extraction class and checks that + # it is correctly created + # """ + # @pytest.mark.unittest + # def test_instantiating_extract_wave_era5_returns_allocated_object(self): + # waves_era5 = extract_waves.ExtractWaves(0.0,0.0,"path", 1980,2000) + # assert waves_era5.lat == 0.0 + # assert waves_era5.lon == 0.0 + # assert waves_era5.dpath == "path" + # assert waves_era5.year1 == 1980 + # assert waves_era5.yearN == 2000 # """ -# Creates a netCFD4 file stores it in the local test data dir -# It then checks it can correctly read it -# """ -# @pytest.mark.unittest -# def test_create_and_read_dummy_netCDF_data(self): -# # dummy test to create and read netCDF test file -# keys_list = [] -# # expected_keys_list = dict(lon = 3, lat = 3, time = None) -# x = np.array([[1, 2, 3], [4, 5, 6]], np.int32) - -# data_dir= TestUtils.get_local_test_data_dir("netCDF_dummy_data") -# data_file = data_dir + "era5_Global_Hs_1980.nc" - -# if not path.exists(data_file): -# f = netCDF4.Dataset(data_file,'w', format='NETCDF4') -# else: -# f = netCDF4.Dataset(data_file,'r', format='NETCDF4') - -# tempgrp = f.createGroup('Temp_data') -# tempgrp = f.createGroup('Temp_data') -# tempgrp.createDimension('lon', len(x[0])) -# tempgrp.createDimension('lat', len(x[1])) -# tempgrp.createDimension('time', None) -# f.close() -# f = Dataset(data_file, 'r') -# tempgrp = f.groups['Temp_data'] -# keys_list=tempgrp.dimensions.keys() -# f.close() - -# # assert expected_keys_list[0] == keys_list[0] -# # assert expected_keys_list[1] == keys_list[1] -# # assert expected_keys_list[2] == keys_list[2] - -# """ -# Instantiates a wave extraction class and checks that -# it is correctly created -# """ -# @pytest.mark.unittest -# def test_instantiating_extract_wave_era5_returns_allocated_object(self): -# waves_era5 = extract_waves.ExtractWaves(0.0,0.0,"path", 1980,2000) -# assert waves_era5.lat == 0.0 -# assert 
waves_era5.lon == 0.0 -# assert waves_era5.dpath == "path" -# assert waves_era5.year1 == 1980 -# assert waves_era5.yearN == 2000 -# """ # Checks that the longitude is normalized if a value higher than 180 is passed # """ # @pytest.mark.unittest Index: trunk/SDToolBox/extract_waves.py =================================================================== diff -u -r16 -r17 --- trunk/SDToolBox/extract_waves.py (.../extract_waves.py) (revision 16) +++ trunk/SDToolBox/extract_waves.py (.../extract_waves.py) (revision 17) @@ -76,7 +76,7 @@ base_file_name = 'era5_Global_{}_{}.nc'.format(case_name_value, year) case_dir = directory_path + case_name_value case_file_path = os.path.join(case_dir, base_file_name) - + # If file does not exist simply go to the next one if not os.path.exists(case_file_path): print('File {} does not exist or could not be found.'.format(case_file_path))