Commit e6ff1511 authored by Eva Börgens

linting the code

parent 52034e1b
@@ -3,3 +3,6 @@
# Ignore IDE
.idea
# Ignore Test folder
Test
\ No newline at end of file
@@ -9,6 +9,7 @@ numpy = "==1.20.3"
scipy = "==1.6.3"
[dev-packages]
pylint = "*"
[requires]
python_version = "3.8"
{
"_meta": {
"hash": {
"sha256": "d2fe735be0d95d0e392406ecb52331b61e892d9cb7579aac2050cca803da1257"
"sha256": "8c019f5f9428c7dd42a3d8805c949fcdbc34dfb07feebf38a9c344d706669a9d"
},
"pipfile-spec": 6,
"requires": {
@@ -150,5 +150,79 @@
"version": "==1.6.3"
}
},
"develop": {}
"develop": {
"astroid": {
"hashes": [
"sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e",
"sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"
],
"markers": "python_version ~= '3.6'",
"version": "==2.5.6"
},
"isort": {
"hashes": [
"sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6",
"sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"
],
"markers": "python_version >= '3.6' and python_version < '4.0'",
"version": "==5.8.0"
},
"lazy-object-proxy": {
"hashes": [
"sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653",
"sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61",
"sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2",
"sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837",
"sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3",
"sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43",
"sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726",
"sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3",
"sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587",
"sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8",
"sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a",
"sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd",
"sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f",
"sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad",
"sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4",
"sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b",
"sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf",
"sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981",
"sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741",
"sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e",
"sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93",
"sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'",
"version": "==1.6.0"
},
"mccabe": {
"hashes": [
"sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
"sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
],
"version": "==0.6.1"
},
"pylint": {
"hashes": [
"sha256:0a049c5d47b629d9070c3932d13bff482b12119b6a241a93bc460b0be16953c8",
"sha256:792b38ff30903884e4a9eab814ee3523731abd3c463f3ba48d7b627e87013484"
],
"index": "pypi",
"version": "==2.8.3"
},
"toml": {
"hashes": [
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b",
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.10.2"
},
"wrapt": {
"hashes": [
"sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"
],
"version": "==1.12.1"
}
}
}
from .covariance import *
from .input_arguments import *
""""
This is the main file to invoke the regional tws uncertainty routines.
"""
from io import read_netcdf, read_ascii, save_results
from covariance import get_timeseries, compute_covariance
from input_arguments import arg_parser
parsed_command_line_input = arg_parser()
@@ -15,17 +20,16 @@ region_coords = read_ascii(region_filename)
grid, grid_std, lon, lat, time = read_netcdf(filename)
timeseries = None
if flag_timeseries:
timeseries, flag_timeseries = get_timeseries(grid, lon, lat, region_coords)
timeseries, flag_timeseries = get_timeseries(grid, lon, lat, region_coords)
else:
timeseries = None
results = None
if flag_uncertainty or flag_matrix:
results = compute_covariance(region_coords, grid_std, flag_uncertainty, flag_matrix)
save_results(out_filename, filename, results, region_coords, timeseries, flag_uncertainty, flag_matrix, flag_timeseries )
results = compute_covariance(region_coords, grid_std, flag_uncertainty, flag_matrix)
else:
results = None
save_results(out_filename, filename, results, region_coords, timeseries,
flag_uncertainty, flag_matrix, flag_timeseries)
from .io import *
""""
Module handling the computation of the covariances and uncertainties
"""
import math
from typing import Dict
import scipy.special as sp
import numpy as np
import numpy.typing as npt
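# Constants: rho converts degrees to radians, R is the Earth radius in km used for the
# spherical distance and area computations below (distances in km, areas in km^2)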
rho = np.pi/180
R = 6367.4447
def get_grid_area(lon, lat):
# Function getting the area weights of a coordinate list
w = np.zeros(len(lat))
def get_grid_area(lon: npt.ArrayLike, lat: npt.ArrayLike) -> np.ndarray:
"""
Computes the area weight of each point of a coordinate list
Args:
lon, array of longitudes: np.ndarray
lat, array of latitudes: np.ndarray
Returns:
area per grid point
"""
area = np.zeros(len(lat))
delta_lon = np.abs(np.unique(lon)[1]-np.unique(lon)[0])
delta_lat = np.abs(np.unique(lat)[1] - np.unique(lat)[0])
for i in range(len(lat)):
w[i] = get_area(0, lat[i]-delta_lat/2, delta_lon, lat[i]+delta_lat/2)
return w
def get_area(lon_ll, lat_ll, lon_ur,lat_ur):
# returns area of rectangular on sphere, defined by corner points lower left ll and upper right ur
a = np.abs((lon_ll-lon_ur)*rho)*np.abs((np.sin(lat_ll*rho)-np.sin(lat_ur*rho)))*R**2
return a
def Cov_function_lat(lat1, lat2, d, theta, ny,a0, ka0_2, ka0_4, ka0_6, ka0_8, a1,ka1_2, ka1_4, ka1_6, ka1_8, c0, k2,k4, k6, k8):
# Function to compute covariance function between two points according to publication Boergens et al. 2020
# Input:
# lat1, lat2: Latitude of the two points
# d: distance between points
# theta: azimuth angle between points
# ny: order of Bessle function
# a0: anisotropy parameter
# ka0_2, ka0_4, ka0_6, ka0_8: Legende polynome parameters for a0
# a1: isotropy shape parameter
# ka1_2, ka1_4, ka1_6, ka1_8: Legende polynome parameters for a1
# c0: amplitude parameter
# k2,k4, k6, k8: Legende polynome parameters for c0
# Author: Eva Boergens, GFZ
# date: June 2019
k = [1,0,k2,0,k4, 0, k6, 0, k8]
P1 = sum_Legendre(len(k)-1, k, lat1*rho)
P2 = sum_Legendre(len(k)-1, k, lat2*rho)
ka0 = [1, 0, ka0_2, 0,ka0_4, 0, ka0_6, 0, ka0_8]
Pa0_1 = sum_Legendre(len(ka0)-1, ka0, np.mean([lat1, lat2])*rho)
A0 = Pa0_1 * a0
ka1 = [1, 0, ka1_2, 0,ka1_4, 0, ka1_6, 0, ka1_8]
Pa1_1 = sum_Legendre(len(ka1) - 1, ka1, np.mean([lat1, lat2]) * rho)
A1 = Pa1_1 * a1
if d==0:
d = 0.001
C0 = yaglom(d, theta, A0, A1, c0, ny)
Cov = P1*P2*C0
for i, lat_i in enumerate(lat):
area[i] = get_area(0, lat_i-delta_lat/2, delta_lon, lat_i+delta_lat/2)
return area
def get_area(lon_ll: float, lat_ll: float, lon_ur: float, lat_ur: float) -> float:
"""
Returns the area of a rectangle on the sphere, defined by its lower left (ll) and upper right (ur) corner points
Args:
lon_ll, lon of lower left corner: float
lat_ll, lat of lower left corner: float
lon_ur, lon of upper right corner: float
lat_ur, lat of upper right corner: float
Returns:
area of the rectangle
"""
area = np.abs((lon_ll-lon_ur)*rho)*np.abs((np.sin(lat_ll*rho)-np.sin(lat_ur*rho)))*R**2
return area
def cov_function_lat(lat1: float, lat2: float,
dist: float,
theta: float,
ny: float,
a_0: float, k_a_0_2: float, k_a_0_4: float, k_a_0_6: float, k_a_0_8: float,
a_1: float, k_a_1_2: float, k_a_1_4: float, k_a_1_6: float, k_a_1_8: float,
c_0: float, k_2: float, k_4: float, k_6: float, k_8: float) -> float:
""""
Function to compute covariance function between two points according to
publication Boergens et al. 2020
Arg:
lat1, lat2, Latitude of the two points: float
d, distance between points: float
theta, azimuth angle between points: float
ny, order of Bessle function: float
a0, anisotropy parameter: float
ka0_2, ka0_4, ka0_6, ka0_8, Legende polynome parameters for a0: float
a1, isotropy shape parameter: float
ka1_2, ka1_4, ka1_6, ka1_8, Legende polynome parameters for a1: float
c0, amplitude parameter: float
k2,k4, k6, k8, Legende polynome parameters for c0: float
Returns:
Covariance: float
"""
k = [1, 0, k_2, 0, k_4, 0, k_6, 0, k_8]
P_1 = sum_legendre(len(k) - 1, k, lat1 * rho)
P_2 = sum_legendre(len(k) - 1, k, lat2 * rho)
k_a_0 = [1, 0, k_a_0_2, 0, k_a_0_4, 0, k_a_0_6, 0, k_a_0_8]
P_a_0_1 = sum_legendre(len(k_a_0) - 1, k_a_0, np.mean([lat1, lat2]) * rho)
A_0 = P_a_0_1 * a_0
k_a_1 = [1, 0, k_a_1_2, 0, k_a_1_4, 0, k_a_1_6, 0, k_a_1_8]
P_a_1_1 = sum_legendre(len(k_a_1) - 1, k_a_1, np.mean([lat1, lat2]) * rho)
A_1 = P_a_1_1 * a_1
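# Guard against the zero-distance singularity of the Yaglom-type function below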
if dist==0:
dist = 0.001
C_0 = yaglom(dist, theta, A_0, A_1, c_0, ny)
Cov = P_1*P_2*C_0
return Cov
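# Note: in compute_covariance below, the 15 trailing parameters are supplied as a single
# parameter vector x and unpacked, e.g. cov_function_lat(lat[ii], lat[jj], dist, theta, 2, *x)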
def Legendre_polynome(n,lat):
def legendre_polynome(n: int, lat: float) -> float:
'''
Computes the Legendre polynomial of degree n at the given latitude lat (in radians)
:param n: int
:param lat: float
:return: float
'''
sin_lat = np.sin(lat)
P_n = sp.legendre(n)
P_n_x = P_n(sin_lat)
return P_n_x
def sum_Legendre(n_max, k, lat):
if n_max!=len(k)-1:
def sum_legendre(n_max: int, leg_weights: npt.ArrayLike, lat: float) -> float:
'''
Computes the weighted sum of Legendre polynomials
:param n_max: maximum degree of the Legendre polynomials: int
:param leg_weights: array of the weights for the sum: np.ndarray
:param lat: latitude where the Legendre polynomials are summed: float
:return: float
'''
if n_max != len(leg_weights) - 1:
raise ValueError('N_max coefficients are needed')
P = 0
p_sum = 0
for i in range(n_max+1):
P += k[i]*Legendre_polynome(i, lat)
return P
def yaglom(d, theta, a0, a1, c0, ny):
alpha = a0 * np.sin(theta) + a1
C = c0 / (alpha * d) ** ny * sp.jv(ny, alpha * d)
return C
def distance(glon0, glat0, glon1, glat1):
# convert geograpic coordinates to spherical distances
# author: Eva Boergens
lon = glon1*rho
lat = glat1*rho
lon0 = glon0*rho
lat0 = glat0*rho
sin_lat0 = math.sin(lat0)
cos_lat0 = math.cos(lat0)
cos_lon = math.cos(lon-lon0)
sin_lat = math.sin(lat)
cos_lat = math.cos(lat)
p_sum += leg_weights[i] * legendre_polynome(i, lat)
return p_sum
def yaglom(dist: float,
theta: float,
a_0: float,
a_1: float,
c_0: float,
ny: float) -> float:
'''
Function to compute the adapted Yaglom function
:param dist: spherical distance: float
:param theta: azimuth angle: float
:param a_0: anisotropic width parameter: float
:param a_1: isotropic width parameter: float
:param c_0: global scaling factor: float
:param ny: order of the Bessel function: float
:return: float
'''
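# alpha combines the anisotropic (a_0 * sin(theta)) and isotropic (a_1) width parameters;
# sp.jv(ny, x) is the Bessel function of the first kind of order ny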
alpha = a_0 * np.sin(theta) + a_1
cov = c_0 / (alpha * dist) ** ny * sp.jv(ny, alpha * dist)
return cov
def distance(lon_0: float, lat_0: float, lon_1: float, lat_1: float) -> float:
'''
Converts geographic coordinates of two points to their spherical distance
:param lon_0[degree]: float
:param lat_0[degree]: float
:param lon_1[degree]: float
:param lat_1[degree]: float
:return: float
'''
lon_1_rad = lon_1 * rho
lat_1_rad = lat_1 * rho
lon_0_rad = lon_0 * rho
lat_0_rad = lat_0 * rho
sin_lat0 = math.sin(lat_0_rad)
cos_lat0 = math.cos(lat_0_rad)
cos_lon = math.cos(lon_1_rad-lon_0_rad)
sin_lat = math.sin(lat_1_rad)
cos_lat = math.cos(lat_1_rad)
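# Spherical law of cosines: cos(d / R) = sin(lat_0) * sin(lat_1) + cos(lat_0) * cos(lat_1) * cos(lon_1 - lon_0)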
hilf = sin_lat*sin_lat0 + cos_lat*cos_lat0*cos_lon
di = R * math.acos(hilf)
d_i = R * math.acos(hilf)
if glon0==glon1 and glat0==glat1:
di =0
if lon_0 == lon_1 and lat_0 == lat_1:
d_i = 0
return di
return d_i
def azimut_angle(lon0, lat0, glon1, glat1):
# get azimut angle between geograpic coordinates
# author: Eva Boergens
def azimut_angle(lon_0: float, lat_0: float, lon_1: float, lat_1: float) -> float:
'''
Returns the azimuth angle between two points given in geographic coordinates
:param lon_0[degree]: float
:param lat_0[degree]: float
:param lon_1[degree]: float
:param lat_1[degree]: float
:return: float
'''
lat1 = glat1 * rho
lat0 = lat0 * rho
lon0 = lon0 * rho
lon1 = glon1 * rho
lat_1_rad = lat_1 * rho
lat_0_rad = lat_0 * rho
lon_0_rad = lon_0 * rho
lon_1_rad = lon_1 * rho
sin_lat0 = math.sin(lat0)
cos_lat0 = math.cos(lat0)
tan_lat = math.tan(lat1)
sin_lat0 = math.sin(lat_0_rad)
cos_lat0 = math.cos(lat_0_rad)
tan_lat = math.tan(lat_1_rad)
cos_lon = math.cos(lon1-lon0)
sin_lon = math.sin(lon1-lon0)
cos_lon = math.cos(lon_1_rad-lon_0_rad)
sin_lon = math.sin(lon_1_rad-lon_0_rad)
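# Forward azimuth from point 0 to point 1: atan2(sin(delta_lon), cos(lat_0) * tan(lat_1) - sin(lat_0) * cos(delta_lon))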
alpha = math.atan2(sin_lon,(cos_lat0*tan_lat-sin_lat0*cos_lon))
if glat1 == 90:
if lat_1 == 90:
alpha = 0
elif glat1 ==-90:
elif lat_1 == -90:
alpha = np.pi
return alpha
def compute_covariance(region_coords, gridstd, flag_uncertainty, flag_matrix):
def compute_covariance(region_coords: npt.ArrayLike,
gridstd: npt.ArrayLike,
flag_uncertainty: bool, flag_matrix: bool) -> Dict[str, np.ndarray]:
'''
Function to compute the covariances for a region
:param region_coords: coordinates of region: np.ndarray[n,2]
:param gridstd: standard deviation for each grid point: np.ndarray[n]
:param flag_uncertainty: return uncertainty of mean tws of region: bool
:param flag_matrix: return covariance matrix of region: bool
:return: Dict[str, np.ndarray]
'''
lon = np.array([r[0] for r in region_coords])
lat = np.array([r[1] for r in region_coords])
@@ -134,51 +218,55 @@ def compute_covariance(region_coords, gridstd, flag_uncertainty, flag_matrix):
m = len(gridstd)
# Grid weights
w = get_grid_area(lon, lat)
region_size = np.sum(w)
area = get_grid_area(lon, lat)
region_size = np.sum(area)
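# Total region area; the per-point weights used below are area[i] / region_size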
# Parameter of covariance function
x = np.array([1.09008520e-03, -2.91816028e+00, -1.90076454e+00, -8.56445896e-01, -4.60125098e+00,
0.00265558, 0.40380741, -0.29669686, 0.06876961, 0.32137515,
5.82423256 * 2.30 * 0.08 * 2.41 * 1.45, -0.2, -0.26511069, -0.17415433, -0.21936711])
x = np.array([1.09e-03, -2.92, -1.90, -0.86, -4.60,
2.66e-03, 0.40, -0.30, 0.07, 0.32,
3.74, -0.2, -0.27, -0.17, -0.22])
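# x holds the 15 trailing parameters of cov_function_lat in call order: a_0 with its four
# Legendre weights, a_1 with its four Legendre weights, and c_0 with its four Legendre
# weights (presumably the fitted values from Boergens et al. 2020)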
if flag_uncertainty:
var_model = np.zeros(m)
else:
var_model = None
if flag_matrix:
cov_model = np.zeros((m,n,n))
else:
cov_model = None
for ii in range(n):
d = 0
dist = 0
theta = np.pi * 2
C = Cov_function_lat(lat[ii], lat[ii], d, theta, 2, *x)
w1 = w[ii] / region_size
w2 = w[ii] / region_size
corr = cov_function_lat(lat[ii], lat[ii], dist, theta, 2, *x)
w_1 = area[ii] / region_size
w_2 = area[ii] / region_size
for j in range(m):
sigma = gridstd[j]
if np.isnan(sigma):
continue
if flag_uncertainty:
var_model[j] += w1 * w2 * sigma **2 * C
var_model[j] += w_1 * w_2 * sigma **2 * corr
if flag_matrix:
cov_model[j,ii,ii] = sigma **2 * C
cov_model[j,ii,ii] = sigma **2 * corr
for jj in range(ii+1, n):
d = distance(lon[ii], lat[ii], lon[jj], lat[jj])
dist = distance(lon[ii], lat[ii], lon[jj], lat[jj])
theta = azimut_angle(lon[ii], lat[ii], lon[jj], lat[jj])
C = Cov_function_lat(lat[ii], lat[jj], d, theta, 2, *x)
w1 = w[ii] / region_size
w2 = w[jj] / region_size
corr = cov_function_lat(lat[ii], lat[jj], dist, theta, 2, *x)
w_1 = area[ii] / region_size
w_2 = area[jj] / region_size
for j in range(m):
sigma = gridstd[j]
if np.isnan(sigma):
continue
if flag_uncertainty:
var_model[j] += w1 * w2 * sigma**2 * C
var_model[j] += w_1 * w_2 * sigma**2 * corr
if flag_matrix:
cov_model[j, ii, jj] = sigma ** 2 * C
cov_model[j, jj, ii] = sigma ** 2 * C
cov_model[j, ii, jj] = sigma ** 2 * corr
cov_model[j, jj, ii] = sigma ** 2 * corr
result = {}
if flag_uncertainty:
std_model = np.sqrt(var_model)
@@ -189,17 +277,25 @@ def compute_covariance(region_coords, gridstd, flag_uncertainty, flag_matrix):
def get_timeseries(grid, lon_grid, lat_grid, region_coords):
'''
Returns the mean TWS time series of the region
:param grid: tws grid: np.ndarray[t,n,m]
:param lon_grid: longitude of grid: np.ndarray[m]
:param lat_grid: latitude of grid: np.ndarray[n]
:param region_coords: coordinates of region of interest: np.ndarray[l,2]
:return: np.ndarray[t]
'''
lon_region = np.array([r[0] for r in region_coords])
lat_region = np.array([r[1] for r in region_coords])
if len(np.isin(lon_grid, lon_region).nonzero()[0])< len(np.unique(lon_region)) or \
len(np.isin(lat_grid, lat_region).nonzero()[0])< len(np.unique(lat_region)):
print("Warning: Region coordinates are not located on the given TWS grid, thus no mean tws time series "
"can be computed. Continue without timeseries output")
print("Warning: Region coordinates are not located on the given TWS grid, thus no "
"mean tws time series can be computed. Continue without timeseries output")
return None, False
# Grid weights
w = get_grid_area(lon_region, lat_region)
area = get_grid_area(lon_region, lat_region)
region_id_lat = np.array([np.nonzero(lat_grid==l)[0] for l in lat_region]).squeeze()
@@ -208,6 +304,6 @@ def get_timeseries(grid, lon_grid, lat_grid, region_coords):
timeseries = np.zeros(grid.shape[0])
for i in range(grid.shape[0]):
grid_part = grid[i,:,:]
timeseries[i] = np.nansum(w * grid_part[region_id_lat, region_id_lon]) / np.sum(w)
timeseries[i] = np.nansum(area * grid_part[region_id_lat, region_id_lon]) / np.sum(area)
return timeseries, True
\ No newline at end of file
return timeseries, True
""""
Module checking the input parameter
"""
from argparse import ArgumentParser, FileType, RawTextHelpFormatter
from .io import *
from io import test_coordiantes
def arg_parser():
'''
Argument parser of input line
:return: ArgumentParser.arguments
'''
parser = ArgumentParser(
prog="tws_covariances.sh",
description="Programm to compute the covariances of given region and return the standard deviation of "
"the mean TWS of this region and/or the full covariance matrix",
description="Programm to compute the covariances of given region and return the "
"standard deviation of the mean TWS of this region and/or the full "
"covariance matrix",
formatter_class=RawTextHelpFormatter,
)
parser.add_argument(
@@ -21,7 +30,8 @@ def arg_parser():
"--region",
required=True,
type=FileType(),
help="Filename of the ascii file defining the requested region. List of coordinates lon, lat, each line one coordinate."
help="Filename of the ascii file defining the requested region. List of coordinates "
"lon, lat, each line one coordinate."
"Coordinates have to be on an even spaced grid. Comment lines have to start with '#'. "
)
parser.add_argument(
@@ -52,7 +62,8 @@ def arg_parser():
args = parser.parse_args()
if not args.matrix and not args.uncertainty and not args.timeseries:
raise RuntimeError("Either covariance matrix (-m) or uncertainties (-u) or timeseries (-t) have to be set for the output!")
raise RuntimeError("Either covariance matrix (-m) or uncertainties (-u) or timeseries (-t) "