Commit 9b9a71fe authored by Eva Börgens's avatar Eva Börgens
Browse files

Initial source code added

parent 4bb6a9ac
from .covariance import *
from .input_arguments import *

# Command-line entry point: read the TWS grid and the region definition,
# compute the requested covariance products and write them to a netcdf file.
parsed_command_line_input = arg_parser()
filename = parsed_command_line_input.filename.name
region_filename = parsed_command_line_input.region.name
out_filename = parsed_command_line_input.output
flag_matrix = parsed_command_line_input.matrix
flag_uncertainty = parsed_command_line_input.uncertainty
flag_timeseries = parsed_command_line_input.timeseries

region_coords = read_ascii(region_filename)
grid, grid_std, lon, lat, time = read_netcdf(filename)

results = compute_covariance(region_coords, grid_std, flag_uncertainty, flag_matrix)

# Bug fix: `timeseries` was only assigned inside the `if` branch, so calling
# save_results without the -t flag raised a NameError. Default it to None.
timeseries = None
if flag_timeseries:
    timeseries, flag_timeseries = get_timeseries(grid, lon, lat, region_coords)

save_results(out_filename, filename, results, region_coords, timeseries, flag_uncertainty, flag_matrix, flag_timeseries)
from .io import *
# Geometry constants: degree-to-radian factor and Earth radius in km.
rho = np.pi/180
R = 6367.4447


def get_grid_area(lon, lat):
    """Return the spherical cell-area weight for every coordinate in the list.

    The grid spacing is derived from the unique longitude and latitude
    values, so the list must contain at least two distinct values per axis.
    """
    lon_step = np.abs(np.unique(lon)[1] - np.unique(lon)[0])
    lat_step = np.abs(np.unique(lat)[1] - np.unique(lat)[0])
    half = lat_step / 2
    # One latitude band of width lat_step and longitude extent lon_step per point.
    return np.array([get_area(0, phi - half, lon_step, phi + half) for phi in lat])


def get_area(lon_ll, lat_ll, lon_ur, lat_ur):
    """Area (km^2) of a rectangle on the sphere.

    Corners are given in degrees as lower-left (lon_ll, lat_ll) and
    upper-right (lon_ur, lat_ur).
    """
    width = np.abs((lon_ll - lon_ur) * rho)
    height = np.abs(np.sin(lat_ll * rho) - np.sin(lat_ur * rho))
    return width * height * R ** 2
def Cov_function_lat(lat1, lat2, d, theta, ny,a0, ka0_2, ka0_4, ka0_6, ka0_8, a1,ka1_2, ka1_4, ka1_6, ka1_8, c0, k2,k4, k6, k8):
    """Covariance between two points according to Boergens et al. 2020.

    Parameters
    ----------
    lat1, lat2 : latitude of the two points (degrees)
    d : distance between the points
    theta : azimuth angle between the points
    ny : order of the Bessel function
    a0 : anisotropy parameter
    ka0_2, ka0_4, ka0_6, ka0_8 : Legendre polynomial parameters for a0
    a1 : isotropy shape parameter
    ka1_2, ka1_4, ka1_6, ka1_8 : Legendre polynomial parameters for a1
    c0 : amplitude parameter
    k2, k4, k6, k8 : Legendre polynomial parameters for c0

    Author: Eva Boergens, GFZ, June 2019
    """
    # Latitude-dependent amplitude: even-degree Legendre expansion evaluated
    # at each point's latitude (odd coefficients are zero).
    k = [1,0,k2,0,k4, 0, k6, 0, k8]
    P1 = sum_Legendre(len(k)-1, k, lat1*rho)
    P2 = sum_Legendre(len(k)-1, k, lat2*rho)
    # Anisotropy parameter modulated by a Legendre expansion at the mean latitude.
    ka0 = [1, 0, ka0_2, 0,ka0_4, 0, ka0_6, 0, ka0_8]
    Pa0_1 = sum_Legendre(len(ka0)-1, ka0, np.mean([lat1, lat2])*rho)
    A0 = Pa0_1 * a0
    # Isotropic shape parameter, same kind of Legendre modulation.
    ka1 = [1, 0, ka1_2, 0,ka1_4, 0, ka1_6, 0, ka1_8]
    Pa1_1 = sum_Legendre(len(ka1) - 1, ka1, np.mean([lat1, lat2]) * rho)
    A1 = Pa1_1 * a1
    # Avoid the singularity of the Yaglom model at zero distance.
    if d==0:
        d = 0.001
    C0 = yaglom(d, theta, A0, A1, c0, ny)
    Cov = P1*P2*C0
    return Cov
def Legendre_polynome(n, lat):
    """Evaluate the degree-n Legendre polynomial at sin(lat), lat in radians."""
    polynomial = sp.legendre(n)
    return polynomial(np.sin(lat))
def sum_Legendre(n_max, k, lat):
    """Weighted sum of Legendre polynomials up to degree n_max.

    Parameters
    ----------
    n_max : highest degree; must equal len(k) - 1
    k : coefficient per degree (k[i] weights the degree-i polynomial)
    lat : latitude in radians

    Raises
    ------
    ValueError if the coefficient list does not match n_max.
    """
    if n_max != len(k) - 1:
        # Bug fix: `raise` with a plain string is a TypeError in Python 3;
        # raise a proper exception instead.
        raise ValueError('N_max coefficients are needed ')
    P = 0
    for i in range(n_max + 1):
        P += k[i] * Legendre_polynome(i, lat)
    return P
def yaglom(d, theta, a0, a1, c0, ny):
    """Yaglom-type covariance model value for distance d and azimuth theta.

    The effective decay parameter combines the anisotropic part (a0, scaled
    by sin(theta)) with the isotropic part a1; ny is the Bessel order.
    """
    alpha = a0 * np.sin(theta) + a1
    scaled_dist = alpha * d
    return c0 / scaled_dist ** ny * sp.jv(ny, scaled_dist)
def distance(glon0, glat0, glon1, glat1):
    """Great-circle distance in km between two geographic points (degrees).

    Author: Eva Boergens
    """
    # Identical points: return 0 before any trigonometry. The original code
    # computed acos() first, which could raise a ValueError when rounding
    # pushed the cosine argument marginally above 1.
    if glon0 == glon1 and glat0 == glat1:
        return 0
    lon = glon1*rho
    lat = glat1*rho
    lon0 = glon0*rho
    lat0 = glat0*rho
    sin_lat0 = math.sin(lat0)
    cos_lat0 = math.cos(lat0)
    cos_lon = math.cos(lon-lon0)
    sin_lat = math.sin(lat)
    cos_lat = math.cos(lat)
    hilf = sin_lat*sin_lat0 + cos_lat*cos_lat0*cos_lon
    # Clamp to acos's domain [-1, 1] against floating-point error.
    hilf = max(-1.0, min(1.0, hilf))
    di = R * math.acos(hilf)
    return di
def azimut_angle(lon0, lat0, glon1, glat1):
    """Azimuth angle (radians) from point 0 towards point 1.

    Coordinates are geographic degrees. The poles are special-cased:
    0 towards the north pole, pi towards the south pole.

    Author: Eva Boergens
    """
    if glat1 == 90:
        return 0
    if glat1 == -90:
        return np.pi
    lat1 = glat1 * rho
    lat0 = lat0 * rho
    lon0 = lon0 * rho
    lon1 = glon1 * rho
    numerator = math.sin(lon1 - lon0)
    denominator = math.cos(lat0) * math.tan(lat1) - math.sin(lat0) * math.cos(lon1 - lon0)
    return math.atan2(numerator, denominator)
def compute_covariance(region_coords, gridstd, flag_uncertainty, flag_matrix):
    """Compute modelled TWS uncertainties and/or covariance matrices for a region.

    Parameters
    ----------
    region_coords : list of [lon, lat] pairs on an evenly spaced grid
    gridstd : per-epoch standard deviation values (length m)
    flag_uncertainty : if True, compute the std of the regional mean per epoch
    flag_matrix : if True, compute the full point-to-point covariance matrix per epoch

    Returns
    -------
    dict with optional keys 'uncertainty' (shape (m,)) and 'matrix' (shape (m, n, n)).
    """
    lon = np.array([r[0] for r in region_coords])
    lat = np.array([r[1] for r in region_coords])
    n = len(lat)  # number of region points
    m = len(gridstd)  # number of epochs
    # Grid weights: spherical cell area per region point
    w = get_grid_area(lon, lat)
    region_size = np.sum(w)
    # Fitted parameters of the covariance function (Boergens et al. 2020)
    x = np.array([1.09008520e-03, -2.91816028e+00, -1.90076454e+00, -8.56445896e-01, -4.60125098e+00,
    0.00265558, 0.40380741, -0.29669686, 0.06876961, 0.32137515,
    5.82423256 * 2.30 * 0.08 * 2.41 * 1.45, -0.2, -0.26511069, -0.17415433, -0.21936711])
    if flag_uncertainty:
        var_model = np.zeros(m)
    if flag_matrix:
        cov_model = np.zeros((m,n,n))
    for ii in range(n):
        # Diagonal term: a point with itself (zero distance; theta fixed to 2*pi)
        d = 0
        theta = np.pi * 2
        C = Cov_function_lat(lat[ii], lat[ii], d, theta, 2, *x)
        w1 = w[ii] / region_size
        w2 = w[ii] / region_size
        for j in range(m):
            sigma = gridstd[j]
            if np.isnan(sigma):
                # Epochs without data are skipped (stay zero in the outputs)
                continue
            if flag_uncertainty:
                var_model[j] += w1 * w2 * sigma **2 * C
            if flag_matrix:
                cov_model[j,ii,ii] = sigma **2 * C
        # Off-diagonal terms: each unordered pair (ii, jj) with jj > ii
        for jj in range(ii+1, n):
            d = distance(lon[ii], lat[ii], lon[jj], lat[jj])
            theta = azimut_angle(lon[ii], lat[ii], lon[jj], lat[jj])
            C = Cov_function_lat(lat[ii], lat[jj], d, theta, 2, *x)
            w1 = w[ii] / region_size
            w2 = w[jj] / region_size
            for j in range(m):
                sigma = gridstd[j]
                if np.isnan(sigma):
                    continue
                if flag_uncertainty:
                    # NOTE(review): each unordered pair is accumulated once here,
                    # while a symmetric double sum over (ii, jj) and (jj, ii)
                    # would count it twice — confirm against the publication
                    # whether a factor of 2 is intended for these terms.
                    var_model[j] += w1 * w2 * sigma**2 * C
                if flag_matrix:
                    # The covariance matrix itself is filled symmetrically.
                    cov_model[j, ii, jj] = sigma ** 2 * C
                    cov_model[j, jj, ii] = sigma ** 2 * C
    result = {}
    if flag_uncertainty:
        # Standard deviation of the regional mean TWS per epoch
        std_model = np.sqrt(var_model)
        result['uncertainty'] = std_model
    if flag_matrix:
        result['matrix'] = cov_model
    return result
def get_timeseries(grid, lon_grid, lat_grid, region_coords):
    """Area-weighted mean TWS time series over the region points.

    Parameters
    ----------
    grid : TWS values, indexed as grid[time, lat, lon] — assumed from the
        indexing below; confirm against read_netcdf's output
    lon_grid, lat_grid : coordinate axes of the grid
    region_coords : list of [lon, lat] pairs

    Returns
    -------
    (timeseries, True) on success, or (None, False) when the region
    coordinates are not located on the grid.
    """
    lon_region = np.array([r[0] for r in region_coords])
    lat_region = np.array([r[1] for r in region_coords])
    # Every region coordinate value must coincide with a grid axis value,
    # otherwise the region cannot be indexed into the grid.
    if len(np.isin(lon_grid, lon_region).nonzero()[0])< len(np.unique(lon_region)) or \
            len(np.isin(lat_grid, lat_region).nonzero()[0])< len(np.unique(lat_region)):
        print("Warning: Region coordinates are not located on the given TWS grid, thus no mean tws time series "
        "can be computed. Continue without timeseries output")
        return None, False
    # Grid weights: spherical cell area per region point
    w = get_grid_area(lon_region, lat_region)
    # Positions of every region point along the grid axes (paired fancy indexing below)
    region_id_lat = np.array([np.nonzero(lat_grid==l)[0] for l in lat_region]).squeeze()
    region_id_lon = np.array([np.nonzero(lon_grid==l)[0] for l in lon_region]).squeeze()
    timeseries = np.zeros(grid.shape[0])
    for i in range(grid.shape[0]):
        grid_part = grid[i,:,:]
        # Weighted mean over the region; NaN cells are ignored in the numerator
        timeseries[i] = np.nansum(w * grid_part[region_id_lat, region_id_lon]) / np.sum(w)
    return timeseries, True
\ No newline at end of file
from argparse import ArgumentParser, FileType, RawTextHelpFormatter
from .io import *
def arg_parser():
    """Build the command line interface, parse sys.argv and validate the input.

    Returns the parsed argument namespace. Raises RuntimeError when neither
    -m nor -u is requested; the region file is validated before returning.
    """
    cli = ArgumentParser(
        prog="tws_covariances.sh",
        description="Programm to compute the covariances of given region and return the standard deviation of "
                    "the mean TWS of this region and/or the full covariance matrix",
        formatter_class=RawTextHelpFormatter,
    )
    # Required input and output paths
    cli.add_argument(
        "-f", "--filename",
        required=True, type=FileType(),
        help="Filename of the netcdf TWS input file, as downloaded from gravis.gfz-potsdam.de",
    )
    cli.add_argument(
        "-r", "--region",
        required=True, type=FileType(),
        help="Filename of the ascii file defining the requested region. List of coordinates lon, lat, each line one coordinate."
             "Coordinates have to be on an even spaced grid. Comment lines have to start with '#'. ",
    )
    cli.add_argument(
        "-o", "--output",
        required=True, type=str,
        help="Filename of the netcdf output file",
    )
    # Optional switches selecting the outputs
    cli.add_argument(
        "-m", "--matrix",
        action="store_true",
        help="Return full covariance matrix",
    )
    cli.add_argument(
        "-u", "--uncertainty",
        action="store_true",
        help="Return time series of standard deviations for mean TWS time series",
    )
    cli.add_argument(
        "-t", "--timeseries",
        action="store_true",
        help="Return mean TWS time series",
    )
    args = cli.parse_args()
    # At least one of the two covariance products must be requested
    if not args.matrix and not args.uncertainty:
        raise RuntimeError("Either covariance matrix (-m) or uncertainties (-u) have to be set for the output")
    # Fail early if the region file is not an evenly spaced coordinate grid
    test_coordiantes(args.region.name)
    return args
\ No newline at end of file
import netCDF4
import re
import numpy as np
import scipy.special as sp
import math
import sys
def read_netcdf(filename):
    """Read the TWS input netcdf file.

    The file must provide the variables lon, lat, time, tws and std_tws.

    Returns
    -------
    grid : the tws variable
    grid_std : one spatially averaged std value per epoch
    lon, lat, time : coordinate/time variables

    Exits the program with a message when the file cannot be read.
    """
    try:
        with netCDF4.Dataset(filename, 'r') as S:
            lon = S.variables['lon'][:]
            lat = S.variables['lat'][:]
            time = S.variables['time'][:]
            grid = S.variables['tws'][:]
            grid_std = S.variables['std_tws'][:]
            # Collapse each epoch's std grid to its spatial mean, ignoring NaNs
            grid_std = np.array([np.nanmean(g) for g in grid_std])
    except Exception:
        print(" Failed to read %s. Variable names in file have to be: lon, lat, time, tws, std_tws" %filename)
        sys.exit()
    return grid, grid_std, lon, lat, time
def read_ascii(filename):
    """Read a two-column ascii coordinate list.

    Each non-comment line must contain two whitespace-separated numbers
    (lon, lat). Lines starting with '#' and blank lines are skipped.
    Returns a list of [lon, lat] pairs; exits with a message on failure.
    """
    try:
        with open(filename) as fid:
            coords = []
            for l in fid:
                # Skip blank lines (the original crashed on them) and comments.
                if not l.strip() or l[0] == '#':
                    continue
                # Raw string for the regex (avoids the invalid-escape warning).
                line = [float(i) for i in re.split(r'\s+', l.strip())]
                coords.append([line[0], line[1]])
    except Exception:
        # Bug fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        print("Could not read %s: Please ensure that every line contains two columns and comment lines are indicated with '#'" %filename)
        sys.exit()
    return coords
def test_coordiantes(filename):
    """Validate that the region file describes an evenly spaced coordinate grid.

    Raises RuntimeError when fewer than two points are given or when the
    unique longitudes/latitudes are not evenly spaced.
    """
    coords = read_ascii(filename)
    lons = np.array([point[0] for point in coords])
    lats = np.array([point[1] for point in coords])
    if len(lons) < 2:
        raise RuntimeError("Coordinate list of region have to contain at least 2 points")
    if not same_dist_elems(np.unique(lons)):
        raise RuntimeError("Longitudes of region not evenly spaced")
    if not same_dist_elems(np.unique(lats)):
        raise RuntimeError("Latitudes of region not evenly spaced")
    return
def same_dist_elems(arr):
    """Return True when the elements of arr are evenly spaced.

    Spacing is compared with a tolerance because the values are parsed
    floats, where exact equality of consecutive differences can fail on
    representation error (e.g. a 0.1-degree grid).
    """
    if len(arr) < 2:
        # Bug fix: the original indexed arr[1] unconditionally and raised an
        # IndexError for a single unique value (e.g. a one-column region).
        return True
    diff = arr[1] - arr[0]
    for x in range(1, len(arr) - 1):
        if not math.isclose(arr[x + 1] - arr[x], diff, rel_tol=1e-9, abs_tol=1e-9):
            return False
    return True
def save_results(outname, inname, results, region_coords, timeseries, flag_uncertainty, flag_matrix, flag_timeseries):
    """Write the computed covariance products to a netcdf file.

    Parameters
    ----------
    outname : path of the output netcdf file
    inname : path of the input netcdf file; time axis and units are copied from it
    results : dict with optional keys 'uncertainty' and 'matrix' (see compute_covariance)
    region_coords : list of [lon, lat] pairs defining the region
    timeseries : mean TWS time series, or None when not computed
    flag_uncertainty, flag_matrix, flag_timeseries : select which variables to write

    Exits with a message when the output file cannot be opened for writing.
    """
    lon = np.array([r[0] for r in region_coords])
    lat = np.array([r[1] for r in region_coords])
    # Copy the time axis and the units from the input file
    with netCDF4.Dataset(inname, format='NETCDF4') as Sin:
        time = Sin.variables['time'][:]
        time_unit = Sin.variables['time'].units
        time_calendar = Sin.variables['time'].calendar
        tws_unit = Sin.variables['tws'].units
    try:
        with netCDF4.Dataset(outname, 'w', format='NETCDF4') as Sout:
            Sout.createDimension('time', len(time))
            time_out = Sout.createVariable('time', 'i4', 'time')
            time_out.units = time_unit
            time_out.calendar = time_calendar
            time_out[:] = time
            # One entry per region point; lon/lat are parallel coordinate lists
            Sout.createDimension('pointID', len(region_coords))
            coords_id = Sout.createVariable('pointID', 'i4', 'pointID')
            coords_id[:] = np.arange(0, len(region_coords))
            coords_lon = Sout.createVariable('lon', 'f4', ('pointID'))
            coords_lon[:] = lon
            coords_lon.long_name = "Lon coordinates tuple of the grid points of the region"
            # Bug fix: the units of lon and lat were swapped. CF convention:
            # longitude is degrees_east, latitude is degrees_north.
            coords_lon.units = "degrees_east"
            coords_lat = Sout.createVariable('lat', 'f4', ('pointID'))
            coords_lat[:] = lat
            coords_lat.long_name = "Lat coordinates tuple of the grid points of the region"
            coords_lat.units = "degrees_north"
            if flag_uncertainty:
                uncertainty = Sout.createVariable('std', 'f4', 'time')
                uncertainty.units = tws_unit
                uncertainty.long_name = "standard deviation of regions mean tws time series"
                uncertainty[:] = results['uncertainty']
            if flag_matrix:
                cov_matrix = Sout.createVariable('cov_matrix', 'f4', ('time','pointID', 'pointID'))
                cov_matrix.long_name = "Covariance matrix between all points of the region"
                cov_matrix.units = tws_unit + '*' + tws_unit
                cov_matrix[:] = results['matrix']
            if flag_timeseries:
                timeseries_out = Sout.createVariable('tws_timeseries', 'f4', 'time')
                timeseries_out.units = tws_unit
                timeseries_out.long_name = "Regions mean tws time series"
                timeseries_out[:] = timeseries
    except PermissionError:
        print("Could not open %s for writing results" %outname)
        sys.exit()
    return
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment