Commit 823d0384 authored by Michael Rudolf's avatar Michael Rudolf
Browse files

Major changes and performance improvements.

- Changed from Pandas dataframes to numpy arrays or native python types
- Replaced savgol-filter with scipy.signal.savgol_filter (250% faster)
- Improved peak-detection (2000% faster)
- Optimized the mutual linear regression with list comprehensions
- Added a bootstrap linear regression function
- Replaced hardcoded axis scaling by ratio (x1.125 instead of +2000)
- Removed all r'…' raw strings for a more consistent layout, using \\ escapes in TeX labels
- The timeseries plot is now flexible for experiments with fewer runs
- The saving functions were adjusted to match the new data types
- Some variables have been renamed
- Cleaned and reformatted code to match pep8 style
parent c869694e
......@@ -2,25 +2,26 @@
"""
Created on Mon Jul 23 11:41:01 2018
@author: analab
@author: Michael Warsitzka, Michael Rudolf
"""
# %%=================FUNCTION================================================
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import pandas as pd
import codecs
import csv
import os
from nptdms import TdmsFile
import nptdms
import matplotlib.pyplot as plt
from scipy import signal
from scipy import stats
from scipy.signal import savgol_filter
from scipy import optimize
# %%==============CONVERSION===================================================
def convert(path, file, var):
'''Reads data from source files and returns a dictionary with data'''
# possible inputs and linked helper functions
file_convert = {'.asc': _readasc,
'.dat': _readdat,
......@@ -37,90 +38,58 @@ def convert(path, file, var):
# Additional data
data['shearstress'] = (data['shearforce']*var['lo'])/(var['li']*var['A'])
data['normalstress'] = data['normalforce']/var['A']
data['displacement'] = np.cumsum(data['time'][1]*data['velocity'][:-1])
data['displacement'] = np.cumsum(data['time'][1]*data['velocity'])
# create new dataframe
df = pd.DataFrame.from_dict(data, orient='index').transpose()
print(file+' read')
return df
return data
# %%==================EVALUATION SHEAR CURVE DATA==============================
def eval_shearstress(R, var):
def eval_shearstress(cur_dat, var):
velnoise, stressnoise = var['velnoise'], var['stressnoise']
sw = var['smoothwindow']
# Evaluation:
firstdata = next(a for a, value in enumerate(R.velocity) if R.velocity[a] >= velnoise)
firststop = next(b for b, value in enumerate(pd.Series(R.velocity).values[firstdata:])
if pd.Series(R.velocity).values[firstdata:][b] <= velnoise)-10
firststop = firststop+firstdata-1
# Searching for velocity changes to define data range
vel_above = np.nonzero(cur_dat['velocity'] > var['velnoise'])
switch = np.nonzero(np.diff(vel_above) > 10)
start_1 = vel_above[0][0]
end_1 = start_1+switch[1][0]
start_2 = end_1+np.max(np.diff(vel_above))
end_2 = vel_above[0][-1]
# smoothing function: 1st#: window size, 2nd#: polynomial order
shearstress_smooth = savitzky_golay(
pd.Series(R.shearstress).values,
window_size=sw,
order=3)
# shearstress_smooth = R.shearstress.rolling(window=10, center = True).median()
# ==========PEAK FRICTION=============================
peakstress = np.nanmax(shearstress_smooth[firstdata:firststop])
peakind = int([a for a, b in enumerate(shearstress_smooth[firstdata:firststop])
if b == np.nanmax(shearstress_smooth[firstdata:firststop])][0])
peakind = peakind+firstdata-1
shearstress_min = np.nanmin(shearstress_smooth[peakind:firststop]) # index of peak stress
# ==========DYNAMIC FRICTION=============================
dynind = next(a for a, value in enumerate(shearstress_smooth[peakind:firststop])
if shearstress_smooth[peakind:firststop][a] <= (shearstress_min+shearstress_min*stressnoise))
dynind = dynind+peakind-1 # index of dynamic stress
dynstress = shearstress_smooth[peakind:firststop][dynind]
peak = [R.displacement[peakind], peakstress]
dyn = [R.displacement[dynind], dynstress]
shearstress_smooth = savgol_filter(
cur_dat['shearstress'],
var['smoothwindow'],
3)
# Peak friction
i_peak = np.argmax(shearstress_smooth[start_1:end_1])+start_1
tau_peak = shearstress_smooth[i_peak]
# Dynamic friction
i_dyn = np.argmin(shearstress_smooth[i_peak:end_1])+i_peak
tau_dyn = shearstress_smooth[i_dyn]
# Static friction (reactivation)
i_stat = np.argmax(shearstress_smooth[start_2:end_2])+start_2
tau_stat = shearstress_smooth[i_stat]
peak = [cur_dat['displacement'][i_peak], tau_peak]
dyn = [cur_dat['displacement'][i_dyn], tau_dyn]
stat = [cur_dat['displacement'][i_stat], tau_stat]
normal = int(np.mean(cur_dat['normalstress'])/var['prec'])*var['prec']
# Calculate relative weakening (after Ritter et al., 2016)
weak = 1-(dyn[1]/peak[1])
weak_p = (peak[1]/dyn[1])-1
# =============REACTIVATION FRICTION=====================
normalstress_mean = np.mean(R.normalstress)
d1 = np.min(R.liddispl[1:peakind])
d1ind = int([a for a, b in enumerate(R.liddispl[1:peakind])
if b == np.min(R.liddispl[1:peakind])][0])
d2 = np.max(R.liddispl[d1ind:firststop])
# ~] = max(R.liddispl(d1ind:firststop));
# Calculate dilation for peak
i_d1 = np.argmin(cur_dat['liddispl'][:i_peak])
d1 = cur_dat['liddispl'][i_d1]
i_d2 = np.argmax(cur_dat['liddispl'][i_peak:end_1])
d2 = cur_dat['liddispl'][i_d2]
deltad = d2-d1
reactstress = np.nanmax(shearstress_smooth[firststop:])
reactind = int([a for a, b in enumerate(shearstress_smooth[firststop:])
if b == np.nanmax(shearstress_smooth[firststop:])][0])
reactind = reactind+firststop-1
react = [R.displacement[reactind], reactstress]
out_var = (normalstress_mean, peak, dyn, react, deltad, weak, weak_p)
# print('rst data evaluated')
return out_var
# %===================SMOOTHING FUCTION========================================
def savitzky_golay(y, window_size, order, deriv=0, rate=1):
    '''
    Smooth (and optionally differentiate) 1-D data with a Savitzky-Golay
    filter: a least-squares polynomial of the given order is fitted over a
    sliding window and evaluated at its centre.

    Parameters:
        y           : 1-D array of signal values
        window_size : odd, positive window length (must exceed order+1)
        order       : polynomial order of the local fit
        deriv       : order of the derivative to compute (0 = smoothing only)
        rate        : sample rate factor applied to the derivative

    Returns:
        ndarray of the same length as `y` with the filtered signal.

    Raises:
        ValueError if `order` cannot be converted to an integer,
        TypeError for an invalid `window_size` (kept as TypeError for
        backwards compatibility with existing callers).
    '''
    from math import factorial
    try:
        # Bug fix: original used `np.abs(np.int(order))` (np.int was removed
        # in NumPy 1.24) and `except ValueError(msg):` (msg is undefined and
        # an exception *instance* is not a valid except clause).
        order = abs(int(order))
    except ValueError:
        raise ValueError("window_size and order have to be of type int")
    if window_size % 2 != 1 or window_size < 1:
        raise TypeError("window_size size must be a positive odd number")
    if window_size < order+2:
        raise TypeError("window_size is too small for the polynomials order")
    ord_rng = range(order+1)
    half_w = (window_size-1) // 2
    # precompute coefficients: Vandermonde design matrix and the `deriv`-th
    # row of its pseudo-inverse (np.array replaces the deprecated np.mat;
    # the resulting coefficients are numerically identical)
    b = np.array([[k**i for i in ord_rng] for k in range(-half_w, half_w+1)])
    m = np.linalg.pinv(b)[deriv] * rate**deriv * factorial(deriv)
    # pad the signal at the extremes with values taken from the signal itself
    # (mirrored about the first/last sample) so the output keeps len(y)
    firstvals = y[0]-np.abs(y[1:half_w+1][::-1] - y[0])
    lastvals = y[-1]+np.abs(y[-half_w-1:-1][::-1] - y[-1])
    y = np.concatenate((firstvals, y, lastvals))
    return np.convolve(m[::-1], y, mode='valid')
return (normal, peak, dyn, stat, deltad, weak, weak_p)
# %%=======================ANALYSIS OF DATA====================================
......@@ -129,30 +98,18 @@ def rst_analmut(x, y):
# calculation of friction coefficients (slope, M) and y-axis intercept
# (cohesion, C) by mutual two point linear regressions:
n = len(x)
M = np.zeros((n, n)) # Matrix of zeros as container for Beta values
C = np.zeros((n, n)) # Matrix of zeros as container for Cohesion values
for k in range(0, n-1):
for j in range(0, n-k):
M[k, j] = (y[k+j]-y[k])/(x[k+j]-x[k]) # calculate slope/ friction
j += 1
k += 1
M[M == np.inf] = np.nan # set inf to Nan
M[M == -np.inf] = np.nan # set -inf to Nan
M[M == 0] = np.nan # set 0 to Nan
M[M < 0] = np.nan # set <0 to Nan
M[M > 1] = np.nan # set 0 to Nan
M_avg, M_std = stats.norm.fit(M[~np.isnan(M)]) # mean and standard deviation
for k in range(0, n-1):
for j in range(0, n-k):
C[k, j] = y[k]-M[k, j]*x[k] # calculate y-axis intercept/cohesion
j = j+1
k = k+1
# calculation of cohesions (y axis intercept):
C[C == np.inf] = np.nan # set inf to Nan
C[C == -np.inf] = np.nan # set -inf to Nan
C[C == 0.0] = np.nan # set 0 to Nan
C_avg, C_std = stats.norm.fit(C[~np.isnan(C)]) # mean and standard deviation
ys = [y[k] for k in range(len(x)) for j in range(len(x)-k)]
xs = [x[k] for k in range(len(x)) for j in range(len(x)-k)]
M = np.array([(y[k+j]-y[k])/(x[k+j]-x[k])
for k in range(len(x)) for j in range(len(x)-k)])
C = np.array([(y-m*x) for y, m, x in zip(ys, xs, M)])
M = M[np.nonzero(np.isfinite(M))]
C = C[np.nonzero(np.isfinite(C))]
M_avg = np.mean(M)
M_std = np.std(M)
C_avg = np.mean(C)
C_std = np.std(C)
fric_mut = (M_avg, M_std, C_avg, C_std)
data_mut = (M, C)
return fric_mut, data_mut
......@@ -161,6 +118,8 @@ def rst_analmut(x, y):
# %%=======================STANDARD LINEAR REGRESSION==========================
def rst_analstd(x, y):
# Correlation of normal and shear stress: y = slope * x+intercept
x = np.array(x)
y = np.array(y)
n = len(x)
P1 = n*np.sum(x*y)-np.sum(x)*np.sum(y)
P2 = n*np.sum(x**2)-np.sum(x)**2
......@@ -175,24 +134,59 @@ def rst_analstd(x, y):
out_var = (slope, std_slope, intercept, std_intercept, y_fit)
return out_var
# calculate linear least squares fit
# n = len(x)
# shearstress_fit = np.polyfit(normalstress, shearstress[:,1], 1)
# #alternative:
# #stats.linregress(normalstress, Tau2[:,1])
# normalstressT = np.vstack([normalstress, np.ones(len(normalstress))]).T
#
# #tau2 = np.linalg.lstsq(normalstressT, Tau2[:,1], rcond=-1)[0]
# ## transform results to match expected input of polyval
# #tau2T = tau2.T
# ## variance of data & standard deviation of data
# Cperr = np.sqrt((sum((shearstress[:,1] - np.matmul(normalstressT,shearstress_fit))**2))/(n-2))
# ## standard deviation of fit parameter
# dp = np.zeros(2)
# dp[0] = Cperr*np.sqrt(n/(n*sum(normalstress**2)-(sum(normalstress)**2)))
# dp[1] = Cperr*np.sqrt(sum(normalstress**2)/(n*sum(normalstress**2)-(sum(normalstress)**2)))
#
#
# %%======================BOOTSTRAP LINEAR REGRESSION==========================
def fit_bootstrap(p0, datax, datay, function=None,
                  yerr_systematic=0.0, nsigma=2, nsets=100):
    '''
    Bootstrap least-squares fit of datax vs. datay.

    The data are fitted once, the scatter of the residuals (combined in
    quadrature with `yerr_systematic`) is used to generate `nsets` perturbed
    copies of datay, and each copy is refitted. The mean and spread of those
    fits give the parameter estimates and their confidence interval:
        nsigma=1 corresponds to a 68.3 % confidence interval
        nsigma=2 corresponds to a 95.44 % confidence interval

    Parameters:
        p0              : initial guess for the fit parameters
        datax, datay    : data to fit
        function        : model f(x, *params); defaults to a linear function
        yerr_systematic : extra systematic error on datay
        nsigma          : width of the reported interval in std. deviations
        nsets           : number of synthetic data sets to fit

    Returns:
        (pfit_bootstrap, perr_bootstrap): mean fitted parameters and
        nsigma * their standard deviation over the bootstrap sets.
    '''
    # ---- Helper function which gives a linear function ----
    def _poly1(x, a, b):
        return (a*x+b)
    # Bug fix: the original used `if ~function:` — `~` is the bitwise invert
    # operator and raises TypeError for both None and callables, so the
    # default could never be installed. An explicit None check is intended.
    if function is None:
        function = _poly1

    # Error function producing residuals
    def errfunc(p, x, y):
        return function(x, *p) - y

    # Fit first time (full_output=0 returns (solution, integer flag))
    pfit, _ier = optimize.leastsq(errfunc,
                                  p0,
                                  args=(datax, datay),
                                  full_output=0)
    # Get the stdev of the residuals, combined with the systematic error
    residuals = errfunc(pfit, datax, datay)
    sigma_res = np.std(residuals)
    sigma_err_total = np.sqrt(sigma_res**2 + yerr_systematic**2)
    # nsets random data sets are generated and fitted
    ps = []
    for _ in range(nsets):
        random_delta = np.random.normal(0., sigma_err_total, len(datay))
        random_y = datay + random_delta
        random_fit, _cov = optimize.leastsq(errfunc, p0,
                                            args=(datax, random_y),
                                            full_output=0)
        ps.append(random_fit)
    ps = np.array(ps)
    pfit_bootstrap = np.mean(ps, 0)
    perr_bootstrap = nsigma * np.std(ps, 0)
    return pfit_bootstrap, perr_bootstrap
# %%========================PLOT===============================================
......@@ -224,12 +218,17 @@ def plotstd(path, name, strength, fricdata):
plt.plot(x, fit,
'-', color=linecolor[i],
linewidth=1,
label=r'Lin. Regr. '+label2[i]+r': $\tau$=({:.3f}$\pm${:.3f})$\sigma_N$+({:.2f}$\pm${:.2f})'.format(slope, std_slope, intercept, std_intercept))
plt.xlabel('Normal stress $\sigma_N$ [Pa]')
plt.ylabel(r'Shear stress $\tau$ [Pa]')
plt.xlim(0, max(strength[0])+2000)
plt.ylim(0, round(max(stre_max), -3)+2001)
plt.yticks(np.arange(0, round(max(stre_max), -3)+2001, 1000))
label='Lin. Regr. ' +
label2[i] +
': $\\tau$=(%.3f$\\pm$%.3f)$\\sigma_N$+(%.2f$\\pm$%.2f)'
% (slope, std_slope, intercept, std_intercept))
plt.xlabel('Normal stress $\\sigma_N$ [Pa]')
plt.ylabel('Shear stress $\\tau$ [Pa]')
plt.xlim(0, max(strength[0])*1.125)
plt.ylim(0, round(max(stre_max), -2)*1.125)
# Removed because 1000 is too large for experiments at small normal load
# plt.yticks(np.arange(0, round(max(stre_max), -2)*1.125, 1000))
plt.legend(fontsize=8,
loc='upper left',
facecolor='w',
......@@ -246,12 +245,12 @@ def plotstd(path, name, strength, fricdata):
# %%======================PLOT HISTOGRAMS======================================
def plothist(path, name, strength, data_mut):
title_mu = [r'Peak friction coefficient $\mu_P$',
r'Dynamic friction coefficient $\mu_D$',
r'Reactivation friction coefficient $\mu_R$']
title_C = [r'Peak cohesion $C_P$',
r'Dynamic cohesion $C_D$',
r'Reactivation cohesion $C_R$']
title_mu = ['Peak friction coefficient $\\mu_P$',
'Dynamic friction coefficient $\\mu_D$',
'Reactivation friction coefficient $\\mu_R$']
title_C = ['Peak cohesion $C_P$',
'Dynamic cohesion $C_D$',
'Reactivation cohesion $C_R$']
plt.rcParams['figure.figsize'] = (10, 14)
M = np.array((data_mut[0][0], data_mut[1][0], data_mut[2][0]))
C = np.array((data_mut[0][1], data_mut[1][1], data_mut[2][1]))
......@@ -275,11 +274,17 @@ def plothist(path, name, strength, data_mut):
linewidth=2,
label='normal distribution')
axrow[0].set_title(tit_mu)
axrow[0].set_xlabel(r'Friction coefficient $\mu$')
axrow[0].set_xlabel('Friction coefficient $\\mu$')
axrow[0].set_ylabel('Counts')
text = 'Mean: '+str(round(stats.norm.fit(coef[~np.isnan(coef)])[0], 3))+'\n' + \
'Std.: '+str(round(stats.norm.fit(coef[~np.isnan(coef)])[1], 3))+'\n' + \
' (' + str(coef[~np.isnan(coef)].size)+' data)'
text = 'Mean: ' + \
str(round(stats.norm.fit(coef[~np.isnan(coef)])[0], 3)) + \
'\n' + \
'Std.: ' + \
str(round(stats.norm.fit(coef[~np.isnan(coef)])[1], 3)) + \
'\n' + \
' (' + \
str(coef[~np.isnan(coef)].size) + \
' data)'
axrow[0].text(0.98, 0.84, text,
horizontalalignment='right',
verticalalignment='bottom',
......@@ -300,9 +305,15 @@ def plothist(path, name, strength, data_mut):
axrow[1].set_title(tit_C)
axrow[1].set_xlabel('Cohesion $C$ [Pa]')
axrow[1].set_ylabel('Counts')
text = 'Mean: '+str(round(stats.norm.fit(coh[~np.isnan(coh)])[0], 2))+'\n' + \
'Std.: '+str(round(stats.norm.fit(coh[~np.isnan(coh)])[1], 2))+'\n' + \
' ('+str(coh[~np.isnan(coh)].size)+' data)'
text = 'Mean: ' + \
str(round(stats.norm.fit(coh[~np.isnan(coh)])[0], 2)) + \
'\n' + \
'Std.: ' + \
str(round(stats.norm.fit(coh[~np.isnan(coh)])[1], 2)) + \
'\n' + \
' (' + \
str(coh[~np.isnan(coh)].size) + \
' data)'
axrow[1].text(0.98, 0.84, text,
horizontalalignment='right',
verticalalignment='bottom',
......@@ -322,51 +333,74 @@ def plothist(path, name, strength, data_mut):
# %%=======================PLOT TS============================================
def plotts(path, name, ts, sigma_sort, var):
def plotts(path, name, exp_data, normal_stress):
plt.rcParams['figure.figsize'] = (10, 8)
ts.iloc[:, 0] = (ts.iloc[:, 0] * var['vel']/60) # convert time to shear displacement [mm]
ts.iloc[:, 1:] = (ts.iloc[:, 1:]*var['lo'])/(var['li']*var['A']) # convert force [N] to stress [Pa]
# =======================PLOT TIME SERIES==================================
plt.rcParams['figure.figsize'] = (10, 8)
linecolor = ['', 'red', 'orange', 'gold', 'green', 'royalblue', 'k']
t = int(len(sigma_sort)/3)
fig3 = plt.figure()
for i in range(0, t):
sigma_legend = int(np.sum(sigma_sort[i*3:(i*3)+3])/3)
plt.plot(ts.iloc[:, 0], np.zeros(len(ts.iloc[:, 0])),
linewidth=0.5,
color=linecolor[i+1],
label=str(sigma_legend)+' Pa')
plt.plot(ts.iloc[:, 0],
ts.iloc[:, i*3+1:(i+1)*3+1],
linewidth=0.5,
color=linecolor[i+1])
plt.legend(fontsize=8,
uq_normal = np.sort(np.unique(normal_stress))
linecolor = ['red', 'orange', 'gold', 'green', 'royalblue', 'k']
# Plot everything
fig3, ax3 = plt.subplots()
for cur_set, cur_norm in zip(exp_data, normal_stress):
cur_ind = np.argwhere(uq_normal == cur_norm)[0][0]
ax3.plot(cur_set['displacement'], cur_set['shearstress'],
color=linecolor[cur_ind],
label=cur_norm)
# Extract labels, sort them and only display once per repetition
handles, labels = ax3.get_legend_handles_labels()
labels_num = [int(l) for l in labels]
sort_ind = np.argsort(labels_num)
handles = np.take_along_axis(np.array(handles), sort_ind, axis=0)
labels = np.take_along_axis(np.array(labels), sort_ind, axis=0)
num_rep = int(len(exp_data)/len(uq_normal))
sl_leg = slice(0, len(handles), num_rep)
ax3.legend(handles[sl_leg], labels[sl_leg],
fontsize=8,
facecolor='w',
edgecolor='k',
framealpha=1,
loc='upper right',
title=r"Normal stress $\sigma_N$")
plt.xlabel('Shear displacement $d$ [mm]')
plt.ylabel(r'Shear stress $\tau$ [Pa]')
plt.xlim(0, max(ts.iloc[:, 0]))
plt.ylim(0, round(max(ts.max()), -3)+1001)
plt.yticks(np.arange(0, round(max(ts.max()), -3)+1001, 1000))
title='Normal stress $\\sigma_N$')
ax3.set_xlabel('Shear displacement $d$ [mm]')
ax3.set_ylabel('Shear stress $\\tau$ [Pa]')
ax3.set_xlim(0, np.max([np.max(m['displacement']) for m in exp_data]))
ax3.set_ylim(0, np.max([np.max(m['shearstress']) for m in exp_data])*1.125)
# ax3.yticks(np.arange(0, round(max(ts.max()), -3)+1001, 1000))
fig3.suptitle(name, y=0.92)
plt.savefig(path+name+'_ts',
bbox_inches='tight',
edgecolor='w')
fig3.savefig(path+name+'_ts',
bbox_inches='tight',
edgecolor='w')
plt.close()
# %%========================SAVE===============================================
def saveTS(path, name, ts):
    '''Dump the time-series DataFrame `ts` to a tab-separated text file.'''
    out_file = path + name + '_ts.txt'
    # NOTE(review): na_rep is the literal string 'np.nan', so missing values
    # appear verbatim as "np.nan" in the file — presumably deliberate; verify.
    ts.to_csv(out_file,
              sep='\t',
              mode='w',
              header=True,
              index=None,
              na_rep='np.nan')
def saveTS(path, name, exp_data):
    '''
    Write the shear-stress time series of all experiments to a
    tab-separated text file (one column per experiment).
    '''
    # Header: time column followed by one column per experiment, labelled
    # with its mean normal stress rounded to whole Pa.
    header = ['Time [s]']
    header.extend('%.0f' % np.mean(ex['normalstress']) for ex in exp_data)
    # Use the time axis of the longest run; shorter runs are NaN-padded.
    # (assumes 'time' and 'shearstress' have equal length per run — TODO confirm)
    run_lens = [len(ex['time']) for ex in exp_data]
    max_len = np.max(run_lens)
    ts_data = np.zeros((max_len, len(exp_data)+1))
    ts_data[:, 0] = exp_data[np.argmax(run_lens)]['time']
    for col, ex in enumerate(exp_data, start=1):
        stress = ex['shearstress']
        pads = np.full(max_len-len(stress), np.nan)
        ts_data[:, col] = np.hstack((stress, pads))
    # Write header row plus the data matrix as tab-separated text.
    with open(path+name+'_ts.txt', 'w+') as f:
        writer = csv.writer(f,
                            delimiter='\t',
                            lineterminator='\n')
        writer.writerows([header])
        writer.writerows(ts_data)
def saveStrength(path, name, strength):
......@@ -384,23 +418,80 @@ def saveStrength(path, name, strength):
f.closed
# %% Save friction data to txt files:
def saveFric(path, name, fricmut, fricstd):
header = '# Parameter'+'\t'+'Coeff. of internal friction'+'\t'+'Std. deviation (Coeff.)'+'\t'+'Cohesion [Pa]'+'\t'+'Std deviation (Coh.) [Pa]'
header = '# Parameter' + \
'\t' + \
'Coeff. of internal friction' + \
'\t' + \
'Std. deviation (Coeff.)' + \
'\t' + \
'Cohesion [Pa]' + \
'\t' + \
'Std deviation (Coh.) [Pa]'
with open(path+name+'_fricstd.txt', 'w') as f:
f.write(header+'\n')
f_string = ''
f_string += 'Peak friction:'+'\t'+str(fricstd[0][0])+'\t'+str(fricstd[0][1])+'\t'+str(fricstd[0][2])+'\t'+str(fricstd[0][3])+'\n'
f_string += 'Dynamic friction:'+'\t'+str(fricstd[1][0])+'\t'+str(fricstd[1][1])+'\t'+str(fricstd[1][2])+'\t'+str(fricstd[1][3])+'\n'
f_string += 'Reactivation friction:'+'\t'+str(fricstd[2][0])+'\t'+str(fricstd[2][1])+'\t'+str(fricstd[2][2])+'\t'+str(fricstd[2][3])+'\n'
f_string += 'Peak friction:' + \
'\t' + \
str(fricstd[0][0]) + \
'\t'+str(fricstd[0][1]) + \
'\t'+str(fricstd[0][2]) + \
'\t'+str(fricstd[0][3]) + \
'\n'
f_string += 'Dynamic friction:' + \
'\t' + \
str(fricstd[1][0]) + \
'\t' + \
str(fricstd[1][1]) + \
'\t' + \
str(fricstd[1][2]) + \
'\t' + \
str(fricstd[1][3]) + \
'\n'
f_string += 'Reactivation friction:' + \
'\t' + \
str(fricstd[2][0]) + \
'\t' + \
str(fricstd[2][1]) + \
'\t' + \
str(fricstd[2][2]) + \
'\t' + \
str(fricstd[2][3]) + \
'\n'
write_f = f.write(f_string)
f.closed
with open(path+name+'_fricmut.txt', 'w') as f:
f.write(header+'\n')
f_string = ''
f_string += 'Peak friction:'+'\t'+str(fricmut[0][0])+'\t'+str(fricmut[0][1])+'\t'+str(fricmut[0][2])+'\t'+str(fricmut[0][3])+'\n'
f_string += 'Dynamic friction:'+'\t'+str(fricmut[1][0])+'\t'+str(fricmut[1][1])+'\t'+str(fricmut[1][2])+'\t'+str(fricmut[1][3])+'\n'
f_string += 'Reactivation friction:'+'\t'+str(fricmut[2][0])+'\t'+str(fricmut[2][1])+'\t'+str(fricmut[2][2])+'\t'+str(fricmut[2][3])+'\n'
f_string += 'Peak friction:' + \
'\t' + \
str(fricmut[0][0]) + \
'\t' + \
str(fricmut[0][1]) + \
'\t' + \
str(fricmut[0][2]) + \
'\t' + \
str(fricmut[0][3]) + \
'\n'
f_string += 'Dynamic friction:' + \
'\t' + \
str(fricmut[1][0]) + \
'\t' + \
str(fricmut[1][1]) + \
'\t' + \
str(fricmut[1][2]) + \
'\t' + \
str(fricmut[1][3]) + \
'\n'
f_string += 'Reactivation friction:' + \
'\t' + \
str(fricmut[2][0]) + \
'\t' + \
str(fricmut[2][1]) + \
'\t' + \
str(fricmut[2][2]) + \
'\t' + \
str(fricmut[2][3])+'\n'
write_f = f.write(f_string)
f.closed
......@@ -442,18 +533,17 @@ def _readdat(path, file):
# ---- Helper function to read *.tdms file ----
def _readtdms(path, file):
tdms = TdmsFile(path+file)
groups = tdms.groups()
channels = tdms.group_channels(groups[0])
# time increment between each point:
inc = list((tdms.object(groups[0], 'Velocity').properties).values())[2]
df_raw = tdms.object(groups[0]).as_dataframe()
def _readtdms(path, file_in):
f = nptdms