C-Tool Acceleration Instrumentation

A lift tool was instrumented to measure the acceleration of the lower beam. A Nexus 6 android phone was used as the data acquisition device with the help of the Physics Toolbox app. Multiple lifts were performed. Acceleration in three axes and the total magnitude were recorded and uploaded to a cloud storage provider as a csv file.

We want to extract the spectral information from the acceleration time series. We use the discrete fast fourier transform (DFT or FFT) to compute the acceleration versus frequency.

In [82]:
import numpy as np
import pandas as pd
import datetime
import scipy
In [83]:
from pint import UnitRegistry
u = UnitRegistry()
In [84]:
import matplotlib
import matplotlib.pyplot as plt
import pylab
In [85]:
%matplotlib inline
%config InlineBackend.figure_format = 'svg'

Acceleration at mid-beam

In [86]:
acc = pd.read_csv('guardian_accelerometer_data_20160526.csv',
                  header=0,
                  names=['time',
                         'x',
                         'y',
                         'z',
                         'magnitude'],
                  dtype={'time':datetime.time, 
                         'x':np.float64, 
                         'y':np.float64, 
                         'z':np.float64, 
                         'magnitude':np.float64})

Fix time column

In [87]:
from dateutil.parser import parse
def munge_time(x):
    return parse(':'.join(x.split(':')[:-1]))
In [88]:
acc['time'] = acc['time'].map(munge_time)
In [89]:
print(acc)
                      time      x      y      z  magnitude
0      2016-05-30 09:45:11 -0.066  0.203  0.976      0.999
1      2016-05-30 09:45:11 -0.061  0.196  0.989      1.010
2      2016-05-30 09:45:11 -0.054  0.215  1.019      1.043
3      2016-05-30 09:45:11 -0.057  0.229  1.027      1.054
4      2016-05-30 09:45:11 -0.069  0.214  1.034      1.059
5      2016-05-30 09:45:11 -0.070  0.217  1.031      1.056
6      2016-05-30 09:45:11 -0.074  0.215  1.032      1.057
7      2016-05-30 09:45:11 -0.073  0.214  1.066      1.089
8      2016-05-30 09:45:11 -0.073  0.216  1.073      1.097
9      2016-05-30 09:45:11 -0.099  0.205  1.102      1.125
10     2016-05-30 09:45:11 -0.111  0.223  1.116      1.144
11     2016-05-30 09:45:11 -0.089  0.227  1.118      1.144
12     2016-05-30 09:45:11 -0.084  0.251  1.117      1.148
13     2016-05-30 09:45:11 -0.086  0.243  1.137      1.166
14     2016-05-30 09:45:11 -0.061  0.258  1.139      1.169
15     2016-05-30 09:45:11 -0.073  0.266  1.127      1.161
16     2016-05-30 09:45:11 -0.064  0.269  1.113      1.147
17     2016-05-30 09:45:11 -0.050  0.268  1.116      1.149
18     2016-05-30 09:45:11 -0.043  0.279  1.082      1.118
19     2016-05-30 09:45:11 -0.016  0.282  1.071      1.107
20     2016-05-30 09:45:11 -0.025  0.276  1.064      1.099
21     2016-05-30 09:45:11 -0.021  0.284  1.059      1.097
22     2016-05-30 09:45:11 -0.003  0.266  1.029      1.062
23     2016-05-30 09:45:11  0.005  0.296  1.020      1.062
24     2016-05-30 09:45:11  0.013  0.282  1.009      1.048
25     2016-05-30 09:45:11  0.033  0.303  0.977      1.023
26     2016-05-30 09:45:11  0.034  0.318  0.935      0.988
27     2016-05-30 09:45:11  0.041  0.330  0.926      0.984
28     2016-05-30 09:45:11  0.051  0.341  0.900      0.964
29     2016-05-30 09:45:11  0.059  0.343  0.866      0.933
...                    ...    ...    ...    ...        ...
508998 2016-05-30 10:22:51 -0.107  0.449  0.501      0.681
508999 2016-05-30 10:22:51 -0.146  0.461  0.496      0.693
509000 2016-05-30 10:22:51 -0.162  0.472  0.550      0.742
509001 2016-05-30 10:22:51 -0.113  0.464  0.671      0.824
509002 2016-05-30 10:22:51 -0.087  0.459  0.839      0.960
509003 2016-05-30 10:22:51 -0.060  0.462  0.975      1.080
509004 2016-05-30 10:22:51 -0.060  0.465  1.117      1.211
509005 2016-05-30 10:22:51 -0.061  0.477  1.198      1.291
509006 2016-05-30 10:22:51 -0.064  0.476  1.223      1.314
509007 2016-05-30 10:22:51 -0.056  0.462  1.218      1.304
509008 2016-05-30 10:22:51 -0.065  0.447  1.193      1.276
509009 2016-05-30 10:22:51 -0.055  0.436  1.157      1.238
509010 2016-05-30 10:22:51 -0.078  0.414  1.093      1.172
509011 2016-05-30 10:22:51 -0.084  0.411  1.022      1.105
509012 2016-05-30 10:22:51 -0.076  0.386  0.940      1.019
509013 2016-05-30 10:22:51 -0.043  0.354  0.845      0.917
509014 2016-05-30 10:22:51 -0.046  0.378  0.759      0.849
509015 2016-05-30 10:22:51 -0.070  0.404  0.672      0.787
509016 2016-05-30 10:22:51 -0.075  0.429  0.648      0.781
509017 2016-05-30 10:22:51 -0.084  0.452  0.633      0.782
509018 2016-05-30 10:22:51 -0.098  0.468  0.635      0.795
509019 2016-05-30 10:22:51 -0.108  0.456  0.688      0.833
509020 2016-05-30 10:22:51 -0.104  0.447  0.767      0.894
509021 2016-05-30 10:22:51 -0.085  0.444  0.810      0.928
509022 2016-05-30 10:22:51 -0.066  0.442  0.895      1.000
509023 2016-05-30 10:22:51 -0.053  0.422  0.984      1.072
509024 2016-05-30 10:22:51 -0.031  0.412  1.023      1.103
509025 2016-05-30 10:22:51 -0.008  0.401  1.093      1.165
509026 2016-05-30 10:22:51 -0.011  0.388  1.107      1.173
509027 2016-05-30 10:22:51  0.004  0.380  1.117      1.180

[509028 rows x 5 columns]
In [90]:
ax = acc.plot(x='time', 
              y='magnitude')
ax.set_ylabel("Acceleration (g)")
Out[90]:
<matplotlib.text.Text at 0x7f7fe56506a0>
In [91]:
ax = acc.plot(x='time', 
              y='z')
ax.set_ylabel("Acceleration, z (g)")
Out[91]:
<matplotlib.text.Text at 0x7f7fe61fd6d8>

Calculate sampling rate

In [92]:
print('Duration of test: {}'.format(acc['time'].iloc[-1] - acc['time'].iloc[0]))
print('Average time between samples: {}'.format((acc['time'].iloc[-1] - acc['time'].iloc[0])/len(acc)))
delta_t = (acc['time'].iloc[-1] - acc['time'].iloc[0])/len(acc)
Duration of test: 0 days 00:37:40
Average time between samples: 0 days 00:00:00.004439
In [93]:
print(len(acc))
509028
In [94]:
time_fine = [acc['time'].iloc[0],]
for i in range(1,len(acc.time)):
    time_fine.append(time_fine[-1] + delta_t)
acc['time_fine'] = time_fine
In [95]:
print(delta_t.microseconds/1.0e6)
0.004439
In [96]:
T = (acc['time_fine'].iloc[-1] - acc['time_fine'].iloc[0]).seconds
print('Duration of test (seconds): {}'.format(T))
Duration of test (seconds): 2259

Filter to times of actual lifts. We filter the complete time history to extract the time history for each discrete lift which will be processed later.

In [97]:
lifts = {1:{}}
In [98]:
lifts[1]['acc'] = acc[(acc['time'] > '2016-05-30 9:50:00') & (acc['time'] < '2016-05-30 9:52:00')]
In [99]:
lifts[2] = {}
In [100]:
lifts[2]['acc'] = acc[(acc['time'] > '2016-05-30 9:56:00') & (acc['time'] < '2016-05-30 9:58:00')]
In [101]:
lifts[3] = {}
In [102]:
lifts[3]['acc'] = acc[(acc['time'] > '2016-05-30 10:00:00') & (acc['time'] < '2016-05-30 10:02:00')]
In [103]:
lifts[5] = {}
In [104]:
lifts[5]['acc'] = acc[(acc['time'] > '2016-05-30 10:15:00') & (acc['time'] < '2016-05-30 10:17:00')]
In [105]:
lifts[6] = {}
In [106]:
lifts[6]['acc'] = acc[(acc['time'] > '2016-05-30 10:19:00') & (acc['time'] < '2016-05-30 10:21:00')]

Apply lowpass filter

In [107]:
from scipy.signal import butter, lfilter, freqz
In [108]:
def butter_lowpass(cutoff, fs, order=5):
    nyq = 0.5 * fs
    normal_cutoff = cutoff / nyq
    b, a = butter(order, normal_cutoff, btype='low', analog=False)
    return b, a
In [109]:
def butter_lowpass_filter(data, cutoff, fs, order=5):
    b, a = butter_lowpass(cutoff, fs, order=order)
    y = lfilter(b, a, data)
    return y

Filter requirements

In [110]:
order = 2
f_s = 1.0/(delta_t.microseconds/1.0e6)      # sample rate, Hz
print('Sampling rate: {:0.2f} Hz'.format(f_s))
cutoff = 10  # desired cutoff frequency of the filter, Hz
Sampling rate: 225.28 Hz

Get the filter coefficients so we can check its frequency response.

In [111]:
b, a = butter_lowpass(cutoff, f_s, order)

Plot the frequency response.

In [112]:
w, h = freqz(b, a, worN=8000)
#plt.subplot(2, 1, 1)
plt.plot(0.5*f_s*w/np.pi, np.abs(h), 'b')
plt.plot(cutoff, 0.5*np.sqrt(2), 'ko')
plt.axvline(cutoff, color='k')
plt.xlim(0, 0.5*f_s)
plt.title("Lowpass Filter Frequency Response")
plt.xlabel('Frequency [Hz]')
plt.grid()

Filter the data, and plot both the original and filtered signals

In [113]:
y = butter_lowpass_filter(acc['magnitude'], cutoff, f_s, order)

DC component (~1g)

In [114]:
y.mean()
Out[114]:
1.0020599152546816
In [115]:
from matplotlib import dates
hfmt = dates.DateFormatter('%m/%d %H:%M')

ax = plt.subplot(1, 1, 1)
plt.plot(acc['time_fine'], y, 'g-', linewidth=2, label='filtered data')
plt.xlabel('Time')
plt.grid()
plt.legend()

ax.xaxis.set_major_locator(dates.MinuteLocator())
ax.xaxis.set_major_formatter(hfmt)
ax.set_ylim(bottom = 0)
plt.xticks(rotation='vertical')
plt.subplots_adjust(bottom=.3)

plt.subplots_adjust(hspace=0.35)
plt.show()

Compute the FFT on the unfiltered data

The following approach is borrowed from https://gist.github.com/jedludlow/3919130.

The FFT and a matching vector of frequencies

In [116]:
print(acc['magnitude'].mean())
1.00207067195

Subtract the DC component (~1g)

In [117]:
# test
# Number of samplepoints
#N = 1024
# sampling frequency
#f_s = 750.0
# sample spacing
#T = 1.0 / 750
#x = np.linspace(0.0, N*T, N)
#y = 2*np.sin(75.0 * 2.0*np.pi*x)
#print(y)
#fft_x = np.fft.fft(y)
#actual
fft_x = np.fft.fft(acc['magnitude'] - acc['magnitude'].mean())
n = len(fft_x)
freq = np.fft.fftfreq(n, 1/f_s)
print(n)
print(freq)
509028
[ 0.          0.00044256  0.00088512 ..., -0.00132768 -0.00088512
 -0.00044256]
In [118]:
print(max(fft_x))
print(min(fft_x))
(513.45971076+125.253199219j)
(-581.868073948-166.546615929j)
In [119]:
print(max(np.abs(fft_x)))
print(min(np.abs(fft_x)))
709.68107278
1.89950277729e-10
In [120]:
plt.plot(np.abs(fft_x))
Out[120]:
[<matplotlib.lines.Line2D at 0x7f7ffe0d8780>]

Swap Half Spaces

Note that frequencies in the FFT and the freq vector go from zero to some larger positive number then from a large negative number back toward zero. We can swap that so that the DC component is in the center of the vector while maintaining a two-sided spectrum.

In [121]:
fft_x_shifted = np.fft.fftshift(fft_x)
freq_shifted = np.fft.fftshift(freq)
In [122]:
plt.plot(freq_shifted, np.abs(fft_x_shifted))
plt.xlabel("Frequency (Hz)")
Out[122]:
<matplotlib.text.Text at 0x7f7fe63ee588>

Fold Negative Frequencies and Scale

It's actually more common to look at just the first half of the unshifted FFT and frequency vectors and fold all the amplitude information into the positive frequencies. Furthermore, to get ampltude right, we must normalize by the length of the original FFT. Note the factor of $2/n$ in the following which accomplishes both the folding and scaling.

In [123]:
half_n = n // 2
print(half_n)
fft_x_half = (2 / n) * fft_x[:half_n]
freq_half = freq[:half_n]
254514
In [124]:
print(np.abs(fft_x_half))
[  7.46325458e-16   1.16169145e-03   4.04096163e-04 ...,   2.72523043e-05
   1.18643710e-04   3.21919974e-05]
In [125]:
data = np.abs(fft_x_half)
plt.plot(freq_half, data)
plt.xlabel("Frequency (Hz)")
plt.ylabel("Amplitude")
Out[125]:
<matplotlib.text.Text at 0x7f7ff28673c8>
In [126]:
from scipy import signal
In [127]:
len(freq_half)
Out[127]:
254514
In [128]:
np.argmax(freq_half > 20)
Out[128]:
45192

We can use the peakutils module to find the peaks in the FFT

In [129]:
import peakutils
peakind = peakutils.indexes(data, thres=0.4, min_dist=100)
In [130]:
print('Number of relative peaks: {}'.format(len(peakind)))
Number of relative peaks: 13
In [131]:
print(freq_half[peakind])
[  4.42561044e-04   1.63659074e+00   3.54889701e+00   3.60111921e+00
   3.77283290e+00   3.83744681e+00   3.91755036e+00   3.97729610e+00
   4.03305879e+00   4.08306819e+00   4.17025272e+00   4.21539394e+00
   4.26186285e+00]
In [132]:
print(data[peakind])
[ 0.00116169  0.00124901  0.00278838  0.00122306  0.00117931  0.00158109
  0.00140451  0.00182064  0.00113182  0.00164623  0.00245867  0.00162976
  0.00217062]

Compute the FFT on the filtered data

The multi-step procedure described above can be compressed into 6 lines of code

In [134]:
fft_x_filt = np.fft.fft((y - y.mean()))
freq_filt = np.fft.fftfreq(n, 1/f_s)
fft_x_shifted_filt = np.fft.fftshift(fft_x_filt)
freq_shifted_filt = np.fft.fftshift(freq_filt)
fft_x_half_filt = (2 / n) * fft_x_filt[:half_n]
freq_half_filt = freq_filt[:half_n]
In [135]:
data_filt = np.abs(fft_x_half_filt)
plt.plot(freq_half_filt, data_filt)
plt.xlabel("Frequency (Hz)")
plt.ylabel("Amplitude")
plt.xlim([0,10])
Out[135]:
(0, 10)
In [136]:
len(freq_half_filt)
Out[136]:
254514
In [137]:
np.argmax(freq_half_filt > 20)
Out[137]:
45192

Find the peaks using the peakutils module

In [138]:
peakind_filt = peakutils.indexes(data_filt, thres=0.4, min_dist=100)
In [139]:
print('Number of relative peaks: {}'.format(len(peakind_filt)))
Number of relative peaks: 13
In [140]:
print(freq_half[peakind_filt])
[  4.42561044e-04   1.63659074e+00   3.54889701e+00   3.60111921e+00
   3.77283290e+00   3.83744681e+00   3.91755036e+00   3.97729610e+00
   4.03305879e+00   4.08306819e+00   4.17025272e+00   4.21539394e+00
   4.26186285e+00]
In [141]:
print(data_filt[peakind_filt])
[ 0.00115708  0.00126882  0.00278437  0.00119223  0.00115763  0.00155411
  0.00141014  0.0018042   0.00112623  0.00163765  0.00241123  0.00162506
  0.00215121]

Process acceleration per lift

The above transforms operated on the entire duration of the experiment. We are not too interested in the acceleration between lifts. We thus window the data to extract the acceleration time series for each lift event.

In [142]:
from matplotlib import dates
hfmt = dates.DateFormatter('%m/%d %H:%M')

def plot_lift_acc(lift_dict):

    ax = plt.subplot(1, 1, 1)
    plt.plot(lift_dict['acc']['time_fine'], lift_dict['acc']['magnitude'], 'b-', label='data')
    plt.xlabel('Time')
    plt.grid()
    plt.legend()

    ax.xaxis.set_major_locator(dates.MinuteLocator())
    ax.xaxis.set_major_formatter(hfmt)
    ax.set_ylim(bottom = 0)
    plt.xticks(rotation='vertical')
    plt.subplots_adjust(bottom=.3)

    plt.subplots_adjust(hspace=0.35)
    plt.show()
In [143]:
def plot_lift_acc_filt(lift_dict):
    ax = plt.subplot(1, 1, 1)
    plt.plot(lift_dict['acc']['time_fine'], lift_dict['y'], 'g-', linewidth=2, label='filtered data')
    plt.xlabel('Time')
    plt.grid()
    plt.legend()

    ax.xaxis.set_major_locator(dates.MinuteLocator())
    ax.xaxis.set_major_formatter(hfmt)
    ax.set_ylim(bottom = 0.8)
    plt.xticks(rotation='vertical')
    plt.subplots_adjust(bottom=.3)

    plt.subplots_adjust(hspace=0.35)
    plt.show()
In [144]:
def calc_lift_fft(lift_dict):
    y = lift_dict['y'] - lift_dict['y'].mean()
    n_lift = len(y)
    w = blackman(n_lift)
    fft_lift_filt = np.fft.fft(y)
    half_n_lift = n_lift // 2
    freq_lift_filt = np.fft.fftfreq(n_lift, 1/f_s)
    fft_lift_shifted_filt = np.fft.fftshift(fft_lift_filt)
    freq_lift_shifted_filt = np.fft.fftshift(freq_lift_filt)
    lift_dict['fft_lift_half_filt'] = (2 / n_lift) * fft_lift_filt[:half_n_lift]
    lift_dict['freq_lift_half_filt'] = freq_lift_filt[:half_n_lift]
In [145]:
def plot_lift_fft(lift_dict):
    plt.plot(lift_dict['freq_lift_half_filt'], np.abs(lift_dict['fft_lift_half_filt']))
    plt.xlabel("Frequency (Hz)")
    plt.ylabel("Amplitude")
    plt.xlim([0,15])
    plt.show()
In [146]:
def process_lift(lift_dict):
    lift_dict['y'] = butter_lowpass_filter(lift_dict['acc']['magnitude'], cutoff, f_s, order)
    print('DC component of acceleration: {:0.2f}'.format(lift_dict['y'].mean()))
    plot_lift_acc(lift_dict)
    plot_lift_acc_filt(lift_dict)
    calc_lift_fft(lift_dict)
    plot_lift_fft(lift_dict)
    lift_dict['peak_indices'] = peakutils.indexes(np.abs(lift_dict['fft_lift_half_filt']), 
                                                  thres=0.4, 
                                                  min_dist=100)
    print('Number of relative peaks: {}'.format(len(lift_dict['peak_indices'])))
    frequencies = ','.join(['{:0.2f}'.format(f) for f in 
                            lift_dict['freq_lift_half_filt'][lift_dict['peak_indices']]])
    print('{}'.format(frequencies))

Lift 1

In [147]:
process_lift(lifts[1])
DC component of acceleration: 1.00
Number of relative peaks: 2
1.65,3.98

Lift 2

In [148]:
process_lift(lifts[2])
DC component of acceleration: 1.00
Number of relative peaks: 1
4.20

Lift 3

In [149]:
process_lift(lifts[3])
DC component of acceleration: 1.00
Number of relative peaks: 2
1.65,4.13

Lift 5

In [150]:
process_lift(lifts[5])
DC component of acceleration: 1.00
Number of relative peaks: 2
1.34,3.83

Lift 6

In [151]:
process_lift(lifts[6])
DC component of acceleration: 1.00
Number of relative peaks: 2
1.35,4.08