2 changes: 1 addition & 1 deletion dgp/lib/etc.py
@@ -117,7 +117,7 @@ def fill_nans(frame):
left = fill_nans(left)
right = fill_nans(right)

left = left.reindex(new_index).dropna()
left = left.reindex(new_index).dropna() # TODO Why does dropna matter here for some flights?
right = right.reindex(new_index).dropna()

# crop frames
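Re the new TODO above, a minimal sketch (hypothetical frames, not necessarily how this module builds `new_index`) of why `dropna()` can matter after `reindex()`: timestamps present in only one frame come back as NaN rows.

```python
import pandas as pd

# Two hypothetical 10 Hz series whose time bases are offset by 50 ms.
a = pd.Series(range(5), index=pd.date_range('2019-11-09 22:00:00', periods=5, freq='100ms'))
b = pd.Series(range(5), index=pd.date_range('2019-11-09 22:00:00.050', periods=5, freq='100ms'))
new_index = a.index.union(b.index)

# Reindexing onto the union inserts NaN rows at timestamps that exist in
# only one series; dropna() removes them so both frames carry matching,
# fully valid samples before the crop step.
print(a.reindex(new_index))           # NaN at every b-only timestamp
print(a.reindex(new_index).dropna())  # back to the original five samples
```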
3 changes: 1 addition & 2 deletions dgp/lib/gravity_ingestor.py
@@ -70,7 +70,7 @@ def _unpack_bits(n):
'temp', 'pressure', 'Etemp'}


def read_at1a(path, columns=None, fill_with_nans=True, interp=False,
def read_at1a(path, columns=None, fill_with_nans=False, interp=False,
skiprows=None):
"""
Read and parse gravity data file from DGS AT1A (Airborne) meter.
@@ -140,7 +140,7 @@ def read_at1a(path, columns=None, fill_with_nans=True, interp=False,
index = pd.date_range(df.index[0], df.index[-1], freq=interval)
df = df.reindex(index)

# TODO: Replace interp_nans with pandas interpolate
if interp:
numeric = df.select_dtypes(include=[np.number])
numeric = numeric.interpolate(method='time')
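The deleted TODO asked for `interp_nans` to be replaced with pandas interpolation, and the hunk now reindexes onto a regular interval and time-interpolates the numeric columns. A standalone sketch of that pattern (the helper name and the column write-back are assumptions, not `read_at1a`'s exact code):

```python
import numpy as np
import pandas as pd

def regularize_and_interp(df, interval='100ms'):
    # Put the frame on an evenly spaced index, then fill the resulting gaps
    # in the numeric columns by time-weighted interpolation.
    index = pd.date_range(df.index[0], df.index[-1], freq=interval)
    df = df.reindex(index)
    numeric = df.select_dtypes(include=[np.number]).interpolate(method='time')
    df[numeric.columns] = numeric
    return df
```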
4 changes: 2 additions & 2 deletions dgp/lib/plots.py
@@ -54,15 +54,15 @@ def timeseries_gravity_diagnostic(df, my_varlist, my_varunits, st, et, plottitle
my_ls = '-'
my_lw = 0.5
my_marker = None
print('p v')
# print('p v')
plt.subplots_adjust(hspace=0.000)
plt.style.use('ggplot')
number_of_subplots = np.shape(my_varlist)[0]
fig = plt.figure(figsize=(8, 6), facecolor='white', dpi=96)
fig.suptitle(plottitle)
for p, v in enumerate(my_varlist):
p = p + 1
print('{} {}'.format(p, v))
# print('{} {}'.format(p, v))
ax = plt.subplot(number_of_subplots, 1, p)
ax.plot(df[v].loc[st:et].index, df[v].loc[st:et].values, color='black', label=v,
ls=my_ls, lw=my_lw, marker=my_marker)
23 changes: 21 additions & 2 deletions dgp/lib/trajectory_ingestor.py
@@ -7,6 +7,7 @@
"""
import numpy as np
import pandas as pd
from datetime import datetime
from pandas.tseries.offsets import Milli

from .time_utils import leap_seconds, convert_gps_time, datenum_to_datetime
@@ -58,10 +59,10 @@ def import_trajectory(filepath, delim_whitespace=False, interval=0,
Interpreter for Pandas read_csv. Waypoint, the default trajectory,
uses standard Pandas default of 'c'.
sep : str
',' | '\t' Default: ','
',' | '\s+' Default: ','
Delimiter for Pandas read_csv. Waypoint, the default trajectory,
        uses the standard Pandas default of ',', but others, such as TerraPOS, are
tab delimited, requiring '\t'.
space delimited, requiring '\s+'.

Returns
-------
@@ -141,3 +142,21 @@ def import_trajectory(filepath, delim_whitespace=False, interval=0,
df[col] = numeric[col]

return df
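Given the `sep` note in the docstring above, a hedged call for a whitespace-delimited TerraPOS-style export (the file name is made up; the field list mirrors `gps_fields` in the example config below):

```python
from dgp.lib.trajectory_ingestor import import_trajectory

# Hypothetical TerraPOS export; engine='python' is the safe choice for a regex sep.
trajectory = import_trajectory(
    'F1003_TerraPOS.txt',
    columns=['mdy', 'hms', 'lat', 'long', 'ortho_ht', 'ell_ht', 'num_stats', 'pdop'],
    skiprows=1, timeformat='hms', engine='python', sep=r'\s+')
```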


def import_imar_zbias(path, starttime=None, endtime=None):
    """Read an iMAR INS export (fixed 18-line header) into a DataFrame indexed by UTC time."""
if starttime is not None and not isinstance(starttime, datetime):
raise TypeError('starttime: expected datetime object, got {typ}'.format(typ=type(starttime)))

    df = pd.read_csv(path, header=None, skiprows=18, engine='python', sep=r'\s+')
df.columns = ['date_utc', 'gps_sow', 'hms_utc', 'unix_utc', 'lat', 'lon', 'h', 'pitch', 'roll',
'heading', 'ns', 'pdop', 'x_acc_bias', 'y_acc_bias', 'z_acc_bias']
df.index = pd.to_datetime(df["unix_utc"], unit='s')

    if starttime is not None:
        # compare against the datetime index; starttime is a datetime, not unix seconds
        df = df[df.index >= starttime]
    if endtime is not None:
        df = df[df.index <= endtime]
df.reset_index(drop=True, inplace=True)

return df
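A usage sketch for the new reader (path and window are illustrative; process_script.py below calls it without a time window):

```python
from datetime import datetime
from dgp.lib.trajectory_ingestor import import_imar_zbias

imar = import_imar_zbias('OIB-GV_F1003_20191026_iMAR-INS_FINAL_iMAR_1Hz.txt',
                         starttime=datetime(2019, 10, 26, 23, 0),
                         endtime=datetime(2019, 10, 27, 8, 0))
print(imar[['lat', 'lon', 'z_acc_bias']].head())
```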
45 changes: 23 additions & 22 deletions examples/config_runtime.yaml
@@ -1,22 +1,23 @@
---
campaign: 'OIB_GV-2019'
flight: 'F1014'
begin_line: '2019-11-09 22:00'
end_line: '2019-11-10 06:00'

gravity_dir: '/Volumes/BASEQI/data_baseqi/OIB_GV_2019/F1014/Gravity/DgS/'
gravity_file: 'OIB-GV_F1014_20191109.dat'

trajectory_src: 'Waypoint'
trajectory_dir: '/Volumes/BASEQI/data_baseqi/OIB_GV_2019/F1014/Gravity/DgS/'
trajectory_file: 'OIB-GV_F1014_20191109_iMAR-INS_FINAL_DGS.txt'

config_dir: '/Volumes/BASEQI/data_baseqi/OIB_GV_2019/F1014/Gravity/DgS/'
out_dir: '/Volumes/BASEQI/data_baseqi/OIB_GV_2019/F1014/Gravity/DgS/'

QC1:
start: '2019-11-10 00:10'
end: '2019-11-10 00:25'

# Change These Only As Needed
gps_fields: ['mdy', 'hms', 'lat', 'long', 'ortho_ht', 'ell_ht', 'num_stats', 'pdop']
---
campaign: 'OIB-GV'
flight: 'F1003'
begin_line: '2019-10-26 23:00'
end_line: '2019-10-27 08:00'

config_dir: '/Volumes/BASEQI/data_baseqi/Antarctica/OIB/gravity/dgs/raw/F1003'

gravity_dir: '/Volumes/BASEQI/data_baseqi/Antarctica/OIB/gravity/dgs/raw/F1003'
gravity_file: 'OIB-GV_F1003_20191026.dat'

trajectory_src: 'Waypoint'
trajectory_dir: '/Volumes/BASEQI/data_baseqi/Antarctica/OIB/trajectory'
trajectory_file: 'OIB-GV_F1003_20191026_iMAR-INS_FINAL_DGS.txt'

out_dir: '/Volumes/BASEQI/data_baseqi/Antarctica/OIB/gravity/dgs'

QC1:
start: '2019-10-27 02:15'
end: '2019-10-27 02:30'

# Change These Only As Needed
gps_fields: ['mdy', 'hms', 'lat', 'long', 'ortho_ht', 'ell_ht', 'num_stats', 'pdop']
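The TODO in process_script.py below asks for a helper that pulls all parameters from the config YAML at once. A minimal sketch under that assumption, reusing only the key names from the example config above (`load_runtime_config` is a hypothetical name):

```python
from datetime import datetime
import yaml

def load_runtime_config(path):
    # Hypothetical helper: load the runtime YAML and pre-parse the line times.
    with open(path, 'r') as fh:
        cfg = yaml.safe_load(fh)
    for key in ('begin_line', 'end_line'):
        cfg[key] = datetime.strptime(cfg[key], '%Y-%m-%d %H:%M')
    cfg.setdefault('QC1', None)  # the QC window stays optional
    return cfg

# config = load_runtime_config('config_runtime.yaml')
```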
142 changes: 109 additions & 33 deletions examples/process_script.py
@@ -1,9 +1,10 @@
import os
from datetime import datetime
import yaml
import sys

from dgp.lib.gravity_ingestor import read_at1a
from dgp.lib.trajectory_ingestor import import_trajectory
from dgp.lib.trajectory_ingestor import import_trajectory, import_imar_zbias
from dgp.lib.etc import align_frames
from dgp.lib.transform.transform_graphs import AirbornePost
from dgp.lib.transform.filters import detrend
@@ -14,27 +15,35 @@
make_plots = True
add_map_plots = False
diagnostic = True
import_auxiliary = True

# Confirm a few things with the human
if diagnostic:
print("Diagnostic set to True. The means:\n"
"1) entire gravity record used,\n"
"2) still readings NOT considered (no detrend)\n")

# Read YAML config file
proj_path = os.path.abspath(os.path.dirname(__file__))
try:
with open(os.path.join(proj_path, 'config_runtime.yaml'), 'r') as file:
config = yaml.safe_load(file)
print('Read YAML configuration file for {}'.format(config['flight']))
except Exception as e:
print('Error reading the config file')
# TODO What to do if exception is reached? Exit, or read in test data?
config_file = sys.argv[1]
print(f"Getting processing parameters from {config_file}")
except IndexError:
proj_path = os.path.abspath(os.path.dirname(__file__))
config_file = os.path.join(proj_path, 'config_runtime.yaml')
print(f"Reading default configuration file from {proj_path}")

with open(config_file, 'r') as file:  # TODO write a function to get parameters from config YAML all at once (a sketch follows the example config above)
config = yaml.safe_load(file)
print(f"\n~~~~~~~~~~~ Flight {config['flight']} ~~~~~~~~~~~")
campaign = config['campaign']
flight = config['flight']
begin_line = datetime.strptime(config['begin_line'], '%Y-%m-%d %H:%M')
end_line = datetime.strptime(config['end_line'], '%Y-%m-%d %H:%M')
gravity_directory = config['gravity_dir']
gravity_file = config['gravity_file']
trajectory_source = config['trajectory_src']
trajectory_directory = config['trajectory_dir']
trajectory_file = config['trajectory_file']
gps_fields = config['gps_fields']
configdir = config['config_dir']
meterconfig_dir = config['config_dir']
outdir = config['out_dir']
try:
QC_segment = config['QC1'] #
@@ -52,30 +61,59 @@
# Load Data Files
print('\nImporting gravity')
gravity = read_at1a(os.path.join(gravity_directory, gravity_file), interp=True)
print("Gravity START: {}".format(gravity.index[0]))
print("Gravity END: {}".format(gravity.index[-1]))
print(f"Gravity Data Starts: {gravity.index[0]}")
print(f"Gravity Data Ends: {gravity.index[-1]}")
print('\nImporting trajectory')
trajectory = import_trajectory(os.path.join(trajectory_directory, trajectory_file),
columns=gps_fields, skiprows=1,
timeformat='hms', engine=trajectory_engine, sep=trajectory_delim)
# Append iMAR AccBiasZ
if import_auxiliary:
plot_imar = True
imar_file = trajectory_file.replace('DGS', 'iMAR_1Hz')
# imar_file = f'{os.path.splitext(imar_file)[0]}_1Hz{os.path.splitext(imar_file)[1]}'
imar = import_imar_zbias(os.path.join(trajectory_directory, imar_file))
# imar_10Hz = imar.resample('100L').first().bfill(limit=1)[['lat','z_acc_bias']].interpolate(method='linear', limit_area='inside')

import pandas as pd
# pd.merge(gravity, imar_10Hz).head()
gravity = pd.concat([gravity, imar[['z_acc_bias']]], axis=1)
gravity['z_acc_bias'] = gravity[['z_acc_bias']].interpolate(method='linear', limit_area='inside')
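What the concat/interpolate pair above does, shown on made-up frames (a 10 Hz gravity stand-in and a 1 Hz bias stand-in):

```python
import pandas as pd

ten_hz = pd.DataFrame({'meter_gravity': range(20)},
                      index=pd.date_range('2019-10-27 02:00', periods=20, freq='100ms'))
one_hz = pd.DataFrame({'z_acc_bias': [0.1, 0.2]},
                      index=pd.date_range('2019-10-27 02:00', periods=2, freq='1s'))

# The outer concat drops the 1 Hz samples onto the 10 Hz index, leaving NaN
# between them; limit_area='inside' fills only between valid samples, so
# nothing is extrapolated past the last bias value.
merged = pd.concat([ten_hz, one_hz], axis=1)
merged['z_acc_bias'] = merged['z_acc_bias'].interpolate(method='linear', limit_area='inside')
```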


# Read MeterProcessing file in Data Directory
config_file = os.path.join(configdir, 'DGS_config_files', 'MeterProcessing.ini')
k_factor = read_meterconfig(config_file, 'kfactor')
tie_gravity = read_meterconfig(config_file, 'TieGravity')
print('K-factor: {}\nGravity-tie: {}\n'.format(k_factor, tie_gravity))
meterconfig_file = os.path.join(meterconfig_dir, 'DGS_config_files', 'MeterProcessing.ini')
if diagnostic:
k_factor = 1
else:
k_factor = read_meterconfig(meterconfig_file, 'kfactor')
tie_gravity = read_meterconfig(meterconfig_file, 'TieGravity')
if not 0.5 < tie_gravity / 980000 < 2:  # a tie should be roughly 9.8e5 mGal; flag unit mistakes in either direction
    print("Check your gravity tie units.")
    sys.exit(1)

print(f"K-factor: {k_factor}\nGravity-tie: {tie_gravity}\n")

# Still Readings
# TODO: Semi-automate or create GUI to get statics
first_static = read_meterconfig(config_file, 'PreStill')
second_static = read_meterconfig(config_file, 'PostStill')
first_static = read_meterconfig(meterconfig_file, 'PreStill')
second_static = read_meterconfig(meterconfig_file, 'PostStill')

# pre-processing prep
if not begin_line < end_line:
print('Check your times. Using start and end of gravity file instead.')
if diagnostic:
begin_line = gravity.index[0]
end_line = gravity.index[-1]
trajectory_full = trajectory[['long', 'lat']]
else:
begin_line = datetime.strptime(config['begin_line'], '%Y-%m-%d %H:%M')
end_line = datetime.strptime(config['end_line'], '%Y-%m-%d %H:%M')
if not begin_line < end_line:
print("Check your times. Using start and end of gravity file instead.")
begin_line = gravity.index[0]
end_line = gravity.index[-1]


if add_map_plots:
trajectory_full = trajectory[['long', 'lat']]
gravity = gravity[(begin_line <= gravity.index) & (gravity.index <= end_line)]
trajectory = trajectory[(begin_line <= trajectory.index) & (trajectory.index <= end_line)]

@@ -85,6 +123,11 @@
# align gravity and trajectory frames
gravity, trajectory = align_frames(gravity, trajectory)

# Check that arrays are same length
if abs(gravity.shape[0] - trajectory.shape[0]) > 0:
trajectory = trajectory.iloc[0:gravity.shape[0]]
# TODO or check for duplicates, like trajectory.drop_duplicates(keep='first')
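The TODO above points at de-duplication instead of truncation; a minimal sketch of that alternative, keyed on the time index rather than row values (not what the script currently does):

```python
# Drop repeated timestamps in either frame before enforcing equal length,
# so a duplicated sample cannot silently shift the alignment.
trajectory = trajectory[~trajectory.index.duplicated(keep='first')]
gravity = gravity[~gravity.index.duplicated(keep='first')]
```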

# map negative longitudes to 0-360 so tracks that cross the ±180° meridian stay continuous
trajectory['long'] = trajectory['long'].where(trajectory['long'] > 0, trajectory['long'] + 360)

@@ -100,25 +143,47 @@
g = AirbornePost(trajectory, gravity, begin_static=first_static, end_static=second_static)
results = g.execute()

if write_out: # TODO: split this file up into a Diagnostic and Standard output
# Check that arrays are same length
if abs(gravity.shape[0] - results['corrected_grav'].shape[0]) != 0:
results['corrected_grav'] = results['corrected_grav'].iloc[0:gravity.shape[0]]
if abs(gravity.shape[0] - results['filtered_grav'].shape[0]) != 0:
results['filtered_grav'] = results['filtered_grav'].iloc[0:gravity.shape[0]]

# TODO: split this file up into a QC and Official output
if write_out:
import numpy as np
import pandas as pd

print('\nWriting Output to File')
time = pd.Series(trajectory.index.astype(np.int64) / 10 ** 9,
index=trajectory.index, name='unix_time')
columns = ['unixtime', 'lat', 'long', 'ell_ht',
'eotvos_corr', 'kin_accel_corr', 'meter_grav',
'eotvos_corr', 'kin_accel_corr',
'meter_grav', 'beam',
'lat_corr', 'fa_corr', 'total_corr',
'abs_grav', 'FAA', 'FAA_LP']
values = np.array([time.values, trajectory['lat'].values, trajectory['long'].values, trajectory['ell_ht'].values,
results['eotvos'].values, results['kin_accel'].values, gravity['meter_gravity'].values,
results['lat_corr'].values, results['fac'].values, results['total_corr'].values,
results['abs_grav'].values, results['corrected_grav'].values, results['filtered_grav'].values])
#
df = pd.DataFrame(data=values.T, columns=columns, index=time) #pd.DatetimeIndex(gravity.index)
out_array = np.array([time.values,
trajectory['lat'].values.round(decimals=6),
trajectory['long'].values.round(decimals=6),
trajectory['ell_ht'].values.round(decimals=3),
results['eotvos'].values.round(decimals=2),
results['kin_accel'].values.round(decimals=2),
gravity['meter_gravity'].values.round(decimals=2),
gravity['beam'].values.round(decimals=5),
results['lat_corr'].values.round(decimals=2),
results['fac'].values.round(decimals=2),
results['total_corr'].values.round(decimals=2),
results['abs_grav'].values.round(decimals=2),
results['corrected_grav'].values.round(decimals=2),
results['filtered_grav'].values.round(decimals=2)])
if import_auxiliary:
columns += ['AccBiasZ']
out_array = np.vstack([out_array, gravity['z_acc_bias'].values.round(decimals=7)])
df = pd.DataFrame(data=out_array.T, columns=columns, index=time)
df = df.apply(pd.to_numeric, errors='ignore')
df.index = pd.to_datetime(trajectory.index)
outfile = os.path.join(outdir, '{}_{}_{}_DGP.csv'.format(campaign, flight, str(begin_line.strftime('%Y%m%d_%H%Mz'))))
outfile = os.path.join(outdir,
'{}_{}_{}_DGS_FINAL.csv'.format(campaign, flight, str(begin_line.strftime('%Y%m%d_%H%Mz'))))
df.to_csv(outfile) # , na_rep=" ")
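For later QC, a hedged sketch of reading the exported CSV back in (the file name just follows the pattern above; the datetime index is the first column written by to_csv):

```python
import pandas as pd

out = pd.read_csv('OIB-GV_F1003_20191026_2300z_DGS_FINAL.csv',
                  index_col=0, parse_dates=True)
print(out[['FAA', 'FAA_LP']].describe())
```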

###########
@@ -156,7 +221,18 @@
plot_title, plot_file)

if QC_plot:
# QC Segment Plot
# QC Segment Plot - AccBiasZ
try:
variables = ['z_acc_bias', 'long_accel', 'cross_accel']
variable_units = ['mGal', 'mGal', 'mGal', 'mGal']
plot_title = '{} {}: Accel (segment)'.format(campaign, flight)
plot_file = os.path.join(outdir, '{}_{}_DGP_QCplot_accel_segment.png'.format(campaign, flight))
timeseries_gravity_diagnostic(gravity, variables, variable_units,
QC_segment['start'], QC_segment['end'],
plot_title, plot_file)
except KeyError:
print("Couldn't make AccBiasZ plot...")
# QC Segment Plot - Gravity Output
variables = ['filtered_grav', 'corrected_grav', 'abs_grav']
variable_units = ['mGal', 'mGal', 'mGal', 'mGal']
plot_title = '{} {}: Gravity (segment)'.format(campaign, flight)
@@ -170,4 +246,4 @@
plot_file = os.path.join(outdir, '{}_{}_DGP_QCplot_freeair_map.png'.format(campaign, flight))
mapplot_segment(results, 'filtered_grav',
QC_segment['start'], QC_segment['end'],
'mGal', plot_title, plot_file)
'mGal', plot_title, plot_file)