Merge branch 'Jakidxav-picker2-mc'
Also increment the release
dlilien committed Jul 17, 2019
2 parents 5bfa495 + 50b1b94 commit 5238a7f
Showing 12 changed files with 341 additions and 87 deletions.
4 changes: 4 additions & 0 deletions impdar/bin/impdar.py
@@ -86,6 +86,10 @@ def _get_args():
parser_plot.add_argument('-power', type=int, default=None,
help='Plot the power on this layer number')
parser_plot.add_argument('-o', type=str, help='Write to this filename')
parser_plot.add_argument('-spectra', action='store_true', help='Plot power spectral density across traces of the radar profile')
parser_plot.add_argument('-freq_limit', type=float, default=None, help='Maximum frequency up to which to plot the power spectral density')
parser_plot.add_argument('-window', type=str, default=None, help='Type of window function to be used for the signal.periodogram() method')
parser_plot.add_argument('-scale', type=str, default='spectrum', help='Whether to plot power spectral density or power spectrum: default is spectrum')
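The -window help above references signal.periodogram(); for orientation, here is a minimal sketch of how these options could map onto it. The helper name and the per-trace averaging are assumptions, not ImpDAR's actual plotting code.

import numpy as np
from scipy import signal


def spectra_sketch(data, dt, window=None, scale='spectrum', freq_limit=None):
    """Hypothetical helper: periodogram of each trace, averaged across the profile.

    data is a 2D array (samples x traces); dt is the sample interval in seconds.
    """
    fs = 1.0 / dt
    freqs, pxx = signal.periodogram(data, fs=fs,
                                    window=window if window is not None else 'boxcar',
                                    scaling=scale, axis=0)
    mean_pxx = pxx.mean(axis=1)  # average over traces
    if freq_limit is not None:
        keep = freqs <= freq_limit
        freqs, mean_pxx = freqs[keep], mean_pxx[keep]
    return freqs, mean_pxx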

parser_convert = subparsers.add_parser('convert', help='Convert filetype (potentially lossy)')
parser_convert.set_defaults(func=convert.convert)
30 changes: 23 additions & 7 deletions impdar/bin/impproc.py
@@ -14,12 +14,12 @@

import os.path
import argparse
from impdar.lib.load import load
import numpy as np
from impdar.lib.convert import convert
from impdar.lib.load import load
from impdar.lib.process import concat
from impdar.lib.gpslib import interp as interpdeep


def _get_args():
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help='Choose a processing step')
@@ -87,12 +87,16 @@ def _get_args():

# Migration
parser_mig = add_procparser(subparsers, 'migrate', 'Migration', mig, defname='migrated')
parser_mig.add_argument('--mtype', type=str, default='stolt', choices=['stolt', 'kirch', 'phsh', 'su'], help='Migration routines.')
parser_mig.add_argument('--mtype', type=str, default='stolt', choices=['stolt', 'kirch', 'phsh', 'tk', 'su'], help='Migration routines.')
parser_mig.add_argument('--vel', type=float, default=1.69e8, help='Speed of light in dielectric medium m/s (default is for ice, 1.69e8)')
parser_mig.add_argument('--vel_fn', type=str, default=None, help='Filename for input velocity array. Column 1: velocities, Column 2: z locations, Column 3: x locations (optional)')
parser_mig.add_argument('--nearfield', action='store_true', help='Boolean for nearfield operator in Kirchhoff migration.')
parser_mig.add_argument('--htaper', type=int, default=100, help='Number of samples for horizontal taper')
parser_mig.add_argument('--vtaper', type=int, default=1000, help='Number of samples for vertical taper')
parser_mig.add_argument('--nxpad', type=int, default=100, help='Number of traces to pad with zeros for FFT')
parser_mig.add_argument('--tmig', type=int, default=0, help='Times for velocity profile')
parser_mig.add_argument('--verbose', type=int, default=1, help='Print output from SeisUnix migration')
parser_mig.add_argument('--sutype', type=str, default='sumigtk', choices=['sustolt','sumigtk','sumigffd'], help='Migration command for SeisUnix')
add_def_args(parser_mig)

return parser
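To illustrate what the --htaper, --vtaper, and --nxpad options control, here is a generic numpy sketch of edge tapering and zero-padding ahead of the lateral FFT. It is illustrative only, not ImpDAR's migration code, and it assumes the section is larger than the taper lengths.

import numpy as np


def taper_and_pad(data, htaper=100, vtaper=1000, nxpad=100):
    """Cosine-taper the edges of a (snum x tnum) section and zero-pad traces."""
    out = data.astype(float).copy()
    # horizontal taper: ramp the first and last `htaper` traces toward zero
    ramp = 0.5 * (1.0 - np.cos(np.pi * np.arange(htaper) / htaper))
    out[:, :htaper] *= ramp[np.newaxis, :]
    out[:, -htaper:] *= ramp[::-1][np.newaxis, :]
    # vertical taper: ramp the last `vtaper` samples of every trace toward zero
    rampv = 0.5 * (1.0 + np.cos(np.pi * np.arange(vtaper) / vtaper))
    out[-vtaper:, :] *= rampv[:, np.newaxis]
    # pad `nxpad` zero traces so the horizontal FFT has room to avoid wrap-around
    return np.pad(out, ((0, 0), (0, nxpad)), mode='constant')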
@@ -205,11 +209,23 @@ def interp(dats, spacing, gps_fn, offset=0.0, minmove=1.0e-2, **kwargs):
interpdeep(dats, spacing, fn=gps_fn, offset=offset, min_movement=minmove)


def mig(dat, mtype='stolt', vel=1.69e8, vel_fn=None, nearfield=False, htaper=100, vtaper=1000, **kwargs):
def mig(dat, mtype='stolt', vel=1.69e8, **kwargs):
# save to seisunix format for migration with SU routines
if mtype == 'su':
convert(dat.fn, 'segy')
dat.migrate(mtype, vel=vel, vel_fn=vel_fn, nearfield=nearfield, htaper=htaper, vtaper=vtaper)

try:
out_fn = os.path.splitext(dat.fn)[0] + '.sgy'
dat.save_as_segy(out_fn)
except Exception:
raise ValueError('Could not save .sgy')
# migrate
dat.migrate(mtype, vel=vel, **kwargs)

# Read the migrated .bin file
if mtype == 'su':
bin_fn = os.path.splitext(dat.fn)[0] + '_mig.bin'
with open(bin_fn, 'rb') as fid:
data_flat = np.fromfile(fid, np.float32)
dat.data = np.transpose(np.reshape(data_flat, (dat.tnum, dat.snum)))
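As a usage sketch of the new wrapper: the filename is hypothetical, the load call assumes the impdar.lib.load.load(filetype, fns) signature, and importing mig like this assumes impdar.bin is importable as a package.

from impdar.lib.load import load
from impdar.bin.impproc import mig

# hypothetical profile; anything ImpDAR can load would do
dat = load('mat', ['line01.mat'])[0]

# pure-Python Stolt migration; extra kwargs pass straight through to the routine
mig(dat, mtype='stolt', vel=1.69e8)

# SeisUnix path: a .sgy is written first, dat.migrate('su', ...) hands off to SU,
# and the migrated line01_mig.bin is read back into dat.data
mig(dat, mtype='su', vel=1.69e8, sutype='sumigtk')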

if __name__ == '__main__':
main()
10 changes: 8 additions & 2 deletions impdar/lib/RadarData/_RadarDataFiltering.py
@@ -407,7 +407,7 @@ def vertical_band_pass(self,
self.flags.bpass[2] = high


def migrate(self, mtype='stolt', **kwargs):
def migrate(self, mtype='stolt', sutype='sumigtk', **kwargs):
"""Migrate the data.
This is a wrapper around all the migration routines in migration_routines.py.
@@ -424,10 +424,16 @@ def migrate(self, mtype='stolt', **kwargs):
migrationlib.migrationStolt(self, **kwargs)
elif mtype == 'phsh':
migrationlib.migrationPhaseShift(self, **kwargs)
elif mtype == 'tk':
migrationlib.migrationTimeWavenumber(self, **kwargs)
elif mtype == 'su':
migrationlib.migrationSeisUnix(self, **kwargs)
else:
raise ValueError('Unrecognized migration routine')

# change migration flag
self.flags.mig = mtype
if mtype == 'su':
mflag = sutype
else:
mflag = mtype
self.flags.mig = mflag
3 changes: 2 additions & 1 deletion impdar/lib/RadarData/_RadarDataSaving.py
@@ -62,7 +62,8 @@ def save_as_segy(self, fn):
if not SEGY:
raise ImportError('segyio failed to import, cannot save as segy')

segyio.tools.from_array2D(fn, self.data.transpose(), dt=self.dt * 1.0e6)
print(self.dt, self.dt * 1.0e12)
segyio.tools.from_array2D(fn, self.data.transpose(), dt=self.dt * 1.0e12)
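The sample interval is now handed to segyio in picoseconds (seconds × 1.0e12), which matches the 1.0e-12 factor load_segy applies below; a quick sanity check of the round trip with an illustrative value:

dt = 2.0e-9                      # 2 ns sample interval (illustrative)
interval_ps = dt * 1.0e12        # 2000.0 -- value handed to segyio as the Interval
dt_back = interval_ps * 1.0e-12  # 2e-09  -- what load_segy recovers, in seconds
assert abs(dt_back - dt) < 1e-15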


def output_shp(self, fn, t_srs=4326, target_out=None):
47 changes: 36 additions & 11 deletions impdar/lib/gpslib.py
@@ -182,7 +182,7 @@ def __init__(self, gga, scans, trace_num):
self.get_dist()


def kinematic_gps_control(dats, lat, lon, elev, decday, offset=0.0):
def kinematic_gps_control(dats, lat, lon, elev, decday, offset=0.0, extrapolate=False):
"""Use new, better GPS data for lat, lon, and elevation
The interpolation in this function is done in time, since the radar has accurate timing from its GPS. The old version of this function in StoDeep required redundant variables (x_coord, y_coord, dist). I've dropped that dependency.
@@ -201,10 +201,19 @@ def kinematic_gps_control(dats, lat, lon, elev, decday, offset=0.0):
Decimal day. You need to reference this to match up with what the radar uses using offset
offset: float, optional
Translate the GPS times by this amount for alignment with the radar
extrapolate: bool, optional
If True, fill values outside the GPS time range by extrapolation rather than with NaNs.
Desirable for small offsets with the GPS, but dangerous since you can totally screw up
the geolocation and not get an error.
USE WITH CAUTION.
"""
int_lat = interp1d(decday, lat)
int_long = interp1d(decday, lon)
int_elev = interp1d(decday, elev)
if extrapolate:
fill_value = 'extrapolate'
else:
fill_value = np.NaN
int_lat = interp1d(decday, lat, kind='linear', bounds_error=False, fill_value=fill_value)
int_long = interp1d(decday, lon, kind='linear', bounds_error=False, fill_value=fill_value)
int_elev = interp1d(decday, elev, kind='linear', bounds_error=False, fill_value=fill_value)
if type(dats) not in [list, tuple]:
dats = [dats]
for dat in dats:
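A small illustration of what the extrapolate switch changes, using synthetic values rather than real GPS data:

import numpy as np
from scipy.interpolate import interp1d

decday = np.array([100.0, 100.1, 100.2])
lat = np.array([-77.50, -77.51, -77.52])

# default: times outside the GPS record come back as NaN
int_lat_nan = interp1d(decday, lat, kind='linear', bounds_error=False, fill_value=np.nan)
# extrapolate=True: the linear trend is continued past the ends of the record
int_lat_ext = interp1d(decday, lat, kind='linear', fill_value='extrapolate')

print(int_lat_nan(100.25))  # nan
print(int_lat_ext(100.25))  # -77.525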
@@ -221,7 +230,7 @@ def kinematic_gps_control(dats, lat, lon, elev, decday, offset=0.0):
dat.dist = gpsdat.dist


def kinematic_gps_mat(dats, mat_fn, offset=0.0):
def kinematic_gps_mat(dats, mat_fn, offset=0.0, extrapolate=False):
"""Use a matlab file with gps info to redo radar GPS
Parameters
@@ -232,16 +241,21 @@ def kinematic_gps_mat(dats, mat_fn, offset=0.0):
The matlab file, containing lat, long, elev, and decday, to use
offset: float, optional
Change decday by this much to match the radar's gps
extrapolate: bool, optional
If True, fill values outside the GPS time range by extrapolation rather than with NaNs.
Desirable for small offsets with the GPS, but dangerous since you can totally screw up
the geolocation and not get an error.
USE WITH CAUTION.
"""
from scipy.io import loadmat
mat = loadmat(mat_fn)
for val in ['lat', 'long', 'elev', 'decday']:
if val not in mat:
raise ValueError('{:s} needs to be contained in matlab input file'.format(val))
kinematic_gps_control(dats, mat['lat'].flatten(), mat['long'].flatten(), mat['elev'].flatten(), mat['decday'].flatten(), offset=offset)
kinematic_gps_control(dats, mat['lat'].flatten(), mat['long'].flatten(), mat['elev'].flatten(), mat['decday'].flatten(), offset=offset, extrapolate=extrapolate)


def kinematic_gps_csv(dats, csv_fn, offset=0, names='decday,long,lat,elev', **genfromtxt_flags):
def kinematic_gps_csv(dats, csv_fn, offset=0, names='decday,long,lat,elev', extrapolate=False, **genfromtxt_flags):
"""Use a csv gps file to redo the GPS on radar data.
The csv is read using numpy.genfromtxt, which supports a number of options. One, 'names', is set explicitly by the argument 'names' to this function: you can change the value of that string to True to read column names from the first post-header line in the file. You can also manually change the column names by giving a different comma-separated string. The names must contain 'decday', 'long', 'lat', and 'elev'.
@@ -256,14 +270,20 @@ def kinematic_gps_csv(dats, csv_fn, offset=0, names='decday,long,lat,elev', **genfromtxt_flags):
Change decday by this much to match the radar's gps
names: str, bool, list, or None
names argument to numpy.genfromtxt used to read the csv.
extrapolate: bool, optional
If True, fill values outside the GPS time range by extrapolation rather than with NaNs.
Desirable for small offsets with the GPS, but dangerous since you can totally screw up
the geolocation and not get an error.
USE WITH CAUTION.
Any additional kwargs are passed to numpy.genfromtxt
"""
data = np.genfromtxt(csv_fn, names=names, **genfromtxt_flags)
kinematic_gps_control(dats, data['lat'].flatten(), data['long'].flatten(), data['elev'].flatten(), data['decday'].flatten(), offset=offset)
kinematic_gps_control(dats, data['lat'].flatten(), data['long'].flatten(), data['elev'].flatten(), data['decday'].flatten(), offset=offset, extrapolate=extrapolate)
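For context, a sketch of the kind of genfromtxt read this implies; the CSV layout and the delimiter are assumptions, not taken from an ImpDAR example file.

import numpy as np

# hypothetical gps.csv:
#   decday,long,lat,elev
#   100.0,162.51,-77.50,25.0
#   100.1,162.52,-77.51,24.0
data = np.genfromtxt('gps.csv', delimiter=',', names=True)  # names read from the header line
print(data['decday'], data['lat'], data['long'], data['elev'])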


def interp(dats, spacing, fn=None, fn_type=None, offset=0.0, min_movement=1.0e-2, genfromtxt_kwargs={}, **kwargs):
def interp(dats, spacing, fn=None, fn_type=None, offset=0.0, min_movement=1.0e-2, genfromtxt_kwargs={}, extrapolate=False, **kwargs):
"""Do kinematic GPS control then interpolate the data to constant spacing
Parameters
@@ -280,12 +300,17 @@ def interp(dats, spacing, fn=None, fn_type=None, offset=0.0, min_movement=1.0e-2, genfromtxt_kwargs={}, **kwargs):
use this separation to try to cull stationary entries in the gps
genfromtxt_kwargs: dict, optional
kwargs to pass to genfromtxt when reading a csv. Ignored otherwise.
extrapolate: bool, optional
If True, fill values outside the GPS time range by extrapolation rather than with NaNs.
Desirable for small offsets with the GPS, but dangerous since you can totally screw up
the geolocation and not get an error.
USE WITH CAUTION.
"""
if fn is not None:
if fn_type == 'mat' or ((fn_type is None) and (fn[-4:] == '.mat')):
kinematic_gps_mat(dats, fn, offset=offset)
kinematic_gps_mat(dats, fn, offset=offset, extrapolate=extrapolate)
elif fn_type == 'csv' or (fn_type is None and fn[-4:] in ['.csv', '.txt']):
kinematic_gps_csv(dats, fn, offset=offset, **genfromtxt_kwargs)
kinematic_gps_csv(dats, fn, offset=offset, extrapolate=extrapolate, **genfromtxt_kwargs)
else:
raise ValueError('fn_type must be mat or csv')
for dat in dats:
3 changes: 2 additions & 1 deletion impdar/lib/load/load_segy.py
@@ -19,6 +19,7 @@
except ImportError:
SEGY = False


def load_segy(fn_sgy, *args, **kwargs):
"""Load segy data. This is very generic for now,
need to do work if there are particular types of segy files that need to be read"""
@@ -32,7 +33,7 @@ def load_segy(fn_sgy, *args, **kwargs):
segy_data.f.trace[where_good[0]:where_good[-1] + 1]).transpose()
segy_data.snum = segy_data.f.bin[segyio.BinField.Samples]
segy_data.tnum = segy_data.data.shape[1]
segy_data.dt = segy_data.f.bin[segyio.BinField.Interval]
segy_data.dt = segy_data.f.bin[segyio.BinField.Interval] * 1.0e-12
segy_data.travel_time = np.arange(segy_data.snum) * segy_data.dt * 1.0e6
segy_data.trace_num = np.arange(segy_data.data.shape[1]) + 1
segy_data.flags = RadarFlags()
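A usage sketch of the loader with the corrected units; the filename is hypothetical.

from impdar.lib.load.load_segy import load_segy

dat = load_segy('line01_mig.sgy')
print(dat.dt)               # sample interval, now in seconds
print(dat.travel_time[:3])  # two-way travel time in microseconds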
