diff --git a/AutomaticQC/imosRingingBinSetQC.m b/AutomaticQC/imosRingingBinSetQC.m
new file mode 100644
index 000000000..2419a11f5
--- /dev/null
+++ b/AutomaticQC/imosRingingBinSetQC.m
@@ -0,0 +1,57 @@
+function [sample_data, varChecked, paramsLog] = imosRingingBinSetQC(sample_data, ~)
+%function [sample_data, varChecked, paramsLog] = imosRingingBinSetQC(sample_data, ~)
+%
+% A ringing-bin removal test for ADCPs.
+%
+% The user enters the bin number(s) to be flagged bad in
+% imosRingingBinSetQC.txt.
+%
+% Every datapoint in the listed bin(s) is marked as bad.
+%
+% See imosRingingBinSetQC.txt to enter the bins to be flagged.
+%
+%
+%
+% author: Rebecca.Cowley@csiro.au
+%
+%
+narginchk(1, 2);
+varChecked = {};
+paramsLog = [];
+currentQCtest = mfilename;
+if ~isstruct(sample_data), error('sample_data must be a struct'); end
+
+%get the bins to be flagged
+options = IMOS.resolve.function_parameters();
+%probably a better way to do this.
+bins = options('bin');
+if ischar(bins)
+    bins = str2num(bins);
+end
+
+nt = numel(IMOS.get_data(sample_data.dimensions, 'TIME'));
+if IMOS.adcp.is_along_beam(sample_data)
+    bin_dist = IMOS.get_data(sample_data.dimensions, 'DIST_ALONG_BEAMS');
+else
+    bin_dist = IMOS.get_data(sample_data.dimensions, 'HEIGHT_ABOVE_SENSOR');
+end
+
+
+flag_vars = IMOS.adcp.current_variables(sample_data);
+qcSet = str2double(readProperty('toolbox.qc_set'));
+badFlag = imosQCFlag('bad', qcSet, 'flag');
+goodFlag = imosQCFlag('good', qcSet, 'flag');
+
+flags = ones(nt, length(bin_dist), 'int8') * goodFlag;
+flags(:, bins) = badFlag;
+flag_vars_inds = IMOS.find(sample_data.variables, flag_vars);
+for k = 1:numel(flag_vars_inds)
+    vind = flag_vars_inds(k);
+    sample_data.variables{vind}.flags = flags;
+end
+
+varChecked = flag_vars;
+
+paramsLog = ['ringing_bin_removed=' num2str(bins)];
+writeDatasetParameter(sample_data.toolbox_input_file, currentQCtest, 'ringing_bin_removed', bins);
+end
diff --git a/AutomaticQC/imosRingingBinSetQC.txt b/AutomaticQC/imosRingingBinSetQC.txt
new file mode 100644
index 000000000..c413a0fce
--- /dev/null
+++ b/AutomaticQC/imosRingingBinSetQC.txt
@@ -0,0 +1,4 @@
+% list of bins to be flagged as bad, separated by a comma and a space
+% eg: bin = 1 or bin = 1, 2, 3
+bin= 2, 4
+
diff --git a/IMOS/imosSites.txt b/IMOS/imosSites.txt
index 078d9a753..20f8b941e 100644
--- a/IMOS/imosSites.txt
+++ b/IMOS/imosSites.txt
@@ -68,6 +68,7 @@ SAM7DS, 135.8439, -36.1809, 0.025, 0.025, 2.5
 SAM8SG, 136.69, -35.25, 0.025, 0.025, 2.5
 TAN100, 113.9065833, -21.8493, 0.025, 0.025, 2.5
 EAC0500, 153.89, -27.32, 0.05, 0.05, 2.5
+EAC1520, 153.96, -27.31, 0.05, 0.05, 2.5
 EAC2000, 153.99, -27.31, 0.05, 0.05, 2.5
 EAC3200, 154.13, -27.28, 0.05, 0.05, 2.5
 EAC4200, 154.29, -27.24, 0.05, 0.05, 2.5
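For reference, a minimal MATLAB sketch (illustrative only, not part of the patch) of what the new QC test does with the `bin = 2, 4` entry above: the bin list is parsed into a numeric vector and every time stamp in those bins receives the bad flag (IMOS QC set: 1 = good, 4 = bad).

% toy sizes; in the toolbox nt and the bin count come from the TIME and
% HEIGHT_ABOVE_SENSOR / DIST_ALONG_BEAMS dimensions
bins = str2num('2, 4');                      %#ok<ST2NM> parses the list -> [2 4]
nt = 5; nbins = 6;                           % hypothetical dataset dimensions
goodFlag = int8(1); badFlag = int8(4);
flags = ones(nt, nbins, 'int8') * goodFlag;
flags(:, bins) = badFlag;                    % columns 2 and 4 are bad at all times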
diff --git a/Preprocessing/+TeledyneADCP/workhorse_3beamsolution.m b/Preprocessing/+TeledyneADCP/workhorse_3beamsolution.m
new file mode 100644
index 000000000..7131d04a0
--- /dev/null
+++ b/Preprocessing/+TeledyneADCP/workhorse_3beamsolution.m
@@ -0,0 +1,50 @@
+function [b1,b2,b3,b4] = workhorse_3beamsolution(T, beamdat)
+%function [b1,b2,b3,b4] = workhorse_3beamsolution(T, beamdat)
+%
+% Return the 3-beam solutions for instrument-coordinate data in which
+% screening has flagged exactly one bad beam.
+%
+%
+% Reference: pages 14-15 of ADCP Coordinate Transformation,
+% Formulas and Calculations, Teledyne RD Instruments.
+% P/N 951-6079-00 (January 2008).
+%
+%
+% Inputs:
+%
+% T: transformation matrix from beam to instrument axes, as obtained from
+% workhorse_beam2inst
+% beamdat: mx4 matrix with m depth cells and 4 beams (one profile)
+%
+% Outputs:
+%
+% b1, b2, b3, b4 [double] - 1xm beam velocities with 3-beam solutions
+% substituted where one beam was bad.
+%
+%
+% author: rebecca.cowley@csiro.au
+%
+%
+narginchk(2, 2)
+
+if nargin < 2
+    errormsg('Not enough arguments to continue')
+end
+
+%code snippet from any3beam.m from UWA code set.
+three_beam = find(isfinite(beamdat)*ones(4, 1) == 3);
+btim = beamdat;
+%disp(length(three_beam))
+if any(three_beam)
+    data = beamdat(three_beam, :); % select depth cells with exactly 3 "good" beams
+    mask = isnan(data); % find which of the 4 beams is bad
+    data(mask) = 0; % force the "bad beam" to zero instead of NaN, so the error velocity is 0
+    tran = ones(length(three_beam), 1) * T(4, :); % one copy of the error velocity transformation per 3-beam depth cell
+    err = (data .* tran) * ones(4, 1); % multiply the data with the error velocity transformation
+    data(mask) = -err ./ tran(mask); % assign the bad beam the value implied by the other 3 beams
+    btim(three_beam, :) = data;
+end
+b1 = btim(:,1)';
+b2 = btim(:,2)';
+b3 = btim(:,3)';
+b4 = btim(:,4)';
+end
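The 3-beam substitution above rests on the error-velocity row of the beam-to-instrument matrix: with the bad beam zeroed, the error velocity the remaining beams imply can be computed, and the missing beam is set so that the error velocity becomes exactly zero. A minimal sketch (illustrative values; the toolbox builds T with TeledyneADCP.workhorse_beam2inst, assumed here to follow the 20-degree convex geometry of the reference manual):

theta = 20*pi/180;                          % nominal Workhorse beam angle
a = 1/(2*sin(theta)); b = 1/(4*cos(theta)); d = a/sqrt(2);
T = [a -a 0 0; 0 0 -a a; b b b b; d d -d -d];
beamdat = [0.11 0.09 NaN 0.10];             % one depth cell, beam 3 screened out
mask = isnan(beamdat);
beamdat(mask) = 0;                          % the bad beam contributes nothing for now
err = T(4,:) * beamdat';                    % error velocity with the bad beam zeroed
beamdat(mask) = -err / T(4,mask);           % pick the missing beam so err becomes 0
% with the error-velocity row [d d -d -d], beam 3 becomes b1 + b2 - b4 = 0.10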
diff --git a/Preprocessing/+TeledyneADCP/workhorse_beam2earth.m b/Preprocessing/+TeledyneADCP/workhorse_beam2earth.m
index 37d7fb83b..1c693ea96 100644
--- a/Preprocessing/+TeledyneADCP/workhorse_beam2earth.m
+++ b/Preprocessing/+TeledyneADCP/workhorse_beam2earth.m
@@ -84,6 +84,9 @@ end
 
 for t = 1:size(v1, 1)
+    %get values for 3-beam solutions
+    [b1(t, :), b2(t, :), b3(t, :), b4(t, :)] = TeledyneADCP.workhorse_3beamsolution(I, [b1(t, :); b2(t, :); b3(t, :); b4(t, :)]');
+
     h_t = heading(t);
     p_t = gpitch(t);
     r_t = adjusted_roll(t);
diff --git a/Preprocessing/adcpWorkhorseCorrMagPP.m b/Preprocessing/adcpWorkhorseCorrMagPP.m
new file mode 100644
index 000000000..a5acd9252
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseCorrMagPP.m
@@ -0,0 +1,132 @@
+function sample_data = adcpWorkhorseCorrMagPP( sample_data, qcLevel, ~ )
+%ADCPWORKHORSECORRMAGPP Screening procedure for Teledyne Workhorse (and similar)
+% ADCP instrument data, using the correlation magnitude diagnostic variable.
+%
+%
+% Inputs:
+%   sample_data - struct containing the entire data set and dimension data.
+%   qcLevel - string, 'raw' or 'qc'. Some pp not applied when 'raw'.
+%
+% Outputs:
+%   sample_data - same as input, with QC flags added for variable/dimension
+%   data.
+%
+% Author: Guillaume Galibert
+%         Rebecca Cowley
+%
+
+%
+% Copyright (C) 2017, Australian Ocean Data Network (AODN) and Integrated
+% Marine Observing System (IMOS).
+%
+% This program is free software: you can redistribute it and/or modify
+% it under the terms of the GNU General Public License as published by
+% the Free Software Foundation version 3 of the License.
+%
+% This program is distributed in the hope that it will be useful,
+% but WITHOUT ANY WARRANTY; without even the implied warranty of
+% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+% GNU General Public License for more details.
+
+% You should have received a copy of the GNU General Public License
+% along with this program.
+% If not, see <https://www.gnu.org/licenses/gpl-3.0.en.html>.
+%
+narginchk(2, 3);
+if ~iscell(sample_data), error('sample_data must be a cell array'); end
+
+% auto logical in input to enable running under batch processing
+if nargin<2, auto=false; end
+
+% no modification of data is performed on the raw FV00 dataset except
+% local time to UTC conversion
+if strcmpi(qcLevel, 'raw'), return; end
+
+for k = 1:length(sample_data)
+    % do not process if not RDI nor Nortek
+    isRDI = false;
+    if strcmpi(sample_data{k}.meta.instrument_make, 'Teledyne RDI'), isRDI = true; end
+    if ~isRDI, continue; end
+    % get all necessary dimensions and variables id in sample_data struct
+    idVel1 = 0;
+    idVel2 = 0;
+    idVel3 = 0;
+    idVel4 = 0;
+    idCMAG = cell(4, 1);
+    for j=1:4
+        idCMAG{j} = 0;
+    end
+    lenVar = length(sample_data{k}.variables);
+    for i=1:lenVar
+        paramName = sample_data{k}.variables{i}.name;
+
+        if strncmpi(paramName, 'VEL1', 4), idVel1 = i; end
+        if strncmpi(paramName, 'VEL2', 4), idVel2 = i; end
+        if strncmpi(paramName, 'VEL3', 4), idVel3 = i; end
+        if strncmpi(paramName, 'VEL4', 4), idVel4 = i; end
+        for j=1:4
+            cc = int2str(j);
+            if strcmpi(paramName, ['CMAG' cc]), idCMAG{j} = i; end
+        end
+    end
+
+    % check if the data is compatible with the QC algorithm
+    idMandatory = (idVel1 | idVel2 | idVel3 | idVel4 );
+    for j=1:4
+        idMandatory = idMandatory & idCMAG{j};
+    end
+    if ~idMandatory, return; end
+
+    % let's get the associated vertical dimension
+    idVertDim = sample_data{k}.variables{idCMAG{1}}.dimensions(2);
+    if strcmpi(sample_data{k}.dimensions{idVertDim}.name, 'DIST_ALONG_BEAMS')
+        disp(['Warning : adcpWorkhorseCorrMagPP applied with a non tilt-corrected CMAGn (no bin mapping) on dataset ' sample_data{k}.toolbox_input_file]);
+    end
+
+    qcSet = str2double(readProperty('toolbox.qc_set'));
+    badFlag = imosQCFlag('bad', qcSet, 'flag');
+    goodFlag = imosQCFlag('good', qcSet, 'flag');
+    rawFlag = imosQCFlag('raw', qcSet, 'flag');
+
+    %Pull out correlation magnitude
+    sizeData = size(sample_data{k}.variables{idCMAG{1}}.data);
+
+    % read in filter parameters
+    propFile = fullfile('Preprocessing', 'adcpWorkhorseCorrMagPP.txt');
+    cmag = str2double(readProperty('cmagPP', propFile));
+
+    % read dataset QC parameters if exist and override previous
+    % parameters file
+    currentQCtest = mfilename;
+    cmag = readDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'cmagPP', cmag);
+
+    paramsLog = ['cmag=' num2str(cmag)];
+
+    sizeCur = size(sample_data{k}.variables{idVel1}.flags);
+
+    % same flags are given to any variable
+    for a = 1:4
+        flags = ones(sizeCur, 'int8')*rawFlag;
+
+        % Run QC. For this screening test, each beam is tested independently
+        eval(['iPass = sample_data{k}.variables{idCMAG{' num2str(a) '}}.data > cmag;'])
+        % Apply the QC result to the beam velocity flags
+        flags(~iPass) = badFlag;
+        flags(iPass) = goodFlag;
+
+        eval(['sample_data{k}.variables{idVel' num2str(a) '}.flags = flags;'])
+
+    end
+
+
+    % write/update dataset QC parameters
+    writeDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'cmagPP', cmag);
+    cmagcomment = ['adcpWorkhorseCorrMagPP.m: Correlation Magnitude preprocessing screening applied to beam velocities. Threshold = ' num2str(cmag)];
+
+
+    if isfield(sample_data{k}, 'history') && ~isempty(sample_data{k}.history)
+        sample_data{k}.history = sprintf('%s\n%s - %s', sample_data{k}.history, datestr(now_utc, readProperty('exportNetCDF.dateFormat')), cmagcomment);
+    else
+        sample_data{k}.history = sprintf('%s - %s', datestr(now_utc, readProperty('exportNetCDF.dateFormat')), cmagcomment);
+    end
+end
+end
\ No newline at end of file
diff --git a/Preprocessing/adcpWorkhorseCorrMagPP.txt b/Preprocessing/adcpWorkhorseCorrMagPP.txt
new file mode 100644
index 000000000..de6cf98c4
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseCorrMagPP.txt
@@ -0,0 +1 @@
+cmagPP = 64
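A compact illustration (not toolbox code; counts and flag values are hypothetical) of the correlation magnitude screen for one beam, using the default threshold from adcpWorkhorseCorrMagPP.txt:

cmag = 64;                                   % default cmagPP
CMAG1 = [120 80; 30 64; 90 10];              % 3 time stamps x 2 bins, in counts
goodFlag = int8(1); badFlag = int8(4);
flags = ones(size(CMAG1), 'int8') * badFlag;
iPass = CMAG1 > cmag;                        % at or below the threshold fails
flags(iPass) = goodFlag;                     % -> [1 1; 4 4; 1 4], applied to VEL1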
diff --git a/Preprocessing/adcpWorkhorseEchoRangePP.m b/Preprocessing/adcpWorkhorseEchoRangePP.m
new file mode 100644
index 000000000..9b6c6e024
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseEchoRangePP.m
@@ -0,0 +1,194 @@
+function [sample_data,df] = adcpWorkhorseEchoRangePP( sample_data, qcLevel, ~ )
+% adcpWorkhorseEchoRangePP Quality control procedure for Teledyne Workhorse (and similar)
+% ADCP instrument data, using the echo intensity diagnostic variable.
+%
+% Echo Range test :
+% The Echo Range test is also known as the 'Fish Detection Test' and is
+% designed as a screening check for single ping data. The equivalent is
+% performed on board the RDI if the WA command is enabled. Only use this
+% test if the WA command has not been enabled in the RDI data you have
+% collected.
+% The test is probably only useful for single ping data, prior to ensemble
+% averaging.
+%
+% The test checks the difference between the highest and lowest echo intensity
+% values in the 4 beams of each bin (echo intensity range, EIR) at each time
+% stamp. If the threshold is exceeded, the beam with the lowest echo intensity
+% is flagged.
+% The test then checks the difference between the highest and second lowest
+% values. If this also exceeds the threshold, all 4 beams of the bin are
+% flagged bad.
+% Once this information is collected, three-beam solutions can be applied
+% to calculate velocity from raw beam data where one beam is flagged as bad.
+% The echo intensity data should ideally be bin-mapped first using the
+% adcpBinMappingPP.m routine.
+% The threshold (ea_fishthresh) is set in adcpWorkhorseEchoRangePP.txt.
+%
+% Inputs:
+%   sample_data - struct containing the entire data set and dimension data.
+%   qcLevel - string, 'raw' or 'qc'. Some pp not applied when 'raw'.
+%
+% Outputs:
+%   sample_data - same as input, with QC flags added for variable/dimension
+%   data.
+%
+% Author: Guillaume Galibert
+%         Rebecca Cowley
+%
+narginchk(2,3);
+if ~iscell(sample_data), error('sample_data must be a cell array'); end
+
+% auto logical in input to enable running under batch processing
+if nargin<2, auto=false; end
+
+% no modification of data is performed on the raw FV00 dataset except
+% local time to UTC conversion
+if strcmpi(qcLevel, 'raw'), return; end
+
+paramsLog = [];
+
+for k = 1:length(sample_data)
+    %TODO: refactor this whole block as a function
+    % do not process if not RDI nor Nortek
+    isRDI = false;
+    if strcmpi(sample_data{k}.meta.instrument_make, 'Teledyne RDI'), isRDI = true; end
+    if ~isRDI, continue; end
+    % get all necessary dimensions and variables id in sample_data struct
+    idVel1 = 0;
+    idVel2 = 0;
+    idVel3 = 0;
+    idVel4 = 0;
+    idABSIC = cell(4, 1);
+    for j=1:4
+        idABSIC{j} = 0;
+    end
+    lenVar = length(sample_data{k}.variables);
+    for i=1:lenVar
+        paramName = sample_data{k}.variables{i}.name;
+
+        if strncmpi(paramName, 'VEL1', 4), idVel1 = i; end
+        if strncmpi(paramName, 'VEL2', 4), idVel2 = i; end
+        if strncmpi(paramName, 'VEL3', 4), idVel3 = i; end
+        if strncmpi(paramName, 'VEL4', 4), idVel4 = i; end
+        for j=1:4
+            cc = int2str(j);
+            if strcmpi(paramName, ['ABSIC' cc]), idABSIC{j} = i; end
+        end
+    end
+
+    % check if the data is compatible with the QC algorithm
+    idMandatory = (idVel1 | idVel2 | idVel3 | idVel4 );
+    for j=1:4
+        idMandatory = idMandatory & idABSIC{j};
+    end
+    if ~idMandatory, return; end
+
+    % let's get the associated vertical dimension
+    idVertDim = sample_data{k}.variables{idABSIC{1}}.dimensions(2);
+    if strcmpi(sample_data{k}.dimensions{idVertDim}.name, 'DIST_ALONG_BEAMS')
+        disp(['Warning : adcpWorkhorseEchoRangePP applied with a non tilt-corrected ABSICn (no bin mapping) on dataset ' sample_data{k}.toolbox_input_file]);
+    end
+
+    qcSet = str2double(readProperty('toolbox.qc_set'));
+    badFlag = imosQCFlag('bad', qcSet, 'flag');
+    goodFlag = imosQCFlag('good', qcSet, 'flag');
+    rawFlag = imosQCFlag('raw', qcSet, 'flag');
+
+    %Pull out echo intensity
+    sizeData = size(sample_data{k}.variables{idABSIC{1}}.data);
+    ea = nan(4, sizeData(1), sizeData(2));
+    for j=1:4
+        ea(j, :, :) = sample_data{k}.variables{idABSIC{j}}.data;
+    end
+
+    % read in filter parameters
+    propFile = fullfile('Preprocessing', 'adcpWorkhorseEchoRangePP.txt');
+    ea_fishthresh = str2double(readProperty('ea_fishthresh', propFile));
+
+    % read dataset QC parameters if exist and override previous
+    % parameters file
+    currentQCtest = mfilename;
+    ea_fishthresh = readDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'ea_fishthresh', ea_fishthresh);
+
+    paramsLog = ['ea_fishthresh=' num2str(ea_fishthresh)];
+
+    try
+        frame_of_reference = sample_data{k}.meta.adcp_info.coords.frame_of_reference;
+        is_beam = strcmpi(frame_of_reference,'beam');
+    catch
+        try
+            is_beam = unique(sample_data{k}.meta.fixedLeader.coordinateTransform) ~=7;
+        catch
+            is_beam = false;
+        end
+    end
+
+    %TODO: refactor from below
+    % Run QC
+    % Following code is adapted from the UWA 'adcpfishdetection.m' code
+
+    [n, t, m]=size(ea); % n (4) beams, t timesteps, m depth cells
+    % same flags are given to any variable
+    bad_ea = ones(n,t,m,'int8')*rawFlag;
+
+    % matrix operation of the UW original code which loops over each timestep
+    [B, Ix]=sort(ea,1,'descend'); %sort echo from highest to lowest along each bin
+    %step one - really only useful for data in beam coordinates. If one
+    %beam fails, then we can do 3-beam solutions
+    if is_beam
+        %step 1: Find the beams with the highest and two lowest echo levels
+        df = squeeze(B(1,:,:)-B(4,:,:)); %get the difference from highest value to lowest
+        ind=find(df>ea_fishthresh); % problematic depth cells
+        bad_ea(4,ind) = true; %flag these as bad (individual cells, of the lowest echo value)
+    end
+    %step 2: Flag the entire bin of velocity data if more than one beam exceeds the
+    %threshold, which means a three-beam solution can't be calculated.
+    % Is this test really useful for data that has already been converted to
+    % ENU? No, it should not be run as the thresholds used flag out the data
+    % for identification of 3-beam solutions
+    df = squeeze(B(1,:,:)-B(3,:,:)); %get the difference from highest value to second lowest
+    ind=df>ea_fishthresh; % problematic depth cells
+    bad_ea(:,ind) = true; %flag the entire bin (all beams) as bad
+
+    % need to re-sort the bad_ea matrix back to match beam order:
+    [~,be_sorted] = sort3d(bad_ea,1,Ix);
+
+
+    % % %very slow option
+    % be = bad_ea;
+    % for a = 1:t
+    %     for b = 1:m
+    %         [~,ixx] = sort(Ix(:,a,b));
+    %         bad_ea(:,a,b) = bad_ea(ixx,a,b);
+    %     end
+    % end
+
+    % Keep the flags for each velocity parameter (beam)
+    flags = be_sorted;
+    flags(flags == 1) = badFlag;
+    flags(flags == 0) = goodFlag;
+
+    sample_data{k}.variables{idVel1}.flags = squeeze(flags(1,:,:));
+    sample_data{k}.variables{idVel2}.flags = squeeze(flags(2,:,:));
+    sample_data{k}.variables{idVel3}.flags = squeeze(flags(3,:,:));
+    sample_data{k}.variables{idVel4}.flags = squeeze(flags(4,:,:));
+    % write/update dataset QC parameters
+    writeDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'ea_fishthresh', ea_fishthresh);
+    echorangecomment = ['adcpWorkhorseEchoRangePP.m: Echo Range preprocessing screening applied to beam velocities. Threshold = ' num2str(ea_fishthresh)];
+
+
+    if isfield(sample_data{k}, 'history') && ~isempty(sample_data{k}.history)
+        sample_data{k}.history = sprintf('%s\n%s - %s', sample_data{k}.history, datestr(now_utc, readProperty('exportNetCDF.dateFormat')), echorangecomment);
+    else
+        sample_data{k}.history = sprintf('%s - %s', datestr(now_utc, readProperty('exportNetCDF.dateFormat')), echorangecomment);
+    end
+end
+
+end
+
+function [sA,B] = sort3d(sA,dim,idx)
+    [~,idxx] = sort(idx,dim);
+    gridargs = arrayfun(@(s) 1:s, size(sA), 'UniformOutput',false);
+    [gridargs{:}]=ndgrid(gridargs{:});
+    gridargs{dim} = idxx;
+    B = sA(sub2ind(size(sA), gridargs{:}));
+end
diff --git a/Preprocessing/adcpWorkhorseEchoRangePP.txt b/Preprocessing/adcpWorkhorseEchoRangePP.txt
new file mode 100644
index 000000000..28bdced19
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseEchoRangePP.txt
@@ -0,0 +1 @@
+ea_fishthresh = 50
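A worked example (illustrative values only, not toolbox code) of the two-step Echo Range logic for a single depth cell, with the default 50-count threshold from adcpWorkhorseEchoRangePP.txt:

ea_fishthresh = 50;
ea = [120; 115; 118; 40];               % echo intensity of beams 1-4 in one cell
B = sort(ea, 'descend');                % [120; 118; 115; 40]
if B(1) - B(4) > ea_fishthresh          % 80 > 50: the weakest beam (beam 4) is suspect
    % only that beam is flagged, so a 3-beam solution can still recover velocity
end
if B(1) - B(3) > ea_fishthresh          % 5 > 50 is false: the other beams agree,
    % so the whole cell is kept; otherwise all 4 beams would be flagged bad
end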
diff --git a/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.m b/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.m
new file mode 100644
index 000000000..457887b26
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.m
@@ -0,0 +1,161 @@
+function sample_data = adcpWorkhorseRDIensembleAveragingPP(sample_data, qcLevel, ~)
+% function sample_data = adcpWorkhorseRDIensembleAveragingPP(sample_data, qcLevel, ~)
+% Uses a moving average to smooth the data, then interpolates to a new time
+% grid.
+% Required inputs:
+%   sample_data: structure of data for all instruments
+%   qcLevel - string, 'raw' or 'qc'. Some pp not applied when 'raw'.
+% Outputs:
+%   sample_data: structure with ensemble averaged data
+% Author: Rebecca Cowley
+%
+
+%
+% Copyright (C) 2021, Australian Ocean Data Network (AODN) and Integrated
+% Marine Observing System (IMOS).
+%
+% This program is free software: you can redistribute it and/or modify
+% it under the terms of the GNU General Public License as published by
+% the Free Software Foundation version 3 of the License.
+%
+% This program is distributed in the hope that it will be useful,
+% but WITHOUT ANY WARRANTY; without even the implied warranty of
+% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+% GNU General Public License for more details.
+
+% You should have received a copy of the GNU General Public License
+% along with this program.
+% If not, see <https://www.gnu.org/licenses/gpl-3.0.en.html>.
+%
+%% Improvements & Limitations
+% Not all datafields will be supported, it's a work in progress
+% Could output a structure with all the structures, but given the sheer volume of data, I prefer saving as we go so that I can restart a given Navg
+% Should remove fields not used in adcp, to make it smaller/faster for the code to handle
+% Moved to a PP routine after review of single-ping processing
+narginchk(2, 3);
+if ~iscell(sample_data), error('sample_data must be a cell array'); end
+
+% no modification of data is performed on the raw FV00 dataset except
+% local time to UTC conversion
+if strcmpi(qcLevel, 'raw'), return; end
+
+% read in filter parameters
+propFile = fullfile('Preprocessing', 'adcpWorkhorseRDIensembleAveragingPP.txt');
+binInt = str2double(readProperty('binIntPP', propFile));
+
+% read dataset QC parameters if exist and override previous
+% parameters file
+currentQCtest = mfilename;
+
+%get our new structure ready:
+sample_data_averaged = sample_data;
+
+for k = 1:length(sample_data)
+    % do not process if not RDI nor Nortek
+    isRDI = false;
+    if strcmpi(sample_data{k}.meta.instrument_make, 'Teledyne RDI'), isRDI = true; end
+    if ~isRDI, continue; end
+
+    binInt = readDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'binIntPP', binInt);
+    %set up the time range
+    time = sample_data{k}.dimensions{1}.data;
+    sampInt = sample_data{k}.instrument_sample_interval; % mean sampling interval in seconds
+    N = binInt/sampInt;
+    new_datestamp = time(1)+(N*sampInt/2)/(3600*24):binInt/(3600*24):time(end); % the date stamp mid-way through each averaging interval
+
+    %PRESSURE-averaging. If pressure is recorded every ping, it
+    %is noisy, so this needs to be cleaned up first
+    ind = IMOS.find(sample_data{k}.variables,{'DEPTH'});
+    if ~isempty(ind)
+        disp('Warning, DEPTHPP routine should be run after bin averaging. Not completing bin averaging.');
+        return
+    end
+    ind = IMOS.find(sample_data{k}.variables,{'PRES_REL'});
+    if ~isempty(ind)
+        sample_data{k}.variables{ind}.data = g_boxcar_smoothNONAN(sample_data{k}.variables{ind}.data',N)';
+    end
+
+    %start bin averaging
+    isBinAverageApplied = false;
+    % now we operate on anything with TIME as a dimension.
+    fldnms = IMOS.get(sample_data{k}.variables,'name');
+    for j=1:length(fldnms)
+        if ~IMOS.var_contains_dim(sample_data{k}, fldnms{j}, 'TIME')
+            continue
+        end
+        isBinAverageApplied = true;
+        % bin average the data:
+        if binInt < sampInt
+            error('binIntPP must not be shorter than the instrument sampling interval');
+        end
+        singlePing = sample_data{k}.variables{j}.data;
+        flags = sample_data{k}.variables{j}.flags;
+        %exclude previously flagged data from the average
+        if isreal(singlePing)
+            singlePing(flags > 2) = NaN;
+        else
+            singlePing(flags > 2) = NaN + NaN*i;
+        end
+
+        inan = isnan(singlePing);
+        if ~isreal(singlePing)
+            avDat1 = g_boxcar_smoothNONAN(imag(singlePing)',N)';
+            avDat2 = g_boxcar_smoothNONAN(real(singlePing)',N)';
+            avDat = complex(avDat2,avDat1);
+        else
+            avDat = g_boxcar_smoothNONAN(singlePing',N)';
+        end
+        if isreal(avDat)
+            avDat(inan) = NaN;
+        else
+            avDat(inan) = NaN + NaN*i;
+        end
+
+        %now interpolate
+        averagedData = interp1(time,avDat,new_datestamp');
+        if isBinAverageApplied
+            % we re-assign the data
+            sample_data_averaged{k}.variables{j}.data = averagedData;
+
+            %reset flags to zero for upcoming QC
+            sample_data_averaged{k}.variables{j}.flags = zeros(size(averagedData));
+        end
+    end
+    if isBinAverageApplied
+        sample_data_averaged{k}.dimensions{1}.data = new_datestamp';
+        sample_data_averaged{k}.dimensions{1}.flags = zeros(size(new_datestamp'));
+        sample_data_averaged{k}.time_coverage_start = new_datestamp(1);
+        sample_data_averaged{k}.time_coverage_end = new_datestamp(end);
+        sample_data_averaged{k}.instrument_average_interval = binInt;
+        sample_data_averaged{k}.meta.instrument_average_interval = binInt;
+
+        %update the time dimension comment:
+        sample_data_averaged{k}.dimensions{1}.comment = ...
+            ['Time stamp corresponds to the middle of the measurement which lasts ' num2str(binInt) ' seconds.'];
+        sample_data_averaged{k}.dimensions{1}.history = [sample_data_averaged{k}.dimensions{1}.history ...
+            'Time has been bin averaged from single ping data, original seconds_to_middle_of_measurement = ' ...
+            num2str(sample_data_averaged{k}.dimensions{1}.seconds_to_middle_of_measurement)];
+        sample_data_averaged{k}.dimensions{1}.seconds_to_middle_of_measurement = 0;
+    end
+
+    %assign sample_data_averaged back to sample_data
+    sample_data{k} = sample_data_averaged{k};
+
+    % write/update dataset QC parameters
+    writeDatasetParameter(sample_data{k}.toolbox_input_file, currentQCtest, 'binIntPP', binInt);
+    binavecomment = ['adcpWorkhorseRDIensembleAveragingPP.m: Ensemble averaging to ' num2str(binInt) ' seconds preprocessing applied.'];
+
+
+    if isfield(sample_data{k}, 'history') && ~isempty(sample_data{k}.history)
+        sample_data{k}.history = sprintf('%s\n%s - %s', sample_data{k}.history, datestr(now_utc, readProperty('exportNetCDF.dateFormat')), binavecomment);
+    else
+        sample_data{k}.history = sprintf('%s - %s', datestr(now_utc, readProperty('exportNetCDF.dateFormat')), binavecomment);
+    end
+end
+
diff --git a/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.txt b/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.txt
new file mode 100644
index 000000000..0f2e2738d
--- /dev/null
+++ b/Preprocessing/adcpWorkhorseRDIensembleAveragingPP.txt
@@ -0,0 +1 @@
+binIntPP = 3600
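To make the time handling concrete, a small sketch (hypothetical sampling interval and deployment dates, not toolbox code) of the averaged time grid produced above, where each new stamp sits mid-way through its averaging interval:

sampInt = 2;                          % instrument_sample_interval, seconds
binInt = 3600;                        % default binIntPP
N = binInt/sampInt;                   % 1800 pings per ensemble
t0 = datenum(2016,10,29);             % hypothetical first ping (MATLAB datenum, days)
time = (t0:sampInt/86400:t0+2)';      % two days of single-ping time stamps
new_datestamp = time(1) + (N*sampInt/2)/86400 : binInt/86400 : time(end);
% first two stamps: 29-Oct-2016 00:30:00 and 29-Oct-2016 01:30:00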
diff --git a/Preprocessing/adcpWorkhorseVelocityBeam2EnuPP.m b/Preprocessing/adcpWorkhorseVelocityBeam2EnuPP.m
index 1f50b7ff5..53ca1bfc1 100644
--- a/Preprocessing/adcpWorkhorseVelocityBeam2EnuPP.m
+++ b/Preprocessing/adcpWorkhorseVelocityBeam2EnuPP.m
@@ -29,6 +29,10 @@
 if isempty(sample_data) || strcmpi(qcLevel, 'raw')
     return
 end
+qcSet = str2double(readProperty('toolbox.qc_set'));
+badFlag = imosQCFlag('bad', qcSet, 'flag');
+goodFlag = imosQCFlag('good', qcSet, 'flag');
+rawFlag = imosQCFlag('raw', qcSet, 'flag');
 
 sind = TeledyneADCP.find_teledyne_beam_datasets(sample_data,'HEIGHT_ABOVE_SENSOR');
 dind = TeledyneADCP.find_teledyne_beam_datasets(sample_data,'DIST_ALONG_BEAMS');
@@ -84,19 +88,28 @@ vel4_ind = IMOS.find(sample_data{ind}.variables,'VEL4');
 
     I = TeledyneADCP.workhorse_beam2inst(beam_angle);
 
+    %here we need to check for 3-beam solutions if screening has happened
+    %TODO: get a test in place for 3-beam solutions. Currently, all data is
+    %converted (4-beam solutions)
+    %first apply the flags to NaN out bad data:
+    v1 = sample_data{ind}.variables{vel1_ind}.data;
+    v2 = sample_data{ind}.variables{vel2_ind}.data;
+    v3 = sample_data{ind}.variables{vel3_ind}.data;
+    v4 = sample_data{ind}.variables{vel4_ind}.data;
+    v1(sample_data{ind}.variables{vel1_ind}.flags > 2) = NaN;
+    v2(sample_data{ind}.variables{vel2_ind}.flags > 2) = NaN;
+    v3(sample_data{ind}.variables{vel3_ind}.flags > 2) = NaN;
+    v4(sample_data{ind}.variables{vel4_ind}.flags > 2) = NaN;
+
     [v1, v2, v3, v4] = TeledyneADCP.workhorse_beam2earth(pitch_bit, ...
         beam_face_config, ...
         sample_data{ind}.variables{head_ind}.data,...
         sample_data{ind}.variables{pitch_ind}.data, ...
         sample_data{ind}.variables{roll_ind}.data,...
-        I, ...
-        sample_data{ind}.variables{vel1_ind}.data, ...
-        sample_data{ind}.variables{vel2_ind}.data, ...
-        sample_data{ind}.variables{vel3_ind}.data, ...
-        sample_data{ind}.variables{vel4_ind}.data);
+        I, v1, v2, v3, v4);
 
     Beam2EnuComment = ['adcpWorkhorseVelocityBeam2EnuPP.m: velocity data in Easting Northing Up (ENU) coordinates has been calculated from velocity data in Beams coordinates ' ...
-        'using heading and tilt information and instrument coordinate transform matrix.'];
+        'using heading and tilt information, 3-beam solutions and instrument coordinate transform matrix.'];
 
     %change in place.
     sample_data{ind}.variables{vel1_ind}.name = vel_vars{1};
@@ -121,6 +134,13 @@ end
     end
 
+    %reset flags: any flags present came from the screening tests applied to
+    %the beam velocities and have already been used in the 3-beam solutions
+    [sample_data{ind}.variables{vel1_ind}.flags, ...
+        sample_data{ind}.variables{vel2_ind}.flags, ...
+        sample_data{ind}.variables{vel3_ind}.flags, ...
+        sample_data{ind}.variables{vel4_ind}.flags] = deal(repmat(rawFlag,size(v1)));
+
     no_toolbox_tilt_correction = isfield(sample_data{ind}, 'history') && ~contains(sample_data{ind}.history, 'adcpBinMappingPP');
     if no_toolbox_tilt_correction
         %TODO: using warnmsg here would be too verbose - we probably need another function with the scope report level as argument.
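A minimal sketch (illustrative flag values, not toolbox code) of the hand-over implemented above: beam samples that failed screening become NaN, workhorse_beam2earth fills single-beam gaps via the 3-beam solution, and the resulting ENU velocities start again from the raw flag for the downstream QC tests:

rawFlag = int8(0); badFlag = int8(4);
v3 = [0.12 0.08 0.10];                % hypothetical beam-3 velocities
flags3 = [int8(1) badFlag int8(1)];   % middle sample failed a screening test
v3(flags3 > 2) = NaN;                 % NaN marks the sample for the 3-beam solution
% ... beam-to-ENU conversion reconstructs the NaN sample from the other beams ...
flags3 = repmat(rawFlag, size(v3));   % ENU flags are reset for later QC routines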
diff --git a/Util/+IMOS/+adcp/current_variables.m b/Util/+IMOS/+adcp/current_variables.m
new file mode 100644
index 000000000..df77e81a2
--- /dev/null
+++ b/Util/+IMOS/+adcp/current_variables.m
@@ -0,0 +1,47 @@
+function [cvars] = current_variables(sample_data)
+% function [cvars] = current_variables(sample_data)
+%
+% Return the variable names associated with currents.
+% Primarily for use in ringing bin removal.
+%
+% Inputs:
+%
+% sample_data [struct] - a toolbox dataset.
+%
+% Outputs:
+%
+% cvars [cell] - A cell with variable names.
+%
+% Example:
+%
+% %basic usage
+% x.variables{1}.name = 'VEL1';
+% x.variables{2}.name = 'X';
+% assert(inCell(IMOS.adcp.current_variables(x),'VEL1'))
+% assert(~inCell(IMOS.adcp.current_variables(x),'X'))
+%
+% y.variables{1}.name = 'UCUR';
+% y.variables{2}.name = 'DEPTH';
+% assert(inCell(IMOS.adcp.current_variables(y),'UCUR'))
+% assert(~inCell(IMOS.adcp.current_variables(y),'DEPTH'))
+%
+% author: hugo.oliveira@utas.edu.au
+% edited: rebecca.cowley@csiro.au
+
+narginchk(1, 1)
+
+var_pool = {
+    'VEL1', ...
+    'UCUR', ...
+    'UCUR_MAG', ...
+    'VEL2', ...
+    'VCUR', ...
+    'VCUR_MAG', ...
+    'VEL3', ...
+    'WCUR', ...
+    'VEL4', ...
+    'CSPD', ...
+    'CDIR'
+    };
+cvars = intersect(var_pool, IMOS.get(sample_data.variables, 'name'));
+end
diff --git a/Util/blocklen.m b/Util/blocklen.m
new file mode 100644
index 000000000..1967b4cbf
--- /dev/null
+++ b/Util/blocklen.m
@@ -0,0 +1,79 @@
+function[L,ia,ib,num]=blocklen(x,delta)
+%BLOCKLEN Counts the lengths of 'blocks' in an array.
+%
+% Suppose X is a column vector which contains blocks of identical
+% values, e.g. X=[0 0 0 1 1 3 2 2 2]';
+%
+% L=BLOCKLEN(X) counts the lengths of contiguous blocks containing
+% identical values of X, and returns an array L of size SIZE(X).
+% Each element of L specifies the length of the block to which the
+% corresponding element of X belongs.
+%
+% In the above example, L=[3 3 3 2 2 1 3 3 3]';
+%
+% [L,IA,IB,NUM]=BLOCKLEN(X) optionally returns indices IA and IB
+% into the first and last elements, respectively, of each block,
+% as well as the block number NUM. NUM is the same size as L.
+%
+% [...]=BLOCKLEN(X,D) defines the junction between two blocks as
+% locations where ABS(DIFF(X))>D. Thus D=1 is a 'rate of change'
+% definition, and X=[1 2 3 5 6 10 16 17 18]'; will yield the same
+% result for L as in the previous example.
+%
+% See also BLOCKNUM.
+%
+% Usage: [L,ia,ib]=blocklen(x);
+% _________________________________________________________________
+% This is part of JLAB --- type 'help jlab' for more information
+% (C) 2000--2014 J.M. Lilly --- type 'help jlab_license' for details
+
+
+% 06.09.04 JML fixed incorrect IA, IB output length
+
+if strcmpi(x,'--t')
+    blocklentest;return;
+end
+
+if nargin==1
+    delta=0;
+end
+
+ii=(1:length(x))';
+
+index=find(diff(x)~=delta);
+ia=[1;index+1];
+ib=[index;length(ii)];
+
+L=zeros(size(ii));
+L(ia)=ib-ia+1;
+L=cumsum(L);
+
+L0=zeros(size(ii));
+ib2=ib(1:end-1);
+ia2=ia(1:end-1);
+L0(ib2+1)=ib2-ia2+1;
+L0=cumsum(L0);
+L=L-L0;
+
+if nargout ==4
+    num=zeros(size(x));
+    num(ia)=1;
+    num=cumsum(num);
+end
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+function[]=blocklentest
+x= [0 0 0 1 1 3 2 2 2]';
+y= [3 3 3 2 2 1 3 3 3]';
+num= [1 1 1 2 2 3 4 4 4]';
+
+[y2,ia2,ib2,num2]=blocklen(x);
+reporttest('BLOCKLEN with D==0',all(y==y2)&&all(num==num2))
+
+
+x=[1 2 3 5 6 10 16 17 18]';
+[y2,ia2,ib2,num2]=blocklen(x,1);
+reporttest('BLOCKLEN with D==1',all(y==y2)&&all(num==num2))
+
+
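A quick usage sketch for blocklen, using the example from its own help text:

x = [0 0 0 1 1 3 2 2 2]';
[L, ia, ib, num] = blocklen(x);
% L   = [3 3 3 2 2 1 3 3 3]'   length of the block each element belongs to
% ia  = [1 4 6 7]'             first index of each block
% ib  = [3 5 6 9]'             last index of each block
% num = [1 1 1 2 2 3 4 4 4]'   block number of each element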
diff --git a/toolboxProperties.txt b/toolboxProperties.txt
index cfab7422e..be8ab8330 100644
--- a/toolboxProperties.txt
+++ b/toolboxProperties.txt
@@ -1,6 +1,6 @@
 % filename or ODBC DSN of MS-ACCESS deployment database
 % ex. : /home/ggalibert/OceanDB.mdb or imos-ddb
-toolbox.ddb = 
+toolbox.ddb = /oa-decadal-climate/work/observations/oceanobs_data/EACdata/mooring/IMOScsvfiles/
 
 % or full connection details to other kind of deployment database :
 % class name of JDBC database driver
@@ -79,13 +79,13 @@ dataFileStatusDialog.noFileFormat = bold
 dataFileStatusDialog.multipleFileFormat = italic
 
 % last selected values for start dialog
-startDialog.dataDir.timeSeries = 
+startDialog.dataDir.timeSeries = /oa-decadal-climate/work/observations/oceanobs_data/EACdata/mooring/EAC1505_1611/raw_data
 startDialog.dataDir.profile = 
-startDialog.fieldTrip.timeSeries = 
+startDialog.fieldTrip.timeSeries = EAC-2016-10-29
 startDialog.fieldTrip.profile = 
-startDialog.lowDate.timeSeries = 
+startDialog.lowDate.timeSeries = 0
 startDialog.lowDate.profile = 
-startDialog.highDate.timeSeries = 
+startDialog.highDate.timeSeries = 738607.9888
 startDialog.highDate.profile = 
 
 % last selected directory for manual data import