forked from HERMES-SOC/hermes_eea
Commit
This does everything but write to cdf. I think I am supposed to use HermesData container
Showing 10 changed files with 810 additions and 23 deletions.
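The commit note above says the remaining work is writing the output to CDF via the HermesData container. A rough sketch of what that step could look like follows; it assumes hermes_core exposes HermesData in hermes_core.timedata, wrapping an astropy TimeSeries plus a meta dict and writing a CDF through save(). The constructor arguments, required global attributes, and save() signature are assumptions to verify against the hermes_core documentation, and all data values are placeholders.

# Rough sketch only: the HermesData usage here is an assumption, not the commit's actual code.
import numpy as np
import astropy.units as u
from astropy.timeseries import TimeSeries

from hermes_core.timedata import HermesData  # assumed import path

# Minimal placeholder TimeSeries; in the real pipeline the time column would come
# from the converted CCSDS epochs and the data columns from the accumulated skymaps.
ts = TimeSeries(time_start="2024-01-01T00:00:00", time_delta=1 * u.s, n_samples=10)
ts["total_counts"] = np.zeros(10) * u.dimensionless_unscaled

meta = {
    "Descriptor": "EEA>Electron Electrostatic Analyzer",  # illustrative values only
    "Data_level": "l1a>Level 1A",
    "Data_version": "0.0.1",
}

eea_data = HermesData(timeseries=ts, meta=meta)
# cdf_path = eea_data.save(output_path="./", overwrite=True)  # assumed save() signature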
New file (133 lines added):

import os
from datetime import datetime
import math


class Global_Attrs:

    def __init__(self, version, file_id, lo_ext):
        # Linux-style date format:
        self.Generation_date = datetime.now().strftime('%a %b %d %H:%M:%S %Y')
        self.Logical_file_id = os.path.basename(file_id)
        self.Data_version = version
        self.Spacecraft_attitude_filenames = ""
        self.lo_ext = lo_ext

    def setCorrectionTableValues(DES_glbattr, corrTable, conf, usec_searchfile):
        """Saved in commit 2374012bbab301a23075b29c79ba9ab89285eb08;
        these and others aren't needed by DES_L1A.
        """
        sf_des = float(corrTable['sf_' + conf.mode])
        # DES_glbattr.Correction_table_rev = getfilerev(corrTable["dis_p2_file_path"])
        DES_glbattr.Correction_table_name = os.path.basename(conf.corr_file)
        DES_glbattr.Correction_table_scaling_factor = "{:0.5f}".format(sf_des)
        DES_glbattr.Energy_table_name = conf.energyfile
        # DES_glbattr.Lower_energy_integration_limit = "{:02f}".format(corrTable[inp['mode'] + 'lowIntegLimit'])
        # DES_glbattr.Upper_energy_integration_limit = "{:02f}".format(corrTable[inp['mode'] + 'highIntegLimit'])
        DES_glbattr.Dead_time_correction = str(math.ceil(conf.effective_deadtime / 1e-9)) + ' ns'
        # DES_glbattr.Magnetic_field_filenames = srvy_fnames
        DES_glbattr.skymap_avgN = corrTable['skymap_avgN']
        if usec_searchfile is not None and os.path.exists(usec_searchfile):
            DES_glbattr.Microsecond_offset_filename = os.path.basename(usec_searchfile)

    '''
    glb attrs needed for L1A:
    Data_version:    3.4.0, 0.0.0
    Logical_file_id: mms1_fpi_fast_l1a_des-cnts_20171221020000_v3.4.0, mms1_fpi_fast_l1a_des-cnts_00000000000000_v0.0.0
    Generation_date: Mon Oct 17 15:45:17 2022, 20151102
    '''
    def populate_global_attrs(self, myDES_L1A):
        myDES_L1A.Generation_date = self.Generation_date
        myDES_L1A.Logical_file_id = self.Logical_file_id.replace(".cdf", "")
        myDES_L1A.Data_version = self.Data_version

    '''
    POPULATE GLOBAL ATTRIBUTES
    '''
    def slow_survey_DES(DES_gblattr, corrTable, conf, MG, MGB,
                        sc_pot, photo_model, PKT_SECHDREXID):
        luts = corrTable['photomodel_luts']
        cfile = "_".join([conf.sc_id, conf.mode, 'f1ct', corrTable["tag_des"],
                          'p' + corrTable["etag_des1"] + '.txt'])

        # BURST uses both brst magnetic field files and fast srvy magnetic field files.
        # The replace() chain turns the list's repr into a space-separated string of basenames.
        magfield_filenames = str(MG.srvy_basenames).replace("[", "")[0:-1].replace("'", "").replace(",", "")
        if MGB is not None:
            brst_magfield_filenames = str(MGB.srvy_basenames).replace("[", "")[0:-1].replace("'", "").replace(",", "")
            magfield_filenames = " ".join([magfield_filenames, brst_magfield_filenames])

        energy_e0 = 100
        scp_energy_frac = 1.0
        DES_gblattr.Correction_table_name = cfile
        DES_gblattr.Correction_table_scaling_factor = "{:0.5f}".format(float(luts[5]))
        # DES_gblattr.Correction_table_rev = getfilerev(os.path.join(corrTable['pathdir'], conf.sc_id, cfile))
        correction_table_scaling_factor = "{:0.5f}".format(float(corrTable['sf_des']))  # local only, never assigned to DES_gblattr
        DES_gblattr.Energy_table_name = conf.energyfile
        DES_gblattr.Dead_time_correction = conf.effective_deadtime
        DES_gblattr.Magnetic_field_filenames = magfield_filenames
        DES_gblattr.Spacecraft_potential_filenames = str(sc_pot.scp_basenames).replace("[", "")[0:-1].replace("'", "").replace(",", "")
        DES_gblattr.Photoelectron_model_filenames = photo_model.fname
        DES_gblattr.Photoelectron_model_scaling_factor = "{:0.7f}".format(float(luts[5]))
        if conf.use_pfilter:
            DES_gblattr.Photoelectron_filter = "On"
        else:
            DES_gblattr.Photoelectron_filter = "Off"
        DES_gblattr.Lower_energy_integration_limit = "{:0.2f}eV".format(float(corrTable["deslowIntegLimit"]))
        DES_gblattr.Upper_energy_integration_limit = "{:0.2f}eV".format(float(corrTable["deshighIntegLimit"]))
        DES_gblattr.Energy_e0 = "{:0.2f}eV".format(energy_e0)  # from orbit_constants
        DES_gblattr.Scp_energy_fraction = "{:0.2f}eV".format(scp_energy_frac)
        DES_gblattr.skymap_avgN = corrTable['skymap_avgN']
        DES_gblattr.Quadrant = PKT_SECHDREXID & 3
        DES_gblattr.High_energy_extrapolation = 'Enabled'  # as this is always true
        try:
            DES_gblattr.Low_energy_extrapolation = ['Disabled', 'Enabled'][DES_gblattr.lo_ext]
        except TypeError:
            pass

    # These files are obtained only during processing...
    def build_attitude(self, defatt_filenames_obtained_from_dbcs):
        # This file changes every time a day boundary is crossed and a new transform is needed (every 10 min)
        for file in defatt_filenames_obtained_from_dbcs:
            if file is not None:
                if self.Spacecraft_attitude_filenames.find(os.path.basename(file)) < 0:
                    self.Spacecraft_attitude_filenames = (
                        self.Spacecraft_attitude_filenames + " " + os.path.basename(file)
                    )

    def slow_survey_DIS(DIS_gblattr, corrTable, conf,
                        MG, MGB, sc_pot, photo_model, PKT_SECHDREXID):
        cfile = "_".join([conf.sc_id, conf.mode, 'f1ct', corrTable["tag_dis"], 'p' + corrTable["etag_dis1"]]) + '.txt'
        luts = corrTable['photomodel_luts']
        energy_e0 = 100
        scp_energy_frac = 1.0
        DIS_gblattr.Correction_table_name = cfile
        DIS_gblattr.Correction_table_scaling_factor = "{:0.5f}".format(float(corrTable['sf_dis']))
        # DIS_gblattr.Correction_table_rev = getfilerev(os.path.join(corrTable['pathdir'], conf.sc_id, cfile))
        correction_table_scaling_factor = "{:0.5f}".format(float(corrTable['sf_dis']))  # local only, never assigned to DIS_gblattr
        DIS_gblattr.Energy_table_name = conf.energyfile
        DIS_gblattr.Dead_time_correction = conf.effective_deadtime
        DIS_gblattr.Magnetic_field_filenames = str(MG.srvy_basenames).replace("[", "")[0:-1].replace("'", "").replace(",", "")
        DIS_gblattr.Spacecraft_potential_filenames = str(sc_pot.scp_basenames).replace("[", "")[0:-1].replace("'", "").replace(",", "")
        DIS_gblattr.Photoelectron_model_filenames = photo_model.fname
        DIS_gblattr.Photoelectron_model_scaling_factor = float(luts[5])
        DIS_gblattr.Photoelectron_filter = conf.use_pfilter
        DIS_gblattr.Lower_energy_integration_limit = "{:0.2f}eV".format(float(corrTable["dislowIntegLimit"]))
        DIS_gblattr.Upper_energy_integration_limit = "{:0.2f}eV".format(float(corrTable["dishighIntegLimit"]))

        DIS_gblattr.Energy_e0 = "{:0.2f}eV".format(energy_e0)  # from orbit_constants
        DIS_gblattr.Scp_energy_fraction = "{:0.2f}eV".format(scp_energy_frac)
        DIS_gblattr.skymap_avgN = corrTable['skymap_avgN']
        DIS_gblattr.Quadrant = PKT_SECHDREXID & 3
        DIS_gblattr.Spacecraft_attitude_filenames = ''  # updated in map loop
        DIS_gblattr.High_energy_extrapolation = 'Enabled'  # as this is always true
        try:
            DIS_gblattr.Low_energy_extrapolation = ['Disabled', 'Enabled'][DIS_gblattr.lo_ext]
        except TypeError:
            pass
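For orientation, a minimal sketch of exercising the Global_Attrs class above. The version string, file path, and attitude filename are placeholders, and SimpleNamespace stands in for the real L1A product object that receives the attributes.

# Hypothetical usage of Global_Attrs; all values below are placeholders.
from types import SimpleNamespace

glb = Global_Attrs(
    version="0.0.1",
    file_id="/data/hermes_eea_l1a_20240101T000000_v0.0.1.cdf",
    lo_ext=1,
)

my_l1a = SimpleNamespace()           # stand-in for the real L1A product object
glb.populate_global_attrs(my_l1a)
print(my_l1a.Logical_file_id)        # -> hermes_eea_l1a_20240101T000000_v0.0.1

glb.build_attitude(["HERMES_DEFATT_2024001.V00", None])
print(glb.Spacecraft_attitude_filenames)   # -> ' HERMES_DEFATT_2024001.V00' (note leading space)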
New file (104 lines added):

import numpy as np
from hermes_core import log
from hermes_eea.io import EEA
from hermes_eea.util.time import ccsds_to_cdf_time


# This may eventually be put inside a multiprocessor:
def SkymapFactory(l0_cdf, energies, deflections, myEEA):
    # ['Epoch', 'Epoch_plus_var', 'Epoch_minus_var', 'hermes_eea_step_counter',
    #  'hermes_eea_counter1', 'hermes_eea_counter2', 'hermes_eea_accumulations',
    #  'hermes_eea_sector_index', 'hermes_eea_sector_label']

    # science_data:
    start_of_good_data = np.where(l0_cdf['SHEID'][:] == 1)[0][0]
    integrates_at_end = np.where(l0_cdf['SHEID'][start_of_good_data:] == 0)  # has 63 values
    # We are expecting integrates to be only at the beginning

    # The Science Data:
    stepper_table_packets = (np.where(l0_cdf['SHEID'][:] > 0))[0]
    return_package = {}
    beginning_packets = np.where((l0_cdf['STEP'][stepper_table_packets[0]:]) == 0)[0] + stepper_table_packets[0]
    package = []

    epochs = ccsds_to_cdf_time.helpConvertEEA(l0_cdf)
    try:
        # for ptr in range(0, len(beginning_packets)):
        for ptr in range(0, 7000):
            # skymap = np.zeros((beginning_packets[ptr+1]-beginning_packets[ptr], 32))
            package.append((
                l0_cdf['STEP'][beginning_packets[ptr]:beginning_packets[ptr + 1]],
                l0_cdf['ACCUM'][beginning_packets[ptr]:beginning_packets[ptr + 1]],
                l0_cdf['COUNTER1'][beginning_packets[ptr]:beginning_packets[ptr + 1]],
                l0_cdf['COUNTER2'][beginning_packets[ptr]:beginning_packets[ptr + 1]],
                epochs[beginning_packets[ptr]:beginning_packets[ptr + 1]],
                energies, deflections, ptr
            ))
    except IndexError:
        log.info("Finished last interval")

    result = []
    for pckt in package:
        packet_contents = do_eea_packet(*pckt)
        if packet_contents is not None:
            result.append(packet_contents)
    myEEA.populate(myEEA, result)

    # epoch = ccsds_to_cdf_time.helpConvert_eea(l0_cdf)
    # zero_values_past_first = np.where(l0_cdf['hermes_eea_intgr_or_stepper'][135:] == 0)[0]
    # l0_cdf['hermes_eea_step_counter'][zero_values_past_first]
    # first_packages = np.where(l0_cdf['SHEID'] > 0)


# This does an entire sweep of, nominally, 164 stepper-table entries (41 energies x 4 deflections).
def do_eea_packet(stepperTableCounter,
                  counts,
                  cnt1, cnt2,
                  epoch,
                  energies,
                  deflections, ith_FSmap):

    return_package = {}
    rows = len(stepperTableCounter)
    # skymap is already full of zeros, why do it again?
    # skymap = np.zeros((beginning_packets[ptr+1]-beginning_packets[ptr], 32))
    skymaps = []
    pulse_a = np.zeros((41, 4))
    pulse_b = np.zeros((41, 4))
    counter1 = np.zeros((41, 4))
    counter2 = np.zeros((41, 4))
    µepoch = np.zeros((41, 4))

    skymap = np.zeros((41, 4, 32))

    for row in stepperTableCounter:
        dim0 = energies[row]
        dim1 = deflections[row]
        if cnt1[row] > 3:
            pass
        skymap[dim0, dim1, :] = counts[row, 0:32]
        pulse_a[dim0, dim1] = counts[row][32]
        pulse_b[dim0, dim1] = counts[row][33]
        counter1[dim0, dim1] = cnt1[row]
        counter2[dim0, dim1] = cnt2[row]
        µepoch[dim0, dim1] = epoch[row]

    # if len(stepperTableCounter) != 64:
    #     log.info(str(ith_FSmap) + ": stepperTable rows:" + str(len(stepperTableCounter)))
    #     return None
    return_package['pulse_a'] = pulse_a
    return_package['pulse_b'] = list(pulse_b)
    return_package['counts'] = skymap
    return_package['µEpoch'] = µepoch
    return_package['Epoch'] = epoch[0]
    return_package['stats'] = np.sum(skymap)
    return_package['energies'] = energies
    return_package['sun_angles'] = deflections
    return_package['counter1'] = counter1
    return_package['counter2'] = counter2

    return return_package
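A quick sanity check of do_eea_packet in isolation, using synthetic arrays shaped like one nominal sweep: 164 stepper rows covering the 41 x 4 energy/deflection grid, with 34 accumulator columns (32 skymap bins plus the two pulse-height values). The lookup tables and epoch values are placeholders, not flight stepper tables.

# Synthetic single-sweep input for do_eea_packet; placeholder tables, not flight values.
import numpy as np

rows = 41 * 4                                        # nominal sweep length (164)
stepper = np.arange(rows)                            # row indices into the arrays below
counts = np.random.randint(0, 10, size=(rows, 34))   # 32 skymap bins + pulse A/B
cnt1 = np.zeros(rows, dtype=int)
cnt2 = np.zeros(rows, dtype=int)
epoch = np.arange(rows, dtype=np.int64)              # stand-in epoch values
energies = np.repeat(np.arange(41), 4)               # placeholder: 41 energy steps
deflections = np.tile(np.arange(4), 41)              # placeholder: 4 deflection states

pkt = do_eea_packet(stepper, counts, cnt1, cnt2, epoch,
                    energies, deflections, ith_FSmap=0)
print(pkt["counts"].shape)   # -> (41, 4, 32)
print(pkt["stats"])          # total counts accumulated into the skymap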