Commit

Merge branch 'main' into truth_data
HenningSE committed Feb 1, 2024
2 parents 6e41613 + 5cc78d3 commit 2711b37
Showing 11 changed files with 272 additions and 48 deletions.
34 changes: 34 additions & 0 deletions .github/scripts/create_readonly_utilix_config.sh
@@ -0,0 +1,34 @@
#!/bin/bash

if [ ! -z "$RUNDB_API_URL" ]
then
cat > $HOME/.xenon_config <<EOF
[basic]
logging_level=debug
[RunDB]
rundb_api_url = $RUNDB_API_URL
rundb_api_user = $RUNDB_API_USER_READONLY
rundb_api_password = $RUNDB_API_PASSWORD_READONLY
xent_url = $PYMONGO_URL
xent_user = $PYMONGO_USER
xent_password = $PYMONGO_PASSWORD
xent_database = $PYMONGO_DATABASE
pymongo_url = $PYMONGO_URL
pymongo_user = $PYMONGO_USER
pymongo_password = $PYMONGO_PASSWORD
pymongo_database = $PYMONGO_DATABASE
[scada]
scdata_url = $SCADA_URL
sclastvalue_url = $SCADA_VALUE_URL
sclogin_url = $SCADA_LOGIN_URL
straxen_username = $SCADA_USER
straxen_password = $SCADA_PWD
pmt_parameter_names = no_file_found
EOF
echo "YEAH boy, complete github actions voodoo now made you have access to our database!"
else
echo "You have no power here! Environment variables are not set, therefore no utilix file will be created"
fi
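
The script above writes a plain INI file, so it can be read back with Python's standard configparser. A minimal sketch for inspecting the result; the path and the section/option names simply mirror the heredoc above, and none of this is part of the committed script:

import configparser
import os

# Read back the config the script writes to $HOME/.xenon_config (illustrative only).
config = configparser.ConfigParser()
config.read(os.path.expanduser("~/.xenon_config"))

if config.has_section("RunDB"):
    # These values come from the RUNDB_* and PYMONGO_* environment variables.
    print(config.get("RunDB", "rundb_api_url"))
    print(config.get("RunDB", "xent_database"))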
101 changes: 101 additions & 0 deletions .github/workflows/pytest.yml
@@ -0,0 +1,101 @@
# copied from https://github.com/XENONnT/straxen/blob/master/.github/workflows/pytest.yml
# Test fuse on each PR.
# We run two types of tests:
# - Pytest -> these are the "normal" tests and should be run for all
# python versions
# - Coveralls -> this is to see if we are covering all our lines of
# code with our tests. The results get uploaded to
# coveralls.io/github/XENONnT/fuse

name: Test package

# Trigger this workflow on pushes and pull requests to the main branch
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]

jobs:
build:
runs-on: ${{ matrix.os }}
env:
HAVE_ACCESS_TO_SECRETS: ${{ secrets.RUNDB_API_URL }}
strategy:
fail-fast: false
matrix:
os: [ "ubuntu-latest" ]
python-version: [ "3.9", "3.10" ]
test: [ 'coveralls', 'pytest' ]

steps:
# Setup and installation
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Checkout repo
uses: actions/checkout@v4

- name: Install dependencies
# following https://github.com/NESTCollaboration/nestpy/blob/master/README.md
run: |
python -m pip install --upgrade pip
python -m pip install pytest coverage coveralls
git clone https://github.com/NESTCollaboration/nestpy.git
cd nestpy
git submodule update --init --recursive
pip install .
cd ..
rm -rf nestpy
- name: Start MongoDB
uses: supercharge/mongodb-github-action@1.10.0
with:
mongodb-version: 4.4.1

- name: patch utilix file
# Secrets and required files
# Patch this file if we want to have access to the database
run: bash .github/scripts/create_readonly_utilix_config.sh
env:
# RunDB
RUNDB_API_URL: ${{ secrets.RUNDB_API_URL }}
RUNDB_API_USER_READONLY: ${{ secrets.RUNDB_API_USER_READONLY }}
RUNDB_API_PASSWORD_READONLY: ${{ secrets.RUNDB_API_PASSWORD_READONLY}}
PYMONGO_URL: ${{ secrets.PYMONGO_URL }}
PYMONGO_USER: ${{ secrets.PYMONGO_USER }}
PYMONGO_PASSWORD: ${{ secrets.PYMONGO_PASSWORD }}
PYMONGO_DATABASE: ${{ secrets.PYMONGO_DATABASE }}
# SCADA
SCADA_URL: ${{ secrets.SCADA_URL }}
SCADA_VALUE_URL: ${{ secrets.SCADA_VALUE_URL }}
SCADA_USER: ${{ secrets.SCADA_USER }}
SCADA_LOGIN_URL: ${{ secrets.SCADA_LOGIN_URL }}
SCADA_PWD: ${{ secrets.SCADA_PWD }}

- name: Install fuse
run: |
pip install .
- name: Test package
# This is running a normal test
env:
TEST_MONGO_URI: 'mongodb://localhost:27017/'
run: |
coverage run --source=fuse -m pytest --durations 0
coverage report
- name: Coveralls
# Make the coverage report and upload
env:
TEST_MONGO_URI: 'mongodb://localhost:27017/'
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
if: matrix.test == 'coveralls' && env.HAVE_ACCESS_TO_SECRETS != null
run: |
coverage run --source=fuse -m pytest -v
coveralls --service=github
- name: goodbye
run: echo "tests done, bye bye"
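
The workflow starts a throwaway MongoDB service and points the tests at it via TEST_MONGO_URI. A minimal sketch of picking that variable up from a test, assuming pymongo is installed in the environment (pymongo itself is not mentioned in this diff):

import os
import pymongo

# Use the same URI the workflow exports; fall back to a local MongoDB for manual runs.
uri = os.environ.get("TEST_MONGO_URI", "mongodb://localhost:27017/")
client = pymongo.MongoClient(uri, serverSelectionTimeoutMS=2000)
print(client.list_database_names())  # fails if no MongoDB instance is reachable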
9 changes: 5 additions & 4 deletions .gitignore
@@ -1,9 +1,10 @@

fuse/__pycache__/*
__pycache__
fuse.egg-info/*
.eggs/*
build/*
.DS_Store
.vscode/*
docs/build/*
tests/resource_cache/*
resource_cache
.coverage
.hypothesis
.DS_Store
6 changes: 4 additions & 2 deletions fuse/plugins/detector_physics/s2_photon_propagation.py
@@ -374,8 +374,10 @@ def compute(self, individual_electrons, interactions_in_roi, start, end):

n_chunks = len(electron_chunks)
if n_chunks > 1:
log.info("Chunk size exceeding file size target.")
log.info("Downchunking to %d chunks" % n_chunks)
log.info(
"Chunk size exceeding file size target. "
f"Downchunking to {n_chunks} chunks"
)

last_start = start
if n_chunks>1:
6 changes: 4 additions & 2 deletions fuse/plugins/micro_physics/input.py
@@ -339,8 +339,10 @@ def output_chunk(self):
self.chunk_bounds = np.append(chunk_start[0]-self.first_chunk_left, chunk_bounds)

else:
log.warning("Only one Chunk created! Only a few events simulated? If no, your chunking parameters might not be optimal.")
log.warning("Try to decrease the source_rate or decrease the n_interactions_per_chunk")
log.warning(
"Only one Chunk created! Only a few events simulated? If no, your chunking parameters might not be optimal. "
"Try to decrease the source_rate or decrease the n_interactions_per_chunk."
)
self.chunk_bounds = [chunk_start[0] - self.first_chunk_left, chunk_end[0]+self.last_chunk_length]

source_done = False
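
The warning above names two chunking knobs, source_rate and n_interactions_per_chunk. A minimal sketch of tuning them on a fuse context; the values and the output folder are placeholders, not recommendations:

import fuse

st = fuse.context.full_chain_context(output_folder="./fuse_data")  # example path
# The warning suggests decreasing source_rate or n_interactions_per_chunk for better chunking.
st.set_config({
    "source_rate": 1,
    "n_interactions_per_chunk": 50_000,
})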
6 changes: 4 additions & 2 deletions fuse/plugins/pmt_and_daq/pmt_response_and_daq.py
@@ -364,8 +364,10 @@ def compute(self, propagated_photons, pulse_windows, start, end):

n_chunks = len(pulse_window_chunks)
if n_chunks > 1:
log.info("Chunk size exceeding file size target.")
log.info("Downchunking to %d chunks" % n_chunks)
log.info(
"Chunk size exceeding file size target. "
f"Downchunking to {n_chunks} chunks"
)

last_start = start

7 changes: 5 additions & 2 deletions pyproject.toml
@@ -18,14 +18,17 @@ classifiers = [
]
dependencies = [
"numpy",
"strax",
"pandas",
"scipy",
"scikit-learn",
"immutabledict",
"nestpy >= 2.0.0",
"timeout_decorator",
"nestpy >= 2.0.2",
"numba >= 0.57.0",
"awkward >= 2.2.1",
"uproot >= 5.0.7",
"strax >= 1.6.0",
"straxen >= 2.2.0",
]

[project.urls]
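
To check whether a local environment already satisfies the version floors listed above, the installed package metadata can be queried from the standard library; a minimal sketch with the bounds copied from the dependency list:

from importlib.metadata import version

# Minimum versions taken from the dependency list above.
minimums = {
    "nestpy": "2.0.2",
    "numba": "0.57.0",
    "awkward": "2.2.1",
    "uproot": "5.0.7",
    "strax": "1.6.0",
    "straxen": "2.2.0",
}
for package, minimum in minimums.items():
    print(f"{package}: installed {version(package)}, required >= {minimum}")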
1 change: 1 addition & 0 deletions tests/_utils.py
@@ -0,0 +1 @@
test_root_file_name = 'test_cryo_neutrons_tpc-nveto.root'
53 changes: 44 additions & 9 deletions tests/test_FullChain.py
@@ -1,61 +1,96 @@
import os
import shutil
import unittest
import fuse
import tempfile
import timeout_decorator
import fuse
import straxen
from _utils import test_root_file_name

TIMEOUT = 60


class TestFullChain(unittest.TestCase):

@classmethod
def setUpClass(self):
def setUpClass(cls):

self.temp_dir = tempfile.TemporaryDirectory()
cls.temp_dir = tempfile.TemporaryDirectory()

self.test_context = fuse.context.full_chain_context(output_folder = self.temp_dir.name)
cls.test_context = fuse.context.full_chain_context(output_folder = cls.temp_dir.name)

self.test_context.set_config({"path": "/project2/lgrandi/xenonnt/simulations/testing",
"file_name": "pmt_neutrons_100.root",
cls.test_context.set_config({"path": cls.temp_dir.name,
"file_name": test_root_file_name,
"entry_stop": 5,
})

self.run_number = "TestRun_00000"
cls.run_number = "TestRun_00000"

@classmethod
def tearDownClass(self):
def tearDownClass(cls):

cls.temp_dir.cleanup()

self.temp_dir.cleanup()
def setUp(self):
downloader = straxen.MongoDownloader(store_files_at=(self.temp_dir.name,))
downloader.download_single(test_root_file_name, human_readable_file_name=True)

assert os.path.exists(os.path.join(self.temp_dir.name, test_root_file_name))

def tearDown(self):

# self.temp_dir.cleanup()
shutil.rmtree(self.temp_dir.name)
os.makedirs(self.temp_dir.name)

@timeout_decorator.timeout(TIMEOUT, exception_message='S1PhotonHits timed out')
def test_S1PhotonHits(self):

self.test_context.make(self.run_number, "s1_photons")

@timeout_decorator.timeout(TIMEOUT, exception_message='S1PhotonPropagation timed out')
def test_S1PhotonPropagation(self):

self.test_context.make(self.run_number, "propagated_s1_photons")

@timeout_decorator.timeout(TIMEOUT, exception_message='ElectronDrift timed out')
def test_ElectronDrift(self):

self.test_context.make(self.run_number, "drifted_electrons")

@timeout_decorator.timeout(TIMEOUT, exception_message='ElectronExtraction timed out')
def test_ElectronExtraction(self):

self.test_context.make(self.run_number, "extracted_electrons")

@timeout_decorator.timeout(TIMEOUT, exception_message='ElectronTiming timed out')
def test_ElectronTiming(self):

self.test_context.make(self.run_number, "electron_time")

@timeout_decorator.timeout(TIMEOUT, exception_message='SecondaryScintillation timed out')
def test_SecondaryScintillation(self):

self.test_context.make(self.run_number, "s2_photons")
self.test_context.make(self.run_number, "s2_photons_sum")

@timeout_decorator.timeout(TIMEOUT, exception_message='S2PhotonPropagation timed out')
def test_S2PhotonPropagation(self):

self.test_context.make(self.run_number, "propagated_s2_photons")

@timeout_decorator.timeout(TIMEOUT, exception_message='PMTAfterPulses timed out')
def test_PMTAfterPulses(self):

self.test_context.make(self.run_number, "pmt_afterpulses")

@timeout_decorator.timeout(TIMEOUT, exception_message='PulseWindow timed out')
def test_PulseWindow(self):

self.test_context.make(self.run_number, "pulse_windows")
self.test_context.make(self.run_number, "pulse_ids")

@timeout_decorator.timeout(TIMEOUT, exception_message='PMTResponseAndDAQ timed out')
def test_PMTResponseAndDAQ(self):

self.test_context.make(self.run_number, "raw_records")
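
The full-chain tests can also be invoked on their own from a Python session; a minimal sketch that mirrors the CI invocation above and assumes fuse, straxen, timeout_decorator and pytest are installed and the test ROOT file can be downloaded:

import sys
import pytest

# Run only the full-chain tests with verbose output and per-test durations, as in CI.
sys.exit(pytest.main(["-v", "--durations", "0", "tests/test_FullChain.py"]))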