diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 531977ab0..b9807cee3 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -9,3 +9,8 @@ updates:
     directory: '/provisioning/roles/publicdb/files'
     schedule:
       interval: 'weekly'
+
+  - package-ecosystem: 'docker'
+    directory: '/'
+    schedule:
+      interval: 'weekly'
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index b7bee9a09..48fb70d90 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -30,7 +30,7 @@ jobs:
       - run: make devinstall
       - run: make coveragetests
 
-  flake:
+  lint:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -40,7 +40,9 @@ jobs:
           cache: pip
           cache-dependency-path: 'requirements-dev.txt'
       - run: pip install --upgrade --upgrade-strategy eager -r requirements-dev.txt
-      - run: make flaketest
+      - run: make linttest
+        env:
+          RUFF_OUTPUT_FORMAT: github
 
   ansible-lint:
     runs-on: ubuntu-latest
@@ -62,7 +64,7 @@ jobs:
 
   docs:
     if: github.ref == 'refs/heads/master'
-    needs: [tests, flake, ansible-lint, hadolint]
+    needs: [tests, lint, ansible-lint, hadolint]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
diff --git a/Makefile b/Makefile
index 452276f87..bf82a390a 100644
--- a/Makefile
+++ b/Makefile
@@ -1,23 +1,29 @@
-.PHONY: devinstall test unittests coveragetests flaketest doctest ansibletest
-
+.PHONY: devinstall
 devinstall:
 	pip install --upgrade --upgrade-strategy eager -r requirements-dev.txt
 	conda install --quiet --yes --channel conda-forge --file provisioning/roles/publicdb/files/requirements-conda.txt
 	pip install -r provisioning/roles/publicdb/files/requirements-pip.txt
 
-test: coveragetests flaketest doctest ansibletest
+.PHONY: test
+test: coveragetests linttest doctest ansibletest
 
+.PHONY: unittests
 unittests:
 	coverage run ./manage.py test $(tests)
 
+.PHONY: coveragetests
 coveragetests: unittests
 	coverage report
 
-flaketest:
-	flake8
+.PHONY: linttest
+linttest:
+	ruff check .
+	typos .
 
+.PHONY: doctest
 doctest:
 	PYTHONPATH=$(CURDIR):$(PYTHONPATH) sphinx-build -anW doc doc/_build/html
 
+.PHONY: ansibletest
 ansibletest:
 	ansible-lint -p provisioning/playbook.yml || true
diff --git a/manage.py b/manage.py
index 148aac96d..56c08a5de 100755
--- a/manage.py
+++ b/manage.py
@@ -2,8 +2,8 @@
 import os
 import sys
 
-if __name__ == "__main__":
-    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "publicdb.settings")
+if __name__ == '__main__':
+    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'publicdb.settings')
 
     from django.core.management import execute_from_command_line
diff --git a/publicdb/analysissessions/models.py b/publicdb/analysissessions/models.py
index 04031952d..e9118dbca 100644
--- a/publicdb/analysissessions/models.py
+++ b/publicdb/analysissessions/models.py
@@ -26,35 +26,35 @@ class AnalysisSession(models.Model):
     starts = models.DateTimeField()
     ends = models.DateTimeField()
 
-    def in_progress(self):
-        return self.starts <= datetime.datetime.now() < self.ends
+    class Meta:
+        verbose_name = 'Analysis session'
+        verbose_name_plural = 'Analysis sessions'
 
-    in_progress.boolean = True
+    def __str__(self):
+        return self.title
 
     def save(self, *args, **kwargs):
         self.hash = hashlib.md5(self.slug.encode('utf-8')).hexdigest()
         super().save(*args, **kwargs)
         Student(session=self, name='Test student').save()
 
-    def __str__(self):
-        return self.title
+    def in_progress(self):
+        return self.starts <= datetime.datetime.now() < self.ends
 
-    class Meta:
-        verbose_name = 'Analysis session'
-        verbose_name_plural = 'Analysis sessions'
+    in_progress.boolean = True
 
 
 class Student(models.Model):
     session = models.ForeignKey(AnalysisSession, models.CASCADE, related_name='students')
     name = models.CharField(max_length=255)
 
-    def __str__(self):
-        return f'{self.session} - {self.name}'
-
     class Meta:
         verbose_name = 'Student'
         verbose_name_plural = 'Students'
 
+    def __str__(self):
+        return f'{self.session} - {self.name}'
+
 
 class AnalyzedCoincidence(models.Model):
     session = models.ForeignKey(AnalysisSession, models.CASCADE, related_name='analyzed_coincidences')
@@ -68,14 +68,14 @@ class AnalyzedCoincidence(models.Model):
     phi = models.FloatField(null=True, blank=True)
     error_estimate = models.FloatField(null=True, blank=True)
 
-    def __str__(self):
-        return f"{self.coincidence} - {self.student}"
-
     class Meta:
         verbose_name = 'Analyzed coincidence'
         verbose_name_plural = 'Analyzed coincidences'
         ordering = ['coincidence']
 
+    def __str__(self):
+        return f'{self.coincidence} - {self.student}'
+
 
 class SessionRequest(models.Model):
     first_name = models.CharField(max_length=255)
@@ -100,7 +100,7 @@ class Meta:
 
     @property
     def name(self):
-        return f"{self.first_name} {self.sur_name}"
+        return f'{self.first_name} {self.sur_name}'
 
     def create_session(self):
         self.session_pending = False
@@ -157,7 +157,7 @@ def find_coincidence(self, date, session):
         all_coincidences = cq.any(stations)
         coincidences = cq.events_from_stations(all_coincidences, stations, n=3)
         for coincidence in coincidences:
-            # Todo: Double check for multiple events from same station,
+            # TODO: Double check for multiple events from same station,
             self.save_coincidence(coincidence, session)
             number_of_coincidences += 1
 
@@ -212,14 +212,14 @@ def generate_url(self):
     def sendmail_request(self):
         subject = 'HiSPARC analysis session request'
         message = textwrap.dedent(
-            f'''\
+            f"""\
             Hello {self.name},
 
            Please click on this link to confirm your request for an analysis session with jSparc:
            https://data.hisparc.nl/analysis-session/request/{self.url}/
 
            Greetings,
-            The HiSPARC Team'''
The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) @@ -229,7 +229,7 @@ def sendmail_request(self): def sendmail_created(self): subject = 'HiSPARC analysis session created' message = textwrap.dedent( - f'''\ + f"""\ Hello {self.name}, Your analysis session for jSparc has been created. @@ -244,7 +244,7 @@ def sendmail_created(self): https://data.hisparc.nl/analysis-session/{slugify(self.sid)}/data/ Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) @@ -252,7 +252,7 @@ def sendmail_created(self): def sendmail_created_less(self): subject = 'HiSPARC analysis session created with less events' message = textwrap.dedent( - f'''\ + f"""\ Hello {self.name}, Your analysis session for jSparc has been created. @@ -269,7 +269,7 @@ def sendmail_created_less(self): https://data.hisparc.nl/analysis-session/{slugify(self.sid)}/data/ Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) @@ -277,7 +277,7 @@ def sendmail_created_less(self): def sendmail_zero(self): subject = 'HiSPARC analysis session creation failed' message = textwrap.dedent( - f'''\ + f"""\ Hello {self.name}, Your analysis session for jSparc could not be created. @@ -285,7 +285,7 @@ def sendmail_zero(self): Please try selecting a different cluster or date. Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) diff --git a/publicdb/analysissessions/urls.py b/publicdb/analysissessions/urls.py index dd83e8a5b..e2f2307f8 100644 --- a/publicdb/analysissessions/urls.py +++ b/publicdb/analysissessions/urls.py @@ -4,10 +4,10 @@ app_name = 'sessions' urlpatterns = [ - path('/data/', views.data_display, name="data_display"), - path('request/', views.request_form, name="request"), - path('request/validate/', views.validate_request_form, name="validate"), - path('request//', views.confirm_request, name="confirm"), - path('get_coincidence/', views.get_coincidence, name="get_coincidence"), - path('result/', views.result, name="result"), + path('/data/', views.data_display, name='data_display'), + path('request/', views.request_form, name='request'), + path('request/validate/', views.validate_request_form, name='validate'), + path('request//', views.confirm_request, name='confirm'), + path('get_coincidence/', views.get_coincidence, name='get_coincidence'), + path('result/', views.result, name='result'), ] diff --git a/publicdb/analysissessions/views.py b/publicdb/analysissessions/views.py index c6b39e7c2..45e341e8c 100644 --- a/publicdb/analysissessions/views.py +++ b/publicdb/analysissessions/views.py @@ -45,7 +45,7 @@ def get_coincidence(request): raise else: if not session.in_progress(): - return error_json(404, "The requested session has not started yet or is already expired.") + return error_json(404, 'The requested session has not started yet or is already expired.') if not student_name: student = Student.objects.get(session=session, name='Test student') @@ -61,7 +61,7 @@ def get_coincidence(request): analyzed_coincidence.student = student analyzed_coincidence.save() except IndexError: - return error_json(404, "No unanalysed coincidences available, request a new session.") + return error_json(404, 'No unanalysed coincidences available, request a new session.') events = 
     response = data_json(analyzed_coincidence, events)
 
@@ -82,20 +82,20 @@ def get_events(analyzed_coincidence):
             continue
         timestamp = datetime_to_gps(datetime.combine(event.date, event.time))
-        event_dict = dict(
-            timestamp=timestamp,
-            nanoseconds=event.nanoseconds,
-            number=event.station.number,
-            latitude=config.gps_latitude,
-            longitude=config.gps_longitude,
-            altitude=config.gps_altitude,
-            status='on',
-            detectors=len(event.traces),
-            traces=event.traces,
-            pulseheights=event.pulseheights,
-            integrals=event.integrals,
-            mips=[ph / 200.0 if ph > 0 else ph for ph in event.pulseheights],
-        )
+        event_dict = {
+            'timestamp': timestamp,
+            'nanoseconds': event.nanoseconds,
+            'number': event.station.number,
+            'latitude': config.gps_latitude,
+            'longitude': config.gps_longitude,
+            'altitude': config.gps_altitude,
+            'status': 'on',
+            'detectors': len(event.traces),
+            'traces': event.traces,
+            'pulseheights': event.pulseheights,
+            'integrals': event.integrals,
+            'mips': [ph / 200.0 if ph > 0 else ph for ph in event.pulseheights],
+        }
         events.append(event_dict)
 
     return events
@@ -103,12 +103,12 @@ def get_events(analyzed_coincidence):
 def data_json(coincidence, events):
     """Construct json with data for jSparc to display"""
     timestamp = datetime_to_gps(datetime.combine(coincidence.coincidence.date, coincidence.coincidence.time))
-    data = dict(
-        pk=coincidence.pk,
-        timestamp=timestamp,
-        nanoseconds=coincidence.coincidence.nanoseconds,
-        events=events,
-    )
+    data = {
+        'pk': coincidence.pk,
+        'timestamp': timestamp,
+        'nanoseconds': coincidence.coincidence.nanoseconds,
+        'events': events,
+    }
     response = HttpResponse(json.dumps(data), content_type='application/json')
     response['Access-Control-Allow-Origin'] = '*'
     return response
@@ -116,7 +116,7 @@ def data_json(coincidence, events):
 
 def error_json(error_code, message):
     """Construct error response json for jSparc requests"""
-    data = dict(message=message, code=error_code)
+    data = {'message': message, 'code': error_code}
     response = HttpResponse(json.dumps(data), status=error_code, content_type='application/json')
     response['Access-Control-Allow-Origin'] = '*'
     return response
@@ -143,7 +143,7 @@ def top_lijst(slug):
                 'wgh_error': wgh_error,
                 'min_error': min_error,
                 'num_events': num_events,
-            }
+            },
         )
     return sorted(scores, key=operator.itemgetter('wgh_error'))
 
@@ -192,17 +192,17 @@ def result(request):
         rank = [x['name'] for x in ranking].index(student_name) + 1
     except ValueError:
         rank = None
-    msg = "OK [result stored]"
-    response = HttpResponse(json.dumps(dict(msg=msg, rank=rank)), content_type='application/json')
+    msg = 'OK [result stored]'
+    response = HttpResponse(json.dumps({'msg': msg, 'rank': rank}), content_type='application/json')
     response['Access-Control-Allow-Origin'] = '*'
     return response
 
 
 def test_result():
     """Generate random ranking for test sessions"""
-    msg = "Test session, result not stored"
+    msg = 'Test session, result not stored'
     rank = randint(1, 10)
-    response = HttpResponse(json.dumps(dict(msg=msg, rank=rank)), content_type='application/json')
+    response = HttpResponse(json.dumps({'msg': msg, 'rank': rank}), content_type='application/json')
     response['Access-Control-Allow-Origin'] = '*'
     return response
diff --git a/publicdb/api/urls.py b/publicdb/api/urls.py
index 13699de9e..7632bfc3c 100644
--- a/publicdb/api/urls.py
+++ b/publicdb/api/urls.py
@@ -9,121 +9,121 @@
 app_name = 'api'
 
 urlpatterns = [
-    path('', views.man, name="man"),
+    path('', views.man, name='man'),
     path('network/status/', views.network_status),
-    path('stations/', views.stations, name="stations"),
-    path('subclusters/', views.subclusters, name="subclusters"),
-    path('clusters/', views.clusters, name="clusters"),
-    path('countries/', views.countries, name="countries"),
-    path('subclusters//', views.stations, name="stations"),
-    path('clusters//', views.subclusters, name="subclusters"),
-    path('countries//', views.clusters, name="clusters"),
-    path('stations/data/', views.stations_with_data, {'type': 'events'}, name="data_stations"),
-    path('stations/data//', views.stations_with_data, {'type': 'events'}, name="data_stations"),
+    path('stations/', views.stations, name='stations'),
+    path('subclusters/', views.subclusters, name='subclusters'),
+    path('clusters/', views.clusters, name='clusters'),
+    path('countries/', views.countries, name='countries'),
+    path('subclusters//', views.stations, name='stations'),
+    path('clusters//', views.subclusters, name='subclusters'),
+    path('countries//', views.clusters, name='clusters'),
+    path('stations/data/', views.stations_with_data, {'type': 'events'}, name='data_stations'),
+    path('stations/data//', views.stations_with_data, {'type': 'events'}, name='data_stations'),
     path(
         'stations/data///',
         views.stations_with_data,
         {'type': 'events'},
-        name="data_stations",
+        name='data_stations',
     ),
-    path('stations/data//', views.stations_with_data, {'type': 'events'}, name="data_stations"),
-    path('stations/weather/', views.stations_with_data, {'type': 'weather'}, name="weather_stations"),
+    path('stations/data//', views.stations_with_data, {'type': 'events'}, name='data_stations'),
+    path('stations/weather/', views.stations_with_data, {'type': 'weather'}, name='weather_stations'),
     path(
         'stations/weather//',
         views.stations_with_data,
         {'type': 'weather'},
-        name="weather_stations",
+        name='weather_stations',
     ),
     path(
         'stations/weather///',
         views.stations_with_data,
         {'type': 'weather'},
-        name="weather_stations",
+        name='weather_stations',
     ),
     path(
         'stations/weather//',
         views.stations_with_data,
         {'type': 'weather'},
-        name="weather_stations",
+        name='weather_stations',
     ),
-    path('stations/singles/', views.stations_with_data, {'type': 'singles'}, name="singles_stations"),
+    path('stations/singles/', views.stations_with_data, {'type': 'singles'}, name='singles_stations'),
     path(
         'stations/singles//',
         views.stations_with_data,
         {'type': 'singles'},
-        name="singles_stations",
+        name='singles_stations',
    ),
     path(
         'stations/singles///',
         views.stations_with_data,
         {'type': 'singles'},
-        name="singles_stations",
+        name='singles_stations',
     ),
     path(
         'stations/singles//',
         views.stations_with_data,
         {'type': 'singles'},
-        name="singles_stations",
+        name='singles_stations',
     ),
-    path('station//', views.station, name="station"),
-    path('station///', views.station, name="station"),
-    path('station//data/', views.has_data, {'type': 'events'}, name="has_data"),
-    path('station//data//', views.has_data, {'type': 'events'}, name="has_data"),
+    path('station//', views.station, name='station'),
+    path('station///', views.station, name='station'),
+    path('station//data/', views.has_data, {'type': 'events'}, name='has_data'),
+    path('station//data//', views.has_data, {'type': 'events'}, name='has_data'),
     path(
         'station//data///',
         views.has_data,
         {'type': 'events'},
-        name="has_data",
+        name='has_data',
     ),
-    path('station//data//', views.has_data, {'type': 'events'}, name="has_data"),
-    path('station//weather/', views.has_data, {'type': 'weather'}, name="has_weather"),
+    path('station//data//', views.has_data, {'type': 'events'}, name='has_data'),
+    path('station//weather/', views.has_data, {'type': 'weather'}, name='has_weather'),
     path(
         'station//weather//',
         views.has_data,
         {'type': 'weather'},
-        name="has_weather",
+        name='has_weather',
     ),
     path(
         'station//weather///',
         views.has_data,
         {'type': 'weather'},
-        name="has_weather",
+        name='has_weather',
     ),
     path(
         'station//weather//',
         views.has_data,
         {'type': 'weather'},
-        name="has_weather",
+        name='has_weather',
     ),
-    path('station//singles/', views.has_data, {'type': 'singles'}, name="has_singles"),
+    path('station//singles/', views.has_data, {'type': 'singles'}, name='has_singles'),
     path(
         'station//singles//',
         views.has_data,
         {'type': 'singles'},
-        name="has_singles",
+        name='has_singles',
     ),
     path(
         'station//singles///',
         views.has_data,
         {'type': 'singles'},
-        name="has_singles",
+        name='has_singles',
     ),
     path(
         'station//singles//',
         views.has_data,
         {'type': 'singles'},
-        name="has_singles",
+        name='has_singles',
     ),
-    path('station//config/', views.config, name="config"),
-    path('station//config//', views.config, name="config"),
-    path('station//num_events/', views.num_events, name="num_events"),
-    path('station//num_events//', views.num_events, name="num_events"),
+    path('station//config/', views.config, name='config'),
+    path('station//config//', views.config, name='config'),
+    path('station//num_events/', views.num_events, name='num_events'),
+    path('station//num_events//', views.num_events, name='num_events'),
     path(
         'station//num_events///',
         views.num_events,
-        name="num_events",
+        name='num_events',
     ),
-    path('station//num_events//', views.num_events, name="num_events"),
-    path('station//num_events///', views.num_events, name="num_events"),
-    path('station//trace//', views.get_event_traces, name="event_traces"),
+    path('station//num_events//', views.num_events, name='num_events'),
+    path('station//num_events///', views.num_events, name='num_events'),
+    path('station//trace//', views.get_event_traces, name='event_traces'),
 ]
diff --git a/publicdb/api/views.py b/publicdb/api/views.py
index eab8ee8ea..994708ab2 100644
--- a/publicdb/api/views.py
+++ b/publicdb/api/views.py
@@ -24,24 +24,24 @@ def man(request):
     """Give overview of the possible urls"""
 
     man = {
-        "base_url": 'https://data.hisparc.nl/api/',
-        "stations": 'stations/',
-        "stations_in_subcluster": 'subclusters/{subcluster_number}/',
-        "subclusters": 'subclusters/',
-        "subclusters_in_cluster": 'clusters/{cluster_number}/',
-        "clusters": 'clusters/',
-        "clusters_in_country": 'countries/{country_number}/',
-        "countries": 'countries/',
-        "stations_with_data": 'stations/data/{year}/{month}/{day}/',
-        "stations_with_weather": 'stations/weather/{year}/{month}/{day}/',
-        "stations_with_singles": 'stations/singles/{year}/{month}/{day}/',
-        "station_info": 'station/{station_number}/{year}/{month}/{day}/',
-        "has_data": 'station/{station_number}/data/{year}/{month}/{day}/',
-        "has_weather": 'station/{station_number}/weather/{year}/{month}/{day}/',
-        "has_singles": 'station/{station_number}/singles/{year}/{month}/{day}/',
-        "configuration": 'station/{station_number}/config/{year}/{month}/{day}/',
-        "number_of_events": 'station/{station_number}/num_events/{year}/{month}/{day}/{hour}/',
-        "event_trace": 'station/{station_number}/trace/{ext_timestamp}/',
+        'base_url': 'https://data.hisparc.nl/api/',
+        'stations': 'stations/',
+        'stations_in_subcluster': 'subclusters/{subcluster_number}/',
+        'subclusters': 'subclusters/',
+        'subclusters_in_cluster': 'clusters/{cluster_number}/',
+        'clusters': 'clusters/',
+        'clusters_in_country': 'countries/{country_number}/',
+        'countries': 'countries/',
+        'stations_with_data': 'stations/data/{year}/{month}/{day}/',
+        'stations_with_weather': 'stations/weather/{year}/{month}/{day}/',
+        'stations_with_singles': 'stations/singles/{year}/{month}/{day}/',
+        'station_info': 'station/{station_number}/{year}/{month}/{day}/',
+        'has_data': 'station/{station_number}/data/{year}/{month}/{day}/',
+        'has_weather': 'station/{station_number}/weather/{year}/{month}/{day}/',
+        'has_singles': 'station/{station_number}/singles/{year}/{month}/{day}/',
+        'configuration': 'station/{station_number}/config/{year}/{month}/{day}/',
+        'number_of_events': 'station/{station_number}/num_events/{year}/{month}/{day}/{hour}/',
+        'event_trace': 'station/{station_number}/trace/{ext_timestamp}/',
     }
 
     return json_dict(man)
@@ -101,7 +101,7 @@ def station(request, station_number, year=None, month=None, date=None):
                 'alpha': layout.detector_1_alpha,
                 'height': layout.detector_1_height,
                 'beta': layout.detector_1_beta,
-            }
+            },
         ]
         scintillators.append(
             {
@@ -109,7 +109,7 @@ def station(request, station_number, year=None, month=None, date=None):
                 'alpha': layout.detector_2_alpha,
                 'height': layout.detector_2_height,
                 'beta': layout.detector_2_beta,
-            }
+            },
         )
 
         if station.number_of_detectors() == 4:
@@ -119,7 +119,7 @@ def station(request, station_number, year=None, month=None, date=None):
                     'alpha': layout.detector_3_alpha,
                     'height': layout.detector_3_height,
                     'beta': layout.detector_3_beta,
-                }
+                },
             )
             scintillators.append(
                 {
@@ -127,7 +127,7 @@ def station(request, station_number, year=None, month=None, date=None):
                     'alpha': layout.detector_4_alpha,
                     'height': layout.detector_4_height,
                     'beta': layout.detector_4_beta,
-                }
+                },
             )
 
         station_info = {
@@ -395,7 +395,7 @@ def config(request, station_number, date=None):
     except (Configuration.DoesNotExist, Summary.DoesNotExist):
         return HttpResponseNotFound()
 
-    config = serializers.serialize("json", [configuration])
+    config = serializers.serialize('json', [configuration])
     config = json.loads(config)
     try:
         config = config[0]['fields']
diff --git a/publicdb/coincidences/models.py b/publicdb/coincidences/models.py
index 54645eceb..62d67a577 100644
--- a/publicdb/coincidences/models.py
+++ b/publicdb/coincidences/models.py
@@ -5,7 +5,6 @@
 
 
 class SerializedDataField(models.Field):
-
     system_check_removed_details = {
         'hint': 'Use ArrayField instead.',
     }
@@ -20,17 +19,17 @@ class Coincidence(models.Model):
     time = models.TimeField()
     nanoseconds = models.IntegerField()
 
-    def num_events(self):
-        return self.events.count()
-
-    def __str__(self):
-        return f'{self.num_events()}-fold - {self.date} {self.time} {self.nanoseconds}'
-
     class Meta:
         verbose_name = 'Coincidence'
         verbose_name_plural = 'Coincidences'
         ordering = ['date', 'time', 'nanoseconds']
 
+    def __str__(self):
+        return f'{self.num_events()}-fold - {self.date} {self.time} {self.nanoseconds}'
+
+    def num_events(self):
+        return self.events.count()
+
 
 class Event(models.Model):
     date = models.DateField()
diff --git a/publicdb/default/management/commands/createfakedata.py b/publicdb/default/management/commands/createfakedata.py
index e52e4a36e..3e7af6744 100644
--- a/publicdb/default/management/commands/createfakedata.py
+++ b/publicdb/default/management/commands/createfakedata.py
@@ -13,7 +13,7 @@ class Command(BaseCommand):
 
     def handle(*args, **options):
         if not settings.DEBUG:
-            raise Exception('Never run this on a production database!!')
+            raise RuntimeError('Never run this on a production database!!')
 
         with factory.Faker.override_default_locale('nl_NL'):
             # Inforecords
@@ -27,7 +27,9 @@ def handle(*args, **options):
             ]
             subclusters = [
                 inforecords_factories.ClusterFactory(
-                    country=cluster.country, parent=cluster, number=cluster_number + cluster.number
+                    country=cluster.country,
+                    parent=cluster,
+                    number=cluster_number + cluster.number,
                 )
                 for cluster in clusters
                 for cluster_number in range(100, 201, 100)
diff --git a/publicdb/default/static/styles/leaflet.css b/publicdb/default/static/styles/leaflet.css
index 5453cd737..48ad106d7 100644
--- a/publicdb/default/static/styles/leaflet.css
+++ b/publicdb/default/static/styles/leaflet.css
@@ -39,7 +39,7 @@
 .leaflet-marker-shadow {
 	display: block;
 	}
-/* .leaflet-container svg: reset svg max-width decleration shipped in Joomla! (joomla.org) 3.x */
+/* .leaflet-container svg: reset svg max-width declaration shipped in Joomla! (joomla.org) 3.x */
 /* .leaflet-container img: map is broken in FF if you have max-width: 100% on tiles */
 .leaflet-container .leaflet-overlay-pane svg,
 .leaflet-container .leaflet-marker-pane img,
diff --git a/publicdb/histograms/admin.py b/publicdb/histograms/admin.py
index d62512136..c6c7d2b2b 100644
--- a/publicdb/histograms/admin.py
+++ b/publicdb/histograms/admin.py
@@ -64,23 +64,23 @@ class NetworkSummaryAdmin(admin.ModelAdmin):
     def unset_update_flag(self, request, qs):
         qs.update(needs_update=False)
 
-    unset_update_flag.short_description = "Unset needs_update"
+    unset_update_flag.short_description = 'Unset needs_update'
 
     def unset_coincidences_flag(self, request, qs):
         qs.update(needs_update_coincidences=False)
 
-    unset_coincidences_flag.short_description = "Unset " "needs_update_coincidences"
+    unset_coincidences_flag.short_description = 'Unset needs_update_coincidences'
 
     def set_update_flag(self, request, qs):
         qs.update(needs_update=True)
 
-    set_update_flag.short_description = "Set needs_update"
+    set_update_flag.short_description = 'Set needs_update'
 
     def set_coincidences_flag(self, request, qs):
         """Only set flags if num coincidences is not null"""
 
         (qs.filter(num_coincidences__isnull=False).update(needs_update_coincidences=True))
 
-    set_coincidences_flag.short_description = "Set needs_update_coincidences"
+    set_coincidences_flag.short_description = 'Set needs_update_coincidences'
 
 
 @admin.register(models.Summary)
@@ -130,34 +130,34 @@ class SummaryAdmin(admin.ModelAdmin):
     def unset_update_flag(self, request, qs):
         qs.update(needs_update=False)
 
-    unset_update_flag.short_description = "Unset needs_update"
+    unset_update_flag.short_description = 'Unset needs_update'
 
     def unset_events_flag(self, request, qs):
         qs.update(needs_update_events=False)
 
-    unset_events_flag.short_description = "Unset needs_update_events"
+    unset_events_flag.short_description = 'Unset needs_update_events'
 
     def unset_config_flag(self, request, qs):
         qs.update(needs_update_config=False)
 
-    unset_config_flag.short_description = "Unset needs_update_config"
+    unset_config_flag.short_description = 'Unset needs_update_config'
 
     def set_update_flag(self, request, qs):
         qs.update(needs_update=True)
 
-    set_update_flag.short_description = "Set needs_update"
+    set_update_flag.short_description = 'Set needs_update'
 
     def set_events_flag(self, request, qs):
         """Only set flags if num events is not null"""
 
         qs.filter(num_events__isnull=False).update(needs_update_events=True)
 
-    set_events_flag.short_description = "Set needs_update_events"
+    set_events_flag.short_description = 'Set needs_update_events'
 
     def set_config_flag(self, request, qs):
         """Only set flags if num config is not null"""
null""" qs.filter(num_config__isnull=False).update(needs_update_config=True) - set_config_flag.short_description = "Set needs_update_config" + set_config_flag.short_description = 'Set needs_update_config' @admin.register(models.Configuration) diff --git a/publicdb/histograms/checks.py b/publicdb/histograms/checks.py index 625da5cdf..9b04e78e4 100644 --- a/publicdb/histograms/checks.py +++ b/publicdb/histograms/checks.py @@ -72,7 +72,7 @@ def process_possible_stations_for_date(date, station_list): """ logger.info('Now processing %s', date) - unique_table_list = {table_name for table_list in station_list.values() for table_name in table_list.keys()} + unique_table_list = {table_name for table_list in station_list.values() for table_name in table_list} for table_name in unique_table_list: process_possible_tables_for_network(date, table_name) for station, table_list in station_list.items(): @@ -88,7 +88,7 @@ def process_possible_tables_for_network(date, table_name): """ try: update_flag_attr = f'needs_update_{NETWORK_TABLES[table_name]}' - logger.info("New %s data on %s.", table_name, date.strftime("%a %b %d %Y")) + logger.info('New %s data on %s.', table_name, date.strftime('%a %b %d %Y')) network_summary, _ = NetworkSummary.objects.get_or_create(date=date) setattr(network_summary, update_flag_attr, True) network_summary.needs_update = True @@ -119,9 +119,9 @@ def check_table_and_update_flags(table_name, num_events, summary): if getattr(summary, number_of_events_attr) != num_events: logger.info( - "New data (%s) on %s for station %d", + 'New data (%s) on %s for station %d', table_name, - summary.date.strftime("%a %b %d %Y"), + summary.date.strftime('%a %b %d %Y'), summary.station.number, ) # only record number of events for *some* tables at this time diff --git a/publicdb/histograms/esd.py b/publicdb/histograms/esd.py index 6e024712c..d741ae9e1 100644 --- a/publicdb/histograms/esd.py +++ b/publicdb/histograms/esd.py @@ -178,7 +178,12 @@ def reconstruct_events_and_store_temporary_esd(summary): tmp_filename = create_temporary_file() with tables.open_file(tmp_filename, 'w') as tmp_file: reconstruct = ReconstructESDEventsFromSource( - source_file, tmp_file, source_path, '/', station.number, progress=False + source_file, + tmp_file, + source_path, + '/', + station.number, + progress=False, ) reconstruct.reconstruct_and_store() node_path = reconstruct.reconstructions._v_pathname @@ -382,12 +387,12 @@ def determine_detector_timing_offsets_for_summary(summary): station_node = get_station_node(datafile, station) table = datafile.get_node(station_node, 'events') except tables.NoSuchNodeError: - logger.error("Cannot find table events for %s", summary) + logger.error('Cannot find table events for %s', summary) offsets = [np.nan, np.nan, np.nan, np.nan] else: try: station = HiSPARCStations([station.number]).stations[0] - except Exception: + except (KeyError, RuntimeError): station = None offsets = determine_detector_timing_offsets(table, station) @@ -472,7 +477,7 @@ def get_data(summary, tablename, quantity): station_node = get_station_node(datafile, station) table = datafile.get_node(station_node, tablename) except tables.NoSuchNodeError: - logger.error("Cannot find table %s for %s", tablename, summary) + logger.error('Cannot find table %s for %s', tablename, summary) data = None else: data = table.col(quantity) @@ -497,7 +502,7 @@ def get_table(summary, tablename): station_node = get_station_node(datafile, station) table = datafile.get_node(station_node, tablename) except tables.NoSuchNodeError: - 
logger.error("Cannot find table %s for %s", tablename, summary) + logger.error('Cannot find table %s for %s', tablename, summary) return None return table.read() @@ -538,7 +543,7 @@ def get_coincidences(network_summary, tablename, quantity): coincidences_node = get_coincidences_node(datafile) table = datafile.get_node(coincidences_node, tablename) except tables.NoSuchNodeError: - logger.error("Cannot find table %s for %s", tablename, network_summary) + logger.error('Cannot find table %s for %s', tablename, network_summary) data = None else: data = table.col(quantity) @@ -564,7 +569,7 @@ def get_time_series(summary, tablename, quantity): station_node = get_station_node(datafile, station) table = datafile.get_node(station_node, tablename) except tables.NoSuchNodeError: - logger.error("Cannot find table %s for %s", tablename, summary) + logger.error('Cannot find table %s for %s', tablename, summary) data = None else: col1 = table.col('timestamp') @@ -591,12 +596,12 @@ def get_timedeltas(date, ref_station, station): tablename = 'time_deltas' table = datafile.get_node(table_path, tablename) except tables.NoSuchNodeError: - logger.debug("Cannot find table %s %s for %s", table_path, tablename, date) + logger.debug('Cannot find table %s %s for %s', table_path, tablename, date) data = None else: data = table.col('delta') except OSError: - logger.debug("ESD file %s does not exists", path) + logger.debug('ESD file %s does not exists', path) return None return data diff --git a/publicdb/histograms/jobs.py b/publicdb/histograms/jobs.py index 313cafce6..654f7f1b7 100644 --- a/publicdb/histograms/jobs.py +++ b/publicdb/histograms/jobs.py @@ -14,7 +14,6 @@ from django.conf import settings -# from sapphire.analysis.calibration import datetime_range from sapphire.utils import round_in_base from ..station_layout.models import StationLayout @@ -180,13 +179,13 @@ def process_and_store_temporary_esd_for_summary(summary): django.db.close_old_connections() tmp_locations = [] if summary.needs_update_events: - logger.info("Processing events and storing ESD for %s", summary) + logger.info('Processing events and storing ESD for %s', summary) tmp_locations.append(esd.process_events_and_store_temporary_esd(summary)) if summary.needs_update_weather: - logger.info("Processing weather and storing ESD for %s", summary) + logger.info('Processing weather and storing ESD for %s', summary) tmp_locations.append(esd.process_weather_and_store_temporary_esd(summary)) if summary.needs_update_singles: - logger.info("Processing singles and storing ESD for %s", summary) + logger.info('Processing singles and storing ESD for %s', summary) tmp_locations.append(esd.process_singles_and_store_temporary_esd(summary)) return summary, tmp_locations @@ -199,11 +198,11 @@ def search_and_store_coincidences(network_summary): """ django.db.close_old_connections() if network_summary.needs_update_coincidences: - logger.info("Processing coincidences and storing ESD for %s", network_summary) + logger.info('Processing coincidences and storing ESD for %s', network_summary) num_coincidences = esd.search_coincidences_and_store_in_esd(network_summary) network_summary.num_coincidences = num_coincidences - logger.info("Processing time deltas and storing ESD for %s", network_summary) + logger.info('Processing time deltas and storing ESD for %s', network_summary) esd.determine_time_delta_and_store_in_esd(network_summary) return network_summary @@ -211,11 +210,11 @@ def search_and_store_coincidences(network_summary): def update_histograms(): """Update new 
configs, histograms and datasets""" - perform_tasks_manager(NetworkSummary, "needs_update_coincidences", perform_coincidences_tasks) - perform_tasks_manager(Summary, "needs_update_config", perform_config_tasks) - perform_tasks_manager(Summary, "needs_update_events", perform_events_tasks) - perform_tasks_manager(Summary, "needs_update_weather", perform_weather_tasks) - perform_tasks_manager(Summary, "needs_update_singles", perform_singles_tasks) + perform_tasks_manager(NetworkSummary, 'needs_update_coincidences', perform_coincidences_tasks) + perform_tasks_manager(Summary, 'needs_update_config', perform_config_tasks) + perform_tasks_manager(Summary, 'needs_update_events', perform_events_tasks) + perform_tasks_manager(Summary, 'needs_update_weather', perform_weather_tasks) + perform_tasks_manager(Summary, 'needs_update_singles', perform_singles_tasks) def perform_tasks_manager(model, needs_update_item, perform_certain_tasks): @@ -245,7 +244,7 @@ def perform_tasks_manager(model, needs_update_item, perform_certain_tasks): for summary, tmp_locations in results: if current_date is None: current_date = summary.date - if not current_date == summary.date: + if current_date != summary.date: # Finish delayed store jobs. for summary_res, tmp_locations_res in tmp_results: copy_temporary_and_set_flag(summary_res, needs_update_item, tmp_locations_res) @@ -269,7 +268,7 @@ def perform_tasks_manager(model, needs_update_item, perform_certain_tasks): def perform_events_tasks(summary): django.db.close_old_connections() - logger.info("Updating event histograms for %s", summary) + logger.info('Updating event histograms for %s', summary) update_eventtime_histogram(summary) update_pulseheight_histogram(summary) update_pulseintegral_histogram(summary) @@ -278,20 +277,20 @@ def perform_events_tasks(summary): try: layout = summary.station.layouts.filter(active_date__lte=summary.date).latest() except StationLayout.DoesNotExist: - logger.debug("No station layout available for %s", summary) + logger.debug('No station layout available for %s', summary) else: if layout.has_four_detectors: tmp_locations.append(esd.reconstruct_events_and_store_temporary_esd(summary)) update_zenith_histogram(summary, *tmp_locations[-1]) update_azimuth_histogram(summary, *tmp_locations[-1]) else: - logger.debug("No reconstructions for 2-detector station %s", summary) + logger.debug('No reconstructions for 2-detector station %s', summary) return summary, tmp_locations def perform_config_tasks(summary): django.db.close_old_connections() - logger.info("Updating configuration messages for %s", summary) + logger.info('Updating configuration messages for %s', summary) num_config = update_config(summary) summary.num_config = num_config return summary, [] @@ -299,7 +298,7 @@ def perform_config_tasks(summary): def perform_weather_tasks(summary): django.db.close_old_connections() - logger.info("Updating weather datasets for %s", summary) + logger.info('Updating weather datasets for %s', summary) update_temperature_dataset(summary) update_barometer_dataset(summary) return summary, [] @@ -307,7 +306,7 @@ def perform_weather_tasks(summary): def perform_singles_tasks(summary): django.db.close_old_connections() - logger.info("Updating singles datasets for %s", summary) + logger.info('Updating singles datasets for %s', summary) update_singles_histogram(summary) update_singles_rate_dataset(summary) return summary, [] @@ -315,7 +314,7 @@ def perform_singles_tasks(summary): def perform_coincidences_tasks(network_summary): django.db.close_old_connections() - 
logger.info("Updating coincidence histograms for %s", network_summary) + logger.info('Updating coincidence histograms for %s', network_summary) update_coincidencetime_histogram(network_summary) update_coincidencenumber_histogram(network_summary) update_station_timing_offsets(network_summary) @@ -323,7 +322,7 @@ def perform_coincidences_tasks(network_summary): def update_eventtime_histogram(summary): - logger.debug("Updating eventtime histogram for %s", summary) + logger.debug('Updating eventtime histogram for %s', summary) timestamps = esd.get_event_timestamps(summary) # creating a histogram with bins consisting of timestamps instead of @@ -349,7 +348,7 @@ def update_eventtime_histogram(summary): def update_coincidencetime_histogram(network_summary): """Histograms that show the number of coincidences per hour""" - logger.debug("Updating coincidencetime histogram for %s", network_summary) + logger.debug('Updating coincidencetime histogram for %s', network_summary) timestamps = esd.get_coincidence_timestamps(network_summary) # creating a histogram with bins consisting of timestamps instead of @@ -369,7 +368,7 @@ def update_coincidencetime_histogram(network_summary): def update_coincidencenumber_histogram(network_summary): """Histograms of the number of stations participating in coincidences""" - logger.debug("Updating coincidencenumber histogram for %s", network_summary) + logger.debug('Updating coincidencenumber histogram for %s', network_summary) n_stations = esd.get_coincidence_data(network_summary, 'N') # create bins, don't forget right-most edge @@ -383,7 +382,7 @@ def update_coincidencenumber_histogram(network_summary): def update_pulseheight_histogram(summary): """Histograms of pulseheights for each detector individually""" - logger.debug("Updating pulseheight histogram for %s", summary) + logger.debug('Updating pulseheight histogram for %s', summary) pulseheights = esd.get_pulseheights(summary) bins, histograms = create_histogram(pulseheights, MAX_PH, BIN_PH_NUM) save_histograms(summary, 'pulseheight', bins, histograms) @@ -392,7 +391,7 @@ def update_pulseheight_histogram(summary): def update_pulseintegral_histogram(summary): """Histograms of pulseintegral for each detector individually""" - logger.debug("Updating pulseintegral histogram for %s", summary) + logger.debug('Updating pulseintegral histogram for %s', summary) integrals = esd.get_integrals(summary) bins, histograms = create_histogram(integrals, MAX_IN, BIN_IN_NUM) save_histograms(summary, 'pulseintegral', bins, histograms) @@ -401,7 +400,7 @@ def update_pulseintegral_histogram(summary): def update_singles_histogram(summary): """Histograms of singles data for each detector individually""" - logger.debug("Updating singles histograms for %s", summary) + logger.debug('Updating singles histograms for %s', summary) _, high, low = esd.get_singles(summary) bins, histograms = create_histogram(low, MAX_SINGLES_LOW, BIN_SINGLES_LOW_NUM) @@ -414,7 +413,7 @@ def update_singles_histogram(summary): def update_singles_rate_dataset(summary): """Singles rate for each detector individually""" - logger.debug("Updating singles rate datasets for %s", summary) + logger.debug('Updating singles rate datasets for %s', summary) ts, high, low = esd.get_singles(summary) # timestamp at midnight (start of day) of date @@ -435,7 +434,7 @@ def update_singles_rate_dataset(summary): def update_detector_timing_offsets(summary): """Determine detector timing offsets""" - logger.debug("Determining detector timing offsets for %s", summary) + 
+    logger.debug('Determining detector timing offsets for %s', summary)
     offsets = esd.determine_detector_timing_offsets_for_summary(summary)
     save_offsets(summary, offsets)
 
@@ -443,7 +442,7 @@ def update_detector_timing_offsets(summary):
 def update_station_timing_offsets(network_summary):
     """Determine which station timing offsets need updating and update"""
 
-    logger.debug("Determining update of station offsets for %s", network_summary)
+    logger.debug('Determining update of station offsets for %s', network_summary)
     summary_date = network_summary.date
 
     stations = esd.get_station_numbers_from_esd_coincidences(network_summary)
@@ -464,10 +463,10 @@ def update_station_timing_offsets(network_summary):
             if summary is None:
                 continue
             if date in cuts:
-                logger.debug("Setting offset for config cut to nan for %s ref %s at %s", summary, ref_summary, date)
+                logger.debug('Setting offset for config cut to nan for %s ref %s at %s', summary, ref_summary, date)
                 offset, error = np.nan, np.nan
             else:
-                logger.debug("Determining station offset for %s ref %s at %s", summary, ref_summary, date)
+                logger.debug('Determining station offset for %s ref %s at %s', summary, ref_summary, date)
                 offset, error = off.determine_station_timing_offset(date, sn, ref_sn)
             save_station_offset(ref_summary, summary, offset, error)
 
@@ -475,7 +474,7 @@ def update_station_timing_offsets(network_summary):
 def update_zenith_histogram(summary, tempfile_path, node_path):
     """Histogram of the reconstructed zenith"""
 
-    logger.debug("Updating zenith histogram for %s", summary)
+    logger.debug('Updating zenith histogram for %s', summary)
     zeniths = esd.get_zeniths(tempfile_path, node_path)
 
     # create bins, don't forget right-most edge
@@ -489,7 +488,7 @@ def update_zenith_histogram(summary, tempfile_path, node_path):
 def update_azimuth_histogram(summary, tempfile_path, node_path):
     """Histogram of the reconstructed azimuth"""
 
-    logger.debug("Updating azimuth histogram for %s", summary)
+    logger.debug('Updating azimuth histogram for %s', summary)
     azimuths = esd.get_azimuths(tempfile_path, node_path)
 
     # create bins, don't forget right-most edge
@@ -503,7 +502,7 @@ def update_azimuth_histogram(summary, tempfile_path, node_path):
 def update_temperature_dataset(summary):
     """Create dataset of timestamped temperature data"""
 
-    logger.debug("Updating temperature dataset for %s", summary)
+    logger.debug('Updating temperature dataset for %s', summary)
     temperature = esd.get_temperature(summary)
     error_values = [-999, -(2**15)]
     temperature = [(x, y) for x, y in temperature if y not in error_values]
@@ -515,7 +514,7 @@ def update_temperature_dataset(summary):
 def update_barometer_dataset(summary):
     """Create dataset of timestamped barometer data"""
 
-    logger.debug("Updating barometer dataset for %s", summary)
+    logger.debug('Updating barometer dataset for %s', summary)
     barometer = esd.get_barometer(summary)
     error_values = [-999]
     barometer = [(x, y) for x, y in barometer if y not in error_values]
@@ -549,7 +548,7 @@ def shrink(column, bin_idxs, n_bins):
 
     """
     with warnings.catch_warnings():  # suppress "Mean of empty slice"
-        warnings.simplefilter("ignore", category=RuntimeWarning)
+        warnings.simplefilter('ignore', category=RuntimeWarning)
         data = np.nan_to_num([np.nanmean(column[bin_idxs[i] : bin_idxs[i + 1]]) for i in range(n_bins)])
     return data.tolist()
 
@@ -600,35 +599,35 @@ def create_histogram(data, high, samples):
 def save_histograms(summary, slug, bins, values):
     """Store the binned data in database"""
 
-    logger.debug("Saving histogram %s for %s", slug, summary)
+    logger.debug('Saving histogram %s for %s', slug, summary)
     type = HistogramType.objects.get(slug=slug)
     histogram = {'bins': bins, 'values': values}
     if not type.has_multiple_datasets:
         DailyHistogram.objects.update_or_create(summary=summary, type=type, defaults=histogram)
     else:
         MultiDailyHistogram.objects.update_or_create(summary=summary, type=type, defaults=histogram)
-    logger.debug("Saved succesfully")
+    logger.debug('Saved successfully')
 
 
 def save_network_histograms(network_summary, slug, bins, values):
     """Store the binned data in database"""
 
-    logger.debug("Saving histogram %s for %s", slug, network_summary)
+    logger.debug('Saving histogram %s for %s', slug, network_summary)
     type = HistogramType.objects.get(slug=slug)
     histogram = {'bins': bins, 'values': values}
     NetworkHistogram.objects.update_or_create(network_summary=network_summary, type=type, defaults=histogram)
-    logger.debug("Saved succesfully")
+    logger.debug('Saved successfully')
 
 
 def save_dataset(summary, slug, x, y):
     """Store the data in database"""
 
-    logger.debug("Saving dataset %s for %s", slug, summary)
+    logger.debug('Saving dataset %s for %s', slug, summary)
     type = DatasetType.objects.get(slug=slug)
     dataset = {'x': x, 'y': y}
     if slug in ['barometer', 'temperature']:
         DailyDataset.objects.update_or_create(summary=summary, type=type, defaults=dataset)
     else:
         MultiDailyDataset.objects.update_or_create(summary=summary, type=type, defaults=dataset)
-    logger.debug("Saved succesfully")
+    logger.debug('Saved successfully')
 
 
 def save_offsets(summary, offsets):
@@ -639,10 +638,10 @@ def save_offsets(summary, offsets):
     :param offsets: list of 4 timing offsets
 
     """
-    logger.debug("Saving detector timing offsets for %s", summary)
+    logger.debug('Saving detector timing offsets for %s', summary)
     off = {f'offset_{i}': round_in_base(o, 0.25) if not np.isnan(o) else None for i, o in enumerate(offsets, 1)}
     DetectorTimingOffset.objects.update_or_create(summary=summary, defaults=off)
-    logger.debug("Saved succesfully")
+    logger.debug('Saved successfully')
 
 
 def save_station_offset(ref_summary, summary, offset, error):
@@ -654,7 +653,7 @@ def save_station_offset(ref_summary, summary, offset, error):
     :param error: error of the offset
 
     """
-    logger.debug("Saving station offset for %s ref %s", summary, ref_summary)
+    logger.debug('Saving station offset for %s ref %s', summary, ref_summary)
     field = {}
     if not np.isnan(offset):
         field['offset'] = round(offset, 1)
@@ -664,7 +663,7 @@ def save_station_offset(ref_summary, summary, offset, error):
         field['error'] = None
     StationTimingOffset.objects.update_or_create(summary=summary, ref_summary=ref_summary, defaults=field)
-    logger.debug("Saved succesfully")
+    logger.debug('Saved successfully')
 
 
 def get_station_cluster_number(station):
diff --git a/publicdb/histograms/management/commands/updatehistograms.py b/publicdb/histograms/management/commands/updatehistograms.py
index 328fca3fb..88f260701 100644
--- a/publicdb/histograms/management/commands/updatehistograms.py
+++ b/publicdb/histograms/management/commands/updatehistograms.py
@@ -9,21 +9,21 @@
 
 
 class Command(BaseCommand):
-    help = "Perform tasks to check for new data and process that data"
+    help = 'Perform tasks to check for new data and process that data'
 
     def handle(*args, **options):
-        logger.info("Checking for new events...")
+        logger.info('Checking for new events...')
         has_run = check_for_updates()
         if not has_run:
-            logger.warning("Check has not completed a previous run yet")
+            logger.warning('Check has not completed a previous run yet')
         else:
logger.info("Update check finished.") + logger.info('Update check finished.') - logger.info("Building new histograms...") + logger.info('Building new histograms...') completed = update_all_histograms() if not completed: - logger.warning("Histograms did not complete a previous run yet") + logger.warning('Histograms did not complete a previous run yet') else: - logger.info("Finished building histograms") + logger.info('Finished building histograms') - logger.info("Done.") + logger.info('Done.') diff --git a/publicdb/histograms/models.py b/publicdb/histograms/models.py index e6bf7189d..bfc4cf278 100644 --- a/publicdb/histograms/models.py +++ b/publicdb/histograms/models.py @@ -27,19 +27,19 @@ class NetworkSummary(models.Model): objects = NetworkSummaryQuerySet.as_manager() - def get_absolute_url(self): - kwargs = {'date': self.date} - return reverse('status:network:coincidences', kwargs=kwargs) - - def __str__(self): - return f'Network Summary: {self.date}' - class Meta: verbose_name = 'Network summary' verbose_name_plural = 'Network summaries' ordering = ['date'] get_latest_by = 'date' + def __str__(self): + return f'Network Summary: {self.date}' + + def get_absolute_url(self): + kwargs = {'date': self.date} + return reverse('status:network:coincidences', kwargs=kwargs) + class SummaryQuerySet(models.QuerySet): def valid_date(self): @@ -81,16 +81,6 @@ class Summary(models.Model): objects = SummaryQuerySet.as_manager() - def get_absolute_url(self): - kwargs = { - 'station_number': self.station.number, - 'date': self.date, - } - return reverse('status:station:summary', kwargs=kwargs) - - def __str__(self): - return f'Summary: {self.station.number} - {self.date}' - class Meta: verbose_name = 'Summary' verbose_name_plural = 'Summaries' @@ -98,6 +88,16 @@ class Meta: ordering = ['date', 'station'] get_latest_by = 'date' + def __str__(self): + return f'Summary: {self.station.number} - {self.date}' + + def get_absolute_url(self): + kwargs = { + 'station_number': self.station.number, + 'date': self.date, + } + return reverse('status:station:summary', kwargs=kwargs) + class Configuration(models.Model): summary = models.ForeignKey(Summary, models.CASCADE, related_name='configurations') @@ -188,15 +188,15 @@ class Configuration(models.Model): slv_ch2_comp_gain = models.FloatField() slv_ch2_comp_offset = models.FloatField() - def __str__(self): - return f'{self.summary.station.number} - {self.timestamp}' - class Meta: verbose_name = 'Configuration' verbose_name_plural = 'Configurations' get_latest_by = 'timestamp' ordering = ['summary'] + def __str__(self): + return f'{self.summary.station.number} - {self.timestamp}' + def station(self): return self.summary.station.number @@ -267,13 +267,13 @@ class HistogramType(models.Model): value_axis_title = models.CharField(max_length=40) description = models.TextField(blank=True) - def __str__(self): - return self.name - class Meta: verbose_name = 'Histogram type' verbose_name_plural = 'Histogram types' + def __str__(self): + return self.name + class DatasetType(models.Model): name = models.CharField(max_length=40, unique=True) @@ -283,13 +283,13 @@ class DatasetType(models.Model): y_axis_title = models.CharField(max_length=40) description = models.TextField(blank=True) - def __str__(self): - return self.name - class Meta: verbose_name = 'Dataset type' verbose_name_plural = 'Dataset types' + def __str__(self): + return self.name + class NetworkHistogram(models.Model): network_summary = models.ForeignKey(NetworkSummary, models.CASCADE, 
@@ -297,23 +297,31 @@ class NetworkHistogram(models.Model):
     bins = ArrayField(models.PositiveIntegerField())
     values = ArrayField(models.PositiveIntegerField())
 
-    def get_absolute_url(self):
-        kwargs = {'date': self.network_summary.date}
-        return reverse(f'status:source:{self.type.slug}', kwargs=kwargs)
-
-    def __str__(self):
-        return f'{self.network_summary.date} - {self.type}'
-
     class Meta:
         verbose_name = 'Network histogram'
         verbose_name_plural = 'Network histograms'
         unique_together = ('network_summary', 'type')
         ordering = ['network_summary', 'type']
 
+    def __str__(self):
+        return f'{self.network_summary.date} - {self.type}'
+
+    def get_absolute_url(self):
+        kwargs = {'date': self.network_summary.date}
+        return reverse(f'status:source:{self.type.slug}', kwargs=kwargs)
+
 
 class BaseDailyStationDataMixin(models.Model):
     """Base class for daily station data models"""
 
+    class Meta:
+        abstract = True
+        unique_together = ('summary', 'type')
+        ordering = ['summary', 'type']
+
+    def __str__(self):
+        return f'{self.summary.station.number} - {self.summary.date} - {self.type}'
+
     def get_absolute_url(self):
         kwargs = {
             'station_number': self.summary.station.number,
@@ -321,14 +329,6 @@ def get_absolute_url(self):
         }
         return reverse(f'status:source:{self.type.slug}', kwargs=kwargs)
 
-    def __str__(self):
-        return f'{self.summary.station.number} - {self.summary.date} - {self.type}'
-
-    class Meta:
-        abstract = True
-        unique_together = ('summary', 'type')
-        ordering = ['summary', 'type']
-
 
 class DailyHistogram(BaseDailyStationDataMixin):
     summary = models.ForeignKey(Summary, models.CASCADE, related_name='histograms')
@@ -373,10 +373,7 @@ def update_has_finished(self, day=None):
         if day is None:
             day = datetime.date.today()
 
-        if self.update_last_run.date() >= day and not self.update_is_running:
-            return True
-        else:
-            return False
+        return bool(self.update_last_run.date() >= day and not self.update_is_running)
 
 
 class DetectorTimingOffset(models.Model):
@@ -398,14 +395,14 @@ class StationTimingOffset(models.Model):
     offset = models.FloatField(blank=True, null=True)
     error = models.FloatField(blank=True, null=True)
 
-    def clean(self):
-        if self.ref_summary.station == self.summary.station:
-            raise ValidationError("The stations may not be the same")
-        if self.ref_summary.date != self.summary.date:
-            raise ValidationError("The summary dates should be the same")
-
     class Meta:
         verbose_name = 'Station timing offset'
         verbose_name_plural = 'Station timing offsets'
         unique_together = ('ref_summary', 'summary')
         ordering = ['ref_summary']
+
+    def clean(self):
+        if self.ref_summary.station == self.summary.station:
+            raise ValidationError('The stations may not be the same')
+        if self.ref_summary.date != self.summary.date:
+            raise ValidationError('The summary dates should be the same')
diff --git a/publicdb/inforecords/admin.py b/publicdb/inforecords/admin.py
index 7af1c7609..3b47a4841 100644
--- a/publicdb/inforecords/admin.py
+++ b/publicdb/inforecords/admin.py
@@ -32,7 +32,7 @@ class ContactAdmin(admin.ModelAdmin):
 
     def last_name(self, obj):
         if obj.prefix_surname:
-            return f"{obj.surname}, {obj.prefix_surname}"
+            return f'{obj.surname}, {obj.prefix_surname}'
         else:
             return f'{obj.surname}'
diff --git a/publicdb/inforecords/models.py b/publicdb/inforecords/models.py
index b9c0fd2e7..218e2f032 100644
--- a/publicdb/inforecords/models.py
+++ b/publicdb/inforecords/models.py
@@ -19,13 +19,13 @@ class Profession(models.Model):
     description = models.CharField(max_length=255, unique=True)
 
-    def __str__(self):
-        return self.description
-
     class Meta:
         verbose_name = 'Profession'
         verbose_name_plural = 'Professions'
 
+    def __str__(self):
+        return self.description
+
 
 class ContactInformation(models.Model):
     street_1 = models.CharField(max_length=255)
@@ -42,8 +42,13 @@ class ContactInformation(models.Model):
     email_private = models.EmailField(null=True, blank=True)
     url = models.URLField(null=True, blank=True)
 
+    class Meta:
+        verbose_name = 'Contact information'
+        verbose_name_plural = 'Contact information'
+        ordering = ['city', 'street_1', 'email_work']
+
     def __str__(self):
-        return f"{self.city} {self.street_1} {self.email_work}"
+        return f'{self.city} {self.street_1} {self.email_work}'
 
     @property
     def type(self):
@@ -71,11 +76,6 @@ def contact_owner(self):
         else:
             return 'no owner'
 
-    class Meta:
-        verbose_name = "Contact information"
-        verbose_name_plural = "Contact information"
-        ordering = ['city', 'street_1', 'email_work']
-
 
 class Contact(models.Model):
     profession = models.ForeignKey(Profession, models.CASCADE, related_name='contacts')
@@ -85,6 +85,12 @@ class Contact(models.Model):
     surname = models.CharField(max_length=255)
     contactinformation = models.ForeignKey(ContactInformation, models.CASCADE, related_name='contacts')
 
+    class Meta:
+        verbose_name = 'contact'
+        verbose_name_plural = 'contacts'
+        unique_together = ('first_name', 'prefix_surname', 'surname')
+        ordering = ['surname', 'first_name']
+
     def __str__(self):
         return self.name
 
@@ -94,19 +100,18 @@ def email_work(self):
 
     @property
     def name(self):
-        return ' '.join((self.title, self.first_name, self.prefix_surname, self.surname)).replace('  ', ' ').strip()
-
-    class Meta:
-        verbose_name = 'contact'
-        verbose_name_plural = 'contacts'
-        unique_together = ('first_name', 'prefix_surname', 'surname')
-        ordering = ['surname', 'first_name']
+        return f'{self.title} {self.first_name} {self.prefix_surname} {self.surname}'.replace('  ', ' ').strip()
 
 
 class Country(models.Model):
     name = models.CharField(max_length=255, unique=True)
     number = models.IntegerField(unique=True, blank=True)
 
+    class Meta:
+        verbose_name = 'Country'
+        verbose_name_plural = 'Countries'
+        ordering = ['number']
+
     def __str__(self):
         return self.name
 
@@ -119,7 +124,7 @@ def clean(self):
             self.number = 0
 
         if self.number % 10000:
-            raise ValidationError("Country number must be multiple of 10000")
+            raise ValidationError('Country number must be multiple of 10000')
 
     def last_cluster_number(self):
         clusters = self.clusters.filter(parent=None)
@@ -129,11 +134,6 @@ def last_cluster_number(self):
         else:
             return self.number - 1000
 
-    class Meta:
-        verbose_name = "Country"
-        verbose_name_plural = "Countries"
-        ordering = ['number']
-
 
 class Cluster(models.Model):
     name = models.CharField(max_length=255, unique=True)
@@ -142,9 +142,18 @@ class Cluster(models.Model):
     country = models.ForeignKey(Country, models.CASCADE, related_name='clusters')
     url = models.URLField(null=True, blank=True)
 
+    class Meta:
+        verbose_name = 'Cluster'
+        verbose_name_plural = 'Clusters'
+        ordering = ['name']
+
     def __str__(self):
         return self.name
 
+    def save(self, *args, **kwargs):
+        super().save(*args, **kwargs)
+        reload_datastore()
+
     def clean(self):
         if self.number is None:
             if self.parent is None:
@@ -156,27 +165,23 @@ def clean(self):
 
         if self.parent is None:
             if self.number % 1000:
-                raise ValidationError("Cluster number must be multiple of 1000")
+                raise ValidationError('Cluster number must be multiple of 1000')
             if not 0 <= (self.number - self.country.number) < 10000:
                 raise ValidationError(
-                    "Cluster number must be in range of "
f"numbers for the country ({self.country.number}, {self.country.number + 10000})." + 'Cluster number must be in range of ' + f'numbers for the country ({self.country.number}, {self.country.number + 10000}).', ) if self.parent is not None: if self.parent.parent is not None: - raise ValidationError("Subsubclusters are not allowed") + raise ValidationError('Subsubclusters are not allowed') if self.number % 100: - raise ValidationError("Subcluster number must be multiple of 100") + raise ValidationError('Subcluster number must be multiple of 100') if not 0 < (self.number - self.parent.number) < 1000: raise ValidationError( - "Subcluster number must be in range of " - f"numbers for the cluster ({self.parent.number}, {self.parent.number + 1000})." + 'Subcluster number must be in range of ' + f'numbers for the cluster ({self.parent.number}, {self.parent.number + 1000}).', ) - def save(self, *args, **kwargs): - super().save(*args, **kwargs) - reload_datastore() - def delete(self, *args, **kwargs): super().delete(*args, **kwargs) reload_datastore() @@ -203,11 +208,6 @@ def last_subcluster_number(self): else: return self.number - class Meta: - verbose_name = 'Cluster' - verbose_name_plural = 'Clusters' - ordering = ['name'] - class Station(models.Model): name = models.CharField(max_length=255) @@ -219,18 +219,14 @@ class Station(models.Model): password = models.CharField(max_length=255) info_page = models.TextField(blank=True) + class Meta: + verbose_name = 'Station' + verbose_name_plural = 'Stations' + ordering = ['number'] + def __str__(self): return f'{self.number:5}: {self.name}' - def clean(self): - if self.number is None: - self.number = self.cluster.last_station_number() + 1 - if not 0 < (self.number - self.cluster.number) < 100: - raise ValidationError( - "Station number must be in range of numbers for the (sub)cluster " - f"({self.cluster.number}, {self.cluster.number + 100})." 
- ) - def save(self, *args, **kwargs): # Strip some problematic characters self.name = self.name.replace('"', '').replace("'", '') @@ -239,6 +235,15 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) reload_datastore() + def clean(self): + if self.number is None: + self.number = self.cluster.last_station_number() + 1 + if not 0 < (self.number - self.cluster.number) < 100: + raise ValidationError( + 'Station number must be in range of numbers for the (sub)cluster ' + f'({self.cluster.number}, {self.cluster.number + 100}).', + ) + def delete(self, *args, **kwargs): super().delete(*args, **kwargs) reload_datastore() @@ -297,23 +302,18 @@ def latest_location(self, date=None): 'altitude': (round(config.gps_altitude, 2) if config.gps_altitude is not None else None), } - class Meta: - verbose_name = 'Station' - verbose_name_plural = 'Stations' - ordering = ['number'] - class PcType(models.Model): description = models.CharField(max_length=255, unique=True) slug = models.CharField(max_length=255) - def __str__(self): - return self.description - class Meta: verbose_name = 'PC Type' verbose_name_plural = 'PC Types' + def __str__(self): + return self.description + class Pc(models.Model): station = models.ForeignKey(Station, models.CASCADE, related_name='pcs') @@ -324,50 +324,28 @@ class Pc(models.Model): ip = models.GenericIPAddressField(unique=True, blank=True, null=True, protocol='ipv4') notes = models.TextField(blank=True) - def __str__(self): - return self.name - - def keys(self): - url = reverse('keys', kwargs={'host': self.name}) - return mark_safe(f'Certificate {self.name}') - - keys.short_description = 'Certificates' - - def url(self): - if self.type.slug == 'admin': - return '' - else: - return mark_safe(f's{self.station.number}.his') - - url.short_description = 'VNC URL' - class Meta: verbose_name = 'PC and certificates' verbose_name_plural = 'PCs and certificates' ordering = ['name'] - def get_next_ip_address(self, ip): - """Generate new IP address - - Increments given IP address by 1. - - """ - return str(ipaddress.ip_address(ip) + 1) + def __str__(self): + return self.name def save(self, *args, **kwargs): # slugify the short name to keep it clean self.name = slugify(self.name).replace('-', '').replace('_', '') if self.id is None: - if self.type.slug == "admin": + if self.type.slug == 'admin': try: - last_ip = Pc.objects.filter(type__slug="admin").latest('id').ip + last_ip = Pc.objects.filter(type__slug='admin').latest('id').ip except Pc.DoesNotExist: # Initial Admin IP last_ip = '172.16.66.1' else: try: - last_ip = Pc.objects.exclude(type__slug="admin").latest('id').ip + last_ip = Pc.objects.exclude(type__slug='admin').latest('id').ip except Pc.DoesNotExist: # Initial station IP last_ip = '194.171.82.1' @@ -378,6 +356,28 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) + def keys(self): + url = reverse('keys', kwargs={'host': self.name}) + return mark_safe(f'Certificate {self.name}') + + keys.short_description = 'Certificates' + + def url(self): + if self.type.slug == 'admin': + return '' + else: + return mark_safe(f's{self.station.number}.his') + + url.short_description = 'VNC URL' + + def get_next_ip_address(self, ip): + """Generate new IP address + + Increments given IP address by 1. 
+ + """ + return str(ipaddress.ip_address(ip) + 1) + def create_keys(pc): """Create VPN keys for the given Pc""" diff --git a/publicdb/inforecords/views.py b/publicdb/inforecords/views.py index 96519f12f..418f624db 100644 --- a/publicdb/inforecords/views.py +++ b/publicdb/inforecords/views.py @@ -34,7 +34,7 @@ def create_datastore_config(request): """Create the datastore configuration""" # Limit access to only allow access from the Datastore server - if socket.gethostbyaddr(request.META["REMOTE_ADDR"])[0] != settings.DATASTORE_HOST: + if socket.gethostbyaddr(request.META['REMOTE_ADDR'])[0] != settings.DATASTORE_HOST: raise PermissionDenied return render( diff --git a/publicdb/maps/urls.py b/publicdb/maps/urls.py index a28e31e2f..3e5f1b98b 100644 --- a/publicdb/maps/urls.py +++ b/publicdb/maps/urls.py @@ -4,9 +4,9 @@ app_name = 'maps' urlpatterns = [ - path('', views.stations_on_map, name="map"), - path('/', views.station_on_map, name="map"), - path('/', views.stations_on_map, name="map"), - path('//', views.stations_on_map, name="map"), - path('///', views.stations_on_map, name="map"), + path('', views.stations_on_map, name='map'), + path('/', views.station_on_map, name='map'), + path('/', views.stations_on_map, name='map'), + path('//', views.stations_on_map, name='map'), + path('///', views.stations_on_map, name='map'), ] diff --git a/publicdb/maps/views.py b/publicdb/maps/views.py index ec27bec3b..914b1551c 100644 --- a/publicdb/maps/views.py +++ b/publicdb/maps/views.py @@ -19,26 +19,34 @@ def station_on_map(request, station_number): return render(request, 'maps/map.html', {'subclusters': subclusters, 'center': center}) +def get_focus(country=None, cluster=None, subcluster=None): + if not country: + return Cluster.objects.all().values_list('name', flat=True) + + country = get_object_or_404(Country, name=country) + + if not cluster: + return country.clusters.values_list('name', flat=True) + + cluster = get_object_or_404(country.clusters, name=cluster, parent=None) + + if not subcluster: + focus = [cluster.name] + focus.extend(cluster.subclusters.values_list('name', flat=True)) + return focus + + if cluster.name == subcluster: + return [cluster.name] + + subcluster = get_object_or_404(cluster.subclusters, name=subcluster) + + return [subcluster.name] + + def stations_on_map(request, country=None, cluster=None, subcluster=None): """Show all stations from a subcluster on a map""" - if not country: - focus = Cluster.objects.all().values_list('name', flat=True) - else: - country = get_object_or_404(Country, name=country) - if not cluster: - focus = country.clusters.values_list('name', flat=True) - else: - cluster = get_object_or_404(country.clusters, name=cluster, parent=None) - if not subcluster: - focus = [cluster.name] - focus.extend(cluster.subclusters.values_list('name', flat=True)) - else: - if cluster.name == subcluster: - focus = [cluster.name] - else: - focus = [get_object_or_404(cluster.subclusters, name=subcluster).name] - + focus = get_focus(country, cluster, subcluster) subclusters = get_subclusters() return render(request, 'maps/map.html', {'subclusters': subclusters, 'focus': focus}) diff --git a/publicdb/raw_data/__init__.py b/publicdb/raw_data/__init__.py old mode 100755 new mode 100644 diff --git a/publicdb/raw_data/forms.py b/publicdb/raw_data/forms.py index feb2cfa20..a6f4672ae 100644 --- a/publicdb/raw_data/forms.py +++ b/publicdb/raw_data/forms.py @@ -52,7 +52,7 @@ def clean(self): if not station: self.add_error(station_field, 'Choose a station') else: - 
cleaned_data["station"] = station + cleaned_data['station'] = station else: del cleaned_data[station_field] return cleaned_data @@ -64,7 +64,7 @@ class CoincidenceDownloadForm(forms.Form): stations = forms.CharField(help_text="e.g. '103, 104, 105'", required=False) start = forms.DateTimeField(help_text="e.g. '2014-4-5', or '2014-4-18 12:45'") end = forms.DateTimeField(help_text="e.g. '2014-4-29', or '2014-04-30 9:05'") - n = forms.IntegerField(min_value=2, help_text="Minimum number of events in a coincidence") + n = forms.IntegerField(min_value=2, help_text='Minimum number of events in a coincidence') download = forms.BooleanField(initial=True, required=False) def clean(self): @@ -73,15 +73,15 @@ def clean(self): cleaned_data = super().clean() filter_by = cleaned_data.get('filter_by') if filter_by == 'network': - del cleaned_data["cluster"] - del cleaned_data["stations"] + del cleaned_data['cluster'] + del cleaned_data['stations'] elif filter_by == 'cluster': - del cleaned_data["stations"] + del cleaned_data['stations'] cluster = cleaned_data.get('cluster') if not cluster: - self.add_error("cluster", ValidationError('Choose a cluster.', 'invalid_choice')) + self.add_error('cluster', ValidationError('Choose a cluster.', 'invalid_choice')) elif filter_by == 'stations': - del cleaned_data["cluster"] + del cleaned_data['cluster'] msg = None stations = cleaned_data.get('stations') if not stations: @@ -89,14 +89,14 @@ def clean(self): else: try: s_numbers = [int(x) for x in stations.strip('[]()').split(',')] - except Exception: + except ValueError: msg = ValidationError('Incorrect station entry.', 'incorrect_entry') else: if len(s_numbers) < cleaned_data.get('n'): msg = ValidationError('Enter at least N stations.', 'too_few') elif len(s_numbers) > 30: msg = ValidationError('Exceeded limit of 30 stations.', 'too_many') - elif not Station.objects.filter(number__in=s_numbers).count() == len(s_numbers): + elif Station.objects.filter(number__in=s_numbers).count() != len(s_numbers): msg = ValidationError('Invalid station numbers.', 'invalid_choices') if msg is not None: self.add_error('stations', msg) diff --git a/publicdb/raw_data/urls.py b/publicdb/raw_data/urls.py index 34f5115c0..cb2c3c203 100644 --- a/publicdb/raw_data/urls.py +++ b/publicdb/raw_data/urls.py @@ -6,23 +6,23 @@ app_name = 'data' urlpatterns = [ path('', RedirectView.as_view(url='download', permanent=False)), - path('download/', views.download_form, name="download_form"), - path('download////', views.download_form, name="download_form"), - path('download/coincidences/', views.coincidences_download_form, name="coincidences_download_form"), + path('download/', views.download_form, name='download_form'), + path('download////', views.download_form, name='download_form'), + path('download/coincidences/', views.coincidences_download_form, name='coincidences_download_form'), path( 'download/coincidences///', views.coincidences_download_form, - name="coincidences_download_form", + name='coincidences_download_form', ), - path('rpc', views.call_xmlrpc, name="rpc"), - path('/events/', views.download_data, {'data_type': 'events'}, name="events"), - path('/weather/', views.download_data, {'data_type': 'weather'}, name="weather"), - path('/singles/', views.download_data, {'data_type': 'singles'}, name="singles"), + path('rpc', views.call_xmlrpc, name='rpc'), + path('/events/', views.download_data, {'data_type': 'events'}, name='events'), + path('/weather/', views.download_data, {'data_type': 'weather'}, name='weather'), + path('/singles/', 
views.download_data, {'data_type': 'singles'}, name='singles'), path( 'knmi/lightning//', views.download_data, {'data_type': 'lightning'}, - name="lightning", + name='lightning', ), - path('network/coincidences/', views.download_coincidences, name="coincidences"), + path('network/coincidences/', views.download_coincidences, name='coincidences'), ] diff --git a/publicdb/raw_data/views.py b/publicdb/raw_data/views.py index 5b5e46b9d..46d8f0148 100644 --- a/publicdb/raw_data/views.py +++ b/publicdb/raw_data/views.py @@ -61,9 +61,10 @@ def call_xmlrpc(request): # Show documentation on available methods response = HttpResponse() template = loader.get_template('raw_data/xmlrpc.html') - methods = [] - for method in dispatcher.system_listMethods(): - methods.append({'name': method, 'help': dispatcher.system_methodHelp(method)}) + methods = [ + {'name': method, 'help': dispatcher.system_methodHelp(method)} + for method in dispatcher.system_listMethods() + ] context = {'methods': methods} response.write(template.render(context)) return response @@ -122,7 +123,7 @@ def get_raw_datafile(date): try: datafile = tables.open_file(name, 'r') except OSError: - raise Exception("No data for that date") + raise ValueError('No data for that date') return datafile @@ -136,7 +137,7 @@ def get_station_node(datafile, station_number): if station in cluster: return datafile.get_node(cluster, station) - raise Exception("No data available for this station on that date") + raise ValueError('No data available for this station on that date') def get_target(): @@ -179,7 +180,7 @@ def download_form(request, station_number=None, start=None, end=None): 'start': start, 'end': end, 'data_type': 'events', - } + }, ) return render(request, 'raw_data/data_download.html', {'form': form}) @@ -213,14 +214,8 @@ def download_data(request, data_type='events', station_number=None, lightning_ty else: end = start + datetime.timedelta(days=1) except ValueError: - msg = "Incorrect optional parameters (start [datetime], " "end [datetime])" - return HttpResponseBadRequest(msg, content_type=MIME_PLAIN) - - download = request.GET.get('download', False) - if download in ['true', 'True']: - download = True - else: - download = False + error_msg = 'Incorrect optional parameters (start [datetime], end [datetime])' + return HttpResponseBadRequest(error_msg, content_type=MIME_PLAIN) timerange_string = prettyprint_timerange(start, end) if data_type == 'events': @@ -234,14 +229,14 @@ def download_data(request, data_type='events', station_number=None, lightning_ty filename = f'singles-s{station_number}-{timerange_string}.tsv' elif data_type == 'lightning': if lightning_type not in list(range(6)): - msg = "Incorrect lightning type, should be a value between 0-5" + msg = 'Incorrect lightning type, should be a value between 0-5' return HttpResponseBadRequest(msg, content_type=MIME_PLAIN) tsv_output = generate_lightning_as_tsv(lightning_type, start, end) filename = f'lightning-knmi-{timerange_string}.tsv' response = StreamingHttpResponse(tsv_output, content_type=MIME_TSV) - if download: + if request.GET.get('download', False) in ['true', 'True']: content_disposition = f'attachment; filename="{filename}"' else: content_disposition = f'inline; filename="{filename}"' @@ -291,7 +286,7 @@ def generate_events_as_tsv(station, start, end): clean_float_array(events['t_trigger']), clean_angle_array(reconstructions['zenith']), clean_angle_array(reconstructions['azimuth']), - ] + ], ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', 
lineterminator='\n') @@ -300,9 +295,9 @@ events_returned = True if not events_returned: - yield "# No events found for the chosen query." + yield '# No events found for the chosen query.' else: - yield "# Finished downloading." + yield '# Finished downloading.' def get_events_from_esd_in_range(station, start, end): @@ -377,7 +372,7 @@ def generate_weather_as_tsv(station, start, end): events['heat_index'], clean_float_array(events['dew_point']), clean_float_array(events['wind_chill']), - ] + ], ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', lineterminator='\n') @@ -386,9 +381,9 @@ weather_returned = True if not weather_returned: - yield "# No weather data found for the chosen query." + yield '# No weather data found for the chosen query.' else: - yield "# Finished downloading." + yield '# Finished downloading.' def get_weather_from_esd_in_range(station, start, end): @@ -446,7 +441,7 @@ def generate_singles_as_tsv(station, start, end): events['slv_ch1_high'], events['slv_ch2_low'], events['slv_ch2_high'], - ] + ], ) block_buffer = StringIO() writer = csv.writer(block_buffer, delimiter='\t', lineterminator='\n') @@ -455,9 +450,9 @@ singles_returned = True if not singles_returned: - yield "# No singles data found for the chosen query." + yield '# No singles data found for the chosen query.' else: - yield "# Finished downloading." + yield '# Finished downloading.' def get_singles_from_esd_in_range(station, start, end): @@ -528,9 +523,9 @@ def generate_lightning_as_tsv(lightning_type, start, end): lightning_returned = True if not lightning_returned: - yield "# No lightning data found for the chosen query." + yield '# No lightning data found for the chosen query.' else: - yield "# Finished downloading." + yield '# Finished downloading.' def get_lightning_in_range(lightning_type, start, end): @@ -570,7 +565,7 @@ def coincidences_download_form(request, start=None, end=None): 'end': end, 'n': n, 'download': download, - } + }, ) url = reverse('data:coincidences') return HttpResponseRedirect(f'{url}?{query_string}') @@ -607,7 +602,7 @@ def download_coincidences(request): else: end = start + datetime.timedelta(days=1) except ValueError: - error_msg = "Incorrect optional parameters (start [datetime], end [datetime])" + error_msg = 'Incorrect optional parameters (start [datetime], end [datetime])' return HttpResponseBadRequest(error_msg, content_type=MIME_PLAIN) try: @@ -625,43 +620,38 @@ error_msg = None if stations and cluster: - error_msg = "Both stations and cluster are defined." + error_msg = 'Both stations and cluster are defined.' elif stations: try: stations = [int(number.strip('"\' ')) for number in stations.strip('[](), ').split(',')] except ValueError: - error_msg = "Unable to parse station numbers." + error_msg = 'Unable to parse station numbers.' else: if len(stations) < n: - error_msg = "To few stations in query, give at least n." + error_msg = 'Too few stations in query, give at least n.' elif len(stations) >= 30: - error_msg = "To many stations in query, use less than 30." + error_msg = 'Too many stations in query, use fewer than 30.' elif Station.objects.filter(number__in=stations).count() != len(stations): - error_msg = "Not all station numbers are valid." + error_msg = 'Not all station numbers are valid.'
elif cluster: cluster = get_object_or_404(Cluster, name=cluster) stations = Station.objects.filter(Q(cluster__parent=cluster) | Q(cluster=cluster)).values_list( - 'number', flat=True + 'number', + flat=True, ) if len(stations) >= 30: - error_msg = "To many stations in this cluster, manually select a subset of stations." + error_msg = 'Too many stations in this cluster, manually select a subset of stations.' if error_msg is not None: return HttpResponseBadRequest(error_msg, content_type=MIME_PLAIN) - download = request.GET.get('download', False) - if download in ['true', 'True']: - download = True - else: - download = False - timerange_string = prettyprint_timerange(start, end) tsv_output = generate_coincidences_as_tsv(start, end, cluster, stations, n) filename = f'coincidences-{timerange_string}.tsv' response = StreamingHttpResponse(tsv_output, content_type=MIME_TSV) - if download: + if request.GET.get('download', False) in ['true', 'True']: content_disposition = f'attachment; filename="{filename}"' else: content_disposition = f'inline; filename="{filename}"' @@ -717,9 +707,9 @@ def generate_coincidences_as_tsv(start, end, cluster, stations, n): coincidences_returned = True if not coincidences_returned: - yield "# No coincidences found for the chosen query." + yield '# No coincidences found for the chosen query.' else: - yield "# Finished downloading." + yield '# Finished downloading.' def get_coincidences_from_esd_in_range(start, end, stations, n): diff --git a/publicdb/settings_develop.py b/publicdb/settings_develop.py index 90555aceb..8e01815f1 100644 --- a/publicdb/settings_develop.py +++ b/publicdb/settings_develop.py @@ -7,9 +7,7 @@ DEBUG = True -ADMINS = ( - # ('Your Name', 'your_email@example.com'), -) +ADMINS = () MANAGERS = ADMINS @@ -23,7 +21,7 @@ 'PASSWORD': 'postgres', 'HOST': 'localhost', 'PORT': '5432', - } + }, } # Path of the mounted HiSPARC datastore root folder @@ -48,7 +46,7 @@ # Process data with multiple threads. Default is enabled (True). # Disable multiprocessing for debugging purposes. When multithreaded # processing is enabled the traceback doesn't go to the exact location. -USE_MULTIPROCESSING = True +USE_MULTIPROCESSING = False # Disable emailing EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' diff --git a/publicdb/settings_docker.py b/publicdb/settings_docker.py index 5ef613614..558e50e3e 100644 --- a/publicdb/settings_docker.py +++ b/publicdb/settings_docker.py @@ -1,6 +1,6 @@ # Django settings for when running publicdb via docker-compose. -from .settings_develop import * # noqa: F401,F403 +from .settings_develop import * # noqa: F403 DATABASES = { 'default': { @@ -10,7 +10,7 @@ 'PASSWORD': 'postgres', 'HOST': 'postgres', 'PORT': '5432', - } + }, } ALLOWED_HOSTS += [ diff --git a/publicdb/station_layout/forms.py b/publicdb/station_layout/forms.py index ee6d471d6..28c76be30 100644 --- a/publicdb/station_layout/forms.py +++ b/publicdb/station_layout/forms.py @@ -20,7 +20,7 @@ class StationLayoutQuarantineForm(forms.Form): station = forms.ModelChoiceField(queryset=Station.objects.filter(pcs__is_test=False).distinct()) active_date = forms.DateTimeField( - help_text="Date the detectors were placed in this configuration, e.g. '2010-5-17 12:45'." + help_text="Date the detectors were placed in this configuration, e.g.
'2010-5-17 12:45'.", ) # Master detectors diff --git a/publicdb/station_layout/models.py b/publicdb/station_layout/models.py index 412ce4f36..52db3edad 100644 --- a/publicdb/station_layout/models.py +++ b/publicdb/station_layout/models.py @@ -30,10 +30,6 @@ class StationLayout(models.Model): detector_4_height = models.FloatField(null=True, blank=True) detector_4_beta = models.FloatField(null=True, blank=True) - @property - def has_four_detectors(self): - return self.detector_3_radius is not None and self.detector_4_radius is not None - class Meta: verbose_name = 'Station layout' verbose_name_plural = 'Station layouts' @@ -45,7 +41,8 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) try: next_layout = StationLayout.objects.filter( - station=self.station, active_date__gt=self.active_date + station=self.station, + active_date__gt=self.active_date, ).earliest() next_date = next_layout.active_date except StationLayout.DoesNotExist: @@ -58,6 +55,10 @@ def save(self, *args, **kwargs): summary.needs_update_events = True summary.save() + @property + def has_four_detectors(self): + return self.detector_3_radius is not None and self.detector_4_radius is not None + class StationLayoutQuarantine(models.Model): name = models.CharField(max_length=255) @@ -97,7 +98,7 @@ def generate_hashes(self): hash_submit = os.urandom(16).hex() hash_review = os.urandom(16).hex() if StationLayoutQuarantine.objects.filter(hash_submit=hash_submit) or StationLayoutQuarantine.objects.filter( - hash_review=hash_review + hash_review=hash_review, ): self.generate_hashes() else: @@ -107,7 +108,7 @@ def sendmail_submit(self): subject = 'HiSPARC station layout submission' message = dedent( - f'''\ + f"""\ Hello {self.name}, Please click on this link to confirm your submission @@ -115,7 +116,7 @@ https://data.hisparc.nl/layout/confirm/{self.hash_submit}/ Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) @@ -123,7 +124,7 @@ def sendmail_review(self): subject = 'HiSPARC station layout review' message = dedent( - f'''\ + f"""\ Hello, A new station layout has been submitted for station {self.station}. @@ -134,7 +135,7 @@ https://data.hisparc.nl/layout/review/{self.hash_review}/ Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, ['beheer@hisparc.nl'], fail_silently=False) @@ -142,14 +143,14 @@ def sendmail_accepted(self): subject = 'HiSPARC station layout accepted' message = dedent( - f'''\ + f"""\ Hello {self.name}, The station layout which you submitted for station {self.station} has been approved by the reviewer. Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) @@ -157,14 +158,14 @@ def sendmail_declined(self): subject = 'HiSPARC station layout declined' message = dedent( - f'''\ + f"""\ Hello {self.name}, The station layout which you submitted for station {self.station} has been declined by the reviewer.
Greetings, - The HiSPARC Team''' + The HiSPARC Team""", ) sender = 'Beheer HiSPARC ' send_mail(subject, message, sender, [self.email], fail_silently=False) diff --git a/publicdb/station_layout/urls.py b/publicdb/station_layout/urls.py index ea1c90e47..2f59cb842 100644 --- a/publicdb/station_layout/urls.py +++ b/publicdb/station_layout/urls.py @@ -4,9 +4,9 @@ app_name = 'layout' urlpatterns = [ - path('submit/', views.layout_submit, name="submit"), - path('submit/validate/', views.validate_layout_submit, name="validate_submit"), - path('confirm//', views.confirmed_layout, name="confirm"), - path('review//', views.review_layout, name="review"), - path('review//validate/', views.validate_review_layout, name="validate_review"), + path('submit/', views.layout_submit, name='submit'), + path('submit/validate/', views.validate_layout_submit, name='validate_submit'), + path('confirm//', views.confirmed_layout, name='confirm'), + path('review//', views.review_layout, name='review'), + path('review//validate/', views.validate_review_layout, name='validate_review'), ] diff --git a/publicdb/station_layout/views.py b/publicdb/station_layout/views.py index 235bb7baf..3f55d2f6e 100644 --- a/publicdb/station_layout/views.py +++ b/publicdb/station_layout/views.py @@ -86,13 +86,15 @@ def review_layout(request, hash): active_date = submitted_layout.active_date.replace(hour=23, minute=59, second=59) config = ( Configuration.objects.filter( - summary__station=station, timestamp__gte=FIRSTDATE, timestamp__lte=active_date + summary__station=station, + timestamp__gte=FIRSTDATE, + timestamp__lte=active_date, ).exclude(gps_latitude=0.0) ).latest() except Configuration.DoesNotExist: try: configs = Configuration.objects.filter(summary__station=station, timestamp__gte=active_date).exclude( - gps_latitude=0.0 + gps_latitude=0.0, ) config = configs.earliest() except Configuration.DoesNotExist: diff --git a/publicdb/status_display/templates/status_display/base_stations.html b/publicdb/status_display/templates/status_display/base_stations.html index 5c7bc6950..ad399363c 100644 --- a/publicdb/status_display/templates/status_display/base_stations.html +++ b/publicdb/status_display/templates/status_display/base_stations.html @@ -56,7 +56,7 @@

{% block header %}List of HiSPARC stations{% endblock %}

Recent data
- Up: Recieved data yesterday {% if statuscount %}({{ statuscount.up }}){% endif %}
+ Up: Received data yesterday {% if statuscount %}({{ statuscount.up }}){% endif %}
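
The legend fixed above counts stations by yesterday's data status. As an aside for reviewers: a tally like `statuscount` can be derived directly from the per-station statuses that `status_display/views.py` already computes via `DataStatus().get_status(station.number)`. The helper below is only an illustrative sketch, not code from this repository; the `count_station_statuses` name and the `Counter` aggregation are hypothetical.

```python
from collections import Counter

def count_station_statuses(stations, station_status):
    """Hypothetical helper: tally stations per status, e.g. {'up': 412, 'down': 23}.

    `station_status` is assumed to behave like the DataStatus object used in
    status_display/views.py, i.e. it exposes get_status(station_number).
    """
    return Counter(station_status.get_status(station.number) for station in stations)
```
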
diff --git a/publicdb/status_display/templates/status_display/help.html b/publicdb/status_display/templates/status_display/help.html index 6c8d38a2a..a3e423b87 100644 --- a/publicdb/status_display/templates/status_display/help.html +++ b/publicdb/status_display/templates/status_display/help.html @@ -116,7 +116,7 @@

Histogram

HiSPARC electronics

- This contols the PMT voltages and converts their signals + This controls the PMT voltages and converts their signals into digital values. It constantly samples the PMT. When signals cross the thresholds and the trigger conditions are met it will store the signals as an event. After combining this @@ -239,7 +239,7 @@

Pulseintegral

The pulseintegral is determined by taking the integral of the signal trace. The pulseintegral histogram shows how - often a value occured for a detector for all measured + often a value occurred for a detector for all measured events. This graph shows how this value is determined.
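
The paragraph above describes a straightforward computation: the pulseintegral is the sum of the baseline-subtracted samples of the signal trace. A minimal sketch, assuming negative-going PMT pulses stored as ADC counts and a known per-trace baseline (both assumptions; this calculation happens outside publicdb and is shown here only for illustration):

```python
import numpy as np

def pulse_integral(trace, baseline):
    """Integrate one PMT pulse: sum of baseline-subtracted samples, in ADC counts.

    Assumes pulses are negative-going, so the trace is flipped around the
    baseline; samples above the baseline are treated as noise and clipped to zero.
    """
    signal = baseline - np.asarray(trace, dtype=float)
    np.clip(signal, 0.0, None, out=signal)
    return signal.sum()

# Usage sketch: pulse_integral([200, 199, 150, 120, 170, 198, 200], baseline=200.0)
```
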

diff --git a/publicdb/status_display/templatetags/fix_data.py b/publicdb/status_display/templatetags/fix_data.py index d784c18d6..0cdfce622 100644 --- a/publicdb/status_display/templatetags/fix_data.py +++ b/publicdb/status_display/templatetags/fix_data.py @@ -11,7 +11,7 @@ def fix_histogram_data(value): """Append one value to end of data, to fix step histogram""" if len(value) > 1: - return value + [[value[-1][0] + (value[-1][0] - value[-2][0]), value[-1][1]]] + return [*value, [value[-1][0] + (value[-1][0] - value[-2][0]), value[-1][1]]] else: return value @@ -26,7 +26,7 @@ def fix_histogram_time(value): tomorrow = datetime.date.today() + datetime.timedelta(days=1) timestamp = calendar.timegm(tomorrow.timetuple()) - return value + [[timestamp, value[-1][1]]] + return [*value, [timestamp, value[-1][1]]] @register.filter diff --git a/publicdb/status_display/templatetags/fix_strings.py b/publicdb/status_display/templatetags/fix_strings.py index 3b1818f5c..162cc48f5 100644 --- a/publicdb/status_display/templatetags/fix_strings.py +++ b/publicdb/status_display/templatetags/fix_strings.py @@ -7,4 +7,4 @@ def remove_hyphens(value): """Remove hyphens from string""" - return value.replace("-", "") + return value.replace('-', '') diff --git a/publicdb/status_display/urls.py b/publicdb/status_display/urls.py index b5e45be83..288d2f37d 100644 --- a/publicdb/status_display/urls.py +++ b/publicdb/status_display/urls.py @@ -6,23 +6,23 @@ register_converter(converters.DateConverter, 'date') maps_patterns = [ - path('', views.stations_on_map, name="stations_on_map"), - path('/', views.stations_on_map, name="stations_on_map"), - path('//', views.stations_on_map, name="stations_on_map"), - path('///', views.stations_on_map, name="stations_on_map"), + path('', views.stations_on_map, name='stations_on_map'), + path('/', views.stations_on_map, name='stations_on_map'), + path('//', views.stations_on_map, name='stations_on_map'), + path('///', views.stations_on_map, name='stations_on_map'), ] network_patterns = [ - path('coincidences/', views.LatestNetworkSummaryRedirectView.as_view(), name="coincidences"), - path('coincidences//', views.NetworkSummaryDetailView.as_view(), name="coincidences"), + path('coincidences/', views.LatestNetworkSummaryRedirectView.as_view(), name='coincidences'), + path('coincidences//', views.NetworkSummaryDetailView.as_view(), name='coincidences'), ] station_patterns = [ - path('/', views.LatestSummaryRedirectView.as_view(), name="summary"), - path('//', views.SummaryDetailView.as_view(), name="summary"), - path('/status/', views.station_status, name="status"), - path('/config/', views.station_config, name="config"), - path('/latest/', views.station_latest, name="latest"), + path('/', views.LatestSummaryRedirectView.as_view(), name='summary'), + path('//', views.SummaryDetailView.as_view(), name='summary'), + path('/status/', views.station_status, name='status'), + path('/config/', views.station_config, name='config'), + path('/latest/', views.station_latest, name='latest'), ] source_patterns = [ @@ -97,26 +97,26 @@ path( 'detector_timing_offsets//', views.get_detector_timing_offsets_source, - name="detector_offsets", + name='detector_offsets', ), path( 'station_timing_offsets///', views.get_station_timing_offsets_source, - name="station_offsets", + name='station_offsets', ), ], ] app_name = 'status' urlpatterns = [ - path('stations/', views.stations, name="stations"), - path('stations_by_country/', views.stations_by_country, name="stations_by_country"), - path('stations_by_name/', 
views.stations_by_name, name="stations_by_name"), - path('stations_by_number/', views.stations_by_number, name="stations_by_number"), - path('stations_by_status/', views.stations_by_status, name="stations_by_status"), + path('stations/', views.stations, name='stations'), + path('stations_by_country/', views.stations_by_country, name='stations_by_country'), + path('stations_by_name/', views.stations_by_name, name='stations_by_name'), + path('stations_by_number/', views.stations_by_number, name='stations_by_number'), + path('stations_by_status/', views.stations_by_status, name='stations_by_status'), path('stations_on_map/', include((maps_patterns, 'map'))), path('network/', include((network_patterns, 'network'))), path('stations/', include((station_patterns, 'station'))), path('source/', include((source_patterns, 'source'))), - path('help/', views.help, name="help"), + path('help/', views.help, name='help'), ] diff --git a/publicdb/status_display/views.py b/publicdb/status_display/views.py index 4e9118f93..62335d9f4 100644 --- a/publicdb/status_display/views.py +++ b/publicdb/status_display/views.py @@ -55,7 +55,8 @@ def stations_by_country(request): test_stations = [] for station in Station.objects.exclude(pcs__type__slug='admin').select_related( - 'cluster__country', 'cluster__parent' + 'cluster__country', + 'cluster__parent', ): link = station in data_stations status = station_status.get_status(station.number) @@ -147,25 +148,34 @@ def stations_by_name(request): return render(request, 'status_display/stations_by_name.html', {'stations': stations, 'statuscount': statuscount}) +def get_focus(country=None, cluster=None, subcluster=None): + if not country: + return Cluster.objects.all().values_list('name', flat=True) + + country = get_object_or_404(Country, name=country) + + if not cluster: + return country.clusters.values_list('name', flat=True) + + cluster = get_object_or_404(country.clusters, name=cluster, parent=None) + + if not subcluster: + focus = [cluster.name] + focus.extend(cluster.subclusters.values_list('name', flat=True)) + return focus + + if cluster.name == subcluster: + return [cluster.name] + + subcluster = get_object_or_404(cluster.subclusters, name=subcluster) + + return [subcluster.name] + + def stations_on_map(request, country=None, cluster=None, subcluster=None): """Show all stations from a subcluster on a map""" - if not country: - focus = Cluster.objects.all().values_list('name', flat=True) - else: - country = get_object_or_404(Country, name=country) - if not cluster: - focus = country.clusters.values_list('name', flat=True) - else: - cluster = get_object_or_404(country.clusters, name=cluster, parent=None) - if not subcluster: - focus = [cluster.name] - focus.extend(cluster.subclusters.values_list('name', flat=True)) - else: - if cluster.name == subcluster: - focus = [cluster.name] - else: - focus = [get_object_or_404(cluster.subclusters, name=subcluster).name] + focus = get_focus(country, cluster, subcluster) data_stations = stations_with_data() station_status = DataStatus() @@ -216,7 +226,8 @@ def get_object(self, queryset=None): def get_queryset(self): return NetworkSummary.objects.with_coincidences().prefetch_related( - 'network_histograms', 'network_histograms__type' + 'network_histograms', + 'network_histograms__type', ) def get_context_data(self, **kwargs): @@ -241,7 +252,9 @@ def get_context_data(self, **kwargs): .count() ) histograms = DailyHistogram.objects.filter( - summary__date=date, summary__station__pcs__is_test=False, type__slug='eventtime' + 
summary__date=date, + summary__station__pcs__is_test=False, + type__slug='eventtime', ).distinct() number_of_events = sum(sum(histogram.values) for histogram in histograms) status = {'station_count': n_stations, 'n_events': number_of_events} @@ -256,7 +269,9 @@ def get_context_data(self, **kwargs): # data for singles plots singles_datasets = MultiDailyDataset.objects.filter( - summary__date=date, summary__station__pcs__is_test=False, type__slug='singlesratelow' + summary__date=date, + summary__station__pcs__is_test=False, + type__slug='singlesratelow', ).distinct() singles_plots = [(dataset.summary.station.number, plot_dataset(dataset)) for dataset in singles_datasets] singles_plots = sorted(singles_plots) @@ -273,7 +288,7 @@ def get_context_data(self, **kwargs): 'year_list': year_list, 'prev': prev, 'next': next, - } + }, ) return context @@ -373,12 +388,12 @@ def get_object(self, queryset=None): ) date = self.kwargs['date'] - station_numner = self.kwargs['station_number'] + station_number = self.kwargs['station_number'] try: obj = queryset.get( date=date, - station__number=station_numner, + station__number=station_number, ) except queryset.model.DoesNotExist: raise Http404 @@ -429,7 +444,7 @@ def get_context_data(self, **kwargs): # Data for the plots plots = {histogram.type.slug: plot_histogram(histogram) for histogram in self.object.histograms.all()} plots.update( - {histogram.type.slug: plot_histogram(histogram) for histogram in self.object.multi_histograms.all()} + {histogram.type.slug: plot_histogram(histogram) for histogram in self.object.multi_histograms.all()}, ) plots.update({dataset.type.slug: plot_dataset(dataset) for dataset in self.object.datasets.all()}) plots.update({dataset.type.slug: plot_dataset(dataset) for dataset in self.object.multi_datasets.all()}) @@ -451,7 +466,7 @@ def get_context_data(self, **kwargs): 'has_data': True, 'has_config': has_config, 'coincidences_found': coincidences_found, - } + }, ) return context @@ -646,10 +661,10 @@ def station_latest(request, station_number): { histogram.type.slug: plot_histogram(histogram) for histogram in summary.multi_histograms.filter(type__slug__in=['pulseheight', 'pulseintegral']) - } + }, ) plots.update( - {dataset.type.slug: plot_dataset(dataset) for dataset in summary.datasets.filter(type__slug='barometer')} + {dataset.type.slug: plot_dataset(dataset) for dataset in summary.datasets.filter(type__slug='barometer')}, ) # Show alternative @@ -709,7 +724,7 @@ def get_specific_histogram_source(request, station_number, date, type): }, content_type=MIME_TSV, ) - response['Content-Disposition'] = 'attachment; filename={type}-s{station_numer}-{date:%Y%-m%-d}.tsv' + response['Content-Disposition'] = f'attachment; filename={type}-s{station_number}-{date:%Y%-m%-d}.tsv' return response @@ -1008,7 +1023,8 @@ def get_config_source(station_number, type): if type == 'electronics': data = [ - (config.timestamp, config.primary, config.secondary, config.primary_fpga, config.secondary_fpga) for config in configs + (config.timestamp, config.primary, config.secondary, config.primary_fpga, config.secondary_fpga) + for config in configs ] else: data = list(configs.values_list(*fields)) diff --git a/publicdb/updates/__init__.py b/publicdb/updates/__init__.py old mode 100755 new mode 100644 diff --git a/publicdb/updates/models.py b/publicdb/updates/models.py old mode 100755 new mode 100644 index fc8c422f7..afdbbe53b --- a/publicdb/updates/models.py +++ b/publicdb/updates/models.py @@ -21,6 +21,12 @@ class AdminUpdate(models.Model): update =
models.FileField(upload_to=upload_queue) queue = models.ForeignKey(UpdateQueue, models.CASCADE, related_name='admin_updates') + class Meta: + verbose_name = 'Admin update' + verbose_name_plural = 'Admin updates' + unique_together = ('queue', 'version') + ordering = ['-version'] + def __str__(self): return f'Queue: {self.queue} - Admin Update v{self.version}' @@ -29,18 +35,18 @@ def save(self, *args, **kwargs): self.version = int(match.group(1)) super().save(*args, **kwargs) - class Meta: - verbose_name = 'Admin update' - verbose_name_plural = 'Admin updates' - unique_together = ('queue', 'version') - ordering = ['-version'] - class UserUpdate(models.Model): version = models.PositiveSmallIntegerField() update = models.FileField(upload_to=upload_queue) queue = models.ForeignKey(UpdateQueue, models.CASCADE, related_name='user_updates') + class Meta: + verbose_name = 'User update' + verbose_name_plural = 'User updates' + unique_together = ('queue', 'version') + ordering = ['-version'] + def __str__(self): return f'Queue: {self.queue} - User Update v{self.version}' @@ -52,18 +58,18 @@ def save(self, *args, **kwargs): except IntegrityError: return - class Meta: - verbose_name = 'User update' - verbose_name_plural = 'User updates' - unique_together = ('queue', 'version') - ordering = ['-version'] - class InstallerUpdate(models.Model): version = models.CharField(max_length=5) installer = models.FileField(upload_to=upload_queue) queue = models.ForeignKey(UpdateQueue, models.CASCADE, related_name='installer_updates') + class Meta: + verbose_name = 'Installer update' + verbose_name_plural = 'Installer updates' + unique_together = ('queue', 'version') + ordering = ['-version'] + def __str__(self): return f'Installer v{self.version}' @@ -71,9 +77,3 @@ def save(self, *args, **kwargs): match = re.search(r'_v(\d+\.\d+)', self.installer.name) self.version = match.group(1) super().save(*args, **kwargs) - - class Meta: - verbose_name = 'Installer update' - verbose_name_plural = 'Installer updates' - unique_together = ('queue', 'version') - ordering = ['-version'] diff --git a/publicdb/updates/urls.py b/publicdb/updates/urls.py index 0de8847b3..5a9920e77 100644 --- a/publicdb/updates/urls.py +++ b/publicdb/updates/urls.py @@ -4,8 +4,8 @@ app_name = 'updates' urlpatterns = [ - path('installer/latest/', views.get_latest_installer, name="latest"), - path('/check', views.update_check_querystring, name="check"), - path('/check//', views.update_check, name="check"), - path('/check///', views.update_check, name="check"), + path('installer/latest/', views.get_latest_installer, name='latest'), + path('/check', views.update_check_querystring, name='check'), + path('/check//', views.update_check, name='check'), + path('/check///', views.update_check, name='check'), ] diff --git a/publicdb/updates/views.py b/publicdb/updates/views.py old mode 100755 new mode 100644 index 3fd4a4cd0..23e8cf86d --- a/publicdb/updates/views.py +++ b/publicdb/updates/views.py @@ -22,7 +22,7 @@ def update_check_querystring(request, queue): admin_version = request.GET['admin_version'] user_version = request.GET['user_version'] except KeyError: - return http.HttpResponseBadRequest("Incomplete request.") + return http.HttpResponseBadRequest('Incomplete request.') return update_check(request, queue, admin_version, user_version) @@ -31,7 +31,7 @@ def update_check(request, queue, admin_version, user_version): try: queue = UpdateQueue.objects.get(slug=queue) except UpdateQueue.DoesNotExist: - return http.HttpResponseBadRequest("Queue does not 
exist.") + return http.HttpResponseBadRequest('Queue does not exist.') answer = {'mustUpdate': 0b00} diff --git a/publicdb/urls.py b/publicdb/urls.py index 506b74f6d..782eaa4b2 100644 --- a/publicdb/urls.py +++ b/publicdb/urls.py @@ -15,7 +15,7 @@ path('software-updates/', include('publicdb.updates.urls')), path('raw_data/', include('publicdb.raw_data.urls', namespace='raw_data')), path('data/', include('publicdb.raw_data.urls')), - path('config/datastore', create_datastore_config, name='datatore_config'), + path('config/datastore', create_datastore_config, name='datastore_config'), path('keys//', keys, name='keys'), path('admin/', admin.site.urls), ] diff --git a/pyproject.toml b/pyproject.toml index 89bd1d2a5..b65cfc80d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,114 @@ -[tool.black] -target-version = ['py310'] +[tool.ruff] line-length = 120 -skip-string-normalization = true -extend-exclude = ''' -( - /migrations/ - | /templates/ -) -''' +target-version = 'py310' +select = ['ALL'] # https://docs.astral.sh/ruff/rules/ +extend-exclude = [ + '*/migrations/*', + '*/templates/*', + 'scripts/*', + 'doc/conf.py', +] +ignore = [ + 'A003', # Model fields shadowing builtins + 'ANN', # Requires type annotation + 'ARG002', # Unused function arguments + 'B904', # Exception causes + 'D', # Docstrings + 'DJ001', # Do not allow nullable CharField + 'DTZ', # Timezone aware datetimes + 'EM', # String literals in exceptions + 'FBT002', # Allow default boolean positional argument + 'FIX', # Using FIXME/TODO/XXX comments + 'ICN001', # Common package import alias + 'PD', # Not using pandas + 'PERF203', # Allow try/except in loop + 'PLR0913', # Allow functions with many arguments + 'PT', # Requires pytest + 'RET', # Logic around return + 'RUF012', # Requires type annotation + 'S311', # Unsafe random generator for cryptographic purpose + 'SIM108', # Requires ternary operator instead of if-else + 'SLF001', # Using 'private' members, e.g. 
_v_name 'TD001', # Allow FIXME 'TD002', # Requires TODO author 'TD003', # Requires TODO issue link 'TID252', # Requires absolute imports 'TRY003', # Long messages for common exception classes 'TRY200', # Requires cause for raise in except 'TRY400', # Allow logger.error instead of logger.exception, message is clear and unique # FIXME: The following should be 'easy' to fix 'A001', # Stop shadowing builtins with variables 'A002', # Stop shadowing builtins with arguments 'ARG001', # Functions with unused arguments 'B905', # Explicit value for strict in zip 'DJ008', # Create __str__ for all models 'PLR0911', # Reduce number of return statements 'PLR0912', # Reduce number of branches in function 'PLR0915', # Reduce number of statements in function 'PLR2004', # Assign 'magic numbers' via constant variables 'PLW2901', # Loop variable overwritten in loop 'PTH', # Should move from os.path to pathlib.Path 'S308', # Check if mark_safe is safe here 'S324', # Consider using different hash method 'TD', # Incorrectly formatted TODO ] [tool.ruff.per-file-ignores] 'publicdb/settings_develop.py' = ['S105'] # Hard coded SECRET_KEY for development 'publicdb/settings_docker.py' = ['F405'] # Star imports # FIXME: The following should be fixable 'publicdb/inforecords/models.py' = ['S110', 'BLE001'] # Capture specific exceptions and handle it [tool.ruff.isort] lines-between-types = 1 section-order = [ 'future', 'standard-library', 'third-party', 'django', 'sapphire', 'first-party', 'tests', 'local-folder', ] [tool.ruff.isort.sections] django = ['django'] sapphire = ['sapphire'] tests = ['tests'] [tool.ruff.mccabe] max-complexity = 18 [tool.ruff.flake8-quotes] inline-quotes = 'single' [tool.ruff.format] quote-style = 'single' [tool.coverage.run] branch = true source = [ 'publicdb', 'tests', ] omit = [ '*/migrations/*', 'publicdb/settings.py', ] [tool.coverage.report] show_missing = true skip_empty = true skip_covered = true [tool.typos.files] extend-exclude = [ 'publicdb/default/static/scripts/', 'provisioning/roles/openvpn/files/dnsmasq.conf', 'provisioning/roles/openvpn/files/openvpn/', 'provisioning/roles/openvpn/files/openvpn/README.md', # Dutch ] [tool.typos.default.extend-words] sur = 'sur' diff --git a/requirements-dev.txt b/requirements-dev.txt index 283ab3159..2a5ec94fa 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,17 +1,12 @@ -black==22.8.0 +# lint & format +ruff==0.0.292 +typos==1.16.17 # unittests coverage Faker factory_boy -# flake8 tests -flake8 -flake8-assertive -flake8-bugbear -flake8-isort -pep8-naming - # Docs sphinx diff --git a/scripts/download_test_datastore.py b/scripts/download_test_datastore.py index 7b55cdc43..0387ef429 100644 --- a/scripts/download_test_datastore.py +++ b/scripts/download_test_datastore.py @@ -49,8 +49,7 @@ def test_for_datastore_directory(): print(f"Checking for datastore path at {datastore_path} ...", end=' ') if not os.path.exists(datastore_path): raise RuntimeError("Datastore path cannot be found!") - else: - print("Found.") + print("Found.") def fill_datastore(): diff --git a/setup.cfg b/setup.cfg index c66ba66d4..5a2cced0a 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,17 +39,3 @@ skip = ./scripts/ ./examples/ .ansible/ - -[coverage:run] -branch = true -source = - publicdb - tests -omit = - */migrations/* - publicdb/settings.py - -[coverage:report] -show_missing = true -skip_empty = true -skip_covered = true diff --git
a/tests/test_analysissessions/test_management.py b/tests/test_analysissessions/test_management.py index ac68a8553..7d9def196 100644 --- a/tests/test_analysissessions/test_management.py +++ b/tests/test_analysissessions/test_management.py @@ -15,23 +15,38 @@ def setUp(self): # Newly created self.new = SessionRequestFactory( - session_confirmed=False, session_pending=True, session_created=False, cluster=cluster + session_confirmed=False, + session_pending=True, + session_created=False, + cluster=cluster, ) # Email confirmed self.confirmed = SessionRequestFactory( - session_confirmed=True, session_pending=True, session_created=False, cluster=cluster + session_confirmed=True, + session_pending=True, + session_created=False, + cluster=cluster, ) # Selected for create session self.selected = SessionRequestFactory( - session_confirmed=False, session_pending=True, session_created=False, cluster=cluster + session_confirmed=False, + session_pending=True, + session_created=False, + cluster=cluster, ) # Create session started self.started = SessionRequestFactory( - session_confirmed=False, session_pending=False, session_created=False, cluster=cluster + session_confirmed=False, + session_pending=False, + session_created=False, + cluster=cluster, ) # Session created self.created = SessionRequestFactory( - session_confirmed=False, session_pending=False, session_created=True, cluster=cluster + session_confirmed=False, + session_pending=False, + session_created=True, + cluster=cluster, ) @patch('publicdb.analysissessions.models.SessionRequest.create_session') diff --git a/tests/test_analysissessions/test_models.py b/tests/test_analysissessions/test_models.py index de38d8583..2d0f19865 100644 --- a/tests/test_analysissessions/test_models.py +++ b/tests/test_analysissessions/test_models.py @@ -39,7 +39,8 @@ def test_not_yet_in_progress(self): def test_hash(self): self.assertEqual( - hashlib.md5(self.analysis_session.slug.encode('utf-8')).hexdigest(), self.analysis_session.hash + hashlib.md5(self.analysis_session.slug.encode('utf-8')).hexdigest(), + self.analysis_session.hash, ) def test_str(self): diff --git a/tests/test_api/test_views.py b/tests/test_api/test_views.py index 2f98cde95..955b3c02a 100644 --- a/tests/test_api/test_views.py +++ b/tests/test_api/test_views.py @@ -109,7 +109,7 @@ def test_subclusters(self): data = self.get_json(reverse('api:stations', kwargs=kwargs)) self.assertEqual([{'number': 1, 'name': self.station.name}], data) - # Non existant subcluster number + # Non existent subcluster number kwargs = {'subcluster_number': 1337} self.assert_not_found(reverse('api:stations', kwargs=kwargs)) @@ -121,7 +121,7 @@ def test_clusters(self): data = self.get_json(reverse('api:subclusters', kwargs=kwargs)) self.assertEqual([{'number': 0, 'name': self.station.cluster.name}], data) - # Non existant cluster number + # Non existent cluster number kwargs = {'cluster_number': 1337} self.assert_not_found(reverse('api:subclusters', kwargs=kwargs)) @@ -133,6 +133,6 @@ def test_countries(self): data = self.get_json(reverse('api:clusters', kwargs=kwargs)) self.assertEqual([{'number': 0, 'name': self.station.cluster.name}], data) - # Non existant country number + # Non existent country number kwargs = {'country_number': 1337} self.assert_not_found(reverse('api:clusters', kwargs=kwargs)) diff --git a/tests/test_histograms/test_checks.py b/tests/test_histograms/test_checks.py index 9df972eaf..6a72f9b8e 100644 --- a/tests/test_histograms/test_checks.py +++ b/tests/test_histograms/test_checks.py @@ -44,8 +44,8 @@ 
def test_check_for_new_events_and_update_flags_creates_summaries(self, mock_even 'singles_old': 86400, 'config': 1, 'events': 168, - } - } + }, + }, } self.setup_station() state = Mock(check_last_run=datetime(2004, 1, 1, 1, 0, 0)) @@ -80,8 +80,8 @@ def test_check_for_new_events_and_update_flags_updates_existing_summary(self, mo 'singles_old': 86400, 'config': 1, 'events': 168, - } - } + }, + }, } self.setup_station() summary = histograms_factories.SummaryFactory( diff --git a/tests/test_histograms/test_datastore.py b/tests/test_histograms/test_datastore.py index aba9c248a..7b24c163a 100644 --- a/tests/test_histograms/test_datastore.py +++ b/tests/test_histograms/test_datastore.py @@ -28,8 +28,8 @@ def test_check_for_new_events(self): 'singles_old': 86400, 'config': 1, 'events': 168, - } - } + }, + }, }, event_summary, ) diff --git a/tests/test_histograms/test_jobs.py b/tests/test_histograms/test_jobs.py index d9226a58a..0e7e00073 100644 --- a/tests/test_histograms/test_jobs.py +++ b/tests/test_histograms/test_jobs.py @@ -17,7 +17,6 @@ @override_settings(DATASTORE_PATH=join(dirname(__file__), '../data/datastore')) class TestJobs(LiveServerTestCase): - fixtures = ['initial_generator_state.json'] def setUp(self): @@ -77,7 +76,7 @@ def test_perform_update_tasks(self): validate_results(self, test_data, reference_path) rmtree(settings.ESD_PATH) - # Procesed configuration from data into database + # Processed configuration from data into database self.assertEqual(1, models.Configuration.objects.filter(summary=summary).count()) # Calculated detector offsets diff --git a/tests/test_histograms/test_models.py b/tests/test_histograms/test_models.py index a2ed8154c..cd12ee1f8 100644 --- a/tests/test_histograms/test_models.py +++ b/tests/test_histograms/test_models.py @@ -84,7 +84,8 @@ def setUp(self): self.station = inforecords_factories.StationFactory(number=9, cluster__number=0, cluster__country__number=0) self.summary = histograms_factories.SummaryFactory(station=self.station, date=date(2016, 1, 12)) self.configuration = histograms_factories.ConfigurationFactory( - summary=self.summary, timestamp=datetime.combine(self.summary.date, time(10, 11, 20)) + summary=self.summary, + timestamp=datetime.combine(self.summary.date, time(10, 11, 20)), ) def test_str(self): @@ -144,26 +145,30 @@ def setUp(self): def test_clean(self): offset = histograms_factories.StationTimingOffsetFactory.build( - ref_summary=self.ref_summary, summary=self.summary + ref_summary=self.ref_summary, + summary=self.summary, ) offset.clean() def test_clean_same_station(self): offset = histograms_factories.StationTimingOffsetFactory.build( - ref_summary=self.ref_summary, summary=self.ref_summary + ref_summary=self.ref_summary, + summary=self.ref_summary, ) with self.assertRaisesMessage(ValidationError, 'stations'): offset.clean() offset = histograms_factories.StationTimingOffsetFactory.build( - ref_summary=self.ref_summary, summary=self.ref_summary_date + ref_summary=self.ref_summary, + summary=self.ref_summary_date, ) with self.assertRaisesMessage(ValidationError, 'stations'): offset.clean() def test_clean_different_date(self): offset = histograms_factories.StationTimingOffsetFactory.build( - ref_summary=self.ref_summary, summary=self.summary_date + ref_summary=self.ref_summary, + summary=self.summary_date, ) with self.assertRaisesMessage(ValidationError, 'summary dates'): offset.clean() diff --git a/tests/test_inforecords/test_models.py b/tests/test_inforecords/test_models.py index 977df258a..92d95c856 100644 --- 
a/tests/test_inforecords/test_models.py +++ b/tests/test_inforecords/test_models.py @@ -22,7 +22,10 @@ def test_type_with_contact(self): def test_type_with_station(self): contact_info = inforecords_factories.ContactInformationFactory() inforecords_factories.StationFactory( - number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info + number=1, + cluster__number=0, + cluster__country__number=0, + contactinformation=contact_info, ) self.assertEqual('Station', contact_info.type) @@ -38,7 +41,10 @@ def test_contact_owner_with_contact(self): def test_contact_owner_with_station(self): contact_info = inforecords_factories.ContactInformationFactory() station = inforecords_factories.StationFactory( - number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info + number=1, + cluster__number=0, + cluster__country__number=0, + contactinformation=contact_info, ) self.assertEqual(str(station), contact_info.contact_owner) @@ -46,7 +52,10 @@ def test_contact_owner_with_contact_and_station(self): contact_info = inforecords_factories.ContactInformationFactory() contact = inforecords_factories.ContactFactory(contactinformation=contact_info) station = inforecords_factories.StationFactory( - number=1, cluster__number=0, cluster__country__number=0, contactinformation=contact_info + number=1, + cluster__number=0, + cluster__country__number=0, + contactinformation=contact_info, ) self.assertEqual(f'{contact}, {station}', contact_info.contact_owner) @@ -59,7 +68,8 @@ def test_contact_owner_with_multiple_contacts(self): def test_str(self): contact_info = inforecords_factories.ContactInformationFactory() self.assertEqual( - ' '.join([contact_info.city, contact_info.street_1, contact_info.email_work]), str(contact_info) + f'{contact_info.city} {contact_info.street_1} {contact_info.email_work}', + str(contact_info), ) diff --git a/tests/test_status_display/test_views.py b/tests/test_status_display/test_views.py index c3e7ae4e9..b87d9efc7 100644 --- a/tests/test_status_display/test_views.py +++ b/tests/test_status_display/test_views.py @@ -185,7 +185,7 @@ def test_configs(self): with self.subTest(config_type=config_type): response = self.get_tsv(reverse(f'status:source:{config_type}', kwargs=kwargs)) expected_context = { - 'station_number': self.station.number + 'station_number': self.station.number, # data structures are a bit more work to check. } self.assert_context_contains(expected_context, response.context)
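
A closing note on the trailing commas added throughout these tests: with the ruff configuration introduced in pyproject.toml above (`select = ['ALL']` plus the single-quote settings), a magic trailing comma keeps a call expanded one argument per line, while omitting it allows the formatter to collapse the call when it fits within `line-length = 120`. A small illustration, echoing a call from tests/test_histograms/test_models.py:

```python
# The magic trailing comma pins the exploded layout:
offset = histograms_factories.StationTimingOffsetFactory.build(
    ref_summary=self.ref_summary,
    summary=self.summary,
)

# Without the trailing comma the formatter may join the arguments onto a
# single line, since the call fits within line-length = 120:
offset = histograms_factories.StationTimingOffsetFactory.build(ref_summary=self.ref_summary, summary=self.summary)
```
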