diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 7b039315d..743f468c3 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -1,2 +1,4 @@
# pyupgrade --py36-plus promgen/**/*.py
4d53038426aedf2abf337a2876d0d6ceccefc09b
+# black
+a81731039c118543398c90869e608dde0acaf32c
diff --git a/promgen/__init__.py b/promgen/__init__.py
index 22cf5fb1a..ecf999eaf 100644
--- a/promgen/__init__.py
+++ b/promgen/__init__.py
@@ -40,4 +40,3 @@
)
except ImportError:
pass
-
diff --git a/promgen/admin.py b/promgen/admin.py
index f18a5cc57..6c0681b25 100644
--- a/promgen/admin.py
+++ b/promgen/admin.py
@@ -19,7 +19,7 @@ class FilterInline(admin.TabularInline):
@admin.register(models.Host)
class HostAdmin(admin.ModelAdmin):
- list_display = ('name', 'farm')
+ list_display = ("name", "farm")
@admin.register(models.Shard)
@@ -36,54 +36,54 @@ class ShardAdmin(admin.ModelAdmin):
@admin.register(models.Service)
class ServiceAdmin(admin.ModelAdmin):
- list_display = ('name', 'owner')
- list_filter = (('owner', admin.RelatedOnlyFieldListFilter),)
- list_select_related = ('owner',)
+ list_display = ("name", "owner")
+ list_filter = (("owner", admin.RelatedOnlyFieldListFilter),)
+ list_select_related = ("owner",)
@admin.register(models.Project)
class ProjectAdmin(admin.ModelAdmin):
- list_display = ('name', 'shard', 'service', 'farm', 'owner')
- list_select_related = ('service', 'farm', 'shard', 'owner')
- list_filter = ('shard', ('owner', admin.RelatedOnlyFieldListFilter),)
+ list_display = ("name", "shard", "service", "farm", "owner")
+ list_select_related = ("service", "farm", "shard", "owner")
+ list_filter = ("shard", ("owner", admin.RelatedOnlyFieldListFilter))
class SenderForm(forms.ModelForm):
- sender = forms.ChoiceField(choices=[
- (entry.module_name, entry.module_name) for entry in plugins.notifications()
- ])
+ sender = forms.ChoiceField(
+ choices=[(entry.module_name, entry.module_name) for entry in plugins.notifications()]
+ )
class Meta:
model = models.Sender
- exclude = ['content_object']
+ exclude = ["content_object"]
@admin.register(models.Sender)
class SenderAdmin(admin.ModelAdmin):
- list_display = ('content_object', 'content_type', 'sender', 'show_value', 'owner')
+ list_display = ("content_object", "content_type", "sender", "show_value", "owner")
form = SenderForm
- list_filter = ('sender', 'content_type')
- list_select_related = ('content_type',)
+ list_filter = ("sender", "content_type")
+ list_select_related = ("content_type",)
inlines = [FilterInline]
@admin.register(models.Farm)
class FarmAdmin(admin.ModelAdmin):
- list_display = ('name', 'source')
- list_filter = ('source',)
+ list_display = ("name", "source")
+ list_filter = ("source",)
@admin.register(models.Exporter)
class ExporterAdmin(admin.ModelAdmin):
- list_display = ('job', 'port', 'path', 'project', 'enabled')
- list_filter = ('job', 'port',)
- readonly_fields = ('project',)
+ list_display = ("job", "port", "path", "project", "enabled")
+ list_filter = ("job", "port")
+ readonly_fields = ("project",)
@admin.register(models.DefaultExporter)
class DefaultExporterAdmin(admin.ModelAdmin):
- list_display = ('job', 'port', 'path')
- list_filter = ('job', 'port')
+ list_display = ("job", "port", "path")
+ list_filter = ("job", "port")
@admin.register(models.Probe)
@@ -114,14 +114,14 @@ class RuleAnnotationInline(admin.TabularInline):
@admin.register(models.Rule)
class RuleAdmin(admin.ModelAdmin):
- list_display = ('name', 'clause', 'duration', 'content_object')
- list_filter = ('duration',)
- list_select_related = ('content_type',)
+ list_display = ("name", "clause", "duration", "content_object")
+ list_filter = ("duration",)
+ list_select_related = ("content_type",)
inlines = [RuleLabelInline, RuleAnnotationInline]
def get_queryset(self, request):
qs = super().get_queryset(request)
- return qs.prefetch_related('content_object',)
+ return qs.prefetch_related("content_object")
@admin.register(models.Prometheus)
@@ -145,9 +145,9 @@ def __getattr__(self, name):
def __get_label(label):
def __wrapped(instance):
try:
- return instance.json['commonLabels'][label]
+ return instance.json["commonLabels"][label]
except KeyError:
- return ''
+ return ""
# We give the wrapped function the same description as
# our label so that it shows up right in the admin panel
@@ -157,24 +157,24 @@ def __wrapped(instance):
if name in self.list_display:
return __get_label(name)
- date_hierarchy = 'created'
+ date_hierarchy = "created"
list_display = (
- 'created',
- 'datasource',
- 'alertname',
- 'service',
- 'project',
- 'severity',
- 'job',
+ "created",
+ "datasource",
+ "alertname",
+ "service",
+ "project",
+ "severity",
+ "job",
)
- fields = ('created', '_json')
- readonly_fields = ('created', '_json')
- ordering = ('-created',)
+ fields = ("created", "_json")
+ readonly_fields = ("created", "_json")
+ ordering = ("-created",)
@admin.display(description="json")
def _json(self, instance):
-        return format_html('<pre>{}</pre>', json.dumps(instance.json, indent=2))
+        return format_html("<pre>{}</pre>", json.dumps(instance.json, indent=2))
def has_add_permission(self, request, obj=None):
return False
diff --git a/promgen/apps.py b/promgen/apps.py
index 6eb367de1..745913e76 100644
--- a/promgen/apps.py
+++ b/promgen/apps.py
@@ -14,28 +14,29 @@ def default_admin(sender, interactive, **kwargs):
# Have to import here to ensure that the apps are already registered and
# we get a real model instead of __fake__.User
from django.contrib.auth.models import User
+
if User.objects.filter(is_superuser=True).count() == 0:
if interactive:
- print(' Adding default admin user')
+ print(" Adding default admin user")
User.objects.create_user(
- username='admin',
- password='admin',
+ username="admin",
+ password="admin",
is_staff=True,
is_active=True,
is_superuser=True,
)
if interactive:
- print('BE SURE TO UPDATE THE PASSWORD!!!')
+ print("BE SURE TO UPDATE THE PASSWORD!!!")
def default_shard(sender, apps, interactive, **kwargs):
- Shard = apps.get_model('promgen.Shard')
+ Shard = apps.get_model("promgen.Shard")
if Shard.objects.count() == 0:
if interactive:
- print(' Adding default shard')
+ print(" Adding default shard")
Shard.objects.create(
- name='Default',
- url='http://prometheus.example.com',
+ name="Default",
+ url="http://prometheus.example.com",
proxy=True,
enabled=True,
)
diff --git a/promgen/discovery/__init__.py b/promgen/discovery/__init__.py
index 707648096..bc8cdf315 100644
--- a/promgen/discovery/__init__.py
+++ b/promgen/discovery/__init__.py
@@ -2,25 +2,26 @@
# These sources are released under the terms of the MIT license: see LICENSE
-FARM_DEFAULT = 'promgen'
+FARM_DEFAULT = "promgen"
class DiscoveryBase:
remote = True
- '''
+ """
Basic discovery plugin base
Child classes should implement both fetch and farm methods
- '''
+ """
+
def fetch(self, farm):
- '''
+ """
Return list of hosts for farm
- '''
+ """
raise NotImplemented()
def farms(self):
- '''
+ """
Return a list of farm names
- '''
+ """
raise NotImplemented()
diff --git a/promgen/discovery/default.py b/promgen/discovery/default.py
index 64dd700a6..7e2256c05 100644
--- a/promgen/discovery/default.py
+++ b/promgen/discovery/default.py
@@ -12,21 +12,21 @@
class DiscoveryPromgen(discovery.DiscoveryBase):
- '''Promgen local database discovery plugin
+ """Promgen local database discovery plugin
This is the default discovery plugin for farms and hosts stored locally in
promgen's database. They are queried directly from Django's ORM
- '''
+ """
remote = False
def fetch(self, farm_name):
- '''Fetch list of hosts for a farm from the local database'''
+ """Fetch list of hosts for a farm from the local database"""
farm = get_object_or_404(models.Farm, name=farm_name)
for host in models.Host.objects.filter(farm=farm):
yield host.name
def farms(self):
- '''Fetch farms from local database'''
+ """Fetch farms from local database"""
for farm in models.Farm.objects.filter(source=discovery.FARM_DEFAULT):
yield farm.name
diff --git a/promgen/filters.py b/promgen/filters.py
index 48a2d9fe8..39354798f 100644
--- a/promgen/filters.py
+++ b/promgen/filters.py
@@ -11,17 +11,11 @@ class ServiceFilter(django_filters.rest_framework.FilterSet):
class ProjectFilter(django_filters.rest_framework.FilterSet):
name = django_filters.CharFilter(field_name="name", lookup_expr="contains")
- service = django_filters.CharFilter(
- field_name="service__name", lookup_expr="contains"
- )
- shard = django_filters.CharFilter(
- field_name="shard__name", lookup_expr="contains"
- )
+ service = django_filters.CharFilter(field_name="service__name", lookup_expr="contains")
+ shard = django_filters.CharFilter(field_name="shard__name", lookup_expr="contains")
class RuleFilter(django_filters.rest_framework.FilterSet):
name = django_filters.CharFilter(field_name="name", lookup_expr="contains")
- parent = django_filters.CharFilter(
- field_name="parent__name", lookup_expr="contains"
- )
+ parent = django_filters.CharFilter(field_name="parent__name", lookup_expr="contains")
enabled = django_filters.BooleanFilter(field_name="enabled")
diff --git a/promgen/forms.py b/promgen/forms.py
index ac149e83d..97908548d 100644
--- a/promgen/forms.py
+++ b/promgen/forms.py
@@ -13,28 +13,28 @@
class ImportConfigForm(forms.Form):
def _choices():
- return [('', '')] + sorted((shard.name, 'Import into: ' + shard.name) for shard in models.Shard.objects.all())
+ return [("", "")] + sorted(
+ (shard.name, "Import into: " + shard.name) for shard in models.Shard.objects.all()
+ )
config = forms.CharField(
- widget=forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
- required=False)
- url = forms.CharField(
- widget=forms.TextInput(attrs={'class': 'form-control'}),
- required=False)
+ widget=forms.Textarea(attrs={"rows": 5, "class": "form-control"}), required=False
+ )
+ url = forms.CharField(widget=forms.TextInput(attrs={"class": "form-control"}), required=False)
file_field = forms.FileField(
- widget=forms.FileInput(attrs={'class': 'form-control'}),
- required=False)
+ widget=forms.FileInput(attrs={"class": "form-control"}), required=False
+ )
shard = forms.ChoiceField(choices=_choices, required=False)
class ImportRuleForm(forms.Form):
rules = forms.CharField(
- widget=forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
- required=False)
+ widget=forms.Textarea(attrs={"rows": 5, "class": "form-control"}), required=False
+ )
file_field = forms.FileField(
- widget=forms.FileInput(attrs={'class': 'form-control'}),
- required=False)
+ widget=forms.FileInput(attrs={"class": "form-control"}), required=False
+ )
def clean(self):
if any(self.cleaned_data.values()):
@@ -51,28 +51,28 @@ class SilenceForm(forms.Form):
createdBy = forms.CharField(required=False)
def clean_comment(self):
- if self.cleaned_data['comment']:
- return self.cleaned_data['comment']
+ if self.cleaned_data["comment"]:
+ return self.cleaned_data["comment"]
return "Silenced from Promgen"
def clean_createdBy(self):
- if self.cleaned_data['createdBy']:
- return self.cleaned_data['createdBy']
+ if self.cleaned_data["createdBy"]:
+ return self.cleaned_data["createdBy"]
return "Promgen"
def clean(self):
- duration = self.data.get('duration')
- start = self.data.get('startsAt')
- stop = self.data.get('endsAt')
+ duration = self.data.get("duration")
+ start = self.data.get("startsAt")
+ stop = self.data.get("endsAt")
if duration:
# No further validation is required if only duration is set
return
if not all([start, stop]):
- raise forms.ValidationError('Both start and end are required')
+ raise forms.ValidationError("Both start and end are required")
elif parser.parse(start) > parser.parse(stop):
- raise forms.ValidationError('Start time and end time is mismatch')
+ raise forms.ValidationError("Start time and end time is mismatch")
class SilenceExpireForm(forms.Form):
@@ -97,7 +97,7 @@ class ServiceRegister(forms.ModelForm):
class Meta:
model = models.Service
# shard is determined by the pk in the service register url
- exclude = ['shard']
+ exclude = ["shard"]
class ServiceUpdate(forms.ModelForm):
@@ -119,20 +119,20 @@ class Meta:
class AlertRuleForm(forms.ModelForm):
class Meta:
model = models.Rule
- exclude = ['parent', 'content_type', 'object_id']
+ exclude = ["parent", "content_type", "object_id"]
widgets = {
- 'name': forms.TextInput(attrs={'class': 'form-control'}),
- 'duration': forms.TextInput(attrs={'class': 'form-control'}),
- 'clause': forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
- 'enabled': forms.CheckboxInput(attrs={'data-toggle': 'toggle', 'data-size': 'mini'}),
- 'description': forms.Textarea(attrs={'rows': 5, 'class': 'form-control'}),
+ "name": forms.TextInput(attrs={"class": "form-control"}),
+ "duration": forms.TextInput(attrs={"class": "form-control"}),
+ "clause": forms.Textarea(attrs={"rows": 5, "class": "form-control"}),
+ "enabled": forms.CheckboxInput(attrs={"data-toggle": "toggle", "data-size": "mini"}),
+ "description": forms.Textarea(attrs={"rows": 5, "class": "form-control"}),
}
def clean(self):
# Check our cleaned data then let Prometheus check our rule
super().clean()
rule = models.Rule(**self.cleaned_data)
-
+
# Make sure we pull in our labels and annotations for
# testing if needed
# See django docs on cached_property
@@ -143,30 +143,30 @@ def clean(self):
class RuleCopyForm(forms.Form):
- content_type = forms.ChoiceField(choices=[(x, x) for x in ['service', 'project']])
+ content_type = forms.ChoiceField(choices=[(x, x) for x in ["service", "project"]])
object_id = forms.IntegerField()
class FarmForm(forms.ModelForm):
class Meta:
model = models.Farm
- exclude = ['source']
+ exclude = ["source"]
class SenderForm(forms.ModelForm):
- sender = forms.ChoiceField(choices=[
- (entry.module_name, entry.module_name) for entry in plugins.notifications()
- ])
+ sender = forms.ChoiceField(
+ choices=[(entry.module_name, entry.module_name) for entry in plugins.notifications()]
+ )
class Meta:
model = models.Sender
- exclude = ['content_type', 'object_id', 'owner', 'enabled']
+ exclude = ["content_type", "object_id", "owner", "enabled"]
class NotifierUpdate(forms.ModelForm):
class Meta:
model = models.Sender
- exclude = ['value']
+ exclude = ["value"]
class HostForm(forms.Form):
diff --git a/promgen/manage.py b/promgen/manage.py
index de78862b2..f572fc985 100644
--- a/promgen/manage.py
+++ b/promgen/manage.py
@@ -19,5 +19,6 @@ def main():
raise
execute_from_command_line(sys.argv)
+
if __name__ == "__main__":
main()
diff --git a/promgen/management/commands/alerts-index.py b/promgen/management/commands/alerts-index.py
index 27a0235f5..eae7cc358 100644
--- a/promgen/management/commands/alerts-index.py
+++ b/promgen/management/commands/alerts-index.py
@@ -1,9 +1,11 @@
# Copyright (c) 2020 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
+import time
+
from django.core.management.base import BaseCommand
+
from promgen import models, tasks
-import time
class Command(BaseCommand):
diff --git a/promgen/management/commands/alerts-prune.py b/promgen/management/commands/alerts-prune.py
index 197cc75ee..6c63e2421 100644
--- a/promgen/management/commands/alerts-prune.py
+++ b/promgen/management/commands/alerts-prune.py
@@ -1,34 +1,33 @@
# Copyright (c) 2018 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
-'''
+"""
Prune old alerts from Promgen's Database
Simple command to prune old alerts from Promgen's Database
based on days.
Use without arguments as dryrun or --force to execute
-'''
+"""
import datetime
from django.core.management.base import BaseCommand
from django.utils import timezone
+
from promgen import models
class Command(BaseCommand):
- help = __doc__.strip().split('\n')[0]
+ help = __doc__.strip().split("\n")[0]
def add_arguments(self, parser):
+ parser.add_argument("--days", type=int, default=30, help="Days of alerts to delete")
parser.add_argument(
- '--days', type=int, default=30, help='Days of alerts to delete'
- )
- parser.add_argument(
- '--force',
- dest='dryrun',
- action='store_false',
- help='Defaults to dry run. Use to execute operation',
+ "--force",
+ dest="dryrun",
+ action="store_false",
+ help="Defaults to dry run. Use to execute operation",
)
def success(self, message, *args):
@@ -38,15 +37,15 @@ def handle(self, days, dryrun, verbosity, **options):
cutoff = timezone.now() - datetime.timedelta(days=days)
if verbosity > 1:
- self.success('Removing alerts before %s (%d days)', cutoff, days)
+ self.success("Removing alerts before %s (%d days)", cutoff, days)
alerts = models.Alert.objects.filter(created__lt=cutoff)
if dryrun:
- self.success('Would have removed %d alerts', alerts.count())
+ self.success("Would have removed %d alerts", alerts.count())
return
count, objects = alerts.delete()
if verbosity > 1:
- self.success('Removed %d Alerts', count)
+ self.success("Removed %d Alerts", count)
diff --git a/promgen/management/commands/bootstrap.py b/promgen/management/commands/bootstrap.py
index 2ddb9e792..d0794251c 100644
--- a/promgen/management/commands/bootstrap.py
+++ b/promgen/management/commands/bootstrap.py
@@ -9,9 +9,7 @@
from promgen import PROMGEN_CONFIG_DIR, PROMGEN_CONFIG_FILE
-PROMGEN_CONFIG_DEFAULT = (
- settings.BASE_DIR / "promgen" / "tests" / "examples" / "promgen.yml"
-)
+PROMGEN_CONFIG_DEFAULT = settings.BASE_DIR / "promgen" / "tests" / "examples" / "promgen.yml"
class Command(BaseCommand):
diff --git a/promgen/management/commands/export-rules.py b/promgen/management/commands/export-rules.py
index fb1c8f2f4..67feda97c 100644
--- a/promgen/management/commands/export-rules.py
+++ b/promgen/management/commands/export-rules.py
@@ -4,6 +4,7 @@
import logging
from django.core.management.base import BaseCommand
+
from promgen import prometheus, tasks
logger = logging.getLogger(__name__)
@@ -11,18 +12,22 @@
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('--reload', action='store_true', help='Trigger Prometheus Reload')
parser.add_argument(
- 'out',
- nargs='?',
- help='Optionally specify an output file to use an atomic write operation'
+ "--reload",
+ action="store_true",
+ help="Trigger Prometheus Reload",
+ )
+ parser.add_argument(
+ "out",
+ nargs="?",
+ help="Optionally specify an output file to use an atomic write operation",
)
def handle(self, **kwargs):
- if kwargs['out']:
+ if kwargs["out"]:
tasks.write_rules(
- path=kwargs['out'],
- reload=kwargs['reload'],
+ path=kwargs["out"],
+ reload=kwargs["reload"],
)
else:
# Since we're already working with utf8 encoded data, we can skip
diff --git a/promgen/management/commands/export-targets.py b/promgen/management/commands/export-targets.py
index c485df65e..8c9efc873 100644
--- a/promgen/management/commands/export-targets.py
+++ b/promgen/management/commands/export-targets.py
@@ -4,6 +4,7 @@
import logging
from django.core.management.base import BaseCommand
+
from promgen import prometheus, tasks
logger = logging.getLogger(__name__)
@@ -11,16 +12,25 @@
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('--reload', action='store_true',
- help='Trigger Prometheus Reload')
- parser.add_argument('--mode', type=lambda x: int(x, 8), default=0o644,
- help='Set mode for output file (default 644)')
- parser.add_argument('out', nargs='?',
- help='Optionally specify an output file to use an atomic write operation'
+ parser.add_argument(
+ "--reload",
+ action="store_true",
+ help="Trigger Prometheus Reload",
+ )
+ parser.add_argument(
+ "--mode",
+ type=lambda x: int(x, 8),
+ default=0o644,
+ help="Set mode for output file (default 644)",
+ )
+ parser.add_argument(
+ "out",
+ nargs="?",
+ help="Optionally specify an output file to use an atomic write operation",
)
def handle(self, **kwargs):
- if kwargs['out']:
- tasks.write_config(kwargs['out'], kwargs['reload'], kwargs['mode'])
+ if kwargs["out"]:
+ tasks.write_config(kwargs["out"], kwargs["reload"], kwargs["mode"])
else:
self.stdout.write(prometheus.render_config())
diff --git a/promgen/management/commands/export-urls.py b/promgen/management/commands/export-urls.py
index 84d7a41a5..c757b1649 100644
--- a/promgen/management/commands/export-urls.py
+++ b/promgen/management/commands/export-urls.py
@@ -4,6 +4,7 @@
import logging
from django.core.management.base import BaseCommand
+
from promgen import models, prometheus, tasks
logger = logging.getLogger(__name__)
@@ -11,16 +12,20 @@
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('--reload', action='store_true', help='Trigger Prometheus Reload')
parser.add_argument(
- 'out',
- nargs='?',
- help='Optionally specify an output file to use an atomic write operation'
+ "--reload",
+ action="store_true",
+ help="Trigger Prometheus Reload",
+ )
+ parser.add_argument(
+ "out",
+ nargs="?",
+ help="Optionally specify an output file to use an atomic write operation",
)
def handle(self, **kwargs):
prometheus.check_rules(models.Rule.objects.all())
- if kwargs['out']:
- tasks.write_rules(kwargs['out'], kwargs['reload'])
+ if kwargs["out"]:
+ tasks.write_rules(kwargs["out"], kwargs["reload"])
else:
self.stdout.write(prometheus.render_urls())
diff --git a/promgen/management/commands/import-jobs.py b/promgen/management/commands/import-jobs.py
index 347860703..4b43d2ed6 100644
--- a/promgen/management/commands/import-jobs.py
+++ b/promgen/management/commands/import-jobs.py
@@ -5,30 +5,34 @@
from django.core.management.base import BaseCommand
-from promgen.signals import trigger_write_config, trigger_write_rules, trigger_write_urls
from promgen import prometheus, util
+from promgen.signals import (
+ trigger_write_config,
+ trigger_write_rules,
+ trigger_write_urls,
+)
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('target_file')
- parser.add_argument('replace_shard', nargs='?')
+ parser.add_argument("target_file")
+ parser.add_argument("replace_shard", nargs="?")
def handle(self, target_file, replace_shard, **kwargs):
- if target_file.startswith('http'):
+ if target_file.startswith("http"):
config = util.get(target_file).json()
else:
- config = json.load(open(target_file), encoding='utf8')
+ config = json.load(open(target_file), encoding="utf8")
imported, skipped = prometheus.import_config(config, replace_shard)
if imported:
counters = {key: len(imported[key]) for key in imported}
- self.stdout.write(f'Imported {counters}')
+ self.stdout.write(f"Imported {counters}")
if skipped:
counters = {key: len(skipped[key]) for key in skipped}
- self.stdout.write(f'Skipped {counters}')
+ self.stdout.write(f"Skipped {counters}")
trigger_write_config.send(self, force=True)
trigger_write_rules.send(self, force=True)
diff --git a/promgen/management/commands/import-probe.py b/promgen/management/commands/import-probe.py
index 1a17c34f6..e53d399ba 100644
--- a/promgen/management/commands/import-probe.py
+++ b/promgen/management/commands/import-probe.py
@@ -7,11 +7,11 @@
import yaml
-from promgen import models
-
from django.core import exceptions
from django.core.management.base import BaseCommand
+from promgen import models
+
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)
diff --git a/promgen/management/commands/register-exporter.py b/promgen/management/commands/register-exporter.py
index 27cb4ef4e..1846791bb 100644
--- a/promgen/management/commands/register-exporter.py
+++ b/promgen/management/commands/register-exporter.py
@@ -7,23 +7,20 @@
class Command(BaseCommand):
- help = '''Register default exporter from the commandline'''
+ help = """Register default exporter from the commandline"""
# This is intended to be used from a configuration management tool
# where there may already be a port mapping that we want to import
# into Promgen
def add_arguments(self, parser):
- parser.add_argument('job')
- parser.add_argument('port', type=int)
- parser.add_argument('path', nargs='?', default='')
+ parser.add_argument("job")
+ parser.add_argument("port", type=int)
+ parser.add_argument("path", nargs="?", default="")
def handle(self, job, port, path, **kargs):
- exporter, created = DefaultExporter.objects.get_or_create(
- job=job, port=port, path=path
- )
+ exporter, created = DefaultExporter.objects.get_or_create(job=job, port=port, path=path)
if created:
- self.stdout.write(f'Created {exporter}')
+ self.stdout.write(f"Created {exporter}")
else:
- self.stdout.write(f'Already exists {exporter}')
-
+ self.stdout.write(f"Already exists {exporter}")
diff --git a/promgen/management/commands/register-host.py b/promgen/management/commands/register-host.py
index 766940057..3e24b37bd 100644
--- a/promgen/management/commands/register-host.py
+++ b/promgen/management/commands/register-host.py
@@ -10,15 +10,17 @@ class Command(BaseCommand):
def add_arguments(self, parser):
help_text = util.help_text(models.Host)
- parser.add_argument("project", type=util.cast(models.Project), help="Existing Project")
+ parser.add_argument(
+ "project",
+ type=util.cast(models.Project),
+ help="Existing Project",
+ )
parser.add_argument("host", help=help_text("name"))
# parser.add_argument("--enabled", default=False, action="store_true", help=help_text('enabled'))
def handle(self, project, **kwargs):
if project.farm is None:
- raise CommandError(
- "Project currently not associated with a farm :%s" % project
- )
+ raise CommandError("Project currently not associated with a farm :%s" % project)
host, created = project.farm.host_set.get_or_create(name=kwargs["host"])
if created:
diff --git a/promgen/management/commands/register-job.py b/promgen/management/commands/register-job.py
index f22f7b78f..ded618fcf 100644
--- a/promgen/management/commands/register-job.py
+++ b/promgen/management/commands/register-job.py
@@ -10,11 +10,20 @@ class Command(BaseCommand):
def add_arguments(self, parser):
help_text = util.help_text(models.Exporter)
- parser.add_argument("project", type=util.cast(models.Project), help="Existing Project")
+ parser.add_argument(
+ "project",
+ type=util.cast(models.Project),
+ help="Existing Project",
+ )
parser.add_argument("job", help=help_text("job"))
parser.add_argument("port", type=int, help=help_text("port"))
parser.add_argument("path", default="", nargs="?", help=help_text("path"))
- parser.add_argument("--enabled", default=False, action="store_true", help=help_text("enabled"))
+ parser.add_argument(
+ "--enabled",
+ default=False,
+ action="store_true",
+ help=help_text("enabled"),
+ )
def handle(self, project, **kwargs):
job, created = models.Exporter.objects.get_or_create(
diff --git a/promgen/management/commands/register-server.py b/promgen/management/commands/register-server.py
index 551556121..f63230454 100644
--- a/promgen/management/commands/register-server.py
+++ b/promgen/management/commands/register-server.py
@@ -3,29 +3,27 @@
from django.core.management.base import BaseCommand
-from promgen.models import Shard, Prometheus
+from promgen.models import Prometheus, Shard
class Command(BaseCommand):
def add_arguments(self, parser):
- parser.add_argument('shard')
- parser.add_argument('host')
- parser.add_argument('port', type=int)
+ parser.add_argument("shard")
+ parser.add_argument("host")
+ parser.add_argument("port", type=int)
def handle(self, shard, host, port, **kwargs):
shard, created = Shard.objects.get_or_create(name=shard)
if created:
- self.stdout.write('Created shard ' + shard.name)
+ self.stdout.write("Created shard " + shard.name)
server, created = Prometheus.objects.get_or_create(
- host=host,
- port=port,
- defaults={'shard': shard}
+ host=host, port=port, defaults={"shard": shard}
)
if created:
- self.stdout.write(f'Created {server} on {shard.name}')
+ self.stdout.write(f"Created {server} on {shard.name}")
else:
old_shard = server.shard
server.shard = shard
server.save()
- self.stdout.write(f'Moved {server} from {old_shard.name} to {shard.name}')
+ self.stdout.write(f"Moved {server} from {old_shard.name} to {shard.name}")
diff --git a/promgen/middleware.py b/promgen/middleware.py
index 3d10ff139..41c5ae640 100644
--- a/promgen/middleware.py
+++ b/promgen/middleware.py
@@ -1,7 +1,7 @@
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
-'''
+"""
Promgen middleware
The middleware ensures three main things
@@ -15,7 +15,7 @@
3. Since many different actions can trigger a write of the target.json or rules
files, we need to handle some deduplication. This is handled by using the django
caching system to set a key and then triggering the actual event from middleware
-'''
+"""
import logging
from threading import local
@@ -24,8 +24,7 @@
from django.db.models import prefetch_related_objects
from promgen import models
-from promgen.signals import (trigger_write_config, trigger_write_rules,
- trigger_write_urls)
+from promgen.signals import trigger_write_config, trigger_write_rules, trigger_write_urls
logger = logging.getLogger(__name__)
@@ -45,7 +44,7 @@ def __call__(self, request):
request.site = models.Site.objects.get_current()
# Prefetch our rule_set as needed, since request.site is used on
# many different pages
- prefetch_related_objects([request.site], 'rule_set')
+ prefetch_related_objects([request.site], "rule_set")
# Get our logged in user to use with our audit logging plugin
if request.user.is_authenticated:
@@ -54,17 +53,17 @@ def __call__(self, request):
response = self.get_response(request)
triggers = {
- 'Config': trigger_write_config.send,
- 'Rules': trigger_write_rules.send,
- 'URLs': trigger_write_urls.send,
+ "Config": trigger_write_config.send,
+ "Rules": trigger_write_rules.send,
+ "URLs": trigger_write_urls.send,
}
for msg, func in triggers.items():
for (receiver, status) in func(self, request=request, force=True):
if status is False:
- messages.warning(request, 'Error queueing %s ' % msg)
+ messages.warning(request, "Error queueing %s " % msg)
return response
def get_current_user():
- return getattr(_user, 'value', None)
+ return getattr(_user, "value", None)
diff --git a/promgen/mixins.py b/promgen/mixins.py
index fd5c97aa3..84aaad5f9 100644
--- a/promgen/mixins.py
+++ b/promgen/mixins.py
@@ -13,9 +13,7 @@
class ContentTypeMixin:
def set_object(self, content_type, object_id):
- self.content_type = ContentType.objects.get(
- model=content_type, app_label="promgen"
- )
+ self.content_type = ContentType.objects.get(model=content_type, app_label="promgen")
self.object_id = object_id
@@ -34,9 +32,7 @@ def post(self, request, content_type, object_id):
importer = self.get_form(self.form_import_class)
if importer.is_valid():
- ct = ContentType.objects.get_by_natural_key(
- "promgen", content_type
- ).model_class()
+ ct = ContentType.objects.get_by_natural_key("promgen", content_type).model_class()
content_object = ct.objects.get(pk=object_id)
return self.form_import(importer, content_object)
diff --git a/promgen/models.py b/promgen/models.py
index 8169063a2..a71a6615f 100644
--- a/promgen/models.py
+++ b/promgen/models.py
@@ -6,8 +6,7 @@
import django.contrib.sites.models
from django.conf import settings
-from django.contrib.contenttypes.fields import (GenericForeignKey,
- GenericRelation)
+from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models, transaction
from django.forms.models import model_to_dict
@@ -26,7 +25,7 @@
class Site(django.contrib.sites.models.Site):
# Proxy model for sites so that we can easily
# query our related Rules
- rule_set = GenericRelation('promgen.Rule', for_concrete_model=False)
+ rule_set = GenericRelation("promgen.Rule", for_concrete_model=False)
def get_absolute_url(self):
return reverse("site-detail")
@@ -37,17 +36,17 @@ class Meta:
class ObjectFilterManager(models.Manager):
def create(self, *args, **kwargs):
- if 'obj' in kwargs:
- obj = kwargs.pop('obj')
- kwargs['object_id'] = obj.id
- kwargs['content_type_id'] = ContentType.objects.get_for_model(obj).id
+ if "obj" in kwargs:
+ obj = kwargs.pop("obj")
+ kwargs["object_id"] = obj.id
+ kwargs["content_type_id"] = ContentType.objects.get_for_model(obj).id
return self.get_queryset().create(*args, **kwargs)
def filter(self, *args, **kwargs):
- if 'obj' in kwargs:
- obj = kwargs.pop('obj')
- kwargs['object_id'] = obj.id
- kwargs['content_type_id'] = ContentType.objects.get_for_model(obj).id
+ if "obj" in kwargs:
+ obj = kwargs.pop("obj")
+ kwargs["object_id"] = obj.id
+ kwargs["content_type_id"] = ContentType.objects.get_for_model(obj).id
return self.get_queryset().filter(*args, **kwargs)
def get_or_create(self, *args, **kwargs):
@@ -58,9 +57,7 @@ def get_or_create(self, *args, **kwargs):
if "defaults" in kwargs and "obj" in kwargs["defaults"]:
obj = kwargs["defaults"].pop("obj")
kwargs["defaults"]["object_id"] = obj.id
- kwargs["defaults"]["content_type_id"] = ContentType.objects.get_for_model(
- obj
- ).id
+ kwargs["defaults"]["content_type_id"] = ContentType.objects.get_for_model(obj).id
return self.get_queryset().get_or_create(*args, **kwargs)
@@ -72,12 +69,17 @@ class Sender(models.Model):
value = models.CharField(max_length=128)
alias = models.CharField(max_length=128, blank=True)
- content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, limit_choices_to=(
- models.Q(app_label='auth', model='user') |
- models.Q(app_label='promgen', model='project') | models.Q(app_label='promgen', model='service'))
+ content_type = models.ForeignKey(
+ ContentType,
+ on_delete=models.CASCADE,
+ limit_choices_to=(
+ models.Q(app_label="auth", model="user")
+ | models.Q(app_label="promgen", model="project")
+ | models.Q(app_label="promgen", model="service")
+ ),
)
object_id = models.PositiveIntegerField()
- content_object = GenericForeignKey('content_type', 'object_id')
+ content_object = GenericForeignKey("content_type", "object_id")
owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)
@@ -88,25 +90,25 @@ def show_value(self):
return self.alias
return self.value
- show_value.short_description = 'Value'
+ show_value.short_description = "Value"
def __str__(self):
- return f'{self.sender}:{self.show_value()}'
+ return f"{self.sender}:{self.show_value()}"
@classmethod
def driver_set(cls):
- '''Return the list of drivers for Sender model'''
+ """Return the list of drivers for Sender model"""
for entry in plugins.notifications():
try:
yield entry.module_name, entry.load()
except ImportError:
- logger.warning('Error importing %s', entry.module_name)
+ logger.warning("Error importing %s", entry.module_name)
__driver = {}
@property
def driver(self):
- '''Return configured driver for Sender model instance'''
+ """Return configured driver for Sender model instance"""
if self.sender in self.__driver:
return self.__driver[self.sender]
@@ -114,23 +116,24 @@ def driver(self):
try:
self.__driver[entry.module_name] = entry.load()()
except ImportError:
- logger.warning('Error importing %s', entry.module_name)
+ logger.warning("Error importing %s", entry.module_name)
return self.__driver[self.sender]
def test(self):
- '''
+ """
Test sender plugin
Uses the same test json from our unittests but subs in the currently
tested object as part of the test data
- '''
+ """
data = tests.Data("examples", "alertmanager.json").json()
- if hasattr(self.content_object, 'name'):
- data['commonLabels'][self.content_type.name] = self.content_object.name
- for alert in data.get('alerts', []):
- alert['labels'][self.content_type.name] = self.content_object.name
+ if hasattr(self.content_object, "name"):
+ data["commonLabels"][self.content_type.name] = self.content_object.name
+ for alert in data.get("alerts", []):
+ alert["labels"][self.content_type.name] = self.content_object.name
from promgen import tasks
+
tasks.send_alert(self.sender, self.value, data)
def filtered(self, alert):
@@ -167,9 +170,7 @@ class Meta:
class Shard(models.Model):
- name = models.CharField(
- max_length=128, unique=True, validators=[validators.labelvalue]
- )
+ name = models.CharField(max_length=128, unique=True, validators=[validators.labelvalue])
url = models.URLField(max_length=256)
proxy = models.BooleanField(
default=False,
@@ -204,57 +205,56 @@ def __str__(self):
class Service(models.Model):
name = models.CharField(max_length=128, unique=True, validators=[validators.labelvalue])
description = models.TextField(blank=True)
- owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None)
+ owner = models.ForeignKey(
+ settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None
+ )
notifiers = GenericRelation(Sender)
- rule_set = GenericRelation('Rule')
+ rule_set = GenericRelation("Rule")
class Meta:
- ordering = ['name']
+ ordering = ["name"]
def get_absolute_url(self):
- return reverse('service-detail', kwargs={'pk': self.pk})
+ return reverse("service-detail", kwargs={"pk": self.pk})
def __str__(self):
return self.name
@classmethod
- def default(cls, service_name='Default', shard_name='Default'):
- shard, created = Shard.objects.get_or_create(
- name=shard_name
- )
+ def default(cls, service_name="Default", shard_name="Default"):
+ shard, created = Shard.objects.get_or_create(name=shard_name)
if created:
- logger.info('Created default shard')
+ logger.info("Created default shard")
- service, created = cls.objects.get_or_create(
- name=service_name,
- defaults={'shard': shard}
- )
+ service, created = cls.objects.get_or_create(name=service_name, defaults={"shard": shard})
if created:
- logger.info('Created default service')
+ logger.info("Created default service")
return service
class Project(models.Model):
name = models.CharField(max_length=128, unique=True, validators=[validators.labelvalue])
description = models.TextField(blank=True)
- owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None)
+ owner = models.ForeignKey(
+ settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None
+ )
- service = models.ForeignKey('promgen.Service', on_delete=models.CASCADE)
- shard = models.ForeignKey('promgen.Shard', on_delete=models.CASCADE)
- farm = models.ForeignKey('promgen.Farm', blank=True, null=True, on_delete=models.SET_NULL)
+ service = models.ForeignKey("promgen.Service", on_delete=models.CASCADE)
+ shard = models.ForeignKey("promgen.Shard", on_delete=models.CASCADE)
+ farm = models.ForeignKey("promgen.Farm", blank=True, null=True, on_delete=models.SET_NULL)
notifiers = GenericRelation(Sender)
- rule_set = GenericRelation('Rule')
+ rule_set = GenericRelation("Rule")
class Meta:
- ordering = ['name']
+ ordering = ["name"]
def get_absolute_url(self):
- return reverse('project-detail', kwargs={'pk': self.pk})
+ return reverse("project-detail", kwargs={"pk": self.pk})
def __str__(self):
- return f'{self.service} » {self.name}'
+ return f"{self.service} » {self.name}"
class Farm(models.Model):
@@ -262,11 +262,11 @@ class Farm(models.Model):
source = models.CharField(max_length=128)
class Meta:
- ordering = ['name']
- unique_together = (('name', 'source',),)
+ ordering = ["name"]
+ unique_together = (("name", "source"),)
def get_absolute_url(self):
- return reverse('farm-detail', kwargs={'pk': self.pk})
+ return reverse("farm-detail", kwargs={"pk": self.pk})
def refresh(self):
target = set()
@@ -279,13 +279,11 @@ def refresh(self):
add = target - current
if add:
- Audit.log(f'Adding {add} to {self}', self)
- Host.objects.bulk_create([
- Host(name=name, farm_id=self.id) for name in add
- ])
+ Audit.log(f"Adding {add} to {self}", self)
+ Host.objects.bulk_create([Host(name=name, farm_id=self.id) for name in add])
if remove:
- Audit.log(f'Removing {add} from {self}', self)
+ Audit.log(f"Removing {add} from {self}", self)
Host.objects.filter(farm=self, name__in=remove).delete()
return add, remove
@@ -298,7 +296,7 @@ def fetch(cls, source):
@cached_property
def driver(self):
- '''Return configured driver for Farm model instance'''
+ """Return configured driver for Farm model instance"""
for entry in plugins.discovery():
if entry.name == self.source:
return entry.load()()
@@ -309,33 +307,31 @@ def editable(self):
@classmethod
def driver_set(cls):
- '''Return the list of drivers for Farm model'''
+ """Return the list of drivers for Farm model"""
for entry in plugins.discovery():
yield entry.name, entry.load()()
def __str__(self):
- return f'{self.name} ({self.source})'
+ return f"{self.name} ({self.source})"
class Host(models.Model):
name = models.CharField(max_length=128)
- farm = models.ForeignKey('Farm', on_delete=models.CASCADE)
+ farm = models.ForeignKey("Farm", on_delete=models.CASCADE)
class Meta:
- ordering = ['name']
- unique_together = (('name', 'farm',),)
+ ordering = ["name"]
+ unique_together = (("name", "farm"),)
def get_absolute_url(self):
- return reverse('host-detail', kwargs={'slug': self.name})
+ return reverse("host-detail", kwargs={"slug": self.name})
def __str__(self):
- return f'{self.name} [{self.farm.name}]'
+ return f"{self.name} [{self.farm.name}]"
class BaseExporter(models.Model):
- job = models.CharField(
- max_length=128, help_text="Exporter name. Example node, jmx, app"
- )
+ job = models.CharField(max_length=128, help_text="Exporter name. Example node, jmx, app")
port = models.IntegerField(help_text="Port Exporter is running on")
path = models.CharField(
max_length=128, blank=True, help_text="Exporter path. Defaults to /metrics"
@@ -370,7 +366,9 @@ def __str__(self):
class Probe(models.Model):
- module = models.CharField(help_text='Probe Module from blackbox_exporter config', max_length=128, unique=True)
+ module = models.CharField(
+ help_text="Probe Module from blackbox_exporter config", max_length=128, unique=True
+ )
description = models.TextField(blank=True)
def __str__(self):
@@ -393,30 +391,32 @@ class Rule(models.Model):
objects = ObjectFilterManager()
name = models.CharField(max_length=128, unique=True, validators=[validators.metricname])
- clause = models.TextField(help_text='Prometheus query')
+ clause = models.TextField(help_text="Prometheus query")
duration = models.CharField(
- max_length=128, validators=[validators.duration],
- help_text="Duration field with postfix. Example 30s, 5m, 1d"
- )
+ max_length=128,
+ validators=[validators.duration],
+ help_text="Duration field with postfix. Example 30s, 5m, 1d",
+ )
enabled = models.BooleanField(default=True)
parent = models.ForeignKey(
- 'Rule',
- null=True,
- related_name='overrides',
- on_delete=models.SET_NULL
+ "Rule", null=True, related_name="overrides", on_delete=models.SET_NULL
)
- content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, limit_choices_to=(
- models.Q(app_label='promgen', model='site') |
- models.Q(app_label='promgen', model='project') |
- models.Q(app_label='promgen', model='service'))
+ content_type = models.ForeignKey(
+ ContentType,
+ on_delete=models.CASCADE,
+ limit_choices_to=(
+ models.Q(app_label="promgen", model="site")
+ | models.Q(app_label="promgen", model="project")
+ | models.Q(app_label="promgen", model="service")
+ ),
)
object_id = models.PositiveIntegerField()
- content_object = GenericForeignKey('content_type', 'object_id', for_concrete_model=False)
+ content_object = GenericForeignKey("content_type", "object_id", for_concrete_model=False)
description = models.TextField(blank=True)
class Meta:
- ordering = ['content_type', 'object_id', 'name']
+ ordering = ["content_type", "object_id", "name"]
@cached_property
def labels(self):
@@ -432,36 +432,38 @@ def add_annotation(self, name, value):
def annotations(self):
_annotations = {obj.name: obj.value for obj in self.ruleannotation_set.all()}
# Skip when pk is not set, such as when test rendering a rule
- if self.pk and 'rule' not in _annotations:
- _annotations['rule'] = resolve_domain('rule-detail', pk=self.pk)
+ if self.pk and "rule" not in _annotations:
+ _annotations["rule"] = resolve_domain("rule-detail", pk=self.pk)
return _annotations
def __str__(self):
- return f'{self.name} [{self.content_object.name}]'
+ return f"{self.name} [{self.content_object.name}]"
def get_absolute_url(self):
- return reverse('rule-detail', kwargs={'pk': self.pk})
+ return reverse("rule-detail", kwargs={"pk": self.pk})
def set_object(self, content_type, object_id):
self.content_type = ContentType.objects.get(
model=content_type,
- app_label='promgen'
- )
+ app_label="promgen",
+ )
self.object_id = object_id
def copy_to(self, content_type, object_id):
- '''
+ """
Make a copy under a new service
It's important that we set pk to None so a new object is created, but we
also need to ensure the new name is unique by appending some unique data
to the end of the name
- '''
+ """
with transaction.atomic():
- content_type = ContentType.objects.get(model=content_type, app_label='promgen')
+ content_type = ContentType.objects.get(model=content_type, app_label="promgen")
# First check to see if this rule is already overwritten
- for rule in Rule.objects.filter(parent_id=self.pk, content_type=content_type, object_id=object_id):
+ for rule in Rule.objects.filter(
+ parent_id=self.pk, content_type=content_type, object_id=object_id
+ ):
return rule
content_object = content_type.get_object_for_this_type(pk=object_id)
@@ -469,7 +471,7 @@ def copy_to(self, content_type, object_id):
orig_pk = self.pk
self.pk = None
self.parent_id = orig_pk
- self.name = f'{self.name}_{slugify(content_object.name)}'.replace('-', '_')
+ self.name = f"{self.name}_{slugify(content_object.name)}".replace("-", "_")
self.content_type = content_type
self.object_id = object_id
# Enable the copy by default since it's more likely the user prefers
@@ -487,16 +489,16 @@ def copy_to(self, content_type, object_id):
for label in RuleLabel.objects.filter(rule_id=orig_pk):
# Skip service labels from our previous rule
- if label.name in ['service', 'project']:
- logger.debug('Skipping %s: %s', label.name, label.value)
+ if label.name in ["service", "project"]:
+ logger.debug("Skipping %s: %s", label.name, label.value)
continue
- logger.debug('Copying %s to %s', label, self)
+ logger.debug("Copying %s to %s", label, self)
label.pk = None
label.rule = self
label.save()
for annotation in RuleAnnotation.objects.filter(rule_id=orig_pk):
- logger.debug('Copying %s to %s', annotation, self)
+ logger.debug("Copying %s to %s", annotation, self)
annotation.pk = None
annotation.rule = self
annotation.save()
@@ -507,19 +509,21 @@ def copy_to(self, content_type, object_id):
class RuleLabel(models.Model):
name = models.CharField(max_length=128)
value = models.CharField(max_length=128)
- rule = models.ForeignKey('Rule', on_delete=models.CASCADE)
+ rule = models.ForeignKey("Rule", on_delete=models.CASCADE)
class RuleAnnotation(models.Model):
name = models.CharField(max_length=128)
value = models.TextField()
- rule = models.ForeignKey('Rule', on_delete=models.CASCADE)
+ rule = models.ForeignKey("Rule", on_delete=models.CASCADE)
+
class AlertLabel(models.Model):
- alert = models.ForeignKey('Alert', on_delete=models.CASCADE)
+ alert = models.ForeignKey("Alert", on_delete=models.CASCADE)
name = models.CharField(max_length=128)
value = models.TextField()
+
class Alert(models.Model):
created = models.DateTimeField(default=timezone.now)
body = models.TextField()
@@ -532,31 +536,31 @@ def get_absolute_url(self):
def expand(self):
# Map of Prometheus labels to Promgen objects
LABEL_MAPPING = [
- ('project', Project),
- ('service', Service),
+ ("project", Project),
+ ("service", Service),
]
routable = {}
data = json.loads(self.body)
- data.setdefault('commonLabels', {})
- data.setdefault('commonAnnotations', {})
+ data.setdefault("commonLabels", {})
+ data.setdefault("commonAnnotations", {})
# Set our link back to Promgen for processed notifications
# The original externalURL can still be visible from the alerts page
- data['externalURL'] = resolve_domain(self.get_absolute_url())
+ data["externalURL"] = resolve_domain(self.get_absolute_url())
# Look through our labels and find the object from Promgen's DB
# If we find an object in Promgen, add an annotation with a direct link
for label, klass in LABEL_MAPPING:
- if label not in data['commonLabels']:
- logger.debug('Missing label %s', label)
+ if label not in data["commonLabels"]:
+ logger.debug("Missing label %s", label)
continue
# Should only find a single value, but I think filter is a little
# bit more forgiving than get in terms of throwing errors
- for obj in klass.objects.filter(name=data['commonLabels'][label]):
- logger.debug('Found %s %s', label, obj)
+ for obj in klass.objects.filter(name=data["commonLabels"][label]):
+ logger.debug("Found %s %s", label, obj)
routable[label] = obj
- data['commonAnnotations'][label] = resolve_domain(obj)
+ data["commonAnnotations"][label] = resolve_domain(obj)
return routable, data
@@ -579,47 +583,49 @@ class Audit(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, null=True)
object_id = models.PositiveIntegerField(default=0)
- content_object = GenericForeignKey('content_type', 'object_id')
+ content_object = GenericForeignKey("content_type", "object_id")
- user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None)
+ user = models.ForeignKey(
+ settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, default=None
+ )
@property
def highlight(self):
- if self.body.startswith('Created'):
- return 'success'
- if self.body.startswith('Updated'):
- return 'warning'
- if self.body.startswith('Deleted'):
- return 'danger'
- return ''
+ if self.body.startswith("Created"):
+ return "success"
+ if self.body.startswith("Updated"):
+ return "warning"
+ if self.body.startswith("Deleted"):
+ return "danger"
+ return ""
@classmethod
def log(cls, body, instance=None, old=None, **kwargs):
from promgen.middleware import get_current_user
- kwargs['body'] = body
- kwargs['created'] = timezone.now()
- kwargs['user'] = get_current_user()
+ kwargs["body"] = body
+ kwargs["created"] = timezone.now()
+ kwargs["user"] = get_current_user()
if instance:
- kwargs['content_type'] = ContentType.objects.get_for_model(instance)
- kwargs['object_id'] = instance.id
- kwargs['data'] = json.dumps(model_to_dict(instance), sort_keys=True)
+ kwargs["content_type"] = ContentType.objects.get_for_model(instance)
+ kwargs["object_id"] = instance.id
+ kwargs["data"] = json.dumps(model_to_dict(instance), sort_keys=True)
if old:
- kwargs['old'] = json.dumps(model_to_dict(old), sort_keys=True)
+ kwargs["old"] = json.dumps(model_to_dict(old), sort_keys=True)
return cls.objects.create(**kwargs)
class Prometheus(models.Model):
- shard = models.ForeignKey('promgen.Shard', on_delete=models.CASCADE)
+ shard = models.ForeignKey("promgen.Shard", on_delete=models.CASCADE)
host = models.CharField(max_length=128)
port = models.IntegerField()
def __str__(self):
- return f'{self.host}:{self.port}'
+ return f"{self.host}:{self.port}"
class Meta:
- ordering = ['shard', 'host']
- unique_together = (('host', 'port',),)
- verbose_name_plural = 'prometheis'
+ ordering = ["shard", "host"]
+ unique_together = (("host", "port"),)
+ verbose_name_plural = "prometheis"
diff --git a/promgen/notification/email.py b/promgen/notification/email.py
index 8b5581491..0138d2bd7 100644
--- a/promgen/notification/email.py
+++ b/promgen/notification/email.py
@@ -5,6 +5,7 @@
from django import forms
from django.core.mail import send_mail
+
from promgen.notification import NotificationBase
logger = logging.getLogger(__name__)
@@ -13,28 +14,28 @@
class FormEmail(forms.Form):
value = forms.CharField(
required=True,
- label='Email Address'
+ label="Email Address",
)
alias = forms.CharField(
required=False,
- help_text='Use to hide email from being displayed'
+ help_text="Use to hide email from being displayed",
)
class NotificationEmail(NotificationBase):
- '''
+ """
Simple plaintext Email notification
- '''
+ """
form = FormEmail
def _send(self, address, data):
- subject = self.render('promgen/sender/email.subject.txt', data)
- body = self.render('promgen/sender/email.body.txt', data)
+ subject = self.render("promgen/sender/email.subject.txt", data)
+ body = self.render("promgen/sender/email.body.txt", data)
send_mail(
subject,
body,
- self.config('sender'),
- [address]
+ self.config("sender"),
+ [address],
)
return True
diff --git a/promgen/notification/linenotify.py b/promgen/notification/linenotify.py
index e421d554f..09f2229fa 100644
--- a/promgen/notification/linenotify.py
+++ b/promgen/notification/linenotify.py
@@ -14,37 +14,35 @@
class FormLineNotify(forms.Form):
value = forms.CharField(
required=True,
- label='LINE Notify Token'
+ label="LINE Notify Token",
)
alias = forms.CharField(
required=True,
- help_text='Use to hide token from being displayed'
+ help_text="Use to hide token from being displayed",
)
class NotificationLineNotify(NotificationBase):
- '''
+ """
Send messages to line notify
https://notify-bot.line.me/en/
- '''
+ """
form = FormLineNotify
def _send(self, token, data):
- url = self.config('server')
+ url = self.config("server")
- if data['status'] == 'resolved':
- message = self.render('promgen/sender/linenotify.resolved.txt', data)
+ if data["status"] == "resolved":
+ message = self.render("promgen/sender/linenotify.resolved.txt", data)
else:
- message = self.render('promgen/sender/linenotify.body.txt', data)
+ message = self.render("promgen/sender/linenotify.body.txt", data)
params = {
- 'message': message,
+ "message": message,
}
- headers = {
- 'Authorization': 'Bearer %s' % token
- }
+ headers = {"Authorization": "Bearer %s" % token}
util.post(url, data=params, headers=headers).raise_for_status()
diff --git a/promgen/notification/slack.py b/promgen/notification/slack.py
index 7d3ecace2..b5104536f 100644
--- a/promgen/notification/slack.py
+++ b/promgen/notification/slack.py
@@ -13,15 +13,16 @@
class FormSlack(forms.Form):
value = forms.URLField(
required=True,
- label='Slack webhook URL'
+ label="Slack webhook URL",
)
alias = forms.CharField(
required=False,
- help_text='Optional description to be displayed instead of the URL.'
+ help_text="Optional description to be displayed instead of the URL.",
)
+
class NotificationSlack(NotificationBase):
- '''
+ """
Send messages to slack via webhook.
A webhook has to be configured for your workspace; you
@@ -32,7 +33,7 @@ class NotificationSlack(NotificationBase):
A fitting prometheus icon can be selected from here:
https://github.com/quintessence/slack-icons
- '''
+ """
form = FormSlack
@@ -40,19 +41,18 @@ def _send(self, url, data):
kwargs = {}
proxy = self.config("proxies", default=None)
if proxy:
- kwargs['proxies'] = {
- 'http': proxy,
- 'https': proxy,
+ kwargs["proxies"] = {
+ "http": proxy,
+ "https": proxy,
}
-
- if data['status'] == 'resolved':
- message = self.render('promgen/sender/slack.resolved.txt', data)
+
+ if data["status"] == "resolved":
+ message = self.render("promgen/sender/slack.resolved.txt", data)
else:
- message = self.render('promgen/sender/slack.body.txt', data)
+ message = self.render("promgen/sender/slack.body.txt", data)
json = {
- 'text': message,
+ "text": message,
}
-
+
util.post(url, json=json, **kwargs).raise_for_status()
-
diff --git a/promgen/notification/user.py b/promgen/notification/user.py
index 468e1a2d8..e242df196 100644
--- a/promgen/notification/user.py
+++ b/promgen/notification/user.py
@@ -24,15 +24,15 @@ def _choices():
class FormUser(forms.Form):
value = forms.ChoiceField(
required=True,
- label='Username',
- choices=_choices
+ label="Username",
+ choices=_choices,
)
class NotificationUser(NotificationBase):
- '''
+ """
Send notification to specific user
- '''
+ """
form = FormUser
@@ -50,5 +50,5 @@ def _send(self, address, data):
try:
sender.driver._send(sender.value, data)
except:
- logger.exception('Error sending with %s', sender)
+ logger.exception("Error sending with %s", sender)
return True
diff --git a/promgen/notification/webhook.py b/promgen/notification/webhook.py
index c70078f9e..4c50efa0e 100644
--- a/promgen/notification/webhook.py
+++ b/promgen/notification/webhook.py
@@ -1,11 +1,11 @@
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
-'''
+"""
Simple webhook bridge
Accepts alert json from Alert Manager and then POSTs individual alerts to
configured webhook destinations
-'''
+"""
import logging
@@ -20,14 +20,15 @@
class FormWebhook(forms.Form):
value = forms.URLField(
required=True,
- label='URL'
+ label="URL",
)
class NotificationWebhook(NotificationBase):
- '''
+ """
Post notifications to a specific web endpoint
- '''
+ """
+
form = FormWebhook
def _send(self, url, data):
diff --git a/promgen/plugins.py b/promgen/plugins.py
index e74e0c016..aeed7d3b3 100644
--- a/promgen/plugins.py
+++ b/promgen/plugins.py
@@ -9,14 +9,17 @@
def discovery():
- return working_set.iter_entry_points('promgen.discovery')
+ return working_set.iter_entry_points("promgen.discovery")
def notifications():
- return working_set.iter_entry_points('promgen.notification')
+ return working_set.iter_entry_points("promgen.notification")
+
# Since plugins may need to load other resources bundled with them, we loop
# through an additional promgen.apps entry point so that the default django
# project loaders work as expected. This also should simplify some configuration
# for plugin authors
-apps_from_setuptools = [entry.module_name for entry in working_set.iter_entry_points('promgen.apps')]
+apps_from_setuptools = [
+ entry.module_name for entry in working_set.iter_entry_points("promgen.apps")
+]
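# Illustrative sketch: the entry points iterated above are ordinary setuptools
# entry points. A hypothetical third-party notifier could register itself (and
# its Django app) with something like the following setup.py; the package and
# module names are placeholders.
from setuptools import setup

setup(
    name="promgen-notification-example",
    py_modules=["promgen_notification_example"],
    entry_points={
        "promgen.notification": [
            "promgen_notification_example = promgen_notification_example",
        ],
        "promgen.apps": [
            "promgen_notification_example = promgen_notification_example",
        ],
    },
)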
diff --git a/promgen/prometheus.py b/promgen/prometheus.py
index f12f8c015..f3b05de7e 100644
--- a/promgen/prometheus.py
+++ b/promgen/prometheus.py
@@ -21,16 +21,16 @@
def check_rules(rules):
- '''
+ """
Use promtool to check to see if a rule is valid or not
The command name changed slightly from 1.x -> 2.x but this uses promtool
to verify if the rules are correct or not. This can be bypassed by setting
a dummy command such as /usr/bin/true that always returns true
- '''
+ """
- with tempfile.NamedTemporaryFile(mode='w+b') as fp:
- logger.debug('Rendering to %s', fp.name)
+ with tempfile.NamedTemporaryFile(mode="w+b") as fp:
+ logger.debug("Rendering to %s", fp.name)
# Normally we wouldn't bother saving a copy to a variable here and would
# leave it in the fp.write() call, but saving a copy in the variable
# means we can see the rendered output in a Sentry stacktrace
@@ -44,11 +44,11 @@ def check_rules(rules):
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
- raise ValidationError(rendered.decode('utf8') + e.output.decode('utf8'))
+ raise ValidationError(rendered.decode("utf8") + e.output.decode("utf8"))
def render_rules(rules=None):
- '''
+ """
Render rules in a format that Prometheus understands
:param rules: List of rules
@@ -60,13 +60,11 @@ def render_rules(rules=None):
This function can render in either v1 or v2 format
We call prefetch_related_objects within this function to populate the
other related objects that are mostly used for the sub lookups.
- '''
+ """
if rules is None:
rules = models.Rule.objects.filter(enabled=True)
- return renderers.RuleRenderer().render(
- serializers.AlertRuleSerializer(rules, many=True).data
- )
+ return renderers.RuleRenderer().render(serializers.AlertRuleSerializer(rules, many=True).data)
def render_urls():
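# Illustrative sketch of what check_rules above shells out to for Prometheus 2.x:
# render the rules to a temporary file and let promtool validate it. The exact
# command comes from Promgen's settings; "promtool" being on PATH is an
# assumption here.
import subprocess
import tempfile

rendered = b"groups: []\n"  # stand-in for render_rules(...) output
with tempfile.NamedTemporaryFile(mode="w+b") as fp:
    fp.write(rendered)
    fp.flush()
    subprocess.check_output(["promtool", "check", "rules", fp.name], stderr=subprocess.STDOUT)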
@@ -141,12 +139,12 @@ def render_config(service=None, project=None):
def import_rules_v2(config, content_object=None):
- '''
+ """
Loop through a dictionary and add rules to the database
This assumes a dictionary in the 2.x rule format.
See promgen/tests/examples/import.rule.yml for an example
- '''
+ """
# If not already a dictionary, try to load as YAML
if not isinstance(config, dict):
config = yaml.safe_load(config)
@@ -160,34 +158,31 @@ def import_rules_v2(config, content_object=None):
config = {"groups": [{"name": "Import", "rules": [config]}]}
counters = collections.defaultdict(int)
- for group in config['groups']:
- for r in group['rules']:
- labels = r.get('labels', {})
- annotations = r.get('annotations', {})
+ for group in config["groups"]:
+ for r in group["rules"]:
+ labels = r.get("labels", {})
+ annotations = r.get("annotations", {})
defaults = {
- 'clause': r['expr'],
- 'duration': r['for'],
+ "clause": r["expr"],
+ "duration": r["for"],
}
# Check our labels to see if we have a project or service
# label set and if not, default it to a global rule
if content_object:
- defaults['obj'] = content_object
- elif 'project' in labels:
- defaults['obj'] = models.Project.objects.get(name=labels['project'])
- elif 'service' in labels:
- defaults['obj'] = models.Service.objects.get(name=labels['service'])
+ defaults["obj"] = content_object
+ elif "project" in labels:
+ defaults["obj"] = models.Project.objects.get(name=labels["project"])
+ elif "service" in labels:
+ defaults["obj"] = models.Service.objects.get(name=labels["service"])
else:
- defaults['obj'] = models.Site.objects.get_current()
+ defaults["obj"] = models.Site.objects.get_current()
- rule, created = models.Rule.objects.get_or_create(
- name=r['alert'],
- defaults=defaults
- )
+ rule, created = models.Rule.objects.get_or_create(name=r["alert"], defaults=defaults)
if created:
- counters['Rules'] += 1
+ counters["Rules"] += 1
for k, v in labels.items():
rule.add_label(k, v)
for k, v in annotations.items():
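# Illustrative sketch of the smallest 2.x-style document that import_rules_v2
# accepts: groups of rules keyed by alert/expr/for, with optional labels and
# annotations. A "project" or "service" label attaches the rule to that object;
# otherwise it falls back to the current Site. The names below are placeholders.
EXAMPLE_RULES = {
    "groups": [
        {
            "name": "Import",
            "rules": [
                {
                    "alert": "ExampleAlert",
                    "expr": "up == 0",
                    "for": "5m",
                    "labels": {"service": "Service 1", "severity": "major"},
                    "annotations": {"summary": "Instance is down"},
                }
            ],
        }
    ]
}
# import_rules_v2(EXAMPLE_RULES) would then get_or_create one Rule plus its
# labels and annotations.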
@@ -201,104 +196,101 @@ def import_config(config, replace_shard=None):
skipped = collections.defaultdict(list)
for entry in config:
if replace_shard:
- logger.debug('Importing into shard %s', replace_shard)
- entry['labels']['__shard'] = replace_shard
+ logger.debug("Importing into shard %s", replace_shard)
+ entry["labels"]["__shard"] = replace_shard
shard, created = models.Shard.objects.get_or_create(
- name=entry['labels'].get('__shard', 'Default')
+ name=entry["labels"].get("__shard", "Default")
)
if created:
- logger.debug('Created shard %s', shard)
- counters['Shard'].append(shard)
+ logger.debug("Created shard %s", shard)
+ counters["Shard"].append(shard)
else:
- skipped['Shard'].append(shard)
+ skipped["Shard"].append(shard)
service, created = models.Service.objects.get_or_create(
- name=entry['labels']['service'],
+ name=entry["labels"]["service"],
)
if created:
- logger.debug('Created service %s', service)
- counters['Service'].append(service)
+ logger.debug("Created service %s", service)
+ counters["Service"].append(service)
else:
- skipped['Service'].append(service)
+ skipped["Service"].append(service)
farm, created = models.Farm.objects.get_or_create(
- name=entry['labels']['farm'],
- defaults={'source': entry['labels'].get('__farm_source', 'pmc')}
+ name=entry["labels"]["farm"],
+ defaults={"source": entry["labels"].get("__farm_source", "pmc")},
)
if created:
- logger.debug('Created farm %s', farm)
- counters['Farm'].append(farm)
+ logger.debug("Created farm %s", farm)
+ counters["Farm"].append(farm)
else:
- skipped['Farm'].append(farm)
+ skipped["Farm"].append(farm)
project, created = models.Project.objects.get_or_create(
- name=entry['labels']['project'],
+ name=entry["labels"]["project"],
service=service,
shard=shard,
- defaults={'farm': farm}
+ defaults={"farm": farm},
)
if created:
- logger.debug('Created project %s', project)
- counters['Project'].append(project)
+ logger.debug("Created project %s", project)
+ counters["Project"].append(project)
elif project.farm != farm:
- logger.debug('Linking farm [%s] with [%s]', farm, project)
+ logger.debug("Linking farm [%s] with [%s]", farm, project)
project.farm = farm
project.save()
- for target in entry['targets']:
- target, port = target.split(':')
+ for target in entry["targets"]:
+ target, port = target.split(":")
host, created = models.Host.objects.get_or_create(
name=target,
farm_id=farm.id,
)
if created:
- logger.debug('Created host %s', host)
- counters['Host'].append(host)
+ logger.debug("Created host %s", host)
+ counters["Host"].append(host)
exporter, created = models.Exporter.objects.get_or_create(
- job=entry['labels']['job'],
+ job=entry["labels"]["job"],
port=port,
project=project,
- path=entry['labels'].get('__metrics_path__', '')
+ path=entry["labels"].get("__metrics_path__", ""),
)
if created:
- logger.debug('Created exporter %s', exporter)
- counters['Exporter'].append(exporter)
+ logger.debug("Created exporter %s", exporter)
+ counters["Exporter"].append(exporter)
return counters, skipped
def silence(labels, duration=None, **kwargs):
- '''
+ """
Post a silence message to Alert Manager
Duration should be sent in a format like 1m 2h 1d etc
- '''
+ """
if duration:
start = timezone.now()
- if duration.endswith('m'):
+ if duration.endswith("m"):
end = start + datetime.timedelta(minutes=int(duration[:-1]))
- elif duration.endswith('h'):
+ elif duration.endswith("h"):
end = start + datetime.timedelta(hours=int(duration[:-1]))
- elif duration.endswith('d'):
+ elif duration.endswith("d"):
end = start + datetime.timedelta(days=int(duration[:-1]))
else:
- raise ValidationError('Unknown time modifier')
- kwargs['endsAt'] = end.isoformat()
- kwargs.pop('startsAt', False)
+ raise ValidationError("Unknown time modifier")
+ kwargs["endsAt"] = end.isoformat()
+ kwargs.pop("startsAt", False)
else:
local_timezone = pytz.timezone(util.setting("timezone", "UTC"))
- for key in ['startsAt', 'endsAt']:
- kwargs[key] = local_timezone.localize(
- parser.parse(kwargs[key])
- ).isoformat()
-
- kwargs['matchers'] = [{
- 'name': name,
- 'value': value,
- 'isRegex': True if value.endswith("*") else False
- } for name, value in labels.items()]
+ for key in ["startsAt", "endsAt"]:
+ kwargs[key] = local_timezone.localize(parser.parse(kwargs[key])).isoformat()
+
+ kwargs["matchers"] = [
+ {"name": name, "value": value, "isRegex": True if value.endswith("*") else False}
+ for name, value in labels.items()
+ ]
logger.debug("Sending silence for %s", kwargs)
url = urljoin(util.setting("alertmanager:url"), "/api/v1/silences")
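# Illustrative sketch of the body that silence() above posts to Alertmanager in
# the explicit start/end form; in the duration form startsAt is dropped and
# endsAt is computed from now + duration. The matcher value is a placeholder.
import datetime

now = datetime.datetime.now(datetime.timezone.utc)
payload = {
    "startsAt": now.isoformat(),
    "endsAt": (now + datetime.timedelta(hours=2)).isoformat(),
    "matchers": [
        {"name": "instance", "value": "example.com:[0-9]*", "isRegex": True},
    ],
}
# util.post("http://alertmanager:9093/api/v1/silences", json=payload)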
diff --git a/promgen/proxy.py b/promgen/proxy.py
index 770c3807e..29d03326a 100644
--- a/promgen/proxy.py
+++ b/promgen/proxy.py
@@ -224,9 +224,7 @@ def post(self, request):
class ProxyDeleteSilence(View):
def delete(self, request, silence_id):
- url = urljoin(
- util.setting("alertmanager:url"), "/api/v1/silence/%s" % silence_id
- )
+ url = urljoin(util.setting("alertmanager:url"), "/api/v1/silence/%s" % silence_id)
response = util.delete(url)
return HttpResponse(
response.text, status=response.status_code, content_type="application/json"
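# Illustrative sketch: ProxyDeleteSilence relays a DELETE to Alertmanager's v1
# silence endpoint. Hitting Alertmanager directly would look like this; the
# host and silence id are placeholders.
import requests

silence_id = "00000000-0000-0000-0000-000000000000"
requests.delete("http://alertmanager:9093/api/v1/silence/%s" % silence_id).raise_for_status()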
diff --git a/promgen/rest.py b/promgen/rest.py
index b2b668689..623c0afe2 100644
--- a/promgen/rest.py
+++ b/promgen/rest.py
@@ -38,14 +38,12 @@ class ShardViewSet(viewsets.ModelViewSet):
queryset = models.Shard.objects.all()
filterset_class = filters.ShardFilter
serializer_class = serializers.ShardSerializer
- lookup_field = 'name'
+ lookup_field = "name"
- @action(detail=True, methods=['get'])
+ @action(detail=True, methods=["get"])
def services(self, request, name):
shard = self.get_object()
- return Response(
- serializers.ServiceSerializer(shard.service_set.all(), many=True).data
- )
+ return Response(serializers.ServiceSerializer(shard.service_set.all(), many=True).data)
class RuleMixin:
@@ -62,9 +60,7 @@ class NotifierMixin:
@action(detail=True, methods=["get"])
def notifiers(self, request, name):
return Response(
- serializers.SenderSerializer(
- self.get_object().notifiers.all(), many=True
- ).data
+ serializers.SenderSerializer(self.get_object().notifiers.all(), many=True).data
)
@@ -72,21 +68,19 @@ class ServiceViewSet(NotifierMixin, RuleMixin, viewsets.ModelViewSet):
queryset = models.Service.objects.all()
filterset_class = filters.ServiceFilter
serializer_class = serializers.ServiceSerializer
- lookup_value_regex = '[^/]+'
- lookup_field = 'name'
+ lookup_value_regex = "[^/]+"
+ lookup_field = "name"
- @action(detail=True, methods=['get'])
+ @action(detail=True, methods=["get"])
def projects(self, request, name):
service = self.get_object()
- return Response(
- serializers.ProjectSerializer(service.project_set.all(), many=True).data
- )
+ return Response(serializers.ProjectSerializer(service.project_set.all(), many=True).data)
- @action(detail=True, methods=['get'])
+ @action(detail=True, methods=["get"])
def targets(self, request, name):
return HttpResponse(
prometheus.render_config(service=self.get_object()),
- content_type='application/json',
+ content_type="application/json",
)
@@ -94,13 +88,12 @@ class ProjectViewSet(NotifierMixin, RuleMixin, viewsets.ModelViewSet):
queryset = models.Project.objects.prefetch_related("service", "shard", "farm")
filterset_class = filters.ProjectFilter
serializer_class = serializers.ProjectSerializer
- lookup_value_regex = '[^/]+'
- lookup_field = 'name'
+ lookup_value_regex = "[^/]+"
+ lookup_field = "name"
- @action(detail=True, methods=['get'])
+ @action(detail=True, methods=["get"])
def targets(self, request, name):
return HttpResponse(
prometheus.render_config(project=self.get_object()),
- content_type='application/json',
+ content_type="application/json",
)
-
diff --git a/promgen/serializers.py b/promgen/serializers.py
index 98a66d420..364975518 100644
--- a/promgen/serializers.py
+++ b/promgen/serializers.py
@@ -21,8 +21,8 @@ class ShardSerializer(serializers.ModelSerializer):
class Meta:
model = models.Shard
- exclude = ('id',)
- lookup_field = 'name'
+ exclude = ("id",)
+ lookup_field = "name"
class ServiceSerializer(serializers.ModelSerializer):
@@ -32,8 +32,8 @@ class ServiceSerializer(serializers.ModelSerializer):
class Meta:
model = models.Service
- exclude = ('id',)
- lookup_field = 'name'
+ exclude = ("id",)
+ lookup_field = "name"
class ProjectSerializer(serializers.ModelSerializer):
@@ -44,13 +44,13 @@ class ProjectSerializer(serializers.ModelSerializer):
class Meta:
model = models.Project
- lookup_field = 'name'
+ lookup_field = "name"
exclude = ("id", "farm")
class SenderSerializer(serializers.ModelSerializer):
- owner = serializers.ReadOnlyField(source='owner.username')
- label = serializers.ReadOnlyField(source='show_value')
+ owner = serializers.ReadOnlyField(source="owner.username")
+ label = serializers.ReadOnlyField(source="show_value")
class Meta:
model = models.Sender
diff --git a/promgen/settings.py b/promgen/settings.py
index be6087850..c8facadbd 100644
--- a/promgen/settings.py
+++ b/promgen/settings.py
@@ -75,55 +75,53 @@
# We explicitly include debug_toolbar and whitenoise here, but selectively
# remove it below, so that we can more easily control the import order
MIDDLEWARE = [
- 'debug_toolbar.middleware.DebugToolbarMiddleware', # Only enabled for debug
- 'django.middleware.security.SecurityMiddleware',
- 'whitenoise.middleware.WhiteNoiseMiddleware', # Used primarily for docker
- 'django.middleware.locale.LocaleMiddleware',
- 'django.contrib.sessions.middleware.SessionMiddleware',
- 'django.middleware.common.CommonMiddleware',
- 'django.middleware.csrf.CsrfViewMiddleware',
- 'django.contrib.auth.middleware.AuthenticationMiddleware',
- 'django.contrib.messages.middleware.MessageMiddleware',
- 'django.middleware.clickjacking.XFrameOptionsMiddleware',
- 'promgen.middleware.PromgenMiddleware',
+ "debug_toolbar.middleware.DebugToolbarMiddleware", # Only enabled for debug
+ "django.middleware.security.SecurityMiddleware",
+ "whitenoise.middleware.WhiteNoiseMiddleware", # Used primarily for docker
+ "django.middleware.locale.LocaleMiddleware",
+ "django.contrib.sessions.middleware.SessionMiddleware",
+ "django.middleware.common.CommonMiddleware",
+ "django.middleware.csrf.CsrfViewMiddleware",
+ "django.contrib.auth.middleware.AuthenticationMiddleware",
+ "django.contrib.messages.middleware.MessageMiddleware",
+ "django.middleware.clickjacking.XFrameOptionsMiddleware",
+ "promgen.middleware.PromgenMiddleware",
]
SOCIAL_AUTH_RAISE_EXCEPTIONS = DEBUG
-LOGIN_URL = reverse_lazy('login')
-LOGIN_REDIRECT_URL = reverse_lazy('home')
-LOGOUT_REDIRECT_URL = reverse_lazy('home')
+LOGIN_URL = reverse_lazy("login")
+LOGIN_REDIRECT_URL = reverse_lazy("home")
+LOGOUT_REDIRECT_URL = reverse_lazy("home")
-ROOT_URLCONF = 'promgen.urls'
+ROOT_URLCONF = "promgen.urls"
TEMPLATES = [
{
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': [],
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'context_processors': [
- 'django.template.context_processors.debug',
- 'django.template.context_processors.request',
- 'django.contrib.auth.context_processors.auth',
- 'django.contrib.messages.context_processors.messages',
- 'promgen.context_processors.settings_in_view',
- 'social_django.context_processors.backends',
- 'social_django.context_processors.login_redirect',
+ "BACKEND": "django.template.backends.django.DjangoTemplates",
+ "DIRS": [],
+ "APP_DIRS": True,
+ "OPTIONS": {
+ "context_processors": [
+ "django.template.context_processors.debug",
+ "django.template.context_processors.request",
+ "django.contrib.auth.context_processors.auth",
+ "django.contrib.messages.context_processors.messages",
+ "promgen.context_processors.settings_in_view",
+ "social_django.context_processors.backends",
+ "social_django.context_processors.login_redirect",
],
},
},
]
-WSGI_APPLICATION = 'promgen.wsgi.application'
+WSGI_APPLICATION = "promgen.wsgi.application"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
- "default": env.db(
- "DATABASE_URL", default="sqlite:///" + str(BASE_DIR / "db.sqlite3")
- )
+ "default": env.db("DATABASE_URL", default="sqlite:///" + str(BASE_DIR / "db.sqlite3")),
}
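# Illustrative sketch: env.db() is django-environ's database-URL parser, so the
# SQLite default above can be swapped for another backend purely through the
# environment. The credentials below are placeholders.
import environ

env = environ.Env()
# e.g. export DATABASE_URL=mysql://promgen:secret@127.0.0.1:3306/promgen
DATABASES = {"default": env.db("DATABASE_URL", default="sqlite:////tmp/promgen.sqlite3")}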
@@ -131,27 +129,19 @@
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
- {
- 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
- },
- {
- 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
- },
+ {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
+ {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
+ {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
+ {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
-LANGUAGE_CODE = 'en-us'
+LANGUAGE_CODE = "en-us"
-TIME_ZONE = 'UTC'
+TIME_ZONE = "UTC"
USE_I18N = True
@@ -195,16 +185,14 @@
REST_FRAMEWORK = {
- 'DEFAULT_AUTHENTICATION_CLASSES': (
- 'rest_framework.authentication.TokenAuthentication',
- 'rest_framework.authentication.SessionAuthentication',
+ "DEFAULT_AUTHENTICATION_CLASSES": (
+ "rest_framework.authentication.TokenAuthentication",
+ "rest_framework.authentication.SessionAuthentication",
),
- 'DEFAULT_PERMISSION_CLASSES': (
- 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
+ "DEFAULT_PERMISSION_CLASSES": (
+ "rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly",
),
- 'DEFAULT_FILTER_BACKENDS': (
- 'django_filters.rest_framework.DjangoFilterBackend',
- )
+ "DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",),
}
# If CELERY_BROKER_URL is set in our environment, then we configure celery as
@@ -220,14 +208,15 @@
try:
# If debug_toolbar is not available, we will remove it from our middleware
import debug_toolbar # NOQA
- INSTALLED_APPS += ['debug_toolbar']
- INTERNAL_IPS = ['127.0.0.1']
+
+ INSTALLED_APPS += ["debug_toolbar"]
+ INTERNAL_IPS = ["127.0.0.1"]
except ImportError:
- MIDDLEWARE.remove('debug_toolbar.middleware.DebugToolbarMiddleware')
+ MIDDLEWARE.remove("debug_toolbar.middleware.DebugToolbarMiddleware")
# Load overrides from PROMGEN to replace Django settings
-for k, v in PROMGEN.pop('django', {}).items():
+for k, v in PROMGEN.pop("django", {}).items():
globals()[k] = v
-DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
+DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
diff --git a/promgen/signals.py b/promgen/signals.py
index 8dab7532c..aaae64c57 100644
--- a/promgen/signals.py
+++ b/promgen/signals.py
@@ -8,8 +8,7 @@
from django.contrib import messages
from django.contrib.auth.models import Group, User
from django.core.cache import cache
-from django.db.models.signals import (post_delete, post_save, pre_delete,
- pre_save)
+from django.db.models.signals import post_delete, post_save, pre_delete, pre_save
from django.dispatch import Signal, receiver
from promgen import models, prometheus, tasks
@@ -26,11 +25,12 @@ def _decorator(func):
for sender in senders:
signal.connect(func, sender=sender, **kwargs)
return func
+
return _decorator
def run_once(signal):
- '''
+ """
Run a signal only once
Certain actions we want to run only once, at the end of
@@ -38,23 +38,26 @@ def run_once(signal):
that uses Django's caching system to set whether we
want to run it or not, and trigger the actual run with
a force keyword at the end of the request when we want to run it
- '''
+ """
+
def _decorator(func):
@wraps(func)
def _wrapper(*args, **kwargs):
- key = f'{func.__module__}.{func.__name__}'
- if 'force' in kwargs:
- logger.debug('Checking %s for %s', key, kwargs['sender'])
- kwargs.pop('force')
+ key = f"{func.__module__}.{func.__name__}"
+ if "force" in kwargs:
+ logger.debug("Checking %s for %s", key, kwargs["sender"])
+ kwargs.pop("force")
if cache.get(key):
cache.delete(key)
- logger.debug('Running %s for %s', key, kwargs['sender'])
+ logger.debug("Running %s for %s", key, kwargs["sender"])
return func(*args, **kwargs)
else:
- logger.debug('Queueing %s for %s', key, kwargs['sender'])
+ logger.debug("Queueing %s for %s", key, kwargs["sender"])
cache.set(key, 1)
+
signal.connect(_wrapper)
return _wrapper
+
return _decorator
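# Illustrative usage sketch for run_once above: ordinary sends only queue the
# handler via the cache key, and a final send with force=True at the end of the
# request flushes it exactly once. The signal and handler names are placeholders.
from django.dispatch import Signal

from promgen import signals

example_trigger = Signal()

@signals.run_once(example_trigger)
def write_everything(*, sender, **kwargs):
    print("expensive write for", sender)

example_trigger.send(sender="project-1")              # queued
example_trigger.send(sender="project-1")              # still just queued
example_trigger.send(sender="project-1", force=True)  # runs once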
@@ -80,10 +83,10 @@ def _wrapper(*, raw=False, instance, **kwargs):
def _trigger_write_config(signal, **kwargs):
targets = [server.host for server in models.Prometheus.objects.all()]
for target in targets:
- logger.info('Queueing write_config on %s', target)
+ logger.info("Queueing write_config on %s", target)
tasks.write_config.apply_async(queue=target)
- if 'request' in kwargs:
- messages.info(kwargs['request'], f'Updating config on {targets}')
+ if "request" in kwargs:
+ messages.info(kwargs["request"], f"Updating config on {targets}")
return True
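# Illustrative note: apply_async(queue=target) above routes one write_config
# job to a queue named after each Prometheus host, so a worker running beside
# that host subscribes only to its own queue, roughly:
#
#   celery -A promgen worker -Q prometheus-01.example.com   # hostname is a placeholder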
@@ -91,10 +94,10 @@ def _trigger_write_config(signal, **kwargs):
def _trigger_write_rules(signal, **kwargs):
targets = [server.host for server in models.Prometheus.objects.all()]
for target in targets:
- logger.info('Queueing write_rules on %s', target)
+ logger.info("Queueing write_rules on %s", target)
tasks.write_rules.apply_async(queue=target)
- if 'request' in kwargs:
- messages.info(kwargs['request'], f'Updating rules on {targets}')
+ if "request" in kwargs:
+ messages.info(kwargs["request"], f"Updating rules on {targets}")
return True
@@ -102,10 +105,10 @@ def _trigger_write_rules(signal, **kwargs):
def _trigger_write_urls(signal, **kwargs):
targets = [server.host for server in models.Prometheus.objects.all()]
for target in targets:
- logger.info('Queueing write_urls on %s', target)
+ logger.info("Queueing write_urls on %s", target)
tasks.write_urls.apply_async(queue=target)
- if 'request' in kwargs:
- messages.info(kwargs['request'], f'Updating urls on {targets}')
+ if "request" in kwargs:
+ messages.info(kwargs["request"], f"Updating urls on {targets}")
return True
@@ -117,7 +120,9 @@ def update_log(sender, instance, **kwargs):
# changes
if instance.pk:
old = sender.objects.get(pk=instance.pk)
- models.Audit.log(f'Updated {sender.__name__} {instance}', instance, old)
+ models.Audit.log(f"Updated {sender.__name__} {instance}", instance, old)
+
+
pre_save.connect(update_log, sender=models.Exporter)
pre_save.connect(update_log, sender=models.Farm)
pre_save.connect(update_log, sender=models.Host)
@@ -133,7 +138,9 @@ def create_log(sender, instance, created, **kwargs):
# primary key set so that we can link back to it using the ContentType
# system.
if created:
- models.Audit.log(f'Created {sender.__name__} {instance}', instance)
+ models.Audit.log(f"Created {sender.__name__} {instance}", instance)
+
+
post_save.connect(create_log, sender=models.Exporter)
post_save.connect(create_log, sender=models.Farm)
post_save.connect(create_log, sender=models.Host)
@@ -144,7 +151,9 @@ def create_log(sender, instance, created, **kwargs):
def delete_log(sender, instance, **kwargs):
- models.Audit.log(f'Deleted {sender.__name__} {instance}', instance)
+ models.Audit.log(f"Deleted {sender.__name__} {instance}", instance)
+
+
post_delete.connect(delete_log, sender=models.Exporter)
post_delete.connect(delete_log, sender=models.Farm)
post_delete.connect(delete_log, sender=models.Host)
@@ -180,7 +189,7 @@ def delete_url(sender, instance, **kwargs):
@receiver(post_save, sender=models.Host)
@skip_raw
def save_host(sender, instance, **kwargs):
- '''Only trigger write if parent project also has exporters'''
+ """Only trigger write if parent project also has exporters"""
for project in instance.farm.project_set.all():
if project.exporter_set:
trigger_write_config.send(instance)
@@ -188,7 +197,7 @@ def save_host(sender, instance, **kwargs):
@receiver(pre_delete, sender=models.Host)
def delete_host(sender, instance, **kwargs):
- '''Only trigger write if parent project also has exporters'''
+ """Only trigger write if parent project also has exporters"""
for project in instance.farm.project_set.all():
if project.exporter_set.exists():
trigger_write_config.send(instance)
@@ -196,7 +205,7 @@ def delete_host(sender, instance, **kwargs):
@receiver(pre_delete, sender=models.Farm)
def delete_farm(sender, instance, **kwargs):
- '''Only trigger write if parent project also has exporters'''
+ """Only trigger write if parent project also has exporters"""
for project in instance.project_set.all():
trigger_write_config.send(instance)
@@ -204,7 +213,7 @@ def delete_farm(sender, instance, **kwargs):
@receiver(post_save, sender=models.Exporter)
@skip_raw
def save_exporter(sender, instance, **kwargs):
- '''Only trigger write if parent project also has hosts'''
+ """Only trigger write if parent project also has hosts"""
if instance.project.farm:
if instance.project.farm.host_set.exists():
trigger_write_config.send(instance)
@@ -212,7 +221,7 @@ def save_exporter(sender, instance, **kwargs):
@receiver(pre_delete, sender=models.Exporter)
def delete_exporter(sender, instance, **kwargs):
- '''Only trigger write if parent project also has hosts'''
+ """Only trigger write if parent project also has hosts"""
if instance.project.farm:
if instance.project.farm.host_set.exists():
trigger_write_config.send(instance)
@@ -242,10 +251,7 @@ def save_service(*, sender, instance, **kwargs):
# attached signals
# We don't use sender here, but put it in our parameters so we don't pass
# two sender entries to save_project
- for project in instance.project_set.prefetch_related(
- 'farm',
- 'farm__host_set',
- 'exporter_set'):
+ for project in instance.project_set.prefetch_related("farm", "farm__host_set", "exporter_set"):
if save_project(sender=models.Project, instance=project, **kwargs):
# If any of our save_project returns True, then we do not need to
# check any others
@@ -270,9 +276,12 @@ def add_user_to_default_group(instance, created, **kwargs):
@skip_raw
def add_email_sender(instance, created, **kwargs):
if instance.email:
- models.Sender.objects.get_or_create(obj=instance, sender='promgen.notification.email', value=instance.email)
+ models.Sender.objects.get_or_create(
+ obj=instance, sender="promgen.notification.email", value=instance.email
+ )
else:
- logger.warning('No email for user %s', instance)
+ logger.warning("No email for user %s", instance)
+
# Not a 'real' signal but we match most of the interface for post_save
def check_user_subscription(sender, instance, created, request):
diff --git a/promgen/tasks.py b/promgen/tasks.py
index 5455318b1..fb72ef5b5 100644
--- a/promgen/tasks.py
+++ b/promgen/tasks.py
@@ -125,9 +125,7 @@ def reload_prometheus():
@shared_task
def clear_tombstones():
- target = urljoin(
- util.setting("prometheus:url"), "/api/v1/admin/tsdb/clean_tombstones"
- )
+ target = urljoin(util.setting("prometheus:url"), "/api/v1/admin/tsdb/clean_tombstones")
response = util.post(target)
response.raise_for_status()
diff --git a/promgen/templatetags/promgen.py b/promgen/templatetags/promgen.py
index be54be647..59f9414e2 100644
--- a/promgen/templatetags/promgen.py
+++ b/promgen/templatetags/promgen.py
@@ -20,7 +20,7 @@
register = template.Library()
-EXCLUSION_MACRO = '<exclude>'
+EXCLUSION_MACRO = "<exclude>"
@register.filter()
@@ -41,7 +41,7 @@ def rule_dict(rule):
@register.filter()
def rulemacro(rule, clause=None):
- '''
+ """
Macro rule expansion
Assuming a list of rules with children and parents, expand our macro to exclude child rules
@@ -57,7 +57,7 @@ def rulemacro(rule, clause=None):
foo{project!~"A|B"} / bar{project!~"A|B"} > 5
foo{project="A", } / bar{project="A"} > 3
foo{project="B"} / bar{project="B"} > 4
- '''
+ """
if not clause:
clause = rule.clause
@@ -66,12 +66,8 @@ def rulemacro(rule, clause=None):
for r in rule.overrides.all():
labels[r.content_type.model].append(r.content_object.name)
- filters = {
- k: '|'.join(labels[k]) for k in sorted(labels)
- }
- macro = ','.join(
- sorted(f'{k}!~"{v}"' for k, v in filters.items())
- )
+ filters = {k: "|".join(labels[k]) for k in sorted(labels)}
+ macro = ",".join(sorted(f'{k}!~"{v}"' for k, v in filters.items()))
return clause.replace(EXCLUSION_MACRO, macro)
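# Illustrative expansion of the filter/macro construction above, using
# placeholder label values: each overridden object name is OR-joined per label
# and turned into a negative regex matcher that replaces the exclusion macro in
# the parent clause.
labels = {"project": ["A", "B"], "service": ["Billing"]}
filters = {k: "|".join(labels[k]) for k in sorted(labels)}
macro = ",".join(sorted(f'{k}!~"{v}"' for k, v in filters.items()))
assert macro == 'project!~"A|B",service!~"Billing"'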
@@ -83,10 +79,10 @@ def diff_json(a, b):
b = json.loads(b)
a = json.dumps(a, indent=4, sort_keys=True).splitlines(keepends=True)
b = json.dumps(b, indent=4, sort_keys=True).splitlines(keepends=True)
- diff = ''.join(difflib.unified_diff(a, b))
+ diff = "".join(difflib.unified_diff(a, b))
if diff:
return diff
- return 'No Changes'
+ return "No Changes"
@register.filter()
diff --git a/promgen/tests/__init__.py b/promgen/tests/__init__.py
index 892e7b46d..4bb5cac43 100644
--- a/promgen/tests/__init__.py
+++ b/promgen/tests/__init__.py
@@ -35,9 +35,7 @@ def fireAlert(self, source="alertmanager.json", data=None):
if data is None:
data = Data("examples", source).raw()
- return self.client.post(
- reverse("alert"), data=data, content_type="application/json"
- )
+ return self.client.post(reverse("alert"), data=data, content_type="application/json")
def assertRoute(self, response, view, status=200, msg=None):
self.assertEqual(response.status_code, status, msg)
diff --git a/promgen/tests/notification/test_email.py b/promgen/tests/notification/test_email.py
index 6e2b3c0f2..b97cd765d 100644
--- a/promgen/tests/notification/test_email.py
+++ b/promgen/tests/notification/test_email.py
@@ -31,12 +31,8 @@ def test_email(self, mock_email):
mock_email.assert_has_calls(
[
- mock.call(
- _SUBJECT, _MESSAGE, "promgen@example.com", ["example@example.com"]
- ),
- mock.call(
- _SUBJECT, _MESSAGE, "promgen@example.com", ["foo@example.com"]
- ),
+ mock.call(_SUBJECT, _MESSAGE, "promgen@example.com", ["example@example.com"]),
+ mock.call(_SUBJECT, _MESSAGE, "promgen@example.com", ["foo@example.com"]),
],
any_order=True,
)
diff --git a/promgen/tests/notification/test_linenotify.py b/promgen/tests/notification/test_linenotify.py
index 06d25f57c..3076c6120 100644
--- a/promgen/tests/notification/test_linenotify.py
+++ b/promgen/tests/notification/test_linenotify.py
@@ -5,7 +5,7 @@
from django.test import override_settings
-from promgen import models, tests, rest
+from promgen import models, rest, tests
from promgen.notification.linenotify import NotificationLineNotify
diff --git a/promgen/tests/notification/test_slack.py b/promgen/tests/notification/test_slack.py
index fa19b8df5..da2c36a1c 100644
--- a/promgen/tests/notification/test_slack.py
+++ b/promgen/tests/notification/test_slack.py
@@ -10,12 +10,8 @@
class SlackTest(tests.PromgenTest):
- TestHook1 = (
- "https://hooks.slack.com/services/XXXXXXXXX/XXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXX"
- )
- TestHook2 = (
- "https://hooks.slack.com/services/YYYYYYYYY/YYYYYYYYY/YYYYYYYYYYYYYYYYYYYYYYYY"
- )
+ TestHook1 = "https://hooks.slack.com/services/XXXXXXXXX/XXXXXXXXX/XXXXXXXXXXXXXXXXXXXXXXXX"
+ TestHook2 = "https://hooks.slack.com/services/YYYYYYYYY/YYYYYYYYY/YYYYYYYYYYYYYYYYYYYYYYYY"
def setUp(self):
one = models.Project.objects.get(pk=1)
diff --git a/promgen/tests/test_alert_rules.py b/promgen/tests/test_alert_rules.py
index ff6150397..d42f90f09 100644
--- a/promgen/tests/test_alert_rules.py
+++ b/promgen/tests/test_alert_rules.py
@@ -10,7 +10,7 @@
import promgen.templatetags.promgen as macro
from promgen import models, prometheus, tests, views
-_RULE_V2 = '''
+_RULE_V2 = """
groups:
- name: promgen.example.com
rules:
@@ -22,42 +22,56 @@
for: 1s
labels:
severity: severe
-'''.lstrip().encode('utf-8')
+""".lstrip().encode(
+ "utf-8"
+)
-TEST_SETTINGS = tests.Data('examples', 'promgen.yml').yaml()
+TEST_SETTINGS = tests.Data("examples", "promgen.yml").yaml()
class RuleTest(tests.PromgenTest):
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def setUp(self, mock_signal):
self.user = self.force_login(username="demo")
self.site = models.Site.objects.get_current()
- self.shard = models.Shard.objects.create(name='Shard 1')
- self.service = models.Service.objects.create(id=999, name='Service 1')
+ self.shard = models.Shard.objects.create(name="Shard 1")
+ self.service = models.Service.objects.create(id=999, name="Service 1")
self.rule = models.Rule.objects.create(
- name='RuleName',
- clause='up==0',
- duration='1s',
- obj=self.site
+ name="RuleName",
+ clause="up==0",
+ duration="1s",
+ obj=self.site,
+ )
+ models.RuleLabel.objects.create(
+ name="severity",
+ value="severe",
+ rule=self.rule,
+ )
+ models.RuleAnnotation.objects.create(
+ name="summary",
+ value="Test case",
+ rule=self.rule,
)
- models.RuleLabel.objects.create(name='severity', value='severe', rule=self.rule)
- models.RuleAnnotation.objects.create(name='summary', value='Test case', rule=self.rule)
- @override_settings(PROMGEN_SCHEME='https')
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @override_settings(PROMGEN_SCHEME="https")
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def test_write_new(self, mock_post):
result = prometheus.render_rules()
self.assertEqual(result, _RULE_V2 % self.rule.id)
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def test_copy(self, mock_post):
- service = models.Service.objects.create(name='Service 2')
- copy = self.rule.copy_to(content_type='service', object_id=service.id)
+ service = models.Service.objects.create(name="Service 2")
+ copy = self.rule.copy_to(content_type="service", object_id=service.id)
# Test that our copy has the same labels and annotations
- self.assertIn('severity', copy.labels)
- self.assertIn('summary', copy.annotations)
+ self.assertIn("severity", copy.labels)
+ self.assertIn("summary", copy.annotations)
# and test that we actually duplicated them and not moved them
- self.assertCount(models.RuleLabel, 3, 'Copied rule has existing labels + service label')
+ self.assertCount(
+ models.RuleLabel,
+ 3,
+ "Copied rule has existing labels + service label",
+ )
self.assertCount(models.RuleAnnotation, 2)
@override_settings(PROMGEN=TEST_SETTINGS)
@@ -84,9 +98,7 @@ def test_import_project_rule(self, mock_post):
name="Project 1", service=self.service, shard=self.shard
)
response = self.client.post(
- reverse(
- "rule-new", kwargs={"content_type": "project", "object_id": project.id}
- ),
+ reverse("rule-new", kwargs={"content_type": "project", "object_id": project.id}),
{"rules": tests.Data("examples", "import.rule.yml").raw()},
follow=True,
)
@@ -112,40 +124,45 @@ def test_import_service_rule(self, mock_post):
self.assertCount(models.RuleLabel, 4, "Missing labels")
self.assertCount(models.RuleAnnotation, 9, "Missing annotations")
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def test_missing_permission(self, mock_post):
- self.client.post(reverse('rule-import'), {
- 'rules': tests.Data('examples', 'import.rule.yml').raw()
- })
+ self.client.post(
+ reverse("rule-import"),
+ {"rules": tests.Data("examples", "import.rule.yml").raw()},
+ )
# Should only be a single rule from our initial setup
- self.assertCount(models.Rule, 1, 'Missing Rule')
+ self.assertCount(models.Rule, 1, "Missing Rule")
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def test_macro(self, mock_post):
- self.project = models.Project.objects.create(name='Project 1', service=self.service, shard=self.shard)
- clause = 'up{%s}' % macro.EXCLUSION_MACRO
+ self.project = models.Project.objects.create(
+ name="Project 1", service=self.service, shard=self.shard
+ )
+ clause = "up{%s}" % macro.EXCLUSION_MACRO
rules = {
- 'common': {'assert': 'up{service!~"Service 1"}'},
- 'service': {'assert': 'up{service="Service 1",project!~"Project 1"}'},
- 'project': {'assert': 'up{service="Service 1",project="Project 1",}'},
+ "common": {"assert": 'up{service!~"Service 1"}'},
+ "service": {"assert": 'up{service="Service 1",project!~"Project 1"}'},
+ "project": {"assert": 'up{service="Service 1",project="Project 1",}'},
}
- common_rule = models.Rule.objects.create(name='Common', clause=clause, duration='1s', obj=self.site)
- rules['common']['model'] = models.Rule.objects.get(pk=common_rule.pk)
- service_rule = common_rule.copy_to('service', self.service.id)
- rules['service']['model'] = models.Rule.objects.get(pk=service_rule.pk)
- project_rule = service_rule.copy_to('project', self.project.id)
- rules['project']['model'] = models.Rule.objects.get(pk=project_rule.pk)
+ common_rule = models.Rule.objects.create(
+ name="Common", clause=clause, duration="1s", obj=self.site
+ )
+ rules["common"]["model"] = models.Rule.objects.get(pk=common_rule.pk)
+ service_rule = common_rule.copy_to("service", self.service.id)
+ rules["service"]["model"] = models.Rule.objects.get(pk=service_rule.pk)
+ project_rule = service_rule.copy_to("project", self.project.id)
+ rules["project"]["model"] = models.Rule.objects.get(pk=project_rule.pk)
for k, r in rules.items():
- self.assertEqual(macro.rulemacro(r['model']), r['assert'], 'Expansion wrong for %s' % k)
+ self.assertEqual(macro.rulemacro(r["model"]), r["assert"], "Expansion wrong for %s" % k)
@override_settings(PROMGEN=TEST_SETTINGS)
- @mock.patch('django.dispatch.dispatcher.Signal.send')
+ @mock.patch("django.dispatch.dispatcher.Signal.send")
def test_invalid_annotation(self, mock_post):
# $label.foo is invalid (should be $labels) so make sure we raise an exception
- models.RuleAnnotation.objects.create(name='summary', value='{{$label.foo}}', rule=self.rule)
+ models.RuleAnnotation.objects.create(name="summary", value="{{$label.foo}}", rule=self.rule)
with self.assertRaises(ValidationError):
prometheus.check_rules([self.rule])
diff --git a/promgen/tests/test_routes.py b/promgen/tests/test_routes.py
index a64167add..dfa0befd8 100644
--- a/promgen/tests/test_routes.py
+++ b/promgen/tests/test_routes.py
@@ -7,11 +7,11 @@
from django.test import override_settings
from django.urls import reverse
-from promgen import models, views, tests
+from promgen import models, tests, views
-TEST_SETTINGS = tests.Data('examples', 'promgen.yml').yaml()
-TEST_IMPORT = tests.Data('examples', 'import.json').raw()
-TEST_REPLACE = tests.Data('examples', 'replace.json').raw()
+TEST_SETTINGS = tests.Data("examples", "promgen.yml").yaml()
+TEST_IMPORT = tests.Data("examples", "import.json").raw()
+TEST_REPLACE = tests.Data("examples", "replace.json").raw()
class RouteTests(tests.PromgenTest):
@@ -20,8 +20,8 @@ def setUp(self):
@override_settings(PROMGEN=TEST_SETTINGS)
@override_settings(CELERY_TASK_ALWAYS_EAGER=True)
- @mock.patch('promgen.signals._trigger_write_config')
- @mock.patch('promgen.tasks.reload_prometheus')
+ @mock.patch("promgen.signals._trigger_write_config")
+ @mock.patch("promgen.tasks.reload_prometheus")
def test_import(self, mock_write, mock_reload):
self.add_user_permissions(
"promgen.change_rule", "promgen.change_site", "promgen.change_exporter"
@@ -52,7 +52,9 @@ def test_replace(self, mock_write, mock_reload):
self.assertCount(models.Service, 3, "Import one service (Fixture has two services)")
self.assertCount(models.Project, 4, "Import two projects (Fixture has 2 projects)")
self.assertCount(models.Exporter, 2, "Import two exporters")
- self.assertCount(models.Farm, 4, "Original two farms and one new farm (fixture has one farm)")
+ self.assertCount(
+ models.Farm, 4, "Original two farms and one new farm (fixture has one farm)"
+ )
self.assertCount(models.Host, 5, "Original 3 hosts and two new ones")
@mock.patch("requests.get")
@@ -91,9 +93,7 @@ def test_scrape(self, mock_get):
# For each POST body, check to see that we generate and attempt to
# scrape the correct URL
- response = self.client.post(
- reverse("exporter-scrape", kwargs={"pk": project.pk}), body
- )
+ response = self.client.post(reverse("exporter-scrape", kwargs={"pk": project.pk}), body)
self.assertRoute(response, views.ExporterScrape, 200)
self.assertEqual(mock_get.call_args[0][0], url)
diff --git a/promgen/tests/test_signals.py b/promgen/tests/test_signals.py
index 5db1c3feb..2226213d9 100644
--- a/promgen/tests/test_signals.py
+++ b/promgen/tests/test_signals.py
@@ -7,54 +7,62 @@
class SignalTest(tests.PromgenTest):
- @mock.patch('promgen.models.Audit.log')
- @mock.patch('promgen.signals.trigger_write_config.send')
+ @mock.patch("promgen.models.Audit.log")
+ @mock.patch("promgen.signals.trigger_write_config.send")
def test_write_signal(self, write_mock, log_mock):
# Create a test farm
farm = models.Farm.objects.create(name="farm")
models.Host.objects.create(name="Host", farm=farm)
# Create a new project for testing
- project = models.Project.objects.create(
- name="Project", service_id=1, farm=farm, shard_id=1
- )
+ project = models.Project.objects.create(name="Project", service_id=1, farm=farm, shard_id=1)
e1 = models.Exporter.objects.create(
- job='Exporter 1', port=1234, project=project,
+ job="Exporter 1",
+ port=1234,
+ project=project,
)
e2 = models.Exporter.objects.create(
- job='Exporter 2', port=1234, project=project,
+ job="Exporter 2",
+ port=1234,
+ project=project,
)
# Should be called once for each created exporter
self.assertEqual(write_mock.call_count, 2, "Two write calls")
write_mock.assert_has_calls([mock.call(e1), mock.call(e2)])
- @mock.patch('promgen.models.Audit.log')
- @mock.patch('promgen.signals.trigger_write_config.send')
+ @mock.patch("promgen.models.Audit.log")
+ @mock.patch("promgen.signals.trigger_write_config.send")
def test_write_and_delete(self, write_mock, log_mock):
# Create a test farm
farm = models.Farm.objects.create(name="farm")
models.Host.objects.create(name="Host", farm=farm)
- project = models.Project.objects.create(
- name="Project", service_id=1, farm=farm, shard_id=1
- )
+ project = models.Project.objects.create(name="Project", service_id=1, farm=farm, shard_id=1)
# Farm but no exporters so no call
- self.assertEqual(write_mock.call_count, 0, 'Should not be called without exporters')
+ self.assertEqual(write_mock.call_count, 0, "Should not be called without exporters")
models.Exporter.objects.create(
- job='Exporter 1', port=1234, project=project,
+ job="Exporter 1",
+ port=1234,
+ project=project,
)
# Create an exporter so our call should be 1
- self.assertEqual(write_mock.call_count, 1, 'Should be called after creating exporter')
+ self.assertEqual(write_mock.call_count, 1, "Should be called after creating exporter")
farm.delete()
# Deleting our farm will call pre_delete on Farm and post_save on project
- self.assertEqual(write_mock.call_count, 3, 'Should be called after deleting farm')
+ self.assertEqual(write_mock.call_count, 3, "Should be called after deleting farm")
models.Exporter.objects.create(
- job='Exporter 2', port=1234, project=project,
+ job="Exporter 2",
+ port=1234,
+ project=project,
)
# Deleting our farm means our config is inactive, so no additional calls
# from creating exporter
- self.assertEqual(write_mock.call_count, 3, 'No farms, so should not be called after deleting exporter')
+ self.assertEqual(
+ write_mock.call_count,
+ 3,
+ "No farms, so should not be called after deleting exporter",
+ )
diff --git a/promgen/tests/test_silence.py b/promgen/tests/test_silence.py
index 3c280d177..3008db78d 100644
--- a/promgen/tests/test_silence.py
+++ b/promgen/tests/test_silence.py
@@ -9,12 +9,12 @@
from promgen import tests
-TEST_SETTINGS = tests.Data('examples', 'promgen.yml').yaml()
-TEST_DURATION = tests.Data('examples', 'silence.duration.json').json()
-TEST_RANGE = tests.Data('examples', 'silence.range.json').json()
+TEST_SETTINGS = tests.Data("examples", "promgen.yml").yaml()
+TEST_DURATION = tests.Data("examples", "silence.duration.json").json()
+TEST_RANGE = tests.Data("examples", "silence.range.json").json()
# Explicitly set a timezone for our test to try to catch conversion errors
-TEST_SETTINGS['timezone'] = 'Asia/Tokyo'
+TEST_SETTINGS["timezone"] = "Asia/Tokyo"
class SilenceTest(tests.PromgenTest):
@@ -22,19 +22,19 @@ def setUp(self):
self.user = self.force_login(username="demo")
@override_settings(PROMGEN=TEST_SETTINGS)
- @mock.patch('promgen.util.post')
+ @mock.patch("promgen.util.post")
def test_duration(self, mock_post):
mock_post.return_value.status_code = 200
- with mock.patch('django.utils.timezone.now') as mock_now:
+ with mock.patch("django.utils.timezone.now") as mock_now:
mock_now.return_value = datetime.datetime(2017, 12, 14, tzinfo=datetime.timezone.utc)
# I would prefer to be able to test with multiple labels, but since
# it's difficult to test a list of dictionaries (the order is non-
# deterministic) we just test with a single label for now
self.client.post(
- reverse('proxy-silence'),
+ reverse("proxy-silence"),
data={
- 'duration': '1m',
+ "duration": "1m",
"labels": {"instance": "example.com:[0-9]*"},
},
content_type="application/json",
@@ -44,22 +44,20 @@ def test_duration(self, mock_post):
)
@override_settings(PROMGEN=TEST_SETTINGS)
- @mock.patch('promgen.util.post')
+ @mock.patch("promgen.util.post")
def test_range(self, mock_post):
mock_post.return_value.status_code = 200
- with mock.patch('django.utils.timezone.now') as mock_now:
+ with mock.patch("django.utils.timezone.now") as mock_now:
mock_now.return_value = datetime.datetime(2017, 12, 14, tzinfo=datetime.timezone.utc)
self.client.post(
- reverse('proxy-silence'),
+ reverse("proxy-silence"),
data={
- 'startsAt': '2017-12-14 00:01',
- 'endsAt': '2017-12-14 00:05',
+ "startsAt": "2017-12-14 00:01",
+ "endsAt": "2017-12-14 00:05",
"labels": {"instance": "example.com:[0-9]*"},
},
content_type="application/json",
)
- self.assertMockCalls(
- mock_post, "http://alertmanager:9093/api/v1/silences", json=TEST_RANGE
- )
+ self.assertMockCalls(mock_post, "http://alertmanager:9093/api/v1/silences", json=TEST_RANGE)
diff --git a/promgen/urls.py b/promgen/urls.py
index 51c8e71cb..eaeeed625 100644
--- a/promgen/urls.py
+++ b/promgen/urls.py
@@ -16,12 +16,14 @@
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
+from rest_framework import routers
+
from django.conf.urls import url
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
+
from promgen import proxy, rest, views
-from rest_framework import routers
router = routers.DefaultRouter()
router.register("all", rest.AllViewSet, basename="all")
@@ -31,90 +33,85 @@
urlpatterns = [
- path('admin/', admin.site.urls),
-
- path('', views.HomeList.as_view(), name='home'),
- path('datasource', views.DatasourceList.as_view(), name='datasource-list'),
- path('datasource/<int:pk>', views.DatasourceDetail.as_view(), name='datasource-detail'),
-
- path('new/service', views.ServiceRegister.as_view(), name='service-new'),
- path('service', views.ServiceList.as_view(), name='service-list'),
- path('service/<int:pk>', views.ServiceDetail.as_view(), name='service-detail'),
- path('service/<int:pk>/delete', views.ServiceDelete.as_view(), name='service-delete'),
- path('service/<int:pk>/new', views.ProjectRegister.as_view(), name='project-new'),
- path('service/<int:pk>/update', views.ServiceUpdate.as_view(), name='service-update'),
- path('service/<int:pk>/notifier', views.ServiceNotifierRegister.as_view(), name='service-notifier'),
-
- path('project/<int:pk>', views.ProjectDetail.as_view(), name='project-detail'),
- path('project/<int:pk>/delete', views.ProjectDelete.as_view(), name='project-delete'),
- path('project/<int:pk>/update', views.ProjectUpdate.as_view(), name='project-update'),
- path('project/<int:pk>/unlink', views.UnlinkFarm.as_view(), name='farm-unlink'),
- path('project//link/