route:
  group_by: ["alertname", "cluster", "service"]
  receiver: devops-team
  group_wait: 15s
  group_interval: 1m
  routes:
    - receiver: devops-team
      continue: true
    - receiver: pagerduty
      matchers:
        - severity="page"
      continue: true
    - receiver: email
      continue: true

receivers:
  - name: devops-team
    slack_configs:
      - api_url_file: "/opt/pydis/alertmanager/webhooks/DEVOPS_HOOK"
        send_resolved: true
        title: '{{ if eq .Status "firing" }}[FIRING]{{ else }}[RESOLVED]{{ end }}'
        text: |
          {{ if eq .Status "firing" }}{{ range .Alerts }}
          {{ if .Labels.instance }}`{{ .Labels.instance }}`: {{ end }}**{{ .Annotations.summary }}:** {{ .Annotations.description }} [(Link)]({{ .GeneratorURL }})
          {{ end }}{{ else }}Alert has resolved.{{ end }}
        fields:
          - title: Alert
            value: "{{ .GroupLabels.alertname }}"

  - name: pagerduty
    pagerduty_configs:
      - routing_key_file: "/opt/pydis/alertmanager/webhooks/PAGERDUTY_KEY"
        url: https://events.pagerduty.com/v2/enqueue

  - name: email
    email_configs:
      - to: devops-alerts@pydis.wtf
        send_resolved: true
        from: alertmanager@int.pydis.wtf
        smarthost: mail.pydis.wtf:587
        auth_username: alertmanager@int.pydis.wtf
        auth_password_file: /opt/pydis/alertmanager/webhooks/EMAIL_PASSWORD
        text: |
          {{ .CommonAnnotations.summary }}
          {{ .CommonAnnotations.description }}

          {{ if eq .Status "firing" }}{{ range .Alerts }}
          {{ if .Labels.instance }}{{ .Labels.instance }}: {{ end }}{{ .Annotations.summary }}: {{ .Annotations.description }} {{ .GeneratorURL }}
          {{ end }}{{ else }}Alert has resolved.{{ end }}
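
# Usage sketch (comments only, not part of the routing or receiver definitions above).
# Assuming this file is saved at /opt/pydis/alertmanager/alertmanager.yml (path is an
# assumption, not taken from the config), amtool can lint it and preview routing before
# Alertmanager is reloaded. Because every child route sets `continue: true`, an alert
# carrying severity="page" should fan out to devops-team, pagerduty, and email:
#
#   amtool check-config /opt/pydis/alertmanager/alertmanager.yml
#   amtool config routes test --config.file=/opt/pydis/alertmanager/alertmanager.yml severity=page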