rohan-arora-ibm committed
Commit 7af85b6 · unverified · 1 Parent(s): 7f31d04

Configure Git LFS
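
Configuring Git LFS on an existing repository typically means removing the large files from regular Git storage first and re-adding them under LFS tracking; the 50 deletions listed below are consistent with that first step, though the accompanying `.gitattributes` change is not visible in this truncated view. To retrieve the snapshots as they existed before this commit, the parent revision can be pinned when downloading. A minimal sketch using the `huggingface_hub` client, where `repo_id` and `repo_type` are placeholders since the repository name is not shown here:

```python
# Minimal sketch: download the repository as it was at the parent
# commit (7f31d04), i.e. before these files were deleted.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="<org>/<repo-name>",  # placeholder -- not shown in this view
    repo_type="dataset",          # assumption based on the snapshot layout
    revision="7f31d04",           # parent commit of 7af85b6
)
print(local_dir)
```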

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T02_58_30.498798.json +0 -114
  2. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T02_59_30.494899.json +0 -142
  3. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_00_30.494694.json +0 -100
  4. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_01_30.495267.json +0 -132
  5. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_02_30.495072.json +0 -146
  6. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_03_30.495653.json +0 -132
  7. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_04_30.495210.json +0 -132
  8. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_05_30.495422.json +0 -132
  9. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_06_30.496224.json +0 -198
  10. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_07_30.495783.json +0 -184
  11. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_08_30.495958.json +0 -190
  12. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_09_30.496225.json +0 -190
  13. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_10_30.496221.json +0 -218
  14. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_19_09.407395.json +0 -86
  15. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_20_09.403671.json +0 -86
  16. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_21_09.403302.json +0 -72
  17. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_22_09.403805.json +0 -104
  18. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_23_09.403458.json +0 -118
  19. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_24_09.403612.json +0 -104
  20. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_25_09.403752.json +0 -104
  21. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_26_09.404082.json +0 -125
  22. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_27_09.404699.json +0 -176
  23. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_28_09.404331.json +0 -204
  24. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_29_09.404619.json +0 -190
  25. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_in_alerting_state_2025-11-19T030519.802461Z.json +0 -92
  26. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_in_alerting_state_2025-12-01T212502.573985Z.json +0 -64
  27. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/ground_truth.yaml +0 -69
  28. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/k8s_events_otel-demo_chaos-mesh.tsv +0 -3
  29. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/k8s_objects_otel-demo_chaos-mesh.tsv +0 -3
  30. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_accounting-687b789684-679q7.tsv +0 -3
  31. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_accounting-687b789684-lp2pr.tsv +0 -3
  32. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_ad-554b849958-5pgd4.tsv +0 -3
  33. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_ad-554b849958-gssfx.tsv +0 -3
  34. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_cart-9fd895bb7-4lx68.tsv +0 -3
  35. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_cart-9fd895bb7-82967.tsv +0 -3
  36. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_checkout-8546fdc74d-7m4dn.tsv +0 -3
  37. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_checkout-8546fdc74d-zpplx.tsv +0 -3
  38. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_currency-68d8484df8-dk45t.tsv +0 -3
  39. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_currency-68d8484df8-xb4rs.tsv +0 -3
  40. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_email-64ddb8d8d7-nbf2t.tsv +0 -3
  41. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_email-64ddb8d8d7-xz5lv.tsv +0 -3
  42. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_flagd-6554fc5b8d-crn2h.tsv +0 -3
  43. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_flagd-6554fc5b8d-tmt6g.tsv +0 -3
  44. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_fraud-detection-886c99494-2gpr4.tsv +0 -3
  45. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_fraud-detection-886c99494-s2knk.tsv +0 -3
  46. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-675fd7b5c5-gd8gl.tsv +0 -3
  47. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-675fd7b5c5-ks5z4.tsv +0 -3
  48. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-proxy-6b4d584985-6bl4q.tsv +0 -3
  49. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-proxy-6b4d584985-9sfgn.tsv +0 -3
  50. snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_image-provider-55cfd9d7d6-2v48s.tsv +0 -3
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T02_58_30.498798.json DELETED
@@ -1,114 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "currency",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service currency in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 3792.497157894716s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "3.792497157894716e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 22.666666666666664)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.2666666666666664e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 4.033333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "4.033333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 2.555616111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "2.555616111111111e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
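
Each snapshot above is a plain JSON array of alert objects with `labels`, `annotations`, `state`, `activeAt`, and `value` fields. A minimal sketch of summarizing one snapshot, using only that visible schema (the path is one of the deleted files; the script itself is illustrative, not part of the dataset):

```python
import json

# Minimal sketch: list the firing alerts in one snapshot file,
# skipping the always-firing Watchdog heartbeat.
path = "alerts/alerts_at_2025-11-19T02_58_30.498798.json"
with open(path) as f:
    alerts = json.load(f)

for alert in alerts:
    labels = alert["labels"]
    if alert["state"] != "firing" or labels["alertname"] == "Watchdog":
        continue
    print(
        labels["alertname"],
        labels.get("service_name", "-"),  # infra alerts may lack this label
        float(alert["value"]),            # values are scientific-notation strings
    )
```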
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T02_59_30.494899.json DELETED
@@ -1,142 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "currency",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service currency in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 3410.493827160485s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "3.410493827160485e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 23.577777777777772)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.3577777777777772e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 5.488888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "5.488888888888888e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 4.333426666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "4.333426666666666e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "ad",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service ad in namespace otel-demo is above 0 (current: 0.1120411111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:57:55.247231189Z",
-     "value": "1.120411111111111e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "fraud-detection",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service fraud-detection in namespace otel-demo is above 0 (current: 0.02037111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:57:55.247231189Z",
-     "value": "2.037111111111111e-02"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
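
Since the snapshots are taken roughly once per minute, diffing the label sets of two consecutive files shows which alerts started or stopped firing between them. A sketch under the same schema assumptions, using two of the snapshot files shown above:

```python
import json

def label_keys(path):
    """Identify each alert by its sorted label set, per the schema above."""
    with open(path) as f:
        return {tuple(sorted(a["labels"].items())) for a in json.load(f)}

# Two consecutive per-minute snapshots from this scenario.
before = label_keys("alerts/alerts_at_2025-11-19T02_58_30.498798.json")
after = label_keys("alerts/alerts_at_2025-11-19T02_59_30.494899.json")

print("started firing:", after - before)
print("stopped firing:", before - after)
```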
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_00_30.494694.json DELETED
@@ -1,100 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 3430.9523809523716s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "3.4309523809523716e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 30.955555555555552)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.0955555555555552e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 6.188888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "6.188888888888888e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 3.733333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "3.733333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_01_30.495267.json DELETED
@@ -1,132 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 1906.2499999999993s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.9062499999999993e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1867.9268292682912s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.8679268292682912e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.4555555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.4555555555555555e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.7555555555555554)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "7.555555555555554e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.49999999999999)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.749999999999999e+01"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.416990967741935e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.416990967741935e+04"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_02_30.495072.json DELETED
@@ -1,146 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 2071.905697445965s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "2.071905697445965e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 2947.560975609752s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.947560975609752e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.0999999999999996)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.0999999999999996e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.111111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.111111111111111e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "recommendation",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service recommendation in namespace otel-demo is above 0 (current: 0.010185555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:00:55.247231189Z",
-     "value": "1.0185555555555555e-02"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.42222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.742222222222222e+01"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.1348705882352944e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.1348705882352944e+04"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_03_30.495653.json DELETED
@@ -1,132 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 2205.8823529411798s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.2058823529411798e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1961.8809980806145s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.9618809980806145e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 39.14444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.914444444444444e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.766666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.766666666666666e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.4666666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.4666666666666666e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.34492e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.34492e+04"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_04_30.495210.json DELETED
@@ -1,132 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.4012e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.4012e+04"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 1799.3749999999989s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.7993749999999989e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1766.0532150776057s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.7660532150776057e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.4)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.4e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 40.133333333333326)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "4.0133333333333326e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.755555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.755555555555555e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_05_30.495422.json DELETED
@@ -1,132 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 2863.7096774193437s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.8637096774193437e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1677.132352941176s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.677132352941176e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.96666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.796666666666666e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.677777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.677777777777777e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.3555555555555554)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.3555555555555554e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.3892e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.3892e+04"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_06_30.496224.json DELETED
@@ -1,198 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.2.209:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:33.468639093Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "email",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service email in namespace otel-demo is above 1500ms (current: 1624s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:04:55.247231189Z",
-     "value": "1.624e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 3244.117647058826s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.244117647058826e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1806.9879518072285s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.8069879518072285e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 36.39999999999999)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.639999999999999e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.533333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.533333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.3333333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.3333333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "ad",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service ad in namespace otel-demo is above 0 (current: 0.010185555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:04:55.247231189Z",
-     "value": "1.0185555555555555e-02"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.100141176470588e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.100141176470588e+04"
-   },
-   {
-     "labels": {
-       "alertname": "TargetDown",
-       "job": "otel-collector",
-       "namespace": "otel-demo",
-       "service": "otel-collector",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
-       "summary": "One or more targets are unreachable."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:31.790424011Z",
-     "value": "5e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
 
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_07_30.495783.json DELETED
@@ -1,184 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.2.209:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:33.468639093Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 2495.394736842107s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.495394736842107e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1892.876404494381s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.892876404494381e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.422222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.422222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "fraud-detection",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service fraud-detection in namespace otel-demo is above 0 (current: 0.010185555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:05:55.247231189Z",
-     "value": "1.0185555555555555e-02"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.05555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.705555555555555e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.755555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.755555555555555e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.5431e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "5.5431e+04"
-   },
-   {
-     "labels": {
-       "alertname": "TargetDown",
-       "job": "otel-collector",
-       "namespace": "otel-demo",
-       "service": "otel-collector",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
-       "summary": "One or more targets are unreachable."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:31.790424011Z",
-     "value": "5e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_08_30.495958.json DELETED
@@ -1,190 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1737.5392670157041s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.7375392670157041e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 1561.5384615384603s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5615384615384603e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.3555555555555554)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.3555555555555554e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 36.8111111111111)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.68111111111111e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.533333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.533333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.6782439024390245e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.6782439024390245e+04"
-   },
-   {
-     "labels": {
-       "alertname": "TargetDown",
-       "job": "otel-collector",
-       "namespace": "otel-demo",
-       "service": "otel-collector",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
-       "summary": "One or more targets are unreachable."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:31.790424011Z",
-     "value": "5e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.120.175:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-d5ds6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:07:31.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.2.209:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:33.468639093Z",
-     "value": "0e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_09_30.496225.json DELETED
@@ -1,190 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.2.209:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:33.468639093Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 1724.2857142857101s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.7242857142857101e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1805.568181818181s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.805568181818181e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 35.599999999999994)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.5599999999999994e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.255555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.255555555555555e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.1777777777777776)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.1777777777777776e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.665940157480315e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.665940157480315e+04"
-   },
-   {
-     "labels": {
-       "alertname": "TargetDown",
-       "job": "otel-collector",
-       "namespace": "otel-demo",
-       "service": "otel-collector",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
-       "summary": "One or more targets are unreachable."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:31.790424011Z",
-     "value": "5e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.120.175:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-d5ds6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:07:31.790424011Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-11-19T03_10_30.496221.json DELETED
@@ -1,218 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "PrometheusNotConnectedToAlertmanagers",
-       "container": "prometheus",
-       "endpoint": "http-web",
-       "instance": "100.96.2.209:9090",
-       "job": "prometheus-kube-prometheus-prometheus",
-       "namespace": "prometheus",
-       "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
-       "service": "prometheus-kube-prometheus-prometheus",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
-       "summary": "Prometheus is not connected to any Alertmanagers."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:33.468639093Z",
-     "value": "0e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeControllerManagerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeControllerManager has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubecontrollermanagerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:16.942844619Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeSchedulerDown",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "KubeScheduler has disappeared from Prometheus target discovery.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeschedulerdown",
-       "summary": "Target disappeared from Prometheus target discovery."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:06.54704151Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 1834.318181818182s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "1.834318181818182e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1831.8811881188112s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.8318811881188112e+03"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.17777777777777)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "3.717777777777777e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.5222222222222217)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:55.247231189Z",
-     "value": "2.5222222222222217e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.2666666666666666)"
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:56:55.247231189Z",
-     "value": "1.2666666666666666e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.691593220338983e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.120.157:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:57.949337198Z",
-     "value": "4.691593220338983e+04"
-   },
-   {
-     "labels": {
-       "alertname": "TargetDown",
-       "job": "otel-collector",
-       "namespace": "otel-demo",
-       "service": "otel-collector",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
-       "summary": "One or more targets are unreachable."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:31.790424011Z",
-     "value": "5e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T02:55:01.790424011Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "InfoInhibitor",
-       "alertstate": "pending",
-       "container": "recorder",
-       "instance": "10.0.120.175:10250",
-       "namespace": "data-recorders",
-       "pod": "clickhouse-unified-recorder-d5ds6",
-       "service": "prometheus-kube-prometheus-kubelet",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
-       "summary": "Info-level alert inhibition."
-     },
-     "state": "firing",
-     "activeAt": "2025-11-19T03:07:31.790424011Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_19_09.407395.json DELETED
@@ -1,86 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.1555555555555554)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.1555555555555554e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 35.75555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "3.575555555555555e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.3)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.3e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "ad",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service ad in namespace otel-demo is above 0 (current: 0.15502740740740742)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:17:24.914078592Z",
-     "value": "1.5502740740740742e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_20_09.403671.json DELETED
@@ -1,86 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.0444444444444443)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.0444444444444443e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 35.98888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "3.598888888888888e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 1.922222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.922222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "fraud-detection",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service fraud-detection in namespace otel-demo is above 0 (current: 0.01823851851851852)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:18:24.914078592Z",
-     "value": "1.823851851851852e-02"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_21_09.403302.json DELETED
@@ -1,72 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.1333333333333333)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.1333333333333333e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 34.65555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "3.465555555555555e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.0444444444444443)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.0444444444444443e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_22_09.403805.json DELETED
@@ -1,104 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.222222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.222222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 35.04444444444444)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "3.504444444444444e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.2888888888888888)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.2888888888888888e+00"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "4.390776907763769e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "4.390776907763769e+04"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_23_09.403458.json DELETED
@@ -1,118 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "4.390439263803681e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "4.390439263803681e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.4888888888888887)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.4888888888888887e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 34.166666666666664)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "3.4166666666666664e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.822222222222222)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.822222222222222e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "recommendation",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service recommendation in namespace otel-demo is above 0 (current: 0.00911925925925926)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:21:24.914078592Z",
-     "value": "9.11925925925926e-03"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_24_09.403612.json DELETED
@@ -1,104 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.5666666666666664)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.5666666666666664e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.4)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.4e+00"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 27.499999999999996)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.7499999999999996e+01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "5.172538775510204e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "5.172538775510204e+04"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_25_09.403752.json DELETED
@@ -1,104 +0,0 @@
- [
-   {
-     "labels": {
-       "alertname": "RequestLatency",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "1.5e+04"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "checkout",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5555555555555555)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "5.555555555555555e-01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend-proxy",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 27.188888888888886)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "2.7188888888888886e+01"
-   },
-   {
-     "labels": {
-       "alertname": "RequestErrorRate",
-       "namespace": "otel-demo",
-       "service_name": "frontend",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.9999999999999999)"
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:24.914078592Z",
-     "value": "9.999999999999999e-01"
-   },
-   {
-     "labels": {
-       "alertname": "Watchdog",
-       "severity": "none"
-     },
-     "annotations": {
-       "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
-       "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:15:53.495039596Z",
-     "value": "1e+00"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "warning"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "5.2272e+04"
-   },
-   {
-     "labels": {
-       "alertname": "KubeClientCertificateExpiration",
-       "instance": "10.0.104.155:443",
-       "job": "apiserver",
-       "severity": "critical"
-     },
-     "annotations": {
-       "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
-       "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
-       "summary": "Client certificate is about to expire."
-     },
-     "state": "firing",
-     "activeAt": "2025-12-01T21:16:21.395442537Z",
-     "value": "5.2272e+04"
-   }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_26_09.404082.json DELETED
@@ -1,125 +0,0 @@
- [
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "warning"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.878641095890411e+04"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "critical"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.878641095890411e+04"
- },
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.2.27:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:02.996855383Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.2666666666666666)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.666666666666666e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 27.766666666666662)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.7766666666666662e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.4222222222222221)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "4.222222222222221e-01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:15:53.495039596Z",
- "value": "1e+00"
- }
- ]
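The `alerts_at_*.json` snapshots above are plain JSON arrays of Prometheus-style alert objects (`labels`, `annotations`, `state`, `activeAt`, `value`). As a minimal sketch of consuming one, assuming only the layout visible in the diff (the function name and local path are illustrative, not part of the dataset):

```python
import json
from collections import Counter

def summarize_snapshot(path: str) -> Counter:
    """Count firing alerts by severity in one alerts_at_*.json snapshot."""
    with open(path) as f:
        alerts = json.load(f)  # a bare JSON array of alert objects
    return Counter(
        alert["labels"].get("severity", "none")
        for alert in alerts
        if alert.get("state") == "firing"
    )

# Illustrative usage against the snapshot shown above:
# summarize_snapshot("alerts_at_2025-12-01T21_26_09.404082.json")
# -> Counter({"warning": 6, "critical": 1, "none": 1})
```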
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_27_09.404699.json DELETED
@@ -1,176 +0,0 @@
- [
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.2.27:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:02.996855383Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.28888888888888886)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.8888888888888886e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 23.84444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.384444444444444e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.43333333333333324)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "4.3333333333333324e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "ad",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service ad in namespace otel-demo is above 0 (current: 0.00911925925925926)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:25:24.914078592Z",
- "value": "9.11925925925926e-03"
- },
- {
- "labels": {
- "alertname": "TargetDown",
- "job": "otel-collector",
- "namespace": "otel-demo",
- "service": "otel-collector",
- "severity": "warning"
- },
- "annotations": {
- "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
- "summary": "One or more targets are unreachable."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:23.495039596Z",
- "value": "5e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:15:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.105.231:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-f82bv",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "warning"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "5.0693236363636366e+04"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "critical"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "5.0693236363636366e+04"
- }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_28_09.404331.json DELETED
@@ -1,204 +0,0 @@
- [
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.2.27:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:02.996855383Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 3804.4776119402964s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:24.914078592Z",
- "value": "3.8044776119402964e+03"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "email",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service email in namespace otel-demo is above 1500ms (current: 1961s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:24.914078592Z",
- "value": "1.961e+03"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "fraud-detection",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service fraud-detection in namespace otel-demo is above 0 (current: 0.00911925925925926)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:24.914078592Z",
- "value": "9.11925925925926e-03"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.1111111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "1.111111111111111e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 22.21111111111111)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.221111111111111e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.2222222222222222)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.222222222222222e-01"
- },
- {
- "labels": {
- "alertname": "TargetDown",
- "job": "otel-collector",
- "namespace": "otel-demo",
- "service": "otel-collector",
- "severity": "warning"
- },
- "annotations": {
- "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
- "summary": "One or more targets are unreachable."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:23.495039596Z",
- "value": "5e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:15:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.105.231:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-f82bv",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "warning"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.660875e+04"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "critical"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.660875e+04"
- }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_at_2025-12-01T21_29_09.404619.json DELETED
@@ -1,190 +0,0 @@
- [
- {
- "labels": {
- "alertname": "PrometheusNotConnectedToAlertmanagers",
- "container": "prometheus",
- "endpoint": "http-web",
- "instance": "100.96.2.27:9090",
- "job": "prometheus-kube-prometheus-prometheus",
- "namespace": "prometheus",
- "pod": "prometheus-prometheus-kube-prometheus-prometheus-0",
- "service": "prometheus-kube-prometheus-prometheus",
- "severity": "warning"
- },
- "annotations": {
- "description": "Prometheus prometheus/prometheus-prometheus-kube-prometheus-prometheus-0 is not connected to any Alertmanagers.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/prometheus/prometheusnotconnectedtoalertmanagers",
- "summary": "Prometheus is not connected to any Alertmanagers."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:02.996855383Z",
- "value": "0e+00"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "1.5e+04"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 3522.9838709677456s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:24.914078592Z",
- "value": "3.5229838709677456e+03"
- },
- {
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "email",
- "severity": "warning"
- },
- "annotations": {
- "description": "Latency in service email in namespace otel-demo is above 1500ms (current: 1663.4782608695637s)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:24.914078592Z",
- "value": "1.6634782608695637e+03"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.3555555555555555)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "3.555555555555555e-01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 29.24444444444444)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "2.924444444444444e+01"
- },
- {
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.7555555555555554)"
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "value": "7.555555555555554e-01"
- },
- {
- "labels": {
- "alertname": "TargetDown",
- "job": "otel-collector",
- "namespace": "otel-demo",
- "service": "otel-collector",
- "severity": "warning"
- },
- "annotations": {
- "description": "50% of the otel-collector/otel-collector targets in otel-demo namespace are down.",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/targetdown",
- "summary": "One or more targets are unreachable."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:23.495039596Z",
- "value": "5e+01"
- },
- {
- "labels": {
- "alertname": "Watchdog",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert meant to ensure that the entire alerting pipeline is functional.\nThis alert is always firing, therefore it should always be firing in Alertmanager\nand always fire against a receiver. There are integrations with various notification\nmechanisms that send a notification when this alert is not firing. For example the\n\"DeadMansSnitch\" integration in PagerDuty.\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/watchdog",
- "summary": "An alert that should always be firing to certify that Alertmanager is working properly."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:15:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "InfoInhibitor",
- "alertstate": "pending",
- "container": "recorder",
- "instance": "10.0.105.231:10250",
- "namespace": "data-recorders",
- "pod": "clickhouse-unified-recorder-f82bv",
- "service": "prometheus-kube-prometheus-kubelet",
- "severity": "none"
- },
- "annotations": {
- "description": "This is an alert that is used to inhibit info alerts.\nBy themselves, the info-level alerts are sometimes very noisy, but they are relevant when combined with\nother alerts.\nThis alert fires whenever there's a severity=\"info\" alert, and stops firing when another alert with a\nseverity of 'warning' or 'critical' starts firing on the same namespace.\nThis alert should be routed to a null receiver and configured to inhibit alerts with severity=\"info\".\n",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/general/infoinhibitor",
- "summary": "Info-level alert inhibition."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:26:53.495039596Z",
- "value": "1e+00"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "warning"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 7.0 days on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.64704e+04"
- },
- {
- "labels": {
- "alertname": "KubeClientCertificateExpiration",
- "instance": "10.0.104.155:443",
- "job": "apiserver",
- "severity": "critical"
- },
- "annotations": {
- "description": "A client certificate used to authenticate to kubernetes apiserver is expiring in less than 24.0 hours on cluster .",
- "runbook_url": "https://runbooks.prometheus-operator.dev/runbooks/kubernetes/kubeclientcertificateexpiration",
- "summary": "Client certificate is about to expire."
- },
- "state": "firing",
- "activeAt": "2025-12-01T21:16:21.395442537Z",
- "value": "4.64704e+04"
- }
- ]
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_in_alerting_state_2025-11-19T030519.802461Z.json DELETED
@@ -1,92 +0,0 @@
- {
- "data": {
- "alerts": [
- {
- "activeAt": "2025-11-19T02:56:55.247231189Z",
- "annotations": {
- "description": "Latency in service frontend in namespace otel-demo is above 1500ms (current: 1677.132352941176s)"
- },
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.677132352941176e+03"
- },
- {
- "activeAt": "2025-11-19T02:55:55.247231189Z",
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.5e+04"
- },
- {
- "activeAt": "2025-11-19T02:55:55.247231189Z",
- "annotations": {
- "description": "Latency in service checkout in namespace otel-demo is above 1500ms (current: 2863.7096774193437s)"
- },
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "state": "firing",
- "value": "2.8637096774193437e+03"
- },
- {
- "activeAt": "2025-11-19T02:56:55.247231189Z",
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 1.3555555555555554)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.3555555555555554e+00"
- },
- {
- "activeAt": "2025-11-19T02:55:55.247231189Z",
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 37.96666666666666)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "3.796666666666666e+01"
- },
- {
- "activeAt": "2025-11-19T02:55:55.247231189Z",
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 2.677777777777777)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "state": "firing",
- "value": "2.677777777777777e+00"
- }
- ]
- },
- "status": "success",
- "timestamp": "2025-11-19T03:05:19Z"
- }
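Note that the `alerts_in_alerting_state_*.json` files above use a different shape from the `alerts_at_*.json` snapshots: instead of a bare array, the alert list sits inside a `{"data": {"alerts": [...]}, "status": ..., "timestamp": ...}` envelope. A small sketch that normalizes both layouts (the helper name is illustrative):

```python
import json

def load_alerts(path: str) -> list[dict]:
    """Return the alert list from either snapshot layout in this dataset.

    alerts_at_*.json                 -> bare JSON array of alert objects
    alerts_in_alerting_state_*.json  -> {"data": {"alerts": [...]}, ...}
    """
    with open(path) as f:
        doc = json.load(f)
    if isinstance(doc, list):
        return doc
    return doc.get("data", {}).get("alerts", [])
```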
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/alerts/alerts_in_alerting_state_2025-12-01T212502.573985Z.json DELETED
@@ -1,64 +0,0 @@
- {
- "data": {
- "alerts": [
- {
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "annotations": {
- "description": "Latency in service frontend-proxy in namespace otel-demo is above 1500ms (current: 15000s)"
- },
- "labels": {
- "alertname": "RequestLatency",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "1.5e+04"
- },
- {
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "annotations": {
- "description": "Request error rate in service checkout in namespace otel-demo is above 0 (current: 0.5555555555555555)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "checkout",
- "severity": "warning"
- },
- "state": "firing",
- "value": "5.555555555555555e-01"
- },
- {
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "annotations": {
- "description": "Request error rate in service frontend-proxy in namespace otel-demo is above 0 (current: 27.188888888888886)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend-proxy",
- "severity": "warning"
- },
- "state": "firing",
- "value": "2.7188888888888886e+01"
- },
- {
- "activeAt": "2025-12-01T21:16:24.914078592Z",
- "annotations": {
- "description": "Request error rate in service frontend in namespace otel-demo is above 0 (current: 0.9999999999999999)"
- },
- "labels": {
- "alertname": "RequestErrorRate",
- "namespace": "otel-demo",
- "service_name": "frontend",
- "severity": "warning"
- },
- "state": "firing",
- "value": "9.999999999999999e-01"
- }
- ]
- },
- "status": "success",
- "timestamp": "2025-12-01T21:25:02Z"
- }
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/ground_truth.yaml DELETED
@@ -1,69 +0,0 @@
- ---
- metadata:
- version: "v1"
- fault:
- - entity:
- name: load-generator-pod-1
- group_id: load-generator-pod-1
- kind: Pod
- condition: To be specified
- category: Change
- fault_mechanism: custom
- alerts:
- - id: RequestErrorRate
- group_id: frontend-proxy-service-1
- metadata:
- description: Error rate is above threshold for frontend-proxy service
- - id: RequestLatency
- group_id: frontend-proxy-service-1
- metadata:
- description: Latency is above threshold for frontend-proxy service
- groups:
- - id: load-generator-pod-1
- kind: Pod
- filter:
- - load-generator-.*
- namespace: otel-demo
- root_cause: true
- - id: load-generator-service-1
- kind: Service
- filter:
- - load-generator\b
- namespace: otel-demo
- - id: frontend-proxy-service-1
- kind: Service
- filter:
- - frontend-proxy\b
- namespace: otel-demo
- - id: frontend-proxy-pod-1
- kind: Pod
- namespace: otel-demo
- filter:
- - frontend-proxy-.*
- aliases:
- - - load-generator-service-1
- - load-generator-pod-1
- - frontend-proxy-service-1
- - frontend-proxy-pod-1
- propagations:
- - source: load-generator-pod-1
- target: load-generator-service-1
- condition: load-generator pod is configured with a higher number of users
- effect: load-generator creates a high number of requests as a result
- - source: load-generator-service-1
- target: frontend-proxy-service-1
- condition: frontend-proxy service overloaded - request volume exceeds configured capacity
- effect: frontend-proxy service error rate is above threshold
- - source: load-generator-service-1
- target: frontend-proxy-service-1
- condition: frontend-proxy service overloaded - request volume exceeds configured capacity
- effect: frontend-proxy service latency is above threshold
- recommended_actions:
- - solution:
- id: deem_if_traffic_is_legit
- actions:
- - deem if traffic is legit
- - solution:
- id: set_up_autoscaler
- actions:
- - set up autoscaler
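The `groups` entries in `ground_truth.yaml` above identify entities by regex `filter` patterns scoped to a `kind` and `namespace`. A sketch of resolving an entity name to its group id under that reading (whether the filters are meant to be anchored or searched is an assumption; `re.search` is used here, and PyYAML availability is assumed):

```python
import re
import yaml  # assumption: PyYAML is available

def group_for(name: str, kind: str, namespace: str, path: str) -> str | None:
    """Map an entity (e.g. a Pod name) to a group id via ground_truth.yaml."""
    with open(path) as f:
        gt = yaml.safe_load(f)
    for group in gt.get("groups", []):
        if group.get("kind") != kind or group.get("namespace") != namespace:
            continue
        # Assumption: filters are unanchored regexes, matched with re.search.
        if any(re.search(pat, name) for pat in group.get("filter", [])):
            return group["id"]
    return None

# e.g. group_for("load-generator-abc12", "Pod", "otel-demo", "ground_truth.yaml")
# would match the "load-generator-.*" filter and return "load-generator-pod-1".
```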
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/k8s_events_otel-demo_chaos-mesh.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4a20737646e5f2211fbb5877708342a7a53381fe5672bf5b7bc457626062dcba
- size 50198
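Everything from here down is deleted Git LFS pointer files rather than inline data: each records only the LFS spec version, the sha256 `oid` of the real object, and its `size` in bytes. A minimal sketch of parsing one (this is the standard LFS pointer layout; the helper name is illustrative):

```python
def parse_lfs_pointer(text: str) -> dict[str, str]:
    """Split a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")  # "oid sha256:..." -> ("oid", "sha256:...")
        fields[key] = value
    return fields

# parse_lfs_pointer(open("k8s_events_otel-demo_chaos-mesh.tsv").read())
# -> {"version": "https://git-lfs.github.com/spec/v1",
#     "oid": "sha256:4a20737646e5...", "size": "50198"}
```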
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/k8s_objects_otel-demo_chaos-mesh.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e48eada3d30f3a268b373894fba4d279a1c3eedc6b5a40f1deb8ac4f499d0f31
- size 1144666
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_accounting-687b789684-679q7.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:62566126cfb6290747bf53a6918549a579bc4765a414aceb5f126a4dadf2d99c
- size 4126644
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_accounting-687b789684-lp2pr.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:21219b2e392d8aec71b209c2fd8dcd29263b19a023c8101419b7b20c5b536895
- size 4745502
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_ad-554b849958-5pgd4.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a20c55fb68af84e744031833f6e66af5f10056131f0fd7e7bfa22ede613a1b91
- size 4153563
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_ad-554b849958-gssfx.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:43af2899e114887949481388a2d6f2b795bdc9675f3760a165be23dba8bc9fca
- size 4851060
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_cart-9fd895bb7-4lx68.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:4a98427ccf44b1fcb1681557f766cffce79994dc445ab5d5c23f3c798e492c4a
- size 4029769
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_cart-9fd895bb7-82967.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:52b45e7d8ce72afb95c9535c13537bc2ff41e3fa3095a1f299adb694fb9338d4
- size 4626922
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_checkout-8546fdc74d-7m4dn.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:00ca2dbbda3192000fe127816401ca4a4d6f4b3eb772663fda42c5e59426d8fd
- size 4687128
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_checkout-8546fdc74d-zpplx.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:0b0a36cbbe3b5936593ab329521ac8c055ee03632482c41549b763dd9b8079cc
- size 4708288
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_currency-68d8484df8-dk45t.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b92109e17be4ec10a511847c4e3298426a42d93d8c16da1b2ebef726041fe875
- size 4810692
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_currency-68d8484df8-xb4rs.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d9976d2443fbacb28f4019d8aa5307fc790344f010953667a6c8ff164aa0e04c
- size 4239355
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_email-64ddb8d8d7-nbf2t.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:fb88a8da4fee4891c62ceb945b4356a6880c026d44015fb9502484c7bcf9006f
- size 4196443
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_email-64ddb8d8d7-xz5lv.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:87aea793cf2bf99fa0c35231173dfe76a48a0f5517bcdb4958c0e743983032c9
- size 4901242
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_flagd-6554fc5b8d-crn2h.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9b7751ebb926f24d0f71938bd99e0351dbafa2d1d7cb6faf798665df616ba2ce
- size 5493595
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_flagd-6554fc5b8d-tmt6g.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f8f2b70153d0dc065cb9e287a541d90d9dbc8570ee9b5bc4587b65c13e333054
- size 6284150
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_fraud-detection-886c99494-2gpr4.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:82bcbce9589d93f537fa51ae6f57e5605b1c34e8d6ad8e3dd9e6ba0d671abe0c
- size 4816594
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_fraud-detection-886c99494-s2knk.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:78336e68e1aee5b44ee17a49de9860a21f7b21bf9dfd24907a88468a5413fc18
- size 4792013
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-675fd7b5c5-gd8gl.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:ea5532c6b657ce7cb972b5afa5536109362b498698d53f7f60a9531807cd09d9
- size 4867480
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-675fd7b5c5-ks5z4.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2a8c3f7f0cacaa64869088679ccc03b8fbee9db4240da8c3e0e905ca296c5d42
- size 4952311
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-proxy-6b4d584985-6bl4q.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e5657c604cb4e8ae7a3df2b06a8ab70d2c74f3b1e37638860d82e6d1653df249
- size 4329501
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_frontend-proxy-6b4d584985-9sfgn.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2e61de8962931070274a2e7a414a0b60d6a10b9eb731990490a22595b83d5d82
- size 5057529
snapshots/sre/v0.1-ca9707b2-8b70-468b-a8f9-9658438f80b1/Scenario-1/metrics/pod_image-provider-55cfd9d7d6-2v48s.tsv DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:916857edb9fddb69cf9b4a3c98a3387a240b179b9f6f6cd32ab06a8c7487aef6
- size 4327913